use std::mem;

use interpret::ErrorHandled;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::HirId;
use rustc_index::{IndexSlice, IndexVec};
use rustc_middle::middle::region;
use rustc_middle::mir::{self, *};
use rustc_middle::thir::{AdtExpr, AdtExprBase, ArmId, ExprId, ExprKind, LintLevel};
use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt, ValTree};
use rustc_middle::{bug, span_bug};
use rustc_pattern_analysis::rustc::RustcPatCtxt;
use rustc_session::lint::Level;
use rustc_span::source_map::Spanned;
use rustc_span::{DUMMY_SP, Span};
use tracing::{debug, instrument};

use super::matches::BuiltMatchTree;
use crate::builder::{BlockAnd, BlockAndExtension, BlockFrame, Builder, CFG};
use crate::errors::{
    ConstContinueBadConst, ConstContinueNotMonomorphicConst, ConstContinueUnknownJumpTarget,
};

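/// The scope stack used while lowering a body to MIR.
///
/// Scopes track what needs to be dropped (and where storage ends) on every
/// path out of a region of code. As an illustrative sketch (`cond` is a
/// stand-in for an arbitrary condition), in
///
/// ```ignore (illustrative)
/// loop {
///     let x = String::new();
///     if cond { break; }
///     let y = String::new();
/// }
/// ```
///
/// the normal loop-back edge drops `y` then `x`, the `break` edge drops only
/// `x` (via the breakable scope's drop tree), and unwinding from a panic drops
/// whichever of `x`/`y` are initialized at that point (via `unwind_drops`).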
#[derive(Debug)]
pub(crate) struct Scopes<'tcx> {
    scopes: Vec<Scope>,

    /// The current set of breakable scopes, innermost last.
    breakable_scopes: Vec<BreakableScope<'tcx>>,

    /// Scopes of `#[loop_match]` blocks, targeted by `#[const_continue]`.
    const_continuable_scopes: Vec<ConstContinuableScope<'tcx>>,

    /// The scope of the innermost if-then currently being lowered.
    if_then_scope: Option<IfThenScope>,

    /// Drops that need to be done on unwind paths.
    unwind_drops: DropTree,

    /// Drops that need to be done on paths to the `CoroutineDrop` terminator.
    coroutine_drops: DropTree,
}

#[derive(Debug)]
struct Scope {
    /// The source scope this scope was created in.
    source_scope: SourceScope,

    /// The region span of this scope within source code.
    region_scope: region::Scope,

    /// Drops that need to be made when the scope is exited.
    drops: Vec<DropData>,

    /// Locals whose values have been moved out of this scope, so their drops
    /// on the non-unwind exit paths can be elided.
    moved_locals: Vec<Local>,

    /// Cached node in the unwind drop tree that starts this scope's drops on
    /// an unwind path.
    cached_unwind_block: Option<DropIdx>,

    /// Cached node in the coroutine drop tree that starts this scope's drops
    /// on a coroutine-drop path.
    cached_coroutine_drop_block: Option<DropIdx>,
}

#[derive(Clone, Copy, Debug)]
struct DropData {
    /// The `SourceInfo` the drop is generated from.
    source_info: SourceInfo,

    /// Local to drop.
    local: Local,

    /// Whether this is a value drop, a storage deallocation, or a lint-only hint.
    kind: DropKind,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) enum DropKind {
    Value,
    Storage,
    ForLint,
}

#[derive(Debug)]
struct BreakableScope<'tcx> {
    /// Region scope of the loop or block.
    region_scope: region::Scope,
    /// The destination of the loop/block expression itself (i.e., where the
    /// result of a `break` expression is stored).
    break_destination: Place<'tcx>,
    /// Drops that happen on the `break`/`return` path.
    break_drops: DropTree,
    /// Drops that happen on the `continue` path (`None` for blocks, which
    /// cannot be `continue`d).
    continue_drops: Option<DropTree>,
}

#[derive(Debug)]
struct ConstContinuableScope<'tcx> {
    /// Region scope of the `#[loop_match]` block.
    region_scope: region::Scope,
    /// The place holding the state of the `#[loop_match]`.
    state_place: Place<'tcx>,

    arms: Box<[ArmId]>,
    built_match_tree: BuiltMatchTree<'tcx>,

    /// Drops that happen on a `#[const_continue]`.
    const_continue_drops: DropTree,
}

#[derive(Debug)]
struct IfThenScope {
    /// The if-then scope or arm scope.
    region_scope: region::Scope,
    /// Drops that happen on the `else` path.
    else_drops: DropTree,
}

/// The target of an expression that breaks out of a scope.
#[derive(Clone, Copy, Debug)]
pub(crate) enum BreakableTarget {
    Continue(region::Scope),
    Break(region::Scope),
    Return,
}

rustc_index::newtype_index! {
    #[orderable]
    struct DropIdx {}
}

/// The sentinel root node of every drop tree: it represents the point reached
/// once all of the tree's drops have been performed.
const ROOT_NODE: DropIdx = DropIdx::ZERO;

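/// A tree of drops that we have deferred lowering.
///
/// Used for drops on unwind paths, on coroutine-drop paths, on `return` and
/// loop-exit paths, and on the `else` path of an `if let` chain. Each node's
/// `next` edge points towards the root, so exits that share a suffix of drops
/// share the corresponding subtree. As an illustrative sketch, two breaks out
/// of the same scope stack,
///
/// ```text
/// break 'a; // needs to drop `y`, then `x`
/// break 'a; // needs to drop `z`, then `x`
/// ```
///
/// become two leaf nodes (`y` and `z`) that both point at a shared `x` node,
/// whose `next` is the root. Once no more entry points can be added, the whole
/// tree is lowered to MIR in one go by `build_mir`.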
#[derive(Debug)]
struct DropTree {
    /// Nodes in the drop tree, containing drop data and a link to the next node.
    drop_nodes: IndexVec<DropIdx, DropNode>,
    /// Map for finding the index of an existing node, given its contents.
    existing_drops_map: FxHashMap<DropNodeKey, DropIdx>,
    /// Edges into the `DropTree` that need to be added once it's lowered.
    entry_points: Vec<(DropIdx, BasicBlock)>,
}

/// A single node in the drop tree.
#[derive(Debug)]
struct DropNode {
    /// Info about the drop to be performed at this node in the drop tree.
    data: DropData,
    /// Index of the "next" drop to perform (in drop order, not declaration order).
    next: DropIdx,
}

/// Subset of [`DropNode`] used for reverse lookup in a hash table.
#[derive(Debug, PartialEq, Eq, Hash)]
struct DropNodeKey {
    next: DropIdx,
    local: Local,
}

impl Scope {
    /// Whether there's anything to do for the cleanup path, that is, when
    /// unwinding through this scope. Storage deallocations don't count:
    /// no `StorageDead` is emitted on unwind paths.
    fn needs_cleanup(&self) -> bool {
        self.drops.iter().any(|drop| match drop.kind {
            DropKind::Value | DropKind::ForLint => true,
            DropKind::Storage => false,
        })
    }

    fn invalidate_cache(&mut self) {
        self.cached_unwind_block = None;
        self.cached_coroutine_drop_block = None;
    }
}

/// A trait that determines how a [`DropTree`] creates its blocks and links
/// entry points into the tree.
trait DropTreeBuilder<'tcx> {
    /// Create a new block for the tree. This should call either
    /// `cfg.start_new_block()` or `cfg.start_new_cleanup_block()`.
    fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock;

    /// Links a block outside the drop tree, `from`, to the block `to` inside
    /// the drop tree.
    fn link_entry_point(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock);
}

impl DropTree {
    fn new() -> Self {
        // The root node of the tree doesn't represent a drop, but instead
        // represents the block in the tree that should be jumped to once all
        // of the required drops have been performed.
        let fake_source_info = SourceInfo::outermost(DUMMY_SP);
        let fake_data =
            DropData { source_info: fake_source_info, local: Local::MAX, kind: DropKind::Storage };
        let drop_nodes = IndexVec::from_raw(vec![DropNode { data: fake_data, next: DropIdx::MAX }]);
        Self { drop_nodes, entry_points: Vec::new(), existing_drops_map: FxHashMap::default() }
    }

    /// Adds a node to the drop tree, consisting of drop data and the index of
    /// the "next" drop (in drop order), which could be the sentinel
    /// [`ROOT_NODE`]. If an equivalent node already exists in the tree, its
    /// index is returned instead of adding a new one.
    fn add_drop(&mut self, data: DropData, next: DropIdx) -> DropIdx {
        let drop_nodes = &mut self.drop_nodes;
        *self
            .existing_drops_map
            .entry(DropNodeKey { next, local: data.local })
            .or_insert_with(|| drop_nodes.push(DropNode { data, next }))
    }

    /// Registers `from` as an entry point to this drop tree, at `to`. During
    /// [`Self::build_mir`], `from` will be linked to the corresponding block
    /// within the drop tree.
    fn add_entry_point(&mut self, from: BasicBlock, to: DropIdx) {
        debug_assert!(to < self.drop_nodes.next_index());
        self.entry_points.push((to, from));
    }

    /// Builds the MIR for a given drop tree.
    fn build_mir<'tcx, T: DropTreeBuilder<'tcx>>(
        &mut self,
        cfg: &mut CFG<'tcx>,
        root_node: Option<BasicBlock>,
    ) -> IndexVec<DropIdx, Option<BasicBlock>> {
        debug!("DropTree::build_mir(drops = {:#?})", self);

        let mut blocks = self.assign_blocks::<T>(cfg, root_node);
        self.link_blocks(cfg, &mut blocks);

        blocks
    }

    /// Assigns blocks to all of the drops in the drop tree that need them.
    fn assign_blocks<'tcx, T: DropTreeBuilder<'tcx>>(
        &mut self,
        cfg: &mut CFG<'tcx>,
        root_node: Option<BasicBlock>,
    ) -> IndexVec<DropIdx, Option<BasicBlock>> {
        // `StorageDead` statements can share blocks with each other and can
        // also be included in the same block as a `Drop`. To achieve this, the
        // need for a block is modeled as a tri-state:
        #[derive(Clone, Copy)]
        enum Block {
            // This node is unreachable so far, and needs no block.
            None,
            // This node is only reachable through the node with the given
            // index, so it can emit its statement into that node's block.
            Shares(DropIdx),
            // This node is an entry point, has multiple predecessors, or
            // follows a `Value` drop, so it needs its own block.
            Own,
        }

        let mut blocks = IndexVec::from_elem(None, &self.drop_nodes);
        blocks[ROOT_NODE] = root_node;

        let mut needs_block = IndexVec::from_elem(Block::None, &self.drop_nodes);
        if root_node.is_some() {
            // In some cases (such as drops for `continue`) the root node
            // already has a block. In this case, make sure that we don't
            // override it.
            needs_block[ROOT_NODE] = Block::Own;
        }

        // Sort the entry points so that we only need to check the last one.
        let entry_points = &mut self.entry_points;
        entry_points.sort();

        for (drop_idx, drop_node) in self.drop_nodes.iter_enumerated().rev() {
            if entry_points.last().is_some_and(|entry_point| entry_point.0 == drop_idx) {
                let block = *blocks[drop_idx].get_or_insert_with(|| T::make_block(cfg));
                needs_block[drop_idx] = Block::Own;
                while entry_points.last().is_some_and(|entry_point| entry_point.0 == drop_idx) {
                    let entry_block = entry_points.pop().unwrap().1;
                    T::link_entry_point(cfg, entry_block, block);
                }
            }
            match needs_block[drop_idx] {
                Block::None => continue,
                Block::Own => {
                    blocks[drop_idx].get_or_insert_with(|| T::make_block(cfg));
                }
                Block::Shares(pred) => {
                    blocks[drop_idx] = blocks[pred];
                }
            }
            if let DropKind::Value = drop_node.data.kind {
                needs_block[drop_node.next] = Block::Own;
            } else if drop_idx != ROOT_NODE {
                match &mut needs_block[drop_node.next] {
                    pred @ Block::None => *pred = Block::Shares(drop_idx),
                    pred @ Block::Shares(_) => *pred = Block::Own,
                    Block::Own => (),
                }
            }
        }

        debug!("assign_blocks: blocks = {:#?}", blocks);
        assert!(entry_points.is_empty());

        blocks
    }

    fn link_blocks<'tcx>(
        &self,
        cfg: &mut CFG<'tcx>,
        blocks: &IndexSlice<DropIdx, Option<BasicBlock>>,
    ) {
        for (drop_idx, drop_node) in self.drop_nodes.iter_enumerated().rev() {
            let Some(block) = blocks[drop_idx] else { continue };
            match drop_node.data.kind {
                DropKind::Value => {
                    let terminator = TerminatorKind::Drop {
                        target: blocks[drop_node.next].unwrap(),
                        // The caller will handle this if needed.
                        unwind: UnwindAction::Terminate(UnwindTerminateReason::InCleanup),
                        place: drop_node.data.local.into(),
                        replace: false,
                        drop: None,
                        async_fut: None,
                    };
                    cfg.terminate(block, drop_node.data.source_info, terminator);
                }
                DropKind::ForLint => {
                    let stmt = Statement::new(
                        drop_node.data.source_info,
                        StatementKind::BackwardIncompatibleDropHint {
                            place: Box::new(drop_node.data.local.into()),
                            reason: BackwardIncompatibleDropReason::Edition2024,
                        },
                    );
                    cfg.push(block, stmt);
                    let target = blocks[drop_node.next].unwrap();
                    if target != block {
                        // Use a dummy span for the goto: the drop's own span
                        // would make debuggers appear to jump around.
                        let source_info =
                            SourceInfo { span: DUMMY_SP, ..drop_node.data.source_info };
                        let terminator = TerminatorKind::Goto { target };
                        cfg.terminate(block, source_info, terminator);
                    }
                }
                // Root nodes don't correspond to a drop.
                DropKind::Storage if drop_idx == ROOT_NODE => {}
                DropKind::Storage => {
                    let stmt = Statement::new(
                        drop_node.data.source_info,
                        StatementKind::StorageDead(drop_node.data.local),
                    );
                    cfg.push(block, stmt);
                    let target = blocks[drop_node.next].unwrap();
                    if target != block {
                        // See the comment on the `DropKind::ForLint` case above.
                        let source_info =
                            SourceInfo { span: DUMMY_SP, ..drop_node.data.source_info };
                        let terminator = TerminatorKind::Goto { target };
                        cfg.terminate(block, source_info, terminator);
                    }
                }
            }
        }
    }
}

impl<'tcx> Scopes<'tcx> {
    pub(crate) fn new() -> Self {
        Self {
            scopes: Vec::new(),
            breakable_scopes: Vec::new(),
            const_continuable_scopes: Vec::new(),
            if_then_scope: None,
            unwind_drops: DropTree::new(),
            coroutine_drops: DropTree::new(),
        }
    }

    fn push_scope(&mut self, region_scope: (region::Scope, SourceInfo), vis_scope: SourceScope) {
        debug!("push_scope({:?})", region_scope);
        self.scopes.push(Scope {
            source_scope: vis_scope,
            region_scope: region_scope.0,
            drops: vec![],
            moved_locals: vec![],
            cached_unwind_block: None,
            cached_coroutine_drop_block: None,
        });
    }

    fn pop_scope(&mut self, region_scope: (region::Scope, SourceInfo)) -> Scope {
        let scope = self.scopes.pop().unwrap();
        assert_eq!(scope.region_scope, region_scope.0);
        scope
    }

    fn scope_index(&self, region_scope: region::Scope, span: Span) -> usize {
        self.scopes
            .iter()
            .rposition(|scope| scope.region_scope == region_scope)
            .unwrap_or_else(|| span_bug!(span, "region_scope {:?} does not enclose", region_scope))
    }

    /// Returns the topmost active scope, which is known to be alive until
    /// the next scope expression.
    fn topmost(&self) -> region::Scope {
        self.scopes.last().expect("topmost_scope: no scopes present").region_scope
    }
}

impl<'a, 'tcx> Builder<'a, 'tcx> {
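    /// Start a breakable scope, which tracks where `continue`, `break` and
    /// `return` should branch to. As an illustrative sketch (`done` stands in
    /// for an arbitrary condition), when lowering
    ///
    /// ```ignore (illustrative)
    /// 'outer: loop {
    ///     let s = String::new();
    ///     if done { break 'outer; }
    /// }
    /// ```
    ///
    /// the `break` records its required drops (here `s`) in this scope's
    /// `break_drops` tree, and the exit edges are wired up once `f` returns.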
    pub(crate) fn in_breakable_scope<F>(
        &mut self,
        loop_block: Option<BasicBlock>,
        break_destination: Place<'tcx>,
        span: Span,
        f: F,
    ) -> BlockAnd<()>
    where
        F: FnOnce(&mut Builder<'a, 'tcx>) -> Option<BlockAnd<()>>,
    {
        let region_scope = self.scopes.topmost();
        let scope = BreakableScope {
            region_scope,
            break_destination,
            break_drops: DropTree::new(),
            continue_drops: loop_block.map(|_| DropTree::new()),
        };
        self.scopes.breakable_scopes.push(scope);
        let normal_exit_block = f(self);
        let breakable_scope = self.scopes.breakable_scopes.pop().unwrap();
        assert!(breakable_scope.region_scope == region_scope);
        let break_block =
            self.build_exit_tree(breakable_scope.break_drops, region_scope, span, None);
        if let Some(drops) = breakable_scope.continue_drops {
            self.build_exit_tree(drops, region_scope, span, loop_block);
        }
        match (normal_exit_block, break_block) {
            (Some(block), None) | (None, Some(block)) => block,
            (None, None) => self.cfg.start_new_block().unit(),
            (Some(normal_block), Some(exit_block)) => {
                let target = self.cfg.start_new_block();
                let source_info = self.source_info(span);
                self.cfg.terminate(
                    normal_block.into_block(),
                    source_info,
                    TerminatorKind::Goto { target },
                );
                self.cfg.terminate(
                    exit_block.into_block(),
                    source_info,
                    TerminatorKind::Goto { target },
                );
                target.unit()
            }
        }
    }

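    /// Start a const-continuable scope, the target of a `#[const_continue]`
    /// inside a `#[loop_match]`. As an illustrative sketch of the unstable
    /// `#[loop_match]` feature:
    ///
    /// ```ignore (illustrative)
    /// #[loop_match]
    /// loop {
    ///     state = 'blk: {
    ///         match state {
    ///             State::A => {
    ///                 #[const_continue]
    ///                 break 'blk State::B;
    ///             }
    ///             State::B => break,
    ///         }
    ///     };
    /// }
    /// ```
    ///
    /// A `#[const_continue]` breaks to this scope with a constant value; the
    /// match on that value is resolved at build time and lowered to a direct
    /// jump to the corresponding arm.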
    pub(crate) fn in_const_continuable_scope<F>(
        &mut self,
        arms: Box<[ArmId]>,
        built_match_tree: BuiltMatchTree<'tcx>,
        state_place: Place<'tcx>,
        span: Span,
        f: F,
    ) -> BlockAnd<()>
    where
        F: FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<()>,
    {
        let region_scope = self.scopes.topmost();
        let scope = ConstContinuableScope {
            region_scope,
            state_place,
            const_continue_drops: DropTree::new(),
            arms,
            built_match_tree,
        };
        self.scopes.const_continuable_scopes.push(scope);
        let normal_exit_block = f(self);
        let const_continue_scope = self.scopes.const_continuable_scopes.pop().unwrap();
        assert!(const_continue_scope.region_scope == region_scope);

        let break_block = self.build_exit_tree(
            const_continue_scope.const_continue_drops,
            region_scope,
            span,
            None,
        );

        match (normal_exit_block, break_block) {
            (block, None) => block,
            (normal_block, Some(exit_block)) => {
                let target = self.cfg.start_new_block();
                let source_info = self.source_info(span);
                self.cfg.terminate(
                    normal_block.into_block(),
                    source_info,
                    TerminatorKind::Goto { target },
                );
                self.cfg.terminate(
                    exit_block.into_block(),
                    source_info,
                    TerminatorKind::Goto { target },
                );
                target.unit()
            }
        }
    }

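    /// Start an if-then scope, which tracks drops for `if` expressions and
    /// `if` guards. For an if-let chain such as
    ///
    /// ```ignore (illustrative)
    /// if let Some(x) = a && let Some(y) = b && let Some(z) = c { /* ... */ }
    /// ```
    ///
    /// there are three possible ways the condition can be false, and we may
    /// have to drop `x`, `x` and `y`, or neither, depending on which binding
    /// fails. To handle this correctly we use a `DropTree`, in a similar way
    /// to a `loop` expression, and "break" out on all of the else paths.
    ///
    /// This is also used for match guards.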
    pub(crate) fn in_if_then_scope<F>(
        &mut self,
        region_scope: region::Scope,
        span: Span,
        f: F,
    ) -> (BasicBlock, BasicBlock)
    where
        F: FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<()>,
    {
        let scope = IfThenScope { region_scope, else_drops: DropTree::new() };
        let previous_scope = mem::replace(&mut self.scopes.if_then_scope, Some(scope));

        let then_block = f(self).into_block();

        let if_then_scope = mem::replace(&mut self.scopes.if_then_scope, previous_scope).unwrap();
        assert!(if_then_scope.region_scope == region_scope);

        let else_block =
            self.build_exit_tree(if_then_scope.else_drops, region_scope, span, None).map_or_else(
                || self.cfg.start_new_block(),
                |else_block_and| else_block_and.into_block(),
            );

        (then_block, else_block)
    }

    /// Convenience wrapper that pushes a scope and then executes `f` to build
    /// its contents, popping the scope afterwards.
    #[instrument(skip(self, f), level = "debug")]
    pub(crate) fn in_scope<F, R>(
        &mut self,
        region_scope: (region::Scope, SourceInfo),
        lint_level: LintLevel,
        f: F,
    ) -> BlockAnd<R>
    where
        F: FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<R>,
    {
        let source_scope = self.source_scope;
        if let LintLevel::Explicit(current_hir_id) = lint_level {
            let parent_id =
                self.source_scopes[source_scope].local_data.as_ref().unwrap_crate_local().lint_root;
            self.maybe_new_source_scope(region_scope.1.span, current_hir_id, parent_id);
        }
        self.push_scope(region_scope);
        let mut block;
        let rv = unpack!(block = f(self));
        block = self.pop_scope(region_scope, block).into_block();
        self.source_scope = source_scope;
        debug!(?block);
        block.and(rv)
    }

    /// Push a scope onto the stack. You can then build code in this scope and
    /// call `pop_scope` afterwards. Note that these two calls must be paired;
    /// using `in_scope` as a convenience wrapper may be preferable.
    pub(crate) fn push_scope(&mut self, region_scope: (region::Scope, SourceInfo)) {
        self.scopes.push_scope(region_scope, self.source_scope);
    }

    /// Pops a scope, which should have region scope `region_scope`, adding any
    /// drops onto the end of `block` that are needed. This must match
    /// one-to-one with `push_scope`.
    pub(crate) fn pop_scope(
        &mut self,
        region_scope: (region::Scope, SourceInfo),
        mut block: BasicBlock,
    ) -> BlockAnd<()> {
        debug!("pop_scope({:?}, {:?})", region_scope, block);

        block = self.leave_top_scope(block);

        self.scopes.pop_scope(region_scope);

        block.unit()
    }

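    /// Sets up the drops for breaking from `block` due to a `break`,
    /// `continue` or `return` expression. As an illustrative sketch (`cond`
    /// stands in for an arbitrary condition), in
    ///
    /// ```ignore (illustrative)
    /// let r = 'blk: {
    ///     let s = String::new();
    ///     if cond { break 'blk 1 } // writes `1` to `r`, drops `s`, exits 'blk
    ///     2
    /// };
    /// ```
    ///
    /// the `break` first evaluates its operand into the break destination,
    /// then records an entry point into the breakable scope's drop tree for
    /// every drop scheduled between here and the target scope.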
    pub(crate) fn break_scope(
        &mut self,
        mut block: BasicBlock,
        value: Option<ExprId>,
        target: BreakableTarget,
        source_info: SourceInfo,
    ) -> BlockAnd<()> {
        let span = source_info.span;

        let get_scope_index = |scope: region::Scope| {
            // Find the breakable scope by its `region::Scope`.
            self.scopes
                .breakable_scopes
                .iter()
                .rposition(|breakable_scope| breakable_scope.region_scope == scope)
                .unwrap_or_else(|| span_bug!(span, "no enclosing breakable scope found"))
        };
        let (break_index, destination) = match target {
            BreakableTarget::Return => {
                let scope = &self.scopes.breakable_scopes[0];
                if scope.break_destination != Place::return_place() {
                    span_bug!(span, "`return` in item with no return scope");
                }
                (0, Some(scope.break_destination))
            }
            BreakableTarget::Break(scope) => {
                let break_index = get_scope_index(scope);
                let scope = &self.scopes.breakable_scopes[break_index];
                (break_index, Some(scope.break_destination))
            }
            BreakableTarget::Continue(scope) => {
                let break_index = get_scope_index(scope);
                (break_index, None)
            }
        };

        match (destination, value) {
            (Some(destination), Some(value)) => {
                debug!("stmt_expr Break val block_context.push(SubExpr)");
                self.block_context.push(BlockFrame::SubExpr);
                block = self.expr_into_dest(destination, block, value).into_block();
                self.block_context.pop();
            }
            (Some(destination), None) => {
                self.cfg.push_assign_unit(block, source_info, destination, self.tcx)
            }
            (None, Some(_)) => {
                panic!("`return`, `become` and `break` with a value must have a destination")
            }
            (None, None) => {
                if self.tcx.sess.instrument_coverage() {
                    // Normally we wouldn't build any MIR in this case, but that makes it
                    // harder for coverage instrumentation to extract a relevant span for
                    // `continue` expressions. So here we inject a dummy statement with the
                    // desired span.
                    self.cfg.push_coverage_span_marker(block, source_info);
                }
            }
        }

        let region_scope = self.scopes.breakable_scopes[break_index].region_scope;
        let scope_index = self.scopes.scope_index(region_scope, span);
        let drops = if destination.is_some() {
            &mut self.scopes.breakable_scopes[break_index].break_drops
        } else {
            let Some(drops) = self.scopes.breakable_scopes[break_index].continue_drops.as_mut()
            else {
                self.tcx.dcx().span_delayed_bug(
                    source_info.span,
                    "unlabelled `continue` within labelled block",
                );
                self.cfg.terminate(block, source_info, TerminatorKind::Unreachable);

                return self.cfg.start_new_block().unit();
            };
            drops
        };

        let mut drop_idx = ROOT_NODE;
        for scope in &self.scopes.scopes[scope_index + 1..] {
            for drop in &scope.drops {
                drop_idx = drops.add_drop(*drop, drop_idx);
            }
        }
        drops.add_entry_point(block, drop_idx);

        // `build_exit_tree` doesn't have access to our source_info, so we
        // create a dummy terminator now. `TerminatorKind::UnwindResume` is
        // used because MIR type checking will panic if it hasn't been
        // overwritten. (See `<ExitScopes as DropTreeBuilder>::link_entry_point`.)
        self.cfg.terminate(block, source_info, TerminatorKind::UnwindResume);

        self.cfg.start_new_block().unit()
    }

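    /// Evaluates the given constant operand to a valtree, for use by
    /// [`Self::break_const_continuable_scope`]. Bugs out on constant kinds
    /// that cannot appear as a `#[const_continue]` value.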
    fn eval_unevaluated_mir_constant_to_valtree(
        &self,
        constant: ConstOperand<'tcx>,
    ) -> Result<(ty::ValTree<'tcx>, Ty<'tcx>), interpret::ErrorHandled> {
        assert!(!constant.const_.ty().has_param());
        let (uv, ty) = match constant.const_ {
            mir::Const::Unevaluated(uv, ty) => (uv.shrink(), ty),
            mir::Const::Ty(_, c) => match c.kind() {
                // An already-evaluated type-level constant: use its valtree directly.
                ty::ConstKind::Value(cv) => return Ok((cv.valtree, cv.ty)),
                other => span_bug!(constant.span, "{other:#?}"),
            },
            mir::Const::Val(mir::ConstValue::Scalar(mir::interpret::Scalar::Int(val)), ty) => {
                return Ok((ValTree::from_scalar_int(self.tcx, val), ty));
            }
            other => span_bug!(constant.span, "{other:#?}"),
        };

        match self.tcx.const_eval_resolve_for_typeck(self.typing_env(), uv, constant.span) {
            Ok(Ok(valtree)) => Ok((valtree, ty)),
            Ok(Err(ty)) => span_bug!(constant.span, "could not convert {ty:?} to a valtree"),
            Err(e) => Err(e),
        }
    }

    /// Sets up the drops for jumping from `block` to `scope` for a
    /// `#[const_continue]`.
    pub(crate) fn break_const_continuable_scope(
        &mut self,
        mut block: BasicBlock,
        value: ExprId,
        scope: region::Scope,
        source_info: SourceInfo,
    ) -> BlockAnd<()> {
        let span = source_info.span;

        // A break can only break out of a scope, so the value should be a scope.
        let rustc_middle::thir::ExprKind::Scope { value, .. } = self.thir[value].kind else {
            span_bug!(span, "break value must be a scope")
        };

        let expr = &self.thir[value];
        let constant = match &expr.kind {
            ExprKind::Adt(box AdtExpr { variant_index, fields, base, .. }) => {
                assert!(matches!(base, AdtExprBase::None));
                assert!(fields.is_empty());
                ConstOperand {
                    span: self.thir[value].span,
                    user_ty: None,
                    const_: Const::Ty(
                        self.thir[value].ty,
                        ty::Const::new_value(
                            self.tcx,
                            ValTree::from_branches(
                                self.tcx,
                                [ValTree::from_scalar_int(self.tcx, variant_index.as_u32().into())],
                            ),
                            self.thir[value].ty,
                        ),
                    ),
                }
            }

            ExprKind::Literal { .. }
            | ExprKind::NonHirLiteral { .. }
            | ExprKind::ZstLiteral { .. }
            | ExprKind::NamedConst { .. } => self.as_constant(&self.thir[value]),

            other => {
                use crate::errors::ConstContinueNotMonomorphicConstReason as Reason;

                let span = expr.span;
                let reason = match other {
                    ExprKind::ConstParam { .. } => Reason::ConstantParameter { span },
                    ExprKind::ConstBlock { .. } => Reason::ConstBlock { span },
                    _ => Reason::Other { span },
                };

                self.tcx
                    .dcx()
                    .emit_err(ConstContinueNotMonomorphicConst { span: expr.span, reason });
                return block.unit();
            }
        };

        let break_index = self
            .scopes
            .const_continuable_scopes
            .iter()
            .rposition(|const_continuable_scope| const_continuable_scope.region_scope == scope)
            .unwrap_or_else(|| span_bug!(span, "no enclosing const-continuable scope found"));

        let scope = &self.scopes.const_continuable_scopes[break_index];

        let state_decl = &self.local_decls[scope.state_place.as_local().unwrap()];
        let state_ty = state_decl.ty;
        let (discriminant_ty, rvalue) = match state_ty.kind() {
            ty::Adt(adt_def, _) if adt_def.is_enum() => {
                (state_ty.discriminant_ty(self.tcx), Rvalue::Discriminant(scope.state_place))
            }
            ty::Uint(_) | ty::Int(_) | ty::Float(_) | ty::Bool | ty::Char => {
                (state_ty, Rvalue::Use(Operand::Copy(scope.state_place)))
            }
            _ => span_bug!(state_decl.source_info.span, "unsupported #[loop_match] state"),
        };

        // `RustcPatCtxt` is normally used in pattern exhaustiveness checking,
        // but it is reused here because it performs normalization and const
        // evaluation.
        let dropless_arena = rustc_arena::DroplessArena::default();
        let typeck_results = self.tcx.typeck(self.def_id);
        let cx = RustcPatCtxt {
            tcx: self.tcx,
            typeck_results,
            module: self.tcx.parent_module(self.hir_id).to_def_id(),
            typing_env: rustc_middle::ty::TypingEnv::non_body_analysis(self.tcx, self.def_id),
            dropless_arena: &dropless_arena,
            match_lint_level: self.hir_id,
            whole_match_span: Some(rustc_span::Span::default()),
            scrut_span: rustc_span::Span::default(),
            refutable: true,
            known_valid_scrutinee: true,
            internal_state: Default::default(),
        };

        let valtree = match self.eval_unevaluated_mir_constant_to_valtree(constant) {
            Ok((valtree, ty)) => {
                // The value must be fully monomorphic at this point.
                assert!(!ty.has_param());

                valtree
            }
            Err(ErrorHandled::Reported(..)) => {
                return block.unit();
            }
            Err(ErrorHandled::TooGeneric(_)) => {
                self.tcx.dcx().emit_fatal(ConstContinueBadConst { span: constant.span });
            }
        };

        let Some(real_target) =
            self.static_pattern_match(&cx, valtree, &*scope.arms, &scope.built_match_tree)
        else {
            self.tcx.dcx().emit_fatal(ConstContinueUnknownJumpTarget { span })
        };

        self.block_context.push(BlockFrame::SubExpr);
        let state_place = scope.state_place;
        block = self.expr_into_dest(state_place, block, value).into_block();
        self.block_context.pop();

        let discr = self.temp(discriminant_ty, source_info.span);
        let scope_index = self
            .scopes
            .scope_index(self.scopes.const_continuable_scopes[break_index].region_scope, span);
        let scope = &mut self.scopes.const_continuable_scopes[break_index];
        self.cfg.push_assign(block, source_info, discr, rvalue);
        let drop_and_continue_block = self.cfg.start_new_block();
        let imaginary_target = self.cfg.start_new_block();
        self.cfg.terminate(
            block,
            source_info,
            TerminatorKind::FalseEdge { real_target: drop_and_continue_block, imaginary_target },
        );

        let drops = &mut scope.const_continue_drops;

        let drop_idx = self.scopes.scopes[scope_index + 1..]
            .iter()
            .flat_map(|scope| &scope.drops)
            .fold(ROOT_NODE, |drop_idx, &drop| drops.add_drop(drop, drop_idx));

        drops.add_entry_point(imaginary_target, drop_idx);

        // A dummy terminator, overwritten when the drop tree is lowered.
        self.cfg.terminate(imaginary_target, source_info, TerminatorKind::UnwindResume);

        let region_scope = scope.region_scope;
        let scope_index = self.scopes.scope_index(region_scope, span);
        let mut drops = DropTree::new();

        let drop_idx = self.scopes.scopes[scope_index + 1..]
            .iter()
            .flat_map(|scope| &scope.drops)
            .fold(ROOT_NODE, |drop_idx, &drop| drops.add_drop(drop, drop_idx));

        drops.add_entry_point(drop_and_continue_block, drop_idx);

        // `build_exit_tree` doesn't have access to our source_info, so we
        // create a dummy terminator now. `TerminatorKind::UnwindResume` is
        // used because MIR type checking will panic if it hasn't been
        // overwritten. (See `<ExitScopes as DropTreeBuilder>::link_entry_point`.)
        self.cfg.terminate(drop_and_continue_block, source_info, TerminatorKind::UnwindResume);

        self.build_exit_tree(drops, region_scope, span, Some(real_target));

        return self.cfg.start_new_block().unit();
    }

    /// Sets up the drops for breaking from `block` due to an `if` condition
    /// that turned out to be false.
    ///
    /// Must be called in the context of [`Builder::in_if_then_scope`], so that
    /// there is an if-then scope to tell us what the target scope is.
    pub(crate) fn break_for_else(&mut self, block: BasicBlock, source_info: SourceInfo) {
        let if_then_scope = self
            .scopes
            .if_then_scope
            .as_ref()
            .unwrap_or_else(|| span_bug!(source_info.span, "no if-then scope found"));

        let target = if_then_scope.region_scope;
        let scope_index = self.scopes.scope_index(target, source_info.span);

        // Upgrade `if_then_scope` to `&mut`.
        let if_then_scope = self.scopes.if_then_scope.as_mut().expect("upgrading & to &mut");

        let mut drop_idx = ROOT_NODE;
        let drops = &mut if_then_scope.else_drops;
        for scope in &self.scopes.scopes[scope_index + 1..] {
            for drop in &scope.drops {
                drop_idx = drops.add_drop(*drop, drop_idx);
            }
        }
        drops.add_entry_point(block, drop_idx);

        // `build_exit_tree` doesn't have access to our source_info, so we
        // create a dummy terminator now. `TerminatorKind::UnwindResume` is
        // used because MIR type checking will panic if it hasn't been
        // overwritten. (See `<ExitScopes as DropTreeBuilder>::link_entry_point`.)
        self.cfg.terminate(block, source_info, TerminatorKind::UnwindResume);
    }

    /// Sets up the drops for explicit tail calls.
    ///
    /// Unlike other kinds of early exits, tail calls do not go through the
    /// drop tree. Instead, all scheduled drops are immediately added to the
    /// CFG.
    pub(crate) fn break_for_tail_call(
        &mut self,
        mut block: BasicBlock,
        args: &[Spanned<Operand<'tcx>>],
        source_info: SourceInfo,
    ) -> BlockAnd<()> {
        let arg_drops: Vec<_> = args
            .iter()
            .rev()
            .filter_map(|arg| match &arg.node {
                Operand::Copy(_) => bug!("copy op in tail call args"),
                Operand::Move(place) => {
                    let local =
                        place.as_local().unwrap_or_else(|| bug!("projection in tail call args"));

                    if !self.local_decls[local].ty.needs_drop(self.tcx, self.typing_env()) {
                        return None;
                    }

                    Some(DropData { source_info, local, kind: DropKind::Value })
                }
                Operand::Constant(_) => None,
            })
            .collect();

        let mut unwind_to = self.diverge_cleanup_target(
            self.scopes.scopes.iter().rev().nth(1).unwrap().region_scope,
            DUMMY_SP,
        );
        let typing_env = self.typing_env();
        let unwind_drops = &mut self.scopes.unwind_drops;

        // The innermost scope contains only the destructors for the tail call
        // arguments. We only want to drop these in case of a panic, so we skip it.
        for scope in self.scopes.scopes[1..].iter().rev().skip(1) {
            for drop_data in scope.drops.iter().rev() {
                let source_info = drop_data.source_info;
                let local = drop_data.local;

                if !self.local_decls[local].ty.needs_drop(self.tcx, typing_env) {
                    continue;
                }

                match drop_data.kind {
                    DropKind::Value => {
                        // `unwind_to` should drop the value that we're about to
                        // schedule. If dropping this value panics, then we continue
                        // with the *next* value on the unwind path.
                        debug_assert_eq!(
                            unwind_drops.drop_nodes[unwind_to].data.local,
                            drop_data.local
                        );
                        debug_assert_eq!(
                            unwind_drops.drop_nodes[unwind_to].data.kind,
                            drop_data.kind
                        );
                        unwind_to = unwind_drops.drop_nodes[unwind_to].next;

                        let mut unwind_entry_point = unwind_to;

                        // The tail call arguments must be dropped if any of these drops panic.
                        for drop in arg_drops.iter().copied() {
                            unwind_entry_point = unwind_drops.add_drop(drop, unwind_entry_point);
                        }

                        unwind_drops.add_entry_point(block, unwind_entry_point);

                        let next = self.cfg.start_new_block();
                        self.cfg.terminate(
                            block,
                            source_info,
                            TerminatorKind::Drop {
                                place: local.into(),
                                target: next,
                                unwind: UnwindAction::Continue,
                                replace: false,
                                drop: None,
                                async_fut: None,
                            },
                        );
                        block = next;
                    }
                    DropKind::ForLint => {
                        self.cfg.push(
                            block,
                            Statement::new(
                                source_info,
                                StatementKind::BackwardIncompatibleDropHint {
                                    place: Box::new(local.into()),
                                    reason: BackwardIncompatibleDropReason::Edition2024,
                                },
                            ),
                        );
                    }
                    DropKind::Storage => {
                        // Only temps and vars need their storage dead.
                        assert!(local.index() > self.arg_count);
                        self.cfg.push(
                            block,
                            Statement::new(source_info, StatementKind::StorageDead(local)),
                        );
                    }
                }
            }
        }

        block.unit()
    }

    fn is_async_drop_impl(
        tcx: TyCtxt<'tcx>,
        local_decls: &IndexVec<Local, LocalDecl<'tcx>>,
        typing_env: ty::TypingEnv<'tcx>,
        local: Local,
    ) -> bool {
        let ty = local_decls[local].ty;
        if ty.is_async_drop(tcx, typing_env) || ty.is_coroutine() {
            return true;
        }
        ty.needs_async_drop(tcx, typing_env)
    }

    fn is_async_drop(&self, local: Local) -> bool {
        Self::is_async_drop_impl(self.tcx, &self.local_decls, self.typing_env(), local)
    }

    fn leave_top_scope(&mut self, block: BasicBlock) -> BasicBlock {
        // If we are emitting a `drop` statement, we need to have the cached
        // diverge cleanup pads ready in case that drop panics.
        let needs_cleanup = self.scopes.scopes.last().is_some_and(|scope| scope.needs_cleanup());
        let is_coroutine = self.coroutine.is_some();
        let unwind_to = if needs_cleanup { self.diverge_cleanup() } else { DropIdx::MAX };

        let scope = self.scopes.scopes.last().expect("leave_top_scope called with no scopes");
        let has_async_drops = is_coroutine
            && scope.drops.iter().any(|v| v.kind == DropKind::Value && self.is_async_drop(v.local));
        let dropline_to = if has_async_drops { Some(self.diverge_dropline()) } else { None };
        let scope = self.scopes.scopes.last().expect("leave_top_scope called with no scopes");
        let typing_env = self.typing_env();
        build_scope_drops(
            &mut self.cfg,
            &mut self.scopes.unwind_drops,
            &mut self.scopes.coroutine_drops,
            scope,
            block,
            unwind_to,
            dropline_to,
            is_coroutine && needs_cleanup,
            self.arg_count,
            |v: Local| Self::is_async_drop_impl(self.tcx, &self.local_decls, typing_env, v),
        )
        .into_block()
    }

    /// Possibly creates a new source scope if `current_root` and
    /// `parent_root` would differ, or if `-Zmaximal-hir-to-mir-coverage` is
    /// enabled.
    pub(crate) fn maybe_new_source_scope(
        &mut self,
        span: Span,
        current_id: HirId,
        parent_id: HirId,
    ) {
        let (current_root, parent_root) =
            if self.tcx.sess.opts.unstable_opts.maximal_hir_to_mir_coverage {
                // Some consumers of rustc need to map MIR locations back to HIR nodes.
                // The `-Zmaximal-hir-to-mir-coverage` flag maximizes the number of
                // source scopes, increasing the granularity of the MIR->HIR mapping
                // via each scope's lint root.
                (current_id, parent_id)
            } else {
                // Use `maybe_lint_level_root_bounded` to estimate the true lint
                // roots, so that we only create source scopes where lint levels
                // can actually change.
                (
                    self.maybe_lint_level_root_bounded(current_id),
                    if parent_id == self.hir_id {
                        // This is a very common case: avoid the walk.
                        parent_id
                    } else {
                        self.maybe_lint_level_root_bounded(parent_id)
                    },
                )
            };

        if current_root != parent_root {
            let lint_level = LintLevel::Explicit(current_root);
            self.source_scope = self.new_source_scope(span, lint_level);
        }
    }

    /// Walks upwards from `orig_id` to find a node which might change lint
    /// levels with attributes. It stops at `self.hir_id` and just returns it
    /// if reached.
    fn maybe_lint_level_root_bounded(&mut self, orig_id: HirId) -> HirId {
        // This assertion lets us just store `ItemLocalId` in the cache, rather
        // than the full `HirId`.
        assert_eq!(orig_id.owner, self.hir_id.owner);

        let mut id = orig_id;
        loop {
            if id == self.hir_id {
                break;
            }

            if self.tcx.hir_attrs(id).iter().any(|attr| Level::from_attr(attr).is_some()) {
                // A node with a lint-level attribute between `orig_id` and the
                // root: it is the lint root. This result is not cached.
                return id;
            }

            let next = self.tcx.parent_hir_id(id);
            if next == id {
                bug!("lint traversal reached the root of the crate");
            }
            id = next;

            // This lookup is just an optimization; the traversal is correct
            // without it. Nodes already known to have `self.hir_id` as their
            // lint root let us stop early.
            if self.lint_level_roots_cache.contains(id.local_id) {
                break;
            }
        }

        // Cache the fact that `orig_id`'s lint root is `self.hir_id`, then
        // return it.
        self.lint_level_roots_cache.insert(orig_id.local_id);
        self.hir_id
    }

    /// Creates a new source scope, nested in the current one.
    pub(crate) fn new_source_scope(&mut self, span: Span, lint_level: LintLevel) -> SourceScope {
        let parent = self.source_scope;
        debug!(
            "new_source_scope({:?}, {:?}) - parent({:?})={:?}",
            span,
            lint_level,
            parent,
            self.source_scopes.get(parent)
        );
        let scope_local_data = SourceScopeLocalData {
            lint_root: if let LintLevel::Explicit(lint_root) = lint_level {
                lint_root
            } else {
                self.source_scopes[parent].local_data.as_ref().unwrap_crate_local().lint_root
            },
        };
        self.source_scopes.push(SourceScopeData {
            span,
            parent_scope: Some(parent),
            inlined: None,
            inlined_parent_scope: None,
            local_data: ClearCrossCrate::Set(scope_local_data),
        })
    }

    /// Given a span and the current source scope, make a SourceInfo.
    pub(crate) fn source_info(&self, span: Span) -> SourceInfo {
        SourceInfo { span, scope: self.source_scope }
    }

    /// Returns the scope that we should use as the lifetime of an operand;
    /// basically, an operand must live until it is consumed.
    pub(crate) fn local_scope(&self) -> region::Scope {
        self.scopes.topmost()
    }

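    /// Indicates that `local` should be dropped on exiting `region_scope`,
    /// scheduling both the storage deallocation and the value drop.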
    pub(crate) fn schedule_drop_storage_and_value(
        &mut self,
        span: Span,
        region_scope: region::Scope,
        local: Local,
    ) {
        self.schedule_drop(span, region_scope, local, DropKind::Storage);
        self.schedule_drop(span, region_scope, local, DropKind::Value);
    }

    /// Indicates that `local` should be dropped on exiting `region_scope`.
    pub(crate) fn schedule_drop(
        &mut self,
        span: Span,
        region_scope: region::Scope,
        local: Local,
        drop_kind: DropKind,
    ) {
        let needs_drop = match drop_kind {
            DropKind::Value | DropKind::ForLint => {
                if !self.local_decls[local].ty.needs_drop(self.tcx, self.typing_env()) {
                    return;
                }
                true
            }
            DropKind::Storage => {
                if local.index() <= self.arg_count {
                    span_bug!(
                        span,
                        "`schedule_drop` called with body argument {:?} \
                        but its storage does not require a drop",
                        local,
                    )
                }
                false
            }
        };

        // Scheduling a new drop invalidates the cached drop-tree blocks of the
        // scopes we cross: their unwind paths now have to include this drop.
        // When building a coroutine, `StorageDead` also appears on the
        // coroutine-drop path, so caches must be invalidated in that case too.
        let invalidate_caches = needs_drop || self.coroutine.is_some();
        for scope in self.scopes.scopes.iter_mut().rev() {
            if invalidate_caches {
                scope.invalidate_cache();
            }

            if scope.region_scope == region_scope {
                let region_scope_span = region_scope.span(self.tcx, self.region_scope_tree);
                // Attribute scope exit drops to the scope's closing brace.
                let scope_end = self.tcx.sess.source_map().end_point(region_scope_span);

                scope.drops.push(DropData {
                    source_info: SourceInfo { span: scope_end, scope: scope.source_scope },
                    local,
                    kind: drop_kind,
                });

                return;
            }
        }

        span_bug!(span, "region scope {:?} not in scope to drop {:?}", region_scope, local);
    }

    /// Schedules emission of a backwards-incompatible-drop lint hint.
    #[instrument(level = "debug", skip(self))]
    pub(crate) fn schedule_backwards_incompatible_drop(
        &mut self,
        span: Span,
        region_scope: region::Scope,
        local: Local,
    ) {
        for scope in self.scopes.scopes.iter_mut().rev() {
            // Since we are inserting linting MIR statements, we have to
            // invalidate the cached drop-tree blocks.
            scope.invalidate_cache();
            if scope.region_scope == region_scope {
                let region_scope_span = region_scope.span(self.tcx, self.region_scope_tree);
                let scope_end = self.tcx.sess.source_map().end_point(region_scope_span);

                scope.drops.push(DropData {
                    source_info: SourceInfo { span: scope_end, scope: scope.source_scope },
                    local,
                    kind: DropKind::ForLint,
                });

                return;
            }
        }
        span_bug!(
            span,
            "region scope {:?} not in scope to drop {:?} for linting",
            region_scope,
            local
        );
    }

    /// Indicates that the "local operands" in `operands` are *moved* at some
    /// point during execution. We use this to suppress the drops that would
    /// otherwise be scheduled for them on the non-unwind paths: this produces
    /// less MIR and avoids spurious borrow-check errors.
    pub(crate) fn record_operands_moved(&mut self, operands: &[Spanned<Operand<'tcx>>]) {
        let local_scope = self.local_scope();
        let scope = self.scopes.scopes.last_mut().unwrap();

        assert_eq!(scope.region_scope, local_scope, "local scope is not the topmost scope!");

        // Look for moves of a local variable, like `MOVE(_X)`.
        let locals_moved = operands.iter().flat_map(|operand| match operand.node {
            Operand::Copy(_) | Operand::Constant(_) => None,
            Operand::Move(place) => place.as_local(),
        });

        for local in locals_moved {
            // Only record the move if a value drop is scheduled for the local
            // in this scope; those are the drops we can elide.
            if scope.drops.iter().any(|drop| drop.local == local && drop.kind == DropKind::Value) {
                scope.moved_locals.push(local);
            }
        }
    }

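    /// Returns the [`DropIdx`] for the innermost drop if the function unwound
    /// at this point, building and caching any unwind drop-tree nodes that
    /// don't yet exist.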
    fn diverge_cleanup(&mut self) -> DropIdx {
        // It is okay to use dummy spans here, because the introduction of an
        // unwind path does not change the drop order of the function body.
        self.diverge_cleanup_target(self.scopes.topmost(), DUMMY_SP)
    }

    /// Similar to [`Self::diverge_cleanup`], except the target is some
    /// ancestor scope instead of the current scope; the search for a cached
    /// (sub-)tree starts from that ancestor.
    fn diverge_cleanup_target(&mut self, target_scope: region::Scope, span: Span) -> DropIdx {
        let target = self.scopes.scope_index(target_scope, span);
        let (uncached_scope, mut cached_drop) = self.scopes.scopes[..=target]
            .iter()
            .enumerate()
            .rev()
            .find_map(|(scope_idx, scope)| {
                scope.cached_unwind_block.map(|cached_block| (scope_idx + 1, cached_block))
            })
            .unwrap_or((0, ROOT_NODE));

        if uncached_scope > target {
            return cached_drop;
        }

        let is_coroutine = self.coroutine.is_some();
        for scope in &mut self.scopes.scopes[uncached_scope..=target] {
            for drop in &scope.drops {
                if is_coroutine || drop.kind == DropKind::Value {
                    cached_drop = self.scopes.unwind_drops.add_drop(*drop, cached_drop);
                }
            }
            scope.cached_unwind_block = Some(cached_drop);
        }

        cached_drop
    }

    /// Prepares to create a path that performs all required cleanup for
    /// unwinding. This path terminates in `UnwindResume`.
    pub(crate) fn diverge_from(&mut self, start: BasicBlock) {
        debug_assert!(
            matches!(
                self.cfg.block_data(start).terminator().kind,
                TerminatorKind::Assert { .. }
                    | TerminatorKind::Call { .. }
                    | TerminatorKind::Drop { .. }
                    | TerminatorKind::FalseUnwind { .. }
                    | TerminatorKind::InlineAsm { .. }
            ),
            "diverge_from called on block with terminator that cannot unwind."
        );

        let next_drop = self.diverge_cleanup();
        self.scopes.unwind_drops.add_entry_point(start, next_drop);
    }

    /// Returns the [`DropIdx`] for the innermost drop on the coroutine drop
    /// path (the "dropline"), creating nodes that don't yet exist.
    fn diverge_dropline(&mut self) -> DropIdx {
        // It is okay to use dummy spans; see the comment in `diverge_cleanup`.
        self.diverge_dropline_target(self.scopes.topmost(), DUMMY_SP)
    }

    /// Similar to [`Self::diverge_cleanup_target`], but for the coroutine drop path.
    fn diverge_dropline_target(&mut self, target_scope: region::Scope, span: Span) -> DropIdx {
        debug_assert!(
            self.coroutine.is_some(),
            "diverge_dropline_target is valid only for coroutine"
        );
        let target = self.scopes.scope_index(target_scope, span);
        let (uncached_scope, mut cached_drop) = self.scopes.scopes[..=target]
            .iter()
            .enumerate()
            .rev()
            .find_map(|(scope_idx, scope)| {
                scope.cached_coroutine_drop_block.map(|cached_block| (scope_idx + 1, cached_block))
            })
            .unwrap_or((0, ROOT_NODE));

        if uncached_scope > target {
            return cached_drop;
        }

        for scope in &mut self.scopes.scopes[uncached_scope..=target] {
            for drop in &scope.drops {
                cached_drop = self.scopes.coroutine_drops.add_drop(*drop, cached_drop);
            }
            scope.cached_coroutine_drop_block = Some(cached_drop);
        }

        cached_drop
    }

    /// Sets up a path that performs all required cleanup for dropping a
    /// coroutine, starting from the given block that ends in
    /// [`TerminatorKind::Yield`]. This path terminates in `CoroutineDrop`.
    pub(crate) fn coroutine_drop_cleanup(&mut self, yield_block: BasicBlock) {
        debug_assert!(
            matches!(
                self.cfg.block_data(yield_block).terminator().kind,
                TerminatorKind::Yield { .. }
            ),
            "coroutine_drop_cleanup called on block with non-yield terminator."
        );
        let cached_drop = self.diverge_dropline();
        self.scopes.coroutine_drops.add_entry_point(yield_block, cached_drop);
    }

    /// Utility function for *non*-scope code to build their own drops.
    /// Returns the block to branch to after the drop-and-replace happens.
    pub(crate) fn build_drop_and_replace(
        &mut self,
        block: BasicBlock,
        span: Span,
        place: Place<'tcx>,
        value: Rvalue<'tcx>,
    ) -> BlockAnd<()> {
        let source_info = self.source_info(span);

        // Create the block for the assignment on the normal path.
        let assign = self.cfg.start_new_block();
        self.cfg.push_assign(assign, source_info, place, value.clone());

        // Create the block for the assignment on the unwind path.
        let assign_unwind = self.cfg.start_new_cleanup_block();
        self.cfg.push_assign(assign_unwind, source_info, place, value.clone());

        self.cfg.terminate(
            block,
            source_info,
            TerminatorKind::Drop {
                place,
                target: assign,
                unwind: UnwindAction::Cleanup(assign_unwind),
                replace: true,
                drop: None,
                async_fut: None,
            },
        );
        self.diverge_from(block);

        assign.unit()
    }

    /// Creates an `Assert` terminator that diverges on failure and returns
    /// the success block.
    pub(crate) fn assert(
        &mut self,
        block: BasicBlock,
        cond: Operand<'tcx>,
        expected: bool,
        msg: AssertMessage<'tcx>,
        span: Span,
    ) -> BasicBlock {
        let source_info = self.source_info(span);
        let success_block = self.cfg.start_new_block();

        self.cfg.terminate(
            block,
            source_info,
            TerminatorKind::Assert {
                cond,
                expected,
                msg: Box::new(msg),
                target: success_block,
                unwind: UnwindAction::Continue,
            },
        );
        self.diverge_from(block);

        success_block
    }

    /// Unschedules any drops in the top scope.
    ///
    /// This is only needed for `match` arm scopes, because they have one
    /// entrance per pattern, but only one exit.
    pub(crate) fn clear_top_scope(&mut self, region_scope: region::Scope) {
        let top_scope = self.scopes.scopes.last_mut().unwrap();

        assert_eq!(top_scope.region_scope, region_scope);

        top_scope.drops.clear();
        top_scope.invalidate_cache();
    }
}

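/// Builds drops for `pop_scope` and `leave_top_scope`.
///
/// `unwind_to` is the index, in the unwind drop tree, of the node
/// corresponding to the next drop that would run on the unwind path; it is
/// advanced towards the root in lockstep as the normal-path drops are emitted,
/// so that each emitted `Drop` terminator knows where unwinding continues if
/// that drop panics. `dropline_to` plays the same role for the coroutine drop
/// tree, and `storage_dead_on_unwind` indicates whether the unwind path also
/// contains this scope's `StorageDead`s (only the case when building a
/// coroutine).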
fn build_scope_drops<'tcx, F>(
    cfg: &mut CFG<'tcx>,
    unwind_drops: &mut DropTree,
    coroutine_drops: &mut DropTree,
    scope: &Scope,
    block: BasicBlock,
    unwind_to: DropIdx,
    dropline_to: Option<DropIdx>,
    storage_dead_on_unwind: bool,
    arg_count: usize,
    is_async_drop: F,
) -> BlockAnd<()>
where
    F: Fn(Local) -> bool,
{
    debug!("build_scope_drops({:?} -> {:?}), dropline_to={:?}", block, scope, dropline_to);

    // The drop node in the unwind tree matching the next drop we emit.
    let mut unwind_to = unwind_to;

    // The block drops are currently being appended to.
    let mut block = block;

    // Same as `unwind_to`, but for the coroutine drop path.
    let mut dropline_to = dropline_to;

    for drop_data in scope.drops.iter().rev() {
        let source_info = drop_data.source_info;
        let local = drop_data.local;

        match drop_data.kind {
            DropKind::Value => {
                // `unwind_to` should drop the value that we're about to
                // schedule. If dropping this value panics, then we continue
                // with the *next* value on the unwind path.
                debug_assert_eq!(unwind_drops.drop_nodes[unwind_to].data.local, drop_data.local);
                debug_assert_eq!(unwind_drops.drop_nodes[unwind_to].data.kind, drop_data.kind);
                unwind_to = unwind_drops.drop_nodes[unwind_to].next;

                if let Some(idx) = dropline_to {
                    debug_assert_eq!(coroutine_drops.drop_nodes[idx].data.local, drop_data.local);
                    debug_assert_eq!(coroutine_drops.drop_nodes[idx].data.kind, drop_data.kind);
                    dropline_to = Some(coroutine_drops.drop_nodes[idx].next);
                }

                // If the operand has been moved, and we are not on an unwind
                // path, then don't generate the drop.
                if scope.moved_locals.contains(&local) {
                    continue;
                }

                unwind_drops.add_entry_point(block, unwind_to);
                if let Some(to) = dropline_to
                    && is_async_drop(local)
                {
                    coroutine_drops.add_entry_point(block, to);
                }

                let next = cfg.start_new_block();
                cfg.terminate(
                    block,
                    source_info,
                    TerminatorKind::Drop {
                        place: local.into(),
                        target: next,
                        unwind: UnwindAction::Continue,
                        replace: false,
                        drop: None,
                        async_fut: None,
                    },
                );
                block = next;
            }
            DropKind::ForLint => {
                // As in the `DropKind::Storage` case below, only advance
                // `unwind_to` if storage dead is emitted on unwind paths.
                if storage_dead_on_unwind {
                    debug_assert_eq!(
                        unwind_drops.drop_nodes[unwind_to].data.local,
                        drop_data.local
                    );
                    debug_assert_eq!(unwind_drops.drop_nodes[unwind_to].data.kind, drop_data.kind);
                    unwind_to = unwind_drops.drop_nodes[unwind_to].next;
                }

                if scope.moved_locals.contains(&local) {
                    continue;
                }

                cfg.push(
                    block,
                    Statement::new(
                        source_info,
                        StatementKind::BackwardIncompatibleDropHint {
                            place: Box::new(local.into()),
                            reason: BackwardIncompatibleDropReason::Edition2024,
                        },
                    ),
                );
            }
            DropKind::Storage => {
                // Ordinarily, storage-dead nodes are not emitted on unwind, so
                // we don't need to adjust `unwind_to` on this path. However, in
                // the case of a coroutine, scope entries are advanced on the
                // unwind path too.
                if storage_dead_on_unwind {
                    debug_assert_eq!(
                        unwind_drops.drop_nodes[unwind_to].data.local,
                        drop_data.local
                    );
                    debug_assert_eq!(unwind_drops.drop_nodes[unwind_to].data.kind, drop_data.kind);
                    unwind_to = unwind_drops.drop_nodes[unwind_to].next;
                }
                if let Some(idx) = dropline_to {
                    debug_assert_eq!(coroutine_drops.drop_nodes[idx].data.local, drop_data.local);
                    debug_assert_eq!(coroutine_drops.drop_nodes[idx].data.kind, drop_data.kind);
                    dropline_to = Some(coroutine_drops.drop_nodes[idx].next);
                }
                // Only temps and vars need their storage dead.
                assert!(local.index() > arg_count);
                cfg.push(block, Statement::new(source_info, StatementKind::StorageDead(local)));
            }
        }
    }
    block.unit()
}

impl<'a, 'tcx: 'a> Builder<'a, 'tcx> {
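    /// Build a drop tree for a breakable scope.
    ///
    /// If `continue_block` is `Some`, then the tree is for `continue` inside a
    /// loop. Otherwise this is for `break` or `return`.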
    fn build_exit_tree(
        &mut self,
        mut drops: DropTree,
        else_scope: region::Scope,
        span: Span,
        continue_block: Option<BasicBlock>,
    ) -> Option<BlockAnd<()>> {
        let blocks = drops.build_mir::<ExitScopes>(&mut self.cfg, continue_block);
        let is_coroutine = self.coroutine.is_some();

        // Link the exit drop tree to the unwind drop tree.
        if drops.drop_nodes.iter().any(|drop_node| drop_node.data.kind == DropKind::Value) {
            let unwind_target = self.diverge_cleanup_target(else_scope, span);
            let mut unwind_indices = IndexVec::from_elem_n(unwind_target, 1);
            for (drop_idx, drop_node) in drops.drop_nodes.iter_enumerated().skip(1) {
                match drop_node.data.kind {
                    DropKind::Storage | DropKind::ForLint => {
                        if is_coroutine {
                            let unwind_drop = self
                                .scopes
                                .unwind_drops
                                .add_drop(drop_node.data, unwind_indices[drop_node.next]);
                            unwind_indices.push(unwind_drop);
                        } else {
                            unwind_indices.push(unwind_indices[drop_node.next]);
                        }
                    }
                    DropKind::Value => {
                        let unwind_drop = self
                            .scopes
                            .unwind_drops
                            .add_drop(drop_node.data, unwind_indices[drop_node.next]);
                        self.scopes.unwind_drops.add_entry_point(
                            blocks[drop_idx].unwrap(),
                            unwind_indices[drop_node.next],
                        );
                        unwind_indices.push(unwind_drop);
                    }
                }
            }
        }
        // Link the exit drop tree to the coroutine drop tree, for async drops.
        if is_coroutine
            && drops.drop_nodes.iter().any(|DropNode { data, next: _ }| {
                data.kind == DropKind::Value && self.is_async_drop(data.local)
            })
        {
            let dropline_target = self.diverge_dropline_target(else_scope, span);
            let mut dropline_indices = IndexVec::from_elem_n(dropline_target, 1);
            for (drop_idx, drop_data) in drops.drop_nodes.iter_enumerated().skip(1) {
                let coroutine_drop = self
                    .scopes
                    .coroutine_drops
                    .add_drop(drop_data.data, dropline_indices[drop_data.next]);
                match drop_data.data.kind {
                    DropKind::Storage | DropKind::ForLint => {}
                    DropKind::Value => {
                        if self.is_async_drop(drop_data.data.local) {
                            self.scopes.coroutine_drops.add_entry_point(
                                blocks[drop_idx].unwrap(),
                                dropline_indices[drop_data.next],
                            );
                        }
                    }
                }
                dropline_indices.push(coroutine_drop);
            }
        }
        blocks[ROOT_NODE].map(BasicBlock::unit)
    }

    /// Build the unwind and (for coroutines) coroutine drop trees.
    pub(crate) fn build_drop_trees(&mut self) {
        if self.coroutine.is_some() {
            self.build_coroutine_drop_trees();
        } else {
            Self::build_unwind_tree(
                &mut self.cfg,
                &mut self.scopes.unwind_drops,
                self.fn_span,
                &mut None,
            );
        }
    }

    fn build_coroutine_drop_trees(&mut self) {
        // Build the drop tree for dropping the coroutine while it's suspended.
        let drops = &mut self.scopes.coroutine_drops;
        let cfg = &mut self.cfg;
        let fn_span = self.fn_span;
        let blocks = drops.build_mir::<CoroutineDrop>(cfg, None);
        if let Some(root_block) = blocks[ROOT_NODE] {
            cfg.terminate(
                root_block,
                SourceInfo::outermost(fn_span),
                TerminatorKind::CoroutineDrop,
            );
        }

        // Build the drop tree for unwinding in the normal control flow paths.
        let resume_block = &mut None;
        let unwind_drops = &mut self.scopes.unwind_drops;
        Self::build_unwind_tree(cfg, unwind_drops, fn_span, resume_block);

        // Build the drop tree for unwinding when dropping a suspended
        // coroutine. This is kept separate from the standard unwind tree to
        // prevent drop elaboration from creating drop flags that would have
        // to be captured by the coroutine.
        for (drop_idx, drop_node) in drops.drop_nodes.iter_enumerated() {
            if let DropKind::Value = drop_node.data.kind
                && let Some(bb) = blocks[drop_idx]
            {
                debug_assert!(drop_node.next < drops.drop_nodes.next_index());
                drops.entry_points.push((drop_node.next, bb));
            }
        }
        Self::build_unwind_tree(cfg, drops, fn_span, resume_block);
    }

    fn build_unwind_tree(
        cfg: &mut CFG<'tcx>,
        drops: &mut DropTree,
        fn_span: Span,
        resume_block: &mut Option<BasicBlock>,
    ) {
        let blocks = drops.build_mir::<Unwind>(cfg, *resume_block);
        if let (None, Some(resume)) = (*resume_block, blocks[ROOT_NODE]) {
            cfg.terminate(resume, SourceInfo::outermost(fn_span), TerminatorKind::UnwindResume);

            *resume_block = blocks[ROOT_NODE];
        }
    }
}

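// The three kinds of drop tree differ only in how entry points are linked and
// whether their blocks are cleanup blocks: `ExitScopes` rewrites a placeholder
// `UnwindResume` terminator into a `Goto`, `CoroutineDrop` fills in the `drop`
// edge of a `Yield` or `Drop` terminator, and `Unwind` fills in
// `UnwindAction::Cleanup` edges.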
struct ExitScopes;

impl<'tcx> DropTreeBuilder<'tcx> for ExitScopes {
    fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock {
        cfg.start_new_block()
    }
    fn link_entry_point(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) {
        // There should be an existing terminator with real source info and a
        // dummy `TerminatorKind`; replace it with a proper goto. (The dummy is
        // added by `break_scope` and `break_for_else`.)
        let term = cfg.block_data_mut(from).terminator_mut();
        if let TerminatorKind::UnwindResume = term.kind {
            term.kind = TerminatorKind::Goto { target: to };
        } else {
            span_bug!(term.source_info.span, "unexpected dummy terminator kind: {:?}", term.kind);
        }
    }
}

struct CoroutineDrop;

impl<'tcx> DropTreeBuilder<'tcx> for CoroutineDrop {
    fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock {
        cfg.start_new_block()
    }
    fn link_entry_point(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) {
        let term = cfg.block_data_mut(from).terminator_mut();
        if let TerminatorKind::Yield { ref mut drop, .. } = term.kind {
            *drop = Some(to);
        } else if let TerminatorKind::Drop { ref mut drop, .. } = term.kind {
            *drop = Some(to);
        } else {
            span_bug!(
                term.source_info.span,
                "cannot enter coroutine drop tree from {:?}",
                term.kind
            )
        }
    }
}

struct Unwind;

impl<'tcx> DropTreeBuilder<'tcx> for Unwind {
    fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock {
        cfg.start_new_cleanup_block()
    }
    fn link_entry_point(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) {
        let term = cfg.block_data_mut(from).terminator_mut();
        match &mut term.kind {
            TerminatorKind::Drop { unwind, .. } => {
                if let UnwindAction::Cleanup(unwind) = *unwind {
                    let source_info = term.source_info;
                    cfg.terminate(unwind, source_info, TerminatorKind::Goto { target: to });
                } else {
                    *unwind = UnwindAction::Cleanup(to);
                }
            }
            TerminatorKind::FalseUnwind { unwind, .. }
            | TerminatorKind::Call { unwind, .. }
            | TerminatorKind::Assert { unwind, .. }
            | TerminatorKind::InlineAsm { unwind, .. } => {
                *unwind = UnwindAction::Cleanup(to);
            }
            TerminatorKind::Goto { .. }
            | TerminatorKind::SwitchInt { .. }
            | TerminatorKind::UnwindResume
            | TerminatorKind::UnwindTerminate(_)
            | TerminatorKind::Return
            | TerminatorKind::TailCall { .. }
            | TerminatorKind::Unreachable
            | TerminatorKind::Yield { .. }
            | TerminatorKind::CoroutineDrop
            | TerminatorKind::FalseEdge { .. } => {
                span_bug!(term.source_info.span, "cannot unwind from {:?}", term.kind)
            }
        }
    }
}