use std::mem;

use interpret::ErrorHandled;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::HirId;
use rustc_index::{IndexSlice, IndexVec};
use rustc_middle::middle::region;
use rustc_middle::mir::{self, *};
use rustc_middle::thir::{AdtExpr, AdtExprBase, ArmId, ExprId, ExprKind, LintLevel};
use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt, ValTree};
use rustc_middle::{bug, span_bug};
use rustc_pattern_analysis::rustc::RustcPatCtxt;
use rustc_session::lint::Level;
use rustc_span::source_map::Spanned;
use rustc_span::{DUMMY_SP, Span};
use tracing::{debug, instrument};

use super::matches::BuiltMatchTree;
use crate::builder::{BlockAnd, BlockAndExtension, BlockFrame, Builder, CFG};
use crate::errors::{
    ConstContinueBadConst, ConstContinueNotMonomorphicConst, ConstContinueUnknownJumpTarget,
};
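/// The scopes currently being built, together with the drop trees that
/// collect the exits out of those scopes: `break`/`continue`/`return`
/// targets, `#[const_continue]` targets, if-then else-paths, unwind paths,
/// and coroutine-drop paths.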
#[derive(Debug)]
pub(crate) struct Scopes<'tcx> {
    scopes: Vec<Scope>,

    /// Scopes that a `break` or `continue` can target.
    breakable_scopes: Vec<BreakableScope<'tcx>>,

    /// Scopes that a `#[const_continue]` can target.
    const_continuable_scopes: Vec<ConstContinuableScope<'tcx>>,

    /// The innermost enclosing if-then scope, if any.
    if_then_scope: Option<IfThenScope>,

    /// Drops that need to be done on unwind paths.
    unwind_drops: DropTree,

    /// Drops that need to be done on paths to the `CoroutineDrop` terminator.
    coroutine_drops: DropTree,
}

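/// A single in-progress region scope: it tracks the drops scheduled within
/// it, plus caches into the unwind and coroutine drop trees so that repeated
/// exits through the same scope don't rebuild the same chains of drops.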
#[derive(Debug)]
struct Scope {
    /// The source scope this scope was created in.
    source_scope: SourceScope,

    /// The region covered by this scope.
    region_scope: region::Scope,

    /// Drops that need to be made when the scope is exited.
    drops: Vec<DropData>,

    /// Locals whose value drops can be elided because they were moved out
    /// of this scope (see `record_operands_moved`).
    moved_locals: Vec<Local>,

    /// Cached entry into the unwind drop tree for this scope's drops.
    cached_unwind_block: Option<DropIdx>,

    /// Cached entry into the coroutine drop tree for this scope's drops.
    cached_coroutine_drop_block: Option<DropIdx>,
}

#[derive(Clone, Copy, Debug)]
struct DropData {
    /// The `SourceInfo` where the drop obligation was incurred (typically
    /// where the local was declared).
    source_info: SourceInfo,

    /// Local to drop.
    local: Local,

    /// What kind of action this drop performs (see `DropKind`).
    kind: DropKind,
}

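/// The kind of action a scheduled drop performs when its scope is exited.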
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) enum DropKind {
    /// Run the destructor for the local.
    Value,
    /// Emit `StorageDead` for the local.
    Storage,
    /// Emit a backward-incompatible drop hint for linting, without running
    /// the destructor.
    ForLint,
}

#[derive(Debug)]
struct BreakableScope<'tcx> {
    region_scope: region::Scope,
    /// The destination of the loop/block expression itself (i.e., where to
    /// put the result of a `break` expression).
    break_destination: Place<'tcx>,
    /// Drops that happen on the `break`/`return` path.
    break_drops: DropTree,
    /// Drops that happen on the `continue` path; `None` for labeled blocks,
    /// which cannot be `continue`d.
    continue_drops: Option<DropTree>,
}

#[derive(Debug)]
struct ConstContinuableScope<'tcx> {
    region_scope: region::Scope,
    /// The place holding the state of the `#[loop_match]`, which a
    /// `#[const_continue]` must update before jumping.
    state_place: Place<'tcx>,

    arms: Box<[ArmId]>,
    built_match_tree: BuiltMatchTree<'tcx>,

    /// Drops that happen on a `#[const_continue]`.
    const_continue_drops: DropTree,
}

#[derive(Debug)]
struct IfThenScope {
    region_scope: region::Scope,
    /// Drops that happen on the `else` path.
    else_drops: DropTree,
}

#[derive(Clone, Copy, Debug)]
pub(crate) enum BreakableTarget {
    Continue(region::Scope),
    Break(region::Scope),
    Return,
}

rustc_index::newtype_index! {
    #[orderable]
    struct DropIdx {}
}

const ROOT_NODE: DropIdx = DropIdx::ZERO;

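/// A tree of drops. The root represents the exit point (e.g., resuming an
/// unwind, or leaving a breakable scope), and each non-root node adds one
/// drop that must run before continuing to its `next` node toward the root.
/// Identical drop suffixes are shared between entry points, which keeps the
/// generated MIR compact.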
#[derive(Debug)]
struct DropTree {
    /// Nodes in the drop tree, each containing drop data and a link to the
    /// next drop to perform.
    drop_nodes: IndexVec<DropIdx, DropNode>,
    /// Map for finding the index of an existing node, given its contents.
    existing_drops_map: FxHashMap<DropNodeKey, DropIdx>,
    /// Edges into the `DropTree` that need to be added once it's lowered.
    entry_points: Vec<(DropIdx, BasicBlock)>,
}

/// A single node in the drop tree.
#[derive(Debug)]
struct DropNode {
    /// Info about the drop to be performed at this node in the drop tree.
    data: DropData,
    /// Index of the "next" drop to perform (in drop order, not declaration order).
    next: DropIdx,
}

/// Subset of [`DropNode`] used for reverse lookup in a hash table.
#[derive(Debug, PartialEq, Eq, Hash)]
struct DropNodeKey {
    next: DropIdx,
    local: Local,
}

impl Scope {
    /// Whether there's anything to do for the cleanup path, i.e. when
    /// unwinding through this scope. Only drops that run a destructor or
    /// emit a lint hint force a cleanup path; storage markers alone do not.
    fn needs_cleanup(&self) -> bool {
        self.drops.iter().any(|drop| match drop.kind {
            DropKind::Value | DropKind::ForLint => true,
            DropKind::Storage => false,
        })
    }

    fn invalidate_cache(&mut self) {
        self.cached_unwind_block = None;
        self.cached_coroutine_drop_block = None;
    }
}

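/// A trait that determines how a tree of drops is lowered into MIR.
/// Unwind drops, coroutine drops, and scope exits each allocate different
/// kinds of blocks and link entry points into the tree differently.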
trait DropTreeBuilder<'tcx> {
    /// Create a new block for the tree. This should call either
    /// `cfg.start_new_block()` or `cfg.start_new_cleanup_block()`.
    fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock;

    /// Links the block `from`, outside the drop tree, to the block `to`
    /// inside the drop tree.
    fn link_entry_point(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock);
}

impl DropTree {
    fn new() -> Self {
        // The root node of the tree doesn't represent a drop, but instead
        // represents the point in the tree where we can exit.
        let fake_source_info = SourceInfo::outermost(DUMMY_SP);
        let fake_data =
            DropData { source_info: fake_source_info, local: Local::MAX, kind: DropKind::Storage };
        let drop_nodes = IndexVec::from_raw(vec![DropNode { data: fake_data, next: DropIdx::MAX }]);
        Self { drop_nodes, entry_points: Vec::new(), existing_drops_map: FxHashMap::default() }
    }

    /// Adds a node to the drop tree, consisting of drop data and the index of
    /// the "next" drop (in drop order), which could be the sentinel [`ROOT_NODE`].
    ///
    /// If there is already an equivalent node in the tree, nothing is added,
    /// and that node's index is returned. Otherwise, the new node's index is
    /// returned.
    fn add_drop(&mut self, data: DropData, next: DropIdx) -> DropIdx {
        let drop_nodes = &mut self.drop_nodes;
        *self
            .existing_drops_map
            .entry(DropNodeKey { next, local: data.local })
            .or_insert_with(|| drop_nodes.push(DropNode { data, next }))
    }

    /// Registers `from` as an entry point to this drop tree, at `to`.
    ///
    /// During [`Self::build_mir`], `from` will be linked to the corresponding
    /// block within the drop tree.
    fn add_entry_point(&mut self, from: BasicBlock, to: DropIdx) {
        debug_assert!(to < self.drop_nodes.next_index());
        self.entry_points.push((to, from));
    }

    /// Builds the MIR for a given drop tree.
    fn build_mir<'tcx, T: DropTreeBuilder<'tcx>>(
        &mut self,
        cfg: &mut CFG<'tcx>,
        root_node: Option<BasicBlock>,
    ) -> IndexVec<DropIdx, Option<BasicBlock>> {
        debug!("DropTree::build_mir(drops = {:#?})", self);

        let mut blocks = self.assign_blocks::<T>(cfg, root_node);
        self.link_blocks(cfg, &mut blocks);

        blocks
    }

    /// Assigns blocks for all of the drops in the drop tree that need them.
    fn assign_blocks<'tcx, T: DropTreeBuilder<'tcx>>(
        &mut self,
        cfg: &mut CFG<'tcx>,
        root_node: Option<BasicBlock>,
    ) -> IndexVec<DropIdx, Option<BasicBlock>> {
        // StorageDead statements can share blocks with each other and also
        // with a Drop terminator. We iterate through the drops to find which
        // drops need their own block.
        #[derive(Clone, Copy)]
        enum Block {
            // This drop is unreachable.
            None,
            // This drop is only reachable through the drop with the specified
            // index, so it can share that drop's block.
            Shares(DropIdx),
            // This drop has multiple predecessors, so it needs its own block.
            Own,
        }

        let mut blocks = IndexVec::from_elem(None, &self.drop_nodes);
        blocks[ROOT_NODE] = root_node;

        let mut needs_block = IndexVec::from_elem(Block::None, &self.drop_nodes);
        if root_node.is_some() {
            // In some cases (such as drops for `continue`) the root node
            // already has a block. In this case, make sure that we don't
            // override it.
            needs_block[ROOT_NODE] = Block::Own;
        }

        // Sort the entry points so that we only need to inspect the last one.
        let entry_points = &mut self.entry_points;
        entry_points.sort();

        for (drop_idx, drop_node) in self.drop_nodes.iter_enumerated().rev() {
            if entry_points.last().is_some_and(|entry_point| entry_point.0 == drop_idx) {
                let block = *blocks[drop_idx].get_or_insert_with(|| T::make_block(cfg));
                needs_block[drop_idx] = Block::Own;
                while entry_points.last().is_some_and(|entry_point| entry_point.0 == drop_idx) {
                    let entry_block = entry_points.pop().unwrap().1;
                    T::link_entry_point(cfg, entry_block, block);
                }
            }
            match needs_block[drop_idx] {
                Block::None => continue,
                Block::Own => {
                    blocks[drop_idx].get_or_insert_with(|| T::make_block(cfg));
                }
                Block::Shares(pred) => {
                    blocks[drop_idx] = blocks[pred];
                }
            }
            if let DropKind::Value = drop_node.data.kind {
                needs_block[drop_node.next] = Block::Own;
            } else if drop_idx != ROOT_NODE {
                match &mut needs_block[drop_node.next] {
                    pred @ Block::None => *pred = Block::Shares(drop_idx),
                    pred @ Block::Shares(_) => *pred = Block::Own,
                    Block::Own => (),
                }
            }
        }

        debug!("assign_blocks: blocks = {:#?}", blocks);
        assert!(entry_points.is_empty());

        blocks
    }

    fn link_blocks<'tcx>(
        &self,
        cfg: &mut CFG<'tcx>,
        blocks: &IndexSlice<DropIdx, Option<BasicBlock>>,
    ) {
        for (drop_idx, drop_node) in self.drop_nodes.iter_enumerated().rev() {
            let Some(block) = blocks[drop_idx] else { continue };
            match drop_node.data.kind {
                DropKind::Value => {
                    let terminator = TerminatorKind::Drop {
                        target: blocks[drop_node.next].unwrap(),
                        // May be overridden when this tree is linked into the
                        // unwind drop tree (see `Unwind::link_entry_point`).
                        unwind: UnwindAction::Terminate(UnwindTerminateReason::InCleanup),
                        place: drop_node.data.local.into(),
                        replace: false,
                        drop: None,
                        async_fut: None,
                    };
                    cfg.terminate(block, drop_node.data.source_info, terminator);
                }
                DropKind::ForLint => {
                    let stmt = Statement::new(
                        drop_node.data.source_info,
                        StatementKind::BackwardIncompatibleDropHint {
                            place: Box::new(drop_node.data.local.into()),
                            reason: BackwardIncompatibleDropReason::Edition2024,
                        },
                    );
                    cfg.push(block, stmt);
                    let target = blocks[drop_node.next].unwrap();
                    if target != block {
                        // Use a dummy span for the goto; it carries no useful
                        // source location of its own.
                        let source_info =
                            SourceInfo { span: DUMMY_SP, ..drop_node.data.source_info };
                        let terminator = TerminatorKind::Goto { target };
                        cfg.terminate(block, source_info, terminator);
                    }
                }
                // Root nodes don't correspond to a drop.
                DropKind::Storage if drop_idx == ROOT_NODE => {}
                DropKind::Storage => {
                    let stmt = Statement::new(
                        drop_node.data.source_info,
                        StatementKind::StorageDead(drop_node.data.local),
                    );
                    cfg.push(block, stmt);
                    let target = blocks[drop_node.next].unwrap();
                    if target != block {
                        // Use a dummy span for the goto; it carries no useful
                        // source location of its own.
                        let source_info =
                            SourceInfo { span: DUMMY_SP, ..drop_node.data.source_info };
                        let terminator = TerminatorKind::Goto { target };
                        cfg.terminate(block, source_info, terminator);
                    }
                }
            }
        }
    }
}

impl<'tcx> Scopes<'tcx> {
    pub(crate) fn new() -> Self {
        Self {
            scopes: Vec::new(),
            breakable_scopes: Vec::new(),
            const_continuable_scopes: Vec::new(),
            if_then_scope: None,
            unwind_drops: DropTree::new(),
            coroutine_drops: DropTree::new(),
        }
    }

    fn push_scope(&mut self, region_scope: (region::Scope, SourceInfo), vis_scope: SourceScope) {
        debug!("push_scope({:?})", region_scope);
        self.scopes.push(Scope {
            source_scope: vis_scope,
            region_scope: region_scope.0,
            drops: vec![],
            moved_locals: vec![],
            cached_unwind_block: None,
            cached_coroutine_drop_block: None,
        });
    }

    fn pop_scope(&mut self, region_scope: (region::Scope, SourceInfo)) -> Scope {
        let scope = self.scopes.pop().unwrap();
        assert_eq!(scope.region_scope, region_scope.0);
        scope
    }

    fn scope_index(&self, region_scope: region::Scope, span: Span) -> usize {
        self.scopes
            .iter()
            .rposition(|scope| scope.region_scope == region_scope)
            .unwrap_or_else(|| span_bug!(span, "region_scope {:?} does not enclose", region_scope))
    }

    /// Returns the topmost (most recently pushed) active scope.
    fn topmost(&self) -> region::Scope {
        self.scopes.last().expect("topmost_scope: no scopes present").region_scope
    }
}

impl<'a, 'tcx> Builder<'a, 'tcx> {
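    /// Starts a breakable scope, which tracks where `continue`, `break`, and
    /// `return` should branch to. If `loop_block` is `None`, this is a block
    /// scope that cannot be targeted by `continue`.
    ///
    /// As a hedged illustration (not compiled as part of this module), this
    /// is the lowering entry point for surface code like:
    ///
    /// ```rust
    /// fn main() {
    ///     // A labeled block is a breakable scope without a continue drop tree.
    ///     let x = 'blk: {
    ///         if true {
    ///             break 'blk 1;
    ///         }
    ///         0
    ///     };
    ///     assert_eq!(x, 1);
    /// }
    /// ```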
    pub(crate) fn in_breakable_scope<F>(
        &mut self,
        loop_block: Option<BasicBlock>,
        break_destination: Place<'tcx>,
        span: Span,
        f: F,
    ) -> BlockAnd<()>
    where
        F: FnOnce(&mut Builder<'a, 'tcx>) -> Option<BlockAnd<()>>,
    {
        let region_scope = self.scopes.topmost();
        let scope = BreakableScope {
            region_scope,
            break_destination,
            break_drops: DropTree::new(),
            continue_drops: loop_block.map(|_| DropTree::new()),
        };
        self.scopes.breakable_scopes.push(scope);
        let normal_exit_block = f(self);
        let breakable_scope = self.scopes.breakable_scopes.pop().unwrap();
        assert!(breakable_scope.region_scope == region_scope);
        let break_block =
            self.build_exit_tree(breakable_scope.break_drops, region_scope, span, None);
        if let Some(drops) = breakable_scope.continue_drops {
            self.build_exit_tree(drops, region_scope, span, loop_block);
        }
        match (normal_exit_block, break_block) {
            (Some(block), None) | (None, Some(block)) => block,
            (None, None) => self.cfg.start_new_block().unit(),
            (Some(normal_block), Some(exit_block)) => {
                let target = self.cfg.start_new_block();
                let source_info = self.source_info(span);
                self.cfg.terminate(
                    normal_block.into_block(),
                    source_info,
                    TerminatorKind::Goto { target },
                );
                self.cfg.terminate(
                    exit_block.into_block(),
                    source_info,
                    TerminatorKind::Goto { target },
                );
                target.unit()
            }
        }
    }

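    /// Starts a const-continuable scope, the target of a `#[const_continue]`
    /// inside a `#[loop_match]`. `state_place` is the place holding the
    /// loop's state, and `arms`/`built_match_tree` describe the match that a
    /// `#[const_continue]` jumps back into.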
    pub(crate) fn in_const_continuable_scope<F>(
        &mut self,
        arms: Box<[ArmId]>,
        built_match_tree: BuiltMatchTree<'tcx>,
        state_place: Place<'tcx>,
        span: Span,
        f: F,
    ) -> BlockAnd<()>
    where
        F: FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<()>,
    {
        let region_scope = self.scopes.topmost();
        let scope = ConstContinuableScope {
            region_scope,
            state_place,
            const_continue_drops: DropTree::new(),
            arms,
            built_match_tree,
        };
        self.scopes.const_continuable_scopes.push(scope);
        let normal_exit_block = f(self);
        let const_continue_scope = self.scopes.const_continuable_scopes.pop().unwrap();
        assert!(const_continue_scope.region_scope == region_scope);

        let break_block = self.build_exit_tree(
            const_continue_scope.const_continue_drops,
            region_scope,
            span,
            None,
        );

        match (normal_exit_block, break_block) {
            (block, None) => block,
            (normal_block, Some(exit_block)) => {
                let target = self.cfg.start_new_block();
                let source_info = self.source_info(span);
                self.cfg.terminate(
                    normal_block.into_block(),
                    source_info,
                    TerminatorKind::Goto { target },
                );
                self.cfg.terminate(
                    exit_block.into_block(),
                    source_info,
                    TerminatorKind::Goto { target },
                );
                target.unit()
            }
        }
    }

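    /// Starts an if-then scope, which tracks drop obligations for `let`
    /// bindings made in an `if` condition, so that the else branch can drop
    /// anything bound by a pattern that ultimately failed. Returns the
    /// then-block and the else-block.
    ///
    /// A hedged illustration of the kind of surface code whose lowering
    /// needs this (not compiled as part of this module):
    ///
    /// ```rust
    /// fn describe(opt: Option<String>) -> usize {
    ///     // The binding `s` only exists on the then path; if the pattern
    ///     // fails to match, control flows to the else block built here.
    ///     if let Some(s) = opt { s.len() } else { 0 }
    /// }
    /// ```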
    pub(crate) fn in_if_then_scope<F>(
        &mut self,
        region_scope: region::Scope,
        span: Span,
        f: F,
    ) -> (BasicBlock, BasicBlock)
    where
        F: FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<()>,
    {
        let scope = IfThenScope { region_scope, else_drops: DropTree::new() };
        let previous_scope = mem::replace(&mut self.scopes.if_then_scope, Some(scope));

        let then_block = f(self).into_block();

        let if_then_scope = mem::replace(&mut self.scopes.if_then_scope, previous_scope).unwrap();
        assert!(if_then_scope.region_scope == region_scope);

        let else_block =
            self.build_exit_tree(if_then_scope.else_drops, region_scope, span, None).map_or_else(
                || self.cfg.start_new_block(),
                |else_block_and| else_block_and.into_block(),
            );

        (then_block, else_block)
    }

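    /// Convenience wrapper that pushes a scope, runs `f` within it, and then
    /// pops the scope, emitting its drops on the normal exit path.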
    #[instrument(skip(self, f), level = "debug")]
    pub(crate) fn in_scope<F, R>(
        &mut self,
        region_scope: (region::Scope, SourceInfo),
        lint_level: LintLevel,
        f: F,
    ) -> BlockAnd<R>
    where
        F: FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<R>,
    {
        let source_scope = self.source_scope;
        if let LintLevel::Explicit(current_hir_id) = lint_level {
            let parent_id =
                self.source_scopes[source_scope].local_data.as_ref().unwrap_crate_local().lint_root;
            self.maybe_new_source_scope(region_scope.1.span, current_hir_id, parent_id);
        }
        self.push_scope(region_scope);
        let mut block;
        let rv = unpack!(block = f(self));
        block = self.pop_scope(region_scope, block).into_block();
        self.source_scope = source_scope;
        debug!(?block);
        block.and(rv)
    }

    /// Like `in_scope`, but for when there might not be a scope to enter.
    pub(crate) fn opt_in_scope<R>(
        &mut self,
        opt_region_scope: Option<(region::Scope, SourceInfo)>,
        f: impl FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<R>,
    ) -> BlockAnd<R> {
        if let Some(region_scope) = opt_region_scope {
            self.in_scope(region_scope, LintLevel::Inherited, f)
        } else {
            f(self)
        }
    }

    /// Pushes a scope onto the stack. You can then build code in this scope
    /// and call `pop_scope` afterwards. Note that these two calls must be
    /// paired.
    pub(crate) fn push_scope(&mut self, region_scope: (region::Scope, SourceInfo)) {
        self.scopes.push_scope(region_scope, self.source_scope);
    }

    /// Pops a scope, which should have region scope `region_scope`, adding
    /// any drops onto the end of `block` that are needed.
    pub(crate) fn pop_scope(
        &mut self,
        region_scope: (region::Scope, SourceInfo),
        mut block: BasicBlock,
    ) -> BlockAnd<()> {
        debug!("pop_scope({:?}, {:?})", region_scope, block);

        block = self.leave_top_scope(block);

        self.scopes.pop_scope(region_scope);

        block.unit()
    }

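    /// Sets up the drops for breaking from `block` to `target` due to a
    /// `break`, `continue`, or `return` expression, first writing `value`
    /// (if any) into the scope's break destination.
    ///
    /// As a hedged, self-contained illustration of the surface forms this
    /// handles (not compiled as part of this module):
    ///
    /// ```rust
    /// fn main() {
    ///     let mut n = 0;
    ///     let x = loop {
    ///         n += 1;
    ///         if n == 3 {
    ///             break n * 10; // break-with-value writes to the destination
    ///         }
    ///         continue; // only runs the loop's continue drops
    ///     };
    ///     assert_eq!(x, 30);
    /// }
    /// ```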
    pub(crate) fn break_scope(
        &mut self,
        mut block: BasicBlock,
        value: Option<ExprId>,
        target: BreakableTarget,
        source_info: SourceInfo,
    ) -> BlockAnd<()> {
        let span = source_info.span;

        let get_scope_index = |scope: region::Scope| {
            self.scopes
                .breakable_scopes
                .iter()
                .rposition(|breakable_scope| breakable_scope.region_scope == scope)
                .unwrap_or_else(|| span_bug!(span, "no enclosing breakable scope found"))
        };
        let (break_index, destination) = match target {
            BreakableTarget::Return => {
                let scope = &self.scopes.breakable_scopes[0];
                if scope.break_destination != Place::return_place() {
                    span_bug!(span, "`return` in item with no return scope");
                }
                (0, Some(scope.break_destination))
            }
            BreakableTarget::Break(scope) => {
                let break_index = get_scope_index(scope);
                let scope = &self.scopes.breakable_scopes[break_index];
                (break_index, Some(scope.break_destination))
            }
            BreakableTarget::Continue(scope) => {
                let break_index = get_scope_index(scope);
                (break_index, None)
            }
        };

        match (destination, value) {
            (Some(destination), Some(value)) => {
                debug!("stmt_expr Break val block_context.push(SubExpr)");
                self.block_context.push(BlockFrame::SubExpr);
                block = self.expr_into_dest(destination, block, value).into_block();
                self.block_context.pop();
            }
            (Some(destination), None) => {
                self.cfg.push_assign_unit(block, source_info, destination, self.tcx)
            }
            (None, Some(_)) => {
                panic!("`return`, `become`, and `break` with a value must have a destination")
            }
            (None, None) => {
                if self.tcx.sess.instrument_coverage() {
                    // Normally we wouldn't build any MIR in this case, but
                    // that makes it harder for coverage instrumentation to
                    // extract a relevant span for `continue` expressions, so
                    // inject a dummy statement with the desired span.
                    self.cfg.push_coverage_span_marker(block, source_info);
                }
            }
        }

        let region_scope = self.scopes.breakable_scopes[break_index].region_scope;
        let scope_index = self.scopes.scope_index(region_scope, span);
        let drops = if destination.is_some() {
            &mut self.scopes.breakable_scopes[break_index].break_drops
        } else {
            let Some(drops) = self.scopes.breakable_scopes[break_index].continue_drops.as_mut()
            else {
                self.tcx.dcx().span_delayed_bug(
                    source_info.span,
                    "unlabelled `continue` within labelled block",
                );
                self.cfg.terminate(block, source_info, TerminatorKind::Unreachable);

                return self.cfg.start_new_block().unit();
            };
            drops
        };

        let mut drop_idx = ROOT_NODE;
        for scope in &self.scopes.scopes[scope_index + 1..] {
            for drop in &scope.drops {
                drop_idx = drops.add_drop(*drop, drop_idx);
            }
        }
        drops.add_entry_point(block, drop_idx);

        // `build_drop_trees` doesn't have access to our source_info, so we
        // create a dummy terminator now. `TerminatorKind::UnwindResume` is
        // used because it is the kind that `ExitScopes::link_entry_point`
        // expects to overwrite.
        self.cfg.terminate(block, source_info, TerminatorKind::UnwindResume);

        self.cfg.start_new_block().unit()
    }

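    /// Evaluates a MIR constant to a valtree, so that `#[const_continue]`
    /// can determine its jump target at build time. The constant's type must
    /// not contain generic parameters.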
    fn eval_unevaluated_mir_constant_to_valtree(
        &self,
        constant: ConstOperand<'tcx>,
    ) -> Result<(ty::ValTree<'tcx>, Ty<'tcx>), interpret::ErrorHandled> {
        assert!(!constant.const_.ty().has_param());
        let (uv, ty) = match constant.const_ {
            mir::Const::Unevaluated(uv, ty) => (uv.shrink(), ty),
            mir::Const::Ty(_, c) => match c.kind() {
                ty::ConstKind::Value(cv) => return Ok((cv.valtree, cv.ty)),
                other => span_bug!(constant.span, "{other:#?}"),
            },
            mir::Const::Val(mir::ConstValue::Scalar(mir::interpret::Scalar::Int(val)), ty) => {
                return Ok((ValTree::from_scalar_int(self.tcx, val), ty));
            }
            other => span_bug!(constant.span, "{other:#?}"),
        };

        match self.tcx.const_eval_resolve_for_typeck(self.typing_env(), uv, constant.span) {
            Ok(Ok(valtree)) => Ok((valtree, ty)),
            Ok(Err(ty)) => span_bug!(constant.span, "could not convert {ty:?} to a valtree"),
            Err(e) => Err(e),
        }
    }

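    /// Sets up the drops for jumping from `block` to `scope` due to a
    /// `#[const_continue]` expression. The break value is evaluated
    /// statically and matched against the arms of the enclosing
    /// `#[loop_match]` to pick the jump target, so no runtime match on the
    /// new state is needed.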
    pub(crate) fn break_const_continuable_scope(
        &mut self,
        mut block: BasicBlock,
        value: ExprId,
        scope: region::Scope,
        source_info: SourceInfo,
    ) -> BlockAnd<()> {
        let span = source_info.span;

        // A break can only break out of a scope, so the value should be a scope.
        let rustc_middle::thir::ExprKind::Scope { value, .. } = self.thir[value].kind else {
            span_bug!(span, "break value must be a scope")
        };

        let expr = &self.thir[value];
        let constant = match &expr.kind {
            ExprKind::Adt(box AdtExpr { variant_index, fields, base, .. }) => {
                assert!(matches!(base, AdtExprBase::None));
                assert!(fields.is_empty());
                ConstOperand {
                    span: self.thir[value].span,
                    user_ty: None,
                    const_: Const::Ty(
                        self.thir[value].ty,
                        ty::Const::new_value(
                            self.tcx,
                            ValTree::from_branches(
                                self.tcx,
                                [ty::Const::new_value(
                                    self.tcx,
                                    ValTree::from_scalar_int(
                                        self.tcx,
                                        variant_index.as_u32().into(),
                                    ),
                                    self.tcx.types.u32,
                                )],
                            ),
                            self.thir[value].ty,
                        ),
                    ),
                }
            }

            ExprKind::Literal { .. }
            | ExprKind::NonHirLiteral { .. }
            | ExprKind::ZstLiteral { .. }
            | ExprKind::NamedConst { .. } => self.as_constant(&self.thir[value]),

            other => {
                use crate::errors::ConstContinueNotMonomorphicConstReason as Reason;

                let span = expr.span;
                let reason = match other {
                    ExprKind::ConstParam { .. } => Reason::ConstantParameter { span },
                    ExprKind::ConstBlock { .. } => Reason::ConstBlock { span },
                    _ => Reason::Other { span },
                };

                self.tcx
                    .dcx()
                    .emit_err(ConstContinueNotMonomorphicConst { span: expr.span, reason });
                return block.unit();
            }
        };

        let break_index = self
            .scopes
            .const_continuable_scopes
            .iter()
            .rposition(|const_continuable_scope| const_continuable_scope.region_scope == scope)
            .unwrap_or_else(|| span_bug!(span, "no enclosing const-continuable scope found"));

        let scope = &self.scopes.const_continuable_scopes[break_index];

        let state_decl = &self.local_decls[scope.state_place.as_local().unwrap()];
        let state_ty = state_decl.ty;
        let (discriminant_ty, rvalue) = match state_ty.kind() {
            ty::Adt(adt_def, _) if adt_def.is_enum() => {
                (state_ty.discriminant_ty(self.tcx), Rvalue::Discriminant(scope.state_place))
            }
            ty::Uint(_) | ty::Int(_) | ty::Float(_) | ty::Bool | ty::Char => {
                (state_ty, Rvalue::Use(Operand::Copy(scope.state_place)))
            }
            _ => span_bug!(state_decl.source_info.span, "unsupported #[loop_match] state"),
        };

        // `RustcPatCtxt` is normally used for match exhaustiveness checking;
        // here it is reused to statically determine which arm the constant
        // value matches.
        let dropless_arena = rustc_arena::DroplessArena::default();
        let typeck_results = self.tcx.typeck(self.def_id);
        let cx = RustcPatCtxt {
            tcx: self.tcx,
            typeck_results,
            module: self.tcx.parent_module(self.hir_id).to_def_id(),
            typing_env: rustc_middle::ty::TypingEnv::non_body_analysis(self.tcx, self.def_id),
            dropless_arena: &dropless_arena,
            match_lint_level: self.hir_id,
            whole_match_span: Some(rustc_span::Span::default()),
            scrut_span: rustc_span::Span::default(),
            refutable: true,
            known_valid_scrutinee: true,
            internal_state: Default::default(),
        };

        let valtree = match self.eval_unevaluated_mir_constant_to_valtree(constant) {
            Ok((valtree, ty)) => {
                // Defensively check that the type is monomorphic.
                assert!(!ty.has_param());

                valtree
            }
            Err(ErrorHandled::Reported(..)) => {
                return block.unit();
            }
            Err(ErrorHandled::TooGeneric(_)) => {
                self.tcx.dcx().emit_fatal(ConstContinueBadConst { span: constant.span });
            }
        };

        let Some(real_target) =
            self.static_pattern_match(&cx, valtree, &*scope.arms, &scope.built_match_tree)
        else {
            self.tcx.dcx().emit_fatal(ConstContinueUnknownJumpTarget { span })
        };

        self.block_context.push(BlockFrame::SubExpr);
        let state_place = scope.state_place;
        block = self.expr_into_dest(state_place, block, value).into_block();
        self.block_context.pop();

        let discr = self.temp(discriminant_ty, source_info.span);
        let scope_index = self
            .scopes
            .scope_index(self.scopes.const_continuable_scopes[break_index].region_scope, span);
        let scope = &mut self.scopes.const_continuable_scopes[break_index];
        self.cfg.push_assign(block, source_info, discr, rvalue);
        let drop_and_continue_block = self.cfg.start_new_block();
        let imaginary_target = self.cfg.start_new_block();
        self.cfg.terminate(
            block,
            source_info,
            TerminatorKind::FalseEdge { real_target: drop_and_continue_block, imaginary_target },
        );

        let drops = &mut scope.const_continue_drops;

        let drop_idx = self.scopes.scopes[scope_index + 1..]
            .iter()
            .flat_map(|scope| &scope.drops)
            .fold(ROOT_NODE, |drop_idx, &drop| drops.add_drop(drop, drop_idx));

        drops.add_entry_point(imaginary_target, drop_idx);

        self.cfg.terminate(imaginary_target, source_info, TerminatorKind::UnwindResume);

        let region_scope = scope.region_scope;
        let scope_index = self.scopes.scope_index(region_scope, span);
        let mut drops = DropTree::new();

        let drop_idx = self.scopes.scopes[scope_index + 1..]
            .iter()
            .flat_map(|scope| &scope.drops)
            .fold(ROOT_NODE, |drop_idx, &drop| drops.add_drop(drop, drop_idx));

        drops.add_entry_point(drop_and_continue_block, drop_idx);

        // `build_drop_trees` doesn't have access to our source_info, so we
        // create a dummy terminator now. `TerminatorKind::UnwindResume` is
        // used because it is the kind that `ExitScopes::link_entry_point`
        // expects to overwrite.
        self.cfg.terminate(drop_and_continue_block, source_info, TerminatorKind::UnwindResume);

        self.build_exit_tree(drops, region_scope, span, Some(real_target));

        self.cfg.start_new_block().unit()
    }

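    /// Sets up the drops for breaking to the else branch of the innermost
    /// if-then scope, e.g. when a `let` pattern in an `if` condition fails
    /// to match.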
    pub(crate) fn break_for_else(&mut self, block: BasicBlock, source_info: SourceInfo) {
        let if_then_scope = self
            .scopes
            .if_then_scope
            .as_ref()
            .unwrap_or_else(|| span_bug!(source_info.span, "no if-then scope found"));

        let target = if_then_scope.region_scope;
        let scope_index = self.scopes.scope_index(target, source_info.span);

        // Upgrade the shared borrow from above to a mutable one.
        let if_then_scope = self.scopes.if_then_scope.as_mut().expect("upgrading & to &mut");

        let mut drop_idx = ROOT_NODE;
        let drops = &mut if_then_scope.else_drops;
        for scope in &self.scopes.scopes[scope_index + 1..] {
            for drop in &scope.drops {
                drop_idx = drops.add_drop(*drop, drop_idx);
            }
        }
        drops.add_entry_point(block, drop_idx);

        // `build_drop_trees` doesn't have access to our source_info, so we
        // create a dummy terminator now. `TerminatorKind::UnwindResume` is
        // used because it is the kind that `ExitScopes::link_entry_point`
        // expects to overwrite.
        self.cfg.terminate(block, source_info, TerminatorKind::UnwindResume);
    }

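    /// Sets up the drops for an explicit tail call. Unlike other kinds of
    /// early exits, these drops are emitted inline rather than through a
    /// shared drop tree: all locals except the moved call arguments are
    /// dropped before the call, with unwind entry points registered so the
    /// arguments are still dropped if one of those drops panics.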
    pub(crate) fn break_for_tail_call(
        &mut self,
        mut block: BasicBlock,
        args: &[Spanned<Operand<'tcx>>],
        source_info: SourceInfo,
    ) -> BlockAnd<()> {
        let arg_drops: Vec<_> = args
            .iter()
            .rev()
            .filter_map(|arg| match &arg.node {
                Operand::Copy(_) => bug!("copy op in tail call args"),
                Operand::Move(place) => {
                    let local =
                        place.as_local().unwrap_or_else(|| bug!("projection in tail call args"));

                    if !self.local_decls[local].ty.needs_drop(self.tcx, self.typing_env()) {
                        return None;
                    }

                    Some(DropData { source_info, local, kind: DropKind::Value })
                }
                Operand::Constant(_) | Operand::RuntimeChecks(_) => None,
            })
            .collect();

        let mut unwind_to = self.diverge_cleanup_target(
            self.scopes.scopes.iter().rev().nth(1).unwrap().region_scope,
            DUMMY_SP,
        );
        let typing_env = self.typing_env();
        let unwind_drops = &mut self.scopes.unwind_drops;

        // The innermost scope contains only the destructors for the tail-call
        // arguments; those should only run if one of the drops below panics,
        // so skip that scope here.
        for scope in self.scopes.scopes[1..].iter().rev().skip(1) {
            for drop_data in scope.drops.iter().rev() {
                let source_info = drop_data.source_info;
                let local = drop_data.local;

                if !self.local_decls[local].ty.needs_drop(self.tcx, typing_env) {
                    continue;
                }

                match drop_data.kind {
                    DropKind::Value => {
                        // `unwind_to` should always point to a node with
                        // matching drop data, because the unwind tree for this
                        // scope was just built by `diverge_cleanup_target`.
                        debug_assert_eq!(
                            unwind_drops.drop_nodes[unwind_to].data.local,
                            drop_data.local
                        );
                        debug_assert_eq!(
                            unwind_drops.drop_nodes[unwind_to].data.kind,
                            drop_data.kind
                        );
                        unwind_to = unwind_drops.drop_nodes[unwind_to].next;

                        let mut unwind_entry_point = unwind_to;

                        // The tail-call arguments must be dropped if any of
                        // these drops panic.
                        for drop in arg_drops.iter().copied() {
                            unwind_entry_point = unwind_drops.add_drop(drop, unwind_entry_point);
                        }

                        unwind_drops.add_entry_point(block, unwind_entry_point);

                        let next = self.cfg.start_new_block();
                        self.cfg.terminate(
                            block,
                            source_info,
                            TerminatorKind::Drop {
                                place: local.into(),
                                target: next,
                                unwind: UnwindAction::Continue,
                                replace: false,
                                drop: None,
                                async_fut: None,
                            },
                        );
                        block = next;
                    }
                    DropKind::ForLint => {
                        self.cfg.push(
                            block,
                            Statement::new(
                                source_info,
                                StatementKind::BackwardIncompatibleDropHint {
                                    place: Box::new(local.into()),
                                    reason: BackwardIncompatibleDropReason::Edition2024,
                                },
                            ),
                        );
                    }
                    DropKind::Storage => {
                        // Only temps and vars need their storage dead.
                        assert!(local.index() > self.arg_count);
                        self.cfg.push(
                            block,
                            Statement::new(source_info, StatementKind::StorageDead(local)),
                        );
                    }
                }
            }
        }

        block.unit()
    }

    fn is_async_drop_impl(
        tcx: TyCtxt<'tcx>,
        local_decls: &IndexVec<Local, LocalDecl<'tcx>>,
        typing_env: ty::TypingEnv<'tcx>,
        local: Local,
    ) -> bool {
        let ty = local_decls[local].ty;
        if ty.is_async_drop(tcx, typing_env) || ty.is_coroutine() {
            return true;
        }
        ty.needs_async_drop(tcx, typing_env)
    }
    fn is_async_drop(&self, local: Local) -> bool {
        Self::is_async_drop_impl(self.tcx, &self.local_decls, self.typing_env(), local)
    }

    fn leave_top_scope(&mut self, block: BasicBlock) -> BasicBlock {
        // If we are emitting a `drop` statement, we need to have the cached
        // diverge cleanup pads ready in case that drop panics.
        let needs_cleanup = self.scopes.scopes.last().is_some_and(|scope| scope.needs_cleanup());
        let is_coroutine = self.coroutine.is_some();
        let unwind_to = if needs_cleanup { self.diverge_cleanup() } else { DropIdx::MAX };

        let scope = self.scopes.scopes.last().expect("leave_top_scope called with no scopes");
        let has_async_drops = is_coroutine
            && scope.drops.iter().any(|v| v.kind == DropKind::Value && self.is_async_drop(v.local));
        let dropline_to = if has_async_drops { Some(self.diverge_dropline()) } else { None };
        let scope = self.scopes.scopes.last().expect("leave_top_scope called with no scopes");
        let typing_env = self.typing_env();
        build_scope_drops(
            &mut self.cfg,
            &mut self.scopes.unwind_drops,
            &mut self.scopes.coroutine_drops,
            scope,
            block,
            unwind_to,
            dropline_to,
            is_coroutine && needs_cleanup,
            self.arg_count,
            |v: Local| Self::is_async_drop_impl(self.tcx, &self.local_decls, typing_env, v),
        )
        .into_block()
    }

    /// Possibly creates a new source scope if `current_root` and
    /// `parent_root` differ, or if `-Zmaximal-hir-to-mir-coverage` is enabled.
    pub(crate) fn maybe_new_source_scope(
        &mut self,
        span: Span,
        current_id: HirId,
        parent_id: HirId,
    ) {
        let (current_root, parent_root) =
            if self.tcx.sess.opts.unstable_opts.maximal_hir_to_mir_coverage {
                // Some consumers of rustc need to map MIR locations back to
                // HIR nodes as precisely as possible, so in this mode we
                // skip the lint-root approximation and use the real ids.
                (current_id, parent_id)
            } else {
                (
                    self.maybe_lint_level_root_bounded(current_id),
                    if parent_id == self.hir_id {
                        // No need to walk: the body root is its own lint root.
                        parent_id
                    } else {
                        self.maybe_lint_level_root_bounded(parent_id)
                    },
                )
            };

        if current_root != parent_root {
            let lint_level = LintLevel::Explicit(current_root);
            self.source_scope = self.new_source_scope(span, lint_level);
        }
    }

    /// Walks the HIR from `orig_id` towards the root of the body, returning
    /// the first node with an explicit lint level attribute, or the body's
    /// root `HirId` if none is found. Results are cached so repeated walks
    /// over the same parent chain stay cheap.
    fn maybe_lint_level_root_bounded(&mut self, orig_id: HirId) -> HirId {
        assert_eq!(orig_id.owner, self.hir_id.owner);

        let mut id = orig_id;
        loop {
            if id == self.hir_id {
                // Reached the body root without finding an explicit lint level.
                break;
            }

            if self.tcx.hir_attrs(id).iter().any(|attr| Level::from_attr(attr).is_some()) {
                // Found an explicit lint level: this node is the lint root.
                // Note that this result is not cached.
                return id;
            }

            let next = self.tcx.parent_hir_id(id);
            if next == id {
                bug!("lint traversal reached the root of the crate");
            }
            id = next;

            if self.lint_level_roots_cache.contains(id.local_id) {
                // The cache already knows that this parent chain reaches the
                // body root without hitting an explicit lint level.
                break;
            }
        }

        // Cache the fact that `orig_id`'s lint root is the body root.
        self.lint_level_roots_cache.insert(orig_id.local_id);
        self.hir_id
    }

    /// Creates a new source scope, nested in the current one.
    pub(crate) fn new_source_scope(&mut self, span: Span, lint_level: LintLevel) -> SourceScope {
        let parent = self.source_scope;
        debug!(
            "new_source_scope({:?}, {:?}) - parent({:?})={:?}",
            span,
            lint_level,
            parent,
            self.source_scopes.get(parent)
        );
        let scope_local_data = SourceScopeLocalData {
            lint_root: if let LintLevel::Explicit(lint_root) = lint_level {
                lint_root
            } else {
                self.source_scopes[parent].local_data.as_ref().unwrap_crate_local().lint_root
            },
        };
        self.source_scopes.push(SourceScopeData {
            span,
            parent_scope: Some(parent),
            inlined: None,
            inlined_parent_scope: None,
            local_data: ClearCrossCrate::Set(scope_local_data),
        })
    }

    /// Given a span and the current source scope, make a SourceInfo.
    pub(crate) fn source_info(&self, span: Span) -> SourceInfo {
        SourceInfo { span, scope: self.source_scope }
    }

    /// Returns the `region::Scope` of the topmost (most recently pushed)
    /// scope, in which any new temporaries would live.
    pub(crate) fn local_scope(&self) -> region::Scope {
        self.scopes.topmost()
    }

    /// Schedules both a storage-dead marker and a value drop for `local` on
    /// exit from `region_scope`.
    pub(crate) fn schedule_drop_storage_and_value(
        &mut self,
        span: Span,
        region_scope: region::Scope,
        local: Local,
    ) {
        self.schedule_drop(span, region_scope, local, DropKind::Storage);
        self.schedule_drop(span, region_scope, local, DropKind::Value);
    }

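    /// Indicates that `local` should be dropped on exiting `region_scope`.
    ///
    /// When called with `DropKind::Storage`, `local` must be a temporary or
    /// user variable (not a body argument), and only a storage-dead marker
    /// is scheduled rather than a destructor call.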
    pub(crate) fn schedule_drop(
        &mut self,
        span: Span,
        region_scope: region::Scope,
        local: Local,
        drop_kind: DropKind,
    ) {
        let needs_drop = match drop_kind {
            DropKind::Value | DropKind::ForLint => {
                if !self.local_decls[local].ty.needs_drop(self.tcx, self.typing_env()) {
                    return;
                }
                true
            }
            DropKind::Storage => {
                if local.index() <= self.arg_count {
                    span_bug!(
                        span,
                        "`schedule_drop` called with body argument {:?} \
                         but its storage does not require a drop",
                        local,
                    )
                }
                false
            }
        };

        // The cached drop chains in the scopes may no longer match the actual
        // set of scheduled drops, so invalidate them whenever a drop that
        // would affect them is scheduled.
        let invalidate_caches = needs_drop || self.coroutine.is_some();
        for scope in self.scopes.scopes.iter_mut().rev() {
            if invalidate_caches {
                scope.invalidate_cache();
            }

            if scope.region_scope == region_scope {
                let region_scope_span = region_scope.span(self.tcx, self.region_scope_tree);
                // Attribute scope exit drops to the scope's closing brace.
                let scope_end = self.tcx.sess.source_map().end_point(region_scope_span);

                scope.drops.push(DropData {
                    source_info: SourceInfo { span: scope_end, scope: scope.source_scope },
                    local,
                    kind: drop_kind,
                });

                return;
            }
        }

        span_bug!(span, "region scope {:?} not in scope to drop {:?}", region_scope, local);
    }

    /// Schedules emission of a backwards incompatible drop lint hint for
    /// `local` on exit from `region_scope`.
    #[instrument(level = "debug", skip(self))]
    pub(crate) fn schedule_backwards_incompatible_drop(
        &mut self,
        span: Span,
        region_scope: region::Scope,
        local: Local,
    ) {
        for scope in self.scopes.scopes.iter_mut().rev() {
            // Since we are inserting linting MIR statements, we have to
            // invalidate the cached drop chains.
            scope.invalidate_cache();
            if scope.region_scope == region_scope {
                let region_scope_span = region_scope.span(self.tcx, self.region_scope_tree);
                let scope_end = self.tcx.sess.source_map().end_point(region_scope_span);

                scope.drops.push(DropData {
                    source_info: SourceInfo { span: scope_end, scope: scope.source_scope },
                    local,
                    kind: DropKind::ForLint,
                });

                return;
            }
        }
        span_bug!(
            span,
            "region scope {:?} not in scope to drop {:?} for linting",
            region_scope,
            local
        );
    }

    /// Records operands moved out of the topmost scope, so that their
    /// scheduled value drops are elided when the scope is exited normally.
    /// The unwind path is unaffected, since a panic may occur before the
    /// move actually happens.
    pub(crate) fn record_operands_moved(&mut self, operands: &[Spanned<Operand<'tcx>>]) {
        let local_scope = self.local_scope();
        let scope = self.scopes.scopes.last_mut().unwrap();

        assert_eq!(scope.region_scope, local_scope, "local scope is not the topmost scope!");

        // Look for moves of a local variable, like `MOVE(_X)`.
        let locals_moved = operands.iter().flat_map(|operand| match operand.node {
            Operand::Copy(_) | Operand::Constant(_) | Operand::RuntimeChecks(_) => None,
            Operand::Move(place) => place.as_local(),
        });

        for local in locals_moved {
            // Only elide drops for operands that actually have a value drop
            // scheduled in this scope.
            if scope.drops.iter().any(|drop| drop.local == local && drop.kind == DropKind::Value) {
                scope.moved_locals.push(local);
            }
        }
    }

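    /// Returns the [DropIdx] for the innermost drop on the unwind path that
    /// diverging statements in the current block would unwind into, building
    /// (and caching) the required portion of the unwind drop tree on demand.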
    fn diverge_cleanup(&mut self) -> DropIdx {
        // The span is only used for ICE messages if the scope lookup fails,
        // so a dummy span is fine here.
        self.diverge_cleanup_target(self.scopes.topmost(), DUMMY_SP)
    }

    /// Adds the drops for any dropped variables up to (and including)
    /// `target_scope` to the unwind drop tree, reusing the per-scope caches
    /// where possible.
    fn diverge_cleanup_target(&mut self, target_scope: region::Scope, span: Span) -> DropIdx {
        let target = self.scopes.scope_index(target_scope, span);
        let (uncached_scope, mut cached_drop) = self.scopes.scopes[..=target]
            .iter()
            .enumerate()
            .rev()
            .find_map(|(scope_idx, scope)| {
                scope.cached_unwind_block.map(|cached_block| (scope_idx + 1, cached_block))
            })
            .unwrap_or((0, ROOT_NODE));

        if uncached_scope > target {
            return cached_drop;
        }

        let is_coroutine = self.coroutine.is_some();
        for scope in &mut self.scopes.scopes[uncached_scope..=target] {
            for drop in &scope.drops {
                if is_coroutine || drop.kind == DropKind::Value {
                    cached_drop = self.scopes.unwind_drops.add_drop(*drop, cached_drop);
                }
            }
            scope.cached_unwind_block = Some(cached_drop);
        }

        cached_drop
    }

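    /// Prepares to create a path that performs all required cleanup for
    /// unwinding from `start`, whose terminator must be able to unwind. The
    /// entry point into the unwind drop tree is recorded here and linked up
    /// when the drop trees are built at the end of MIR construction.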
    pub(crate) fn diverge_from(&mut self, start: BasicBlock) {
        debug_assert!(
            matches!(
                self.cfg.block_data(start).terminator().kind,
                TerminatorKind::Assert { .. }
                    | TerminatorKind::Call { .. }
                    | TerminatorKind::Drop { .. }
                    | TerminatorKind::FalseUnwind { .. }
                    | TerminatorKind::InlineAsm { .. }
            ),
            "diverge_from called on block with terminator that cannot unwind."
        );

        let next_drop = self.diverge_cleanup();
        self.scopes.unwind_drops.add_entry_point(start, next_drop);
    }

    /// Returns the [DropIdx] for the innermost drop on the coroutine-drop
    /// ("dropline") path for the current point in the MIR.
    fn diverge_dropline(&mut self) -> DropIdx {
        // The span is only used for ICE messages if the scope lookup fails,
        // so a dummy span is fine here.
        self.diverge_dropline_target(self.scopes.topmost(), DUMMY_SP)
    }

    /// Adds the drops for any dropped variables up to (and including)
    /// `target_scope` to the coroutine drop tree.
    fn diverge_dropline_target(&mut self, target_scope: region::Scope, span: Span) -> DropIdx {
        debug_assert!(
            self.coroutine.is_some(),
            "diverge_dropline_target is valid only for coroutine"
        );
        let target = self.scopes.scope_index(target_scope, span);
        let (uncached_scope, mut cached_drop) = self.scopes.scopes[..=target]
            .iter()
            .enumerate()
            .rev()
            .find_map(|(scope_idx, scope)| {
                scope.cached_coroutine_drop_block.map(|cached_block| (scope_idx + 1, cached_block))
            })
            .unwrap_or((0, ROOT_NODE));

        if uncached_scope > target {
            return cached_drop;
        }

        for scope in &mut self.scopes.scopes[uncached_scope..=target] {
            for drop in &scope.drops {
                cached_drop = self.scopes.coroutine_drops.add_drop(*drop, cached_drop);
            }
            scope.cached_coroutine_drop_block = Some(cached_drop);
        }

        cached_drop
    }

    /// Sets up a path that performs all required cleanup for dropping a
    /// coroutine, starting from the given block that ends in
    /// [TerminatorKind::Yield]. This path terminates in `CoroutineDrop`.
    pub(crate) fn coroutine_drop_cleanup(&mut self, yield_block: BasicBlock) {
        debug_assert!(
            matches!(
                self.cfg.block_data(yield_block).terminator().kind,
                TerminatorKind::Yield { .. }
            ),
            "coroutine_drop_cleanup called on block with non-yield terminator."
        );
        let cached_drop = self.diverge_dropline();
        self.scopes.coroutine_drops.add_entry_point(yield_block, cached_drop);
    }

    /// Utility function for *non*-scope code to build their own drops.
    /// Drops the current contents of `place`, then assigns `value` into it;
    /// the assignment is also performed on the unwind path, so the place is
    /// re-initialized either way.
    pub(crate) fn build_drop_and_replace(
        &mut self,
        block: BasicBlock,
        span: Span,
        place: Place<'tcx>,
        value: Rvalue<'tcx>,
    ) -> BlockAnd<()> {
        let source_info = self.source_info(span);

        // The assignment block for the success path...
        let assign = self.cfg.start_new_block();
        self.cfg.push_assign(assign, source_info, place, value.clone());

        // ...and for the unwind path, so the place is re-initialized even if
        // the drop panics.
        let assign_unwind = self.cfg.start_new_cleanup_block();
        self.cfg.push_assign(assign_unwind, source_info, place, value.clone());

        self.cfg.terminate(
            block,
            source_info,
            TerminatorKind::Drop {
                place,
                target: assign,
                unwind: UnwindAction::Cleanup(assign_unwind),
                replace: true,
                drop: None,
                async_fut: None,
            },
        );
        self.diverge_from(block);

        assign.unit()
    }

    /// Creates an `Assert` terminator and returns the success block.
    /// If the boolean condition operand is not the expected value, a runtime
    /// panic will be caused with the given message.
    pub(crate) fn assert(
        &mut self,
        block: BasicBlock,
        cond: Operand<'tcx>,
        expected: bool,
        msg: AssertMessage<'tcx>,
        span: Span,
    ) -> BasicBlock {
        let source_info = self.source_info(span);
        let success_block = self.cfg.start_new_block();

        self.cfg.terminate(
            block,
            source_info,
            TerminatorKind::Assert {
                cond,
                expected,
                msg: Box::new(msg),
                target: success_block,
                unwind: UnwindAction::Continue,
            },
        );
        self.diverge_from(block);

        success_block
    }

    /// Unschedules any drops in the match-arm and match-guard scopes at the
    /// top of the scope stack, and invalidates their caches. This is needed
    /// because these scopes are entered once per candidate pattern but
    /// exited only once.
    pub(crate) fn clear_match_arm_and_guard_scopes(&mut self, region_scope: region::Scope) {
        let [.., arm_scope, guard_scope] = &mut *self.scopes.scopes else {
            bug!("matches with guards should introduce separate scopes for the pattern and guard");
        };

        assert_eq!(arm_scope.region_scope, region_scope);
        assert_eq!(guard_scope.region_scope.data, region::ScopeData::MatchGuard);
        assert_eq!(guard_scope.region_scope.local_id, region_scope.local_id);

        arm_scope.drops.clear();
        arm_scope.invalidate_cache();
        guard_scope.drops.clear();
        guard_scope.invalidate_cache();
    }
}

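/// Builds drops for `pop_scope` and `leave_top_scope`: the scope's drops are
/// emitted in reverse declaration order onto the end of `block`, while
/// `unwind_to` and `dropline_to` are kept in lockstep with the corresponding
/// nodes of the unwind and coroutine drop trees.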
fn build_scope_drops<'tcx, F>(
    cfg: &mut CFG<'tcx>,
    unwind_drops: &mut DropTree,
    coroutine_drops: &mut DropTree,
    scope: &Scope,
    block: BasicBlock,
    unwind_to: DropIdx,
    dropline_to: Option<DropIdx>,
    storage_dead_on_unwind: bool,
    arg_count: usize,
    is_async_drop: F,
) -> BlockAnd<()>
where
    F: Fn(Local) -> bool,
{
    debug!("build_scope_drops({:?} -> {:?}), dropline_to={:?}", block, scope, dropline_to);

    // `unwind_to` tracks the node in the unwind drop tree that corresponds to
    // the next drop emitted below; it is advanced in lockstep with the drops.
    let mut unwind_to = unwind_to;

    let mut block = block;

    // `dropline_to` does the same for the coroutine drop tree, if this body
    // is a coroutine with async drops.
    let mut dropline_to = dropline_to;

    for drop_data in scope.drops.iter().rev() {
        let source_info = drop_data.source_info;
        let local = drop_data.local;

        match drop_data.kind {
            DropKind::Value => {
                // `unwind_to` should always point to a node with matching
                // drop data, because the unwind tree for this scope was
                // built by `diverge_cleanup`.
                debug_assert_eq!(unwind_drops.drop_nodes[unwind_to].data.local, drop_data.local);
                debug_assert_eq!(unwind_drops.drop_nodes[unwind_to].data.kind, drop_data.kind);
                unwind_to = unwind_drops.drop_nodes[unwind_to].next;

                if let Some(idx) = dropline_to {
                    debug_assert_eq!(coroutine_drops.drop_nodes[idx].data.local, drop_data.local);
                    debug_assert_eq!(coroutine_drops.drop_nodes[idx].data.kind, drop_data.kind);
                    dropline_to = Some(coroutine_drops.drop_nodes[idx].next);
                }

                // If the operand has been moved, and we are not on an unwind
                // path, then don't generate the drop.
                if scope.moved_locals.contains(&local) {
                    continue;
                }

                unwind_drops.add_entry_point(block, unwind_to);
                if let Some(to) = dropline_to
                    && is_async_drop(local)
                {
                    coroutine_drops.add_entry_point(block, to);
                }

                let next = cfg.start_new_block();
                cfg.terminate(
                    block,
                    source_info,
                    TerminatorKind::Drop {
                        place: local.into(),
                        target: next,
                        unwind: UnwindAction::Continue,
                        replace: false,
                        drop: None,
                        async_fut: None,
                    },
                );
                block = next;
            }
            DropKind::ForLint => {
                // When requested, the unwind drop tree also tracks lint and
                // storage nodes, so keep `unwind_to` in sync here too.
                if storage_dead_on_unwind {
                    debug_assert_eq!(
                        unwind_drops.drop_nodes[unwind_to].data.local,
                        drop_data.local
                    );
                    debug_assert_eq!(unwind_drops.drop_nodes[unwind_to].data.kind, drop_data.kind);
                    unwind_to = unwind_drops.drop_nodes[unwind_to].next;
                }

                // If the operand has been moved, no lint hint is needed.
                if scope.moved_locals.contains(&local) {
                    continue;
                }

                cfg.push(
                    block,
                    Statement::new(
                        source_info,
                        StatementKind::BackwardIncompatibleDropHint {
                            place: Box::new(local.into()),
                            reason: BackwardIncompatibleDropReason::Edition2024,
                        },
                    ),
                );
            }
            DropKind::Storage => {
                // As above, keep `unwind_to` and `dropline_to` in sync with
                // the storage nodes in the drop trees.
                if storage_dead_on_unwind {
                    debug_assert_eq!(
                        unwind_drops.drop_nodes[unwind_to].data.local,
                        drop_data.local
                    );
                    debug_assert_eq!(unwind_drops.drop_nodes[unwind_to].data.kind, drop_data.kind);
                    unwind_to = unwind_drops.drop_nodes[unwind_to].next;
                }
                if let Some(idx) = dropline_to {
                    debug_assert_eq!(coroutine_drops.drop_nodes[idx].data.local, drop_data.local);
                    debug_assert_eq!(coroutine_drops.drop_nodes[idx].data.kind, drop_data.kind);
                    dropline_to = Some(coroutine_drops.drop_nodes[idx].next);
                }
                // Only temps and vars need their storage dead.
                assert!(local.index() > arg_count);
                cfg.push(block, Statement::new(source_info, StatementKind::StorageDead(local)));
            }
        }
    }
    block.unit()
}

impl<'a, 'tcx: 'a> Builder<'a, 'tcx> {
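    /// Builds the drop tree for an exit from a breakable (or similar) scope.
    ///
    /// If `continue_block` is `Some`, the tree is for `continue` inside a
    /// loop; otherwise it is for `break` or `return`. The resulting drops
    /// are also wired into the unwind drop tree and, for coroutines with
    /// async drops, the coroutine drop tree.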
    fn build_exit_tree(
        &mut self,
        mut drops: DropTree,
        else_scope: region::Scope,
        span: Span,
        continue_block: Option<BasicBlock>,
    ) -> Option<BlockAnd<()>> {
        let blocks = drops.build_mir::<ExitScopes>(&mut self.cfg, continue_block);
        let is_coroutine = self.coroutine.is_some();

        // Link the exit drop tree to the unwind drop tree.
        if drops.drop_nodes.iter().any(|drop_node| drop_node.data.kind == DropKind::Value) {
            let unwind_target = self.diverge_cleanup_target(else_scope, span);
            let mut unwind_indices = IndexVec::from_elem_n(unwind_target, 1);
            for (drop_idx, drop_node) in drops.drop_nodes.iter_enumerated().skip(1) {
                match drop_node.data.kind {
                    DropKind::Storage | DropKind::ForLint => {
                        if is_coroutine {
                            let unwind_drop = self
                                .scopes
                                .unwind_drops
                                .add_drop(drop_node.data, unwind_indices[drop_node.next]);
                            unwind_indices.push(unwind_drop);
                        } else {
                            unwind_indices.push(unwind_indices[drop_node.next]);
                        }
                    }
                    DropKind::Value => {
                        let unwind_drop = self
                            .scopes
                            .unwind_drops
                            .add_drop(drop_node.data, unwind_indices[drop_node.next]);
                        self.scopes.unwind_drops.add_entry_point(
                            blocks[drop_idx].unwrap(),
                            unwind_indices[drop_node.next],
                        );
                        unwind_indices.push(unwind_drop);
                    }
                }
            }
        }
        // Link the exit drop tree to the coroutine-drop ("dropline") tree for
        // async drops.
        if is_coroutine
            && drops.drop_nodes.iter().any(|DropNode { data, next: _ }| {
                data.kind == DropKind::Value && self.is_async_drop(data.local)
            })
        {
            let dropline_target = self.diverge_dropline_target(else_scope, span);
            let mut dropline_indices = IndexVec::from_elem_n(dropline_target, 1);
            for (drop_idx, drop_data) in drops.drop_nodes.iter_enumerated().skip(1) {
                let coroutine_drop = self
                    .scopes
                    .coroutine_drops
                    .add_drop(drop_data.data, dropline_indices[drop_data.next]);
                match drop_data.data.kind {
                    DropKind::Storage | DropKind::ForLint => {}
                    DropKind::Value => {
                        if self.is_async_drop(drop_data.data.local) {
                            self.scopes.coroutine_drops.add_entry_point(
                                blocks[drop_idx].unwrap(),
                                dropline_indices[drop_data.next],
                            );
                        }
                    }
                }
                dropline_indices.push(coroutine_drop);
            }
        }
        blocks[ROOT_NODE].map(BasicBlock::unit)
    }

    /// Builds the unwind and coroutine drop trees.
    pub(crate) fn build_drop_trees(&mut self) {
        if self.coroutine.is_some() {
            self.build_coroutine_drop_trees();
        } else {
            Self::build_unwind_tree(
                &mut self.cfg,
                &mut self.scopes.unwind_drops,
                self.fn_span,
                &mut None,
            );
        }
    }

    fn build_coroutine_drop_trees(&mut self) {
        // Build the drop tree for dropping the coroutine while it's suspended.
        let drops = &mut self.scopes.coroutine_drops;
        let cfg = &mut self.cfg;
        let fn_span = self.fn_span;
        let blocks = drops.build_mir::<CoroutineDrop>(cfg, None);
        if let Some(root_block) = blocks[ROOT_NODE] {
            cfg.terminate(
                root_block,
                SourceInfo::outermost(fn_span),
                TerminatorKind::CoroutineDrop,
            );
        }

        // Build the drop tree for unwinding in the normal control flow paths.
        let resume_block = &mut None;
        let unwind_drops = &mut self.scopes.unwind_drops;
        Self::build_unwind_tree(cfg, unwind_drops, fn_span, resume_block);

        // Build the drop tree for unwinding when dropping a suspended
        // coroutine. This reuses the coroutine drop tree's nodes as unwind
        // entry points, kept separate from the standard unwind tree so that
        // drop elaboration doesn't create drop flags that the coroutine
        // would have to capture.
        for (drop_idx, drop_node) in drops.drop_nodes.iter_enumerated() {
            if let DropKind::Value = drop_node.data.kind
                && let Some(bb) = blocks[drop_idx]
            {
                debug_assert!(drop_node.next < drops.drop_nodes.next_index());
                drops.entry_points.push((drop_node.next, bb));
            }
        }
        Self::build_unwind_tree(cfg, drops, fn_span, resume_block);
    }

    fn build_unwind_tree(
        cfg: &mut CFG<'tcx>,
        drops: &mut DropTree,
        fn_span: Span,
        resume_block: &mut Option<BasicBlock>,
    ) {
        let blocks = drops.build_mir::<Unwind>(cfg, *resume_block);
        if let (None, Some(resume)) = (*resume_block, blocks[ROOT_NODE]) {
            cfg.terminate(resume, SourceInfo::outermost(fn_span), TerminatorKind::UnwindResume);

            *resume_block = blocks[ROOT_NODE];
        }
    }
}

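// The `DropTreeBuilder` implementations below differ in the kind of block
// they allocate (normal vs. cleanup) and in how an entry point's terminator
// is rewritten to jump into the tree.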
struct ExitScopes;

impl<'tcx> DropTreeBuilder<'tcx> for ExitScopes {
    fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock {
        cfg.start_new_block()
    }
    fn link_entry_point(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) {
        // There should already be a dummy `UnwindResume` terminator with real
        // source info here (added by `break_scope`, `break_for_else`, and
        // `break_const_continuable_scope`); replace it with a proper goto.
        let term = cfg.block_data_mut(from).terminator_mut();
        if let TerminatorKind::UnwindResume = term.kind {
            term.kind = TerminatorKind::Goto { target: to };
        } else {
            span_bug!(term.source_info.span, "unexpected dummy terminator kind: {:?}", term.kind);
        }
    }
}

struct CoroutineDrop;

impl<'tcx> DropTreeBuilder<'tcx> for CoroutineDrop {
    fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock {
        cfg.start_new_block()
    }
    fn link_entry_point(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) {
        let term = cfg.block_data_mut(from).terminator_mut();
        if let TerminatorKind::Yield { ref mut drop, .. } = term.kind {
            *drop = Some(to);
        } else if let TerminatorKind::Drop { ref mut drop, .. } = term.kind {
            *drop = Some(to);
        } else {
            span_bug!(
                term.source_info.span,
                "cannot enter coroutine drop tree from {:?}",
                term.kind
            )
        }
    }
}

struct Unwind;

impl<'tcx> DropTreeBuilder<'tcx> for Unwind {
    fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock {
        cfg.start_new_cleanup_block()
    }
    fn link_entry_point(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) {
        let term = cfg.block_data_mut(from).terminator_mut();
        match &mut term.kind {
            TerminatorKind::Drop { unwind, .. } => {
                if let UnwindAction::Cleanup(unwind) = *unwind {
                    let source_info = term.source_info;
                    cfg.terminate(unwind, source_info, TerminatorKind::Goto { target: to });
                } else {
                    *unwind = UnwindAction::Cleanup(to);
                }
            }
            TerminatorKind::FalseUnwind { unwind, .. }
            | TerminatorKind::Call { unwind, .. }
            | TerminatorKind::Assert { unwind, .. }
            | TerminatorKind::InlineAsm { unwind, .. } => {
                *unwind = UnwindAction::Cleanup(to);
            }
            TerminatorKind::Goto { .. }
            | TerminatorKind::SwitchInt { .. }
            | TerminatorKind::UnwindResume
            | TerminatorKind::UnwindTerminate(_)
            | TerminatorKind::Return
            | TerminatorKind::TailCall { .. }
            | TerminatorKind::Unreachable
            | TerminatorKind::Yield { .. }
            | TerminatorKind::CoroutineDrop
            | TerminatorKind::FalseEdge { .. } => {
                span_bug!(term.source_info.span, "cannot unwind from {:?}", term.kind)
            }
        }
    }
}