use std::mem;

use interpret::ErrorHandled;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::{self as hir, HirId};
use rustc_index::{IndexSlice, IndexVec};
use rustc_middle::middle::region;
use rustc_middle::mir::{self, *};
use rustc_middle::thir::{AdtExpr, AdtExprBase, ArmId, ExprId, ExprKind, LintLevel};
use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt, ValTree};
use rustc_middle::{bug, span_bug};
use rustc_pattern_analysis::rustc::RustcPatCtxt;
use rustc_session::lint::Level;
use rustc_span::source_map::Spanned;
use rustc_span::{DUMMY_SP, Span};
use tracing::{debug, instrument};

use super::matches::BuiltMatchTree;
use crate::builder::{BlockAnd, BlockAndExtension, BlockFrame, Builder, CFG};
use crate::errors::{
    ConstContinueBadConst, ConstContinueNotMonomorphicConst, ConstContinueUnknownJumpTarget,
};

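/// Manages the scopes entered and left while building the MIR of a body:
/// the stack of in-scope regions with their scheduled drops, the stacks of
/// scopes that `break`/`continue`/`#[const_continue]` can target, and the
/// drop trees shared by all unwind and coroutine-drop paths.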
#[derive(Debug)]
pub(crate) struct Scopes<'tcx> {
    scopes: Vec<Scope>,

    breakable_scopes: Vec<BreakableScope<'tcx>>,

    const_continuable_scopes: Vec<ConstContinuableScope<'tcx>>,

    if_then_scope: Option<IfThenScope>,

    unwind_drops: DropTree,

    coroutine_drops: DropTree,
}

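/// A single in-scope region. Drops scheduled in this scope are emitted (in
/// reverse order of declaration) when the scope is exited; the cached fields
/// memoize this scope's position in the unwind and coroutine-drop trees and
/// are invalidated whenever new drops are scheduled.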
#[derive(Debug)]
struct Scope {
    source_scope: SourceScope,

    region_scope: region::Scope,

    drops: Vec<DropData>,

    moved_locals: Vec<Local>,

    cached_unwind_block: Option<DropIdx>,

    cached_coroutine_drop_block: Option<DropIdx>,
}

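/// A drop scheduled for a single local: where it was scheduled, which local
/// it applies to, and what kind of drop it is.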
#[derive(Clone, Copy, Debug)]
struct DropData {
    source_info: SourceInfo,

    local: Local,

    kind: DropKind,
}

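/// The kind of drop to perform: dropping the value (a `Drop` terminator),
/// marking the end of the local's storage (`StorageDead`), or a no-op hint
/// emitted only for the backward-incompatible drop lints.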
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) enum DropKind {
    Value,
    Storage,
    ForLint(BackwardIncompatibleDropReason),
}

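/// A scope that `break` and `continue` can target, along with the drop trees
/// that collect the drops required on each kind of exit.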
#[derive(Debug)]
struct BreakableScope<'tcx> {
    region_scope: region::Scope,
    break_destination: Place<'tcx>,
    break_drops: DropTree,
    continue_drops: Option<DropTree>,
}

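/// A `#[loop_match]` scope that `#[const_continue]` can target: the place
/// holding the loop's state, the match arms that can be jumped between, and
/// the drops required when jumping.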
#[derive(Debug)]
struct ConstContinuableScope<'tcx> {
    region_scope: region::Scope,
    state_place: Place<'tcx>,

    arms: Box<[ArmId]>,
    built_match_tree: BuiltMatchTree<'tcx>,

    const_continue_drops: DropTree,
}

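/// Tracks where failing `if`/`let` conditions should jump, together with the
/// drops required on the way to the `else` block.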
#[derive(Debug)]
struct IfThenScope {
    region_scope: region::Scope,
    else_drops: DropTree,
}

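/// The target of an expression that breaks out of a scope.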
#[derive(Clone, Copy, Debug)]
pub(crate) enum BreakableTarget {
    Continue(region::Scope),
    Break(region::Scope),
    Return,
}

rustc_index::newtype_index! {
    #[orderable]
    struct DropIdx {}
}

const ROOT_NODE: DropIdx = DropIdx::ZERO;

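/// A tree of drops. Each node is one drop, and `next` points toward the
/// single root (`ROOT_NODE`), so walking from a leaf to the root lists the
/// drops performed on one particular exit path. Because `add_drop`
/// deduplicates on `(local, next)`, exits that end in the same sequence of
/// drops share a suffix of the tree.
///
/// As an illustrative sketch (not taken from real MIR), two breaks out of
/// `{ let a = ...; { let b = ...; ... } }` might share the drop of `a`:
///
/// ```text
/// drop(b) ──┐
///           ├──> drop(a) ──> ROOT_NODE
/// drop(b') ─┘
/// ```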
#[derive(Debug)]
struct DropTree {
    drop_nodes: IndexVec<DropIdx, DropNode>,
    existing_drops_map: FxHashMap<DropNodeKey, DropIdx>,
    entry_points: Vec<(DropIdx, BasicBlock)>,
}

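/// A single drop in a `DropTree`; `next` is the drop that follows it on the
/// way to the root of the tree.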
#[derive(Debug)]
struct DropNode {
    data: DropData,
    next: DropIdx,
}

#[derive(Debug, PartialEq, Eq, Hash)]
struct DropNodeKey {
    next: DropIdx,
    local: Local,
}

impl Scope {
    fn needs_cleanup(&self) -> bool {
        self.drops.iter().any(|drop| match drop.kind {
            DropKind::Value | DropKind::ForLint(_) => true,
            DropKind::Storage => false,
        })
    }

    fn invalidate_cache(&mut self) {
        self.cached_unwind_block = None;
        self.cached_coroutine_drop_block = None;
    }
}

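/// How a drop tree's blocks are created and hooked into the CFG. Implemented
/// once per way a drop tree can be exited: normal scope exit, coroutine drop,
/// and unwind.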
trait DropTreeBuilder<'tcx> {
    fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock;

    fn link_entry_point(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock);
}

impl DropTree {
    fn new() -> Self {
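        // The root node is a dummy: it performs no drop of its own and only
        // marks the point every path through the tree eventually reaches.
        // `Local::MAX` and `DropIdx::MAX` make accidental use of its data
        // easy to spot.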
        let fake_source_info = SourceInfo::outermost(DUMMY_SP);
        let fake_data =
            DropData { source_info: fake_source_info, local: Local::MAX, kind: DropKind::Storage };
        let drop_nodes = IndexVec::from_raw(vec![DropNode { data: fake_data, next: DropIdx::MAX }]);
        Self { drop_nodes, entry_points: Vec::new(), existing_drops_map: FxHashMap::default() }
    }

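    /// Adds a node that drops `data.local` and then continues at `next`,
    /// reusing an existing node if the same `(local, next)` pair was added
    /// before, so that shared suffixes of drops are represented only once.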
    fn add_drop(&mut self, data: DropData, next: DropIdx) -> DropIdx {
        let drop_nodes = &mut self.drop_nodes;
        *self
            .existing_drops_map
            .entry(DropNodeKey { next, local: data.local })
            .or_insert_with(|| drop_nodes.push(DropNode { data, next }))
    }

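    /// Registers `from` as a block that enters the drop tree at node `to`.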
    fn add_entry_point(&mut self, from: BasicBlock, to: DropIdx) {
        debug_assert!(to < self.drop_nodes.next_index());
        self.entry_points.push((to, from));
    }

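    /// Builds the MIR for the drop tree: assigns a basic block to every node
    /// that needs one, then emits the drop statements/terminators and links
    /// the blocks together. Returns the block chosen for each node.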
    fn build_mir<'tcx, T: DropTreeBuilder<'tcx>>(
        &mut self,
        cfg: &mut CFG<'tcx>,
        root_node: Option<BasicBlock>,
    ) -> IndexVec<DropIdx, Option<BasicBlock>> {
        debug!("DropTree::build_mir(drops = {:#?})", self);

        let mut blocks = self.assign_blocks::<T>(cfg, root_node);
        self.link_blocks(cfg, &mut blocks);

        blocks
    }

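    /// Assigns a basic block to each drop node that needs one. A node needs
    /// its own block (`Block::Own`) if it is an entry point, if its drop
    /// emits a terminator (`DropKind::Value`), or if two children would
    /// otherwise share it; a node with a single non-terminator child can
    /// instead share that child's block (`Block::Shares`).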
    fn assign_blocks<'tcx, T: DropTreeBuilder<'tcx>>(
        &mut self,
        cfg: &mut CFG<'tcx>,
        root_node: Option<BasicBlock>,
    ) -> IndexVec<DropIdx, Option<BasicBlock>> {
        #[derive(Clone, Copy)]
        enum Block {
            None,
            Shares(DropIdx),
            Own,
        }

        let mut blocks = IndexVec::from_elem(None, &self.drop_nodes);
        blocks[ROOT_NODE] = root_node;

        let mut needs_block = IndexVec::from_elem(Block::None, &self.drop_nodes);
        if root_node.is_some() {
            needs_block[ROOT_NODE] = Block::Own;
        }

        let entry_points = &mut self.entry_points;
        entry_points.sort();

        for (drop_idx, drop_node) in self.drop_nodes.iter_enumerated().rev() {
            if entry_points.last().is_some_and(|entry_point| entry_point.0 == drop_idx) {
                let block = *blocks[drop_idx].get_or_insert_with(|| T::make_block(cfg));
                needs_block[drop_idx] = Block::Own;
                while entry_points.last().is_some_and(|entry_point| entry_point.0 == drop_idx) {
                    let entry_block = entry_points.pop().unwrap().1;
                    T::link_entry_point(cfg, entry_block, block);
                }
            }
            match needs_block[drop_idx] {
                Block::None => continue,
                Block::Own => {
                    blocks[drop_idx].get_or_insert_with(|| T::make_block(cfg));
                }
                Block::Shares(pred) => {
                    blocks[drop_idx] = blocks[pred];
                }
            }
            if let DropKind::Value = drop_node.data.kind {
                needs_block[drop_node.next] = Block::Own;
            } else if drop_idx != ROOT_NODE {
                match &mut needs_block[drop_node.next] {
                    pred @ Block::None => *pred = Block::Shares(drop_idx),
                    pred @ Block::Shares(_) => *pred = Block::Own,
                    Block::Own => (),
                }
            }
        }

        debug!("assign_blocks: blocks = {:#?}", blocks);
        assert!(entry_points.is_empty());

        blocks
    }

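    /// Fills in the blocks assigned by `assign_blocks` with the actual drop
    /// statements and terminators, chaining each block to its parent's block.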
    fn link_blocks<'tcx>(
        &self,
        cfg: &mut CFG<'tcx>,
        blocks: &IndexSlice<DropIdx, Option<BasicBlock>>,
    ) {
        for (drop_idx, drop_node) in self.drop_nodes.iter_enumerated().rev() {
            let Some(block) = blocks[drop_idx] else { continue };
            match drop_node.data.kind {
                DropKind::Value => {
                    let terminator = TerminatorKind::Drop {
                        target: blocks[drop_node.next].unwrap(),
                        unwind: UnwindAction::Terminate(UnwindTerminateReason::InCleanup),
                        place: drop_node.data.local.into(),
                        replace: false,
                        drop: None,
                        async_fut: None,
                    };
                    cfg.terminate(block, drop_node.data.source_info, terminator);
                }
                DropKind::ForLint(reason) => {
                    let stmt = Statement::new(
                        drop_node.data.source_info,
                        StatementKind::BackwardIncompatibleDropHint {
                            place: Box::new(drop_node.data.local.into()),
                            reason,
                        },
                    );
                    cfg.push(block, stmt);
                    let target = blocks[drop_node.next].unwrap();
                    if target != block {
                        let source_info =
                            SourceInfo { span: DUMMY_SP, ..drop_node.data.source_info };
                        let terminator = TerminatorKind::Goto { target };
                        cfg.terminate(block, source_info, terminator);
                    }
                }
                DropKind::Storage if drop_idx == ROOT_NODE => {}
                DropKind::Storage => {
                    let stmt = Statement::new(
                        drop_node.data.source_info,
                        StatementKind::StorageDead(drop_node.data.local),
                    );
                    cfg.push(block, stmt);
                    let target = blocks[drop_node.next].unwrap();
                    if target != block {
                        let source_info =
                            SourceInfo { span: DUMMY_SP, ..drop_node.data.source_info };
                        let terminator = TerminatorKind::Goto { target };
                        cfg.terminate(block, source_info, terminator);
                    }
                }
            }
        }
    }
}

impl<'tcx> Scopes<'tcx> {
    pub(crate) fn new() -> Self {
        Self {
            scopes: Vec::new(),
            breakable_scopes: Vec::new(),
            const_continuable_scopes: Vec::new(),
            if_then_scope: None,
            unwind_drops: DropTree::new(),
            coroutine_drops: DropTree::new(),
        }
    }

    fn push_scope(&mut self, region_scope: (region::Scope, SourceInfo), vis_scope: SourceScope) {
        debug!("push_scope({:?})", region_scope);
        self.scopes.push(Scope {
            source_scope: vis_scope,
            region_scope: region_scope.0,
            drops: vec![],
            moved_locals: vec![],
            cached_unwind_block: None,
            cached_coroutine_drop_block: None,
        });
    }

    fn pop_scope(&mut self, region_scope: (region::Scope, SourceInfo)) -> Scope {
        let scope = self.scopes.pop().unwrap();
        assert_eq!(scope.region_scope, region_scope.0);
        scope
    }

    fn scope_index(&self, region_scope: region::Scope, span: Span) -> usize {
        self.scopes
            .iter()
            .rposition(|scope| scope.region_scope == region_scope)
            .unwrap_or_else(|| span_bug!(span, "region_scope {:?} does not enclose", region_scope))
    }

    fn topmost(&self) -> region::Scope {
        self.scopes.last().expect("topmost_scope: no scopes present").region_scope
    }
}

impl<'a, 'tcx> Builder<'a, 'tcx> {
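    /// Starts a breakable scope: while building `f`, `break` and `continue`
    /// (and, for the outermost scope, `return`) can target it. Returns the
    /// block the scope continues in after a normal exit and/or a `break`.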
    pub(crate) fn in_breakable_scope<F>(
        &mut self,
        loop_block: Option<BasicBlock>,
        break_destination: Place<'tcx>,
        span: Span,
        f: F,
    ) -> BlockAnd<()>
    where
        F: FnOnce(&mut Builder<'a, 'tcx>) -> Option<BlockAnd<()>>,
    {
        let region_scope = self.scopes.topmost();
        let scope = BreakableScope {
            region_scope,
            break_destination,
            break_drops: DropTree::new(),
            continue_drops: loop_block.map(|_| DropTree::new()),
        };
        self.scopes.breakable_scopes.push(scope);
        let normal_exit_block = f(self);
        let breakable_scope = self.scopes.breakable_scopes.pop().unwrap();
        assert!(breakable_scope.region_scope == region_scope);
        let break_block =
            self.build_exit_tree(breakable_scope.break_drops, region_scope, span, None);
        if let Some(drops) = breakable_scope.continue_drops {
            self.build_exit_tree(drops, region_scope, span, loop_block);
        }
        match (normal_exit_block, break_block) {
            (Some(block), None) | (None, Some(block)) => block,
            (None, None) => self.cfg.start_new_block().unit(),
            (Some(normal_block), Some(exit_block)) => {
                let target = self.cfg.start_new_block();
                let source_info = self.source_info(span);
                self.cfg.terminate(
                    normal_block.into_block(),
                    source_info,
                    TerminatorKind::Goto { target },
                );
                self.cfg.terminate(
                    exit_block.into_block(),
                    source_info,
                    TerminatorKind::Goto { target },
                );
                target.unit()
            }
        }
    }

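    /// Starts a scope that `#[const_continue]` can jump out of: the body of
    /// a `#[loop_match]` arm.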
    pub(crate) fn in_const_continuable_scope<F>(
        &mut self,
        arms: Box<[ArmId]>,
        built_match_tree: BuiltMatchTree<'tcx>,
        state_place: Place<'tcx>,
        span: Span,
        f: F,
    ) -> BlockAnd<()>
    where
        F: FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<()>,
    {
        let region_scope = self.scopes.topmost();
        let scope = ConstContinuableScope {
            region_scope,
            state_place,
            const_continue_drops: DropTree::new(),
            arms,
            built_match_tree,
        };
        self.scopes.const_continuable_scopes.push(scope);
        let normal_exit_block = f(self);
        let const_continue_scope = self.scopes.const_continuable_scopes.pop().unwrap();
        assert!(const_continue_scope.region_scope == region_scope);

        let break_block = self.build_exit_tree(
            const_continue_scope.const_continue_drops,
            region_scope,
            span,
            None,
        );

        match (normal_exit_block, break_block) {
            (block, None) => block,
            (normal_block, Some(exit_block)) => {
                let target = self.cfg.start_new_block();
                let source_info = self.source_info(span);
                self.cfg.terminate(
                    normal_block.into_block(),
                    source_info,
                    TerminatorKind::Goto { target },
                );
                self.cfg.terminate(
                    exit_block.into_block(),
                    source_info,
                    TerminatorKind::Goto { target },
                );
                target.unit()
            }
        }
    }

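    /// Starts an if-then scope: while building `f`, failing `if`/`let`
    /// conditions branch to the drop tree that leads to the `else` block.
    /// Returns the `then` and `else` entry blocks.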
    pub(crate) fn in_if_then_scope<F>(
        &mut self,
        region_scope: region::Scope,
        span: Span,
        f: F,
    ) -> (BasicBlock, BasicBlock)
    where
        F: FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<()>,
    {
        let scope = IfThenScope { region_scope, else_drops: DropTree::new() };
        let previous_scope = mem::replace(&mut self.scopes.if_then_scope, Some(scope));

        let then_block = f(self).into_block();

        let if_then_scope = mem::replace(&mut self.scopes.if_then_scope, previous_scope).unwrap();
        assert!(if_then_scope.region_scope == region_scope);

        let else_block =
            self.build_exit_tree(if_then_scope.else_drops, region_scope, span, None).map_or_else(
                || self.cfg.start_new_block(),
                |else_block_and| else_block_and.into_block(),
            );

        (then_block, else_block)
    }

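    /// Convenience wrapper that pushes a scope, builds `f` inside it, and
    /// pops the scope again, entering and leaving the given lint level around
    /// `f`.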
    #[instrument(skip(self, f), level = "debug")]
    pub(crate) fn in_scope<F, R>(
        &mut self,
        region_scope: (region::Scope, SourceInfo),
        lint_level: LintLevel,
        f: F,
    ) -> BlockAnd<R>
    where
        F: FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<R>,
    {
        let source_scope = self.source_scope;
        if let LintLevel::Explicit(current_hir_id) = lint_level {
            let parent_id =
                self.source_scopes[source_scope].local_data.as_ref().unwrap_crate_local().lint_root;
            self.maybe_new_source_scope(region_scope.1.span, current_hir_id, parent_id);
        }
        self.push_scope(region_scope);
        let mut block;
        let rv = unpack!(block = f(self));
        block = self.pop_scope(region_scope, block).into_block();
        self.source_scope = source_scope;
        debug!(?block);
        block.and(rv)
    }

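    /// Like `in_scope`, but the scope is optional: with `None`, `f` is built
    /// directly in the current scope.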
    pub(crate) fn opt_in_scope<R>(
        &mut self,
        opt_region_scope: Option<(region::Scope, SourceInfo)>,
        f: impl FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<R>,
    ) -> BlockAnd<R> {
        if let Some(region_scope) = opt_region_scope {
            self.in_scope(region_scope, LintLevel::Inherited, f)
        } else {
            f(self)
        }
    }

    pub(crate) fn push_scope(&mut self, region_scope: (region::Scope, SourceInfo)) {
        self.scopes.push_scope(region_scope, self.source_scope);
    }

    pub(crate) fn pop_scope(
        &mut self,
        region_scope: (region::Scope, SourceInfo),
        mut block: BasicBlock,
    ) -> BlockAnd<()> {
        debug!("pop_scope({:?}, {:?})", region_scope, block);

        block = self.leave_top_scope(block);

        self.scopes.pop_scope(region_scope);

        block.unit()
    }

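    /// Builds the MIR for a `break`, `continue` or `return` expression:
    /// evaluates `value` into the target's destination (if any), records the
    /// drops needed to leave the intervening scopes as an entry point into
    /// the appropriate drop tree, and seals the block with a dummy terminator
    /// that is replaced when the tree is built.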
    pub(crate) fn break_scope(
        &mut self,
        mut block: BasicBlock,
        value: Option<ExprId>,
        target: BreakableTarget,
        source_info: SourceInfo,
    ) -> BlockAnd<()> {
        let span = source_info.span;

        let get_scope_index = |scope: region::Scope| {
            self.scopes
                .breakable_scopes
                .iter()
                .rposition(|breakable_scope| breakable_scope.region_scope == scope)
                .unwrap_or_else(|| span_bug!(span, "no enclosing breakable scope found"))
        };
        let (break_index, destination) = match target {
            BreakableTarget::Return => {
                let scope = &self.scopes.breakable_scopes[0];
                if scope.break_destination != Place::return_place() {
                    span_bug!(span, "`return` in item with no return scope");
                }
                (0, Some(scope.break_destination))
            }
            BreakableTarget::Break(scope) => {
                let break_index = get_scope_index(scope);
                let scope = &self.scopes.breakable_scopes[break_index];
                (break_index, Some(scope.break_destination))
            }
            BreakableTarget::Continue(scope) => {
                let break_index = get_scope_index(scope);
                (break_index, None)
            }
        };

        match (destination, value) {
            (Some(destination), Some(value)) => {
                debug!("stmt_expr Break val block_context.push(SubExpr)");
                self.block_context.push(BlockFrame::SubExpr);
                block = self.expr_into_dest(destination, block, value).into_block();
                self.block_context.pop();
            }
            (Some(destination), None) => {
                self.cfg.push_assign_unit(block, source_info, destination, self.tcx)
            }
            (None, Some(_)) => {
                panic!("`return`, `become` and `break` with a value must have a destination")
            }
            (None, None) => {
                if self.tcx.sess.instrument_coverage() {
                    self.cfg.push_coverage_span_marker(block, source_info);
                }
            }
        }

        let region_scope = self.scopes.breakable_scopes[break_index].region_scope;
        let scope_index = self.scopes.scope_index(region_scope, span);
        let drops = if destination.is_some() {
            &mut self.scopes.breakable_scopes[break_index].break_drops
        } else {
            let Some(drops) = self.scopes.breakable_scopes[break_index].continue_drops.as_mut()
            else {
                self.tcx.dcx().span_delayed_bug(
                    source_info.span,
                    "unlabelled `continue` within labelled block",
                );
                self.cfg.terminate(block, source_info, TerminatorKind::Unreachable);

                return self.cfg.start_new_block().unit();
            };
            drops
        };

        let mut drop_idx = ROOT_NODE;
        for scope in &self.scopes.scopes[scope_index + 1..] {
            for drop in &scope.drops {
                drop_idx = drops.add_drop(*drop, drop_idx);
            }
        }
        drops.add_entry_point(block, drop_idx);

        self.cfg.terminate(block, source_info, TerminatorKind::UnwindResume);

        self.cfg.start_new_block().unit()
    }

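    /// Tries to evaluate an already-monomorphic MIR constant to a valtree,
    /// for use as a `#[const_continue]` jump target.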
    fn eval_unevaluated_mir_constant_to_valtree(
        &self,
        constant: ConstOperand<'tcx>,
    ) -> Result<(ty::ValTree<'tcx>, Ty<'tcx>), interpret::ErrorHandled> {
        assert!(!constant.const_.ty().has_param());
        let (uv, ty) = match constant.const_ {
            mir::Const::Unevaluated(uv, ty) => (uv.shrink(), ty),
            mir::Const::Ty(_, c) => match c.kind() {
                ty::ConstKind::Value(cv) => return Ok((cv.valtree, cv.ty)),
                other => span_bug!(constant.span, "{other:#?}"),
            },
            mir::Const::Val(mir::ConstValue::Scalar(mir::interpret::Scalar::Int(val)), ty) => {
                return Ok((ValTree::from_scalar_int(self.tcx, val), ty));
            }
            other => span_bug!(constant.span, "{other:#?}"),
        };

        match self.tcx.const_eval_resolve_for_typeck(self.typing_env(), uv, constant.span) {
            Ok(Ok(valtree)) => Ok((valtree, ty)),
            Ok(Err(ty)) => span_bug!(constant.span, "could not convert {ty:?} to a valtree"),
            Err(e) => Err(e),
        }
    }

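    /// Builds the MIR for `#[const_continue]`: evaluates the new state value
    /// to a constant, statically resolves which arm it jumps to, emits the
    /// drops for the scopes being left, and branches to that arm.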
    pub(crate) fn break_const_continuable_scope(
        &mut self,
        mut block: BasicBlock,
        value: ExprId,
        scope: region::Scope,
        source_info: SourceInfo,
    ) -> BlockAnd<()> {
        let span = source_info.span;

        let rustc_middle::thir::ExprKind::Scope { value, .. } = self.thir[value].kind else {
            span_bug!(span, "break value must be a scope")
        };

        let expr = &self.thir[value];
        let constant = match &expr.kind {
            ExprKind::Adt(box AdtExpr { variant_index, fields, base, .. }) => {
                assert!(matches!(base, AdtExprBase::None));
                assert!(fields.is_empty());
                ConstOperand {
                    span: self.thir[value].span,
                    user_ty: None,
                    const_: Const::Ty(
                        self.thir[value].ty,
                        ty::Const::new_value(
                            self.tcx,
                            ValTree::from_branches(
                                self.tcx,
                                [ValTree::from_scalar_int(self.tcx, variant_index.as_u32().into())],
                            ),
                            self.thir[value].ty,
                        ),
                    ),
                }
            }

            ExprKind::Literal { .. }
            | ExprKind::NonHirLiteral { .. }
            | ExprKind::ZstLiteral { .. }
            | ExprKind::NamedConst { .. } => self.as_constant(&self.thir[value]),

            other => {
                use crate::errors::ConstContinueNotMonomorphicConstReason as Reason;

                let span = expr.span;
                let reason = match other {
                    ExprKind::ConstParam { .. } => Reason::ConstantParameter { span },
                    ExprKind::ConstBlock { .. } => Reason::ConstBlock { span },
                    _ => Reason::Other { span },
                };

                self.tcx
                    .dcx()
                    .emit_err(ConstContinueNotMonomorphicConst { span: expr.span, reason });
                return block.unit();
            }
        };

        let break_index = self
            .scopes
            .const_continuable_scopes
            .iter()
            .rposition(|const_continuable_scope| const_continuable_scope.region_scope == scope)
            .unwrap_or_else(|| span_bug!(span, "no enclosing const-continuable scope found"));

        let scope = &self.scopes.const_continuable_scopes[break_index];

        let state_decl = &self.local_decls[scope.state_place.as_local().unwrap()];
        let state_ty = state_decl.ty;
        let (discriminant_ty, rvalue) = match state_ty.kind() {
            ty::Adt(adt_def, _) if adt_def.is_enum() => {
                (state_ty.discriminant_ty(self.tcx), Rvalue::Discriminant(scope.state_place))
            }
            ty::Uint(_) | ty::Int(_) | ty::Float(_) | ty::Bool | ty::Char => {
                (state_ty, Rvalue::Use(Operand::Copy(scope.state_place)))
            }
            _ => span_bug!(state_decl.source_info.span, "unsupported #[loop_match] state"),
        };

        let dropless_arena = rustc_arena::DroplessArena::default();
        let typeck_results = self.tcx.typeck(self.def_id);
        let cx = RustcPatCtxt {
            tcx: self.tcx,
            typeck_results,
            module: self.tcx.parent_module(self.hir_id).to_def_id(),
            typing_env: rustc_middle::ty::TypingEnv::non_body_analysis(self.tcx, self.def_id),
            dropless_arena: &dropless_arena,
            match_lint_level: self.hir_id,
            whole_match_span: Some(rustc_span::Span::default()),
            scrut_span: rustc_span::Span::default(),
            refutable: true,
            known_valid_scrutinee: true,
            internal_state: Default::default(),
        };

        let valtree = match self.eval_unevaluated_mir_constant_to_valtree(constant) {
            Ok((valtree, ty)) => {
                assert!(!ty.has_param());

                valtree
            }
            Err(ErrorHandled::Reported(..)) => {
                return block.unit();
            }
            Err(ErrorHandled::TooGeneric(_)) => {
                self.tcx.dcx().emit_fatal(ConstContinueBadConst { span: constant.span });
            }
        };

        let Some(real_target) =
            self.static_pattern_match(&cx, valtree, &*scope.arms, &scope.built_match_tree)
        else {
            self.tcx.dcx().emit_fatal(ConstContinueUnknownJumpTarget { span })
        };

        self.block_context.push(BlockFrame::SubExpr);
        let state_place = scope.state_place;
        block = self.expr_into_dest(state_place, block, value).into_block();
        self.block_context.pop();

        let discr = self.temp(discriminant_ty, source_info.span);
        let scope_index = self
            .scopes
            .scope_index(self.scopes.const_continuable_scopes[break_index].region_scope, span);
        let scope = &mut self.scopes.const_continuable_scopes[break_index];
        self.cfg.push_assign(block, source_info, discr, rvalue);
        let drop_and_continue_block = self.cfg.start_new_block();
        let imaginary_target = self.cfg.start_new_block();
        self.cfg.terminate(
            block,
            source_info,
            TerminatorKind::FalseEdge { real_target: drop_and_continue_block, imaginary_target },
        );

        let drops = &mut scope.const_continue_drops;

        let drop_idx = self.scopes.scopes[scope_index + 1..]
            .iter()
            .flat_map(|scope| &scope.drops)
            .fold(ROOT_NODE, |drop_idx, &drop| drops.add_drop(drop, drop_idx));

        drops.add_entry_point(imaginary_target, drop_idx);

        self.cfg.terminate(imaginary_target, source_info, TerminatorKind::UnwindResume);

        let region_scope = scope.region_scope;
        let scope_index = self.scopes.scope_index(region_scope, span);
        let mut drops = DropTree::new();

        let drop_idx = self.scopes.scopes[scope_index + 1..]
            .iter()
            .flat_map(|scope| &scope.drops)
            .fold(ROOT_NODE, |drop_idx, &drop| drops.add_drop(drop, drop_idx));

        drops.add_entry_point(drop_and_continue_block, drop_idx);

        self.cfg.terminate(drop_and_continue_block, source_info, TerminatorKind::UnwindResume);

        self.build_exit_tree(drops, region_scope, span, Some(real_target));

        return self.cfg.start_new_block().unit();
    }

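    /// Sets up the drops for jumping from `block` to the `else` branch of the
    /// innermost if-then scope, sealing `block` with the same dummy
    /// terminator that `break_scope` uses.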
    pub(crate) fn break_for_else(&mut self, block: BasicBlock, source_info: SourceInfo) {
        let if_then_scope = self
            .scopes
            .if_then_scope
            .as_ref()
            .unwrap_or_else(|| span_bug!(source_info.span, "no if-then scope found"));

        let target = if_then_scope.region_scope;
        let scope_index = self.scopes.scope_index(target, source_info.span);

        let if_then_scope = self.scopes.if_then_scope.as_mut().expect("upgrading & to &mut");

        let mut drop_idx = ROOT_NODE;
        let drops = &mut if_then_scope.else_drops;
        for scope in &self.scopes.scopes[scope_index + 1..] {
            for drop in &scope.drops {
                drop_idx = drops.add_drop(*drop, drop_idx);
            }
        }
        drops.add_entry_point(block, drop_idx);

        self.cfg.terminate(block, source_info, TerminatorKind::UnwindResume);
    }

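    /// Sets up the drops for explicit tail calls. Unlike other kinds of early
    /// exits, these drops are emitted inline: all in-scope values except the
    /// moved call arguments are dropped in reverse declaration order, while
    /// the unwind drop tree is kept in sync step by step.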
    pub(crate) fn break_for_tail_call(
        &mut self,
        mut block: BasicBlock,
        args: &[Spanned<Operand<'tcx>>],
        source_info: SourceInfo,
    ) -> BlockAnd<()> {
        let arg_drops: Vec<_> = args
            .iter()
            .rev()
            .filter_map(|arg| match &arg.node {
                Operand::Copy(_) => bug!("copy op in tail call args"),
                Operand::Move(place) => {
                    let local =
                        place.as_local().unwrap_or_else(|| bug!("projection in tail call args"));

                    if !self.local_decls[local].ty.needs_drop(self.tcx, self.typing_env()) {
                        return None;
                    }

                    Some(DropData { source_info, local, kind: DropKind::Value })
                }
                Operand::Constant(_) => None,
            })
            .collect();

        let mut unwind_to = self.diverge_cleanup_target(
            self.scopes.scopes.iter().rev().nth(1).unwrap().region_scope,
            DUMMY_SP,
        );
        let typing_env = self.typing_env();
        let unwind_drops = &mut self.scopes.unwind_drops;

        for scope in self.scopes.scopes[1..].iter().rev().skip(1) {
            for drop_data in scope.drops.iter().rev() {
                let source_info = drop_data.source_info;
                let local = drop_data.local;

                if !self.local_decls[local].ty.needs_drop(self.tcx, typing_env) {
                    continue;
                }

                match drop_data.kind {
                    DropKind::Value => {
                        debug_assert_eq!(
                            unwind_drops.drop_nodes[unwind_to].data.local,
                            drop_data.local
                        );
                        debug_assert_eq!(
                            unwind_drops.drop_nodes[unwind_to].data.kind,
                            drop_data.kind
                        );
                        unwind_to = unwind_drops.drop_nodes[unwind_to].next;

                        let mut unwind_entry_point = unwind_to;

                        for drop in arg_drops.iter().copied() {
                            unwind_entry_point = unwind_drops.add_drop(drop, unwind_entry_point);
                        }

                        unwind_drops.add_entry_point(block, unwind_entry_point);

                        let next = self.cfg.start_new_block();
                        self.cfg.terminate(
                            block,
                            source_info,
                            TerminatorKind::Drop {
                                place: local.into(),
                                target: next,
                                unwind: UnwindAction::Continue,
                                replace: false,
                                drop: None,
                                async_fut: None,
                            },
                        );
                        block = next;
                    }
                    DropKind::ForLint(reason) => {
                        self.cfg.push(
                            block,
                            Statement::new(
                                source_info,
                                StatementKind::BackwardIncompatibleDropHint {
                                    place: Box::new(local.into()),
                                    reason,
                                },
                            ),
                        );
                    }
                    DropKind::Storage => {
                        assert!(local.index() > self.arg_count);
                        self.cfg.push(
                            block,
                            Statement::new(source_info, StatementKind::StorageDead(local)),
                        );
                    }
                }
            }
        }

        block.unit()
    }

    fn is_async_drop_impl(
        tcx: TyCtxt<'tcx>,
        local_decls: &IndexVec<Local, LocalDecl<'tcx>>,
        typing_env: ty::TypingEnv<'tcx>,
        local: Local,
    ) -> bool {
        let ty = local_decls[local].ty;
        if ty.is_async_drop(tcx, typing_env) || ty.is_coroutine() {
            return true;
        }
        ty.needs_async_drop(tcx, typing_env)
    }
    fn is_async_drop(&self, local: Local) -> bool {
        Self::is_async_drop_impl(self.tcx, &self.local_decls, self.typing_env(), local)
    }

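    /// Emits the drops for leaving the current (topmost) scope through normal
    /// control flow, wiring up the unwind and coroutine-drop paths as needed.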
    fn leave_top_scope(&mut self, block: BasicBlock) -> BasicBlock {
        let needs_cleanup = self.scopes.scopes.last().is_some_and(|scope| scope.needs_cleanup());
        let is_coroutine = self.coroutine.is_some();
        let unwind_to = if needs_cleanup { self.diverge_cleanup() } else { DropIdx::MAX };

        let scope = self.scopes.scopes.last().expect("leave_top_scope called with no scopes");
        let has_async_drops = is_coroutine
            && scope.drops.iter().any(|v| v.kind == DropKind::Value && self.is_async_drop(v.local));
        let dropline_to = if has_async_drops { Some(self.diverge_dropline()) } else { None };
        let scope = self.scopes.scopes.last().expect("leave_top_scope called with no scopes");
        let typing_env = self.typing_env();
        build_scope_drops(
            &mut self.cfg,
            &mut self.scopes.unwind_drops,
            &mut self.scopes.coroutine_drops,
            scope,
            block,
            unwind_to,
            dropline_to,
            is_coroutine && needs_cleanup,
            self.arg_count,
            |v: Local| Self::is_async_drop_impl(self.tcx, &self.local_decls, typing_env, v),
        )
        .into_block()
    }

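    /// Possibly creates a new source scope if `current_id` and `parent_id`
    /// have different lint-level roots, or unconditionally if
    /// `-Zmaximal-hir-to-mir-coverage` is enabled.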
    pub(crate) fn maybe_new_source_scope(
        &mut self,
        span: Span,
        current_id: HirId,
        parent_id: HirId,
    ) {
        let (current_root, parent_root) =
            if self.tcx.sess.opts.unstable_opts.maximal_hir_to_mir_coverage {
                (current_id, parent_id)
            } else {
                (
                    self.maybe_lint_level_root_bounded(current_id),
                    if parent_id == self.hir_id {
                        parent_id
                    } else {
                        self.maybe_lint_level_root_bounded(parent_id)
                    },
                )
            };

        if current_root != parent_root {
            let lint_level = LintLevel::Explicit(current_root);
            self.source_scope = self.new_source_scope(span, lint_level);
        }
    }

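    /// Walks the HIR upwards from `orig_id` looking for the nearest node that
    /// carries a lint attribute, stopping at `self.hir_id` (the root of the
    /// body being built). Results are cached so repeated walks short-circuit.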
    fn maybe_lint_level_root_bounded(&mut self, orig_id: HirId) -> HirId {
        assert_eq!(orig_id.owner, self.hir_id.owner);

        let mut id = orig_id;
        loop {
            if id == self.hir_id {
                break;
            }

            if self.tcx.hir_attrs(id).iter().any(|attr| Level::from_attr(attr).is_some()) {
                return id;
            }

            let next = self.tcx.parent_hir_id(id);
            if next == id {
                bug!("lint traversal reached the root of the crate");
            }
            id = next;

            if self.lint_level_roots_cache.contains(id.local_id) {
                break;
            }
        }

        self.lint_level_roots_cache.insert(orig_id.local_id);
        self.hir_id
    }

    pub(crate) fn new_source_scope(&mut self, span: Span, lint_level: LintLevel) -> SourceScope {
        let parent = self.source_scope;
        debug!(
            "new_source_scope({:?}, {:?}) - parent({:?})={:?}",
            span,
            lint_level,
            parent,
            self.source_scopes.get(parent)
        );
        let scope_local_data = SourceScopeLocalData {
            lint_root: if let LintLevel::Explicit(lint_root) = lint_level {
                lint_root
            } else {
                self.source_scopes[parent].local_data.as_ref().unwrap_crate_local().lint_root
            },
        };
        self.source_scopes.push(SourceScopeData {
            span,
            parent_scope: Some(parent),
            inlined: None,
            inlined_parent_scope: None,
            local_data: ClearCrossCrate::Set(scope_local_data),
        })
    }

    pub(crate) fn source_info(&self, span: Span) -> SourceInfo {
        SourceInfo { span, scope: self.source_scope }
    }

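    /// Returns the topmost region scope, i.e. the scope that new local
    /// temporaries should be scheduled to drop in.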
    pub(crate) fn local_scope(&self) -> region::Scope {
        self.scopes.topmost()
    }

    pub(crate) fn schedule_drop_storage_and_value(
        &mut self,
        span: Span,
        region_scope: region::Scope,
        local: Local,
    ) {
        self.schedule_drop(span, region_scope, local, DropKind::Storage);
        self.schedule_drop(span, region_scope, local, DropKind::Value);
    }

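    /// Indicates that `local` should be dropped (with the given `drop_kind`)
    /// when `region_scope` is exited, and invalidates the cached drop-tree
    /// positions of every enclosing scope.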
    pub(crate) fn schedule_drop(
        &mut self,
        span: Span,
        region_scope: region::Scope,
        local: Local,
        drop_kind: DropKind,
    ) {
        let needs_drop = match drop_kind {
            DropKind::Value | DropKind::ForLint(_) => {
                if !self.local_decls[local].ty.needs_drop(self.tcx, self.typing_env()) {
                    return;
                }
                true
            }
            DropKind::Storage => {
                if local.index() <= self.arg_count {
                    span_bug!(
                        span,
                        "`schedule_drop` called with body argument {:?} \
                         but its storage does not require a drop",
                        local,
                    )
                }
                false
            }
        };

        let invalidate_caches = needs_drop || self.coroutine.is_some();
        for scope in self.scopes.scopes.iter_mut().rev() {
            if invalidate_caches {
                scope.invalidate_cache();
            }

            if scope.region_scope == region_scope {
                let region_scope_span = region_scope.span(self.tcx, self.region_scope_tree);
                let scope_end = self.tcx.sess.source_map().end_point(region_scope_span);

                scope.drops.push(DropData {
                    source_info: SourceInfo { span: scope_end, scope: scope.source_scope },
                    local,
                    kind: drop_kind,
                });

                return;
            }
        }

        span_bug!(span, "region scope {:?} not in scope to drop {:?}", region_scope, local);
    }

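    /// Schedules a lint-only drop hint (`DropKind::ForLint`) for `local` at
    /// the end of `region_scope`; no actual drop is performed. Used by the
    /// backward-incompatible drop lints.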
    #[instrument(level = "debug", skip(self))]
    pub(crate) fn schedule_backwards_incompatible_drop(
        &mut self,
        span: Span,
        region_scope: region::Scope,
        local: Local,
        reason: BackwardIncompatibleDropReason,
    ) {
        for scope in self.scopes.scopes.iter_mut().rev() {
            scope.invalidate_cache();
            if scope.region_scope == region_scope {
                let region_scope_span = if reason
                    == BackwardIncompatibleDropReason::MacroExtendedScope
                    && let Some(scope_hir_id) = region_scope.hir_id(self.region_scope_tree)
                    && let hir::Node::Expr(expr) = self.tcx.hir_node(scope_hir_id)
                    && let hir::Node::Block(blk) = self.tcx.parent_hir_node(expr.hir_id)
                {
                    blk.span
                } else {
                    region_scope.span(self.tcx, self.region_scope_tree)
                };
                let scope_end = self.tcx.sess.source_map().end_point(region_scope_span);

                scope.drops.push(DropData {
                    source_info: SourceInfo { span: scope_end, scope: scope.source_scope },
                    local,
                    kind: DropKind::ForLint(reason),
                });

                return;
            }
        }
        span_bug!(
            span,
            "region scope {:?} not in scope to drop {:?} for linting",
            region_scope,
            local
        );
    }

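    /// Marks operands that are moved out of the topmost scope as moved, so
    /// that their scheduled `DropKind::Value` drops are skipped when the
    /// scope is exited (the consumer of the value becomes responsible for
    /// dropping it).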
    pub(crate) fn record_operands_moved(&mut self, operands: &[Spanned<Operand<'tcx>>]) {
        let local_scope = self.local_scope();
        let scope = self.scopes.scopes.last_mut().unwrap();

        assert_eq!(scope.region_scope, local_scope, "local scope is not the topmost scope!");

        let locals_moved = operands.iter().flat_map(|operand| match operand.node {
            Operand::Copy(_) | Operand::Constant(_) => None,
            Operand::Move(place) => place.as_local(),
        });

        for local in locals_moved {
            if scope.drops.iter().any(|drop| drop.local == local && drop.kind == DropKind::Value) {
                scope.moved_locals.push(local);
            }
        }
    }

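    /// Returns the drop-tree node that unwinding from the current point
    /// should enter, adding any not-yet-recorded drops to the unwind tree.
    /// The result is cached per scope.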
    fn diverge_cleanup(&mut self) -> DropIdx {
        self.diverge_cleanup_target(self.scopes.topmost(), DUMMY_SP)
    }

    fn diverge_cleanup_target(&mut self, target_scope: region::Scope, span: Span) -> DropIdx {
        let target = self.scopes.scope_index(target_scope, span);
        let (uncached_scope, mut cached_drop) = self.scopes.scopes[..=target]
            .iter()
            .enumerate()
            .rev()
            .find_map(|(scope_idx, scope)| {
                scope.cached_unwind_block.map(|cached_block| (scope_idx + 1, cached_block))
            })
            .unwrap_or((0, ROOT_NODE));

        if uncached_scope > target {
            return cached_drop;
        }

        let is_coroutine = self.coroutine.is_some();
        for scope in &mut self.scopes.scopes[uncached_scope..=target] {
            for drop in &scope.drops {
                if is_coroutine || drop.kind == DropKind::Value {
                    cached_drop = self.scopes.unwind_drops.add_drop(*drop, cached_drop);
                }
            }
            scope.cached_unwind_block = Some(cached_drop);
        }

        cached_drop
    }

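    /// Links `start`, a block whose terminator may unwind, into the unwind
    /// drop tree so that unwinding performs all scheduled cleanup.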
    pub(crate) fn diverge_from(&mut self, start: BasicBlock) {
        debug_assert!(
            matches!(
                self.cfg.block_data(start).terminator().kind,
                TerminatorKind::Assert { .. }
                    | TerminatorKind::Call { .. }
                    | TerminatorKind::Drop { .. }
                    | TerminatorKind::FalseUnwind { .. }
                    | TerminatorKind::InlineAsm { .. }
            ),
            "diverge_from called on block with terminator that cannot unwind."
        );

        let next_drop = self.diverge_cleanup();
        self.scopes.unwind_drops.add_entry_point(start, next_drop);
    }

    fn diverge_dropline(&mut self) -> DropIdx {
        self.diverge_dropline_target(self.scopes.topmost(), DUMMY_SP)
    }

    fn diverge_dropline_target(&mut self, target_scope: region::Scope, span: Span) -> DropIdx {
        debug_assert!(
            self.coroutine.is_some(),
            "diverge_dropline_target is valid only for coroutine"
        );
        let target = self.scopes.scope_index(target_scope, span);
        let (uncached_scope, mut cached_drop) = self.scopes.scopes[..=target]
            .iter()
            .enumerate()
            .rev()
            .find_map(|(scope_idx, scope)| {
                scope.cached_coroutine_drop_block.map(|cached_block| (scope_idx + 1, cached_block))
            })
            .unwrap_or((0, ROOT_NODE));

        if uncached_scope > target {
            return cached_drop;
        }

        for scope in &mut self.scopes.scopes[uncached_scope..=target] {
            for drop in &scope.drops {
                cached_drop = self.scopes.coroutine_drops.add_drop(*drop, cached_drop);
            }
            scope.cached_coroutine_drop_block = Some(cached_drop);
        }

        cached_drop
    }

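    /// Sets up a path that performs all required cleanup for dropping a
    /// coroutine suspended at `yield_block`, starting from the current
    /// scopes.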
    pub(crate) fn coroutine_drop_cleanup(&mut self, yield_block: BasicBlock) {
        debug_assert!(
            matches!(
                self.cfg.block_data(yield_block).terminator().kind,
                TerminatorKind::Yield { .. }
            ),
            "coroutine_drop_cleanup called on block with non-yield terminator."
        );
        let cached_drop = self.diverge_dropline();
        self.scopes.coroutine_drops.add_entry_point(yield_block, cached_drop);
    }

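    /// Utility for non-scope code that needs a drop-and-replace: drops
    /// `place` and then assigns `value` into it, on both the normal and the
    /// unwind path.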
    pub(crate) fn build_drop_and_replace(
        &mut self,
        block: BasicBlock,
        span: Span,
        place: Place<'tcx>,
        value: Rvalue<'tcx>,
    ) -> BlockAnd<()> {
        let source_info = self.source_info(span);

        let assign = self.cfg.start_new_block();
        self.cfg.push_assign(assign, source_info, place, value.clone());

        let assign_unwind = self.cfg.start_new_cleanup_block();
        self.cfg.push_assign(assign_unwind, source_info, place, value.clone());

        self.cfg.terminate(
            block,
            source_info,
            TerminatorKind::Drop {
                place,
                target: assign,
                unwind: UnwindAction::Cleanup(assign_unwind),
                replace: true,
                drop: None,
                async_fut: None,
            },
        );
        self.diverge_from(block);

        assign.unit()
    }

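    /// Creates an `Assert` terminator and returns the success block; the
    /// failure path is linked into the unwind drop tree.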
    pub(crate) fn assert(
        &mut self,
        block: BasicBlock,
        cond: Operand<'tcx>,
        expected: bool,
        msg: AssertMessage<'tcx>,
        span: Span,
    ) -> BasicBlock {
        let source_info = self.source_info(span);
        let success_block = self.cfg.start_new_block();

        self.cfg.terminate(
            block,
            source_info,
            TerminatorKind::Assert {
                cond,
                expected,
                msg: Box::new(msg),
                target: success_block,
                unwind: UnwindAction::Continue,
            },
        );
        self.diverge_from(block);

        success_block
    }

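    /// Clears the scheduled drops (and cached drop-tree positions) of the
    /// match arm and guard scopes, which must be the two topmost scopes.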
    pub(crate) fn clear_match_arm_and_guard_scopes(&mut self, region_scope: region::Scope) {
        let [.., arm_scope, guard_scope] = &mut *self.scopes.scopes else {
            bug!("matches with guards should introduce separate scopes for the pattern and guard");
        };

        assert_eq!(arm_scope.region_scope, region_scope);
        assert_eq!(guard_scope.region_scope.data, region::ScopeData::MatchGuard);
        assert_eq!(guard_scope.region_scope.local_id, region_scope.local_id);

        arm_scope.drops.clear();
        arm_scope.invalidate_cache();
        guard_scope.drops.clear();
        guard_scope.invalidate_cache();
    }
}

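/// Builds the drops for `pop_scope` and `leave_top_scope`: emits this scope's
/// drops in reverse declaration order into `block`, while walking `unwind_to`
/// (and `dropline_to` for coroutines) backwards through the corresponding
/// drop trees so that unwind entry points stay consistent with the drops
/// already emitted.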
fn build_scope_drops<'tcx, F>(
    cfg: &mut CFG<'tcx>,
    unwind_drops: &mut DropTree,
    coroutine_drops: &mut DropTree,
    scope: &Scope,
    block: BasicBlock,
    unwind_to: DropIdx,
    dropline_to: Option<DropIdx>,
    storage_dead_on_unwind: bool,
    arg_count: usize,
    is_async_drop: F,
) -> BlockAnd<()>
where
    F: Fn(Local) -> bool,
{
    debug!("build_scope_drops({:?} -> {:?}), dropline_to={:?}", block, scope, dropline_to);

    let mut unwind_to = unwind_to;

    let mut block = block;

    let mut dropline_to = dropline_to;

    for drop_data in scope.drops.iter().rev() {
        let source_info = drop_data.source_info;
        let local = drop_data.local;

        match drop_data.kind {
            DropKind::Value => {
                debug_assert_eq!(unwind_drops.drop_nodes[unwind_to].data.local, drop_data.local);
                debug_assert_eq!(unwind_drops.drop_nodes[unwind_to].data.kind, drop_data.kind);
                unwind_to = unwind_drops.drop_nodes[unwind_to].next;

                if let Some(idx) = dropline_to {
                    debug_assert_eq!(coroutine_drops.drop_nodes[idx].data.local, drop_data.local);
                    debug_assert_eq!(coroutine_drops.drop_nodes[idx].data.kind, drop_data.kind);
                    dropline_to = Some(coroutine_drops.drop_nodes[idx].next);
                }

                if scope.moved_locals.contains(&local) {
                    continue;
                }

                unwind_drops.add_entry_point(block, unwind_to);
                if let Some(to) = dropline_to
                    && is_async_drop(local)
                {
                    coroutine_drops.add_entry_point(block, to);
                }

                let next = cfg.start_new_block();
                cfg.terminate(
                    block,
                    source_info,
                    TerminatorKind::Drop {
                        place: local.into(),
                        target: next,
                        unwind: UnwindAction::Continue,
                        replace: false,
                        drop: None,
                        async_fut: None,
                    },
                );
                block = next;
            }
            DropKind::ForLint(reason) => {
                if storage_dead_on_unwind {
                    debug_assert_eq!(
                        unwind_drops.drop_nodes[unwind_to].data.local,
                        drop_data.local
                    );
                    debug_assert_eq!(unwind_drops.drop_nodes[unwind_to].data.kind, drop_data.kind);
                    unwind_to = unwind_drops.drop_nodes[unwind_to].next;
                }

                if scope.moved_locals.contains(&local) {
                    continue;
                }

                cfg.push(
                    block,
                    Statement::new(
                        source_info,
                        StatementKind::BackwardIncompatibleDropHint {
                            place: Box::new(local.into()),
                            reason,
                        },
                    ),
                );
            }
            DropKind::Storage => {
                if storage_dead_on_unwind {
                    debug_assert_eq!(
                        unwind_drops.drop_nodes[unwind_to].data.local,
                        drop_data.local
                    );
                    debug_assert_eq!(unwind_drops.drop_nodes[unwind_to].data.kind, drop_data.kind);
                    unwind_to = unwind_drops.drop_nodes[unwind_to].next;
                }
                if let Some(idx) = dropline_to {
                    debug_assert_eq!(coroutine_drops.drop_nodes[idx].data.local, drop_data.local);
                    debug_assert_eq!(coroutine_drops.drop_nodes[idx].data.kind, drop_data.kind);
                    dropline_to = Some(coroutine_drops.drop_nodes[idx].next);
                }
                assert!(local.index() > arg_count);
                cfg.push(block, Statement::new(source_info, StatementKind::StorageDead(local)));
            }
        }
    }
    block.unit()
}

impl<'a, 'tcx: 'a> Builder<'a, 'tcx> {
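    /// Builds the drop tree for a `break`, `return`, `else`, or
    /// `#[const_continue]` exit, and links its value drops into the unwind
    /// tree (and, for coroutines with async drops, into the coroutine drop
    /// tree).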
    fn build_exit_tree(
        &mut self,
        mut drops: DropTree,
        else_scope: region::Scope,
        span: Span,
        continue_block: Option<BasicBlock>,
    ) -> Option<BlockAnd<()>> {
        let blocks = drops.build_mir::<ExitScopes>(&mut self.cfg, continue_block);
        let is_coroutine = self.coroutine.is_some();

        if drops.drop_nodes.iter().any(|drop_node| drop_node.data.kind == DropKind::Value) {
            let unwind_target = self.diverge_cleanup_target(else_scope, span);
            let mut unwind_indices = IndexVec::from_elem_n(unwind_target, 1);
            for (drop_idx, drop_node) in drops.drop_nodes.iter_enumerated().skip(1) {
                match drop_node.data.kind {
                    DropKind::Storage | DropKind::ForLint(_) => {
                        if is_coroutine {
                            let unwind_drop = self
                                .scopes
                                .unwind_drops
                                .add_drop(drop_node.data, unwind_indices[drop_node.next]);
                            unwind_indices.push(unwind_drop);
                        } else {
                            unwind_indices.push(unwind_indices[drop_node.next]);
                        }
                    }
                    DropKind::Value => {
                        let unwind_drop = self
                            .scopes
                            .unwind_drops
                            .add_drop(drop_node.data, unwind_indices[drop_node.next]);
                        self.scopes.unwind_drops.add_entry_point(
                            blocks[drop_idx].unwrap(),
                            unwind_indices[drop_node.next],
                        );
                        unwind_indices.push(unwind_drop);
                    }
                }
            }
        }
        if is_coroutine
            && drops.drop_nodes.iter().any(|DropNode { data, next: _ }| {
                data.kind == DropKind::Value && self.is_async_drop(data.local)
            })
        {
            let dropline_target = self.diverge_dropline_target(else_scope, span);
            let mut dropline_indices = IndexVec::from_elem_n(dropline_target, 1);
            for (drop_idx, drop_data) in drops.drop_nodes.iter_enumerated().skip(1) {
                let coroutine_drop = self
                    .scopes
                    .coroutine_drops
                    .add_drop(drop_data.data, dropline_indices[drop_data.next]);
                match drop_data.data.kind {
                    DropKind::Storage | DropKind::ForLint(_) => {}
                    DropKind::Value => {
                        if self.is_async_drop(drop_data.data.local) {
                            self.scopes.coroutine_drops.add_entry_point(
                                blocks[drop_idx].unwrap(),
                                dropline_indices[drop_data.next],
                            );
                        }
                    }
                }
                dropline_indices.push(coroutine_drop);
            }
        }
        blocks[ROOT_NODE].map(BasicBlock::unit)
    }

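    /// Builds the unwind drop tree (and, for coroutines, the coroutine drop
    /// tree) once the rest of the body has been built.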
    pub(crate) fn build_drop_trees(&mut self) {
        if self.coroutine.is_some() {
            self.build_coroutine_drop_trees();
        } else {
            Self::build_unwind_tree(
                &mut self.cfg,
                &mut self.scopes.unwind_drops,
                self.fn_span,
                &mut None,
            );
        }
    }

    fn build_coroutine_drop_trees(&mut self) {
        let drops = &mut self.scopes.coroutine_drops;
        let cfg = &mut self.cfg;
        let fn_span = self.fn_span;
        let blocks = drops.build_mir::<CoroutineDrop>(cfg, None);
        if let Some(root_block) = blocks[ROOT_NODE] {
            cfg.terminate(
                root_block,
                SourceInfo::outermost(fn_span),
                TerminatorKind::CoroutineDrop,
            );
        }

        let resume_block = &mut None;
        let unwind_drops = &mut self.scopes.unwind_drops;
        Self::build_unwind_tree(cfg, unwind_drops, fn_span, resume_block);

        for (drop_idx, drop_node) in drops.drop_nodes.iter_enumerated() {
            if let DropKind::Value = drop_node.data.kind
                && let Some(bb) = blocks[drop_idx]
            {
                debug_assert!(drop_node.next < drops.drop_nodes.next_index());
                drops.entry_points.push((drop_node.next, bb));
            }
        }
        Self::build_unwind_tree(cfg, drops, fn_span, resume_block);
    }

    fn build_unwind_tree(
        cfg: &mut CFG<'tcx>,
        drops: &mut DropTree,
        fn_span: Span,
        resume_block: &mut Option<BasicBlock>,
    ) {
        let blocks = drops.build_mir::<Unwind>(cfg, *resume_block);
        if let (None, Some(resume)) = (*resume_block, blocks[ROOT_NODE]) {
            cfg.terminate(resume, SourceInfo::outermost(fn_span), TerminatorKind::UnwindResume);

            *resume_block = blocks[ROOT_NODE];
        }
    }
}

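/// Builds drop-tree blocks for exiting scopes on the normal (non-unwind)
/// path: `break`, `continue`, `return`, and similar early exits.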
struct ExitScopes;

impl<'tcx> DropTreeBuilder<'tcx> for ExitScopes {
    fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock {
        cfg.start_new_block()
    }
    fn link_entry_point(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) {
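        // `break_scope` and friends seal their blocks with a dummy
        // `UnwindResume` terminator; it is replaced here with the real `Goto`
        // into the drop tree.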
        let term = cfg.block_data_mut(from).terminator_mut();
        if let TerminatorKind::UnwindResume = term.kind {
            term.kind = TerminatorKind::Goto { target: to };
        } else {
            span_bug!(term.source_info.span, "unexpected dummy terminator kind: {:?}", term.kind);
        }
    }
}

struct CoroutineDrop;

impl<'tcx> DropTreeBuilder<'tcx> for CoroutineDrop {
    fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock {
        cfg.start_new_block()
    }
    fn link_entry_point(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) {
        let term = cfg.block_data_mut(from).terminator_mut();
        if let TerminatorKind::Yield { ref mut drop, .. } = term.kind {
            *drop = Some(to);
        } else if let TerminatorKind::Drop { ref mut drop, .. } = term.kind {
            *drop = Some(to);
        } else {
            span_bug!(
                term.source_info.span,
                "cannot enter coroutine drop tree from {:?}",
                term.kind
            )
        }
    }
}

struct Unwind;

impl<'tcx> DropTreeBuilder<'tcx> for Unwind {
    fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock {
        cfg.start_new_cleanup_block()
    }
    fn link_entry_point(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) {
        let term = &mut cfg.block_data_mut(from).terminator_mut();
        match &mut term.kind {
            TerminatorKind::Drop { unwind, .. } => {
                if let UnwindAction::Cleanup(unwind) = *unwind {
                    let source_info = term.source_info;
                    cfg.terminate(unwind, source_info, TerminatorKind::Goto { target: to });
                } else {
                    *unwind = UnwindAction::Cleanup(to);
                }
            }
            TerminatorKind::FalseUnwind { unwind, .. }
            | TerminatorKind::Call { unwind, .. }
            | TerminatorKind::Assert { unwind, .. }
            | TerminatorKind::InlineAsm { unwind, .. } => {
                *unwind = UnwindAction::Cleanup(to);
            }
            TerminatorKind::Goto { .. }
            | TerminatorKind::SwitchInt { .. }
            | TerminatorKind::UnwindResume
            | TerminatorKind::UnwindTerminate(_)
            | TerminatorKind::Return
            | TerminatorKind::TailCall { .. }
            | TerminatorKind::Unreachable
            | TerminatorKind::Yield { .. }
            | TerminatorKind::CoroutineDrop
            | TerminatorKind::FalseEdge { .. } => {
                span_bug!(term.source_info.span, "cannot unwind from {:?}", term.kind)
            }
        }
    }
}