//! Managing the scope stack while lowering THIR into MIR.
//!
//! A scope is pushed for each lexical region scope; it records the locals whose
//! values must be dropped (or whose storage must be reclaimed) when the scope
//! is exited. Exits that cross several scopes at once (`break`, `continue`,
//! `return`, unwinding, and coroutine drops) are collected into [`DropTree`]s,
//! which are lowered to MIR near the end of building.

use std::mem;

use interpret::ErrorHandled;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::HirId;
use rustc_index::{IndexSlice, IndexVec};
use rustc_middle::middle::region;
use rustc_middle::mir::{self, *};
use rustc_middle::thir::{AdtExpr, AdtExprBase, ArmId, ExprId, ExprKind, LintLevel};
use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt, ValTree};
use rustc_middle::{bug, span_bug};
use rustc_pattern_analysis::rustc::RustcPatCtxt;
use rustc_session::lint::Level;
use rustc_span::source_map::Spanned;
use rustc_span::{DUMMY_SP, Span};
use tracing::{debug, instrument};

use super::matches::BuiltMatchTree;
use crate::builder::{BlockAnd, BlockAndExtension, BlockFrame, Builder, CFG};
use crate::errors::{
    ConstContinueBadConst, ConstContinueNotMonomorphicConst, ConstContinueUnknownJumpTarget,
};

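/// The scope stack for the body being lowered, together with the drop trees
/// that collect drops for exits crossing several scopes at once.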
#[derive(Debug)]
pub(crate) struct Scopes<'tcx> {
    scopes: Vec<Scope>,

    /// The current set of breakable scopes, innermost last.
    breakable_scopes: Vec<BreakableScope<'tcx>>,

    /// Scopes of `#[loop_match]` loops, which `#[const_continue]` can jump to.
    const_continuable_scopes: Vec<ConstContinuableScope<'tcx>>,

    /// The scope of the innermost if-then currently being lowered.
    if_then_scope: Option<IfThenScope>,

    /// Drops that need to be done on unwind paths.
    unwind_drops: DropTree,

    /// Drops that need to be done on paths to the `CoroutineDrop` terminator.
    coroutine_drops: DropTree,
}

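/// A single lexical scope: tracks the drops scheduled in it and caches the
/// drop-tree nodes reached when exiting it abnormally.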
#[derive(Debug)]
struct Scope {
    /// The source scope this scope was created in.
    source_scope: SourceScope,

    /// The region scope covered by this scope.
    region_scope: region::Scope,

    /// Drops that need to be made when the scope is exited, in the order they
    /// were scheduled (so they execute in reverse order).
    drops: Vec<DropData>,

    /// Locals that have been moved out of this scope and therefore must not
    /// actually be dropped when it is exited.
    moved_locals: Vec<Local>,

    /// The drop-tree node reached when unwinding out of this scope, if it has
    /// already been computed. Cleared by [`Scope::invalidate_cache`].
    cached_unwind_block: Option<DropIdx>,

    /// Like `cached_unwind_block`, but for the coroutine-drop path.
    cached_coroutine_drop_block: Option<DropIdx>,
}

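/// A drop (or storage reclamation) scheduled in some scope.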
#[derive(Clone, Copy, Debug)]
struct DropData {
    /// The `SourceInfo` the drop is attributed to.
    source_info: SourceInfo,

    /// The local whose value or storage is affected.
    local: Local,

    /// Whether this is a value drop, a `StorageDead`, or a lint-only hint.
    kind: DropKind,
}

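/// The kind of "drop" scheduled for a local: a real value drop, a pure
/// `StorageDead`, or a hint statement emitted only for linting.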
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) enum DropKind {
    Value,
    Storage,
    ForLint,
}

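/// A scope that can be targeted by `break` or `continue`: a loop body or a
/// labelled block.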
#[derive(Debug)]
struct BreakableScope<'tcx> {
    /// Region scope of the loop or labelled block.
    region_scope: region::Scope,
    /// The destination of the loop/block expression itself (i.e., where to put
    /// the result of a `break` or `return` expression).
    break_destination: Place<'tcx>,
    /// Drops that happen on the `break`/`return` path.
    break_drops: DropTree,
    /// Drops that happen on the `continue` path; `None` for labelled blocks,
    /// which cannot be `continue`d.
    continue_drops: Option<DropTree>,
}

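/// A `#[loop_match]` scope that `#[const_continue]` can jump back into at a
/// statically chosen match arm.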
#[derive(Debug)]
struct ConstContinuableScope<'tcx> {
    /// The region scope of the `#[loop_match]` that its `#[const_continue]`s
    /// should jump to.
    region_scope: region::Scope,
    /// The place holding the state of the loop, which a `#[const_continue]`
    /// must update before jumping.
    state_place: Place<'tcx>,

    arms: Box<[ArmId]>,
    built_match_tree: BuiltMatchTree<'tcx>,

    /// Drops that happen on a `#[const_continue]` path.
    const_continue_drops: DropTree,
}

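/// The scope an `if` condition breaks out of when it fails (e.g. when a `let`
/// pattern in a let-chain does not match), running `else_drops` on the way.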
#[derive(Debug)]
struct IfThenScope {
    /// The if-then scope or arm scope.
    region_scope: region::Scope,
    /// Drops that happen on the `else` path.
    else_drops: DropTree,
}

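/// The target of a `break`, `continue`, or `return` expression.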
#[derive(Clone, Copy, Debug)]
pub(crate) enum BreakableTarget {
    Continue(region::Scope),
    Break(region::Scope),
    Return,
}

rustc_index::newtype_index! {
    #[orderable]
    struct DropIdx {}
}

const ROOT_NODE: DropIdx = DropIdx::ZERO;

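/// A tree of drops that were deferred during CFG construction.
///
/// Each node represents one drop; following the `next` links from a node leads
/// to the root, and that path is the sequence of drops executed when entering
/// the tree at that node. Shared suffixes of drops are shared in the tree, and
/// [`DropTree::build_mir`] lowers the whole tree to basic blocks at the end of
/// building.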
#[derive(Debug)]
struct DropTree {
    /// Nodes in the drop tree, containing drop data and a link to the next node.
    drop_nodes: IndexVec<DropIdx, DropNode>,
    /// Map for finding the index of an existing node, given its contents.
    existing_drops_map: FxHashMap<DropNodeKey, DropIdx>,
    /// Edges into the `DropTree` that need to be added once it's lowered.
    entry_points: Vec<(DropIdx, BasicBlock)>,
}

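/// A single node in the drop tree.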
#[derive(Debug)]
struct DropNode {
    /// Info about the drop to be performed at this node in the drop tree.
    data: DropData,
    /// Index of the "next" drop to perform (in drop order, not declaration order).
    next: DropIdx,
}

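/// Subset of [`DropNode`] used for reverse lookup in a hash table. The drop
/// kind is not part of the key: drops are deduplicated by `(next, local)`
/// alone.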
#[derive(Debug, PartialEq, Eq, Hash)]
struct DropNodeKey {
    next: DropIdx,
    local: Local,
}

impl Scope {
    /// Whether there's anything to do for the cleanup path, that is, when
    /// unwinding through this scope. This includes coroutine drops.
    fn needs_cleanup(&self) -> bool {
        self.drops.iter().any(|drop| match drop.kind {
            DropKind::Value | DropKind::ForLint => true,
            DropKind::Storage => false,
        })
    }

    fn invalidate_cache(&mut self) {
        self.cached_unwind_block = None;
        self.cached_coroutine_drop_block = None;
    }
}

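/// Determines how a [`DropTree`] is wired into the CFG: what kind of block
/// each node gets and how an entry point's terminator is linked to the tree.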
trait DropTreeBuilder<'tcx> {
    /// Create a new block for the tree. This should call either
    /// `cfg.start_new_block()` or `cfg.start_new_cleanup_block()`.
    fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock;

    /// Links a block outside the drop tree, `from`, to the block `to` inside
    /// the drop tree.
    fn link_entry_point(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock);
}

impl DropTree {
    fn new() -> Self {
        // The root node of the tree is a fake node: it represents the exit
        // point of the tree rather than an actual drop, hence the sentinel
        // `Local::MAX` / `DropIdx::MAX` values.
        let fake_source_info = SourceInfo::outermost(DUMMY_SP);
        let fake_data =
            DropData { source_info: fake_source_info, local: Local::MAX, kind: DropKind::Storage };
        let drop_nodes = IndexVec::from_raw(vec![DropNode { data: fake_data, next: DropIdx::MAX }]);
        Self { drop_nodes, entry_points: Vec::new(), existing_drops_map: FxHashMap::default() }
    }

    /// Adds a node to the drop tree, consisting of drop data and the index of
    /// the "next" drop (in drop order) that it should go to after this drop.
    /// If an equivalent node already exists in the tree, nothing is added, and
    /// that node's index is returned. Otherwise, the new node's index is
    /// returned.
    fn add_drop(&mut self, data: DropData, next: DropIdx) -> DropIdx {
        let drop_nodes = &mut self.drop_nodes;
        *self
            .existing_drops_map
            .entry(DropNodeKey { next, local: data.local })
            .or_insert_with(|| drop_nodes.push(DropNode { data, next }))
    }

    /// Registers `from` as an entry point to this drop tree, at `to`.
    ///
    /// During [`Self::build_mir`], `from` will be linked to the corresponding
    /// block within the drop tree.
    fn add_entry_point(&mut self, from: BasicBlock, to: DropIdx) {
        debug_assert!(to < self.drop_nodes.next_index());
        self.entry_points.push((to, from));
    }

    /// Builds the MIR for a given drop tree.
    fn build_mir<'tcx, T: DropTreeBuilder<'tcx>>(
        &mut self,
        cfg: &mut CFG<'tcx>,
        root_node: Option<BasicBlock>,
    ) -> IndexVec<DropIdx, Option<BasicBlock>> {
        debug!("DropTree::build_mir(drops = {:#?})", self);

        let mut blocks = self.assign_blocks::<T>(cfg, root_node);
        self.link_blocks(cfg, &mut blocks);

        blocks
    }

    /// Assigns a basic block to each node in the tree, sharing blocks between
    /// nodes where possible.
    fn assign_blocks<'tcx, T: DropTreeBuilder<'tcx>>(
        &mut self,
        cfg: &mut CFG<'tcx>,
        root_node: Option<BasicBlock>,
    ) -> IndexVec<DropIdx, Option<BasicBlock>> {
        // StorageDead statements can share blocks with each other and also
        // with a Drop terminator, so we iterate through the nodes to find
        // which of them need their own block.
        #[derive(Clone, Copy)]
        enum Block {
            // This drop is unreachable.
            None,
            // This drop is only reachable through the node with the given
            // index, so it can share that node's block.
            Shares(DropIdx),
            // This drop has multiple predecessors, is an entry point, or
            // follows a `Value` drop, so it needs its own block.
            Own,
        }

        let mut blocks = IndexVec::from_elem(None, &self.drop_nodes);
        blocks[ROOT_NODE] = root_node;

        let mut needs_block = IndexVec::from_elem(Block::None, &self.drop_nodes);
        if root_node.is_some() {
            // In some cases (such as drops for `continue`) the root node
            // already has a block; make sure we don't overwrite it.
            needs_block[ROOT_NODE] = Block::Own;
        }

        // Sort the entry points so that only the last element needs checking
        // in the loop below.
        let entry_points = &mut self.entry_points;
        entry_points.sort();

        for (drop_idx, drop_node) in self.drop_nodes.iter_enumerated().rev() {
            if entry_points.last().is_some_and(|entry_point| entry_point.0 == drop_idx) {
                let block = *blocks[drop_idx].get_or_insert_with(|| T::make_block(cfg));
                needs_block[drop_idx] = Block::Own;
                while entry_points.last().is_some_and(|entry_point| entry_point.0 == drop_idx) {
                    let entry_block = entry_points.pop().unwrap().1;
                    T::link_entry_point(cfg, entry_block, block);
                }
            }
            match needs_block[drop_idx] {
                Block::None => continue,
                Block::Own => {
                    blocks[drop_idx].get_or_insert_with(|| T::make_block(cfg));
                }
                Block::Shares(pred) => {
                    blocks[drop_idx] = blocks[pred];
                }
            }
            if let DropKind::Value = drop_node.data.kind {
                needs_block[drop_node.next] = Block::Own;
            } else if drop_idx != ROOT_NODE {
                match &mut needs_block[drop_node.next] {
                    pred @ Block::None => *pred = Block::Shares(drop_idx),
                    pred @ Block::Shares(_) => *pred = Block::Own,
                    Block::Own => (),
                }
            }
        }

        debug!("assign_blocks: blocks = {:#?}", blocks);
        assert!(entry_points.is_empty());

        blocks
    }

    fn link_blocks<'tcx>(
        &self,
        cfg: &mut CFG<'tcx>,
        blocks: &IndexSlice<DropIdx, Option<BasicBlock>>,
    ) {
        for (drop_idx, drop_node) in self.drop_nodes.iter_enumerated().rev() {
            let Some(block) = blocks[drop_idx] else { continue };
            match drop_node.data.kind {
                DropKind::Value => {
                    let terminator = TerminatorKind::Drop {
                        target: blocks[drop_node.next].unwrap(),
                        // The caller will arrange the real unwind action if needed.
                        unwind: UnwindAction::Terminate(UnwindTerminateReason::InCleanup),
                        place: drop_node.data.local.into(),
                        replace: false,
                        drop: None,
                        async_fut: None,
                    };
                    cfg.terminate(block, drop_node.data.source_info, terminator);
                }
                DropKind::ForLint => {
                    let stmt = Statement::new(
                        drop_node.data.source_info,
                        StatementKind::BackwardIncompatibleDropHint {
                            place: Box::new(drop_node.data.local.into()),
                            reason: BackwardIncompatibleDropReason::Edition2024,
                        },
                    );
                    cfg.push(block, stmt);
                    let target = blocks[drop_node.next].unwrap();
                    if target != block {
                        // Diagnostics don't use this `Span`, but debuginfo might. Since we don't
                        // want breakpoints placed here, especially on an unwind path, use
                        // `DUMMY_SP`.
                        let source_info =
                            SourceInfo { span: DUMMY_SP, ..drop_node.data.source_info };
                        let terminator = TerminatorKind::Goto { target };
                        cfg.terminate(block, source_info, terminator);
                    }
                }
                // Root nodes don't correspond to a drop.
                DropKind::Storage if drop_idx == ROOT_NODE => {}
                DropKind::Storage => {
                    let stmt = Statement::new(
                        drop_node.data.source_info,
                        StatementKind::StorageDead(drop_node.data.local),
                    );
                    cfg.push(block, stmt);
                    let target = blocks[drop_node.next].unwrap();
                    if target != block {
                        // See the comment in the `DropKind::ForLint` arm above.
                        let source_info =
                            SourceInfo { span: DUMMY_SP, ..drop_node.data.source_info };
                        let terminator = TerminatorKind::Goto { target };
                        cfg.terminate(block, source_info, terminator);
                    }
                }
            }
        }
    }
}

impl<'tcx> Scopes<'tcx> {
    pub(crate) fn new() -> Self {
        Self {
            scopes: Vec::new(),
            breakable_scopes: Vec::new(),
            const_continuable_scopes: Vec::new(),
            if_then_scope: None,
            unwind_drops: DropTree::new(),
            coroutine_drops: DropTree::new(),
        }
    }

    fn push_scope(&mut self, region_scope: (region::Scope, SourceInfo), vis_scope: SourceScope) {
        debug!("push_scope({:?})", region_scope);
        self.scopes.push(Scope {
            source_scope: vis_scope,
            region_scope: region_scope.0,
            drops: vec![],
            moved_locals: vec![],
            cached_unwind_block: None,
            cached_coroutine_drop_block: None,
        });
    }

    fn pop_scope(&mut self, region_scope: (region::Scope, SourceInfo)) -> Scope {
        let scope = self.scopes.pop().unwrap();
        assert_eq!(scope.region_scope, region_scope.0);
        scope
    }

    fn scope_index(&self, region_scope: region::Scope, span: Span) -> usize {
        self.scopes
            .iter()
            .rposition(|scope| scope.region_scope == region_scope)
            .unwrap_or_else(|| span_bug!(span, "region_scope {:?} does not enclose", region_scope))
    }

    /// Returns the topmost active scope, which is known to be alive until
    /// the next scope expression.
    fn topmost(&self) -> region::Scope {
        self.scopes.last().expect("topmost_scope: no scopes present").region_scope
    }
}

impl<'a, 'tcx> Builder<'a, 'tcx> {
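    /// Start a breakable scope, which tracks where `continue`, `break` and
    /// `return` should branch to. `loop_block` is the block that `continue`
    /// re-enters; it is `None` for labelled blocks, which cannot be
    /// `continue`d.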
    pub(crate) fn in_breakable_scope<F>(
        &mut self,
        loop_block: Option<BasicBlock>,
        break_destination: Place<'tcx>,
        span: Span,
        f: F,
    ) -> BlockAnd<()>
    where
        F: FnOnce(&mut Builder<'a, 'tcx>) -> Option<BlockAnd<()>>,
    {
        let region_scope = self.scopes.topmost();
        let scope = BreakableScope {
            region_scope,
            break_destination,
            break_drops: DropTree::new(),
            continue_drops: loop_block.map(|_| DropTree::new()),
        };
        self.scopes.breakable_scopes.push(scope);
        let normal_exit_block = f(self);
        let breakable_scope = self.scopes.breakable_scopes.pop().unwrap();
        assert!(breakable_scope.region_scope == region_scope);
        let break_block =
            self.build_exit_tree(breakable_scope.break_drops, region_scope, span, None);
        if let Some(drops) = breakable_scope.continue_drops {
            self.build_exit_tree(drops, region_scope, span, loop_block);
        }
        match (normal_exit_block, break_block) {
            (Some(block), None) | (None, Some(block)) => block,
            (None, None) => self.cfg.start_new_block().unit(),
            (Some(normal_block), Some(exit_block)) => {
                let target = self.cfg.start_new_block();
                let source_info = self.source_info(span);
                self.cfg.terminate(
                    normal_block.into_block(),
                    source_info,
                    TerminatorKind::Goto { target },
                );
                self.cfg.terminate(
                    exit_block.into_block(),
                    source_info,
                    TerminatorKind::Goto { target },
                );
                target.unit()
            }
        }
    }

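    /// Start a const-continuable scope: the body of a `#[loop_match]`, whose
    /// state `#[const_continue]` reassigns before jumping back to a
    /// statically selected arm.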
    pub(crate) fn in_const_continuable_scope<F>(
        &mut self,
        arms: Box<[ArmId]>,
        built_match_tree: BuiltMatchTree<'tcx>,
        state_place: Place<'tcx>,
        span: Span,
        f: F,
    ) -> BlockAnd<()>
    where
        F: FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<()>,
    {
        let region_scope = self.scopes.topmost();
        let scope = ConstContinuableScope {
            region_scope,
            state_place,
            const_continue_drops: DropTree::new(),
            arms,
            built_match_tree,
        };
        self.scopes.const_continuable_scopes.push(scope);
        let normal_exit_block = f(self);
        let const_continue_scope = self.scopes.const_continuable_scopes.pop().unwrap();
        assert!(const_continue_scope.region_scope == region_scope);

        let break_block = self.build_exit_tree(
            const_continue_scope.const_continue_drops,
            region_scope,
            span,
            None,
        );

        match (normal_exit_block, break_block) {
            (block, None) => block,
            (normal_block, Some(exit_block)) => {
                let target = self.cfg.start_new_block();
                let source_info = self.source_info(span);
                self.cfg.terminate(
                    normal_block.into_block(),
                    source_info,
                    TerminatorKind::Goto { target },
                );
                self.cfg.terminate(
                    exit_block.into_block(),
                    source_info,
                    TerminatorKind::Goto { target },
                );
                target.unit()
            }
        }
    }

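    /// Start an if-then scope, which tracks the drops for the bindings of an
    /// `if` condition (including let-chains), and returns
    /// `(then_block, else_block)`.
    ///
    /// A sketch of why a drop tree is needed here: in
    /// `if let Some(x) = a && let Some(y) = b && let Some(z) = c { .. }`
    /// the condition can fail after binding `x`, after `x` and `y`, or before
    /// binding anything, so each failing `let` "breaks" to the else block
    /// through its own entry point into the `else_drops` tree.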
    pub(crate) fn in_if_then_scope<F>(
        &mut self,
        region_scope: region::Scope,
        span: Span,
        f: F,
    ) -> (BasicBlock, BasicBlock)
    where
        F: FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<()>,
    {
        let scope = IfThenScope { region_scope, else_drops: DropTree::new() };
        let previous_scope = mem::replace(&mut self.scopes.if_then_scope, Some(scope));

        let then_block = f(self).into_block();

        let if_then_scope = mem::replace(&mut self.scopes.if_then_scope, previous_scope).unwrap();
        assert!(if_then_scope.region_scope == region_scope);

        let else_block =
            self.build_exit_tree(if_then_scope.else_drops, region_scope, span, None).map_or_else(
                || self.cfg.start_new_block(),
                |else_block_and| else_block_and.into_block(),
            );

        (then_block, else_block)
    }

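    /// Convenience wrapper that pushes a scope and then pops it after building
    /// `f`, entering a new source scope first if `lint_level` is explicit.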
    #[instrument(skip(self, f), level = "debug")]
    pub(crate) fn in_scope<F, R>(
        &mut self,
        region_scope: (region::Scope, SourceInfo),
        lint_level: LintLevel,
        f: F,
    ) -> BlockAnd<R>
    where
        F: FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<R>,
    {
        let source_scope = self.source_scope;
        if let LintLevel::Explicit(current_hir_id) = lint_level {
            let parent_id =
                self.source_scopes[source_scope].local_data.as_ref().unwrap_crate_local().lint_root;
            self.maybe_new_source_scope(region_scope.1.span, current_hir_id, parent_id);
        }
        self.push_scope(region_scope);
        let mut block;
        let rv = unpack!(block = f(self));
        block = self.pop_scope(region_scope, block).into_block();
        self.source_scope = source_scope;
        debug!(?block);
        block.and(rv)
    }

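    /// Like [`Self::in_scope`], but only pushes a scope if `opt_region_scope`
    /// is `Some`.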
    pub(crate) fn opt_in_scope<R>(
        &mut self,
        opt_region_scope: Option<(region::Scope, SourceInfo)>,
        f: impl FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<R>,
    ) -> BlockAnd<R> {
        if let Some(region_scope) = opt_region_scope {
            self.in_scope(region_scope, LintLevel::Inherited, f)
        } else {
            f(self)
        }
    }

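    /// Push a scope onto the stack. You can then build code in this scope and
    /// call `pop_scope` afterwards. Note that these two calls must be paired;
    /// using `in_scope` as a convenience wrapper may be preferable.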
    pub(crate) fn push_scope(&mut self, region_scope: (region::Scope, SourceInfo)) {
        self.scopes.push_scope(region_scope, self.source_scope);
    }

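    /// Pops a scope, which should have region scope `region_scope`, adding any
    /// drops onto the end of `block` that are needed. This must match 1-to-1
    /// with `push_scope`.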
    pub(crate) fn pop_scope(
        &mut self,
        region_scope: (region::Scope, SourceInfo),
        mut block: BasicBlock,
    ) -> BlockAnd<()> {
        debug!("pop_scope({:?}, {:?})", region_scope, block);

        block = self.leave_top_scope(block);

        self.scopes.pop_scope(region_scope);

        block.unit()
    }

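    /// Sets up the drops for breaking from `block` to `target`: evaluates the
    /// break value (if any) into the target's destination, records the drops
    /// for every scope being exited in the appropriate drop tree, and
    /// registers `block` as an entry point into that tree.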
    pub(crate) fn break_scope(
        &mut self,
        mut block: BasicBlock,
        value: Option<ExprId>,
        target: BreakableTarget,
        source_info: SourceInfo,
    ) -> BlockAnd<()> {
        let span = source_info.span;

        let get_scope_index = |scope: region::Scope| {
            self.scopes
                .breakable_scopes
                .iter()
                .rposition(|breakable_scope| breakable_scope.region_scope == scope)
                .unwrap_or_else(|| span_bug!(span, "no enclosing breakable scope found"))
        };
        let (break_index, destination) = match target {
            BreakableTarget::Return => {
                let scope = &self.scopes.breakable_scopes[0];
                if scope.break_destination != Place::return_place() {
                    span_bug!(span, "`return` in item with no return scope");
                }
                (0, Some(scope.break_destination))
            }
            BreakableTarget::Break(scope) => {
                let break_index = get_scope_index(scope);
                let scope = &self.scopes.breakable_scopes[break_index];
                (break_index, Some(scope.break_destination))
            }
            BreakableTarget::Continue(scope) => {
                let break_index = get_scope_index(scope);
                (break_index, None)
            }
        };

        match (destination, value) {
            (Some(destination), Some(value)) => {
                debug!("stmt_expr Break val block_context.push(SubExpr)");
                self.block_context.push(BlockFrame::SubExpr);
                block = self.expr_into_dest(destination, block, value).into_block();
                self.block_context.pop();
            }
            (Some(destination), None) => {
                self.cfg.push_assign_unit(block, source_info, destination, self.tcx)
            }
            (None, Some(_)) => {
                panic!("`return`, `become` and `break` with a value must have a destination")
            }
            (None, None) => {
                if self.tcx.sess.instrument_coverage() {
                    // Normally no MIR is built here, but coverage
                    // instrumentation needs a statement carrying this span so
                    // that a code region can be associated with the exit.
                    self.cfg.push_coverage_span_marker(block, source_info);
                }
            }
        }

        let region_scope = self.scopes.breakable_scopes[break_index].region_scope;
        let scope_index = self.scopes.scope_index(region_scope, span);
        let drops = if destination.is_some() {
            &mut self.scopes.breakable_scopes[break_index].break_drops
        } else {
            let Some(drops) = self.scopes.breakable_scopes[break_index].continue_drops.as_mut()
            else {
                self.tcx.dcx().span_delayed_bug(
                    source_info.span,
                    "unlabelled `continue` within labelled block",
                );
                self.cfg.terminate(block, source_info, TerminatorKind::Unreachable);

                return self.cfg.start_new_block().unit();
            };
            drops
        };

        let mut drop_idx = ROOT_NODE;
        for scope in &self.scopes.scopes[scope_index + 1..] {
            for drop in &scope.drops {
                drop_idx = drops.add_drop(*drop, drop_idx);
            }
        }
        drops.add_entry_point(block, drop_idx);

        // `build_drop_trees` doesn't have access to our source_info, so we
        // create a dummy terminator now. `TerminatorKind::UnwindResume` is used
        // because MIR type checking will panic if it hasn't been overwritten.
        // (See also `<ExitScopes as DropTreeBuilder>::link_entry_point`.)
        self.cfg.terminate(block, source_info, TerminatorKind::UnwindResume);

        self.cfg.start_new_block().unit()
    }

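    /// Evaluates an already-monomorphic constant to a [`ValTree`], so that
    /// `#[const_continue]` can pick its jump target while building MIR.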
    fn eval_unevaluated_mir_constant_to_valtree(
        &self,
        constant: ConstOperand<'tcx>,
    ) -> Result<(ty::ValTree<'tcx>, Ty<'tcx>), interpret::ErrorHandled> {
        assert!(!constant.const_.ty().has_param());
        let (uv, ty) = match constant.const_ {
            mir::Const::Unevaluated(uv, ty) => (uv.shrink(), ty),
            mir::Const::Ty(_, c) => match c.kind() {
                ty::ConstKind::Value(cv) => return Ok((cv.valtree, cv.ty)),
                other => span_bug!(constant.span, "{other:#?}"),
            },
            mir::Const::Val(mir::ConstValue::Scalar(mir::interpret::Scalar::Int(val)), ty) => {
                return Ok((ValTree::from_scalar_int(self.tcx, val), ty));
            }
            other => span_bug!(constant.span, "{other:#?}"),
        };

        match self.tcx.const_eval_resolve_for_typeck(self.typing_env(), uv, constant.span) {
            Ok(Ok(valtree)) => Ok((valtree, ty)),
            Ok(Err(ty)) => span_bug!(constant.span, "could not convert {ty:?} to a valtree"),
            Err(e) => Err(e),
        }
    }

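    /// Sets up the drops for jumping from `block` to `scope` as a result of a
    /// `#[const_continue]`: the new state must be a constant, and the jump
    /// target is chosen by statically matching it against the loop's arms.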
    pub(crate) fn break_const_continuable_scope(
        &mut self,
        mut block: BasicBlock,
        value: ExprId,
        scope: region::Scope,
        source_info: SourceInfo,
    ) -> BlockAnd<()> {
        let span = source_info.span;

        let rustc_middle::thir::ExprKind::Scope { value, .. } = self.thir[value].kind else {
            span_bug!(span, "break value must be a scope")
        };

        let expr = &self.thir[value];
        let constant = match &expr.kind {
            ExprKind::Adt(box AdtExpr { variant_index, fields, base, .. }) => {
                assert!(matches!(base, AdtExprBase::None));
                assert!(fields.is_empty());
                ConstOperand {
                    span: self.thir[value].span,
                    user_ty: None,
                    const_: Const::Ty(
                        self.thir[value].ty,
                        ty::Const::new_value(
                            self.tcx,
                            ValTree::from_branches(
                                self.tcx,
                                [ValTree::from_scalar_int(self.tcx, variant_index.as_u32().into())],
                            ),
                            self.thir[value].ty,
                        ),
                    ),
                }
            }

            ExprKind::Literal { .. }
            | ExprKind::NonHirLiteral { .. }
            | ExprKind::ZstLiteral { .. }
            | ExprKind::NamedConst { .. } => self.as_constant(&self.thir[value]),

            other => {
                use crate::errors::ConstContinueNotMonomorphicConstReason as Reason;

                let span = expr.span;
                let reason = match other {
                    ExprKind::ConstParam { .. } => Reason::ConstantParameter { span },
                    ExprKind::ConstBlock { .. } => Reason::ConstBlock { span },
                    _ => Reason::Other { span },
                };

                self.tcx
                    .dcx()
                    .emit_err(ConstContinueNotMonomorphicConst { span: expr.span, reason });
                return block.unit();
            }
        };

        let break_index = self
            .scopes
            .const_continuable_scopes
            .iter()
            .rposition(|const_continuable_scope| const_continuable_scope.region_scope == scope)
            .unwrap_or_else(|| span_bug!(span, "no enclosing const-continuable scope found"));

        let scope = &self.scopes.const_continuable_scopes[break_index];

        let state_decl = &self.local_decls[scope.state_place.as_local().unwrap()];
        let state_ty = state_decl.ty;
        let (discriminant_ty, rvalue) = match state_ty.kind() {
            ty::Adt(adt_def, _) if adt_def.is_enum() => {
                (state_ty.discriminant_ty(self.tcx), Rvalue::Discriminant(scope.state_place))
            }
            ty::Uint(_) | ty::Int(_) | ty::Float(_) | ty::Bool | ty::Char => {
                (state_ty, Rvalue::Use(Operand::Copy(scope.state_place)))
            }
            _ => span_bug!(state_decl.source_info.span, "unsupported #[loop_match] state"),
        };

        let dropless_arena = rustc_arena::DroplessArena::default();
        let typeck_results = self.tcx.typeck(self.def_id);
        let cx = RustcPatCtxt {
            tcx: self.tcx,
            typeck_results,
            module: self.tcx.parent_module(self.hir_id).to_def_id(),
            typing_env: rustc_middle::ty::TypingEnv::non_body_analysis(self.tcx, self.def_id),
            dropless_arena: &dropless_arena,
            match_lint_level: self.hir_id,
            whole_match_span: Some(rustc_span::Span::default()),
            scrut_span: rustc_span::Span::default(),
            refutable: true,
            known_valid_scrutinee: true,
            internal_state: Default::default(),
        };

        let valtree = match self.eval_unevaluated_mir_constant_to_valtree(constant) {
            Ok((valtree, ty)) => {
                // Defensively check that the type is monomorphic.
                assert!(!ty.has_param());

                valtree
            }
            Err(ErrorHandled::Reported(..)) => {
                return block.unit();
            }
            Err(ErrorHandled::TooGeneric(_)) => {
                self.tcx.dcx().emit_fatal(ConstContinueBadConst { span: constant.span });
            }
        };

        let Some(real_target) =
            self.static_pattern_match(&cx, valtree, &*scope.arms, &scope.built_match_tree)
        else {
            self.tcx.dcx().emit_fatal(ConstContinueUnknownJumpTarget { span })
        };

        self.block_context.push(BlockFrame::SubExpr);
        let state_place = scope.state_place;
        block = self.expr_into_dest(state_place, block, value).into_block();
        self.block_context.pop();

        let discr = self.temp(discriminant_ty, source_info.span);
        let scope_index = self
            .scopes
            .scope_index(self.scopes.const_continuable_scopes[break_index].region_scope, span);
        let scope = &mut self.scopes.const_continuable_scopes[break_index];
        self.cfg.push_assign(block, source_info, discr, rvalue);
        let drop_and_continue_block = self.cfg.start_new_block();
        let imaginary_target = self.cfg.start_new_block();
        self.cfg.terminate(
            block,
            source_info,
            TerminatorKind::FalseEdge { real_target: drop_and_continue_block, imaginary_target },
        );

        let drops = &mut scope.const_continue_drops;

        let drop_idx = self.scopes.scopes[scope_index + 1..]
            .iter()
            .flat_map(|scope| &scope.drops)
            .fold(ROOT_NODE, |drop_idx, &drop| drops.add_drop(drop, drop_idx));

        drops.add_entry_point(imaginary_target, drop_idx);

        self.cfg.terminate(imaginary_target, source_info, TerminatorKind::UnwindResume);

        let region_scope = scope.region_scope;
        let scope_index = self.scopes.scope_index(region_scope, span);
        let mut drops = DropTree::new();

        let drop_idx = self.scopes.scopes[scope_index + 1..]
            .iter()
            .flat_map(|scope| &scope.drops)
            .fold(ROOT_NODE, |drop_idx, &drop| drops.add_drop(drop, drop_idx));

        drops.add_entry_point(drop_and_continue_block, drop_idx);

        // As in `break_scope`: `TerminatorKind::UnwindResume` is a dummy that
        // the drop-tree builder will overwrite with the real jump.
        self.cfg.terminate(drop_and_continue_block, source_info, TerminatorKind::UnwindResume);

        self.build_exit_tree(drops, region_scope, span, Some(real_target));

        self.cfg.start_new_block().unit()
    }

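    /// Sets up the drops for breaking from `block` due to an `if` condition
    /// that turned out to be false, and links `block` into the if-then scope's
    /// `else_drops` tree.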
    pub(crate) fn break_for_else(&mut self, block: BasicBlock, source_info: SourceInfo) {
        let if_then_scope = self
            .scopes
            .if_then_scope
            .as_ref()
            .unwrap_or_else(|| span_bug!(source_info.span, "no if-then scope found"));

        let target = if_then_scope.region_scope;
        let scope_index = self.scopes.scope_index(target, source_info.span);

        let if_then_scope = self.scopes.if_then_scope.as_mut().expect("upgrading & to &mut");

        let mut drop_idx = ROOT_NODE;
        let drops = &mut if_then_scope.else_drops;
        for scope in &self.scopes.scopes[scope_index + 1..] {
            for drop in &scope.drops {
                drop_idx = drops.add_drop(*drop, drop_idx);
            }
        }
        drops.add_entry_point(block, drop_idx);

        // As in `break_scope`: `TerminatorKind::UnwindResume` is a dummy that
        // the drop-tree builder will overwrite with the real jump.
        self.cfg.terminate(block, source_info, TerminatorKind::UnwindResume);
    }

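    /// Sets up the drops for explicit tail calls.
    ///
    /// Unlike other kinds of early exits, tail calls do not go through the
    /// drop tree. Instead, all scheduled drops are immediately added to the
    /// CFG.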
    pub(crate) fn break_for_tail_call(
        &mut self,
        mut block: BasicBlock,
        args: &[Spanned<Operand<'tcx>>],
        source_info: SourceInfo,
    ) -> BlockAnd<()> {
        let arg_drops: Vec<_> = args
            .iter()
            .rev()
            .filter_map(|arg| match &arg.node {
                Operand::Copy(_) => bug!("copy op in tail call args"),
                Operand::Move(place) => {
                    let local =
                        place.as_local().unwrap_or_else(|| bug!("projection in tail call args"));

                    if !self.local_decls[local].ty.needs_drop(self.tcx, self.typing_env()) {
                        return None;
                    }

                    Some(DropData { source_info, local, kind: DropKind::Value })
                }
                Operand::Constant(_) => None,
            })
            .collect();

        let mut unwind_to = self.diverge_cleanup_target(
            self.scopes.scopes.iter().rev().nth(1).unwrap().region_scope,
            DUMMY_SP,
        );
        let typing_env = self.typing_env();
        let unwind_drops = &mut self.scopes.unwind_drops;

        // The innermost scope contains only the destructors for the tail call
        // arguments; those should only run if the call panics, so skip it.
        for scope in self.scopes.scopes[1..].iter().rev().skip(1) {
            for drop_data in scope.drops.iter().rev() {
                let source_info = drop_data.source_info;
                let local = drop_data.local;

                if !self.local_decls[local].ty.needs_drop(self.tcx, typing_env) {
                    continue;
                }

                match drop_data.kind {
                    DropKind::Value => {
                        // `unwind_to` should match the drop we are about to
                        // emit; step past it so that a panic during this drop
                        // continues with the *next* value on the unwind path.
                        debug_assert_eq!(
                            unwind_drops.drop_nodes[unwind_to].data.local,
                            drop_data.local
                        );
                        debug_assert_eq!(
                            unwind_drops.drop_nodes[unwind_to].data.kind,
                            drop_data.kind
                        );
                        unwind_to = unwind_drops.drop_nodes[unwind_to].next;

                        let mut unwind_entry_point = unwind_to;

                        // The tail call arguments must be dropped if the drop panics.
                        for drop in arg_drops.iter().copied() {
                            unwind_entry_point = unwind_drops.add_drop(drop, unwind_entry_point);
                        }

                        unwind_drops.add_entry_point(block, unwind_entry_point);

                        let next = self.cfg.start_new_block();
                        self.cfg.terminate(
                            block,
                            source_info,
                            TerminatorKind::Drop {
                                place: local.into(),
                                target: next,
                                unwind: UnwindAction::Continue,
                                replace: false,
                                drop: None,
                                async_fut: None,
                            },
                        );
                        block = next;
                    }
                    DropKind::ForLint => {
                        self.cfg.push(
                            block,
                            Statement::new(
                                source_info,
                                StatementKind::BackwardIncompatibleDropHint {
                                    place: Box::new(local.into()),
                                    reason: BackwardIncompatibleDropReason::Edition2024,
                                },
                            ),
                        );
                    }
                    DropKind::Storage => {
                        // Only temps and vars need their storage dead.
                        assert!(local.index() > self.arg_count);
                        self.cfg.push(
                            block,
                            Statement::new(source_info, StatementKind::StorageDead(local)),
                        );
                    }
                }
            }
        }

        block.unit()
    }

    fn is_async_drop_impl(
        tcx: TyCtxt<'tcx>,
        local_decls: &IndexVec<Local, LocalDecl<'tcx>>,
        typing_env: ty::TypingEnv<'tcx>,
        local: Local,
    ) -> bool {
        let ty = local_decls[local].ty;
        if ty.is_async_drop(tcx, typing_env) || ty.is_coroutine() {
            return true;
        }
        ty.needs_async_drop(tcx, typing_env)
    }

    fn is_async_drop(&self, local: Local) -> bool {
        Self::is_async_drop_impl(self.tcx, &self.local_decls, self.typing_env(), local)
    }

    fn leave_top_scope(&mut self, block: BasicBlock) -> BasicBlock {
        // If we are emitting a `drop` statement, we need to have the cached
        // diverge cleanup pads ready in case that drop panics.
        let needs_cleanup = self.scopes.scopes.last().is_some_and(|scope| scope.needs_cleanup());
        let is_coroutine = self.coroutine.is_some();
        let unwind_to = if needs_cleanup { self.diverge_cleanup() } else { DropIdx::MAX };

        let scope = self.scopes.scopes.last().expect("leave_top_scope called with no scopes");
        let has_async_drops = is_coroutine
            && scope.drops.iter().any(|v| v.kind == DropKind::Value && self.is_async_drop(v.local));
        let dropline_to = if has_async_drops { Some(self.diverge_dropline()) } else { None };
        let scope = self.scopes.scopes.last().expect("leave_top_scope called with no scopes");
        let typing_env = self.typing_env();
        build_scope_drops(
            &mut self.cfg,
            &mut self.scopes.unwind_drops,
            &mut self.scopes.coroutine_drops,
            scope,
            block,
            unwind_to,
            dropline_to,
            is_coroutine && needs_cleanup,
            self.arg_count,
            |v: Local| Self::is_async_drop_impl(self.tcx, &self.local_decls, typing_env, v),
        )
        .into_block()
    }

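    /// Possibly creates a new source scope if `current_root` and `parent_root`
    /// are different, or if `-Zmaximal-hir-to-mir-coverage` is enabled.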
    pub(crate) fn maybe_new_source_scope(
        &mut self,
        span: Span,
        current_id: HirId,
        parent_id: HirId,
    ) {
        let (current_root, parent_root) =
            if self.tcx.sess.opts.unstable_opts.maximal_hir_to_mir_coverage {
                // Some consumers of rustc need to map MIR locations back to HIR nodes. With
                // `-Zmaximal-hir-to-mir-coverage`, every HIR id is treated as its own lint
                // root, maximizing the granularity of that mapping.
                (current_id, parent_id)
            } else {
                // Estimate the true lint roots to avoid creating a new source scope for
                // every HIR node.
                (
                    self.maybe_lint_level_root_bounded(current_id),
                    if parent_id == self.hir_id {
                        // The walk would return `self.hir_id` unchanged; skip it.
                        parent_id
                    } else {
                        self.maybe_lint_level_root_bounded(parent_id)
                    },
                )
            };

        if current_root != parent_root {
            let lint_level = LintLevel::Explicit(current_root);
            self.source_scope = self.new_source_scope(span, lint_level);
        }
    }

    /// Walks up the HIR from `orig_id` toward `self.hir_id` and returns the
    /// first node that carries its own lint attributes (a lint root), or
    /// `self.hir_id` if there is none in between. Results are memoized in
    /// `lint_level_roots_cache`.
    fn maybe_lint_level_root_bounded(&mut self, orig_id: HirId) -> HirId {
        // The walk stays within a single HIR owner, so the cache can store
        // `ItemLocalId`s instead of full `HirId`s.
        assert_eq!(orig_id.owner, self.hir_id.owner);

        let mut id = orig_id;
        loop {
            if id == self.hir_id {
                break;
            }

            if self.tcx.hir_attrs(id).iter().any(|attr| Level::from_attr(attr).is_some()) {
                // An intervening lint attribute: this result doesn't root at
                // `self.hir_id`, so it is not cached.
                return id;
            }

            let next = self.tcx.parent_hir_id(id);
            if next == id {
                bug!("lint traversal reached the root of the crate");
            }
            id = next;

            // Purely an optimization: nodes already known to root at
            // `self.hir_id` don't need to be walked again.
            if self.lint_level_roots_cache.contains(id.local_id) {
                break;
            }
        }

        // Record that `orig_id`'s lint root is `self.hir_id`.
        self.lint_level_roots_cache.insert(orig_id.local_id);
        self.hir_id
    }

    /// Creates a new source scope, nested in the current one.
    pub(crate) fn new_source_scope(&mut self, span: Span, lint_level: LintLevel) -> SourceScope {
        let parent = self.source_scope;
        debug!(
            "new_source_scope({:?}, {:?}) - parent({:?})={:?}",
            span,
            lint_level,
            parent,
            self.source_scopes.get(parent)
        );
        let scope_local_data = SourceScopeLocalData {
            lint_root: if let LintLevel::Explicit(lint_root) = lint_level {
                lint_root
            } else {
                self.source_scopes[parent].local_data.as_ref().unwrap_crate_local().lint_root
            },
        };
        self.source_scopes.push(SourceScopeData {
            span,
            parent_scope: Some(parent),
            inlined: None,
            inlined_parent_scope: None,
            local_data: ClearCrossCrate::Set(scope_local_data),
        })
    }

    /// Given a span and the current source scope, make a SourceInfo.
    pub(crate) fn source_info(&self, span: Span) -> SourceInfo {
        SourceInfo { span, scope: self.source_scope }
    }

    /// Returns the region scope to use as the lifetime of an operand: an
    /// operand must (conservatively) live until it is consumed, so we use the
    /// topmost scope at the point where the operand is created.
    pub(crate) fn local_scope(&self) -> region::Scope {
        self.scopes.topmost()
    }

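    /// Schedules both a `StorageDead` and a value drop for `local` on exit
    /// from `region_scope`.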
    pub(crate) fn schedule_drop_storage_and_value(
        &mut self,
        span: Span,
        region_scope: region::Scope,
        local: Local,
    ) {
        self.schedule_drop(span, region_scope, local, DropKind::Storage);
        self.schedule_drop(span, region_scope, local, DropKind::Value);
    }

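    /// Indicates that `local` should be dropped on exit from `region_scope`.
    ///
    /// When called with `DropKind::Storage`, `local` should be a fresh
    /// temporary or variable, never the return place or a function argument.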
    pub(crate) fn schedule_drop(
        &mut self,
        span: Span,
        region_scope: region::Scope,
        local: Local,
        drop_kind: DropKind,
    ) {
        let needs_drop = match drop_kind {
            DropKind::Value | DropKind::ForLint => {
                if !self.local_decls[local].ty.needs_drop(self.tcx, self.typing_env()) {
                    return;
                }
                true
            }
            DropKind::Storage => {
                if local.index() <= self.arg_count {
                    span_bug!(
                        span,
                        "`schedule_drop` called with body argument {:?} \
                        but its storage does not require a drop",
                        local,
                    )
                }
                false
            }
        };

        // Scheduling a new drop invalidates the cached drop-tree entries of
        // every enclosing scope: the cached chains no longer reflect the full
        // set of drops. In coroutines even pure storage drops invalidate the
        // caches, since `StorageDead` statements take part in the coroutine
        // drop and unwind paths.
        let invalidate_caches = needs_drop || self.coroutine.is_some();
        for scope in self.scopes.scopes.iter_mut().rev() {
            if invalidate_caches {
                scope.invalidate_cache();
            }

            if scope.region_scope == region_scope {
                let region_scope_span = region_scope.span(self.tcx, self.region_scope_tree);
                // Attribute scope exit drops to scope's closing brace.
                let scope_end = self.tcx.sess.source_map().end_point(region_scope_span);

                scope.drops.push(DropData {
                    source_info: SourceInfo { span: scope_end, scope: scope.source_scope },
                    local,
                    kind: drop_kind,
                });

                return;
            }
        }

        span_bug!(span, "region scope {:?} not in scope to drop {:?}", region_scope, local);
    }

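    /// Schedules emission of a backwards incompatible drop hint for `local`,
    /// used to lint tail-expression temporaries whose drop order changes in
    /// Edition 2024.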
    #[instrument(level = "debug", skip(self))]
    pub(crate) fn schedule_backwards_incompatible_drop(
        &mut self,
        span: Span,
        region_scope: region::Scope,
        local: Local,
    ) {
        for scope in self.scopes.scopes.iter_mut().rev() {
            // Since we are inserting a linting MIR statement, we have to
            // invalidate the cached drop chains.
            scope.invalidate_cache();
            if scope.region_scope == region_scope {
                let region_scope_span = region_scope.span(self.tcx, self.region_scope_tree);
                let scope_end = self.tcx.sess.source_map().end_point(region_scope_span);

                scope.drops.push(DropData {
                    source_info: SourceInfo { span: scope_end, scope: scope.source_scope },
                    local,
                    kind: DropKind::ForLint,
                });

                return;
            }
        }
        span_bug!(
            span,
            "region scope {:?} not in scope to drop {:?} for linting",
            region_scope,
            local
        );
    }

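    /// Indicates that the "local operands" stored in `operands` are moved at
    /// the point of the call, and so they do not need to be dropped if the
    /// call succeeds.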
    pub(crate) fn record_operands_moved(&mut self, operands: &[Spanned<Operand<'tcx>>]) {
        let local_scope = self.local_scope();
        let scope = self.scopes.scopes.last_mut().unwrap();

        assert_eq!(scope.region_scope, local_scope, "local scope is not the topmost scope!");

        // Look for moves of a local variable, like `MOVE(_X)`.
        let locals_moved = operands.iter().flat_map(|operand| match operand.node {
            Operand::Copy(_) | Operand::Constant(_) => None,
            Operand::Move(place) => place.as_local(),
        });

        for local in locals_moved {
            // Check if we have a `Drop` scheduled for this operand and, if so,
            // record it as moved. Note that this local might not be an operand
            // created for this call; it could come from other places too.
            if scope.drops.iter().any(|drop| drop.local == local && drop.kind == DropKind::Value) {
                scope.moved_locals.push(local);
            }
        }
    }

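    /// Returns the [`DropIdx`] for the innermost drop if the function unwound
    /// at this point. The `DropIdx` will be created if it doesn't already
    /// exist.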
    fn diverge_cleanup(&mut self) -> DropIdx {
        // It is okay to use a dummy span here: positions of instructions in
        // the drop tree are not used by diagnostics.
        self.diverge_cleanup_target(self.scopes.topmost(), DUMMY_SP)
    }

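    /// Similar to [`Self::diverge_cleanup`], except its target can be an
    /// ancestor scope instead of the current one; this is needed when a drop
    /// panics while breaking out of (e.g.) an if-let chain.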
    fn diverge_cleanup_target(&mut self, target_scope: region::Scope, span: Span) -> DropIdx {
        let target = self.scopes.scope_index(target_scope, span);
        let (uncached_scope, mut cached_drop) = self.scopes.scopes[..=target]
            .iter()
            .enumerate()
            .rev()
            .find_map(|(scope_idx, scope)| {
                scope.cached_unwind_block.map(|cached_block| (scope_idx + 1, cached_block))
            })
            .unwrap_or((0, ROOT_NODE));

        // If the target scope's cache is still valid, return it directly.
        if uncached_scope > target {
            return cached_drop;
        }

        let is_coroutine = self.coroutine.is_some();
        for scope in &mut self.scopes.scopes[uncached_scope..=target] {
            for drop in &scope.drops {
                if is_coroutine || drop.kind == DropKind::Value {
                    cached_drop = self.scopes.unwind_drops.add_drop(*drop, cached_drop);
                }
            }
            scope.cached_unwind_block = Some(cached_drop);
        }

        cached_drop
    }

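    /// Prepares to create a path that performs all required cleanup for
    /// unwinding. This path terminates in `UnwindResume`; it isn't created
    /// until after all other basic blocks in the item have been lowered.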
    pub(crate) fn diverge_from(&mut self, start: BasicBlock) {
        debug_assert!(
            matches!(
                self.cfg.block_data(start).terminator().kind,
                TerminatorKind::Assert { .. }
                    | TerminatorKind::Call { .. }
                    | TerminatorKind::Drop { .. }
                    | TerminatorKind::FalseUnwind { .. }
                    | TerminatorKind::InlineAsm { .. }
            ),
            "diverge_from called on block with terminator that cannot unwind."
        );

        let next_drop = self.diverge_cleanup();
        self.scopes.unwind_drops.add_entry_point(start, next_drop);
    }

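    /// Returns the [`DropIdx`] for the innermost drop on the coroutine drop
    /// path (the "dropline"). The `DropIdx` will be created if it doesn't
    /// already exist.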
    fn diverge_dropline(&mut self) -> DropIdx {
        self.diverge_dropline_target(self.scopes.topmost(), DUMMY_SP)
    }

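    /// Similar to [`Self::diverge_cleanup_target`], but for the coroutine
    /// drop path.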
    fn diverge_dropline_target(&mut self, target_scope: region::Scope, span: Span) -> DropIdx {
        debug_assert!(
            self.coroutine.is_some(),
            "diverge_dropline_target is valid only for coroutine"
        );
        let target = self.scopes.scope_index(target_scope, span);
        let (uncached_scope, mut cached_drop) = self.scopes.scopes[..=target]
            .iter()
            .enumerate()
            .rev()
            .find_map(|(scope_idx, scope)| {
                scope.cached_coroutine_drop_block.map(|cached_block| (scope_idx + 1, cached_block))
            })
            .unwrap_or((0, ROOT_NODE));

        if uncached_scope > target {
            return cached_drop;
        }

        for scope in &mut self.scopes.scopes[uncached_scope..=target] {
            for drop in &scope.drops {
                cached_drop = self.scopes.coroutine_drops.add_drop(*drop, cached_drop);
            }
            scope.cached_coroutine_drop_block = Some(cached_drop);
        }

        cached_drop
    }

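    /// Sets up a path that performs all required cleanup for dropping a
    /// coroutine, starting from the given block that ends in
    /// [`TerminatorKind::Yield`]. This path terminates in `CoroutineDrop`.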
    pub(crate) fn coroutine_drop_cleanup(&mut self, yield_block: BasicBlock) {
        debug_assert!(
            matches!(
                self.cfg.block_data(yield_block).terminator().kind,
                TerminatorKind::Yield { .. }
            ),
            "coroutine_drop_cleanup called on block with non-yield terminator."
        );
        let cached_drop = self.diverge_dropline();
        self.scopes.coroutine_drops.add_entry_point(yield_block, cached_drop);
    }

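    /// Utility function for *non*-scope code to build their own drops: forces
    /// a drop of `place` at this point and then assigns `value` to it on both
    /// the success and unwind paths.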
    pub(crate) fn build_drop_and_replace(
        &mut self,
        block: BasicBlock,
        span: Span,
        place: Place<'tcx>,
        value: Rvalue<'tcx>,
    ) -> BlockAnd<()> {
        let source_info = self.source_info(span);

        // Create the new block for the assignment.
        let assign = self.cfg.start_new_block();
        self.cfg.push_assign(assign, source_info, place, value.clone());

        // Create the new block for the assignment in the case of unwinding.
        let assign_unwind = self.cfg.start_new_cleanup_block();
        self.cfg.push_assign(assign_unwind, source_info, place, value.clone());

        self.cfg.terminate(
            block,
            source_info,
            TerminatorKind::Drop {
                place,
                target: assign,
                unwind: UnwindAction::Cleanup(assign_unwind),
                replace: true,
                drop: None,
                async_fut: None,
            },
        );
        self.diverge_from(block);

        assign.unit()
    }

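    /// Creates an `Assert` terminator and returns the success block.
    /// If the boolean condition operand is not the expected value, a runtime
    /// panic will be caused with the given message.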
    pub(crate) fn assert(
        &mut self,
        block: BasicBlock,
        cond: Operand<'tcx>,
        expected: bool,
        msg: AssertMessage<'tcx>,
        span: Span,
    ) -> BasicBlock {
        let source_info = self.source_info(span);
        let success_block = self.cfg.start_new_block();

        self.cfg.terminate(
            block,
            source_info,
            TerminatorKind::Assert {
                cond,
                expected,
                msg: Box::new(msg),
                target: success_block,
                unwind: UnwindAction::Continue,
            },
        );
        self.diverge_from(block);

        success_block
    }

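    /// Unschedules any drops in the top two scopes.
    ///
    /// This is only needed for `match` arms: one scope holds the arm's pattern
    /// bindings and the nested scope holds the match guard's.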
    pub(crate) fn clear_match_arm_and_guard_scopes(&mut self, region_scope: region::Scope) {
        let [.., arm_scope, guard_scope] = &mut *self.scopes.scopes else {
            bug!("matches with guards should introduce separate scopes for the pattern and guard");
        };

        assert_eq!(arm_scope.region_scope, region_scope);
        assert_eq!(guard_scope.region_scope.data, region::ScopeData::MatchGuard);
        assert_eq!(guard_scope.region_scope.local_id, region_scope.local_id);

        arm_scope.drops.clear();
        arm_scope.invalidate_cache();
        guard_scope.drops.clear();
        guard_scope.invalidate_cache();
    }
}

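/// Builds drops for `pop_scope` and `leave_top_scope`.
///
/// `unwind_to` (and `dropline_to`, for the coroutine drop path) are the
/// drop-tree nodes whose chains mirror `scope.drops`; they are walked in
/// lockstep as drops are emitted, so each emitted drop can register the
/// correct continuation in its tree. `storage_dead_on_unwind` indicates
/// whether `StorageDead` statements also appear on the unwind path (they do
/// inside coroutines that need cleanup).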
fn build_scope_drops<'tcx, F>(
    cfg: &mut CFG<'tcx>,
    unwind_drops: &mut DropTree,
    coroutine_drops: &mut DropTree,
    scope: &Scope,
    block: BasicBlock,
    unwind_to: DropIdx,
    dropline_to: Option<DropIdx>,
    storage_dead_on_unwind: bool,
    arg_count: usize,
    is_async_drop: F,
) -> BlockAnd<()>
where
    F: Fn(Local) -> bool,
{
    debug!("build_scope_drops({:?} -> {:?}), dropline_to={:?}", block, scope, dropline_to);

    // The node in the unwind tree matching the drop we are about to emit.
    let mut unwind_to = unwind_to;

    // The block we are currently appending drops to.
    let mut block = block;

    // Equivalent of `unwind_to` for the coroutine drop path.
    let mut dropline_to = dropline_to;

    for drop_data in scope.drops.iter().rev() {
        let source_info = drop_data.source_info;
        let local = drop_data.local;

        match drop_data.kind {
            DropKind::Value => {
                // `unwind_to` should match the drop we are about to emit; step
                // past it *before* adding the entry point below, so that a
                // panic during this drop continues with the *next* drop on the
                // unwind path.
                debug_assert_eq!(unwind_drops.drop_nodes[unwind_to].data.local, drop_data.local);
                debug_assert_eq!(unwind_drops.drop_nodes[unwind_to].data.kind, drop_data.kind);
                unwind_to = unwind_drops.drop_nodes[unwind_to].next;

                if let Some(idx) = dropline_to {
                    debug_assert_eq!(coroutine_drops.drop_nodes[idx].data.local, drop_data.local);
                    debug_assert_eq!(coroutine_drops.drop_nodes[idx].data.kind, drop_data.kind);
                    dropline_to = Some(coroutine_drops.drop_nodes[idx].next);
                }

                // If the operand has been moved, don't generate the drop;
                // see `record_operands_moved`.
                if scope.moved_locals.contains(&local) {
                    continue;
                }

                unwind_drops.add_entry_point(block, unwind_to);
                if let Some(to) = dropline_to
                    && is_async_drop(local)
                {
                    coroutine_drops.add_entry_point(block, to);
                }

                let next = cfg.start_new_block();
                cfg.terminate(
                    block,
                    source_info,
                    TerminatorKind::Drop {
                        place: local.into(),
                        target: next,
                        unwind: UnwindAction::Continue,
                        replace: false,
                        drop: None,
                        async_fut: None,
                    },
                );
                block = next;
            }
            DropKind::ForLint => {
                // As in the `DropKind::Value` arm: if lint hints appear on the
                // unwind path, step past the matching node there too.
                if storage_dead_on_unwind {
                    debug_assert_eq!(
                        unwind_drops.drop_nodes[unwind_to].data.local,
                        drop_data.local
                    );
                    debug_assert_eq!(unwind_drops.drop_nodes[unwind_to].data.kind, drop_data.kind);
                    unwind_to = unwind_drops.drop_nodes[unwind_to].next;
                }

                if scope.moved_locals.contains(&local) {
                    continue;
                }

                cfg.push(
                    block,
                    Statement::new(
                        source_info,
                        StatementKind::BackwardIncompatibleDropHint {
                            place: Box::new(local.into()),
                            reason: BackwardIncompatibleDropReason::Edition2024,
                        },
                    ),
                );
            }
            DropKind::Storage => {
                if storage_dead_on_unwind {
                    debug_assert_eq!(
                        unwind_drops.drop_nodes[unwind_to].data.local,
                        drop_data.local
                    );
                    debug_assert_eq!(unwind_drops.drop_nodes[unwind_to].data.kind, drop_data.kind);
                    unwind_to = unwind_drops.drop_nodes[unwind_to].next;
                }
                if let Some(idx) = dropline_to {
                    debug_assert_eq!(coroutine_drops.drop_nodes[idx].data.local, drop_data.local);
                    debug_assert_eq!(coroutine_drops.drop_nodes[idx].data.kind, drop_data.kind);
                    dropline_to = Some(coroutine_drops.drop_nodes[idx].next);
                }
                // Only temps and vars need their storage dead.
                assert!(local.index() > arg_count);
                cfg.push(block, Statement::new(source_info, StatementKind::StorageDead(local)));
            }
        }
    }
    block.unit()
}

impl<'a, 'tcx: 'a> Builder<'a, 'tcx> {
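    /// Build a drop tree for a breakable scope.
    ///
    /// If `continue_block` is `Some`, then the tree is for `continue` inside a
    /// loop. Otherwise this is for `break` or `return`.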
    fn build_exit_tree(
        &mut self,
        mut drops: DropTree,
        else_scope: region::Scope,
        span: Span,
        continue_block: Option<BasicBlock>,
    ) -> Option<BlockAnd<()>> {
        let blocks = drops.build_mir::<ExitScopes>(&mut self.cfg, continue_block);
        let is_coroutine = self.coroutine.is_some();

        // Link the exit drop tree to the unwind drop tree.
        if drops.drop_nodes.iter().any(|drop_node| drop_node.data.kind == DropKind::Value) {
            let unwind_target = self.diverge_cleanup_target(else_scope, span);
            let mut unwind_indices = IndexVec::from_elem_n(unwind_target, 1);
            for (drop_idx, drop_node) in drops.drop_nodes.iter_enumerated().skip(1) {
                match drop_node.data.kind {
                    DropKind::Storage | DropKind::ForLint => {
                        if is_coroutine {
                            let unwind_drop = self
                                .scopes
                                .unwind_drops
                                .add_drop(drop_node.data, unwind_indices[drop_node.next]);
                            unwind_indices.push(unwind_drop);
                        } else {
                            unwind_indices.push(unwind_indices[drop_node.next]);
                        }
                    }
                    DropKind::Value => {
                        let unwind_drop = self
                            .scopes
                            .unwind_drops
                            .add_drop(drop_node.data, unwind_indices[drop_node.next]);
                        self.scopes.unwind_drops.add_entry_point(
                            blocks[drop_idx].unwrap(),
                            unwind_indices[drop_node.next],
                        );
                        unwind_indices.push(unwind_drop);
                    }
                }
            }
        }
        // Link the exit drop tree to the coroutine drop tree, for async drops.
        if is_coroutine
            && drops.drop_nodes.iter().any(|DropNode { data, next: _ }| {
                data.kind == DropKind::Value && self.is_async_drop(data.local)
            })
        {
            let dropline_target = self.diverge_dropline_target(else_scope, span);
            let mut dropline_indices = IndexVec::from_elem_n(dropline_target, 1);
            for (drop_idx, drop_data) in drops.drop_nodes.iter_enumerated().skip(1) {
                let coroutine_drop = self
                    .scopes
                    .coroutine_drops
                    .add_drop(drop_data.data, dropline_indices[drop_data.next]);
                match drop_data.data.kind {
                    DropKind::Storage | DropKind::ForLint => {}
                    DropKind::Value => {
                        if self.is_async_drop(drop_data.data.local) {
                            self.scopes.coroutine_drops.add_entry_point(
                                blocks[drop_idx].unwrap(),
                                dropline_indices[drop_data.next],
                            );
                        }
                    }
                }
                dropline_indices.push(coroutine_drop);
            }
        }
        blocks[ROOT_NODE].map(BasicBlock::unit)
    }

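    /// Build the unwind and coroutine drop trees.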
    pub(crate) fn build_drop_trees(&mut self) {
        if self.coroutine.is_some() {
            self.build_coroutine_drop_trees();
        } else {
            Self::build_unwind_tree(
                &mut self.cfg,
                &mut self.scopes.unwind_drops,
                self.fn_span,
                &mut None,
            );
        }
    }

    fn build_coroutine_drop_trees(&mut self) {
        // Build the drop tree for dropping the coroutine while it's suspended.
        let drops = &mut self.scopes.coroutine_drops;
        let cfg = &mut self.cfg;
        let fn_span = self.fn_span;
        let blocks = drops.build_mir::<CoroutineDrop>(cfg, None);
        if let Some(root_block) = blocks[ROOT_NODE] {
            cfg.terminate(
                root_block,
                SourceInfo::outermost(fn_span),
                TerminatorKind::CoroutineDrop,
            );
        }

        // Build the drop tree for unwinding in the normal control flow paths.
        let resume_block = &mut None;
        let unwind_drops = &mut self.scopes.unwind_drops;
        Self::build_unwind_tree(cfg, unwind_drops, fn_span, resume_block);

        // Build the drop tree for unwinding when dropping a suspended
        // coroutine. This is kept separate from the standard unwind tree so
        // that drop elaboration doesn't create drop flags that would have to
        // be captured by the coroutine.
        for (drop_idx, drop_node) in drops.drop_nodes.iter_enumerated() {
            if let DropKind::Value = drop_node.data.kind
                && let Some(bb) = blocks[drop_idx]
            {
                debug_assert!(drop_node.next < drops.drop_nodes.next_index());
                drops.entry_points.push((drop_node.next, bb));
            }
        }
        Self::build_unwind_tree(cfg, drops, fn_span, resume_block);
    }

    fn build_unwind_tree(
        cfg: &mut CFG<'tcx>,
        drops: &mut DropTree,
        fn_span: Span,
        resume_block: &mut Option<BasicBlock>,
    ) {
        let blocks = drops.build_mir::<Unwind>(cfg, *resume_block);
        if let (None, Some(resume)) = (*resume_block, blocks[ROOT_NODE]) {
            cfg.terminate(resume, SourceInfo::outermost(fn_span), TerminatorKind::UnwindResume);

            *resume_block = blocks[ROOT_NODE];
        }
    }
}

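// The three ways a drop tree can be wired into the CFG: normal exits
// (`break`/`continue`/`return`), the coroutine drop path, and unwinding.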
struct ExitScopes;

impl<'tcx> DropTreeBuilder<'tcx> for ExitScopes {
    fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock {
        cfg.start_new_block()
    }
    fn link_entry_point(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) {
        // There should be an existing terminator with real source info and a
        // dummy `TerminatorKind::UnwindResume` (see e.g. `break_scope`);
        // replace it with a proper goto.
        let term = cfg.block_data_mut(from).terminator_mut();
        if let TerminatorKind::UnwindResume = term.kind {
            term.kind = TerminatorKind::Goto { target: to };
        } else {
            span_bug!(term.source_info.span, "unexpected dummy terminator kind: {:?}", term.kind);
        }
    }
}

struct CoroutineDrop;

impl<'tcx> DropTreeBuilder<'tcx> for CoroutineDrop {
    fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock {
        cfg.start_new_block()
    }
    fn link_entry_point(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) {
        let term = cfg.block_data_mut(from).terminator_mut();
        if let TerminatorKind::Yield { ref mut drop, .. } = term.kind {
            *drop = Some(to);
        } else if let TerminatorKind::Drop { ref mut drop, .. } = term.kind {
            *drop = Some(to);
        } else {
            span_bug!(
                term.source_info.span,
                "cannot enter coroutine drop tree from {:?}",
                term.kind
            )
        }
    }
}

struct Unwind;

impl<'tcx> DropTreeBuilder<'tcx> for Unwind {
    fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock {
        cfg.start_new_cleanup_block()
    }
    fn link_entry_point(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) {
        let term = cfg.block_data_mut(from).terminator_mut();
        match &mut term.kind {
            TerminatorKind::Drop { unwind, .. } => {
                if let UnwindAction::Cleanup(unwind) = *unwind {
                    let source_info = term.source_info;
                    cfg.terminate(unwind, source_info, TerminatorKind::Goto { target: to });
                } else {
                    *unwind = UnwindAction::Cleanup(to);
                }
            }
            TerminatorKind::FalseUnwind { unwind, .. }
            | TerminatorKind::Call { unwind, .. }
            | TerminatorKind::Assert { unwind, .. }
            | TerminatorKind::InlineAsm { unwind, .. } => {
                *unwind = UnwindAction::Cleanup(to);
            }
            TerminatorKind::Goto { .. }
            | TerminatorKind::SwitchInt { .. }
            | TerminatorKind::UnwindResume
            | TerminatorKind::UnwindTerminate(_)
            | TerminatorKind::Return
            | TerminatorKind::TailCall { .. }
            | TerminatorKind::Unreachable
            | TerminatorKind::Yield { .. }
            | TerminatorKind::CoroutineDrop
            | TerminatorKind::FalseEdge { .. } => {
                span_bug!(term.source_info.span, "cannot unwind from {:?}", term.kind)
            }
        }
    }
}