// rustc_mir_dataflow/impls/initialized.rs

1use std::assert_matches::assert_matches;
2
3use rustc_abi::VariantIdx;
4use rustc_index::Idx;
5use rustc_index::bit_set::{DenseBitSet, MixedBitSet};
6use rustc_middle::bug;
7use rustc_middle::mir::{
8    self, Body, CallReturnPlaces, Location, SwitchTargetValue, TerminatorEdges,
9};
10use rustc_middle::ty::util::Discr;
11use rustc_middle::ty::{self, TyCtxt};
12use smallvec::SmallVec;
13use tracing::{debug, instrument};
14
15use crate::drop_flag_effects::{DropFlagState, InactiveVariants};
16use crate::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex};
17use crate::{
18    Analysis, GenKill, MaybeReachable, drop_flag_effects, drop_flag_effects_for_function_entry,
19    drop_flag_effects_for_location, on_all_children_bits, on_lookup_result_bits,
20};
21
// Used by both `MaybeInitializedPlaces` and `MaybeUninitializedPlaces`.
pub struct MaybePlacesSwitchIntData<'tcx> {
    /// The enum place whose discriminant the `SwitchInt` reads.
    enum_place: mir::Place<'tcx>,
    /// All `(variant, discriminant)` pairs of the enum, in `AdtDef::discriminants` order.
    discriminants: Vec<(VariantIdx, Discr<'tcx>)>,
    /// Cursor into `discriminants`, advanced by `next_discr` for linear-time matching.
    index: usize,
}
28
29impl<'tcx> MaybePlacesSwitchIntData<'tcx> {
30    /// Creates a `SmallVec` mapping each target in `targets` to its `VariantIdx`.
31    fn variants(&mut self, targets: &mir::SwitchTargets) -> SmallVec<[VariantIdx; 4]> {
32        self.index = 0;
33        targets.all_values().iter().map(|value| self.next_discr(value.get())).collect()
34    }
35
36    // The discriminant order in the `SwitchInt` targets should match the order yielded by
37    // `AdtDef::discriminants`. We rely on this to match each discriminant in the targets to its
38    // corresponding variant in linear time.
39    fn next_discr(&mut self, value: u128) -> VariantIdx {
40        // An out-of-bounds abort will occur if the discriminant ordering isn't as described above.
41        loop {
42            let (variant, discr) = self.discriminants[self.index];
43            self.index += 1;
44            if discr.val == value {
45                return variant;
46            }
47        }
48    }
49}
50
impl<'tcx> MaybePlacesSwitchIntData<'tcx> {
    /// Builds the edge-effect data for a `SwitchInt` at the end of `block` whose operand `discr`
    /// is an enum discriminant. Returns `None` when the block does not match that pattern.
    fn new(
        tcx: TyCtxt<'tcx>,
        body: &Body<'tcx>,
        block: mir::BasicBlock,
        discr: &mir::Operand<'tcx>,
    ) -> Option<Self> {
        // A constant operand has no place and therefore cannot be a discriminant read.
        let Some(discr) = discr.place() else { return None };

        // Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt`
        // is an enum discriminant.
        //
        // We expect such blocks to have a call to `discriminant` as their last statement like so:
        // ```text
        // ...
        // _42 = discriminant(_1)
        // SwitchInt(_42, ..)
        // ```
        // If the basic block matches this pattern, this function gathers the place corresponding
        // to the enum (`_1` in the example above) as well as the discriminants.
        let block_data = &body[block];
        for statement in block_data.statements.iter().rev() {
            match statement.kind {
                mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(enum_place)))
                    if lhs == discr =>
                {
                    match enum_place.ty(body, tcx).ty.kind() {
                        ty::Adt(enum_def, _) => {
                            return Some(MaybePlacesSwitchIntData {
                                enum_place,
                                discriminants: enum_def.discriminants(tcx).collect(),
                                index: 0,
                            });
                        }

                        // `Rvalue::Discriminant` is also used to get the active yield point for a
                        // coroutine, but we do not need edge-specific effects in that case. This
                        // may change in the future.
                        ty::Coroutine(..) => break,

                        t => bug!("`discriminant` called on unexpected type {:?}", t),
                    }
                }
                // Coverage statements are instrumentation only; skip past them while searching
                // for the discriminant assignment.
                mir::StatementKind::Coverage(_) => continue,
                _ => break,
            }
        }
        None
    }
}
101
/// `MaybeInitializedPlaces` tracks all places that might be
/// initialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// #[rustfmt::skip]
/// fn foo(pred: bool) {                        // maybe-init:
///                                             // {}
///     let a = S; let mut b = S; let c; let d; // {a, b}
///
///     if pred {
///         drop(a);                            // {   b}
///         b = S;                              // {   b}
///
///     } else {
///         drop(b);                            // {a}
///         d = S;                              // {a,       d}
///
///     }                                       // {a, b,    d}
///
///     c = S;                                  // {a, b, c, d}
/// }
/// ```
///
/// To determine whether a place is *definitely* initialized at a
/// particular control-flow point, one can take the set-complement
/// of the data from `MaybeUninitializedPlaces` at the corresponding
/// control-flow point.
///
/// Similarly, at a given `drop` statement, the set-intersection
/// between this data and `MaybeUninitializedPlaces` yields the set of
/// places that would require a dynamic drop-flag at that statement.
pub struct MaybeInitializedPlaces<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    body: &'a Body<'tcx>,
    move_data: &'a MoveData<'tcx>,
    /// When set, kills move paths of definitely inactive variants along `otherwise` edges of a
    /// `SwitchInt` on an enum discriminant. See [`Self::exclude_inactive_in_otherwise`].
    exclude_inactive_in_otherwise: bool,
    /// When set, `Drop` terminators whose place is fully uninitialized lose their cleanup edge.
    /// See [`Self::skipping_unreachable_unwind`].
    skip_unreachable_unwind: bool,
}
145
146impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
147    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, move_data: &'a MoveData<'tcx>) -> Self {
148        MaybeInitializedPlaces {
149            tcx,
150            body,
151            move_data,
152            exclude_inactive_in_otherwise: false,
153            skip_unreachable_unwind: false,
154        }
155    }
156
157    /// Ensures definitely inactive variants are excluded from the set of initialized places for
158    /// blocks reached through an `otherwise` edge.
159    pub fn exclude_inactive_in_otherwise(mut self) -> Self {
160        self.exclude_inactive_in_otherwise = true;
161        self
162    }
163
164    pub fn skipping_unreachable_unwind(mut self) -> Self {
165        self.skip_unreachable_unwind = true;
166        self
167    }
168
169    pub fn is_unwind_dead(
170        &self,
171        place: mir::Place<'tcx>,
172        state: &<Self as Analysis<'tcx>>::Domain,
173    ) -> bool {
174        if let LookupResult::Exact(path) = self.move_data().rev_lookup.find(place.as_ref()) {
175            let mut maybe_live = false;
176            on_all_children_bits(self.move_data(), path, |child| {
177                maybe_live |= state.contains(child);
178            });
179            !maybe_live
180        } else {
181            false
182        }
183    }
184}
185
impl<'a, 'tcx> HasMoveData<'tcx> for MaybeInitializedPlaces<'a, 'tcx> {
    /// Returns the move-path data this analysis was constructed with.
    fn move_data(&self) -> &MoveData<'tcx> {
        self.move_data
    }
}
191
/// `MaybeUninitializedPlaces` tracks all places that might be
/// uninitialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// #[rustfmt::skip]
/// fn foo(pred: bool) {                        // maybe-uninit:
///                                             // {a, b, c, d}
///     let a = S; let mut b = S; let c; let d; // {      c, d}
///
///     if pred {
///         drop(a);                            // {a,    c, d}
///         b = S;                              // {a,    c, d}
///
///     } else {
///         drop(b);                            // {   b, c, d}
///         d = S;                              // {   b, c   }
///
///     }                                       // {a, b, c, d}
///
///     c = S;                                  // {a, b,    d}
/// }
/// ```
///
/// To determine whether a place is *definitely* uninitialized at a
/// particular control-flow point, one can take the set-complement
/// of the data from `MaybeInitializedPlaces` at the corresponding
/// control-flow point.
///
/// Similarly, at a given `drop` statement, the set-intersection
/// between this data and `MaybeInitializedPlaces` yields the set of
/// places that would require a dynamic drop-flag at that statement.
pub struct MaybeUninitializedPlaces<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    body: &'a Body<'tcx>,
    move_data: &'a MoveData<'tcx>,

    /// See [`Self::mark_inactive_variants_as_uninit`].
    mark_inactive_variants_as_uninit: bool,
    /// See [`Self::include_inactive_in_otherwise`].
    include_inactive_in_otherwise: bool,
    /// Blocks whose `Drop` terminator should ignore its cleanup edge; supplied via
    /// [`Self::skipping_unreachable_unwind`], empty by default.
    skip_unreachable_unwind: DenseBitSet<mir::BasicBlock>,
}
237
238impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
239    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, move_data: &'a MoveData<'tcx>) -> Self {
240        MaybeUninitializedPlaces {
241            tcx,
242            body,
243            move_data,
244            mark_inactive_variants_as_uninit: false,
245            include_inactive_in_otherwise: false,
246            skip_unreachable_unwind: DenseBitSet::new_empty(body.basic_blocks.len()),
247        }
248    }
249
250    /// Causes inactive enum variants to be marked as "maybe uninitialized" after a switch on an
251    /// enum discriminant.
252    ///
253    /// This is correct in a vacuum but is not the default because it causes problems in the borrow
254    /// checker, where this information gets propagated along `FakeEdge`s.
255    pub fn mark_inactive_variants_as_uninit(mut self) -> Self {
256        self.mark_inactive_variants_as_uninit = true;
257        self
258    }
259
260    /// Ensures definitely inactive variants are included in the set of uninitialized places for
261    /// blocks reached through an `otherwise` edge.
262    pub fn include_inactive_in_otherwise(mut self) -> Self {
263        self.include_inactive_in_otherwise = true;
264        self
265    }
266
267    pub fn skipping_unreachable_unwind(
268        mut self,
269        unreachable_unwind: DenseBitSet<mir::BasicBlock>,
270    ) -> Self {
271        self.skip_unreachable_unwind = unreachable_unwind;
272        self
273    }
274}
275
impl<'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
    /// Returns the move-path data this analysis was constructed with.
    fn move_data(&self) -> &MoveData<'tcx> {
        self.move_data
    }
}
281
/// `EverInitializedPlaces` tracks all places that might have ever been
/// initialized upon reaching a particular point in the control flow
/// for a function, without an intervening `StorageDead`.
///
/// This dataflow is used to determine if an immutable local variable may
/// be assigned to.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// #[rustfmt::skip]
/// fn foo(pred: bool) {                        // ever-init:
///                                             // {          }
///     let a = S; let mut b = S; let c; let d; // {a, b      }
///
///     if pred {
///         drop(a);                            // {a, b,     }
///         b = S;                              // {a, b,     }
///
///     } else {
///         drop(b);                            // {a, b,      }
///         d = S;                              // {a, b,    d }
///
///     }                                       // {a, b,    d }
///
///     c = S;                                  // {a, b, c, d }
/// }
/// ```
pub struct EverInitializedPlaces<'a, 'tcx> {
    /// The MIR body being analyzed.
    body: &'a Body<'tcx>,
    /// Move paths and the `inits` this analysis tracks by `InitIndex`.
    move_data: &'a MoveData<'tcx>,
}
316
317impl<'a, 'tcx> EverInitializedPlaces<'a, 'tcx> {
318    pub fn new(body: &'a Body<'tcx>, move_data: &'a MoveData<'tcx>) -> Self {
319        EverInitializedPlaces { body, move_data }
320    }
321}
322
impl<'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'_, 'tcx> {
    /// Returns the move-path data this analysis was constructed with.
    fn move_data(&self) -> &MoveData<'tcx> {
        self.move_data
    }
}
328
329impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
330    fn update_bits(
331        state: &mut <Self as Analysis<'tcx>>::Domain,
332        path: MovePathIndex,
333        dfstate: DropFlagState,
334    ) {
335        match dfstate {
336            DropFlagState::Absent => state.kill(path),
337            DropFlagState::Present => state.gen_(path),
338        }
339    }
340}
341
342impl<'tcx> MaybeUninitializedPlaces<'_, 'tcx> {
343    fn update_bits(
344        state: &mut <Self as Analysis<'tcx>>::Domain,
345        path: MovePathIndex,
346        dfstate: DropFlagState,
347    ) {
348        match dfstate {
349            DropFlagState::Absent => state.gen_(path),
350            DropFlagState::Present => state.kill(path),
351        }
352    }
353}
354
impl<'tcx> Analysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
    /// There can be many more `MovePathIndex` than there are locals in a MIR body.
    /// We use a mixed bitset to avoid paying too high a memory footprint.
    type Domain = MaybeReachable<MixedBitSet<MovePathIndex>>;

    type SwitchIntData = MaybePlacesSwitchIntData<'tcx>;

    const NAME: &'static str = "maybe_init";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = uninitialized
        MaybeReachable::Unreachable
    }

    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
        *state =
            MaybeReachable::Reachable(MixedBitSet::new_empty(self.move_data().move_paths.len()));
        // On function entry, the only initialized paths are the arguments; the assert checks
        // that the entry effects are all `Present`.
        drop_flag_effects_for_function_entry(self.body, self.move_data, |path, s| {
            assert!(s == DropFlagState::Present);
            state.gen_(path);
        });
    }

    fn apply_primary_statement_effect(
        &mut self,
        state: &mut Self::Domain,
        statement: &mir::Statement<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.body, self.move_data, location, |path, s| {
            Self::update_bits(state, path, s)
        });

        // Mark all places as "maybe init" if they are mutably borrowed. See #90752.
        if self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration
            && let Some((_, rvalue)) = statement.kind.as_assign()
            && let mir::Rvalue::Ref(_, mir::BorrowKind::Mut { .. }, place)
                // FIXME: Does `&raw const foo` allow mutation? See #90413.
                | mir::Rvalue::RawPtr(_, place) = rvalue
            && let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref())
        {
            on_all_children_bits(self.move_data(), mpi, |child| {
                state.gen_(child);
            })
        }
    }

    fn apply_primary_terminator_effect<'mir>(
        &mut self,
        state: &mut Self::Domain,
        terminator: &'mir mir::Terminator<'tcx>,
        location: Location,
    ) -> TerminatorEdges<'mir, 'tcx> {
        // Note: `edges` must be computed first because `drop_flag_effects_for_location` can change
        // the result of `is_unwind_dead`.
        let mut edges = terminator.edges();
        if self.skip_unreachable_unwind
            && let mir::TerminatorKind::Drop {
                target,
                unwind,
                place,
                replace: _,
                drop: _,
                async_fut: _,
            } = terminator.kind
            && matches!(unwind, mir::UnwindAction::Cleanup(_))
            && self.is_unwind_dead(place, state)
        {
            // Nothing under `place` may be initialized, so drop the cleanup edge and only
            // propagate along the success edge.
            edges = TerminatorEdges::Single(target);
        }
        drop_flag_effects_for_location(self.body, self.move_data, location, |path, s| {
            Self::update_bits(state, path, s)
        });
        edges
    }

    fn apply_call_return_effect(
        &mut self,
        state: &mut Self::Domain,
        _block: mir::BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        return_places.for_each(|place| {
            // when a call returns successfully, that means we need to set
            // the bits for that dest_place to 1 (initialized).
            on_lookup_result_bits(
                self.move_data(),
                self.move_data().rev_lookup.find(place.as_ref()),
                |mpi| {
                    state.gen_(mpi);
                },
            );
        });
    }

    fn get_switch_int_data(
        &mut self,
        block: mir::BasicBlock,
        discr: &mir::Operand<'tcx>,
    ) -> Option<Self::SwitchIntData> {
        // Edge-specific `SwitchInt` effects are only computed when
        // `-Zprecise-enum-drop-elaboration` is enabled.
        if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
            return None;
        }

        MaybePlacesSwitchIntData::new(self.tcx, self.body, block, discr)
    }

    fn apply_switch_int_edge_effect(
        &mut self,
        data: &mut Self::SwitchIntData,
        state: &mut Self::Domain,
        value: SwitchTargetValue,
        targets: &mir::SwitchTargets,
    ) {
        // On a normal edge exactly one variant is active; on the `otherwise` edge all variants
        // with explicit targets are inactive (only computed when the corresponding flag is set).
        let inactive_variants = match value {
            SwitchTargetValue::Normal(value) => InactiveVariants::Active(data.next_discr(value)),
            SwitchTargetValue::Otherwise if self.exclude_inactive_in_otherwise => {
                InactiveVariants::Inactives(data.variants(targets))
            }
            _ => return,
        };

        // Kill all move paths that correspond to variants we know to be inactive along this
        // particular outgoing edge of a `SwitchInt`.
        drop_flag_effects::on_all_inactive_variants(
            self.move_data,
            data.enum_place,
            &inactive_variants,
            |mpi| state.kill(mpi),
        );
    }
}
487
/// Dataflow domain for [`MaybeUninitializedPlaces`].
///
/// There can be many more `MovePathIndex` than there are locals in a MIR body.
/// We use a mixed bitset to avoid paying too high a memory footprint.
pub type MaybeUninitializedPlacesDomain = MixedBitSet<MovePathIndex>;
491
impl<'tcx> Analysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
    type Domain = MaybeUninitializedPlacesDomain;

    type SwitchIntData = MaybePlacesSwitchIntData<'tcx>;

    const NAME: &'static str = "maybe_uninit";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = initialized (`initialize_start_block` overwrites this on first entry)
        MixedBitSet::new_empty(self.move_data().move_paths.len())
    }

    // sets state bits for Arg places
    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
        // set all bits to 1 (uninit) before gathering counter-evidence
        state.insert_all();

        // Arguments are initialized on entry, so clear their bits.
        drop_flag_effects_for_function_entry(self.body, self.move_data, |path, s| {
            assert!(s == DropFlagState::Present);
            state.remove(path);
        });
    }

    fn apply_primary_statement_effect(
        &mut self,
        state: &mut Self::Domain,
        _statement: &mir::Statement<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.body, self.move_data, location, |path, s| {
            Self::update_bits(state, path, s)
        });

        // Unlike in `MaybeInitializedPlaces` above, we don't need to change the state when a
        // mutable borrow occurs. Places cannot become uninitialized through a mutable reference.
    }

    fn apply_primary_terminator_effect<'mir>(
        &mut self,
        state: &mut Self::Domain,
        terminator: &'mir mir::Terminator<'tcx>,
        location: Location,
    ) -> TerminatorEdges<'mir, 'tcx> {
        drop_flag_effects_for_location(self.body, self.move_data, location, |path, s| {
            Self::update_bits(state, path, s)
        });
        // Blocks recorded via `skipping_unreachable_unwind` must end in a `Drop` with a cleanup
        // unwind action; their cleanup edge is ignored here.
        if self.skip_unreachable_unwind.contains(location.block) {
            let mir::TerminatorKind::Drop { target, unwind, .. } = terminator.kind else { bug!() };
            assert_matches!(unwind, mir::UnwindAction::Cleanup(_));
            TerminatorEdges::Single(target)
        } else {
            terminator.edges()
        }
    }

    fn apply_call_return_effect(
        &mut self,
        state: &mut Self::Domain,
        _block: mir::BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        return_places.for_each(|place| {
            // when a call returns successfully, that means we need to set
            // the bits for that dest_place to 0 (initialized).
            on_lookup_result_bits(
                self.move_data(),
                self.move_data().rev_lookup.find(place.as_ref()),
                |mpi| {
                    state.kill(mpi);
                },
            );
        });
    }

    fn get_switch_int_data(
        &mut self,
        block: mir::BasicBlock,
        discr: &mir::Operand<'tcx>,
    ) -> Option<Self::SwitchIntData> {
        if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
            return None;
        }

        // Edge-specific effects are opt-in for this analysis; see the doc comment on
        // `mark_inactive_variants_as_uninit` for why this is not the default.
        if !self.mark_inactive_variants_as_uninit {
            return None;
        }

        MaybePlacesSwitchIntData::new(self.tcx, self.body, block, discr)
    }

    fn apply_switch_int_edge_effect(
        &mut self,
        data: &mut Self::SwitchIntData,
        state: &mut Self::Domain,
        value: SwitchTargetValue,
        targets: &mir::SwitchTargets,
    ) {
        let inactive_variants = match value {
            SwitchTargetValue::Normal(value) => InactiveVariants::Active(data.next_discr(value)),
            SwitchTargetValue::Otherwise if self.include_inactive_in_otherwise => {
                InactiveVariants::Inactives(data.variants(targets))
            }
            _ => return,
        };

        // Mark all move paths that correspond to variants other than this one as maybe
        // uninitialized (in reality, they are *definitely* uninitialized).
        drop_flag_effects::on_all_inactive_variants(
            self.move_data,
            data.enum_place,
            &inactive_variants,
            |mpi| state.gen_(mpi),
        );
    }
}
607
/// Dataflow domain for [`EverInitializedPlaces`].
///
/// There can be many more `InitIndex` than there are locals in a MIR body.
/// We use a mixed bitset to avoid paying too high a memory footprint.
pub type EverInitializedPlacesDomain = MixedBitSet<InitIndex>;
611
impl<'tcx> Analysis<'tcx> for EverInitializedPlaces<'_, 'tcx> {
    type Domain = EverInitializedPlacesDomain;

    const NAME: &'static str = "ever_init";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = no initialized variables by default
        MixedBitSet::new_empty(self.move_data().inits.len())
    }

    fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
        // NOTE(review): assumes the first `arg_count` entries of `move_data.inits` are the
        // argument initializations — confirm against `MoveData` construction.
        for arg_init in 0..body.arg_count {
            state.insert(InitIndex::new(arg_init));
        }
    }

    #[instrument(skip(self, state), level = "debug")]
    fn apply_primary_statement_effect(
        &mut self,
        state: &mut Self::Domain,
        stmt: &mir::Statement<'tcx>,
        location: Location,
    ) {
        let move_data = self.move_data();
        let init_path_map = &move_data.init_path_map;
        let init_loc_map = &move_data.init_loc_map;
        let rev_lookup = &move_data.rev_lookup;

        // Gen every init recorded at this statement's location.
        debug!("initializes move_indexes {:?}", init_loc_map[location]);
        state.gen_all(init_loc_map[location].iter().copied());

        if let mir::StatementKind::StorageDead(local) = stmt.kind
            // End inits for StorageDead, so that an immutable variable can
            // be reinitialized on the next iteration of the loop.
            && let Some(move_path_index) = rev_lookup.find_local(local)
        {
            debug!("clears the ever initialized status of {:?}", init_path_map[move_path_index]);
            state.kill_all(init_path_map[move_path_index].iter().copied());
        }
    }

    #[instrument(skip(self, state, terminator), level = "debug")]
    fn apply_primary_terminator_effect<'mir>(
        &mut self,
        state: &mut Self::Domain,
        terminator: &'mir mir::Terminator<'tcx>,
        location: Location,
    ) -> TerminatorEdges<'mir, 'tcx> {
        let (body, move_data) = (self.body, self.move_data());
        let term = body[location.block].terminator();
        let init_loc_map = &move_data.init_loc_map;
        debug!(?term);
        debug!("initializes move_indexes {:?}", init_loc_map[location]);
        // `NonPanicPathOnly` inits are excluded here; they are applied on the success path by
        // `apply_call_return_effect` below instead.
        state.gen_all(
            init_loc_map[location]
                .iter()
                .filter(|init_index| {
                    move_data.inits[**init_index].kind != InitKind::NonPanicPathOnly
                })
                .copied(),
        );
        terminator.edges()
    }

    fn apply_call_return_effect(
        &mut self,
        state: &mut Self::Domain,
        block: mir::BasicBlock,
        _return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        let move_data = self.move_data();
        let init_loc_map = &move_data.init_loc_map;

        // Gen every init recorded at the call terminator's location, now that the call is known
        // to have returned successfully.
        let call_loc = self.body.terminator_loc(block);
        for init_index in &init_loc_map[call_loc] {
            state.gen_(*init_index);
        }
    }
}