rustc_const_eval/check_consts/resolver.rs

//! Propagate `Qualif`s between locals and query the results.
//!
//! This contains the dataflow analysis used to track `Qualif`s on complex control-flow graphs.

use std::fmt;
use std::marker::PhantomData;

use rustc_index::bit_set::MixedBitSet;
use rustc_middle::mir::visit::Visitor;
use rustc_middle::mir::{
    self, BasicBlock, CallReturnPlaces, Local, Location, Statement, StatementKind, TerminatorEdges,
};
use rustc_mir_dataflow::fmt::DebugWithContext;
use rustc_mir_dataflow::{Analysis, JoinSemiLattice};

use super::{ConstCx, Qualif, qualifs};

/// A `Visitor` that propagates qualifs between locals. This defines the transfer function of
/// `FlowSensitiveAnalysis`.
///
/// To account for indirect assignments, data flow conservatively assumes that a local becomes
/// qualified immediately after it is borrowed or its address escapes. The borrow must allow for
/// mutation, which includes shared borrows of places with interior mutability. The type of the
/// borrowed place must contain the qualif.
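///
/// For example (an illustrative sketch, not taken from the original docs), with the `NeedsDrop`
/// qualif:
/// ```ignore (illustrative)
/// let mut x: Option<Vec<u8>> = None; // assigning `None` leaves `x` unqualified
/// let p = &mut x;                    // `x`'s type may need drop and the borrow allows mutation,
///                                    // so `x` is conservatively qualified from here on
/// *p = Some(Vec::new());             // ...which covers indirect assignments like this one
/// ```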
struct TransferFunction<'a, 'mir, 'tcx, Q> {
    ccx: &'a ConstCx<'mir, 'tcx>,
    state: &'a mut State,
    _qualif: PhantomData<Q>,
}

impl<'a, 'mir, 'tcx, Q> TransferFunction<'a, 'mir, 'tcx, Q>
where
    Q: Qualif,
{
    fn new(ccx: &'a ConstCx<'mir, 'tcx>, state: &'a mut State) -> Self {
        TransferFunction { ccx, state, _qualif: PhantomData }
    }

    fn initialize_state(&mut self) {
        self.state.qualif.clear();
        self.state.borrow.clear();

        for arg in self.ccx.body.args_iter() {
            let arg_ty = self.ccx.body.local_decls[arg].ty;
            if Q::in_any_value_of_ty(self.ccx, arg_ty) {
                self.state.qualif.insert(arg);
            }
        }
    }

    fn assign_qualif_direct(&mut self, place: &mir::Place<'tcx>, mut value: bool) {
        debug_assert!(!place.is_indirect());

        if !value {
            for (base, _elem) in place.iter_projections() {
                let base_ty = base.ty(self.ccx.body, self.ccx.tcx);
                if base_ty.ty.is_union() && Q::in_any_value_of_ty(self.ccx, base_ty.ty) {
                    value = true;
                    break;
                }
            }
        }

        match (value, place.as_ref()) {
            (true, mir::PlaceRef { local, .. }) => {
                self.state.qualif.insert(local);
            }

            // For now, we do not clear the qualif if a local is overwritten in full by
            // an unqualified rvalue (e.g. `y = 5`). This is to be consistent with aggregates,
            // where we overwrite all fields with individual assignments, which would not get
            // this treatment.
            (false, mir::PlaceRef { local: _, projection: &[] }) => {
                // self.state.qualif.remove(*local);
            }

            _ => {}
        }
    }

    fn apply_call_return_effect(
        &mut self,
        _block: BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        return_places.for_each(|place| {
            // We cannot reason about another function's internals, so use conservative type-based
            // qualification for the result of a function call.
            let return_ty = place.ty(self.ccx.body, self.ccx.tcx).ty;
            let qualif = Q::in_any_value_of_ty(self.ccx, return_ty);

            if !place.is_indirect() {
                self.assign_qualif_direct(&place, qualif);
            }
        });
    }

    fn address_of_allows_mutation(&self) -> bool {
        // The exact set of permissions granted by `RawPtr` is undecided. Conservatively assume
        // that it might allow mutation until the resolution of #56604.
        true
    }

    fn ref_allows_mutation(&self, kind: mir::BorrowKind, place: mir::Place<'tcx>) -> bool {
        match kind {
            mir::BorrowKind::Mut { .. } => true,
            mir::BorrowKind::Shared | mir::BorrowKind::Fake(_) => {
                self.shared_borrow_allows_mutation(place)
            }
        }
    }

    /// `&` only allows mutation if the borrowed place is `!Freeze`.
    ///
    /// This assumes that it is UB to take the address of a struct field whose type is
    /// `Freeze`, then use pointer arithmetic to derive a pointer to a *different* field of
    /// that same struct whose type is `!Freeze`. If we decide that this is not UB, we will
    /// have to check the type of the borrowed **local** instead of the borrowed **place**
    /// below. See [rust-lang/unsafe-code-guidelines#134].
    ///
    /// [rust-lang/unsafe-code-guidelines#134]: https://github.com/rust-lang/unsafe-code-guidelines/issues/134
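    ///
    /// An illustrative sketch (not taken from the original docs) of why the borrowed **place**,
    /// not the local, is what gets checked:
    /// ```ignore (illustrative)
    /// use std::cell::Cell;
    /// struct S { a: u8, b: Cell<u8> }
    /// let s = S { a: 0, b: Cell::new(0) };
    /// let _p = &s.a; // place type `u8` is `Freeze`: no mutation possible through `_p`
    /// let _q = &s.b; // place type `Cell<u8>` is `!Freeze`: `s.b` may be mutated through `_q`,
    ///                // so this shared borrow is treated as allowing mutation
    /// ```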
    fn shared_borrow_allows_mutation(&self, place: mir::Place<'tcx>) -> bool {
        !place.ty(self.ccx.body, self.ccx.tcx).ty.is_freeze(self.ccx.tcx, self.ccx.typing_env)
    }
}

impl<'tcx, Q> Visitor<'tcx> for TransferFunction<'_, '_, 'tcx, Q>
where
    Q: Qualif,
{
    fn visit_operand(&mut self, operand: &mir::Operand<'tcx>, location: Location) {
        self.super_operand(operand, location);

        if !Q::IS_CLEARED_ON_MOVE {
            return;
        }

        // If a local with no projections is moved from (e.g. `x` in `y = x`), record that
        // it no longer needs to be dropped.
        if let mir::Operand::Move(place) = operand {
            if let Some(local) = place.as_local() {
                // For backward compatibility with the `MaybeMutBorrowedLocals` used in an earlier
                // implementation, we retain the qualif if the local had been borrowed before. This
                // might not be strictly necessary since the local is no longer initialized.
                if !self.state.borrow.contains(local) {
                    self.state.qualif.remove(local);
                }
            }
        }
    }

    fn visit_assign(
        &mut self,
        place: &mir::Place<'tcx>,
        rvalue: &mir::Rvalue<'tcx>,
        location: Location,
    ) {
        let qualif =
            qualifs::in_rvalue::<Q, _>(self.ccx, &mut |l| self.state.qualif.contains(l), rvalue);
        if !place.is_indirect() {
            self.assign_qualif_direct(place, qualif);
        }

        // We need to assign qualifs to the left-hand side before visiting `rvalue` since
        // qualifs can be cleared on move.
        self.super_assign(place, rvalue, location);
    }

    fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: Location) {
        self.super_rvalue(rvalue, location);

        match rvalue {
            mir::Rvalue::RawPtr(_mt, borrowed_place) => {
                if !borrowed_place.is_indirect() && self.address_of_allows_mutation() {
                    let place_ty = borrowed_place.ty(self.ccx.body, self.ccx.tcx).ty;
                    if Q::in_any_value_of_ty(self.ccx, place_ty) {
                        self.state.qualif.insert(borrowed_place.local);
                        self.state.borrow.insert(borrowed_place.local);
                    }
                }
            }

            mir::Rvalue::Ref(_, kind, borrowed_place) => {
                if !borrowed_place.is_indirect() && self.ref_allows_mutation(*kind, *borrowed_place)
                {
                    let place_ty = borrowed_place.ty(self.ccx.body, self.ccx.tcx).ty;
                    if Q::in_any_value_of_ty(self.ccx, place_ty) {
                        self.state.qualif.insert(borrowed_place.local);
                        self.state.borrow.insert(borrowed_place.local);
                    }
                }
            }

            mir::Rvalue::Cast(..)
            | mir::Rvalue::ShallowInitBox(..)
            | mir::Rvalue::Use(..)
            | mir::Rvalue::CopyForDeref(..)
            | mir::Rvalue::ThreadLocalRef(..)
            | mir::Rvalue::Repeat(..)
            | mir::Rvalue::Len(..)
            | mir::Rvalue::BinaryOp(..)
            | mir::Rvalue::NullaryOp(..)
            | mir::Rvalue::UnaryOp(..)
            | mir::Rvalue::Discriminant(..)
            | mir::Rvalue::Aggregate(..)
            | mir::Rvalue::WrapUnsafeBinder(..) => {}
        }
    }

    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        match statement.kind {
            StatementKind::StorageDead(local) => {
                self.state.qualif.remove(local);
                self.state.borrow.remove(local);
            }
            _ => self.super_statement(statement, location),
        }
    }

    fn visit_terminator(&mut self, terminator: &mir::Terminator<'tcx>, location: Location) {
        // The effect of assignment to the return place in `TerminatorKind::Call` is not applied
        // here; that occurs in `apply_call_return_effect`.

        // We ignore borrows on drop because custom drop impls are not allowed in consts.
        // FIXME: Reconsider if accounting for borrows in drops is necessary for const drop.
        self.super_terminator(terminator, location);
    }
}

/// The dataflow analysis used to propagate qualifs on arbitrary CFGs.
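///
/// A sketch of how callers within `check_consts` typically drive this analysis (illustrative;
/// the actual call sites may differ in detail). `location` and `local` stand in for whatever
/// program point and local are being queried:
/// ```ignore (illustrative)
/// let mut cursor = FlowSensitiveAnalysis::new(NeedsDrop, ccx)
///     .iterate_to_fixpoint(ccx.tcx, ccx.body, None)
///     .into_results_cursor(ccx.body);
/// cursor.seek_before_primary_effect(location);
/// let local_is_qualified = cursor.get().contains(local);
/// ```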
pub(super) struct FlowSensitiveAnalysis<'a, 'mir, 'tcx, Q> {
    ccx: &'a ConstCx<'mir, 'tcx>,
    _qualif: PhantomData<Q>,
}

impl<'a, 'mir, 'tcx, Q> FlowSensitiveAnalysis<'a, 'mir, 'tcx, Q>
where
    Q: Qualif,
{
    pub(super) fn new(_: Q, ccx: &'a ConstCx<'mir, 'tcx>) -> Self {
        FlowSensitiveAnalysis { ccx, _qualif: PhantomData }
    }

    fn transfer_function(&self, state: &'a mut State) -> TransferFunction<'a, 'mir, 'tcx, Q> {
        TransferFunction::<Q>::new(self.ccx, state)
    }
}

/// The state for the `FlowSensitiveAnalysis` dataflow analysis. This domain is likely homogeneous
/// and can be large, so we use a bitset that can be sparse (cf. issue #134404).
#[derive(Debug, PartialEq, Eq)]
pub(super) struct State {
    /// Describes whether a local contains the qualif.
    pub qualif: MixedBitSet<Local>,
    /// Describes whether a local's address escaped and it might become qualified as a result of
    /// an indirect mutation.
    pub borrow: MixedBitSet<Local>,
}

impl Clone for State {
    fn clone(&self) -> Self {
        State { qualif: self.qualif.clone(), borrow: self.borrow.clone() }
    }

    // The dataflow engine uses `clone_from` for domain values when possible. Providing an
    // implementation here avoids some intermediate memory allocations.
    fn clone_from(&mut self, other: &Self) {
        self.qualif.clone_from(&other.qualif);
        self.borrow.clone_from(&other.borrow);
    }
}

impl State {
    #[inline]
    pub(super) fn contains(&self, local: Local) -> bool {
        self.qualif.contains(local)
    }
}

impl<C> DebugWithContext<C> for State {
    fn fmt_with(&self, ctxt: &C, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("qualif: ")?;
        self.qualif.fmt_with(ctxt, f)?;
        f.write_str(" borrow: ")?;
        self.borrow.fmt_with(ctxt, f)?;
        Ok(())
    }

    fn fmt_diff_with(&self, old: &Self, ctxt: &C, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self == old {
            return Ok(());
        }

        if self.qualif != old.qualif {
            f.write_str("qualif: ")?;
            self.qualif.fmt_diff_with(&old.qualif, ctxt, f)?;
            f.write_str("\n")?;
        }

        if self.borrow != old.borrow {
            f.write_str("borrow: ")?;
            self.borrow.fmt_diff_with(&old.borrow, ctxt, f)?;
            f.write_str("\n")?;
        }

        Ok(())
    }
}

impl JoinSemiLattice for State {
    fn join(&mut self, other: &Self) -> bool {
        // Join both components unconditionally; using `||` directly would short-circuit and
        // skip joining `borrow` whenever `qualif` changed.
        let qualif_changed = self.qualif.join(&other.qualif);
        let borrow_changed = self.borrow.join(&other.borrow);
        qualif_changed || borrow_changed
    }
}

impl<'tcx, Q> Analysis<'tcx> for FlowSensitiveAnalysis<'_, '_, 'tcx, Q>
where
    Q: Qualif,
{
    type Domain = State;

    const NAME: &'static str = Q::ANALYSIS_NAME;

    fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
        State {
            qualif: MixedBitSet::new_empty(body.local_decls.len()),
            borrow: MixedBitSet::new_empty(body.local_decls.len()),
        }
    }

    fn initialize_start_block(&self, _body: &mir::Body<'tcx>, state: &mut Self::Domain) {
        self.transfer_function(state).initialize_state();
    }

    fn apply_primary_statement_effect(
        &mut self,
        state: &mut Self::Domain,
        statement: &mir::Statement<'tcx>,
        location: Location,
    ) {
        self.transfer_function(state).visit_statement(statement, location);
    }

    fn apply_primary_terminator_effect<'mir>(
        &mut self,
        state: &mut Self::Domain,
        terminator: &'mir mir::Terminator<'tcx>,
        location: Location,
    ) -> TerminatorEdges<'mir, 'tcx> {
        self.transfer_function(state).visit_terminator(terminator, location);
        terminator.edges()
    }

    fn apply_call_return_effect(
        &mut self,
        state: &mut Self::Domain,
        block: BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        self.transfer_function(state).apply_call_return_effect(block, return_places)
    }
}