// rustc_lint/reference_casting.rs

use rustc_ast::Mutability;
use rustc_hir::{Expr, ExprKind, UnOp};
use rustc_middle::ty::layout::{LayoutOf as _, TyAndLayout};
use rustc_middle::ty::{self};
use rustc_session::{declare_lint, declare_lint_pass};
use rustc_span::sym;

use crate::lints::InvalidReferenceCastingDiag;
use crate::{LateContext, LateLintPass, LintContext};

declare_lint! {
    /// The `invalid_reference_casting` lint checks for casts of `&T` to `&mut T`
    /// without using interior mutability.
    ///
    /// ### Example
    ///
    /// ```rust,compile_fail
    /// fn x(r: &i32) {
    ///     unsafe {
    ///         *(r as *const i32 as *mut i32) += 1;
    ///     }
    /// }
    /// ```
    ///
    /// {{produces}}
    ///
    /// ### Explanation
    ///
    /// Casting `&T` to `&mut T` without using interior mutability is undefined behavior,
    /// as it violates Rust's reference aliasing requirements.
    ///
    /// `UnsafeCell` is the only way to obtain aliasable data that is considered
    /// mutable.
    INVALID_REFERENCE_CASTING,
    Deny,
    "casts of `&T` to `&mut T` without interior mutability"
}

declare_lint_pass!(InvalidReferenceCasting => [INVALID_REFERENCE_CASTING]);

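// The pass inspects every expression: `check_expr` first recognizes a borrow
// of, or write through, a casted pointer (`borrow_or_assign`), peels the cast
// chain back to its origin, and then reports either a `&T` to `*mut T` cast
// or a cast to a type with a bigger memory layout.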
impl<'tcx> LateLintPass<'tcx> for InvalidReferenceCasting {
    fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
        if let Some((e, pat)) = borrow_or_assign(cx, expr) {
            let init = cx.expr_or_init(e);
            let orig_cast = if init.span != e.span { Some(init.span) } else { None };

            // small cache to avoid needlessly recomputing `peel_casts` of `init`
            let mut peel_casts = {
                let mut peel_casts_cache = None;
                move || *peel_casts_cache.get_or_insert_with(|| peel_casts(cx, init))
            };

            if matches!(pat, PatternKind::Borrow { mutbl: Mutability::Mut } | PatternKind::Assign)
                && let Some(ty_has_interior_mutability) =
                    is_cast_from_ref_to_mut_ptr(cx, init, &mut peel_casts)
            {
                cx.emit_span_lint(
                    INVALID_REFERENCE_CASTING,
                    expr.span,
                    if pat == PatternKind::Assign {
                        InvalidReferenceCastingDiag::AssignToRef {
                            orig_cast,
                            ty_has_interior_mutability,
                        }
                    } else {
                        InvalidReferenceCastingDiag::BorrowAsMut {
                            orig_cast,
                            ty_has_interior_mutability,
                        }
                    },
                );
            }

            if let Some((from_ty_layout, to_ty_layout, e_alloc)) =
                is_cast_to_bigger_memory_layout(cx, init, &mut peel_casts)
            {
                cx.emit_span_lint(
                    INVALID_REFERENCE_CASTING,
                    expr.span,
                    InvalidReferenceCastingDiag::BiggerLayout {
                        orig_cast,
                        alloc: e_alloc.span,
                        from_ty: from_ty_layout.ty,
                        from_size: from_ty_layout.layout.size().bytes(),
                        to_ty: to_ty_layout.ty,
                        to_size: to_ty_layout.layout.size().bytes(),
                    },
                );
            }
        }
    }
}

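/// How the casted pointer ends up being used: behind a borrow (`&`/`&mut`)
/// or as the target of a write (`=`, `+=`, `ptr::write` and friends).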
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum PatternKind {
    Borrow { mutbl: Mutability },
    Assign,
}

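/// Matches the two shapes this lint cares about and returns the inner pointer
/// expression together with the pattern found: a borrow of, or write through,
/// a deref (`&mut *<ptr>`, `*<ptr> = ..`, `*<ptr> += ..`), or a call to
/// `ptr::write`/`ptr::write_volatile`/`ptr::write_unaligned`.
///
/// For example (illustrative only), given `*(r as *const i32 as *mut i32) += 1`,
/// this returns the expression `r as *const i32 as *mut i32` paired with
/// `PatternKind::Assign`.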
fn borrow_or_assign<'tcx>(
    cx: &LateContext<'tcx>,
    e: &'tcx Expr<'tcx>,
) -> Option<(&'tcx Expr<'tcx>, PatternKind)> {
    fn deref_assign_or_addr_of<'tcx>(
        expr: &'tcx Expr<'tcx>,
    ) -> Option<(&'tcx Expr<'tcx>, PatternKind)> {
        // &(mut) <expr>
        let (inner, pat) = if let ExprKind::AddrOf(_, mutbl, expr) = expr.kind {
            (expr, PatternKind::Borrow { mutbl })
        // <expr> = ...
        } else if let ExprKind::Assign(expr, _, _) = expr.kind {
            (expr, PatternKind::Assign)
        // <expr> += ...
        } else if let ExprKind::AssignOp(_, expr, _) = expr.kind {
            (expr, PatternKind::Assign)
        } else {
            return None;
        };

        // *<inner>
        let ExprKind::Unary(UnOp::Deref, e) = &inner.kind else {
            return None;
        };
        Some((e, pat))
    }

    fn ptr_write<'tcx>(
        cx: &LateContext<'tcx>,
        e: &'tcx Expr<'tcx>,
    ) -> Option<(&'tcx Expr<'tcx>, PatternKind)> {
        if let ExprKind::Call(path, [arg_ptr, _arg_val]) = e.kind
            && let ExprKind::Path(ref qpath) = path.kind
            && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
            && matches!(
                cx.tcx.get_diagnostic_name(def_id),
                Some(sym::ptr_write | sym::ptr_write_volatile | sym::ptr_write_unaligned)
            )
        {
            Some((arg_ptr, PatternKind::Assign))
        } else {
            None
        }
    }

    deref_assign_or_addr_of(e).or_else(|| ptr_write(cx, e))
}

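/// Checks whether `orig_expr` is, after peeling casts, a cast of an immutable
/// reference (`&T`) to a mutable raw pointer (`*mut T`).
///
/// Returns `Some` when the cast should be linted; the boolean payload records
/// whether `T` has interior mutability (it only tailors the diagnostic). When
/// the chain went through `UnsafeCell::raw_get` and `T` is not `Freeze`, the
/// cast is legitimate and `None` is returned.
///
/// ```ignore (illustrative)
/// let r = &0i32;
/// let p = r as *const i32 as *mut i32; // start `&i32`, end `*mut i32` => Some(false)
/// ```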
fn is_cast_from_ref_to_mut_ptr<'tcx>(
    cx: &LateContext<'tcx>,
    orig_expr: &'tcx Expr<'tcx>,
    mut peel_casts: impl FnMut() -> (&'tcx Expr<'tcx>, bool),
) -> Option<bool> {
    let end_ty = cx.typeck_results().node_type(orig_expr.hir_id);

    // Bail out early if the end type is **not** a mutable pointer.
    if !matches!(end_ty.kind(), ty::RawPtr(_, Mutability::Mut)) {
        return None;
    }

    let (e, need_check_freeze) = peel_casts();

    let start_ty = cx.typeck_results().node_type(e.hir_id);
    if let ty::Ref(_, inner_ty, Mutability::Not) = start_ty.kind() {
        // If an `UnsafeCell` method is involved, we need to additionally check the
        // inner type for the presence of the `Freeze` trait (i.e. it does NOT contain
        // an `UnsafeCell`), since in that case we would incorrectly lint on valid casts.
        //
        // Except in the presence of non-concrete skeleton types (i.e. generics),
        // since there is no way to make the cast safe for arbitrary types.
        let inner_ty_has_interior_mutability =
            !inner_ty.is_freeze(cx.tcx, cx.typing_env()) && inner_ty.has_concrete_skeleton();
        (!need_check_freeze || !inner_ty_has_interior_mutability)
            .then_some(inner_ty_has_interior_mutability)
    } else {
        None
    }
}

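/// Checks whether the cast creates a pointer whose pointee type has a strictly
/// bigger layout than both the original pointee and the underlying allocation,
/// in which case any access through it would be out of bounds.
///
/// On a hit, returns the source and destination pointee layouts together with
/// the expression that produced the allocation.
///
/// ```ignore (illustrative)
/// let x = 0u8;
/// let p = &x as *const u8 as *const u64;
/// let r = unsafe { &*p }; // borrow of 8 bytes from a 1-byte allocation
/// ```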
fn is_cast_to_bigger_memory_layout<'tcx>(
    cx: &LateContext<'tcx>,
    orig_expr: &'tcx Expr<'tcx>,
    mut peel_casts: impl FnMut() -> (&'tcx Expr<'tcx>, bool),
) -> Option<(TyAndLayout<'tcx>, TyAndLayout<'tcx>, Expr<'tcx>)> {
    let end_ty = cx.typeck_results().node_type(orig_expr.hir_id);

    let ty::RawPtr(inner_end_ty, _) = end_ty.kind() else {
        return None;
    };

    let (e, _) = peel_casts();
    let start_ty = cx.typeck_results().node_type(e.hir_id);

    let ty::Ref(_, inner_start_ty, _) = start_ty.kind() else {
        return None;
    };

    // try to find the underlying allocation
    let e_alloc = cx.expr_or_init(e);
    let e_alloc =
        if let ExprKind::AddrOf(_, _, inner_expr) = e_alloc.kind { inner_expr } else { e_alloc };

    // If the current expression looks like `&mut expr[index]`, then just looking
    // at `expr[index]` won't give us the underlying allocation, so we skip it.
    // The same logic applies to field access (`&mut expr.field`) and reborrows
    // (`&mut *expr`).
    if let ExprKind::Index(..) | ExprKind::Field(..) | ExprKind::Unary(UnOp::Deref, ..) =
        e_alloc.kind
    {
        return None;
    }

    let alloc_ty = cx.typeck_results().node_type(e_alloc.hir_id);

    // if the allocation is itself behind a pointer, we bail out, as this may not be UB
    // see https://github.com/rust-lang/unsafe-code-guidelines/issues/256
    if alloc_ty.is_any_ptr() {
        return None;
    }

    let from_layout = cx.layout_of(*inner_start_ty).ok()?;

    // if the type isn't sized, we bail out, instead of potentially giving
    // the user a meaningless warning
    if from_layout.is_unsized() {
        return None;
    }

    let alloc_layout = cx.layout_of(alloc_ty).ok()?;
    let to_layout = cx.layout_of(*inner_end_ty).ok()?;

    if to_layout.layout.size() > from_layout.layout.size()
        && to_layout.layout.size() > alloc_layout.layout.size()
    {
        Some((from_layout, to_layout, *e_alloc))
    } else {
        None
    }
}

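/// Peels `e` down through chains of pointer casts: `as` casts, the
/// `cast`/`cast_mut`/`cast_const` methods, and calls to `ptr::from_ref`,
/// `UnsafeCell::raw_get` or `mem::transmute`, following local initializers
/// along the way.
///
/// Returns the innermost expression and whether the chain went through
/// `UnsafeCell::raw_get`, in which case the caller must also take `Freeze`
/// into account. For example (illustrative), peeling
/// `(r as *const i32).cast_mut()` yields `r` and `false`.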
fn peel_casts<'tcx>(cx: &LateContext<'tcx>, mut e: &'tcx Expr<'tcx>) -> (&'tcx Expr<'tcx>, bool) {
    let mut gone_through_unsafe_cell_raw_get = false;

    loop {
        e = e.peel_blocks();
        // <expr> as ...
        e = if let ExprKind::Cast(expr, _) = e.kind {
            expr
        // <expr>.cast(), <expr>.cast_mut() or <expr>.cast_const()
        } else if let ExprKind::MethodCall(_, expr, [], _) = e.kind
            && let Some(def_id) = cx.typeck_results().type_dependent_def_id(e.hir_id)
            && matches!(
                cx.tcx.get_diagnostic_name(def_id),
                Some(sym::ptr_cast | sym::const_ptr_cast | sym::ptr_cast_mut | sym::ptr_cast_const)
            )
        {
            expr
        // ptr::from_ref(<expr>), UnsafeCell::raw_get(<expr>) or mem::transmute::<_, _>(<expr>)
        } else if let ExprKind::Call(path, [arg]) = e.kind
            && let ExprKind::Path(ref qpath) = path.kind
            && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
            && matches!(
                cx.tcx.get_diagnostic_name(def_id),
                Some(sym::ptr_from_ref | sym::unsafe_cell_raw_get | sym::transmute)
            )
        {
            if cx.tcx.is_diagnostic_item(sym::unsafe_cell_raw_get, def_id) {
                gone_through_unsafe_cell_raw_get = true;
            }
            arg
        } else {
            let init = cx.expr_or_init(e);
            if init.hir_id != e.hir_id {
                init
            } else {
                break;
            }
        };
    }

    (e, gone_through_unsafe_cell_raw_get)
}