use rustc_ast::Mutability;
use rustc_hir::{Expr, ExprKind, UnOp};
use rustc_middle::ty::layout::LayoutOf as _;
use rustc_middle::ty::{self, layout::TyAndLayout};
use rustc_session::{declare_lint, declare_lint_pass};
use rustc_span::sym;

use crate::{lints::InvalidReferenceCastingDiag, LateContext, LateLintPass, LintContext};

declare_lint! {
    /// The `invalid_reference_casting` lint checks for casts of `&T` to `&mut T`
    /// without using interior mutability.
    ///
    /// ### Example
    ///
    /// ```rust,compile_fail
    /// fn x(r: &i32) {
    ///     unsafe {
    ///         *(r as *const i32 as *mut i32) += 1;
    ///     }
    /// }
    /// ```
    ///
    /// {{produces}}
    ///
    /// ### Explanation
    ///
    /// Casting `&T` to `&mut T` without using interior mutability is undefined behavior,
    /// as it violates Rust's reference aliasing requirements.
    ///
    /// `UnsafeCell` is the only way to obtain aliasable data that is considered
    /// mutable.
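    ///
    /// A sound alternative is a type with interior mutability; a minimal
    /// sketch using `Cell`:
    ///
    /// ```rust
    /// use std::cell::Cell;
    ///
    /// fn x(r: &Cell<i32>) {
    ///     r.set(r.get() + 1);
    /// }
    /// ```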
    INVALID_REFERENCE_CASTING,
    Deny,
    "casts of `&T` to `&mut T` without interior mutability"
}

declare_lint_pass!(InvalidReferenceCasting => [INVALID_REFERENCE_CASTING]);

impl<'tcx> LateLintPass<'tcx> for InvalidReferenceCasting {
    fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
        if let Some((e, pat)) = borrow_or_assign(cx, expr) {
            let init = cx.expr_or_init(e);
            let orig_cast = if init.span != e.span { Some(init.span) } else { None };

            // small cache to avoid needlessly recomputing `peel_casts` of `init`
            let mut peel_casts = {
                let mut peel_casts_cache = None;
                move || *peel_casts_cache.get_or_insert_with(|| peel_casts(cx, init))
            };

            if matches!(pat, PatternKind::Borrow { mutbl: Mutability::Mut } | PatternKind::Assign)
                && let Some(ty_has_interior_mutability) =
                    is_cast_from_ref_to_mut_ptr(cx, init, &mut peel_casts)
            {
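                // the diagnostic expects an `Option<()>` flag, so map the bool accordingly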
                let ty_has_interior_mutability = ty_has_interior_mutability.then_some(());

                cx.emit_span_lint(
                    INVALID_REFERENCE_CASTING,
                    expr.span,
                    if pat == PatternKind::Assign {
                        InvalidReferenceCastingDiag::AssignToRef {
                            orig_cast,
                            ty_has_interior_mutability,
                        }
                    } else {
                        InvalidReferenceCastingDiag::BorrowAsMut {
                            orig_cast,
                            ty_has_interior_mutability,
                        }
                    },
                );
            }

            if let Some((from_ty_layout, to_ty_layout, e_alloc)) =
                is_cast_to_bigger_memory_layout(cx, init, &mut peel_casts)
            {
                cx.emit_span_lint(
                    INVALID_REFERENCE_CASTING,
                    expr.span,
                    InvalidReferenceCastingDiag::BiggerLayout {
                        orig_cast,
                        alloc: e_alloc.span,
                        from_ty: from_ty_layout.ty,
                        from_size: from_ty_layout.layout.size().bytes(),
                        to_ty: to_ty_layout.ty,
                        to_size: to_ty_layout.layout.size().bytes(),
                    },
                );
            }
        }
    }
}

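/// How the casted pointer is used: borrowed (`&` or `&mut`) behind a deref,
/// or written to (`*ptr = ...`, `*ptr += ...`, `ptr::write`-like calls).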
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum PatternKind {
    Borrow { mutbl: Mutability },
    Assign,
}

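/// Returns the pointer expression and the [`PatternKind`] if `e` is a borrow
/// of a dereference (`&(mut) *<ptr>`), an assignment through a dereference
/// (`*<ptr> = ...`, `*<ptr> += ...`), or a call to `ptr::write`,
/// `ptr::write_volatile` or `ptr::write_unaligned`.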
fn borrow_or_assign<'tcx>(
    cx: &LateContext<'tcx>,
    e: &'tcx Expr<'tcx>,
) -> Option<(&'tcx Expr<'tcx>, PatternKind)> {
    fn deref_assign_or_addr_of<'tcx>(
        expr: &'tcx Expr<'tcx>,
    ) -> Option<(&'tcx Expr<'tcx>, PatternKind)> {
        // &(mut) <expr>
        let (inner, pat) = if let ExprKind::AddrOf(_, mutbl, expr) = expr.kind {
            (expr, PatternKind::Borrow { mutbl })
        // <expr> = ...
        } else if let ExprKind::Assign(expr, _, _) = expr.kind {
            (expr, PatternKind::Assign)
        // <expr> += ...
        } else if let ExprKind::AssignOp(_, expr, _) = expr.kind {
            (expr, PatternKind::Assign)
        } else {
            return None;
        };

        // *<inner>
        let ExprKind::Unary(UnOp::Deref, e) = &inner.kind else {
            return None;
        };
        Some((e, pat))
    }

    fn ptr_write<'tcx>(
        cx: &LateContext<'tcx>,
        e: &'tcx Expr<'tcx>,
    ) -> Option<(&'tcx Expr<'tcx>, PatternKind)> {
        if let ExprKind::Call(path, [arg_ptr, _arg_val]) = e.kind
            && let ExprKind::Path(ref qpath) = path.kind
            && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
            && matches!(
                cx.tcx.get_diagnostic_name(def_id),
                Some(sym::ptr_write | sym::ptr_write_volatile | sym::ptr_write_unaligned)
            )
        {
            Some((arg_ptr, PatternKind::Assign))
        } else {
            None
        }
    }

    deref_assign_or_addr_of(e).or_else(|| ptr_write(cx, e))
}

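/// Checks if `orig_expr` is the result of casting a shared reference `&T` to
/// a mutable raw pointer `*mut T` (peeling intermediate casts via
/// `peel_casts`). Returns whether the pointee type has interior mutability,
/// or `None` if this is not such a cast or if the cast is valid because it
/// goes through `UnsafeCell::raw_get`.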
fn is_cast_from_ref_to_mut_ptr<'tcx>(
    cx: &LateContext<'tcx>,
    orig_expr: &'tcx Expr<'tcx>,
    mut peel_casts: impl FnMut() -> (&'tcx Expr<'tcx>, bool),
) -> Option<bool> {
    let end_ty = cx.typeck_results().node_type(orig_expr.hir_id);

    // Bail out early if the end type is **not** a mutable pointer.
    if !matches!(end_ty.kind(), ty::RawPtr(_, Mutability::Mut)) {
        return None;
    }

    let (e, need_check_freeze) = peel_casts();

    let start_ty = cx.typeck_results().node_type(e.hir_id);
    if let ty::Ref(_, inner_ty, Mutability::Not) = start_ty.kind() {
        // If an `UnsafeCell` method is involved, we need to additionally check
        // the inner type for the `Freeze` trait (i.e. that it does NOT contain
        // an `UnsafeCell`), since in that case we would incorrectly lint on
        // valid casts.
        //
        // The exception is non-concrete skeleton types (i.e. generics), since
        // there is no way to make the cast safe for arbitrary types.
        let inner_ty_has_interior_mutability =
            !inner_ty.is_freeze(cx.tcx, cx.param_env) && inner_ty.has_concrete_skeleton();
        (!need_check_freeze || !inner_ty_has_interior_mutability)
            .then_some(inner_ty_has_interior_mutability)
    } else {
        None
    }
}

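/// Checks if `orig_expr` casts a reference to a raw pointer whose pointee
/// type is bigger than both the original pointee type and the underlying
/// allocation. If so, returns the source layout, the target layout, and the
/// expression of the underlying allocation.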
fn is_cast_to_bigger_memory_layout<'tcx>(
    cx: &LateContext<'tcx>,
    orig_expr: &'tcx Expr<'tcx>,
    mut peel_casts: impl FnMut() -> (&'tcx Expr<'tcx>, bool),
) -> Option<(TyAndLayout<'tcx>, TyAndLayout<'tcx>, Expr<'tcx>)> {
    let end_ty = cx.typeck_results().node_type(orig_expr.hir_id);

    let ty::RawPtr(inner_end_ty, _) = end_ty.kind() else {
        return None;
    };

    let (e, _) = peel_casts();
    let start_ty = cx.typeck_results().node_type(e.hir_id);

    let ty::Ref(_, inner_start_ty, _) = start_ty.kind() else {
        return None;
    };

    // try to find the underlying allocation
    let e_alloc = cx.expr_or_init(e);
    let e_alloc =
        if let ExprKind::AddrOf(_, _, inner_expr) = e_alloc.kind { inner_expr } else { e_alloc };

    // If the current expr looks like `&mut expr[index]`, then just looking at
    // `expr[index]` won't give us the underlying allocation, so we skip it.
    // The same logic applies to field accesses (`&mut expr.field`) and
    // reborrows (`&mut *expr`).
    if let ExprKind::Index(..) | ExprKind::Field(..) | ExprKind::Unary(UnOp::Deref, ..) =
        e_alloc.kind
    {
        return None;
    }

    let alloc_ty = cx.typeck_results().node_type(e_alloc.hir_id);

    // If the "allocation" is itself a pointer, we did not find the underlying
    // allocation, so we bail out, as this may not be UB;
    // see https://github.com/rust-lang/unsafe-code-guidelines/issues/256
    if alloc_ty.is_any_ptr() {
        return None;
    }

    let from_layout = cx.layout_of(*inner_start_ty).ok()?;

    // if the type isn't sized, we bail out instead of potentially giving
    // the user a meaningless warning
    if from_layout.is_unsized() {
        return None;
    }

    let alloc_layout = cx.layout_of(alloc_ty).ok()?;
    let to_layout = cx.layout_of(*inner_end_ty).ok()?;

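    // lint only if the target type is bigger than both the source type
    // and the entire underlying allocation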
    if to_layout.layout.size() > from_layout.layout.size()
        && to_layout.layout.size() > alloc_layout.layout.size()
    {
        Some((from_layout, to_layout, *e_alloc))
    } else {
        None
    }
}

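/// Peels `as` casts, `.cast()`-family method calls, `ptr::from_ref`,
/// `UnsafeCell::raw_get` and `mem::transmute` calls, as well as local
/// initializers, off of `e`. Returns the innermost expression and whether
/// `UnsafeCell::raw_get` was encountered along the way.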
fn peel_casts<'tcx>(cx: &LateContext<'tcx>, mut e: &'tcx Expr<'tcx>) -> (&'tcx Expr<'tcx>, bool) {
    let mut gone_through_unsafe_cell_raw_get = false;

    loop {
        e = e.peel_blocks();
        // <expr> as ...
        e = if let ExprKind::Cast(expr, _) = e.kind {
            expr
        // <expr>.cast(), <expr>.cast_mut() or <expr>.cast_const()
        } else if let ExprKind::MethodCall(_, expr, [], _) = e.kind
            && let Some(def_id) = cx.typeck_results().type_dependent_def_id(e.hir_id)
            && matches!(
                cx.tcx.get_diagnostic_name(def_id),
                Some(sym::ptr_cast | sym::const_ptr_cast | sym::ptr_cast_mut | sym::ptr_cast_const)
            )
        {
            expr
        // ptr::from_ref(<expr>), UnsafeCell::raw_get(<expr>) or mem::transmute<_, _>(<expr>)
        } else if let ExprKind::Call(path, [arg]) = e.kind
            && let ExprKind::Path(ref qpath) = path.kind
            && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
            && matches!(
                cx.tcx.get_diagnostic_name(def_id),
                Some(sym::ptr_from_ref | sym::unsafe_cell_raw_get | sym::transmute)
            )
        {
            if cx.tcx.is_diagnostic_item(sym::unsafe_cell_raw_get, def_id) {
                gone_through_unsafe_cell_raw_get = true;
            }
            arg
        } else {
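            // otherwise, follow the expression back to its initializer
            // (e.g. a `let` binding) and keep peeling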
            let init = cx.expr_or_init(e);
            if init.hir_id != e.hir_id {
                init
            } else {
                break;
            }
        };
    }

    (e, gone_through_unsafe_cell_raw_get)
}