1use std::iter;
2
3use rustc_abi::{BackendRepr, TagEncoding, Variants, WrappingRange};
4use rustc_ast as ast;
5use rustc_hir as hir;
6use rustc_hir::{Expr, ExprKind, HirId, LangItem, find_attr};
7use rustc_middle::bug;
8use rustc_middle::ty::layout::{LayoutOf, SizeSkeleton};
9use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt};
10use rustc_session::{declare_lint, declare_lint_pass, impl_lint_pass};
11use rustc_span::{Span, Symbol, sym};
12use tracing::debug;
13
14mod improper_ctypes; pub(crate) use improper_ctypes::ImproperCTypesLint;
16
17use crate::lints::{
18 AmbiguousWidePointerComparisons, AmbiguousWidePointerComparisonsAddrMetadataSuggestion,
19 AmbiguousWidePointerComparisonsAddrSuggestion, AmbiguousWidePointerComparisonsCastSuggestion,
20 AmbiguousWidePointerComparisonsExpectSuggestion, AtomicOrderingFence, AtomicOrderingLoad,
21 AtomicOrderingStore, InvalidAtomicOrderingDiag, InvalidNanComparisons,
22 InvalidNanComparisonsSuggestion, UnpredictableFunctionPointerComparisons,
23 UnpredictableFunctionPointerComparisonsSuggestion, UnusedComparisons,
24 VariantSizeDifferencesDiag,
25};
26use crate::{LateContext, LateLintPass, LintContext};
27
28mod literal;
29use literal::{int_ty_range, lint_literal, uint_ty_range};
30
31#[doc = r" The `unused_comparisons` lint detects comparisons made useless by"]
#[doc = r" limits of the types involved."]
#[doc = r""]
#[doc = r" ### Example"]
#[doc = r""]
#[doc = r" ```rust"]
#[doc = r" fn foo(x: u8) {"]
#[doc = r" x >= 0;"]
#[doc = r" }"]
#[doc = r" ```"]
#[doc = r""]
#[doc = r" {{produces}}"]
#[doc = r""]
#[doc = r" ### Explanation"]
#[doc = r""]
#[doc =
r" A useless comparison may indicate a mistake, and should be fixed or"]
#[doc = r" removed."]
static UNUSED_COMPARISONS: &::rustc_lint_defs::Lint =
&::rustc_lint_defs::Lint {
name: "UNUSED_COMPARISONS",
default_level: ::rustc_lint_defs::Warn,
desc: "comparisons made useless by limits of the types involved",
is_externally_loaded: false,
..::rustc_lint_defs::Lint::default_fields_for_macro()
};declare_lint! {
32 UNUSED_COMPARISONS,
50 Warn,
51 "comparisons made useless by limits of the types involved"
52}
53
54#[doc =
r" The `overflowing_literals` lint detects literals out of range for their type."]
#[doc = r""]
#[doc = r" ### Example"]
#[doc = r""]
#[doc = r" ```rust,compile_fail"]
#[doc = r" let x: u8 = 1000;"]
#[doc = r" ```"]
#[doc = r""]
#[doc = r" {{produces}}"]
#[doc = r""]
#[doc = r" ### Explanation"]
#[doc = r""]
#[doc = r" It is usually a mistake to use a literal that overflows its type"]
#[doc = r" Change either the literal or its type such that the literal is"]
#[doc = r" within the range of its type."]
static OVERFLOWING_LITERALS: &::rustc_lint_defs::Lint =
&::rustc_lint_defs::Lint {
name: "OVERFLOWING_LITERALS",
default_level: ::rustc_lint_defs::Deny,
desc: "literal out of range for its type",
is_externally_loaded: false,
..::rustc_lint_defs::Lint::default_fields_for_macro()
};declare_lint! {
55 OVERFLOWING_LITERALS,
71 Deny,
72 "literal out of range for its type"
73}
74
75#[doc =
r" The `variant_size_differences` lint detects enums with widely varying"]
#[doc = r" variant sizes."]
#[doc = r""]
#[doc = r" ### Example"]
#[doc = r""]
#[doc = r" ```rust,compile_fail"]
#[doc = r" #![deny(variant_size_differences)]"]
#[doc = r" enum En {"]
#[doc = r" V0(u8),"]
#[doc = r" VBig([u8; 1024]),"]
#[doc = r" }"]
#[doc = r" ```"]
#[doc = r""]
#[doc = r" {{produces}}"]
#[doc = r""]
#[doc = r" ### Explanation"]
#[doc = r""]
#[doc =
r" It can be a mistake to add a variant to an enum that is much larger"]
#[doc =
r" than the other variants, bloating the overall size required for all"]
#[doc = r" variants. This can impact performance and memory usage. This is"]
#[doc = r" triggered if one variant is more than 3 times larger than the"]
#[doc = r" second-largest variant."]
#[doc = r""]
#[doc =
r" Consider placing the large variant's contents on the heap (for example"]
#[doc = r" via [`Box`]) to keep the overall size of the enum itself down."]
#[doc = r""]
#[doc =
r#" This lint is "allow" by default because it can be noisy, and may not be"#]
#[doc = r" an actual problem. Decisions about this should be guided with"]
#[doc = r" profiling and benchmarking."]
#[doc = r""]
#[doc = r" [`Box`]: https://doc.rust-lang.org/std/boxed/index.html"]
static VARIANT_SIZE_DIFFERENCES: &::rustc_lint_defs::Lint =
&::rustc_lint_defs::Lint {
name: "VARIANT_SIZE_DIFFERENCES",
default_level: ::rustc_lint_defs::Allow,
desc: "detects enums with widely varying variant sizes",
is_externally_loaded: false,
..::rustc_lint_defs::Lint::default_fields_for_macro()
};declare_lint! {
76 VARIANT_SIZE_DIFFERENCES,
108 Allow,
109 "detects enums with widely varying variant sizes"
110}
111
112#[doc =
r" The `invalid_nan_comparisons` lint checks comparison with `f32::NAN` or `f64::NAN`"]
#[doc = r" as one of the operand."]
#[doc = r""]
#[doc = r" ### Example"]
#[doc = r""]
#[doc = r" ```rust"]
#[doc = r" let a = 2.3f32;"]
#[doc = r" if a == f32::NAN {}"]
#[doc = r" ```"]
#[doc = r""]
#[doc = r" {{produces}}"]
#[doc = r""]
#[doc = r" ### Explanation"]
#[doc = r""]
#[doc = r" NaN does not compare meaningfully to anything – not"]
#[doc = r" even itself – so those comparisons are always false."]
static INVALID_NAN_COMPARISONS: &::rustc_lint_defs::Lint =
&::rustc_lint_defs::Lint {
name: "INVALID_NAN_COMPARISONS",
default_level: ::rustc_lint_defs::Warn,
desc: "detects invalid floating point NaN comparisons",
is_externally_loaded: false,
..::rustc_lint_defs::Lint::default_fields_for_macro()
};declare_lint! {
113 INVALID_NAN_COMPARISONS,
130 Warn,
131 "detects invalid floating point NaN comparisons"
132}
133
134#[doc = r" The `ambiguous_wide_pointer_comparisons` lint checks comparison"]
#[doc = r" of `*const/*mut ?Sized` as the operands."]
#[doc = r""]
#[doc = r" ### Example"]
#[doc = r""]
#[doc = r" ```rust"]
#[doc = r" # struct A;"]
#[doc = r" # struct B;"]
#[doc = r""]
#[doc = r" # trait T {}"]
#[doc = r" # impl T for A {}"]
#[doc = r" # impl T for B {}"]
#[doc = r""]
#[doc = r" let ab = (A, B);"]
#[doc = r" let a = &ab.0 as *const dyn T;"]
#[doc = r" let b = &ab.1 as *const dyn T;"]
#[doc = r""]
#[doc = r" let _ = a == b;"]
#[doc = r" ```"]
#[doc = r""]
#[doc = r" {{produces}}"]
#[doc = r""]
#[doc = r" ### Explanation"]
#[doc = r""]
#[doc = r" The comparison includes metadata which may not be expected."]
static AMBIGUOUS_WIDE_POINTER_COMPARISONS: &::rustc_lint_defs::Lint =
&::rustc_lint_defs::Lint {
name: "AMBIGUOUS_WIDE_POINTER_COMPARISONS",
default_level: ::rustc_lint_defs::Warn,
desc: "detects ambiguous wide pointer comparisons",
is_externally_loaded: false,
..::rustc_lint_defs::Lint::default_fields_for_macro()
};declare_lint! {
135 AMBIGUOUS_WIDE_POINTER_COMPARISONS,
161 Warn,
162 "detects ambiguous wide pointer comparisons"
163}
164
165#[doc =
r" The `unpredictable_function_pointer_comparisons` lint checks comparison"]
#[doc = r" of function pointer as the operands."]
#[doc = r""]
#[doc = r" ### Example"]
#[doc = r""]
#[doc = r" ```rust"]
#[doc = r" fn a() {}"]
#[doc = r" fn b() {}"]
#[doc = r""]
#[doc = r" let f: fn() = a;"]
#[doc = r" let g: fn() = b;"]
#[doc = r""]
#[doc = r" let _ = f == g;"]
#[doc = r" ```"]
#[doc = r""]
#[doc = r" {{produces}}"]
#[doc = r""]
#[doc = r" ### Explanation"]
#[doc = r""]
#[doc =
r" Function pointers comparisons do not produce meaningful result since"]
#[doc =
r" they are never guaranteed to be unique and could vary between different"]
#[doc =
r" code generation units. Furthermore, different functions could have the"]
#[doc = r" same address after being merged together."]
static UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS: &::rustc_lint_defs::Lint =
&::rustc_lint_defs::Lint {
name: "UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS",
default_level: ::rustc_lint_defs::Warn,
desc: "detects unpredictable function pointer comparisons",
is_externally_loaded: false,
report_in_external_macro: true,
..::rustc_lint_defs::Lint::default_fields_for_macro()
};declare_lint! {
166 UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
190 Warn,
191 "detects unpredictable function pointer comparisons",
192 report_in_external_macro
193}
194
195#[derive(#[automatically_derived]
impl ::core::marker::Copy for TypeLimits { }Copy, #[automatically_derived]
impl ::core::clone::Clone for TypeLimits {
#[inline]
fn clone(&self) -> TypeLimits {
let _: ::core::clone::AssertParamIsClone<Option<hir::HirId>>;
let _: ::core::clone::AssertParamIsClone<Option<Span>>;
*self
}
}Clone, #[automatically_derived]
impl ::core::default::Default for TypeLimits {
#[inline]
fn default() -> TypeLimits {
TypeLimits {
negated_expr_id: ::core::default::Default::default(),
negated_expr_span: ::core::default::Default::default(),
}
}
}Default)]
196pub(crate) struct TypeLimits {
197 negated_expr_id: Option<hir::HirId>,
199 negated_expr_span: Option<Span>,
201}
202
203impl ::rustc_lint_defs::LintPass for TypeLimits {
fn name(&self) -> &'static str { "TypeLimits" }
fn get_lints(&self) -> ::rustc_lint_defs::LintVec {
::alloc::boxed::box_assume_init_into_vec_unsafe(::alloc::intrinsics::write_box_via_move(::alloc::boxed::Box::new_uninit(),
[UNUSED_COMPARISONS, OVERFLOWING_LITERALS,
INVALID_NAN_COMPARISONS, AMBIGUOUS_WIDE_POINTER_COMPARISONS,
UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS]))
}
}
impl TypeLimits {
#[allow(unused)]
pub fn lint_vec() -> ::rustc_lint_defs::LintVec {
::alloc::boxed::box_assume_init_into_vec_unsafe(::alloc::intrinsics::write_box_via_move(::alloc::boxed::Box::new_uninit(),
[UNUSED_COMPARISONS, OVERFLOWING_LITERALS,
INVALID_NAN_COMPARISONS, AMBIGUOUS_WIDE_POINTER_COMPARISONS,
UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS]))
}
}impl_lint_pass!(TypeLimits => [
204 UNUSED_COMPARISONS,
205 OVERFLOWING_LITERALS,
206 INVALID_NAN_COMPARISONS,
207 AMBIGUOUS_WIDE_POINTER_COMPARISONS,
208 UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS
209]);
210
211impl TypeLimits {
212 pub(crate) fn new() -> TypeLimits {
213 TypeLimits { negated_expr_id: None, negated_expr_span: None }
214 }
215}
216
217fn lint_nan<'tcx>(
218 cx: &LateContext<'tcx>,
219 e: &'tcx hir::Expr<'tcx>,
220 binop: hir::BinOpKind,
221 l: &'tcx hir::Expr<'tcx>,
222 r: &'tcx hir::Expr<'tcx>,
223) {
224 fn is_nan(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> bool {
225 let expr = expr.peel_blocks().peel_borrows();
226 match expr.kind {
227 ExprKind::Path(qpath) => {
228 let Some(def_id) = cx.typeck_results().qpath_res(&qpath, expr.hir_id).opt_def_id()
229 else {
230 return false;
231 };
232
233 #[allow(non_exhaustive_omitted_patterns)] match cx.tcx.get_diagnostic_name(def_id)
{
Some(sym::f16_nan | sym::f32_nan | sym::f64_nan | sym::f128_nan) => true,
_ => false,
}matches!(
234 cx.tcx.get_diagnostic_name(def_id),
235 Some(sym::f16_nan | sym::f32_nan | sym::f64_nan | sym::f128_nan)
236 )
237 }
238 _ => false,
239 }
240 }
241
242 fn eq_ne(
243 e: &hir::Expr<'_>,
244 l: &hir::Expr<'_>,
245 r: &hir::Expr<'_>,
246 f: impl FnOnce(Span, Span) -> InvalidNanComparisonsSuggestion,
247 ) -> InvalidNanComparisons {
248 let suggestion = if let Some(l_span) = l.span.find_ancestor_inside(e.span)
249 && let Some(r_span) = r.span.find_ancestor_inside(e.span)
250 {
251 f(l_span, r_span)
252 } else {
253 InvalidNanComparisonsSuggestion::Spanless
254 };
255
256 InvalidNanComparisons::EqNe { suggestion }
257 }
258
259 let lint = match binop {
260 hir::BinOpKind::Eq | hir::BinOpKind::Ne if is_nan(cx, l) => {
261 eq_ne(e, l, r, |l_span, r_span| InvalidNanComparisonsSuggestion::Spanful {
262 nan_plus_binop: l_span.until(r_span),
263 float: r_span.shrink_to_hi(),
264 neg: (binop == hir::BinOpKind::Ne).then(|| r_span.shrink_to_lo()),
265 })
266 }
267 hir::BinOpKind::Eq | hir::BinOpKind::Ne if is_nan(cx, r) => {
268 eq_ne(e, l, r, |l_span, r_span| InvalidNanComparisonsSuggestion::Spanful {
269 nan_plus_binop: l_span.shrink_to_hi().to(r_span),
270 float: l_span.shrink_to_hi(),
271 neg: (binop == hir::BinOpKind::Ne).then(|| l_span.shrink_to_lo()),
272 })
273 }
274 hir::BinOpKind::Lt | hir::BinOpKind::Le | hir::BinOpKind::Gt | hir::BinOpKind::Ge
275 if is_nan(cx, l) || is_nan(cx, r) =>
276 {
277 InvalidNanComparisons::LtLeGtGe
278 }
279 _ => return,
280 };
281
282 cx.emit_span_lint(INVALID_NAN_COMPARISONS, e.span, lint);
283}
284
285#[derive(#[automatically_derived]
impl ::core::fmt::Debug for ComparisonOp {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
ComparisonOp::BinOp(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "BinOp",
&__self_0),
ComparisonOp::Other =>
::core::fmt::Formatter::write_str(f, "Other"),
}
}
}Debug, #[automatically_derived]
impl ::core::cmp::PartialEq for ComparisonOp {
#[inline]
fn eq(&self, other: &ComparisonOp) -> bool {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
__self_discr == __arg1_discr &&
match (self, other) {
(ComparisonOp::BinOp(__self_0), ComparisonOp::BinOp(__arg1_0))
=> __self_0 == __arg1_0,
_ => true,
}
}
}PartialEq, #[automatically_derived]
impl ::core::marker::Copy for ComparisonOp { }Copy, #[automatically_derived]
impl ::core::clone::Clone for ComparisonOp {
#[inline]
fn clone(&self) -> ComparisonOp {
let _: ::core::clone::AssertParamIsClone<hir::BinOpKind>;
*self
}
}Clone)]
286enum ComparisonOp {
287 BinOp(hir::BinOpKind),
288 Other,
289}
290
291fn lint_wide_pointer<'tcx>(
292 cx: &LateContext<'tcx>,
293 e: &'tcx hir::Expr<'tcx>,
294 cmpop: ComparisonOp,
295 l: &'tcx hir::Expr<'tcx>,
296 r: &'tcx hir::Expr<'tcx>,
297) {
298 let ptr_unsized = |mut ty: Ty<'tcx>| -> Option<(
299 usize,
300 String,
301 bool,
302 )> {
303 let mut refs = 0;
304 while let ty::Ref(_, inner_ty, _) = ty.kind() {
307 ty = *inner_ty;
308 refs += 1;
309 }
310
311 let mut modifiers = String::new();
313 ty = match ty.kind() {
314 ty::RawPtr(ty, _) => *ty,
315 ty::Adt(def, args) if cx.tcx.is_diagnostic_item(sym::NonNull, def.did()) => {
316 modifiers.push_str(".as_ptr()");
317 args.type_at(0)
318 }
319 _ => return None,
320 };
321
322 (!ty.is_sized(cx.tcx, cx.typing_env()))
323 .then(|| (refs, modifiers, #[allow(non_exhaustive_omitted_patterns)] match ty.kind() {
ty::Dynamic(_, _) => true,
_ => false,
}matches!(ty.kind(), ty::Dynamic(_, _))))
324 };
325
326 let l = l.peel_borrows();
328 let r = r.peel_borrows();
329
330 let Some(l_ty) = cx.typeck_results().expr_ty_opt(l) else {
331 return;
332 };
333 let Some(r_ty) = cx.typeck_results().expr_ty_opt(r) else {
334 return;
335 };
336
337 let Some((l_ty_refs, l_modifiers, l_inner_ty_is_dyn)) = ptr_unsized(l_ty) else {
338 return;
339 };
340 let Some((r_ty_refs, r_modifiers, r_inner_ty_is_dyn)) = ptr_unsized(r_ty) else {
341 return;
342 };
343
344 let (Some(l_span), Some(r_span)) =
345 (l.span.find_ancestor_inside(e.span), r.span.find_ancestor_inside(e.span))
346 else {
347 return cx.emit_span_lint(
348 AMBIGUOUS_WIDE_POINTER_COMPARISONS,
349 e.span,
350 AmbiguousWidePointerComparisons::Spanless,
351 );
352 };
353
354 let ne = if cmpop == ComparisonOp::BinOp(hir::BinOpKind::Ne) { "!" } else { "" };
355 let is_eq_ne = #[allow(non_exhaustive_omitted_patterns)] match cmpop {
ComparisonOp::BinOp(hir::BinOpKind::Eq | hir::BinOpKind::Ne) => true,
_ => false,
}matches!(cmpop, ComparisonOp::BinOp(hir::BinOpKind::Eq | hir::BinOpKind::Ne));
356 let is_dyn_comparison = l_inner_ty_is_dyn && r_inner_ty_is_dyn;
357 let via_method_call = #[allow(non_exhaustive_omitted_patterns)] match &e.kind {
ExprKind::MethodCall(..) | ExprKind::Call(..) => true,
_ => false,
}matches!(&e.kind, ExprKind::MethodCall(..) | ExprKind::Call(..));
358
359 let left = e.span.shrink_to_lo().until(l_span.shrink_to_lo());
360 let middle = l_span.shrink_to_hi().until(r_span.shrink_to_lo());
361 let right = r_span.shrink_to_hi().until(e.span.shrink_to_hi());
362
363 let deref_left = &*"*".repeat(l_ty_refs);
364 let deref_right = &*"*".repeat(r_ty_refs);
365
366 let l_modifiers = &*l_modifiers;
367 let r_modifiers = &*r_modifiers;
368
369 cx.emit_span_lint(
370 AMBIGUOUS_WIDE_POINTER_COMPARISONS,
371 e.span,
372 if is_eq_ne {
373 AmbiguousWidePointerComparisons::SpanfulEq {
374 addr_metadata_suggestion: (!is_dyn_comparison).then(|| {
375 AmbiguousWidePointerComparisonsAddrMetadataSuggestion {
376 ne,
377 deref_left,
378 deref_right,
379 l_modifiers,
380 r_modifiers,
381 left,
382 middle,
383 right,
384 }
385 }),
386 addr_suggestion: AmbiguousWidePointerComparisonsAddrSuggestion {
387 ne,
388 deref_left,
389 deref_right,
390 l_modifiers,
391 r_modifiers,
392 left,
393 middle,
394 right,
395 },
396 }
397 } else {
398 AmbiguousWidePointerComparisons::SpanfulCmp {
399 cast_suggestion: AmbiguousWidePointerComparisonsCastSuggestion {
400 deref_left,
401 deref_right,
402 l_modifiers,
403 r_modifiers,
404 paren_left: if l_ty_refs != 0 { ")" } else { "" },
405 paren_right: if r_ty_refs != 0 { ")" } else { "" },
406 left_before: (l_ty_refs != 0).then_some(l_span.shrink_to_lo()),
407 left_after: l_span.shrink_to_hi(),
408 right_before: (r_ty_refs != 0).then_some(r_span.shrink_to_lo()),
409 right_after: r_span.shrink_to_hi(),
410 },
411 expect_suggestion: AmbiguousWidePointerComparisonsExpectSuggestion {
412 paren_left: if via_method_call { "" } else { "(" },
413 paren_right: if via_method_call { "" } else { ")" },
414 before: e.span.shrink_to_lo(),
415 after: e.span.shrink_to_hi(),
416 },
417 }
418 },
419 );
420}
421
422fn lint_fn_pointer<'tcx>(
423 cx: &LateContext<'tcx>,
424 e: &'tcx hir::Expr<'tcx>,
425 cmpop: ComparisonOp,
426 l: &'tcx hir::Expr<'tcx>,
427 r: &'tcx hir::Expr<'tcx>,
428) {
429 let peel_refs = |mut ty: Ty<'tcx>| -> (Ty<'tcx>, usize) {
430 let mut refs = 0;
431
432 while let ty::Ref(_, inner_ty, _) = ty.kind() {
433 ty = *inner_ty;
434 refs += 1;
435 }
436
437 (ty, refs)
438 };
439
440 let l = l.peel_borrows();
442 let r = r.peel_borrows();
443
444 let Some(l_ty) = cx.typeck_results().expr_ty_opt(l) else { return };
445 let Some(r_ty) = cx.typeck_results().expr_ty_opt(r) else { return };
446
447 let (l_ty, l_ty_refs) = peel_refs(l_ty);
450 let (r_ty, r_ty_refs) = peel_refs(r_ty);
451
452 if l_ty.is_fn() && r_ty.is_fn() {
453 } else if let ty::Adt(l_def, l_args) = l_ty.kind()
455 && let ty::Adt(r_def, r_args) = r_ty.kind()
456 && cx.tcx.is_lang_item(l_def.did(), LangItem::Option)
457 && cx.tcx.is_lang_item(r_def.did(), LangItem::Option)
458 && let Some(l_some_arg) = l_args.get(0)
459 && let Some(r_some_arg) = r_args.get(0)
460 && l_some_arg.expect_ty().is_fn()
461 && r_some_arg.expect_ty().is_fn()
462 {
463 return cx.emit_span_lint(
465 UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
466 e.span,
467 UnpredictableFunctionPointerComparisons::Warn,
468 );
469 } else {
470 return;
472 }
473
474 let is_eq_ne = #[allow(non_exhaustive_omitted_patterns)] match cmpop {
ComparisonOp::BinOp(hir::BinOpKind::Eq | hir::BinOpKind::Ne) => true,
_ => false,
}matches!(cmpop, ComparisonOp::BinOp(hir::BinOpKind::Eq | hir::BinOpKind::Ne));
477
478 if !is_eq_ne {
479 return cx.emit_span_lint(
481 UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
482 e.span,
483 UnpredictableFunctionPointerComparisons::Warn,
484 );
485 }
486
487 let (Some(l_span), Some(r_span)) =
488 (l.span.find_ancestor_inside(e.span), r.span.find_ancestor_inside(e.span))
489 else {
490 return cx.emit_span_lint(
492 UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
493 e.span,
494 UnpredictableFunctionPointerComparisons::Warn,
495 );
496 };
497
498 let ne = if cmpop == ComparisonOp::BinOp(hir::BinOpKind::Ne) { "!" } else { "" };
499
500 let deref_left = &*"*".repeat(l_ty_refs);
502 let deref_right = &*"*".repeat(r_ty_refs);
503
504 let left = e.span.shrink_to_lo().until(l_span.shrink_to_lo());
505 let middle = l_span.shrink_to_hi().until(r_span.shrink_to_lo());
506 let right = r_span.shrink_to_hi().until(e.span.shrink_to_hi());
507
508 let sugg =
509 if !r_ty.is_fn_ptr() {
512 let fn_sig = r_ty.fn_sig(cx.tcx);
513
514 UnpredictableFunctionPointerComparisonsSuggestion::FnAddrEqWithCast {
515 ne,
516 fn_sig,
517 deref_left,
518 deref_right,
519 left,
520 middle,
521 right,
522 }
523 } else {
524 UnpredictableFunctionPointerComparisonsSuggestion::FnAddrEq {
525 ne,
526 deref_left,
527 deref_right,
528 left,
529 middle,
530 right,
531 }
532 };
533
534 cx.emit_span_lint(
535 UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
536 e.span,
537 UnpredictableFunctionPointerComparisons::Suggestion { sugg },
538 );
539}
540
541impl<'tcx> LateLintPass<'tcx> for TypeLimits {
542 fn check_lit(&mut self, cx: &LateContext<'tcx>, hir_id: HirId, lit: hir::Lit, negated: bool) {
543 if negated {
544 self.negated_expr_id = Some(hir_id);
545 self.negated_expr_span = Some(lit.span);
546 }
547 lint_literal(cx, self, hir_id, lit.span, &lit, negated);
548 }
549
550 fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx hir::Expr<'tcx>) {
551 match e.kind {
552 hir::ExprKind::Unary(hir::UnOp::Neg, expr) => {
553 if self.negated_expr_id != Some(e.hir_id) {
555 self.negated_expr_id = Some(expr.hir_id);
556 self.negated_expr_span = Some(e.span);
557 }
558 }
559 hir::ExprKind::Binary(binop, ref l, ref r) => {
560 if is_comparison(binop.node) {
561 if !check_limits(cx, binop.node, l, r) {
562 cx.emit_span_lint(UNUSED_COMPARISONS, e.span, UnusedComparisons);
563 } else {
564 lint_nan(cx, e, binop.node, l, r);
565 let cmpop = ComparisonOp::BinOp(binop.node);
566 lint_wide_pointer(cx, e, cmpop, l, r);
567 lint_fn_pointer(cx, e, cmpop, l, r);
568 }
569 }
570 }
571 hir::ExprKind::Call(path, [l, r])
572 if let ExprKind::Path(ref qpath) = path.kind
573 && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
574 && let Some(diag_item) = cx.tcx.get_diagnostic_name(def_id)
575 && let Some(cmpop) = diag_item_cmpop(diag_item) =>
576 {
577 lint_wide_pointer(cx, e, cmpop, l, r);
578 lint_fn_pointer(cx, e, cmpop, l, r);
579 }
580 hir::ExprKind::MethodCall(_, l, [r], _)
581 if let Some(def_id) = cx.typeck_results().type_dependent_def_id(e.hir_id)
582 && let Some(diag_item) = cx.tcx.get_diagnostic_name(def_id)
583 && let Some(cmpop) = diag_item_cmpop(diag_item) =>
584 {
585 lint_wide_pointer(cx, e, cmpop, l, r);
586 lint_fn_pointer(cx, e, cmpop, l, r);
587 }
588 _ => {}
589 };
590
591 fn is_valid<T: PartialOrd>(binop: hir::BinOpKind, v: T, min: T, max: T) -> bool {
592 match binop {
593 hir::BinOpKind::Lt => v > min && v <= max,
594 hir::BinOpKind::Le => v >= min && v < max,
595 hir::BinOpKind::Gt => v >= min && v < max,
596 hir::BinOpKind::Ge => v > min && v <= max,
597 hir::BinOpKind::Eq | hir::BinOpKind::Ne => v >= min && v <= max,
598 _ => ::rustc_middle::util::bug::bug_fmt(format_args!("impossible case reached"))bug!(),
599 }
600 }
601
602 fn rev_binop(binop: hir::BinOpKind) -> hir::BinOpKind {
603 match binop {
604 hir::BinOpKind::Lt => hir::BinOpKind::Gt,
605 hir::BinOpKind::Le => hir::BinOpKind::Ge,
606 hir::BinOpKind::Gt => hir::BinOpKind::Lt,
607 hir::BinOpKind::Ge => hir::BinOpKind::Le,
608 _ => binop,
609 }
610 }
611
612 fn check_limits(
613 cx: &LateContext<'_>,
614 binop: hir::BinOpKind,
615 l: &hir::Expr<'_>,
616 r: &hir::Expr<'_>,
617 ) -> bool {
618 let (lit, expr, swap) = match (&l.kind, &r.kind) {
619 (&hir::ExprKind::Lit(_), _) => (l, r, true),
620 (_, &hir::ExprKind::Lit(_)) => (r, l, false),
621 _ => return true,
622 };
623 let norm_binop = if swap { rev_binop(binop) } else { binop };
626 match *cx.typeck_results().node_type(expr.hir_id).kind() {
627 ty::Int(int_ty) => {
628 let (min, max) = int_ty_range(int_ty);
629 let lit_val: i128 = match lit.kind {
630 hir::ExprKind::Lit(li) => match li.node {
631 ast::LitKind::Int(
632 v,
633 ast::LitIntType::Signed(_) | ast::LitIntType::Unsuffixed,
634 ) => v.get() as i128,
635 _ => return true,
636 },
637 _ => ::rustc_middle::util::bug::bug_fmt(format_args!("impossible case reached"))bug!(),
638 };
639 is_valid(norm_binop, lit_val, min, max)
640 }
641 ty::Uint(uint_ty) => {
642 let (min, max): (u128, u128) = uint_ty_range(uint_ty);
643 let lit_val: u128 = match lit.kind {
644 hir::ExprKind::Lit(li) => match li.node {
645 ast::LitKind::Int(v, _) => v.get(),
646 _ => return true,
647 },
648 _ => ::rustc_middle::util::bug::bug_fmt(format_args!("impossible case reached"))bug!(),
649 };
650 is_valid(norm_binop, lit_val, min, max)
651 }
652 _ => true,
653 }
654 }
655
656 fn is_comparison(binop: hir::BinOpKind) -> bool {
657 #[allow(non_exhaustive_omitted_patterns)] match binop {
hir::BinOpKind::Eq | hir::BinOpKind::Lt | hir::BinOpKind::Le |
hir::BinOpKind::Ne | hir::BinOpKind::Ge | hir::BinOpKind::Gt => true,
_ => false,
}matches!(
658 binop,
659 hir::BinOpKind::Eq
660 | hir::BinOpKind::Lt
661 | hir::BinOpKind::Le
662 | hir::BinOpKind::Ne
663 | hir::BinOpKind::Ge
664 | hir::BinOpKind::Gt
665 )
666 }
667
668 fn diag_item_cmpop(diag_item: Symbol) -> Option<ComparisonOp> {
669 Some(match diag_item {
670 sym::cmp_ord_max => ComparisonOp::Other,
671 sym::cmp_ord_min => ComparisonOp::Other,
672 sym::ord_cmp_method => ComparisonOp::Other,
673 sym::cmp_partialeq_eq => ComparisonOp::BinOp(hir::BinOpKind::Eq),
674 sym::cmp_partialeq_ne => ComparisonOp::BinOp(hir::BinOpKind::Ne),
675 sym::cmp_partialord_cmp => ComparisonOp::Other,
676 sym::cmp_partialord_ge => ComparisonOp::BinOp(hir::BinOpKind::Ge),
677 sym::cmp_partialord_gt => ComparisonOp::BinOp(hir::BinOpKind::Gt),
678 sym::cmp_partialord_le => ComparisonOp::BinOp(hir::BinOpKind::Le),
679 sym::cmp_partialord_lt => ComparisonOp::BinOp(hir::BinOpKind::Lt),
680 _ => return None,
681 })
682 }
683 }
684}
685
686pub(crate) fn nonnull_optimization_guaranteed<'tcx>(
687 tcx: TyCtxt<'tcx>,
688 def: ty::AdtDef<'tcx>,
689) -> bool {
690 {
#[allow(deprecated)]
{
{
'done:
{
for i in tcx.get_all_attrs(def.did()) {
#[allow(unused_imports)]
use rustc_hir::attrs::AttributeKind::*;
let i: &rustc_hir::Attribute = i;
match i {
rustc_hir::Attribute::Parsed(RustcNonnullOptimizationGuaranteed)
=> {
break 'done Some(());
}
rustc_hir::Attribute::Unparsed(..) =>
{}
#[deny(unreachable_patterns)]
_ => {}
}
}
None
}
}
}
}.is_some()find_attr!(tcx, def.did(), RustcNonnullOptimizationGuaranteed)
691}
692
693pub(crate) fn transparent_newtype_field<'a, 'tcx>(
696 tcx: TyCtxt<'tcx>,
697 variant: &'a ty::VariantDef,
698) -> Option<&'a ty::FieldDef> {
699 let typing_env = ty::TypingEnv::non_body_analysis(tcx, variant.def_id);
700 variant.fields.iter().find(|field| {
701 let field_ty = tcx.type_of(field.did).instantiate_identity();
702 let is_1zst =
703 tcx.layout_of(typing_env.as_query_input(field_ty)).is_ok_and(|layout| layout.is_1zst());
704 !is_1zst
705 })
706}
707
/// Is this type known to never be the null value at the ABI level?
/// Conservative: returns `false` whenever nullability cannot be established.
fn ty_is_known_nonnull<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    ty: Ty<'tcx>,
) -> bool {
    let ty = tcx.try_normalize_erasing_regions(typing_env, ty).unwrap_or(ty);

    match ty.kind() {
        ty::FnPtr(..) => true,
        ty::Ref(..) => true,
        ty::Adt(def, _) if def.is_box() => true,
        ty::Adt(def, args) if def.repr().transparent() && !def.is_union() => {
            // `#[rustc_nonnull_optimization_guaranteed]` promises non-nullness
            // directly.
            let marked_non_null = nonnull_optimization_guaranteed(tcx, *def);

            if marked_non_null {
                return true;
            }

            // `UnsafeCell`/`UnsafePinned` wrappers are explicitly excluded.
            if def.is_unsafe_cell() || def.is_unsafe_pinned() {
                return false;
            }

            // A transparent wrapper is non-null iff some variant's non-1-ZST
            // field is non-null.
            def.variants()
                .iter()
                .filter_map(|variant| transparent_newtype_field(tcx, variant))
                .any(|field| ty_is_known_nonnull(tcx, typing_env, field.ty(tcx, args)))
        }
        ty::Pat(base, pat) => {
            // Non-null if either the base type or the pattern guarantees it.
            ty_is_known_nonnull(tcx, typing_env, *base)
                || pat_ty_is_known_nonnull(tcx, typing_env, *pat)
        }
        _ => false,
    }
}
744
/// Does this pattern (of a pattern type) exclude the null value?
/// Any failed endpoint evaluation makes the `try` block yield `None`, which
/// `unwrap_or_default()` turns into a conservative `false`.
fn pat_ty_is_known_nonnull<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    pat: ty::Pattern<'tcx>,
) -> bool {
    try {
        match *pat {
            ty::PatternKind::Range { start, end } => {
                let start = start.try_to_value()?.try_to_bits(tcx, typing_env)?;
                let end = end.try_to_value()?.try_to_bits(tcx, typing_env)?;

                // A non-empty range starting above zero cannot contain null.
                start > 0 && end >= start
            }
            ty::PatternKind::NotNull => true,
            // An or-pattern excludes null only if every alternative does.
            ty::PatternKind::Or(patterns) => {
                patterns.iter().all(|pat| pat_ty_is_known_nonnull(tcx, typing_env, pat))
            }
        }
    }
    .unwrap_or_default()
}
768
769fn get_nullable_type<'tcx>(
772 tcx: TyCtxt<'tcx>,
773 typing_env: ty::TypingEnv<'tcx>,
774 ty: Ty<'tcx>,
775) -> Option<Ty<'tcx>> {
776 let ty = tcx.try_normalize_erasing_regions(typing_env, ty).unwrap_or(ty);
777
778 Some(match *ty.kind() {
779 ty::Adt(field_def, field_args) => {
780 let inner_field_ty = {
781 let mut first_non_zst_ty =
782 field_def.variants().iter().filter_map(|v| transparent_newtype_field(tcx, v));
783 if true {
match (&first_non_zst_ty.clone().count(), &1) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val,
&*right_val,
::core::option::Option::Some(format_args!("Wrong number of fields for transparent type")));
}
}
};
};debug_assert_eq!(
784 first_non_zst_ty.clone().count(),
785 1,
786 "Wrong number of fields for transparent type"
787 );
788 first_non_zst_ty
789 .next_back()
790 .expect("No non-zst fields in transparent type.")
791 .ty(tcx, field_args)
792 };
793 return get_nullable_type(tcx, typing_env, inner_field_ty);
794 }
795 ty::Pat(base, ..) => return get_nullable_type(tcx, typing_env, base),
796 ty::Int(_) | ty::Uint(_) | ty::RawPtr(..) => ty,
797 ty::Ref(_region, ty, mutbl) => Ty::new_ptr(tcx, ty, mutbl),
800 ty::FnPtr(..) => ty,
803 ref unhandled => {
806 {
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_lint/src/types.rs:806",
"rustc_lint::types", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_lint/src/types.rs"),
::tracing_core::__macro_support::Option::Some(806u32),
::tracing_core::__macro_support::Option::Some("rustc_lint::types"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("get_nullable_type: Unhandled scalar kind: {0:?} while checking {1:?}",
unhandled, ty) as &dyn Value))])
});
} else { ; }
};debug!(
807 "get_nullable_type: Unhandled scalar kind: {:?} while checking {:?}",
808 unhandled, ty
809 );
810 return None;
811 }
812 })
813}
814
815fn is_niche_optimization_candidate<'tcx>(
820 tcx: TyCtxt<'tcx>,
821 typing_env: ty::TypingEnv<'tcx>,
822 ty: Ty<'tcx>,
823) -> bool {
824 if tcx.layout_of(typing_env.as_query_input(ty)).is_ok_and(|layout| !layout.is_1zst()) {
825 return false;
826 }
827
828 match ty.kind() {
829 ty::Adt(ty_def, _) => {
830 let non_exhaustive = ty_def.is_variant_list_non_exhaustive();
831 let empty = (ty_def.is_struct() && ty_def.non_enum_variant().fields.is_empty())
832 || (ty_def.is_enum() && ty_def.variants().is_empty());
833
834 !non_exhaustive && empty
835 }
836 ty::Tuple(tys) => tys.is_empty(),
837 _ => false,
838 }
839}
840
841pub(crate) fn repr_nullable_ptr<'tcx>(
846 tcx: TyCtxt<'tcx>,
847 typing_env: ty::TypingEnv<'tcx>,
848 ty: Ty<'tcx>,
849) -> Option<Ty<'tcx>> {
850 {
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_lint/src/types.rs:850",
"rustc_lint::types", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_lint/src/types.rs"),
::tracing_core::__macro_support::Option::Some(850u32),
::tracing_core::__macro_support::Option::Some("rustc_lint::types"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("is_repr_nullable_ptr(tcx, ty = {0:?})",
ty) as &dyn Value))])
});
} else { ; }
};debug!("is_repr_nullable_ptr(tcx, ty = {:?})", ty);
851 match ty.kind() {
852 ty::Adt(ty_def, args) => {
853 let field_ty = match &ty_def.variants().raw[..] {
854 [var_one, var_two] => match (&var_one.fields.raw[..], &var_two.fields.raw[..]) {
855 ([], [field]) | ([field], []) => field.ty(tcx, args),
856 ([field1], [field2]) => {
857 let ty1 = field1.ty(tcx, args);
858 let ty2 = field2.ty(tcx, args);
859
860 if is_niche_optimization_candidate(tcx, typing_env, ty1) {
861 ty2
862 } else if is_niche_optimization_candidate(tcx, typing_env, ty2) {
863 ty1
864 } else {
865 return None;
866 }
867 }
868 _ => return None,
869 },
870 _ => return None,
871 };
872
873 if !ty_is_known_nonnull(tcx, typing_env, field_ty) {
874 return None;
875 }
876
877 let compute_size_skeleton = |t| SizeSkeleton::compute(t, tcx, typing_env).ok();
881 if !compute_size_skeleton(ty)?.same_size(compute_size_skeleton(field_ty)?) {
882 ::rustc_middle::util::bug::bug_fmt(format_args!("improper_ctypes: Option nonnull optimization not applied?"));bug!("improper_ctypes: Option nonnull optimization not applied?");
883 }
884
885 let field_ty_layout = tcx.layout_of(typing_env.as_query_input(field_ty));
887 if field_ty_layout.is_err() && !field_ty.has_non_region_param() {
888 ::rustc_middle::util::bug::bug_fmt(format_args!("should be able to compute the layout of non-polymorphic type"));bug!("should be able to compute the layout of non-polymorphic type");
889 }
890
891 let field_ty_abi = &field_ty_layout.ok()?.backend_repr;
892 if let BackendRepr::Scalar(field_ty_scalar) = field_ty_abi {
893 match field_ty_scalar.valid_range(&tcx) {
894 WrappingRange { start: 0, end }
895 if end == field_ty_scalar.size(&tcx).unsigned_int_max() - 1 =>
896 {
897 return Some(get_nullable_type(tcx, typing_env, field_ty).unwrap());
898 }
899 WrappingRange { start: 1, .. } => {
900 return Some(get_nullable_type(tcx, typing_env, field_ty).unwrap());
901 }
902 WrappingRange { start, end } => {
903 {
::core::panicking::panic_fmt(format_args!("internal error: entered unreachable code: {0}",
format_args!("Unhandled start and end range: ({0}, {1})", start,
end)));
}unreachable!("Unhandled start and end range: ({}, {})", start, end)
904 }
905 };
906 }
907 None
908 }
909 ty::Pat(base, pat) => get_nullable_type_from_pat(tcx, typing_env, *base, *pat),
910 _ => None,
911 }
912}
914fn get_nullable_type_from_pat<'tcx>(
915 tcx: TyCtxt<'tcx>,
916 typing_env: ty::TypingEnv<'tcx>,
917 base: Ty<'tcx>,
918 pat: ty::Pattern<'tcx>,
919) -> Option<Ty<'tcx>> {
920 match *pat {
921 ty::PatternKind::NotNull | ty::PatternKind::Range { .. } => {
922 get_nullable_type(tcx, typing_env, base)
923 }
924 ty::PatternKind::Or(patterns) => {
925 let first = get_nullable_type_from_pat(tcx, typing_env, base, patterns[0])?;
926 for &pat in &patterns[1..] {
927 match (&first, &get_nullable_type_from_pat(tcx, typing_env, base, pat)?) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val, &*right_val,
::core::option::Option::None);
}
}
};assert_eq!(first, get_nullable_type_from_pat(tcx, typing_env, base, pat)?);
928 }
929 Some(first)
930 }
931 }
932}
934pub struct VariantSizeDifferences;
#[automatically_derived]
impl ::core::marker::Copy for VariantSizeDifferences { }
#[automatically_derived]
#[doc(hidden)]
unsafe impl ::core::clone::TrivialClone for VariantSizeDifferences { }
#[automatically_derived]
impl ::core::clone::Clone for VariantSizeDifferences {
#[inline]
fn clone(&self) -> VariantSizeDifferences { *self }
}
impl ::rustc_lint_defs::LintPass for VariantSizeDifferences {
fn name(&self) -> &'static str { "VariantSizeDifferences" }
fn get_lints(&self) -> ::rustc_lint_defs::LintVec {
::alloc::boxed::box_assume_init_into_vec_unsafe(::alloc::intrinsics::write_box_via_move(::alloc::boxed::Box::new_uninit(),
[VARIANT_SIZE_DIFFERENCES]))
}
}
impl VariantSizeDifferences {
#[allow(unused)]
pub fn lint_vec() -> ::rustc_lint_defs::LintVec {
::alloc::boxed::box_assume_init_into_vec_unsafe(::alloc::intrinsics::write_box_via_move(::alloc::boxed::Box::new_uninit(),
[VARIANT_SIZE_DIFFERENCES]))
}
}declare_lint_pass!(VariantSizeDifferences => [VARIANT_SIZE_DIFFERENCES]);
936impl<'tcx> LateLintPass<'tcx> for VariantSizeDifferences {
937 fn check_item(&mut self, cx: &LateContext<'_>, it: &hir::Item<'_>) {
938 if let hir::ItemKind::Enum(_, _, ref enum_definition) = it.kind {
939 let t = cx.tcx.type_of(it.owner_id).instantiate_identity();
940 let ty = cx.tcx.erase_and_anonymize_regions(t);
941 let Ok(layout) = cx.layout_of(ty) else { return };
942 let Variants::Multiple { tag_encoding: TagEncoding::Direct, tag, variants, .. } =
943 &layout.variants
944 else {
945 return;
946 };
947
948 let tag_size = tag.size(&cx.tcx).bytes();
949
950 {
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_lint/src/types.rs:950",
"rustc_lint::types", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_lint/src/types.rs"),
::tracing_core::__macro_support::Option::Some(950u32),
::tracing_core::__macro_support::Option::Some("rustc_lint::types"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("enum `{0}` is {1} bytes large with layout:\n{2:#?}",
t, layout.size.bytes(), layout) as &dyn Value))])
});
} else { ; }
};debug!(
951 "enum `{}` is {} bytes large with layout:\n{:#?}",
952 t,
953 layout.size.bytes(),
954 layout
955 );
956
957 let (largest, slargest, largest_index) = iter::zip(enum_definition.variants, variants)
958 .map(|(variant, variant_layout)| {
959 let bytes = variant_layout.size.bytes().saturating_sub(tag_size);
961
962 {
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_lint/src/types.rs:962",
"rustc_lint::types", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_lint/src/types.rs"),
::tracing_core::__macro_support::Option::Some(962u32),
::tracing_core::__macro_support::Option::Some("rustc_lint::types"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("- variant `{0}` is {1} bytes large",
variant.ident, bytes) as &dyn Value))])
});
} else { ; }
};debug!("- variant `{}` is {} bytes large", variant.ident, bytes);
963 bytes
964 })
965 .enumerate()
966 .fold((0, 0, 0), |(l, s, li), (idx, size)| {
967 if size > l {
968 (size, l, idx)
969 } else if size > s {
970 (l, size, li)
971 } else {
972 (l, s, li)
973 }
974 });
975
976 if largest > slargest * 3 && slargest > 0 {
979 cx.emit_span_lint(
980 VARIANT_SIZE_DIFFERENCES,
981 enum_definition.variants[largest_index].span,
982 VariantSizeDifferencesDiag { largest },
983 );
984 }
985 }
986 }
987}
989#[doc = r" The `invalid_atomic_ordering` lint detects passing an `Ordering`"]
#[doc = r" to an atomic operation that does not support that ordering."]
#[doc = r""]
#[doc = r" ### Example"]
#[doc = r""]
#[doc = r" ```rust,compile_fail"]
#[doc = r" # use core::sync::atomic::{AtomicU8, Ordering};"]
#[doc = r" let atom = AtomicU8::new(0);"]
#[doc = r" let value = atom.load(Ordering::Release);"]
#[doc = r" # let _ = value;"]
#[doc = r" ```"]
#[doc = r""]
#[doc = r" {{produces}}"]
#[doc = r""]
#[doc = r" ### Explanation"]
#[doc = r""]
#[doc = r" Some atomic operations are only supported for a subset of the"]
#[doc =
r" `atomic::Ordering` variants. Passing an unsupported variant will cause"]
#[doc =
r" an unconditional panic at runtime, which is detected by this lint."]
#[doc = r""]
#[doc =
r" This lint will trigger in the following cases: (where `AtomicType` is an"]
#[doc = r" atomic type from `core::sync::atomic`, such as `AtomicBool`,"]
#[doc = r" `AtomicPtr`, `AtomicUsize`, or any of the other integer atomics)."]
#[doc = r""]
#[doc = r" - Passing `Ordering::Acquire` or `Ordering::AcqRel` to"]
#[doc = r" `AtomicType::store`."]
#[doc = r""]
#[doc = r" - Passing `Ordering::Release` or `Ordering::AcqRel` to"]
#[doc = r" `AtomicType::load`."]
#[doc = r""]
#[doc = r" - Passing `Ordering::Relaxed` to `core::sync::atomic::fence` or"]
#[doc = r" `core::sync::atomic::compiler_fence`."]
#[doc = r""]
#[doc =
r" - Passing `Ordering::Release` or `Ordering::AcqRel` as the failure"]
#[doc = r" ordering for any of `AtomicType::compare_exchange`,"]
#[doc = r" `AtomicType::compare_exchange_weak`, `AtomicType::update`, or"]
#[doc = r" `AtomicType::try_update`."]
static INVALID_ATOMIC_ORDERING: &::rustc_lint_defs::Lint =
&::rustc_lint_defs::Lint {
name: "INVALID_ATOMIC_ORDERING",
default_level: ::rustc_lint_defs::Deny,
desc: "usage of invalid atomic ordering in atomic operations and memory fences",
is_externally_loaded: false,
..::rustc_lint_defs::Lint::default_fields_for_macro()
};declare_lint! {
990 INVALID_ATOMIC_ORDERING,
1028 Deny,
1029 "usage of invalid atomic ordering in atomic operations and memory fences"
1030}
1032pub struct InvalidAtomicOrdering;
#[automatically_derived]
impl ::core::marker::Copy for InvalidAtomicOrdering { }
#[automatically_derived]
#[doc(hidden)]
unsafe impl ::core::clone::TrivialClone for InvalidAtomicOrdering { }
#[automatically_derived]
impl ::core::clone::Clone for InvalidAtomicOrdering {
#[inline]
fn clone(&self) -> InvalidAtomicOrdering { *self }
}
impl ::rustc_lint_defs::LintPass for InvalidAtomicOrdering {
fn name(&self) -> &'static str { "InvalidAtomicOrdering" }
fn get_lints(&self) -> ::rustc_lint_defs::LintVec {
::alloc::boxed::box_assume_init_into_vec_unsafe(::alloc::intrinsics::write_box_via_move(::alloc::boxed::Box::new_uninit(),
[INVALID_ATOMIC_ORDERING]))
}
}
impl InvalidAtomicOrdering {
#[allow(unused)]
pub fn lint_vec() -> ::rustc_lint_defs::LintVec {
::alloc::boxed::box_assume_init_into_vec_unsafe(::alloc::intrinsics::write_box_via_move(::alloc::boxed::Box::new_uninit(),
[INVALID_ATOMIC_ORDERING]))
}
}declare_lint_pass!(InvalidAtomicOrdering => [INVALID_ATOMIC_ORDERING]);
1034impl InvalidAtomicOrdering {
1035 fn inherent_atomic_method_call<'hir>(
1036 cx: &LateContext<'_>,
1037 expr: &Expr<'hir>,
1038 recognized_names: &[Symbol], ) -> Option<(Symbol, &'hir [Expr<'hir>])> {
1040 if let ExprKind::MethodCall(method_path, _, args, _) = &expr.kind
1041 && recognized_names.contains(&method_path.ident.name)
1042 && let Some(m_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
1043 && let Some(impl_did) = cx.tcx.inherent_impl_of_assoc(m_def_id)
1045 && let Some(adt) = cx.tcx.type_of(impl_did).instantiate_identity().ty_adt_def()
1046 && cx.tcx.is_diagnostic_item(sym::Atomic, adt.did())
1047 {
1048 return Some((method_path.ident.name, args));
1049 }
1050 None
1051 }
1052
1053 fn match_ordering(cx: &LateContext<'_>, ord_arg: &Expr<'_>) -> Option<Symbol> {
1054 let ExprKind::Path(ref ord_qpath) = ord_arg.kind else { return None };
1055 let did = cx.qpath_res(ord_qpath, ord_arg.hir_id).opt_def_id()?;
1056 let tcx = cx.tcx;
1057 let atomic_ordering = tcx.get_diagnostic_item(sym::Ordering);
1058 let name = tcx.item_name(did);
1059 let parent = tcx.parent(did);
1060 [sym::Relaxed, sym::Release, sym::Acquire, sym::AcqRel, sym::SeqCst].into_iter().find(
1061 |&ordering| {
1062 name == ordering
1063 && (Some(parent) == atomic_ordering
1064 || tcx.opt_parent(parent) == atomic_ordering)
1066 },
1067 )
1068 }
1069
1070 fn check_atomic_load_store(cx: &LateContext<'_>, expr: &Expr<'_>) {
1071 if let Some((method, args)) =
1072 Self::inherent_atomic_method_call(cx, expr, &[sym::load, sym::store])
1073 && let Some((ordering_arg, invalid_ordering)) = match method {
1074 sym::load => Some((&args[0], sym::Release)),
1075 sym::store => Some((&args[1], sym::Acquire)),
1076 _ => None,
1077 }
1078 && let Some(ordering) = Self::match_ordering(cx, ordering_arg)
1079 && (ordering == invalid_ordering || ordering == sym::AcqRel)
1080 {
1081 if method == sym::load {
1082 cx.emit_span_lint(INVALID_ATOMIC_ORDERING, ordering_arg.span, AtomicOrderingLoad);
1083 } else {
1084 cx.emit_span_lint(INVALID_ATOMIC_ORDERING, ordering_arg.span, AtomicOrderingStore);
1085 };
1086 }
1087 }
1088
1089 fn check_memory_fence(cx: &LateContext<'_>, expr: &Expr<'_>) {
1090 if let ExprKind::Call(func, args) = expr.kind
1091 && let ExprKind::Path(ref func_qpath) = func.kind
1092 && let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id()
1093 && #[allow(non_exhaustive_omitted_patterns)] match cx.tcx.get_diagnostic_name(def_id)
{
Some(sym::fence | sym::compiler_fence) => true,
_ => false,
}matches!(cx.tcx.get_diagnostic_name(def_id), Some(sym::fence | sym::compiler_fence))
1094 && Self::match_ordering(cx, &args[0]) == Some(sym::Relaxed)
1095 {
1096 cx.emit_span_lint(INVALID_ATOMIC_ORDERING, args[0].span, AtomicOrderingFence);
1097 }
1098 }
1099
1100 fn check_atomic_compare_exchange(cx: &LateContext<'_>, expr: &Expr<'_>) {
1101 let Some((method, args)) = Self::inherent_atomic_method_call(
1102 cx,
1103 expr,
1104 &[
1105 sym::update,
1106 sym::try_update,
1107 sym::fetch_update,
1108 sym::compare_exchange,
1109 sym::compare_exchange_weak,
1110 ],
1111 ) else {
1112 return;
1113 };
1114
1115 let fail_order_arg = match method {
1116 sym::update | sym::try_update | sym::fetch_update => &args[1],
1117 sym::compare_exchange | sym::compare_exchange_weak => &args[3],
1118 _ => return,
1119 };
1120
1121 let Some(fail_ordering) = Self::match_ordering(cx, fail_order_arg) else { return };
1122
1123 if #[allow(non_exhaustive_omitted_patterns)] match fail_ordering {
sym::Release | sym::AcqRel => true,
_ => false,
}matches!(fail_ordering, sym::Release | sym::AcqRel) {
1124 cx.emit_span_lint(
1125 INVALID_ATOMIC_ORDERING,
1126 fail_order_arg.span,
1127 InvalidAtomicOrderingDiag { method, fail_order_arg_span: fail_order_arg.span },
1128 );
1129 }
1130 }
1131}
1133impl<'tcx> LateLintPass<'tcx> for InvalidAtomicOrdering {
1134 fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
1135 Self::check_atomic_load_store(cx, expr);
1136 Self::check_memory_fence(cx, expr);
1137 Self::check_atomic_compare_exchange(cx, expr);
1138 }
1139}