use std::iter;

use rustc_abi::{BackendRepr, TagEncoding, Variants, WrappingRange};
use rustc_hir::{Expr, ExprKind, HirId, LangItem};
use rustc_middle::bug;
use rustc_middle::ty::layout::{LayoutOf, SizeSkeleton};
use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt};
use rustc_session::{declare_lint, declare_lint_pass, impl_lint_pass};
use rustc_span::{Span, Symbol, sym};
use tracing::debug;
use {rustc_ast as ast, rustc_hir as hir};

mod improper_ctypes;
pub(crate) use improper_ctypes::ImproperCTypesLint;

use crate::lints::{
    AmbiguousWidePointerComparisons, AmbiguousWidePointerComparisonsAddrMetadataSuggestion,
    AmbiguousWidePointerComparisonsAddrSuggestion, AmbiguousWidePointerComparisonsCastSuggestion,
    AmbiguousWidePointerComparisonsExpectSuggestion, AtomicOrderingFence, AtomicOrderingLoad,
    AtomicOrderingStore, InvalidAtomicOrderingDiag, InvalidNanComparisons,
    InvalidNanComparisonsSuggestion, UnpredictableFunctionPointerComparisons,
    UnpredictableFunctionPointerComparisonsSuggestion, UnusedComparisons,
    VariantSizeDifferencesDiag,
};
use crate::{LateContext, LateLintPass, LintContext};

mod literal;
use literal::{int_ty_range, lint_literal, uint_ty_range};

declare_lint! {
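    /// The `unused_comparisons` lint detects comparisons made useless by
    /// limits of the types involved.
    ///
    /// An example of code that triggers this lint (a `u8` can never be
    /// negative, so the comparison is always true):
    ///
    /// ```rust
    /// fn foo(x: u8) {
    ///     if x >= 0 {}
    /// }
    /// ```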
    UNUSED_COMPARISONS,
    Warn,
    "comparisons made useless by limits of the types involved"
}

declare_lint! {
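    /// The `overflowing_literals` lint detects literals out of range for
    /// their type.
    ///
    /// An example of code that triggers this lint:
    ///
    /// ```rust,compile_fail
    /// let x: u8 = 1000;
    /// ```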
    OVERFLOWING_LITERALS,
    Deny,
    "literal out of range for its type"
}

declare_lint! {
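    /// The `variant_size_differences` lint detects enums with widely varying
    /// variant sizes. It fires when the largest variant is more than three
    /// times as large as the second-largest (see the check below).
    ///
    /// An example of code that triggers this (allow-by-default) lint:
    ///
    /// ```rust,compile_fail
    /// #![deny(variant_size_differences)]
    /// enum En {
    ///     V0(u8),
    ///     VBig([u8; 1024]),
    /// }
    /// ```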
    VARIANT_SIZE_DIFFERENCES,
    Allow,
    "detects enums with widely varying variant sizes"
}

declare_lint! {
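    /// The `invalid_nan_comparisons` lint detects comparisons against `NaN`,
    /// which always evaluate to `false` (or always `true` for `!=`).
    ///
    /// An example of code that triggers this lint:
    ///
    /// ```rust
    /// let a = 2.3f32;
    /// if a == f32::NAN {}
    /// ```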
    INVALID_NAN_COMPARISONS,
    Warn,
    "detects invalid floating point NaN comparisons"
}

declare_lint! {
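    /// The `ambiguous_wide_pointer_comparisons` lint detects comparisons of
    /// wide pointers, where it is ambiguous whether the metadata (vtable or
    /// length) is compared in addition to the address.
    ///
    /// An example of code that triggers this lint:
    ///
    /// ```rust
    /// let s = "";
    /// let a = s as *const str;
    /// let b = s as *const str;
    /// let _ = a == b;
    /// ```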
    AMBIGUOUS_WIDE_POINTER_COMPARISONS,
    Warn,
    "detects ambiguous wide pointer comparisons"
}

declare_lint! {
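    /// The `unpredictable_function_pointer_comparisons` lint detects
    /// comparisons of function pointers, whose result is not guaranteed to be
    /// meaningful: the compiler and linker may merge or duplicate identical
    /// functions, so two pointers to "the same" function can compare unequal
    /// and pointers to different functions can compare equal.
    ///
    /// An example of code that triggers this lint:
    ///
    /// ```rust
    /// fn a() {}
    /// fn b() {}
    ///
    /// let f: fn() = a;
    /// let g: fn() = b;
    /// let _ = f == g;
    /// ```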
    UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
    Warn,
    "detects unpredictable function pointer comparisons",
    report_in_external_macro
}

#[derive(Copy, Clone, Default)]
pub(crate) struct TypeLimits {
    /// Id of the last visited negated expression
    negated_expr_id: Option<hir::HirId>,
    /// Span of the last visited negated expression
    negated_expr_span: Option<Span>,
}
201
202impl_lint_pass!(TypeLimits => [
203 UNUSED_COMPARISONS,
204 OVERFLOWING_LITERALS,
205 INVALID_NAN_COMPARISONS,
206 AMBIGUOUS_WIDE_POINTER_COMPARISONS,
207 UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS
208]);
209
210impl TypeLimits {
211 pub(crate) fn new() -> TypeLimits {
212 TypeLimits { negated_expr_id: None, negated_expr_span: None }
213 }
214}
215
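/// Detects comparisons such as `x == f32::NAN`, which always evaluate to
/// `false` (or `true` for `!=`) and usually mean `x.is_nan()` was intended.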
fn lint_nan<'tcx>(
    cx: &LateContext<'tcx>,
    e: &'tcx hir::Expr<'tcx>,
    binop: hir::BinOpKind,
    l: &'tcx hir::Expr<'tcx>,
    r: &'tcx hir::Expr<'tcx>,
) {
    fn is_nan(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> bool {
        let expr = expr.peel_blocks().peel_borrows();
        match expr.kind {
            ExprKind::Path(qpath) => {
                let Some(def_id) = cx.typeck_results().qpath_res(&qpath, expr.hir_id).opt_def_id()
                else {
                    return false;
                };

                matches!(
                    cx.tcx.get_diagnostic_name(def_id),
                    Some(sym::f16_nan | sym::f32_nan | sym::f64_nan | sym::f128_nan)
                )
            }
            _ => false,
        }
    }

    fn eq_ne(
        e: &hir::Expr<'_>,
        l: &hir::Expr<'_>,
        r: &hir::Expr<'_>,
        f: impl FnOnce(Span, Span) -> InvalidNanComparisonsSuggestion,
    ) -> InvalidNanComparisons {
        let suggestion = if let Some(l_span) = l.span.find_ancestor_inside(e.span)
            && let Some(r_span) = r.span.find_ancestor_inside(e.span)
        {
            f(l_span, r_span)
        } else {
            InvalidNanComparisonsSuggestion::Spanless
        };

        InvalidNanComparisons::EqNe { suggestion }
    }

    let lint = match binop {
        hir::BinOpKind::Eq | hir::BinOpKind::Ne if is_nan(cx, l) => {
            eq_ne(e, l, r, |l_span, r_span| InvalidNanComparisonsSuggestion::Spanful {
                nan_plus_binop: l_span.until(r_span),
                float: r_span.shrink_to_hi(),
                neg: (binop == hir::BinOpKind::Ne).then(|| r_span.shrink_to_lo()),
            })
        }
        hir::BinOpKind::Eq | hir::BinOpKind::Ne if is_nan(cx, r) => {
            eq_ne(e, l, r, |l_span, r_span| InvalidNanComparisonsSuggestion::Spanful {
                nan_plus_binop: l_span.shrink_to_hi().to(r_span),
                float: l_span.shrink_to_hi(),
                neg: (binop == hir::BinOpKind::Ne).then(|| l_span.shrink_to_lo()),
            })
        }
        hir::BinOpKind::Lt | hir::BinOpKind::Le | hir::BinOpKind::Gt | hir::BinOpKind::Ge
            if is_nan(cx, l) || is_nan(cx, r) =>
        {
            InvalidNanComparisons::LtLeGtGe
        }
        _ => return,
    };

    cx.emit_span_lint(INVALID_NAN_COMPARISONS, e.span, lint);
}

#[derive(Debug, PartialEq, Copy, Clone)]
enum ComparisonOp {
    BinOp(hir::BinOpKind),
    Other,
}

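/// Detects `==`/`!=` (and `cmp`-family) comparisons of wide pointers, where
/// it is ambiguous whether the pointer metadata takes part in the comparison,
/// and suggests an unambiguous rewrite.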
fn lint_wide_pointer<'tcx>(
    cx: &LateContext<'tcx>,
    e: &'tcx hir::Expr<'tcx>,
    cmpop: ComparisonOp,
    l: &'tcx hir::Expr<'tcx>,
    r: &'tcx hir::Expr<'tcx>,
) {
    let ptr_unsized = |mut ty: Ty<'tcx>| -> Option<(usize, String, bool)> {
        let mut refs = 0;
        // Peel any explicit references and count them, so the suggestion can
        // add the matching number of derefs.
        while let ty::Ref(_, inner_ty, _) = ty.kind() {
            ty = *inner_ty;
            refs += 1;
        }

        // Get the inner type of a pointer (or pointer-like type).
        let mut modifiers = String::new();
        ty = match ty.kind() {
            ty::RawPtr(ty, _) => *ty,
            ty::Adt(def, args) if cx.tcx.is_diagnostic_item(sym::NonNull, def.did()) => {
                modifiers.push_str(".as_ptr()");
                args.type_at(0)
            }
            _ => return None,
        };

        (!ty.is_sized(cx.tcx, cx.typing_env()))
            .then(|| (refs, modifiers, matches!(ty.kind(), ty::Dynamic(_, _))))
    };

    // Remove any explicit borrows on the operands.
    let l = l.peel_borrows();
    let r = r.peel_borrows();

    let Some(l_ty) = cx.typeck_results().expr_ty_opt(l) else {
        return;
    };
    let Some(r_ty) = cx.typeck_results().expr_ty_opt(r) else {
        return;
    };

    let Some((l_ty_refs, l_modifiers, l_inner_ty_is_dyn)) = ptr_unsized(l_ty) else {
        return;
    };
    let Some((r_ty_refs, r_modifiers, r_inner_ty_is_dyn)) = ptr_unsized(r_ty) else {
        return;
    };

    let (Some(l_span), Some(r_span)) =
        (l.span.find_ancestor_inside(e.span), r.span.find_ancestor_inside(e.span))
    else {
        return cx.emit_span_lint(
            AMBIGUOUS_WIDE_POINTER_COMPARISONS,
            e.span,
            AmbiguousWidePointerComparisons::Spanless,
        );
    };

    let ne = if cmpop == ComparisonOp::BinOp(hir::BinOpKind::Ne) { "!" } else { "" };
    let is_eq_ne = matches!(cmpop, ComparisonOp::BinOp(hir::BinOpKind::Eq | hir::BinOpKind::Ne));
    let is_dyn_comparison = l_inner_ty_is_dyn && r_inner_ty_is_dyn;
    let via_method_call = matches!(&e.kind, ExprKind::MethodCall(..) | ExprKind::Call(..));

    let left = e.span.shrink_to_lo().until(l_span.shrink_to_lo());
    let middle = l_span.shrink_to_hi().until(r_span.shrink_to_lo());
    let right = r_span.shrink_to_hi().until(e.span.shrink_to_hi());

    let deref_left = &*"*".repeat(l_ty_refs);
    let deref_right = &*"*".repeat(r_ty_refs);

    let l_modifiers = &*l_modifiers;
    let r_modifiers = &*r_modifiers;

    cx.emit_span_lint(
        AMBIGUOUS_WIDE_POINTER_COMPARISONS,
        e.span,
        if is_eq_ne {
            AmbiguousWidePointerComparisons::SpanfulEq {
                addr_metadata_suggestion: (!is_dyn_comparison).then(|| {
                    AmbiguousWidePointerComparisonsAddrMetadataSuggestion {
                        ne,
                        deref_left,
                        deref_right,
                        l_modifiers,
                        r_modifiers,
                        left,
                        middle,
                        right,
                    }
                }),
                addr_suggestion: AmbiguousWidePointerComparisonsAddrSuggestion {
                    ne,
                    deref_left,
                    deref_right,
                    l_modifiers,
                    r_modifiers,
                    left,
                    middle,
                    right,
                },
            }
        } else {
            AmbiguousWidePointerComparisons::SpanfulCmp {
                cast_suggestion: AmbiguousWidePointerComparisonsCastSuggestion {
                    deref_left,
                    deref_right,
                    l_modifiers,
                    r_modifiers,
                    paren_left: if l_ty_refs != 0 { ")" } else { "" },
                    paren_right: if r_ty_refs != 0 { ")" } else { "" },
                    left_before: (l_ty_refs != 0).then_some(l_span.shrink_to_lo()),
                    left_after: l_span.shrink_to_hi(),
                    right_before: (r_ty_refs != 0).then_some(r_span.shrink_to_lo()),
                    right_after: r_span.shrink_to_hi(),
                },
                expect_suggestion: AmbiguousWidePointerComparisonsExpectSuggestion {
                    paren_left: if via_method_call { "" } else { "(" },
                    paren_right: if via_method_call { "" } else { ")" },
                    before: e.span.shrink_to_lo(),
                    after: e.span.shrink_to_hi(),
                },
            }
        },
    );
}

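/// Detects comparisons of function pointers (or `Option`s thereof), which
/// are unpredictable because the compiler may merge or duplicate functions;
/// for `==`/`!=` it suggests comparing addresses via `fn_addr_eq` instead.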
fn lint_fn_pointer<'tcx>(
    cx: &LateContext<'tcx>,
    e: &'tcx hir::Expr<'tcx>,
    cmpop: ComparisonOp,
    l: &'tcx hir::Expr<'tcx>,
    r: &'tcx hir::Expr<'tcx>,
) {
    let peel_refs = |mut ty: Ty<'tcx>| -> (Ty<'tcx>, usize) {
        let mut refs = 0;

        while let ty::Ref(_, inner_ty, _) = ty.kind() {
            ty = *inner_ty;
            refs += 1;
        }

        (ty, refs)
    };

    // Remove any explicit borrows on the operands.
    let l = l.peel_borrows();
    let r = r.peel_borrows();

    let Some(l_ty) = cx.typeck_results().expr_ty_opt(l) else { return };
    let Some(r_ty) = cx.typeck_results().expr_ty_opt(r) else { return };

    let (l_ty, l_ty_refs) = peel_refs(l_ty);
    let (r_ty, r_ty_refs) = peel_refs(r_ty);

    if l_ty.is_fn() && r_ty.is_fn() {
        // Both operands are function pointers (or function items); continue
        // below to build a suggestion.
    } else if let ty::Adt(l_def, l_args) = l_ty.kind()
        && let ty::Adt(r_def, r_args) = r_ty.kind()
        && cx.tcx.is_lang_item(l_def.did(), LangItem::Option)
        && cx.tcx.is_lang_item(r_def.did(), LangItem::Option)
        && let Some(l_some_arg) = l_args.get(0)
        && let Some(r_some_arg) = r_args.get(0)
        && l_some_arg.expect_ty().is_fn()
        && r_some_arg.expect_ty().is_fn()
    {
        // Both operands are `Option<{function ptr}>`; we can only warn, as
        // there is no simple rewrite to suggest.
        return cx.emit_span_lint(
            UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
            e.span,
            UnpredictableFunctionPointerComparisons::Warn,
        );
    } else {
        return;
    }

    let is_eq_ne = matches!(cmpop, ComparisonOp::BinOp(hir::BinOpKind::Eq | hir::BinOpKind::Ne));

    if !is_eq_ne {
        // A suggestion in terms of address equality only makes sense for
        // `==` and `!=`; for other comparisons, just warn.
        return cx.emit_span_lint(
            UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
            e.span,
            UnpredictableFunctionPointerComparisons::Warn,
        );
    }

    let (Some(l_span), Some(r_span)) =
        (l.span.find_ancestor_inside(e.span), r.span.find_ancestor_inside(e.span))
    else {
        return cx.emit_span_lint(
            UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
            e.span,
            UnpredictableFunctionPointerComparisons::Warn,
        );
    };

    let ne = if cmpop == ComparisonOp::BinOp(hir::BinOpKind::Ne) { "!" } else { "" };

    let deref_left = &*"*".repeat(l_ty_refs);
    let deref_right = &*"*".repeat(r_ty_refs);

    let left = e.span.shrink_to_lo().until(l_span.shrink_to_lo());
    let middle = l_span.shrink_to_hi().until(r_span.shrink_to_lo());
    let right = r_span.shrink_to_hi().until(e.span.shrink_to_hi());

    // If the right operand is a function item rather than a function pointer,
    // the suggestion needs a cast to the function pointer type.
    let sugg = if !r_ty.is_fn_ptr() {
        let fn_sig = r_ty.fn_sig(cx.tcx);

        UnpredictableFunctionPointerComparisonsSuggestion::FnAddrEqWithCast {
            ne,
            fn_sig,
            deref_left,
            deref_right,
            left,
            middle,
            right,
        }
    } else {
        UnpredictableFunctionPointerComparisonsSuggestion::FnAddrEq {
            ne,
            deref_left,
            deref_right,
            left,
            middle,
            right,
        }
    };

    cx.emit_span_lint(
        UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
        e.span,
        UnpredictableFunctionPointerComparisons::Suggestion { sugg },
    );
}

impl<'tcx> LateLintPass<'tcx> for TypeLimits {
    fn check_lit(&mut self, cx: &LateContext<'tcx>, hir_id: HirId, lit: hir::Lit, negated: bool) {
        if negated {
            self.negated_expr_id = Some(hir_id);
            self.negated_expr_span = Some(lit.span);
        }
        lint_literal(cx, self, hir_id, lit.span, &lit, negated);
    }

    fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx hir::Expr<'tcx>) {
        match e.kind {
            hir::ExprKind::Unary(hir::UnOp::Neg, expr) => {
                // Propagate negation, unless the negation itself is negated.
                if self.negated_expr_id != Some(e.hir_id) {
                    self.negated_expr_id = Some(expr.hir_id);
                    self.negated_expr_span = Some(e.span);
                }
            }
            hir::ExprKind::Binary(binop, ref l, ref r) => {
                if is_comparison(binop.node) {
                    if !check_limits(cx, binop.node, l, r) {
                        cx.emit_span_lint(UNUSED_COMPARISONS, e.span, UnusedComparisons);
                    } else {
                        lint_nan(cx, e, binop.node, l, r);
                        let cmpop = ComparisonOp::BinOp(binop.node);
                        lint_wide_pointer(cx, e, cmpop, l, r);
                        lint_fn_pointer(cx, e, cmpop, l, r);
                    }
                }
            }
            hir::ExprKind::Call(path, [l, r])
                if let ExprKind::Path(ref qpath) = path.kind
                    && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
                    && let Some(diag_item) = cx.tcx.get_diagnostic_name(def_id)
                    && let Some(cmpop) = diag_item_cmpop(diag_item) =>
            {
                lint_wide_pointer(cx, e, cmpop, l, r);
                lint_fn_pointer(cx, e, cmpop, l, r);
            }
            hir::ExprKind::MethodCall(_, l, [r], _)
                if let Some(def_id) = cx.typeck_results().type_dependent_def_id(e.hir_id)
                    && let Some(diag_item) = cx.tcx.get_diagnostic_name(def_id)
                    && let Some(cmpop) = diag_item_cmpop(diag_item) =>
            {
                lint_wide_pointer(cx, e, cmpop, l, r);
                lint_fn_pointer(cx, e, cmpop, l, r);
            }
            _ => {}
        };

        // Whether the comparison `$expr <binop> v` can be both true and
        // false given that `$expr` is confined to the range `[min, max]`.
        fn is_valid<T: PartialOrd>(binop: hir::BinOpKind, v: T, min: T, max: T) -> bool {
            match binop {
                hir::BinOpKind::Lt => v > min && v <= max,
                hir::BinOpKind::Le => v >= min && v < max,
                hir::BinOpKind::Gt => v >= min && v < max,
                hir::BinOpKind::Ge => v > min && v <= max,
                hir::BinOpKind::Eq | hir::BinOpKind::Ne => v >= min && v <= max,
                _ => bug!(),
            }
        }

        fn rev_binop(binop: hir::BinOpKind) -> hir::BinOpKind {
            match binop {
                hir::BinOpKind::Lt => hir::BinOpKind::Gt,
                hir::BinOpKind::Le => hir::BinOpKind::Ge,
                hir::BinOpKind::Gt => hir::BinOpKind::Lt,
                hir::BinOpKind::Ge => hir::BinOpKind::Le,
                _ => binop,
            }
        }

        fn check_limits(
            cx: &LateContext<'_>,
            binop: hir::BinOpKind,
            l: &hir::Expr<'_>,
            r: &hir::Expr<'_>,
        ) -> bool {
            let (lit, expr, swap) = match (&l.kind, &r.kind) {
                (&hir::ExprKind::Lit(_), _) => (l, r, true),
                (_, &hir::ExprKind::Lit(_)) => (r, l, false),
                _ => return true,
            };
            // Normalize the binop so that the literal is always on the RHS
            // in the comparison.
            let norm_binop = if swap { rev_binop(binop) } else { binop };
            match *cx.typeck_results().node_type(expr.hir_id).kind() {
                ty::Int(int_ty) => {
                    let (min, max) = int_ty_range(int_ty);
                    let lit_val: i128 = match lit.kind {
                        hir::ExprKind::Lit(li) => match li.node {
                            ast::LitKind::Int(
                                v,
                                ast::LitIntType::Signed(_) | ast::LitIntType::Unsuffixed,
                            ) => v.get() as i128,
                            _ => return true,
                        },
                        _ => bug!(),
                    };
                    is_valid(norm_binop, lit_val, min, max)
                }
                ty::Uint(uint_ty) => {
                    let (min, max): (u128, u128) = uint_ty_range(uint_ty);
                    let lit_val: u128 = match lit.kind {
                        hir::ExprKind::Lit(li) => match li.node {
                            ast::LitKind::Int(v, _) => v.get(),
                            _ => return true,
                        },
                        _ => bug!(),
                    };
                    is_valid(norm_binop, lit_val, min, max)
                }
                _ => true,
            }
        }

        fn is_comparison(binop: hir::BinOpKind) -> bool {
            matches!(
                binop,
                hir::BinOpKind::Eq
                    | hir::BinOpKind::Lt
                    | hir::BinOpKind::Le
                    | hir::BinOpKind::Ne
                    | hir::BinOpKind::Ge
                    | hir::BinOpKind::Gt
            )
        }

        fn diag_item_cmpop(diag_item: Symbol) -> Option<ComparisonOp> {
            Some(match diag_item {
                sym::cmp_ord_max => ComparisonOp::Other,
                sym::cmp_ord_min => ComparisonOp::Other,
                sym::ord_cmp_method => ComparisonOp::Other,
                sym::cmp_partialeq_eq => ComparisonOp::BinOp(hir::BinOpKind::Eq),
                sym::cmp_partialeq_ne => ComparisonOp::BinOp(hir::BinOpKind::Ne),
                sym::cmp_partialord_cmp => ComparisonOp::Other,
                sym::cmp_partialord_ge => ComparisonOp::BinOp(hir::BinOpKind::Ge),
                sym::cmp_partialord_gt => ComparisonOp::BinOp(hir::BinOpKind::Gt),
                sym::cmp_partialord_le => ComparisonOp::BinOp(hir::BinOpKind::Le),
                sym::cmp_partialord_lt => ComparisonOp::BinOp(hir::BinOpKind::Lt),
                _ => return None,
            })
        }
    }
}

pub(crate) fn nonnull_optimization_guaranteed<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::AdtDef<'tcx>,
) -> bool {
    tcx.has_attr(def.did(), sym::rustc_nonnull_optimization_guaranteed)
}

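/// `repr(transparent)` structs can have at most one non-1-ZST field; this
/// function returns that field.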
pub(crate) fn transparent_newtype_field<'a, 'tcx>(
    tcx: TyCtxt<'tcx>,
    variant: &'a ty::VariantDef,
) -> Option<&'a ty::FieldDef> {
    let typing_env = ty::TypingEnv::non_body_analysis(tcx, variant.def_id);
    variant.fields.iter().find(|field| {
        let field_ty = tcx.type_of(field.did).instantiate_identity();
        let is_1zst =
            tcx.layout_of(typing_env.as_query_input(field_ty)).is_ok_and(|layout| layout.is_1zst());
        !is_1zst
    })
}

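/// Is the type known to be non-null?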
fn ty_is_known_nonnull<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    ty: Ty<'tcx>,
) -> bool {
    let ty = tcx.try_normalize_erasing_regions(typing_env, ty).unwrap_or(ty);

    match ty.kind() {
        ty::FnPtr(..) => true,
        ty::Ref(..) => true,
        ty::Adt(def, _) if def.is_box() => true,
        ty::Adt(def, args) if def.repr().transparent() && !def.is_union() => {
            let marked_non_null = nonnull_optimization_guaranteed(tcx, *def);

            if marked_non_null {
                return true;
            }

            // `UnsafeCell` and `UnsafePinned` have their niches hidden.
            if def.is_unsafe_cell() || def.is_unsafe_pinned() {
                return false;
            }

            def.variants()
                .iter()
                .filter_map(|variant| transparent_newtype_field(tcx, variant))
                .any(|field| ty_is_known_nonnull(tcx, typing_env, field.ty(tcx, args)))
        }
        ty::Pat(base, pat) => {
            ty_is_known_nonnull(tcx, typing_env, *base)
                || pat_ty_is_known_nonnull(tcx, typing_env, *pat)
        }
        _ => false,
    }
}

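/// Is a value matching this pattern type known to be non-null, e.g. a range
/// pattern that excludes zero?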
fn pat_ty_is_known_nonnull<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    pat: ty::Pattern<'tcx>,
) -> bool {
    Option::unwrap_or_default(
        try {
            match *pat {
                ty::PatternKind::Range { start, end } => {
                    let start = start.try_to_value()?.try_to_bits(tcx, typing_env)?;
                    let end = end.try_to_value()?.try_to_bits(tcx, typing_env)?;

                    // This also works for negative numbers, as we just need
                    // to ensure we aren't wrapping over zero.
                    start > 0 && end >= start
                }
                ty::PatternKind::NotNull => true,
                ty::PatternKind::Or(patterns) => {
                    patterns.iter().all(|pat| pat_ty_is_known_nonnull(tcx, typing_env, pat))
                }
            }
        },
    )
}

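/// Given a non-null scalar (or transparent) type `ty`, returns the nullable
/// version of that type. Returns `None` if `ty` is not a scalar we can
/// handle.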
fn get_nullable_type<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    ty: Ty<'tcx>,
) -> Option<Ty<'tcx>> {
    let ty = tcx.try_normalize_erasing_regions(typing_env, ty).unwrap_or(ty);

    Some(match *ty.kind() {
        ty::Adt(field_def, field_args) => {
            let inner_field_ty = {
                let mut first_non_zst_ty =
                    field_def.variants().iter().filter_map(|v| transparent_newtype_field(tcx, v));
                debug_assert_eq!(
                    first_non_zst_ty.clone().count(),
                    1,
                    "Wrong number of fields for transparent type"
                );
                first_non_zst_ty
                    .next_back()
                    .expect("No non-zst fields in transparent type.")
                    .ty(tcx, field_args)
            };
            return get_nullable_type(tcx, typing_env, inner_field_ty);
        }
        ty::Pat(base, ..) => return get_nullable_type(tcx, typing_env, base),
        ty::Int(_) | ty::Uint(_) | ty::RawPtr(..) => ty,
        // As references are always non-null, the nullable equivalent of
        // `Option<&T>` is the corresponding raw pointer type.
        ty::Ref(_region, ty, mutbl) => Ty::new_ptr(tcx, ty, mutbl),
        // There is no nullable equivalent of Rust's function pointers: an
        // `Option<fn(..) -> _>` must be used to represent one.
        ty::FnPtr(..) => ty,
        ref unhandled => {
            debug!(
                "get_nullable_type: Unhandled scalar kind: {:?} while checking {:?}",
                unhandled, ty
            );
            return None;
        }
    })
}

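/// Determines whether a type is a 1-ZST that can be ignored when looking for
/// an enum's payload during niche-optimization analysis: it must have no
/// fields and must not be `#[non_exhaustive]`.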
fn is_niche_optimization_candidate<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    ty: Ty<'tcx>,
) -> bool {
    if tcx.layout_of(typing_env.as_query_input(ty)).is_ok_and(|layout| !layout.is_1zst()) {
        return false;
    }

    match ty.kind() {
        ty::Adt(ty_def, _) => {
            let non_exhaustive = ty_def.is_variant_list_non_exhaustive();
            let empty = (ty_def.is_struct() && ty_def.non_enum_variant().fields.is_empty())
                || (ty_def.is_enum() && ty_def.variants().is_empty());

            !non_exhaustive && empty
        }
        ty::Tuple(tys) => tys.is_empty(),
        _ => false,
    }
}

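/// Checks whether the given enum is "nullable-pointer-optimizable", i.e.
/// shaped like `Option<T>` where `T` is known to be non-null: if so, returns
/// the type the enum can safely be represented as, otherwise `None`.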
pub(crate) fn repr_nullable_ptr<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    ty: Ty<'tcx>,
) -> Option<Ty<'tcx>> {
    debug!("is_repr_nullable_ptr(tcx, ty = {:?})", ty);
    match ty.kind() {
        ty::Adt(ty_def, args) => {
            let field_ty = match &ty_def.variants().raw[..] {
                [var_one, var_two] => match (&var_one.fields.raw[..], &var_two.fields.raw[..]) {
                    ([], [field]) | ([field], []) => field.ty(tcx, args),
                    ([field1], [field2]) => {
                        let ty1 = field1.ty(tcx, args);
                        let ty2 = field2.ty(tcx, args);

                        if is_niche_optimization_candidate(tcx, typing_env, ty1) {
                            ty2
                        } else if is_niche_optimization_candidate(tcx, typing_env, ty2) {
                            ty1
                        } else {
                            return None;
                        }
                    }
                    _ => return None,
                },
                _ => return None,
            };

            if !ty_is_known_nonnull(tcx, typing_env, field_ty) {
                return None;
            }

            // At this point, the field's type is known to be non-null and the
            // parent enum is Option-like. If the computed size of the field
            // and the enum differ, the non-null optimization isn't being
            // applied somewhere.
            let compute_size_skeleton = |t| SizeSkeleton::compute(t, tcx, typing_env).ok();
            if !compute_size_skeleton(ty)?.same_size(compute_size_skeleton(field_ty)?) {
                bug!("improper_ctypes: Option nonnull optimization not applied?");
            }

            // Return the nullable type this Option-like enum can be safely
            // represented with.
            let field_ty_layout = tcx.layout_of(typing_env.as_query_input(field_ty));
            if field_ty_layout.is_err() && !field_ty.has_non_region_param() {
                bug!("should be able to compute the layout of non-polymorphic type");
            }

            let field_ty_abi = &field_ty_layout.ok()?.backend_repr;
            if let BackendRepr::Scalar(field_ty_scalar) = field_ty_abi {
                match field_ty_scalar.valid_range(&tcx) {
                    WrappingRange { start: 0, end }
                        if end == field_ty_scalar.size(&tcx).unsigned_int_max() - 1 =>
                    {
                        return Some(get_nullable_type(tcx, typing_env, field_ty).unwrap());
                    }
                    WrappingRange { start: 1, .. } => {
                        return Some(get_nullable_type(tcx, typing_env, field_ty).unwrap());
                    }
                    WrappingRange { start, end } => {
                        unreachable!("Unhandled start and end range: ({}, {})", start, end)
                    }
                };
            }
            None
        }
        ty::Pat(base, pat) => get_nullable_type_from_pat(tcx, typing_env, *base, *pat),
        _ => None,
    }
}

fn get_nullable_type_from_pat<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    base: Ty<'tcx>,
    pat: ty::Pattern<'tcx>,
) -> Option<Ty<'tcx>> {
    match *pat {
        ty::PatternKind::NotNull | ty::PatternKind::Range { .. } => {
            get_nullable_type(tcx, typing_env, base)
        }
        ty::PatternKind::Or(patterns) => {
            let first = get_nullable_type_from_pat(tcx, typing_env, base, patterns[0])?;
            for &pat in &patterns[1..] {
                assert_eq!(first, get_nullable_type_from_pat(tcx, typing_env, base, pat)?);
            }
            Some(first)
        }
    }
}

declare_lint_pass!(VariantSizeDifferences => [VARIANT_SIZE_DIFFERENCES]);

impl<'tcx> LateLintPass<'tcx> for VariantSizeDifferences {
    fn check_item(&mut self, cx: &LateContext<'_>, it: &hir::Item<'_>) {
        if let hir::ItemKind::Enum(_, _, ref enum_definition) = it.kind {
            let t = cx.tcx.type_of(it.owner_id).instantiate_identity();
            let ty = cx.tcx.erase_and_anonymize_regions(t);
            let Ok(layout) = cx.layout_of(ty) else { return };
            let Variants::Multiple { tag_encoding: TagEncoding::Direct, tag, variants, .. } =
                &layout.variants
            else {
                return;
            };

            let tag_size = tag.size(&cx.tcx).bytes();

            debug!(
                "enum `{}` is {} bytes large with layout:\n{:#?}",
                t,
                layout.size.bytes(),
                layout
            );

            let (largest, slargest, largest_index) = iter::zip(enum_definition.variants, variants)
                .map(|(variant, variant_layout)| {
                    // Subtract the size of the enum tag.
                    let bytes = variant_layout.size.bytes().saturating_sub(tag_size);

                    debug!("- variant `{}` is {} bytes large", variant.ident, bytes);
                    bytes
                })
                .enumerate()
                .fold((0, 0, 0), |(l, s, li), (idx, size)| {
                    if size > l {
                        (size, l, idx)
                    } else if size > s {
                        (l, size, li)
                    } else {
                        (l, s, li)
                    }
                });

            // We only warn if the largest variant is more than three times as
            // large as the second-largest.
            if largest > slargest * 3 && slargest > 0 {
                cx.emit_span_lint(
                    VARIANT_SIZE_DIFFERENCES,
                    enum_definition.variants[largest_index].span,
                    VariantSizeDifferencesDiag { largest },
                );
            }
        }
    }
}

declare_lint! {
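    /// The `invalid_atomic_ordering` lint detects atomic orderings that are
    /// invalid for the operation they are used with, such as an atomic
    /// `load` with `Release` ordering, which panics at runtime.
    ///
    /// An example of code that triggers this lint:
    ///
    /// ```rust,compile_fail
    /// # use core::sync::atomic::{AtomicU8, Ordering};
    /// let atom = AtomicU8::new(0);
    /// let _ = atom.load(Ordering::Release);
    /// ```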
    INVALID_ATOMIC_ORDERING,
    Deny,
    "usage of invalid atomic ordering in atomic operations and memory fences"
}

declare_lint_pass!(InvalidAtomicOrdering => [INVALID_ATOMIC_ORDERING]);

impl InvalidAtomicOrdering {
    fn inherent_atomic_method_call<'hir>(
        cx: &LateContext<'_>,
        expr: &Expr<'hir>,
        recognized_names: &[Symbol], // used for fast path calculation
    ) -> Option<(Symbol, &'hir [Expr<'hir>])> {
        const ATOMIC_TYPES: &[Symbol] = &[
            sym::AtomicBool,
            sym::AtomicPtr,
            sym::AtomicUsize,
            sym::AtomicU8,
            sym::AtomicU16,
            sym::AtomicU32,
            sym::AtomicU64,
            sym::AtomicU128,
            sym::AtomicIsize,
            sym::AtomicI8,
            sym::AtomicI16,
            sym::AtomicI32,
            sym::AtomicI64,
            sym::AtomicI128,
        ];
        if let ExprKind::MethodCall(method_path, _, args, _) = &expr.kind
            && recognized_names.contains(&method_path.ident.name)
            && let Some(m_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
            && let Some(impl_did) = cx.tcx.inherent_impl_of_assoc(m_def_id)
            && let Some(adt) = cx.tcx.type_of(impl_did).instantiate_identity().ty_adt_def()
            // Skip extension traits; only lint the atomic types from the
            // standard library's `atomic` module.
            && let parent = cx.tcx.parent(adt.did())
            && cx.tcx.is_diagnostic_item(sym::atomic_mod, parent)
            && ATOMIC_TYPES.contains(&cx.tcx.item_name(adt.did()))
        {
            return Some((method_path.ident.name, args));
        }
        None
    }

    fn match_ordering(cx: &LateContext<'_>, ord_arg: &Expr<'_>) -> Option<Symbol> {
        let ExprKind::Path(ref ord_qpath) = ord_arg.kind else { return None };
        let did = cx.qpath_res(ord_qpath, ord_arg.hir_id).opt_def_id()?;
        let tcx = cx.tcx;
        let atomic_ordering = tcx.get_diagnostic_item(sym::Ordering);
        let name = tcx.item_name(did);
        let parent = tcx.parent(did);
        [sym::Relaxed, sym::Release, sym::Acquire, sym::AcqRel, sym::SeqCst].into_iter().find(
            |&ordering| {
                name == ordering
                    && (Some(parent) == atomic_ordering
                        // needed in case this is a ctor, not a variant
                        || tcx.opt_parent(parent) == atomic_ordering)
            },
        )
    }

    fn check_atomic_load_store(cx: &LateContext<'_>, expr: &Expr<'_>) {
        if let Some((method, args)) =
            Self::inherent_atomic_method_call(cx, expr, &[sym::load, sym::store])
            && let Some((ordering_arg, invalid_ordering)) = match method {
                sym::load => Some((&args[0], sym::Release)),
                sym::store => Some((&args[1], sym::Acquire)),
                _ => None,
            }
            && let Some(ordering) = Self::match_ordering(cx, ordering_arg)
            && (ordering == invalid_ordering || ordering == sym::AcqRel)
        {
            if method == sym::load {
                cx.emit_span_lint(INVALID_ATOMIC_ORDERING, ordering_arg.span, AtomicOrderingLoad);
            } else {
                cx.emit_span_lint(INVALID_ATOMIC_ORDERING, ordering_arg.span, AtomicOrderingStore);
            };
        }
    }

    fn check_memory_fence(cx: &LateContext<'_>, expr: &Expr<'_>) {
        if let ExprKind::Call(func, args) = expr.kind
            && let ExprKind::Path(ref func_qpath) = func.kind
            && let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id()
            && matches!(cx.tcx.get_diagnostic_name(def_id), Some(sym::fence | sym::compiler_fence))
            && Self::match_ordering(cx, &args[0]) == Some(sym::Relaxed)
        {
            cx.emit_span_lint(INVALID_ATOMIC_ORDERING, args[0].span, AtomicOrderingFence);
        }
    }

    fn check_atomic_compare_exchange(cx: &LateContext<'_>, expr: &Expr<'_>) {
        let Some((method, args)) = Self::inherent_atomic_method_call(
            cx,
            expr,
            &[sym::fetch_update, sym::compare_exchange, sym::compare_exchange_weak],
        ) else {
            return;
        };

        let fail_order_arg = match method {
            sym::fetch_update => &args[1],
            sym::compare_exchange | sym::compare_exchange_weak => &args[3],
            _ => return,
        };

        let Some(fail_ordering) = Self::match_ordering(cx, fail_order_arg) else { return };

        if matches!(fail_ordering, sym::Release | sym::AcqRel) {
            cx.emit_span_lint(
                INVALID_ATOMIC_ORDERING,
                fail_order_arg.span,
                InvalidAtomicOrderingDiag { method, fail_order_arg_span: fail_order_arg.span },
            );
        }
    }
}

impl<'tcx> LateLintPass<'tcx> for InvalidAtomicOrdering {
    fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
        Self::check_atomic_load_store(cx, expr);
        Self::check_memory_fence(cx, expr);
        Self::check_atomic_compare_exchange(cx, expr);
    }
}