use std::iter;

use rustc_abi::{BackendRepr, TagEncoding, Variants, WrappingRange};
use rustc_hir::{Expr, ExprKind, HirId, LangItem};
use rustc_middle::bug;
use rustc_middle::ty::layout::{LayoutOf, SizeSkeleton};
use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt};
use rustc_session::{declare_lint, declare_lint_pass, impl_lint_pass};
use rustc_span::{Span, Symbol, sym};
use tracing::debug;
use {rustc_ast as ast, rustc_hir as hir};

mod improper_ctypes;
pub(crate) use improper_ctypes::ImproperCTypesLint;

use crate::lints::{
    AmbiguousWidePointerComparisons, AmbiguousWidePointerComparisonsAddrMetadataSuggestion,
    AmbiguousWidePointerComparisonsAddrSuggestion, AmbiguousWidePointerComparisonsCastSuggestion,
    AmbiguousWidePointerComparisonsExpectSuggestion, AtomicOrderingFence, AtomicOrderingLoad,
    AtomicOrderingStore, InvalidAtomicOrderingDiag, InvalidNanComparisons,
    InvalidNanComparisonsSuggestion, UnpredictableFunctionPointerComparisons,
    UnpredictableFunctionPointerComparisonsSuggestion, UnusedComparisons,
    VariantSizeDifferencesDiag,
};
use crate::{LateContext, LateLintPass, LintContext};

mod literal;

use literal::{int_ty_range, lint_literal, uint_ty_range};

declare_lint! {
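    /// The `unused_comparisons` lint detects comparisons made useless by
    /// limits of the types involved.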
    UNUSED_COMPARISONS,
    Warn,
    "comparisons made useless by limits of the types involved"
}

declare_lint! {
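    /// The `overflowing_literals` lint detects literals out of range for
    /// their type.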
    OVERFLOWING_LITERALS,
    Deny,
    "literal out of range for its type"
}

declare_lint! {
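    /// The `variant_size_differences` lint detects enums with widely varying
    /// variant sizes.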
    VARIANT_SIZE_DIFFERENCES,
    Allow,
    "detects enums with widely varying variant sizes"
}

declare_lint! {
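    /// The `invalid_nan_comparisons` lint detects comparisons against `NaN`,
    /// which always evaluate to `false` for `==`, `<`, `<=`, `>`, `>=` and to
    /// `true` for `!=`.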
    INVALID_NAN_COMPARISONS,
    Warn,
    "detects invalid floating point NaN comparisons"
}

declare_lint! {
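    /// The `ambiguous_wide_pointer_comparisons` lint detects comparisons of
    /// wide pointers, where it is ambiguous whether only the address or also
    /// the metadata takes part in the comparison.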
    AMBIGUOUS_WIDE_POINTER_COMPARISONS,
    Warn,
    "detects ambiguous wide pointer comparisons"
}

declare_lint! {
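    /// The `unpredictable_function_pointer_comparisons` lint detects
    /// comparisons of function pointers, whose outcome is not guaranteed to
    /// be consistent, since the same function may be duplicated or merged
    /// during codegen.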
    UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
    Warn,
    "detects unpredictable function pointer comparisons",
    report_in_external_macro
}

#[derive(Copy, Clone, Default)]
pub(crate) struct TypeLimits {
    /// Id of the last visited negated expression
    negated_expr_id: Option<hir::HirId>,
    /// Span of the last visited negated expression
    negated_expr_span: Option<Span>,
}

impl_lint_pass!(TypeLimits => [
    UNUSED_COMPARISONS,
    OVERFLOWING_LITERALS,
    INVALID_NAN_COMPARISONS,
    AMBIGUOUS_WIDE_POINTER_COMPARISONS,
    UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS
]);

impl TypeLimits {
    pub(crate) fn new() -> TypeLimits {
        TypeLimits { negated_expr_id: None, negated_expr_span: None }
    }
}

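/// Lints comparisons against the `NAN` constants (`f16::NAN`, `f32::NAN`,
/// etc.), which never behave as intended: `== NAN` is always `false` and
/// `!= NAN` is always `true`.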
fn lint_nan<'tcx>(
    cx: &LateContext<'tcx>,
    e: &'tcx hir::Expr<'tcx>,
    binop: hir::BinOpKind,
    l: &'tcx hir::Expr<'tcx>,
    r: &'tcx hir::Expr<'tcx>,
) {
    fn is_nan(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> bool {
        let expr = expr.peel_blocks().peel_borrows();
        match expr.kind {
            ExprKind::Path(qpath) => {
                let Some(def_id) = cx.typeck_results().qpath_res(&qpath, expr.hir_id).opt_def_id()
                else {
                    return false;
                };

                matches!(
                    cx.tcx.get_diagnostic_name(def_id),
                    Some(sym::f16_nan | sym::f32_nan | sym::f64_nan | sym::f128_nan)
                )
            }
            _ => false,
        }
    }

    fn eq_ne(
        e: &hir::Expr<'_>,
        l: &hir::Expr<'_>,
        r: &hir::Expr<'_>,
        f: impl FnOnce(Span, Span) -> InvalidNanComparisonsSuggestion,
    ) -> InvalidNanComparisons {
        let suggestion = if let Some(l_span) = l.span.find_ancestor_inside(e.span)
            && let Some(r_span) = r.span.find_ancestor_inside(e.span)
        {
            f(l_span, r_span)
        } else {
            InvalidNanComparisonsSuggestion::Spanless
        };

        InvalidNanComparisons::EqNe { suggestion }
    }

    let lint = match binop {
        hir::BinOpKind::Eq | hir::BinOpKind::Ne if is_nan(cx, l) => {
            eq_ne(e, l, r, |l_span, r_span| InvalidNanComparisonsSuggestion::Spanful {
                nan_plus_binop: l_span.until(r_span),
                float: r_span.shrink_to_hi(),
                neg: (binop == hir::BinOpKind::Ne).then(|| r_span.shrink_to_lo()),
            })
        }
        hir::BinOpKind::Eq | hir::BinOpKind::Ne if is_nan(cx, r) => {
            eq_ne(e, l, r, |l_span, r_span| InvalidNanComparisonsSuggestion::Spanful {
                nan_plus_binop: l_span.shrink_to_hi().to(r_span),
                float: l_span.shrink_to_hi(),
                neg: (binop == hir::BinOpKind::Ne).then(|| l_span.shrink_to_lo()),
            })
        }
        hir::BinOpKind::Lt | hir::BinOpKind::Le | hir::BinOpKind::Gt | hir::BinOpKind::Ge
            if is_nan(cx, l) || is_nan(cx, r) =>
        {
            InvalidNanComparisons::LtLeGtGe
        }
        _ => return,
    };

    cx.emit_span_lint(INVALID_NAN_COMPARISONS, e.span, lint);
}

#[derive(Debug, PartialEq, Copy, Clone)]
enum ComparisonOp {
    BinOp(hir::BinOpKind),
    Other,
}

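/// Lints ambiguous comparisons of wide pointers (behind any number of
/// references, as raw pointers, or as `NonNull`), where the role of the
/// metadata in the comparison is unclear.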
fn lint_wide_pointer<'tcx>(
    cx: &LateContext<'tcx>,
    e: &'tcx hir::Expr<'tcx>,
    cmpop: ComparisonOp,
    l: &'tcx hir::Expr<'tcx>,
    r: &'tcx hir::Expr<'tcx>,
) {
    // Returns the number of references peeled, the modifiers needed to reach
    // the raw pointer (e.g. `.as_ptr()` for `NonNull`), and whether the
    // pointee is a `dyn` trait object.
    let ptr_unsized = |mut ty: Ty<'tcx>| -> Option<(usize, String, bool)> {
        // Peel any number of references, counting them so the suggestion can
        // insert the right number of derefs.
        let mut refs = 0;
        while let ty::Ref(_, inner_ty, _) = ty.kind() {
            ty = *inner_ty;
            refs += 1;
        }

        // Then peel the pointer type itself: either a raw pointer, or a
        // `NonNull` (which needs an `.as_ptr()` call in the suggestion).
        let mut modifiers = String::new();
        ty = match ty.kind() {
            ty::RawPtr(ty, _) => *ty,
            ty::Adt(def, args) if cx.tcx.is_diagnostic_item(sym::NonNull, def.did()) => {
                modifiers.push_str(".as_ptr()");
                args.type_at(0)
            }
            _ => return None,
        };

        (!ty.is_sized(cx.tcx, cx.typing_env()))
            .then(|| (refs, modifiers, matches!(ty.kind(), ty::Dynamic(_, _, ty::Dyn))))
    };

    let l = l.peel_borrows();
    let r = r.peel_borrows();

    let Some(l_ty) = cx.typeck_results().expr_ty_opt(l) else {
        return;
    };
    let Some(r_ty) = cx.typeck_results().expr_ty_opt(r) else {
        return;
    };

    let Some((l_ty_refs, l_modifiers, l_inner_ty_is_dyn)) = ptr_unsized(l_ty) else {
        return;
    };
    let Some((r_ty_refs, r_modifiers, r_inner_ty_is_dyn)) = ptr_unsized(r_ty) else {
        return;
    };

    let (Some(l_span), Some(r_span)) =
        (l.span.find_ancestor_inside(e.span), r.span.find_ancestor_inside(e.span))
    else {
        return cx.emit_span_lint(
            AMBIGUOUS_WIDE_POINTER_COMPARISONS,
            e.span,
            AmbiguousWidePointerComparisons::Spanless,
        );
    };

    let ne = if cmpop == ComparisonOp::BinOp(hir::BinOpKind::Ne) { "!" } else { "" };
    let is_eq_ne = matches!(cmpop, ComparisonOp::BinOp(hir::BinOpKind::Eq | hir::BinOpKind::Ne));
    let is_dyn_comparison = l_inner_ty_is_dyn && r_inner_ty_is_dyn;
    let via_method_call = matches!(&e.kind, ExprKind::MethodCall(..) | ExprKind::Call(..));

    let left = e.span.shrink_to_lo().until(l_span.shrink_to_lo());
    let middle = l_span.shrink_to_hi().until(r_span.shrink_to_lo());
    let right = r_span.shrink_to_hi().until(e.span.shrink_to_hi());

    let deref_left = &*"*".repeat(l_ty_refs);
    let deref_right = &*"*".repeat(r_ty_refs);

    let l_modifiers = &*l_modifiers;
    let r_modifiers = &*r_modifiers;

    cx.emit_span_lint(
        AMBIGUOUS_WIDE_POINTER_COMPARISONS,
        e.span,
        if is_eq_ne {
            AmbiguousWidePointerComparisons::SpanfulEq {
                addr_metadata_suggestion: (!is_dyn_comparison).then(|| {
                    AmbiguousWidePointerComparisonsAddrMetadataSuggestion {
                        ne,
                        deref_left,
                        deref_right,
                        l_modifiers,
                        r_modifiers,
                        left,
                        middle,
                        right,
                    }
                }),
                addr_suggestion: AmbiguousWidePointerComparisonsAddrSuggestion {
                    ne,
                    deref_left,
                    deref_right,
                    l_modifiers,
                    r_modifiers,
                    left,
                    middle,
                    right,
                },
            }
        } else {
            AmbiguousWidePointerComparisons::SpanfulCmp {
                cast_suggestion: AmbiguousWidePointerComparisonsCastSuggestion {
                    deref_left,
                    deref_right,
                    l_modifiers,
                    r_modifiers,
                    paren_left: if l_ty_refs != 0 { ")" } else { "" },
                    paren_right: if r_ty_refs != 0 { ")" } else { "" },
                    left_before: (l_ty_refs != 0).then_some(l_span.shrink_to_lo()),
                    left_after: l_span.shrink_to_hi(),
                    right_before: (r_ty_refs != 0).then_some(r_span.shrink_to_lo()),
                    right_after: r_span.shrink_to_hi(),
                },
                expect_suggestion: AmbiguousWidePointerComparisonsExpectSuggestion {
                    paren_left: if via_method_call { "" } else { "(" },
                    paren_right: if via_method_call { "" } else { ")" },
                    before: e.span.shrink_to_lo(),
                    after: e.span.shrink_to_hi(),
                },
            }
        },
    );
}

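/// Lints comparisons of function pointers (or `Option`s of them), which are
/// unpredictable, and suggests an explicit address comparison where possible.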
fn lint_fn_pointer<'tcx>(
    cx: &LateContext<'tcx>,
    e: &'tcx hir::Expr<'tcx>,
    cmpop: ComparisonOp,
    l: &'tcx hir::Expr<'tcx>,
    r: &'tcx hir::Expr<'tcx>,
) {
    let peel_refs = |mut ty: Ty<'tcx>| -> (Ty<'tcx>, usize) {
        let mut refs = 0;

        while let ty::Ref(_, inner_ty, _) = ty.kind() {
            ty = *inner_ty;
            refs += 1;
        }

        (ty, refs)
    };

    let l = l.peel_borrows();
    let r = r.peel_borrows();

    let Some(l_ty) = cx.typeck_results().expr_ty_opt(l) else { return };
    let Some(r_ty) = cx.typeck_results().expr_ty_opt(r) else { return };

    let (l_ty, l_ty_refs) = peel_refs(l_ty);
    let (r_ty, r_ty_refs) = peel_refs(r_ty);

    if l_ty.is_fn() && r_ty.is_fn() {
        // Both operands are function items or pointers; fall through to the
        // suggestion logic below.
    } else if let ty::Adt(l_def, l_args) = l_ty.kind()
        && let ty::Adt(r_def, r_args) = r_ty.kind()
        && cx.tcx.is_lang_item(l_def.did(), LangItem::Option)
        && cx.tcx.is_lang_item(r_def.did(), LangItem::Option)
        && let Some(l_some_arg) = l_args.get(0)
        && let Some(r_some_arg) = r_args.get(0)
        && l_some_arg.expect_ty().is_fn()
        && r_some_arg.expect_ty().is_fn()
    {
        // `Option<fn ptr>` comparisons have no useful suggestion; just warn.
        return cx.emit_span_lint(
            UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
            e.span,
            UnpredictableFunctionPointerComparisons::Warn,
        );
    } else {
        // Not a function pointer comparison.
        return;
    }

    let is_eq_ne = matches!(cmpop, ComparisonOp::BinOp(hir::BinOpKind::Eq | hir::BinOpKind::Ne));

    if !is_eq_ne {
        // A suggestion can only be given for `==` and `!=`.
        return cx.emit_span_lint(
            UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
            e.span,
            UnpredictableFunctionPointerComparisons::Warn,
        );
    }

    let (Some(l_span), Some(r_span)) =
        (l.span.find_ancestor_inside(e.span), r.span.find_ancestor_inside(e.span))
    else {
        return cx.emit_span_lint(
            UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
            e.span,
            UnpredictableFunctionPointerComparisons::Warn,
        );
    };

    let ne = if cmpop == ComparisonOp::BinOp(hir::BinOpKind::Ne) { "!" } else { "" };

    let deref_left = &*"*".repeat(l_ty_refs);
    let deref_right = &*"*".repeat(r_ty_refs);

    let left = e.span.shrink_to_lo().until(l_span.shrink_to_lo());
    let middle = l_span.shrink_to_hi().until(r_span.shrink_to_lo());
    let right = r_span.shrink_to_hi().until(e.span.shrink_to_hi());

    // If the right operand is not already a function pointer (e.g. it is a
    // zero-sized function item), the suggestion needs a cast to its function
    // pointer type.
    let sugg = if !r_ty.is_fn_ptr() {
        let fn_sig = r_ty.fn_sig(cx.tcx);

        UnpredictableFunctionPointerComparisonsSuggestion::FnAddrEqWithCast {
            ne,
            fn_sig,
            deref_left,
            deref_right,
            left,
            middle,
            right,
        }
    } else {
        UnpredictableFunctionPointerComparisonsSuggestion::FnAddrEq {
            ne,
            deref_left,
            deref_right,
            left,
            middle,
            right,
        }
    };

    cx.emit_span_lint(
        UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
        e.span,
        UnpredictableFunctionPointerComparisons::Suggestion { sugg },
    );
}

impl<'tcx> LateLintPass<'tcx> for TypeLimits {
    fn check_lit(&mut self, cx: &LateContext<'tcx>, hir_id: HirId, lit: hir::Lit, negated: bool) {
        if negated {
            self.negated_expr_id = Some(hir_id);
            self.negated_expr_span = Some(lit.span);
        }
        lint_literal(cx, self, hir_id, lit.span, &lit, negated);
    }

    fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx hir::Expr<'tcx>) {
        match e.kind {
            hir::ExprKind::Unary(hir::UnOp::Neg, expr) => {
                // Propagate negation, unless the negation is itself negated.
                if self.negated_expr_id != Some(e.hir_id) {
                    self.negated_expr_id = Some(expr.hir_id);
                    self.negated_expr_span = Some(e.span);
                }
            }
            hir::ExprKind::Binary(binop, ref l, ref r) => {
                if is_comparison(binop.node) {
                    if !check_limits(cx, binop.node, l, r) {
                        cx.emit_span_lint(UNUSED_COMPARISONS, e.span, UnusedComparisons);
                    } else {
                        lint_nan(cx, e, binop.node, l, r);
                        let cmpop = ComparisonOp::BinOp(binop.node);
                        lint_wide_pointer(cx, e, cmpop, l, r);
                        lint_fn_pointer(cx, e, cmpop, l, r);
                    }
                }
            }
            hir::ExprKind::Call(path, [l, r])
                if let ExprKind::Path(ref qpath) = path.kind
                    && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
                    && let Some(diag_item) = cx.tcx.get_diagnostic_name(def_id)
                    && let Some(cmpop) = diag_item_cmpop(diag_item) =>
            {
                lint_wide_pointer(cx, e, cmpop, l, r);
                lint_fn_pointer(cx, e, cmpop, l, r);
            }
            hir::ExprKind::MethodCall(_, l, [r], _)
                if let Some(def_id) = cx.typeck_results().type_dependent_def_id(e.hir_id)
                    && let Some(diag_item) = cx.tcx.get_diagnostic_name(def_id)
                    && let Some(cmpop) = diag_item_cmpop(diag_item) =>
            {
                lint_wide_pointer(cx, e, cmpop, l, r);
                lint_fn_pointer(cx, e, cmpop, l, r);
            }
            _ => {}
        };

        fn is_valid<T: PartialOrd>(binop: hir::BinOpKind, v: T, min: T, max: T) -> bool {
            match binop {
                hir::BinOpKind::Lt => v > min && v <= max,
                hir::BinOpKind::Le => v >= min && v < max,
                hir::BinOpKind::Gt => v >= min && v < max,
                hir::BinOpKind::Ge => v > min && v <= max,
                hir::BinOpKind::Eq | hir::BinOpKind::Ne => v >= min && v <= max,
                _ => bug!(),
            }
        }

        fn rev_binop(binop: hir::BinOpKind) -> hir::BinOpKind {
            match binop {
                hir::BinOpKind::Lt => hir::BinOpKind::Gt,
                hir::BinOpKind::Le => hir::BinOpKind::Ge,
                hir::BinOpKind::Gt => hir::BinOpKind::Lt,
                hir::BinOpKind::Ge => hir::BinOpKind::Le,
                _ => binop,
            }
        }

        fn check_limits(
            cx: &LateContext<'_>,
            binop: hir::BinOpKind,
            l: &hir::Expr<'_>,
            r: &hir::Expr<'_>,
        ) -> bool {
            let (lit, expr, swap) = match (&l.kind, &r.kind) {
                (&hir::ExprKind::Lit(_), _) => (l, r, true),
                (_, &hir::ExprKind::Lit(_)) => (r, l, false),
                _ => return true,
            };
            // Normalize the binop so that the literal is always on the RHS in
            // the comparison.
            let norm_binop = if swap { rev_binop(binop) } else { binop };
            match *cx.typeck_results().node_type(expr.hir_id).kind() {
                ty::Int(int_ty) => {
                    let (min, max) = int_ty_range(int_ty);
                    let lit_val: i128 = match lit.kind {
                        hir::ExprKind::Lit(li) => match li.node {
                            ast::LitKind::Int(
                                v,
                                ast::LitIntType::Signed(_) | ast::LitIntType::Unsuffixed,
                            ) => v.get() as i128,
                            _ => return true,
                        },
                        _ => bug!(),
                    };
                    is_valid(norm_binop, lit_val, min, max)
                }
                ty::Uint(uint_ty) => {
                    let (min, max): (u128, u128) = uint_ty_range(uint_ty);
                    let lit_val: u128 = match lit.kind {
                        hir::ExprKind::Lit(li) => match li.node {
                            ast::LitKind::Int(v, _) => v.get(),
                            _ => return true,
                        },
                        _ => bug!(),
                    };
                    is_valid(norm_binop, lit_val, min, max)
                }
                _ => true,
            }
        }

        fn is_comparison(binop: hir::BinOpKind) -> bool {
            matches!(
                binop,
                hir::BinOpKind::Eq
                    | hir::BinOpKind::Lt
                    | hir::BinOpKind::Le
                    | hir::BinOpKind::Ne
                    | hir::BinOpKind::Ge
                    | hir::BinOpKind::Gt
            )
        }

        fn diag_item_cmpop(diag_item: Symbol) -> Option<ComparisonOp> {
            Some(match diag_item {
                sym::cmp_ord_max => ComparisonOp::Other,
                sym::cmp_ord_min => ComparisonOp::Other,
                sym::ord_cmp_method => ComparisonOp::Other,
                sym::cmp_partialeq_eq => ComparisonOp::BinOp(hir::BinOpKind::Eq),
                sym::cmp_partialeq_ne => ComparisonOp::BinOp(hir::BinOpKind::Ne),
                sym::cmp_partialord_cmp => ComparisonOp::Other,
                sym::cmp_partialord_ge => ComparisonOp::BinOp(hir::BinOpKind::Ge),
                sym::cmp_partialord_gt => ComparisonOp::BinOp(hir::BinOpKind::Gt),
                sym::cmp_partialord_le => ComparisonOp::BinOp(hir::BinOpKind::Le),
                sym::cmp_partialord_lt => ComparisonOp::BinOp(hir::BinOpKind::Lt),
                _ => return None,
            })
        }
    }
}

pub(crate) fn nonnull_optimization_guaranteed<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::AdtDef<'tcx>,
) -> bool {
    tcx.has_attr(def.did(), sym::rustc_nonnull_optimization_guaranteed)
}

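/// Returns the field of a `repr(transparent)` type that is not a 1-ZST, if
/// there is one.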
pub(crate) fn transparent_newtype_field<'a, 'tcx>(
    tcx: TyCtxt<'tcx>,
    variant: &'a ty::VariantDef,
) -> Option<&'a ty::FieldDef> {
    let typing_env = ty::TypingEnv::non_body_analysis(tcx, variant.def_id);
    variant.fields.iter().find(|field| {
        let field_ty = tcx.type_of(field.did).instantiate_identity();
        let is_1zst =
            tcx.layout_of(typing_env.as_query_input(field_ty)).is_ok_and(|layout| layout.is_1zst());
        !is_1zst
    })
}

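/// Is this type known to be non-null?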
fn ty_is_known_nonnull<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    ty: Ty<'tcx>,
) -> bool {
    let ty = tcx.try_normalize_erasing_regions(typing_env, ty).unwrap_or(ty);

    match ty.kind() {
        ty::FnPtr(..) => true,
        ty::Ref(..) => true,
        ty::Adt(def, _) if def.is_box() => true,
        ty::Adt(def, args) if def.repr().transparent() && !def.is_union() => {
            let marked_non_null = nonnull_optimization_guaranteed(tcx, *def);

            if marked_non_null {
                return true;
            }

            // `UnsafeCell` and `UnsafePinned` hide their niches, so their
            // contents cannot be assumed to be non-null.
            if def.is_unsafe_cell() || def.is_unsafe_pinned() {
                return false;
            }

            def.variants()
                .iter()
                .filter_map(|variant| transparent_newtype_field(tcx, variant))
                .any(|field| ty_is_known_nonnull(tcx, typing_env, field.ty(tcx, args)))
        }
        ty::Pat(base, pat) => {
            ty_is_known_nonnull(tcx, typing_env, *base)
                || pat_ty_is_known_nonnull(tcx, typing_env, *pat)
        }
        _ => false,
    }
}

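/// Is this pattern known to exclude zero, so that a pattern type carrying it
/// is known to be non-null?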
fn pat_ty_is_known_nonnull<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    pat: ty::Pattern<'tcx>,
) -> bool {
    Option::unwrap_or_default(
        try {
            match *pat {
                ty::PatternKind::Range { start, end } => {
                    let start = start.try_to_value()?.try_to_bits(tcx, typing_env)?;
                    let end = end.try_to_value()?.try_to_bits(tcx, typing_env)?;

                    // In the unsigned bit representation, a non-wrapping range
                    // that starts above zero can never contain the null value.
                    start > 0 && end >= start
                }
                ty::PatternKind::Or(patterns) => {
                    patterns.iter().all(|pat| pat_ty_is_known_nonnull(tcx, typing_env, pat))
                }
            }
        },
    )
}

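/// Given a non-null (scalar or transparent wrapper) type, returns the
/// nullable version of that type, or `None` for unhandled kinds.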
fn get_nullable_type<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    ty: Ty<'tcx>,
) -> Option<Ty<'tcx>> {
    let ty = tcx.try_normalize_erasing_regions(typing_env, ty).unwrap_or(ty);

    Some(match *ty.kind() {
        ty::Adt(field_def, field_args) => {
            let inner_field_ty = {
                let mut first_non_zst_ty =
                    field_def.variants().iter().filter_map(|v| transparent_newtype_field(tcx, v));
                debug_assert_eq!(
                    first_non_zst_ty.clone().count(),
                    1,
                    "Wrong number of fields for transparent type"
                );
                first_non_zst_ty
                    .next_back()
                    .expect("No non-zst fields in transparent type.")
                    .ty(tcx, field_args)
            };
            return get_nullable_type(tcx, typing_env, inner_field_ty);
        }
        ty::Pat(base, ..) => return get_nullable_type(tcx, typing_env, base),
        ty::Int(_) | ty::Uint(_) | ty::RawPtr(..) => ty,
        // As references are always non-null, the nullable equivalent is the
        // corresponding raw pointer.
        ty::Ref(_region, ty, mutbl) => Ty::new_ptr(tcx, ty, mutbl),
        // There is no nullable equivalent of a function pointer; it has to be
        // wrapped in an `Option` instead.
        ty::FnPtr(..) => ty,
        ref unhandled => {
            debug!(
                "get_nullable_type: Unhandled scalar kind: {:?} while checking {:?}",
                unhandled, ty
            );
            return None;
        }
    })
}

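/// Determines whether a 1-ZST type can be "absorbed" by a niche optimization:
/// it must be an empty struct or enum that is not `#[non_exhaustive]`, or an
/// empty tuple.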
fn is_niche_optimization_candidate<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    ty: Ty<'tcx>,
) -> bool {
    if tcx.layout_of(typing_env.as_query_input(ty)).is_ok_and(|layout| !layout.is_1zst()) {
        return false;
    }

    match ty.kind() {
        ty::Adt(ty_def, _) => {
            let non_exhaustive = ty_def.is_variant_list_non_exhaustive();
            let empty = (ty_def.is_struct() && ty_def.all_fields().next().is_none())
                || (ty_def.is_enum() && ty_def.variants().is_empty());

            !non_exhaustive && empty
        }
        ty::Tuple(tys) => tys.is_empty(),
        _ => false,
    }
}

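/// Checks if the enum is eligible for the "nullable pointer optimization",
/// and if so, returns the type it can safely be represented as; otherwise
/// returns `None`.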
pub(crate) fn repr_nullable_ptr<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    ty: Ty<'tcx>,
) -> Option<Ty<'tcx>> {
    debug!("repr_nullable_ptr(tcx, ty = {:?})", ty);
    match ty.kind() {
        ty::Adt(ty_def, args) => {
            let field_ty = match &ty_def.variants().raw[..] {
                [var_one, var_two] => match (&var_one.fields.raw[..], &var_two.fields.raw[..]) {
                    ([], [field]) | ([field], []) => field.ty(tcx, args),
                    ([field1], [field2]) => {
                        let ty1 = field1.ty(tcx, args);
                        let ty2 = field2.ty(tcx, args);

                        if is_niche_optimization_candidate(tcx, typing_env, ty1) {
                            ty2
                        } else if is_niche_optimization_candidate(tcx, typing_env, ty2) {
                            ty1
                        } else {
                            return None;
                        }
                    }
                    _ => return None,
                },
                _ => return None,
            };

            if !ty_is_known_nonnull(tcx, typing_env, field_ty) {
                return None;
            }

            // At this point, the field's type is known to be non-null and the
            // parent enum is Option-like. If the computed sizes of the field
            // and the enum differ, the nonnull optimization isn't being
            // applied somewhere.
            let compute_size_skeleton = |t| SizeSkeleton::compute(t, tcx, typing_env).ok();
            if !compute_size_skeleton(ty)?.same_size(compute_size_skeleton(field_ty)?) {
                bug!("improper_ctypes: Option nonnull optimization not applied?");
            }

            // Return the nullable type this Option-like enum can be safely
            // represented with.
            let field_ty_layout = tcx.layout_of(typing_env.as_query_input(field_ty));
            if field_ty_layout.is_err() && !field_ty.has_non_region_param() {
                bug!("should be able to compute the layout of non-polymorphic type");
            }

            let field_ty_abi = &field_ty_layout.ok()?.backend_repr;
            if let BackendRepr::Scalar(field_ty_scalar) = field_ty_abi {
                match field_ty_scalar.valid_range(&tcx) {
                    WrappingRange { start: 0, end }
                        if end == field_ty_scalar.size(&tcx).unsigned_int_max() - 1 =>
                    {
                        return Some(get_nullable_type(tcx, typing_env, field_ty).unwrap());
                    }
                    WrappingRange { start: 1, .. } => {
                        return Some(get_nullable_type(tcx, typing_env, field_ty).unwrap());
                    }
                    WrappingRange { start, end } => {
                        unreachable!("Unhandled start and end range: ({}, {})", start, end)
                    }
                };
            }
            None
        }
        ty::Pat(base, pat) => get_nullable_type_from_pat(tcx, typing_env, *base, *pat),
        _ => None,
    }
}

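/// Like `get_nullable_type`, but for pattern types, using their base type.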
fn get_nullable_type_from_pat<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    base: Ty<'tcx>,
    pat: ty::Pattern<'tcx>,
) -> Option<Ty<'tcx>> {
    match *pat {
        ty::PatternKind::Range { .. } => get_nullable_type(tcx, typing_env, base),
        ty::PatternKind::Or(patterns) => {
            let first = get_nullable_type_from_pat(tcx, typing_env, base, patterns[0])?;
            for &pat in &patterns[1..] {
                assert_eq!(first, get_nullable_type_from_pat(tcx, typing_env, base, pat)?);
            }
            Some(first)
        }
    }
}

declare_lint_pass!(VariantSizeDifferences => [VARIANT_SIZE_DIFFERENCES]);

impl<'tcx> LateLintPass<'tcx> for VariantSizeDifferences {
    fn check_item(&mut self, cx: &LateContext<'_>, it: &hir::Item<'_>) {
        if let hir::ItemKind::Enum(_, _, ref enum_definition) = it.kind {
            let t = cx.tcx.type_of(it.owner_id).instantiate_identity();
            let ty = cx.tcx.erase_and_anonymize_regions(t);
            let Ok(layout) = cx.layout_of(ty) else { return };
            let Variants::Multiple { tag_encoding: TagEncoding::Direct, tag, variants, .. } =
                &layout.variants
            else {
                return;
            };

            let tag_size = tag.size(&cx.tcx).bytes();

            debug!(
                "enum `{}` is {} bytes large with layout:\n{:#?}",
                t,
                layout.size.bytes(),
                layout
            );

            let (largest, slargest, largest_index) = iter::zip(enum_definition.variants, variants)
                .map(|(variant, variant_layout)| {
                    // Subtract the size of the enum tag.
                    let bytes = variant_layout.size.bytes().saturating_sub(tag_size);

                    debug!("- variant `{}` is {} bytes large", variant.ident, bytes);
                    bytes
                })
                .enumerate()
                .fold((0, 0, 0), |(l, s, li), (idx, size)| {
                    if size > l {
                        (size, l, idx)
                    } else if size > s {
                        (l, size, li)
                    } else {
                        (l, s, li)
                    }
                });

            // We only warn if the largest variant is at least thrice as large
            // as the second-largest.
            if largest > slargest * 3 && slargest > 0 {
                cx.emit_span_lint(
                    VARIANT_SIZE_DIFFERENCES,
                    enum_definition.variants[largest_index].span,
                    VariantSizeDifferencesDiag { largest },
                );
            }
        }
    }
}

declare_lint! {
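    /// The `invalid_atomic_ordering` lint detects usage of an `Ordering`
    /// that is not valid for the atomic operation or fence it is passed to
    /// (for example, `Release` on a `load` or `Relaxed` on a fence).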
    INVALID_ATOMIC_ORDERING,
    Deny,
    "usage of invalid atomic ordering in atomic operations and memory fences"
}

declare_lint_pass!(InvalidAtomicOrdering => [INVALID_ATOMIC_ORDERING]);

impl InvalidAtomicOrdering {
    fn inherent_atomic_method_call<'hir>(
        cx: &LateContext<'_>,
        expr: &Expr<'hir>,
        recognized_names: &[Symbol], // used as a fast path before resolving the method
    ) -> Option<(Symbol, &'hir [Expr<'hir>])> {
        const ATOMIC_TYPES: &[Symbol] = &[
            sym::AtomicBool,
            sym::AtomicPtr,
            sym::AtomicUsize,
            sym::AtomicU8,
            sym::AtomicU16,
            sym::AtomicU32,
            sym::AtomicU64,
            sym::AtomicU128,
            sym::AtomicIsize,
            sym::AtomicI8,
            sym::AtomicI16,
            sym::AtomicI32,
            sym::AtomicI64,
            sym::AtomicI128,
        ];
        if let ExprKind::MethodCall(method_path, _, args, _) = &expr.kind
            && recognized_names.contains(&method_path.ident.name)
            && let Some(m_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
            && let Some(impl_did) = cx.tcx.inherent_impl_of_assoc(m_def_id)
            && let Some(adt) = cx.tcx.type_of(impl_did).instantiate_identity().ty_adt_def()
            // Skip extension traits; only lint the inherent methods of the
            // atomic types in `core::sync::atomic`.
            && let parent = cx.tcx.parent(adt.did())
            && cx.tcx.is_diagnostic_item(sym::atomic_mod, parent)
            && ATOMIC_TYPES.contains(&cx.tcx.item_name(adt.did()))
        {
            return Some((method_path.ident.name, args));
        }
        None
    }

    fn match_ordering(cx: &LateContext<'_>, ord_arg: &Expr<'_>) -> Option<Symbol> {
        let ExprKind::Path(ref ord_qpath) = ord_arg.kind else { return None };
        let did = cx.qpath_res(ord_qpath, ord_arg.hir_id).opt_def_id()?;
        let tcx = cx.tcx;
        let atomic_ordering = tcx.get_diagnostic_item(sym::Ordering);
        let name = tcx.item_name(did);
        let parent = tcx.parent(did);
        [sym::Relaxed, sym::Release, sym::Acquire, sym::AcqRel, sym::SeqCst].into_iter().find(
            |&ordering| {
                name == ordering
                    && (Some(parent) == atomic_ordering
                        // needed in case this is a ctor, not a variant
                        || tcx.opt_parent(parent) == atomic_ordering)
            },
        )
    }

    fn check_atomic_load_store(cx: &LateContext<'_>, expr: &Expr<'_>) {
        if let Some((method, args)) =
            Self::inherent_atomic_method_call(cx, expr, &[sym::load, sym::store])
            && let Some((ordering_arg, invalid_ordering)) = match method {
                sym::load => Some((&args[0], sym::Release)),
                sym::store => Some((&args[1], sym::Acquire)),
                _ => None,
            }
            && let Some(ordering) = Self::match_ordering(cx, ordering_arg)
            && (ordering == invalid_ordering || ordering == sym::AcqRel)
        {
            if method == sym::load {
                cx.emit_span_lint(INVALID_ATOMIC_ORDERING, ordering_arg.span, AtomicOrderingLoad);
            } else {
                cx.emit_span_lint(INVALID_ATOMIC_ORDERING, ordering_arg.span, AtomicOrderingStore);
            };
        }
    }

    fn check_memory_fence(cx: &LateContext<'_>, expr: &Expr<'_>) {
        if let ExprKind::Call(func, args) = expr.kind
            && let ExprKind::Path(ref func_qpath) = func.kind
            && let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id()
            && matches!(cx.tcx.get_diagnostic_name(def_id), Some(sym::fence | sym::compiler_fence))
            && Self::match_ordering(cx, &args[0]) == Some(sym::Relaxed)
        {
            cx.emit_span_lint(INVALID_ATOMIC_ORDERING, args[0].span, AtomicOrderingFence);
        }
    }

    fn check_atomic_compare_exchange(cx: &LateContext<'_>, expr: &Expr<'_>) {
        let Some((method, args)) = Self::inherent_atomic_method_call(
            cx,
            expr,
            &[sym::fetch_update, sym::compare_exchange, sym::compare_exchange_weak],
        ) else {
            return;
        };

        let fail_order_arg = match method {
            sym::fetch_update => &args[1],
            sym::compare_exchange | sym::compare_exchange_weak => &args[3],
            _ => return,
        };

        let Some(fail_ordering) = Self::match_ordering(cx, fail_order_arg) else { return };

        if matches!(fail_ordering, sym::Release | sym::AcqRel) {
            cx.emit_span_lint(
                INVALID_ATOMIC_ORDERING,
                fail_order_arg.span,
                InvalidAtomicOrderingDiag { method, fail_order_arg_span: fail_order_arg.span },
            );
        }
    }
}

impl<'tcx> LateLintPass<'tcx> for InvalidAtomicOrdering {
    fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
        Self::check_atomic_load_store(cx, expr);
        Self::check_memory_fence(cx, expr);
        Self::check_atomic_compare_exchange(cx, expr);
    }
}