use std::iter;
use std::ops::ControlFlow;

use rustc_abi::{BackendRepr, TagEncoding, VariantIdx, Variants, WrappingRange};
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::DiagMessage;
use rustc_hir::intravisit::VisitorExt;
use rustc_hir::{AmbigArg, Expr, ExprKind, HirId, LangItem};
use rustc_middle::bug;
use rustc_middle::ty::layout::{LayoutOf, SizeSkeleton};
use rustc_middle::ty::{
    self, Adt, AdtKind, GenericArgsRef, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable,
    TypeVisitableExt,
};
use rustc_session::{declare_lint, declare_lint_pass, impl_lint_pass};
use rustc_span::def_id::LocalDefId;
use rustc_span::{Span, Symbol, source_map, sym};
use tracing::debug;
use {rustc_ast as ast, rustc_hir as hir};

mod improper_ctypes;

use crate::lints::{
    AmbiguousWidePointerComparisons, AmbiguousWidePointerComparisonsAddrMetadataSuggestion,
    AmbiguousWidePointerComparisonsAddrSuggestion, AtomicOrderingFence, AtomicOrderingLoad,
    AtomicOrderingStore, ImproperCTypes, InvalidAtomicOrderingDiag, InvalidNanComparisons,
    InvalidNanComparisonsSuggestion, UnpredictableFunctionPointerComparisons,
    UnpredictableFunctionPointerComparisonsSuggestion, UnusedComparisons, UsesPowerAlignment,
    VariantSizeDifferencesDiag,
};
use crate::{LateContext, LateLintPass, LintContext, fluent_generated as fluent};

mod literal;

use literal::{int_ty_range, lint_literal, uint_ty_range};

declare_lint! {
    UNUSED_COMPARISONS,
    Warn,
    "comparisons made useless by limits of the types involved"
}

declare_lint! {
    OVERFLOWING_LITERALS,
    Deny,
    "literal out of range for its type"
}

declare_lint! {
    VARIANT_SIZE_DIFFERENCES,
    Allow,
    "detects enums with widely varying variant sizes"
}

declare_lint! {
    INVALID_NAN_COMPARISONS,
    Warn,
    "detects invalid floating point NaN comparisons"
}

declare_lint! {
    AMBIGUOUS_WIDE_POINTER_COMPARISONS,
    Warn,
    "detects ambiguous wide pointer comparisons"
}

declare_lint! {
    UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
    Warn,
    "detects unpredictable function pointer comparisons"
}

#[derive(Copy, Clone, Default)]
pub(crate) struct TypeLimits {
    negated_expr_id: Option<hir::HirId>,
    negated_expr_span: Option<Span>,
}

impl_lint_pass!(TypeLimits => [
    UNUSED_COMPARISONS,
    OVERFLOWING_LITERALS,
    INVALID_NAN_COMPARISONS,
    AMBIGUOUS_WIDE_POINTER_COMPARISONS,
    UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS
]);

impl TypeLimits {
    pub(crate) fn new() -> TypeLimits {
        TypeLimits { negated_expr_id: None, negated_expr_span: None }
    }
}

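/// Lints comparisons against NaN constants (e.g. `x == f32::NAN`), which are always
/// false (or always true for `!=`), as well as `<`/`<=`/`>`/`>=` comparisons involving
/// NaN, which are always false.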
fn lint_nan<'tcx>(
    cx: &LateContext<'tcx>,
    e: &'tcx hir::Expr<'tcx>,
    binop: hir::BinOp,
    l: &'tcx hir::Expr<'tcx>,
    r: &'tcx hir::Expr<'tcx>,
) {
    fn is_nan(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> bool {
        let expr = expr.peel_blocks().peel_borrows();
        match expr.kind {
            ExprKind::Path(qpath) => {
                let Some(def_id) = cx.typeck_results().qpath_res(&qpath, expr.hir_id).opt_def_id()
                else {
                    return false;
                };

                matches!(
                    cx.tcx.get_diagnostic_name(def_id),
                    Some(sym::f16_nan | sym::f32_nan | sym::f64_nan | sym::f128_nan)
                )
            }
            _ => false,
        }
    }

    fn eq_ne(
        e: &hir::Expr<'_>,
        l: &hir::Expr<'_>,
        r: &hir::Expr<'_>,
        f: impl FnOnce(Span, Span) -> InvalidNanComparisonsSuggestion,
    ) -> InvalidNanComparisons {
        let suggestion = if let Some(l_span) = l.span.find_ancestor_inside(e.span)
            && let Some(r_span) = r.span.find_ancestor_inside(e.span)
        {
            f(l_span, r_span)
        } else {
            InvalidNanComparisonsSuggestion::Spanless
        };

        InvalidNanComparisons::EqNe { suggestion }
    }

    let lint = match binop.node {
        hir::BinOpKind::Eq | hir::BinOpKind::Ne if is_nan(cx, l) => {
            eq_ne(e, l, r, |l_span, r_span| InvalidNanComparisonsSuggestion::Spanful {
                nan_plus_binop: l_span.until(r_span),
                float: r_span.shrink_to_hi(),
                neg: (binop.node == hir::BinOpKind::Ne).then(|| r_span.shrink_to_lo()),
            })
        }
        hir::BinOpKind::Eq | hir::BinOpKind::Ne if is_nan(cx, r) => {
            eq_ne(e, l, r, |l_span, r_span| InvalidNanComparisonsSuggestion::Spanful {
                nan_plus_binop: l_span.shrink_to_hi().to(r_span),
                float: l_span.shrink_to_hi(),
                neg: (binop.node == hir::BinOpKind::Ne).then(|| l_span.shrink_to_lo()),
            })
        }
        hir::BinOpKind::Lt | hir::BinOpKind::Le | hir::BinOpKind::Gt | hir::BinOpKind::Ge
            if is_nan(cx, l) || is_nan(cx, r) =>
        {
            InvalidNanComparisons::LtLeGtGe
        }
        _ => return,
    };

    cx.emit_span_lint(INVALID_NAN_COMPARISONS, e.span, lint);
}

#[derive(Debug, PartialEq, Copy, Clone)]
enum ComparisonOp {
    BinOp(hir::BinOpKind),
    Other,
}

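/// Lints comparisons of wide pointers (e.g. `*const dyn Trait`, `*const [u8]`), which
/// compare both the data pointer and the metadata and are therefore ambiguous; suggests
/// comparing the addresses (and, where meaningful, the metadata) explicitly.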
fn lint_wide_pointer<'tcx>(
    cx: &LateContext<'tcx>,
    e: &'tcx hir::Expr<'tcx>,
    cmpop: ComparisonOp,
    l: &'tcx hir::Expr<'tcx>,
    r: &'tcx hir::Expr<'tcx>,
) {
    let ptr_unsized = |mut ty: Ty<'tcx>| -> Option<(usize, String, bool)> {
        let mut refs = 0;
        while let ty::Ref(_, inner_ty, _) = ty.kind() {
            ty = *inner_ty;
            refs += 1;
        }

        let mut modifiers = String::new();
        ty = match ty.kind() {
            ty::RawPtr(ty, _) => *ty,
            ty::Adt(def, args) if cx.tcx.is_diagnostic_item(sym::NonNull, def.did()) => {
                modifiers.push_str(".as_ptr()");
                args.type_at(0)
            }
            _ => return None,
        };

        (!ty.is_sized(cx.tcx, cx.typing_env()))
            .then(|| (refs, modifiers, matches!(ty.kind(), ty::Dynamic(_, _, ty::Dyn))))
    };

    let l = l.peel_borrows();
    let r = r.peel_borrows();

    let Some(l_ty) = cx.typeck_results().expr_ty_opt(l) else {
        return;
    };
    let Some(r_ty) = cx.typeck_results().expr_ty_opt(r) else {
        return;
    };

    let Some((l_ty_refs, l_modifiers, l_inner_ty_is_dyn)) = ptr_unsized(l_ty) else {
        return;
    };
    let Some((r_ty_refs, r_modifiers, r_inner_ty_is_dyn)) = ptr_unsized(r_ty) else {
        return;
    };

    let (Some(l_span), Some(r_span)) =
        (l.span.find_ancestor_inside(e.span), r.span.find_ancestor_inside(e.span))
    else {
        return cx.emit_span_lint(
            AMBIGUOUS_WIDE_POINTER_COMPARISONS,
            e.span,
            AmbiguousWidePointerComparisons::Spanless,
        );
    };

    let ne = if cmpop == ComparisonOp::BinOp(hir::BinOpKind::Ne) { "!" } else { "" };
    let is_eq_ne = matches!(cmpop, ComparisonOp::BinOp(hir::BinOpKind::Eq | hir::BinOpKind::Ne));
    let is_dyn_comparison = l_inner_ty_is_dyn && r_inner_ty_is_dyn;

    let left = e.span.shrink_to_lo().until(l_span.shrink_to_lo());
    let middle = l_span.shrink_to_hi().until(r_span.shrink_to_lo());
    let right = r_span.shrink_to_hi().until(e.span.shrink_to_hi());

    let deref_left = &*"*".repeat(l_ty_refs);
    let deref_right = &*"*".repeat(r_ty_refs);

    let l_modifiers = &*l_modifiers;
    let r_modifiers = &*r_modifiers;

    cx.emit_span_lint(
        AMBIGUOUS_WIDE_POINTER_COMPARISONS,
        e.span,
        AmbiguousWidePointerComparisons::Spanful {
            addr_metadata_suggestion: (is_eq_ne && !is_dyn_comparison).then(|| {
                AmbiguousWidePointerComparisonsAddrMetadataSuggestion {
                    ne,
                    deref_left,
                    deref_right,
                    l_modifiers,
                    r_modifiers,
                    left,
                    middle,
                    right,
                }
            }),
            addr_suggestion: if is_eq_ne {
                AmbiguousWidePointerComparisonsAddrSuggestion::AddrEq {
                    ne,
                    deref_left,
                    deref_right,
                    l_modifiers,
                    r_modifiers,
                    left,
                    middle,
                    right,
                }
            } else {
                AmbiguousWidePointerComparisonsAddrSuggestion::Cast {
                    deref_left,
                    deref_right,
                    l_modifiers,
                    r_modifiers,
                    paren_left: if l_ty_refs != 0 { ")" } else { "" },
                    paren_right: if r_ty_refs != 0 { ")" } else { "" },
                    left_before: (l_ty_refs != 0).then_some(l_span.shrink_to_lo()),
                    left_after: l_span.shrink_to_hi(),
                    right_before: (r_ty_refs != 0).then_some(r_span.shrink_to_lo()),
                    right_after: r_span.shrink_to_hi(),
                }
            },
        },
    );
}

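/// Lints comparisons of function pointers (including `Option<fn()>`), whose results are
/// unpredictable because a function does not have a unique, stable address; suggests an
/// explicit address comparison where a simple rewrite exists.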
fn lint_fn_pointer<'tcx>(
    cx: &LateContext<'tcx>,
    e: &'tcx hir::Expr<'tcx>,
    cmpop: ComparisonOp,
    l: &'tcx hir::Expr<'tcx>,
    r: &'tcx hir::Expr<'tcx>,
) {
    let peel_refs = |mut ty: Ty<'tcx>| -> (Ty<'tcx>, usize) {
        let mut refs = 0;

        while let ty::Ref(_, inner_ty, _) = ty.kind() {
            ty = *inner_ty;
            refs += 1;
        }

        (ty, refs)
    };

    let l = l.peel_borrows();
    let r = r.peel_borrows();

    let Some(l_ty) = cx.typeck_results().expr_ty_opt(l) else { return };
    let Some(r_ty) = cx.typeck_results().expr_ty_opt(r) else { return };

    let (l_ty, l_ty_refs) = peel_refs(l_ty);
    let (r_ty, r_ty_refs) = peel_refs(r_ty);

    if l_ty.is_fn() && r_ty.is_fn() {
        // Both operands are plain function pointers: fall through and build a suggestion below.
    } else if let ty::Adt(l_def, l_args) = l_ty.kind()
        && let ty::Adt(r_def, r_args) = r_ty.kind()
        && cx.tcx.is_lang_item(l_def.did(), LangItem::Option)
        && cx.tcx.is_lang_item(r_def.did(), LangItem::Option)
        && let Some(l_some_arg) = l_args.get(0)
        && let Some(r_some_arg) = r_args.get(0)
        && l_some_arg.expect_ty().is_fn()
        && r_some_arg.expect_ty().is_fn()
    {
        // `Option<fn()>` comparisons have no simple structured suggestion, so just warn.
        return cx.emit_span_lint(
            UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
            e.span,
            UnpredictableFunctionPointerComparisons::Warn,
        );
    } else {
        return;
    }

    let is_eq_ne = matches!(cmpop, ComparisonOp::BinOp(hir::BinOpKind::Eq | hir::BinOpKind::Ne));

    if !is_eq_ne {
        // Only `==` and `!=` comparisons get a structured suggestion.
        return cx.emit_span_lint(
            UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
            e.span,
            UnpredictableFunctionPointerComparisons::Warn,
        );
    }

    let (Some(l_span), Some(r_span)) =
        (l.span.find_ancestor_inside(e.span), r.span.find_ancestor_inside(e.span))
    else {
        return cx.emit_span_lint(
            UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
            e.span,
            UnpredictableFunctionPointerComparisons::Warn,
        );
    };

    let ne = if cmpop == ComparisonOp::BinOp(hir::BinOpKind::Ne) { "!" } else { "" };

    let deref_left = &*"*".repeat(l_ty_refs);
    let deref_right = &*"*".repeat(r_ty_refs);

    let left = e.span.shrink_to_lo().until(l_span.shrink_to_lo());
    let middle = l_span.shrink_to_hi().until(r_span.shrink_to_lo());
    let right = r_span.shrink_to_hi().until(e.span.shrink_to_hi());

    let sugg = if !r_ty.is_fn_ptr() {
        let fn_sig = r_ty.fn_sig(cx.tcx);

        UnpredictableFunctionPointerComparisonsSuggestion::FnAddrEqWithCast {
            ne,
            fn_sig,
            deref_left,
            deref_right,
            left,
            middle,
            right,
        }
    } else {
        UnpredictableFunctionPointerComparisonsSuggestion::FnAddrEq {
            ne,
            deref_left,
            deref_right,
            left,
            middle,
            right,
        }
    };

    cx.emit_span_lint(
        UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
        e.span,
        UnpredictableFunctionPointerComparisons::Suggestion { sugg },
    );
}

impl<'tcx> LateLintPass<'tcx> for TypeLimits {
    fn check_lit(
        &mut self,
        cx: &LateContext<'tcx>,
        hir_id: HirId,
        lit: &'tcx hir::Lit,
        negated: bool,
    ) {
        if negated {
            self.negated_expr_id = Some(hir_id);
            self.negated_expr_span = Some(lit.span);
        }
        lint_literal(cx, self, hir_id, lit.span, lit, negated);
    }

    fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx hir::Expr<'tcx>) {
        match e.kind {
            hir::ExprKind::Unary(hir::UnOp::Neg, expr) => {
                // Propagate negation, if the negation itself isn't negated.
                if self.negated_expr_id != Some(e.hir_id) {
                    self.negated_expr_id = Some(expr.hir_id);
                    self.negated_expr_span = Some(e.span);
                }
            }
            hir::ExprKind::Binary(binop, ref l, ref r) => {
                if is_comparison(binop) {
                    if !check_limits(cx, binop, l, r) {
                        cx.emit_span_lint(UNUSED_COMPARISONS, e.span, UnusedComparisons);
                    } else {
                        lint_nan(cx, e, binop, l, r);
                        let cmpop = ComparisonOp::BinOp(binop.node);
                        lint_wide_pointer(cx, e, cmpop, l, r);
                        lint_fn_pointer(cx, e, cmpop, l, r);
                    }
                }
            }
            hir::ExprKind::Call(path, [l, r])
                if let ExprKind::Path(ref qpath) = path.kind
                    && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
                    && let Some(diag_item) = cx.tcx.get_diagnostic_name(def_id)
                    && let Some(cmpop) = diag_item_cmpop(diag_item) =>
            {
                lint_wide_pointer(cx, e, cmpop, l, r);
                lint_fn_pointer(cx, e, cmpop, l, r);
            }
            hir::ExprKind::MethodCall(_, l, [r], _)
                if let Some(def_id) = cx.typeck_results().type_dependent_def_id(e.hir_id)
                    && let Some(diag_item) = cx.tcx.get_diagnostic_name(def_id)
                    && let Some(cmpop) = diag_item_cmpop(diag_item) =>
            {
                lint_wide_pointer(cx, e, cmpop, l, r);
                lint_fn_pointer(cx, e, cmpop, l, r);
            }
            _ => {}
        };

        fn is_valid<T: PartialOrd>(binop: hir::BinOp, v: T, min: T, max: T) -> bool {
            match binop.node {
                hir::BinOpKind::Lt => v > min && v <= max,
                hir::BinOpKind::Le => v >= min && v < max,
                hir::BinOpKind::Gt => v >= min && v < max,
                hir::BinOpKind::Ge => v > min && v <= max,
                hir::BinOpKind::Eq | hir::BinOpKind::Ne => v >= min && v <= max,
                _ => bug!(),
            }
        }

        fn rev_binop(binop: hir::BinOp) -> hir::BinOp {
            source_map::respan(
                binop.span,
                match binop.node {
                    hir::BinOpKind::Lt => hir::BinOpKind::Gt,
                    hir::BinOpKind::Le => hir::BinOpKind::Ge,
                    hir::BinOpKind::Gt => hir::BinOpKind::Lt,
                    hir::BinOpKind::Ge => hir::BinOpKind::Le,
                    _ => return binop,
                },
            )
        }

        fn check_limits(
            cx: &LateContext<'_>,
            binop: hir::BinOp,
            l: &hir::Expr<'_>,
            r: &hir::Expr<'_>,
        ) -> bool {
            let (lit, expr, swap) = match (&l.kind, &r.kind) {
                (&hir::ExprKind::Lit(_), _) => (l, r, true),
                (_, &hir::ExprKind::Lit(_)) => (r, l, false),
                _ => return true,
            };
            // Normalize the binop so that the literal is always on the RHS of the comparison.
            let norm_binop = if swap { rev_binop(binop) } else { binop };
            match *cx.typeck_results().node_type(expr.hir_id).kind() {
                ty::Int(int_ty) => {
                    let (min, max) = int_ty_range(int_ty);
                    let lit_val: i128 = match lit.kind {
                        hir::ExprKind::Lit(li) => match li.node {
                            ast::LitKind::Int(
                                v,
                                ast::LitIntType::Signed(_) | ast::LitIntType::Unsuffixed,
                            ) => v.get() as i128,
                            _ => return true,
                        },
                        _ => bug!(),
                    };
                    is_valid(norm_binop, lit_val, min, max)
                }
                ty::Uint(uint_ty) => {
                    let (min, max): (u128, u128) = uint_ty_range(uint_ty);
                    let lit_val: u128 = match lit.kind {
                        hir::ExprKind::Lit(li) => match li.node {
                            ast::LitKind::Int(v, _) => v.get(),
                            _ => return true,
                        },
                        _ => bug!(),
                    };
                    is_valid(norm_binop, lit_val, min, max)
                }
                _ => true,
            }
        }

        fn is_comparison(binop: hir::BinOp) -> bool {
            matches!(
                binop.node,
                hir::BinOpKind::Eq
                    | hir::BinOpKind::Lt
                    | hir::BinOpKind::Le
                    | hir::BinOpKind::Ne
                    | hir::BinOpKind::Ge
                    | hir::BinOpKind::Gt
            )
        }

        fn diag_item_cmpop(diag_item: Symbol) -> Option<ComparisonOp> {
            Some(match diag_item {
                sym::cmp_ord_max => ComparisonOp::Other,
                sym::cmp_ord_min => ComparisonOp::Other,
                sym::ord_cmp_method => ComparisonOp::Other,
                sym::cmp_partialeq_eq => ComparisonOp::BinOp(hir::BinOpKind::Eq),
                sym::cmp_partialeq_ne => ComparisonOp::BinOp(hir::BinOpKind::Ne),
                sym::cmp_partialord_cmp => ComparisonOp::Other,
                sym::cmp_partialord_ge => ComparisonOp::BinOp(hir::BinOpKind::Ge),
                sym::cmp_partialord_gt => ComparisonOp::BinOp(hir::BinOpKind::Gt),
                sym::cmp_partialord_le => ComparisonOp::BinOp(hir::BinOpKind::Le),
                sym::cmp_partialord_lt => ComparisonOp::BinOp(hir::BinOpKind::Lt),
                _ => return None,
            })
        }
    }
}

declare_lint! {
    IMPROPER_CTYPES,
    Warn,
    "proper use of libc types in foreign modules"
}

declare_lint_pass!(ImproperCTypesDeclarations => [IMPROPER_CTYPES]);

declare_lint! {
    IMPROPER_CTYPES_DEFINITIONS,
    Warn,
    "proper use of libc types in foreign item definitions"
}

declare_lint! {
    USES_POWER_ALIGNMENT,
    Warn,
    "Structs do not follow the power alignment rule under repr(C)"
}

declare_lint_pass!(ImproperCTypesDefinitions => [IMPROPER_CTYPES_DEFINITIONS, USES_POWER_ALIGNMENT]);

#[derive(Clone, Copy)]
pub(crate) enum CItemKind {
    Declaration,
    Definition,
}

struct ImproperCTypesVisitor<'a, 'tcx> {
    cx: &'a LateContext<'tcx>,
    mode: CItemKind,
}

struct CTypesVisitorState<'tcx> {
    cache: FxHashSet<Ty<'tcx>>,
    base_ty: Ty<'tcx>,
}

enum FfiResult<'tcx> {
    FfiSafe,
    FfiPhantom(Ty<'tcx>),
    FfiUnsafe { ty: Ty<'tcx>, reason: DiagMessage, help: Option<DiagMessage> },
}

pub(crate) fn nonnull_optimization_guaranteed<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::AdtDef<'tcx>,
) -> bool {
    tcx.has_attr(def.did(), sym::rustc_nonnull_optimization_guaranteed)
}

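/// Returns the field of a `repr(transparent)` variant whose layout is not a 1-ZST
/// (i.e. the field that actually determines the variant's representation), if any.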
pub(crate) fn transparent_newtype_field<'a, 'tcx>(
    tcx: TyCtxt<'tcx>,
    variant: &'a ty::VariantDef,
) -> Option<&'a ty::FieldDef> {
    let typing_env = ty::TypingEnv::non_body_analysis(tcx, variant.def_id);
    variant.fields.iter().find(|field| {
        let field_ty = tcx.type_of(field.did).instantiate_identity();
        let is_1zst =
            tcx.layout_of(typing_env.as_query_input(field_ty)).is_ok_and(|layout| layout.is_1zst());
        !is_1zst
    })
}

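/// Is the type known to be non-null, so that `Option`-like enums wrapping it can use
/// the null value as their niche?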
fn ty_is_known_nonnull<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    ty: Ty<'tcx>,
    mode: CItemKind,
) -> bool {
    let ty = tcx.try_normalize_erasing_regions(typing_env, ty).unwrap_or(ty);

    match ty.kind() {
        ty::FnPtr(..) => true,
        ty::Ref(..) => true,
        ty::Adt(def, _) if def.is_box() && matches!(mode, CItemKind::Definition) => true,
        ty::Adt(def, args) if def.repr().transparent() && !def.is_union() => {
            let marked_non_null = nonnull_optimization_guaranteed(tcx, *def);

            if marked_non_null {
                return true;
            }

            // `UnsafeCell` hides any niches, so it is never known to be non-null.
            if def.is_unsafe_cell() {
                return false;
            }

            def.variants()
                .iter()
                .filter_map(|variant| transparent_newtype_field(tcx, variant))
                .any(|field| ty_is_known_nonnull(tcx, typing_env, field.ty(tcx, args), mode))
        }
        ty::Pat(base, pat) => {
            ty_is_known_nonnull(tcx, typing_env, *base, mode)
                || Option::unwrap_or_default(
                    try {
                        match **pat {
                            ty::PatternKind::Range { start, end } => {
                                let start = start.try_to_value()?.try_to_bits(tcx, typing_env)?;
                                let end = end.try_to_value()?.try_to_bits(tcx, typing_env)?;

                                // Non-null if the range does not wrap and does not include zero.
                                start > 0 && end >= start
                            }
                        }
                    },
                )
        }
        _ => false,
    }
}

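/// Given a potentially non-null scalar type (or a transparent wrapper around one),
/// returns the "nullable" type it can be represented as across FFI (for example, a
/// reference becomes the corresponding raw pointer). Returns `None` for unhandled kinds.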
fn get_nullable_type<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    ty: Ty<'tcx>,
) -> Option<Ty<'tcx>> {
    let ty = tcx.try_normalize_erasing_regions(typing_env, ty).unwrap_or(ty);

    Some(match *ty.kind() {
        ty::Adt(field_def, field_args) => {
            let inner_field_ty = {
                let mut first_non_zst_ty =
                    field_def.variants().iter().filter_map(|v| transparent_newtype_field(tcx, v));
                debug_assert_eq!(
                    first_non_zst_ty.clone().count(),
                    1,
                    "Wrong number of fields for transparent type"
                );
                first_non_zst_ty
                    .next_back()
                    .expect("No non-zst fields in transparent type.")
                    .ty(tcx, field_args)
            };
            return get_nullable_type(tcx, typing_env, inner_field_ty);
        }
        ty::Pat(base, ..) => return get_nullable_type(tcx, typing_env, base),
        ty::Int(_) | ty::Uint(_) | ty::RawPtr(..) => ty,
        ty::Ref(_region, ty, mutbl) => Ty::new_ptr(tcx, ty, mutbl),
        ty::FnPtr(..) => ty,
        ref unhandled => {
            debug!(
                "get_nullable_type: Unhandled scalar kind: {:?} while checking {:?}",
                unhandled, ty
            );
            return None;
        }
    })
}

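/// Is this type a candidate for being the data-less side of a niche optimization, i.e.
/// a 1-ZST, non-`#[non_exhaustive]` fieldless struct, variant-less enum, or unit tuple?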
fn is_niche_optimization_candidate<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    ty: Ty<'tcx>,
) -> bool {
    if tcx.layout_of(typing_env.as_query_input(ty)).is_ok_and(|layout| !layout.is_1zst()) {
        return false;
    }

    match ty.kind() {
        ty::Adt(ty_def, _) => {
            let non_exhaustive = ty_def.is_variant_list_non_exhaustive();
            let empty = (ty_def.is_struct() && ty_def.all_fields().next().is_none())
                || (ty_def.is_enum() && ty_def.variants().is_empty());

            !non_exhaustive && empty
        }
        ty::Tuple(tys) => tys.is_empty(),
        _ => false,
    }
}

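/// Checks if this enum can be safely exported based on the "nullable pointer
/// optimization": if it is an `Option`-like enum around a known non-null type, returns
/// the type that `ty` can be safely represented as across FFI; otherwise returns `None`.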
pub(crate) fn repr_nullable_ptr<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    ty: Ty<'tcx>,
    ckind: CItemKind,
) -> Option<Ty<'tcx>> {
    debug!("is_repr_nullable_ptr(tcx, ty = {:?})", ty);
    match ty.kind() {
        ty::Adt(ty_def, args) => {
            let field_ty = match &ty_def.variants().raw[..] {
                [var_one, var_two] => match (&var_one.fields.raw[..], &var_two.fields.raw[..]) {
                    ([], [field]) | ([field], []) => field.ty(tcx, args),
                    ([field1], [field2]) => {
                        let ty1 = field1.ty(tcx, args);
                        let ty2 = field2.ty(tcx, args);

                        if is_niche_optimization_candidate(tcx, typing_env, ty1) {
                            ty2
                        } else if is_niche_optimization_candidate(tcx, typing_env, ty2) {
                            ty1
                        } else {
                            return None;
                        }
                    }
                    _ => return None,
                },
                _ => return None,
            };

            if !ty_is_known_nonnull(tcx, typing_env, field_ty, ckind) {
                return None;
            }

            // At this point, the field's type is known to be non-null and the parent enum is
            // Option-like. If the enum is the same size as the field, the nonnull optimization
            // must have been applied.
            let compute_size_skeleton = |t| SizeSkeleton::compute(t, tcx, typing_env).ok();
            if !compute_size_skeleton(ty)?.same_size(compute_size_skeleton(field_ty)?) {
                bug!("improper_ctypes: Option nonnull optimization not applied?");
            }

            // Return the nullable type this Option-like enum can be safely represented with.
            let field_ty_layout = tcx.layout_of(typing_env.as_query_input(field_ty));
            if field_ty_layout.is_err() && !field_ty.has_non_region_param() {
                bug!("should be able to compute the layout of non-polymorphic type");
            }

            let field_ty_abi = &field_ty_layout.ok()?.backend_repr;
            if let BackendRepr::Scalar(field_ty_scalar) = field_ty_abi {
                match field_ty_scalar.valid_range(&tcx) {
                    WrappingRange { start: 0, end }
                        if end == field_ty_scalar.size(&tcx).unsigned_int_max() - 1 =>
                    {
                        return Some(get_nullable_type(tcx, typing_env, field_ty).unwrap());
                    }
                    WrappingRange { start: 1, .. } => {
                        return Some(get_nullable_type(tcx, typing_env, field_ty).unwrap());
                    }
                    WrappingRange { start, end } => {
                        unreachable!("Unhandled start and end range: ({}, {})", start, end)
                    }
                };
            }
            None
        }
        ty::Pat(base, pat) => match **pat {
            ty::PatternKind::Range { .. } => get_nullable_type(tcx, typing_env, *base),
        },
        _ => None,
    }
}

impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> {
    fn check_for_array_ty(&mut self, sp: Span, ty: Ty<'tcx>) -> bool {
        if let ty::Array(..) = ty.kind() {
            self.emit_ffi_unsafe_type_lint(
                ty,
                sp,
                fluent::lint_improper_ctypes_array_reason,
                Some(fluent::lint_improper_ctypes_array_help),
            );
            true
        } else {
            false
        }
    }

    fn check_field_type_for_ffi(
        &self,
        acc: &mut CTypesVisitorState<'tcx>,
        field: &ty::FieldDef,
        args: GenericArgsRef<'tcx>,
    ) -> FfiResult<'tcx> {
        let field_ty = field.ty(self.cx.tcx, args);
        let field_ty = self
            .cx
            .tcx
            .try_normalize_erasing_regions(self.cx.typing_env(), field_ty)
            .unwrap_or(field_ty);
        self.check_type_for_ffi(acc, field_ty)
    }

    fn check_variant_for_ffi(
        &self,
        acc: &mut CTypesVisitorState<'tcx>,
        ty: Ty<'tcx>,
        def: ty::AdtDef<'tcx>,
        variant: &ty::VariantDef,
        args: GenericArgsRef<'tcx>,
    ) -> FfiResult<'tcx> {
        use FfiResult::*;
        let transparent_with_all_zst_fields = if def.repr().transparent() {
            if let Some(field) = transparent_newtype_field(self.cx.tcx, variant) {
                // The representation is that of the only non-1-ZST field, so check that field.
                match self.check_field_type_for_ffi(acc, field, args) {
                    FfiUnsafe { ty, .. } if ty.is_unit() => (),
                    r => return r,
                }

                false
            } else {
                // All fields are 1-ZSTs, so the type behaves like `()`; it is only FFI-safe
                // if all fields are `PhantomData`, which is checked below.
                true
            }
        } else {
            false
        };

        // We can't completely trust `repr(C)` markings, so make sure the fields are actually safe.
        let mut all_phantom = !variant.fields.is_empty();
        for field in &variant.fields {
            all_phantom &= match self.check_field_type_for_ffi(acc, field, args) {
                FfiSafe => false,
                // `()` fields are FFI-safe!
                FfiUnsafe { ty, .. } if ty.is_unit() => false,
                FfiPhantom(..) => true,
                r @ FfiUnsafe { .. } => return r,
            }
        }

        if all_phantom {
            FfiPhantom(ty)
        } else if transparent_with_all_zst_fields {
            FfiUnsafe { ty, reason: fluent::lint_improper_ctypes_struct_zst, help: None }
        } else {
            FfiSafe
        }
    }

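    /// Checks whether a type is "FFI-safe", i.e. whether it has a stable, well-defined
    /// representation when passed across the relevant foreign ABI boundary.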
    fn check_type_for_ffi(
        &self,
        acc: &mut CTypesVisitorState<'tcx>,
        ty: Ty<'tcx>,
    ) -> FfiResult<'tcx> {
        use FfiResult::*;

        let tcx = self.cx.tcx;

        // Protect against infinite recursion, for example `struct S(*mut S);`.
        if !acc.cache.insert(ty) {
            return FfiSafe;
        }

        match *ty.kind() {
            ty::Adt(def, args) => {
                if let Some(boxed) = ty.boxed_ty()
                    && matches!(self.mode, CItemKind::Definition)
                {
                    if boxed.is_sized(tcx, self.cx.typing_env()) {
                        return FfiSafe;
                    } else {
                        return FfiUnsafe {
                            ty,
                            reason: fluent::lint_improper_ctypes_box,
                            help: None,
                        };
                    }
                }
                if def.is_phantom_data() {
                    return FfiPhantom(ty);
                }
                match def.adt_kind() {
                    AdtKind::Struct | AdtKind::Union => {
                        if let Some(sym::cstring_type | sym::cstr_type) =
                            tcx.get_diagnostic_name(def.did())
                            && !acc.base_ty.is_mutable_ptr()
                        {
                            return FfiUnsafe {
                                ty,
                                reason: fluent::lint_improper_ctypes_cstr_reason,
                                help: Some(fluent::lint_improper_ctypes_cstr_help),
                            };
                        }

                        if !def.repr().c() && !def.repr().transparent() {
                            return FfiUnsafe {
                                ty,
                                reason: if def.is_struct() {
                                    fluent::lint_improper_ctypes_struct_layout_reason
                                } else {
                                    fluent::lint_improper_ctypes_union_layout_reason
                                },
                                help: if def.is_struct() {
                                    Some(fluent::lint_improper_ctypes_struct_layout_help)
                                } else {
                                    Some(fluent::lint_improper_ctypes_union_layout_help)
                                },
                            };
                        }

                        if def.non_enum_variant().field_list_has_applicable_non_exhaustive() {
                            return FfiUnsafe {
                                ty,
                                reason: if def.is_struct() {
                                    fluent::lint_improper_ctypes_struct_non_exhaustive
                                } else {
                                    fluent::lint_improper_ctypes_union_non_exhaustive
                                },
                                help: None,
                            };
                        }

                        if def.non_enum_variant().fields.is_empty() {
                            return FfiUnsafe {
                                ty,
                                reason: if def.is_struct() {
                                    fluent::lint_improper_ctypes_struct_fieldless_reason
                                } else {
                                    fluent::lint_improper_ctypes_union_fieldless_reason
                                },
                                help: if def.is_struct() {
                                    Some(fluent::lint_improper_ctypes_struct_fieldless_help)
                                } else {
                                    Some(fluent::lint_improper_ctypes_union_fieldless_help)
                                },
                            };
                        }

                        self.check_variant_for_ffi(acc, ty, def, def.non_enum_variant(), args)
                    }
                    AdtKind::Enum => {
                        if def.variants().is_empty() {
                            // Empty enums are okay... although sort of useless.
                            return FfiSafe;
                        }
                        // Check for a repr() attribute to specify the size of the discriminant.
                        if !def.repr().c() && !def.repr().transparent() && def.repr().int.is_none()
                        {
                            // Special-case enums that are eligible for the nullable pointer
                            // optimization.
                            if let Some(ty) =
                                repr_nullable_ptr(self.cx.tcx, self.cx.typing_env(), ty, self.mode)
                            {
                                return self.check_type_for_ffi(acc, ty);
                            }

                            return FfiUnsafe {
                                ty,
                                reason: fluent::lint_improper_ctypes_enum_repr_reason,
                                help: Some(fluent::lint_improper_ctypes_enum_repr_help),
                            };
                        }

                        use improper_ctypes::check_non_exhaustive_variant;

                        let non_exhaustive = def.variant_list_has_applicable_non_exhaustive();
                        // Check the contained variants.
                        let ret = def.variants().iter().try_for_each(|variant| {
                            check_non_exhaustive_variant(non_exhaustive, variant)
                                .map_break(|reason| FfiUnsafe { ty, reason, help: None })?;

                            match self.check_variant_for_ffi(acc, ty, def, variant, args) {
                                FfiSafe => ControlFlow::Continue(()),
                                r => ControlFlow::Break(r),
                            }
                        });
                        if let ControlFlow::Break(result) = ret {
                            return result;
                        }

                        FfiSafe
                    }
                }
            }

            ty::Char => FfiUnsafe {
                ty,
                reason: fluent::lint_improper_ctypes_char_reason,
                help: Some(fluent::lint_improper_ctypes_char_help),
            },

            // Pattern types only add validity invariants; their base type's representation is
            // what matters for FFI.
            ty::Pat(base, ..) => self.check_type_for_ffi(acc, base),

            ty::Int(ty::IntTy::I128) | ty::Uint(ty::UintTy::U128) => {
                FfiUnsafe { ty, reason: fluent::lint_improper_ctypes_128bit, help: None }
            }

            // Primitive types with a stable representation.
            ty::Bool | ty::Int(..) | ty::Uint(..) | ty::Float(..) | ty::Never => FfiSafe,

            ty::Slice(_) => FfiUnsafe {
                ty,
                reason: fluent::lint_improper_ctypes_slice_reason,
                help: Some(fluent::lint_improper_ctypes_slice_help),
            },

            ty::Dynamic(..) => {
                FfiUnsafe { ty, reason: fluent::lint_improper_ctypes_dyn, help: None }
            }

            ty::Str => FfiUnsafe {
                ty,
                reason: fluent::lint_improper_ctypes_str_reason,
                help: Some(fluent::lint_improper_ctypes_str_help),
            },

            ty::Tuple(..) => FfiUnsafe {
                ty,
                reason: fluent::lint_improper_ctypes_tuple_reason,
                help: Some(fluent::lint_improper_ctypes_tuple_help),
            },

            ty::RawPtr(ty, _) | ty::Ref(_, ty, _)
                if {
                    matches!(self.mode, CItemKind::Definition)
                        && ty.is_sized(self.cx.tcx, self.cx.typing_env())
                } =>
            {
                FfiSafe
            }

            ty::RawPtr(ty, _)
                if match ty.kind() {
                    ty::Tuple(tuple) => tuple.is_empty(),
                    _ => false,
                } =>
            {
                FfiSafe
            }

            ty::RawPtr(ty, _) | ty::Ref(_, ty, _) => self.check_type_for_ffi(acc, ty),

            ty::Array(inner_ty, _) => self.check_type_for_ffi(acc, inner_ty),

            ty::FnPtr(sig_tys, hdr) => {
                let sig = sig_tys.with(hdr);
                if sig.abi().is_rustic_abi() {
                    return FfiUnsafe {
                        ty,
                        reason: fluent::lint_improper_ctypes_fnptr_reason,
                        help: Some(fluent::lint_improper_ctypes_fnptr_help),
                    };
                }

                let sig = tcx.instantiate_bound_regions_with_erased(sig);
                for arg in sig.inputs() {
                    match self.check_type_for_ffi(acc, *arg) {
                        FfiSafe => {}
                        r => return r,
                    }
                }

                let ret_ty = sig.output();
                if ret_ty.is_unit() {
                    return FfiSafe;
                }

                self.check_type_for_ffi(acc, ret_ty)
            }

            ty::Foreign(..) => FfiSafe,

            // Opaque types can still reach this point, e.g. through a projection in a struct
            // field that normalizes to an opaque type.
            ty::Alias(ty::Opaque, ..) => {
                FfiUnsafe { ty, reason: fluent::lint_improper_ctypes_opaque, help: None }
            }

            // In definitions, generic type parameters and not-yet-normalized projections are
            // accepted as-is by this lint.
            ty::Param(..) | ty::Alias(ty::Projection | ty::Inherent, ..)
                if matches!(self.mode, CItemKind::Definition) =>
            {
                FfiSafe
            }

            ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"),

            ty::Param(..)
            | ty::Alias(ty::Projection | ty::Inherent | ty::Weak, ..)
            | ty::Infer(..)
            | ty::Bound(..)
            | ty::Error(_)
            | ty::Closure(..)
            | ty::CoroutineClosure(..)
            | ty::Coroutine(..)
            | ty::CoroutineWitness(..)
            | ty::Placeholder(..)
            | ty::FnDef(..) => bug!("unexpected type in foreign function: {:?}", ty),
        }
    }

    fn emit_ffi_unsafe_type_lint(
        &mut self,
        ty: Ty<'tcx>,
        sp: Span,
        note: DiagMessage,
        help: Option<DiagMessage>,
    ) {
        let lint = match self.mode {
            CItemKind::Declaration => IMPROPER_CTYPES,
            CItemKind::Definition => IMPROPER_CTYPES_DEFINITIONS,
        };
        let desc = match self.mode {
            CItemKind::Declaration => "block",
            CItemKind::Definition => "fn",
        };
        let span_note = if let ty::Adt(def, _) = ty.kind()
            && let Some(sp) = self.cx.tcx.hir().span_if_local(def.did())
        {
            Some(sp)
        } else {
            None
        };
        self.cx.emit_span_lint(
            lint,
            sp,
            ImproperCTypes { ty, desc, label: sp, help, note, span_note },
        );
    }

    fn check_for_opaque_ty(&mut self, sp: Span, ty: Ty<'tcx>) -> bool {
        struct ProhibitOpaqueTypes;
        impl<'tcx> ty::TypeVisitor<TyCtxt<'tcx>> for ProhibitOpaqueTypes {
            type Result = ControlFlow<Ty<'tcx>>;

            fn visit_ty(&mut self, ty: Ty<'tcx>) -> Self::Result {
                if !ty.has_opaque_types() {
                    return ControlFlow::Continue(());
                }

                if let ty::Alias(ty::Opaque, ..) = ty.kind() {
                    ControlFlow::Break(ty)
                } else {
                    ty.super_visit_with(self)
                }
            }
        }

        if let Some(ty) = self
            .cx
            .tcx
            .try_normalize_erasing_regions(self.cx.typing_env(), ty)
            .unwrap_or(ty)
            .visit_with(&mut ProhibitOpaqueTypes)
            .break_value()
        {
            self.emit_ffi_unsafe_type_lint(ty, sp, fluent::lint_improper_ctypes_opaque, None);
            true
        } else {
            false
        }
    }

    fn check_type_for_ffi_and_report_errors(
        &mut self,
        sp: Span,
        ty: Ty<'tcx>,
        is_static: bool,
        is_return_type: bool,
    ) {
        if self.check_for_opaque_ty(sp, ty) {
            // We've already emitted an error due to an opaque type.
            return;
        }

        let ty = self.cx.tcx.try_normalize_erasing_regions(self.cx.typing_env(), ty).unwrap_or(ty);

        // C doesn't really support passing arrays by value - the only way to pass an array by
        // value is through a struct. So, first test that the top level isn't an array, and then
        // recursively check the types inside.
        if !is_static && self.check_for_array_ty(sp, ty) {
            return;
        }

        // Don't report FFI errors for unit return types. This check is performed here (after
        // normalization) rather than in the caller so that normalization has definitely happened.
        if is_return_type && ty.is_unit() {
            return;
        }

        let mut acc = CTypesVisitorState { cache: FxHashSet::default(), base_ty: ty };
        match self.check_type_for_ffi(&mut acc, ty) {
            FfiResult::FfiSafe => {}
            FfiResult::FfiPhantom(ty) => {
                self.emit_ffi_unsafe_type_lint(
                    ty,
                    sp,
                    fluent::lint_improper_ctypes_only_phantomdata,
                    None,
                );
            }
            FfiResult::FfiUnsafe { ty, reason, help } => {
                self.emit_ffi_unsafe_type_lint(ty, sp, reason, help);
            }
        }
    }

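    /// For a function with a Rust ABI, checks any fn-pointer types with a non-Rust ABI
    /// that appear in its signature, rather than the signature as a whole.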
    fn check_fn(&mut self, def_id: LocalDefId, decl: &'tcx hir::FnDecl<'_>) {
        let sig = self.cx.tcx.fn_sig(def_id).instantiate_identity();
        let sig = self.cx.tcx.instantiate_bound_regions_with_erased(sig);

        for (input_ty, input_hir) in iter::zip(sig.inputs(), decl.inputs) {
            for (fn_ptr_ty, span) in self.find_fn_ptr_ty_with_external_abi(input_hir, *input_ty) {
                self.check_type_for_ffi_and_report_errors(span, fn_ptr_ty, false, false);
            }
        }

        if let hir::FnRetTy::Return(ret_hir) = decl.output {
            for (fn_ptr_ty, span) in self.find_fn_ptr_ty_with_external_abi(ret_hir, sig.output()) {
                self.check_type_for_ffi_and_report_errors(span, fn_ptr_ty, false, true);
            }
        }
    }

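    /// Checks every argument type and the return type of a foreign (`extern`) function
    /// for FFI-safety.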
    fn check_foreign_fn(&mut self, def_id: LocalDefId, decl: &'tcx hir::FnDecl<'_>) {
        let sig = self.cx.tcx.fn_sig(def_id).instantiate_identity();
        let sig = self.cx.tcx.instantiate_bound_regions_with_erased(sig);

        for (input_ty, input_hir) in iter::zip(sig.inputs(), decl.inputs) {
            self.check_type_for_ffi_and_report_errors(input_hir.span, *input_ty, false, false);
        }

        if let hir::FnRetTy::Return(ret_hir) = decl.output {
            self.check_type_for_ffi_and_report_errors(ret_hir.span, sig.output(), false, true);
        }
    }

    fn check_foreign_static(&mut self, id: hir::OwnerId, span: Span) {
        let ty = self.cx.tcx.type_of(id).instantiate_identity();
        self.check_type_for_ffi_and_report_errors(span, ty, true, false);
    }

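    /// Finds any fn-pointer types with a non-Rust ABI nested inside `ty`, paired with
    /// the span of the corresponding fn-pointer type as written in the HIR.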
    fn find_fn_ptr_ty_with_external_abi(
        &self,
        hir_ty: &hir::Ty<'tcx>,
        ty: Ty<'tcx>,
    ) -> Vec<(Ty<'tcx>, Span)> {
        struct FnPtrFinder<'tcx> {
            spans: Vec<Span>,
            tys: Vec<Ty<'tcx>>,
        }

        impl<'tcx> hir::intravisit::Visitor<'_> for FnPtrFinder<'tcx> {
            fn visit_ty(&mut self, ty: &'_ hir::Ty<'_, AmbigArg>) {
                debug!(?ty);
                if let hir::TyKind::BareFn(hir::BareFnTy { abi, .. }) = ty.kind
                    && !abi.is_rustic_abi()
                {
                    self.spans.push(ty.span);
                }

                hir::intravisit::walk_ty(self, ty)
            }
        }

        impl<'tcx> ty::TypeVisitor<TyCtxt<'tcx>> for FnPtrFinder<'tcx> {
            type Result = ();

            fn visit_ty(&mut self, ty: Ty<'tcx>) -> Self::Result {
                if let ty::FnPtr(_, hdr) = ty.kind()
                    && !hdr.abi.is_rustic_abi()
                {
                    self.tys.push(ty);
                }

                ty.super_visit_with(self)
            }
        }

        let mut visitor = FnPtrFinder { spans: Vec::new(), tys: Vec::new() };
        ty.visit_with(&mut visitor);
        visitor.visit_ty_unambig(hir_ty);

        iter::zip(visitor.tys.drain(..), visitor.spans.drain(..)).collect()
    }
}

impl<'tcx> LateLintPass<'tcx> for ImproperCTypesDeclarations {
    fn check_foreign_item(&mut self, cx: &LateContext<'tcx>, it: &hir::ForeignItem<'tcx>) {
        let mut vis = ImproperCTypesVisitor { cx, mode: CItemKind::Declaration };
        let abi = cx.tcx.hir_get_foreign_abi(it.hir_id());

        match it.kind {
            hir::ForeignItemKind::Fn(sig, _, _) => {
                if abi.is_rustic_abi() {
                    vis.check_fn(it.owner_id.def_id, sig.decl)
                } else {
                    vis.check_foreign_fn(it.owner_id.def_id, sig.decl);
                }
            }
            hir::ForeignItemKind::Static(ty, _, _) if !abi.is_rustic_abi() => {
                vis.check_foreign_static(it.owner_id, ty.span);
            }
            hir::ForeignItemKind::Static(..) | hir::ForeignItemKind::Type => (),
        }
    }
}

impl ImproperCTypesDefinitions {
    fn check_ty_maybe_containing_foreign_fnptr<'tcx>(
        &mut self,
        cx: &LateContext<'tcx>,
        hir_ty: &'tcx hir::Ty<'_>,
        ty: Ty<'tcx>,
    ) {
        let mut vis = ImproperCTypesVisitor { cx, mode: CItemKind::Definition };
        for (fn_ptr_ty, span) in vis.find_fn_ptr_ty_with_external_abi(hir_ty, ty) {
            vis.check_type_for_ffi_and_report_errors(span, fn_ptr_ty, true, false);
        }
    }

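    /// On AIX, returns `true` if `ty` is a floating-point type wider than 4 bytes, or a
    /// struct that (recursively) contains such a field, i.e. a type affected by the
    /// `repr(C)` "power" alignment rule.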
    fn check_arg_for_power_alignment<'tcx>(
        &mut self,
        cx: &LateContext<'tcx>,
        ty: Ty<'tcx>,
    ) -> bool {
        if cx.tcx.sess.target.os != "aix" {
            return false;
        }
        if ty.is_floating_point() && ty.primitive_size(cx.tcx).bytes() > 4 {
            return true;
        } else if let Adt(adt_def, _) = ty.kind()
            && adt_def.is_struct()
        {
            let struct_variant = adt_def.variant(VariantIdx::ZERO);
            for struct_field in &struct_variant.fields {
                let field_ty = cx.tcx.type_of(struct_field.did).instantiate_identity();
                if self.check_arg_for_power_alignment(cx, field_ty) {
                    return true;
                }
            }
        }
        return false;
    }

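    /// For a non-packed `repr(C)` struct on AIX, lints fields after the first one whose
    /// type is subject to the power alignment rule, since the resulting layout may not
    /// match what the AIX C ABI expects.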
    fn check_struct_for_power_alignment<'tcx>(
        &mut self,
        cx: &LateContext<'tcx>,
        item: &'tcx hir::Item<'tcx>,
    ) {
        let adt_def = cx.tcx.adt_def(item.owner_id.to_def_id());
        if adt_def.repr().c()
            && !adt_def.repr().packed()
            && cx.tcx.sess.target.os == "aix"
            && !adt_def.all_fields().next().is_none()
        {
            let struct_variant_data = item.expect_struct().1;
            for (index, ..) in struct_variant_data.fields().iter().enumerate() {
                // Struct fields (after the first field) are checked for the
                // power alignment rule, as fields after the first are likely
                // to be the fields that are misaligned.
                if index != 0 {
                    let first_field_def = struct_variant_data.fields()[index];
                    let def_id = first_field_def.def_id;
                    let ty = cx.tcx.type_of(def_id).instantiate_identity();
                    if self.check_arg_for_power_alignment(cx, ty) {
                        cx.emit_span_lint(
                            USES_POWER_ALIGNMENT,
                            first_field_def.span,
                            UsesPowerAlignment,
                        );
                    }
                }
            }
        }
    }
}

impl<'tcx> LateLintPass<'tcx> for ImproperCTypesDefinitions {
    fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'tcx>) {
        match item.kind {
            hir::ItemKind::Static(_, ty, ..)
            | hir::ItemKind::Const(_, ty, ..)
            | hir::ItemKind::TyAlias(_, ty, ..) => {
                self.check_ty_maybe_containing_foreign_fnptr(
                    cx,
                    ty,
                    cx.tcx.type_of(item.owner_id).instantiate_identity(),
                );
            }
            // Handled by the `check_fn` hook below.
            hir::ItemKind::Fn { .. } => {}
            // Structs are checked for the power alignment rule under `repr(C)`.
            hir::ItemKind::Struct(..) => {
                self.check_struct_for_power_alignment(cx, item);
            }
            // Fields are handled in `check_field_def`.
            hir::ItemKind::Union(..) | hir::ItemKind::Enum(..) => {}
            // These items don't define anything that needs to be checked here.
            hir::ItemKind::Impl(..)
            | hir::ItemKind::TraitAlias(..)
            | hir::ItemKind::Trait(..)
            | hir::ItemKind::GlobalAsm { .. }
            | hir::ItemKind::ForeignMod { .. }
            | hir::ItemKind::Mod(..)
            | hir::ItemKind::Macro(..)
            | hir::ItemKind::Use(..)
            | hir::ItemKind::ExternCrate(..) => {}
        }
    }

    fn check_field_def(&mut self, cx: &LateContext<'tcx>, field: &'tcx hir::FieldDef<'tcx>) {
        self.check_ty_maybe_containing_foreign_fnptr(
            cx,
            field.ty,
            cx.tcx.type_of(field.def_id).instantiate_identity(),
        );
    }

    fn check_fn(
        &mut self,
        cx: &LateContext<'tcx>,
        kind: hir::intravisit::FnKind<'tcx>,
        decl: &'tcx hir::FnDecl<'_>,
        _: &'tcx hir::Body<'_>,
        _: Span,
        id: LocalDefId,
    ) {
        use hir::intravisit::FnKind;

        let abi = match kind {
            FnKind::ItemFn(_, _, header, ..) => header.abi,
            FnKind::Method(_, sig, ..) => sig.header.abi,
            _ => return,
        };

        let mut vis = ImproperCTypesVisitor { cx, mode: CItemKind::Definition };
        if abi.is_rustic_abi() {
            vis.check_fn(id, decl);
        } else {
            vis.check_foreign_fn(id, decl);
        }
    }
}

declare_lint_pass!(VariantSizeDifferences => [VARIANT_SIZE_DIFFERENCES]);

impl<'tcx> LateLintPass<'tcx> for VariantSizeDifferences {
    fn check_item(&mut self, cx: &LateContext<'_>, it: &hir::Item<'_>) {
        if let hir::ItemKind::Enum(_, ref enum_definition, _) = it.kind {
            let t = cx.tcx.type_of(it.owner_id).instantiate_identity();
            let ty = cx.tcx.erase_regions(t);
            let Ok(layout) = cx.layout_of(ty) else { return };
            let Variants::Multiple { tag_encoding: TagEncoding::Direct, tag, variants, .. } =
                &layout.variants
            else {
                return;
            };

            let tag_size = tag.size(&cx.tcx).bytes();

            debug!(
                "enum `{}` is {} bytes large with layout:\n{:#?}",
                t,
                layout.size.bytes(),
                layout
            );

            let (largest, slargest, largest_index) = iter::zip(enum_definition.variants, variants)
                .map(|(variant, variant_layout)| {
                    // Subtract the size of the enum tag.
                    let bytes = variant_layout.size.bytes().saturating_sub(tag_size);

                    debug!("- variant `{}` is {} bytes large", variant.ident, bytes);
                    bytes
                })
                .enumerate()
                .fold((0, 0, 0), |(l, s, li), (idx, size)| {
                    if size > l {
                        (size, l, idx)
                    } else if size > s {
                        (l, size, li)
                    } else {
                        (l, s, li)
                    }
                });

            // We only warn if the largest variant is at least thrice as large as
            // the second-largest.
            if largest > slargest * 3 && slargest > 0 {
                cx.emit_span_lint(
                    VARIANT_SIZE_DIFFERENCES,
                    enum_definition.variants[largest_index].span,
                    VariantSizeDifferencesDiag { largest },
                );
            }
        }
    }
}

declare_lint! {
    INVALID_ATOMIC_ORDERING,
    Deny,
    "usage of invalid atomic ordering in atomic operations and memory fences"
}

declare_lint_pass!(InvalidAtomicOrdering => [INVALID_ATOMIC_ORDERING]);

impl InvalidAtomicOrdering {
    fn inherent_atomic_method_call<'hir>(
        cx: &LateContext<'_>,
        expr: &Expr<'hir>,
        recognized_names: &[Symbol], // used for fast path
    ) -> Option<(Symbol, &'hir [Expr<'hir>])> {
        const ATOMIC_TYPES: &[Symbol] = &[
            sym::AtomicBool,
            sym::AtomicPtr,
            sym::AtomicUsize,
            sym::AtomicU8,
            sym::AtomicU16,
            sym::AtomicU32,
            sym::AtomicU64,
            sym::AtomicU128,
            sym::AtomicIsize,
            sym::AtomicI8,
            sym::AtomicI16,
            sym::AtomicI32,
            sym::AtomicI64,
            sym::AtomicI128,
        ];
        if let ExprKind::MethodCall(method_path, _, args, _) = &expr.kind
            && recognized_names.contains(&method_path.ident.name)
            && let Some(m_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
            && let Some(impl_did) = cx.tcx.impl_of_method(m_def_id)
            && let Some(adt) = cx.tcx.type_of(impl_did).instantiate_identity().ty_adt_def()
            // Skip extension traits, only lint the inherent methods of the standard library types.
            && cx.tcx.trait_id_of_impl(impl_did).is_none()
            && let parent = cx.tcx.parent(adt.did())
            && cx.tcx.is_diagnostic_item(sym::atomic_mod, parent)
            && ATOMIC_TYPES.contains(&cx.tcx.item_name(adt.did()))
        {
            return Some((method_path.ident.name, args));
        }
        None
    }

    fn match_ordering(cx: &LateContext<'_>, ord_arg: &Expr<'_>) -> Option<Symbol> {
        let ExprKind::Path(ref ord_qpath) = ord_arg.kind else { return None };
        let did = cx.qpath_res(ord_qpath, ord_arg.hir_id).opt_def_id()?;
        let tcx = cx.tcx;
        let atomic_ordering = tcx.get_diagnostic_item(sym::Ordering);
        let name = tcx.item_name(did);
        let parent = tcx.parent(did);
        [sym::Relaxed, sym::Release, sym::Acquire, sym::AcqRel, sym::SeqCst].into_iter().find(
            |&ordering| {
                name == ordering
                    && (Some(parent) == atomic_ordering
                        // Needed in case this is a constructor rather than the variant itself.
                        || tcx.opt_parent(parent) == atomic_ordering)
            },
        )
    }

    fn check_atomic_load_store(cx: &LateContext<'_>, expr: &Expr<'_>) {
        if let Some((method, args)) =
            Self::inherent_atomic_method_call(cx, expr, &[sym::load, sym::store])
            && let Some((ordering_arg, invalid_ordering)) = match method {
                sym::load => Some((&args[0], sym::Release)),
                sym::store => Some((&args[1], sym::Acquire)),
                _ => None,
            }
            && let Some(ordering) = Self::match_ordering(cx, ordering_arg)
            && (ordering == invalid_ordering || ordering == sym::AcqRel)
        {
            if method == sym::load {
                cx.emit_span_lint(INVALID_ATOMIC_ORDERING, ordering_arg.span, AtomicOrderingLoad);
            } else {
                cx.emit_span_lint(INVALID_ATOMIC_ORDERING, ordering_arg.span, AtomicOrderingStore);
            };
        }
    }

    fn check_memory_fence(cx: &LateContext<'_>, expr: &Expr<'_>) {
        if let ExprKind::Call(func, args) = expr.kind
            && let ExprKind::Path(ref func_qpath) = func.kind
            && let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id()
            && matches!(cx.tcx.get_diagnostic_name(def_id), Some(sym::fence | sym::compiler_fence))
            && Self::match_ordering(cx, &args[0]) == Some(sym::Relaxed)
        {
            cx.emit_span_lint(INVALID_ATOMIC_ORDERING, args[0].span, AtomicOrderingFence);
        }
    }

    fn check_atomic_compare_exchange(cx: &LateContext<'_>, expr: &Expr<'_>) {
        let Some((method, args)) = Self::inherent_atomic_method_call(
            cx,
            expr,
            &[sym::fetch_update, sym::compare_exchange, sym::compare_exchange_weak],
        ) else {
            return;
        };

        let fail_order_arg = match method {
            sym::fetch_update => &args[1],
            sym::compare_exchange | sym::compare_exchange_weak => &args[3],
            _ => return,
        };

        let Some(fail_ordering) = Self::match_ordering(cx, fail_order_arg) else { return };

        if matches!(fail_ordering, sym::Release | sym::AcqRel) {
            cx.emit_span_lint(
                INVALID_ATOMIC_ORDERING,
                fail_order_arg.span,
                InvalidAtomicOrderingDiag { method, fail_order_arg_span: fail_order_arg.span },
            );
        }
    }
}

impl<'tcx> LateLintPass<'tcx> for InvalidAtomicOrdering {
    fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
        Self::check_atomic_load_store(cx, expr);
        Self::check_memory_fence(cx, expr);
        Self::check_atomic_compare_exchange(cx, expr);
    }
}