rustc_hir_typeck/upvar.rs
//! ### Inferring borrow kinds for upvars
//!
//! Whenever there is a closure expression, we need to determine how each
//! upvar is used. We do this by initially assigning each upvar an
//! immutable "borrow kind" (see `ty::BorrowKind` for details) and then
//! "escalating" the kind as needed. The borrow kind proceeds according to
//! the following lattice:
//! ```ignore (not-rust)
//! ty::ImmBorrow -> ty::UniqueImmBorrow -> ty::MutBorrow
//! ```
//! So, for example, if we see an assignment `x = 5` to an upvar `x`, we
//! will promote its borrow kind to mutable borrow. If we see an `&mut x`
//! we'll do the same. Naturally, this applies not just to the upvar, but
//! to everything owned by `x`, so the result is the same for something
//! like `x.f = 5` and so on (presuming `x` is not a borrowed pointer to a
//! struct). These adjustments are performed in
//! `adjust_for_non_move_closure` (you can trace backwards through the code
//! from there).
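//!
//! For instance (an illustrative sketch; the real analysis is driven by the
//! `ExprUseVisitor` machinery below), the assignment through `x.f` escalates the
//! capture of `x` (or, with precise captures, of `x.f`) to a mutable borrow:
//!
//! ```ignore (illustrative)
//! struct SomeStruct { f: i32 }
//! let mut x = SomeStruct { f: 0 };
//! let mut c = || x.f = 5; // capture escalated from `ImmBorrow` to `MutBorrow`
//! c();
//! ```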
//!
//! The fact that we are inferring borrow kinds as we go results in a
//! semi-hacky interaction with mem-categorization. In particular,
//! mem-categorization will query the current borrow kind as it
//! categorizes, and we'll return the *current* value, but this may get
//! adjusted later. Therefore, in this module, we generally ignore the
//! borrow kind (and derived mutabilities) that are returned from
//! mem-categorization, since they may be inaccurate. (Another option
//! would be to use a unification scheme, where instead of returning a
//! concrete borrow kind like `ty::ImmBorrow`, we return a
//! `ty::InferBorrow(upvar_id)` or something like that, but this would
//! then mean that all later passes would have to check for these figments
//! and report an error, and it just seems like more mess in the end.)
use std::iter;
use rustc_abi::FIRST_VARIANT;
use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
use rustc_data_structures::unord::{ExtendUnord, UnordSet};
use rustc_errors::{Applicability, MultiSpan};
use rustc_hir as hir;
use rustc_hir::HirId;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::intravisit::{self, Visitor};
use rustc_middle::hir::place::{Place, PlaceBase, PlaceWithHirId, Projection, ProjectionKind};
use rustc_middle::mir::FakeReadCause;
use rustc_middle::traits::ObligationCauseCode;
use rustc_middle::ty::{
self, BorrowKind, ClosureSizeProfileData, Ty, TyCtxt, TypeVisitableExt as _, TypeckResults,
UpvarArgs, UpvarCapture,
};
use rustc_middle::{bug, span_bug};
use rustc_session::lint;
use rustc_span::{BytePos, Pos, Span, Symbol, sym};
use rustc_trait_selection::infer::InferCtxtExt;
use tracing::{debug, instrument};
use super::FnCtxt;
use crate::expr_use_visitor as euv;
/// Describes the relationship between the paths of two places,
/// e.g.:
/// - `foo` is an ancestor of `foo.bar.baz`
/// - `foo.bar.baz` is a descendant of `foo.bar`
/// - `foo.bar` and `foo.baz` are divergent
enum PlaceAncestryRelation {
Ancestor,
Descendant,
SamePlace,
Divergent,
}
/// Intermediate format to store a captured `Place` and associated `ty::CaptureInfo`
/// during capture analysis. Information in this map feeds into the minimum capture
/// analysis pass.
type InferredCaptureInformation<'tcx> = Vec<(Place<'tcx>, ty::CaptureInfo)>;
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
pub(crate) fn closure_analyze(&self, body: &'tcx hir::Body<'tcx>) {
InferBorrowKindVisitor { fcx: self }.visit_body(body);
// it's our job to process these.
assert!(self.deferred_call_resolutions.borrow().is_empty());
}
}
/// Intermediate format to store the `HirId` pointing to the use that resulted in the
/// corresponding place being captured, and a `String` containing the captured value's
/// name (e.g. `a.b.c`).
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
enum UpvarMigrationInfo {
/// We previously captured all of `x`, but now we capture some sub-path.
CapturingPrecise { source_expr: Option<HirId>, var_name: String },
CapturingNothing {
// where the variable appears in the closure (but is not captured)
use_span: Span,
},
}
/// Reasons that we might issue a migration warning.
#[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
struct MigrationWarningReason {
/// When we used to capture `x` in its entirety, we implemented the auto-trait(s)
/// in this vec, but now we don't.
auto_traits: Vec<&'static str>,
/// When we used to capture `x` in its entirety, we would execute some destructors
/// at a different time.
drop_order: bool,
}
impl MigrationWarningReason {
fn migration_message(&self) -> String {
let base = "changes to closure capture in Rust 2021 will affect";
if !self.auto_traits.is_empty() && self.drop_order {
format!("{base} drop order and which traits the closure implements")
} else if self.drop_order {
format!("{base} drop order")
} else {
format!("{base} which traits the closure implements")
}
}
}
/// Intermediate format to store information needed to generate a note in the migration lint.
struct MigrationLintNote {
captures_info: UpvarMigrationInfo,
/// reasons why migration is needed for this capture
reason: MigrationWarningReason,
}
/// Intermediate format to store the hir id of the root variable and a list of notes
/// explaining why the root variable should be fully captured
struct NeededMigration {
var_hir_id: HirId,
diagnostics_info: Vec<MigrationLintNote>,
}
struct InferBorrowKindVisitor<'a, 'tcx> {
fcx: &'a FnCtxt<'a, 'tcx>,
}
impl<'a, 'tcx> Visitor<'tcx> for InferBorrowKindVisitor<'a, 'tcx> {
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
match expr.kind {
hir::ExprKind::Closure(&hir::Closure { capture_clause, body: body_id, .. }) => {
let body = self.fcx.tcx.hir().body(body_id);
self.visit_body(body);
self.fcx.analyze_closure(expr.hir_id, expr.span, body_id, body, capture_clause);
}
hir::ExprKind::ConstBlock(anon_const) => {
let body = self.fcx.tcx.hir().body(anon_const.body);
self.visit_body(body);
}
_ => {}
}
intravisit::walk_expr(self, expr);
}
}
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// Analysis starting point.
#[instrument(skip(self, body), level = "debug")]
fn analyze_closure(
&self,
closure_hir_id: HirId,
span: Span,
body_id: hir::BodyId,
body: &'tcx hir::Body<'tcx>,
mut capture_clause: hir::CaptureBy,
) {
// Extract the type of the closure.
let ty = self.node_ty(closure_hir_id);
let (closure_def_id, args, infer_kind) = match *ty.kind() {
ty::Closure(def_id, args) => {
(def_id, UpvarArgs::Closure(args), self.closure_kind(ty).is_none())
}
ty::CoroutineClosure(def_id, args) => {
(def_id, UpvarArgs::CoroutineClosure(args), self.closure_kind(ty).is_none())
}
ty::Coroutine(def_id, args) => (def_id, UpvarArgs::Coroutine(args), false),
ty::Error(_) => {
// #51714: skip analysis when we have already encountered type errors
return;
}
_ => {
span_bug!(
span,
"type of closure expr {:?} is not a closure {:?}",
closure_hir_id,
ty
);
}
};
let args = self.resolve_vars_if_possible(args);
let closure_def_id = closure_def_id.expect_local();
assert_eq!(self.tcx.hir().body_owner_def_id(body.id()), closure_def_id);
let mut delegate = InferBorrowKind {
closure_def_id,
capture_information: Default::default(),
fake_reads: Default::default(),
};
let _ = euv::ExprUseVisitor::new(
&FnCtxt::new(self, self.tcx.param_env(closure_def_id), closure_def_id),
&mut delegate,
)
.consume_body(body);
// There are several curious situations with coroutine-closures where
// analysis is too aggressive with borrows when the coroutine-closure is
// marked `move`. Specifically:
//
// 1. If the coroutine-closure was inferred to be `FnOnce` during signature
// inference, then it's still possible that we try to borrow upvars from
// the coroutine-closure because they are not used by the coroutine body
// in a way that forces a move. See the test:
// `async-await/async-closures/force-move-due-to-inferred-kind.rs`.
//
// 2. If the coroutine-closure is forced to be `FnOnce` due to the way it
// uses its upvars (e.g. it consumes a non-copy value), but not *all* upvars
// would force the closure to `FnOnce`.
// See the test: `async-await/async-closures/force-move-due-to-actually-fnonce.rs`.
//
// This would lead to an impossible-to-satisfy situation, since `AsyncFnOnce`
// coroutine bodies can't borrow from their parent closure. To fix this,
// we force the inner coroutine to also be `move`. This only matters for
// coroutine-closures that are `move`, since otherwise they themselves will
// be borrowing from the outer environment, so there are no self-borrows occurring.
if let UpvarArgs::Coroutine(..) = args
&& let hir::CoroutineKind::Desugared(_, hir::CoroutineSource::Closure) =
self.tcx.coroutine_kind(closure_def_id).expect("coroutine should have kind")
&& let parent_hir_id =
self.tcx.local_def_id_to_hir_id(self.tcx.local_parent(closure_def_id))
&& let parent_ty = self.node_ty(parent_hir_id)
&& let hir::CaptureBy::Value { move_kw } =
self.tcx.hir_node(parent_hir_id).expect_closure().capture_clause
{
// (1.) Closure signature inference forced this closure to `FnOnce`.
if let Some(ty::ClosureKind::FnOnce) = self.closure_kind(parent_ty) {
capture_clause = hir::CaptureBy::Value { move_kw };
}
// (2.) The way that the closure uses its upvars means it's `FnOnce`.
else if self.coroutine_body_consumes_upvars(closure_def_id, body) {
capture_clause = hir::CaptureBy::Value { move_kw };
}
}
// As noted in `lower_coroutine_body_with_moved_arguments`, we default the capture mode
// to `ByRef` for the `async {}` block internal to async fns/closure. This means
// that we would *not* be moving all of the parameters into the async block in all cases.
// For example, when one of the arguments is `Copy`, we turn a consuming use into a copy of
// a reference, so for `async fn x(t: i32) {}`, we'd only take a reference to `t`.
//
// We force all of these arguments to be captured by move before we do expr use analysis.
//
// FIXME(async_closures): This could be cleaned up. It's a bit janky that we're just
// moving all of the `LocalSource::AsyncFn` locals here.
if let Some(hir::CoroutineKind::Desugared(
_,
hir::CoroutineSource::Fn | hir::CoroutineSource::Closure,
)) = self.tcx.coroutine_kind(closure_def_id)
{
let hir::ExprKind::Block(block, _) = body.value.kind else {
bug!();
};
for stmt in block.stmts {
let hir::StmtKind::Let(hir::LetStmt {
init: Some(init),
source: hir::LocalSource::AsyncFn,
pat,
..
}) = stmt.kind
else {
bug!();
};
let hir::PatKind::Binding(hir::BindingMode(hir::ByRef::No, _), _, _, _) = pat.kind
else {
// Complex pattern, skip the non-upvar local.
continue;
};
let hir::ExprKind::Path(hir::QPath::Resolved(_, path)) = init.kind else {
bug!();
};
let hir::def::Res::Local(local_id) = path.res else {
bug!();
};
let place = self.place_for_root_variable(closure_def_id, local_id);
delegate.capture_information.push((place, ty::CaptureInfo {
capture_kind_expr_id: Some(init.hir_id),
path_expr_id: Some(init.hir_id),
capture_kind: UpvarCapture::ByValue,
}));
}
}
debug!(
"For closure={:?}, capture_information={:#?}",
closure_def_id, delegate.capture_information
);
self.log_capture_analysis_first_pass(closure_def_id, &delegate.capture_information, span);
let (capture_information, closure_kind, origin) = self
.process_collected_capture_information(capture_clause, &delegate.capture_information);
self.compute_min_captures(closure_def_id, capture_information, span);
let closure_hir_id = self.tcx.local_def_id_to_hir_id(closure_def_id);
if should_do_rust_2021_incompatible_closure_captures_analysis(self.tcx, closure_hir_id) {
self.perform_2229_migration_analysis(closure_def_id, body_id, capture_clause, span);
}
let after_feature_tys = self.final_upvar_tys(closure_def_id);
// We now fake capture information for all variables that are mentioned within the closure.
// We do this after handling migrations so that min_captures is computed from the real
// capture information first, before the fake information is added.
if !enable_precise_capture(span) {
let mut capture_information: InferredCaptureInformation<'tcx> = Default::default();
if let Some(upvars) = self.tcx.upvars_mentioned(closure_def_id) {
for var_hir_id in upvars.keys() {
let place = self.place_for_root_variable(closure_def_id, *var_hir_id);
debug!("seed place {:?}", place);
let capture_kind = self.init_capture_kind_for_place(&place, capture_clause);
let fake_info = ty::CaptureInfo {
capture_kind_expr_id: None,
path_expr_id: None,
capture_kind,
};
capture_information.push((place, fake_info));
}
}
// This will update the min captures based on this new fake information.
self.compute_min_captures(closure_def_id, capture_information, span);
}
let before_feature_tys = self.final_upvar_tys(closure_def_id);
if infer_kind {
// Unify the (as yet unbound) type variable in the closure
// args with the kind we inferred.
let closure_kind_ty = match args {
UpvarArgs::Closure(args) => args.as_closure().kind_ty(),
UpvarArgs::CoroutineClosure(args) => args.as_coroutine_closure().kind_ty(),
UpvarArgs::Coroutine(_) => unreachable!("coroutines don't have an inferred kind"),
};
self.demand_eqtype(
span,
Ty::from_closure_kind(self.tcx, closure_kind),
closure_kind_ty,
);
// If we have an origin, store it.
if let Some(mut origin) = origin {
if !enable_precise_capture(span) {
// Without precise captures, we just capture the base and ignore
// the projections.
origin.1.projections.clear()
}
self.typeck_results
.borrow_mut()
.closure_kind_origins_mut()
.insert(closure_hir_id, origin);
}
}
// For coroutine-closures, we additionally must compute the
// `coroutine_captures_by_ref_ty` type, which is used to generate the by-ref
// version of the coroutine-closure's output coroutine.
if let UpvarArgs::CoroutineClosure(args) = args
&& !args.references_error()
{
let closure_env_region: ty::Region<'_> =
ty::Region::new_bound(self.tcx, ty::INNERMOST, ty::BoundRegion {
var: ty::BoundVar::ZERO,
kind: ty::BoundRegionKind::ClosureEnv,
});
let num_args = args
.as_coroutine_closure()
.coroutine_closure_sig()
.skip_binder()
.tupled_inputs_ty
.tuple_fields()
.len();
let typeck_results = self.typeck_results.borrow();
let tupled_upvars_ty_for_borrow = Ty::new_tup_from_iter(
self.tcx,
ty::analyze_coroutine_closure_captures(
typeck_results.closure_min_captures_flattened(closure_def_id),
typeck_results
.closure_min_captures_flattened(
self.tcx.coroutine_for_closure(closure_def_id).expect_local(),
)
// Skip the captures that are just moving the closure's args
// into the coroutine. These are always by move, and we append
// those later in the `CoroutineClosureSignature` helper functions.
.skip(num_args),
|(_, parent_capture), (_, child_capture)| {
// This is subtle. See the documentation on
// `should_reborrow_from_env_of_parent_coroutine_closure`.
let needs_ref = should_reborrow_from_env_of_parent_coroutine_closure(
parent_capture,
child_capture,
);
let upvar_ty = child_capture.place.ty();
let capture = child_capture.info.capture_kind;
// Not all upvars are captured by ref, so use
// `apply_capture_kind_on_capture_ty` to ensure that we
// compute the right captured type.
return apply_capture_kind_on_capture_ty(
self.tcx,
upvar_ty,
capture,
if needs_ref {
closure_env_region
} else {
self.tcx.lifetimes.re_erased
},
);
},
),
);
let coroutine_captures_by_ref_ty = Ty::new_fn_ptr(
self.tcx,
ty::Binder::bind_with_vars(
self.tcx.mk_fn_sig(
[],
tupled_upvars_ty_for_borrow,
false,
hir::Safety::Safe,
rustc_abi::ExternAbi::Rust,
),
self.tcx.mk_bound_variable_kinds(&[ty::BoundVariableKind::Region(
ty::BoundRegionKind::ClosureEnv,
)]),
),
);
self.demand_eqtype(
span,
args.as_coroutine_closure().coroutine_captures_by_ref_ty(),
coroutine_captures_by_ref_ty,
);
// Additionally, we can now constrain the coroutine's kind type.
//
// We only do this if `infer_kind`, because if we have constrained
// the kind from closure signature inference, the kind inferred
// for the inner coroutine may actually be more restrictive.
if infer_kind {
let ty::Coroutine(_, coroutine_args) =
*self.typeck_results.borrow().expr_ty(body.value).kind()
else {
bug!();
};
self.demand_eqtype(
span,
coroutine_args.as_coroutine().kind_ty(),
Ty::from_coroutine_closure_kind(self.tcx, closure_kind),
);
}
}
self.log_closure_min_capture_info(closure_def_id, span);
// Now that we've analyzed the closure, we know how each
// variable is borrowed, and we know what traits the closure
// implements (Fn vs FnMut etc). We now have some updates to do
// with that information.
//
// Note that no closure type C may have an upvar of type C
// (though it may reference itself via a trait object). This
// results from the desugaring of closures to a struct like
// `Foo<..., UV0...UVn>`. If one of those upvars referenced
// C, then the type would have infinite size (and the
// inference algorithm will reject it).
// Equate the type variables for the upvars with the actual types.
let final_upvar_tys = self.final_upvar_tys(closure_def_id);
debug!(?closure_hir_id, ?args, ?final_upvar_tys);
if self.tcx.features().unsized_locals() || self.tcx.features().unsized_fn_params() {
for capture in
self.typeck_results.borrow().closure_min_captures_flattened(closure_def_id)
{
if let UpvarCapture::ByValue = capture.info.capture_kind {
self.require_type_is_sized(
capture.place.ty(),
capture.get_path_span(self.tcx),
ObligationCauseCode::SizedClosureCapture(closure_def_id),
);
}
}
}
// Build a tuple (U0..Un) of the final upvar types U0..Un
// and unify the upvar tuple type in the closure with it:
let final_tupled_upvars_type = Ty::new_tup(self.tcx, &final_upvar_tys);
self.demand_suptype(span, args.tupled_upvars_ty(), final_tupled_upvars_type);
let fake_reads = delegate.fake_reads;
self.typeck_results.borrow_mut().closure_fake_reads.insert(closure_def_id, fake_reads);
if self.tcx.sess.opts.unstable_opts.profile_closures {
self.typeck_results.borrow_mut().closure_size_eval.insert(
closure_def_id,
ClosureSizeProfileData {
before_feature_tys: Ty::new_tup(self.tcx, &before_feature_tys),
after_feature_tys: Ty::new_tup(self.tcx, &after_feature_tys),
},
);
}
// If we also inferred the closure kind here,
// process any deferred call resolutions.
let deferred_call_resolutions = self.remove_deferred_call_resolutions(closure_def_id);
for deferred_call_resolution in deferred_call_resolutions {
deferred_call_resolution.resolve(self);
}
}
/// Determines whether the body of the coroutine uses its upvars in a way that
/// consumes (i.e. moves) the value, which would force the coroutine to `FnOnce`.
/// As explained in the more detailed comment above, we care whether this happens
/// because, if it does, we want to force the coroutine to move all of the upvars it
/// would've borrowed from the parent coroutine-closure.
///
/// This only really makes sense to be called on the child coroutine of a
/// coroutine-closure.
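///
/// For instance (an illustrative sketch): in the `async move` closure below, the
/// coroutine body consumes the non-`Copy` upvar `s`, so the coroutine is `FnOnce`
/// and we will force it to move its upvars rather than borrow them:
///
/// ```ignore (illustrative)
/// let s = String::from("s");
/// let c = async move || {
///     drop(s); // consuming `s` forces `FnOnce`
/// };
/// ```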
fn coroutine_body_consumes_upvars(
&self,
coroutine_def_id: LocalDefId,
body: &'tcx hir::Body<'tcx>,
) -> bool {
// This block contains argument capturing details. Since arguments
// aren't upvars, we do not care about them for determining if the
// coroutine body actually consumes its upvars.
let hir::ExprKind::Block(&hir::Block { expr: Some(body), .. }, None) = body.value.kind
else {
bug!();
};
// Specifically, we only care about the *real* body of the coroutine.
// We skip out into the drop-temps within the block of the body in order
// to skip over the args of the desugaring.
let hir::ExprKind::DropTemps(body) = body.kind else {
bug!();
};
let mut delegate = InferBorrowKind {
closure_def_id: coroutine_def_id,
capture_information: Default::default(),
fake_reads: Default::default(),
};
let _ = euv::ExprUseVisitor::new(
&FnCtxt::new(self, self.tcx.param_env(coroutine_def_id), coroutine_def_id),
&mut delegate,
)
.consume_expr(body);
let (_, kind, _) = self.process_collected_capture_information(
hir::CaptureBy::Ref,
&delegate.capture_information,
);
matches!(kind, ty::ClosureKind::FnOnce)
}
// Returns a list of `Ty`s for each upvar.
fn final_upvar_tys(&self, closure_id: LocalDefId) -> Vec<Ty<'tcx>> {
self.typeck_results
.borrow()
.closure_min_captures_flattened(closure_id)
.map(|captured_place| {
let upvar_ty = captured_place.place.ty();
let capture = captured_place.info.capture_kind;
debug!(?captured_place.place, ?upvar_ty, ?capture, ?captured_place.mutability);
apply_capture_kind_on_capture_ty(
self.tcx,
upvar_ty,
capture,
self.tcx.lifetimes.re_erased,
)
})
.collect()
}
/// Adjusts the closure capture information to ensure that the operations aren't unsafe,
/// and that the path can be captured with the required capture kind (depending on its use
/// in the closure, whether this is a move closure, etc.)
///
/// Returns the set of adjusted information along with the inferred closure kind and span
/// associated with the closure kind inference.
///
/// Note that we *always* infer a minimal kind, even if
/// we don't always *use* that in the final result (i.e., sometimes
/// we've taken the closure kind from the expectations instead, and
/// for coroutines we don't even implement the closure traits
/// really).
///
/// If we inferred that the closure needs to be FnMut/FnOnce, the last element of the
/// returned tuple contains a `Some()` with the `Place` that caused us to do so.
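///
/// For example (illustrative): the closure below mutably borrows `n`, so the inferred
/// kind is escalated from `Fn` to `FnMut` and the returned origin points at the
/// `n += 1` use:
///
/// ```ignore (illustrative)
/// let mut n = 0;
/// let mut c = || n += 1; // mutable borrow of `n`, so the closure is at least `FnMut`
/// c();
/// ```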
fn process_collected_capture_information(
&self,
capture_clause: hir::CaptureBy,
capture_information: &InferredCaptureInformation<'tcx>,
) -> (InferredCaptureInformation<'tcx>, ty::ClosureKind, Option<(Span, Place<'tcx>)>) {
let mut closure_kind = ty::ClosureKind::LATTICE_BOTTOM;
let mut origin: Option<(Span, Place<'tcx>)> = None;
let processed = capture_information
.iter()
.cloned()
.map(|(place, mut capture_info)| {
// Apply rules for safety before inferring closure kind
let (place, capture_kind) =
restrict_capture_precision(place, capture_info.capture_kind);
let (place, capture_kind) = truncate_capture_for_optimization(place, capture_kind);
let usage_span = if let Some(usage_expr) = capture_info.path_expr_id {
self.tcx.hir().span(usage_expr)
} else {
unreachable!()
};
let updated = match capture_kind {
ty::UpvarCapture::ByValue => match closure_kind {
ty::ClosureKind::Fn | ty::ClosureKind::FnMut => {
(ty::ClosureKind::FnOnce, Some((usage_span, place.clone())))
}
// If closure is already FnOnce, don't update
ty::ClosureKind::FnOnce => (closure_kind, origin.take()),
},
ty::UpvarCapture::ByRef(
ty::BorrowKind::Mutable | ty::BorrowKind::UniqueImmutable,
) => {
match closure_kind {
ty::ClosureKind::Fn => {
(ty::ClosureKind::FnMut, Some((usage_span, place.clone())))
}
// Don't update the origin
ty::ClosureKind::FnMut | ty::ClosureKind::FnOnce => {
(closure_kind, origin.take())
}
}
}
_ => (closure_kind, origin.take()),
};
closure_kind = updated.0;
origin = updated.1;
let (place, capture_kind) = match capture_clause {
hir::CaptureBy::Value { .. } => adjust_for_move_closure(place, capture_kind),
hir::CaptureBy::Ref => adjust_for_non_move_closure(place, capture_kind),
};
// This restriction needs to be applied after we have handled adjustments for `move`
// closures. We want to make sure any adjustment that might make us move the place into
// the closure gets handled.
let (place, capture_kind) =
restrict_precision_for_drop_types(self, place, capture_kind);
capture_info.capture_kind = capture_kind;
(place, capture_info)
})
.collect();
(processed, closure_kind, origin)
}
/// Analyzes the information collected by `InferBorrowKind` to compute the min number of
/// Places (and corresponding capture kind) that we need to keep track of to support all
/// the required captured paths.
///
///
/// Note: If this function is called multiple times for the same closure, it will update
/// the existing min_capture map that is stored in TypeckResults.
///
/// Eg:
/// ```
/// #[derive(Debug)]
/// struct Point { x: i32, y: i32 }
///
/// let s = String::from("s"); // hir_id_s
/// let mut p = Point { x: 2, y: -2 }; // hir_id_p
/// let c = || {
/// println!("{s:?}"); // L1
/// p.x += 10; // L2
/// println!("{}" , p.y); // L3
/// println!("{p:?}"); // L4
/// drop(s); // L5
/// };
/// ```
/// and let hir_id_L1..5 be the expressions pointing to use of a captured variable on
/// the lines L1..5 respectively.
///
/// InferBorrowKind results in a structure like this:
///
/// ```ignore (illustrative)
/// {
/// Place(base: hir_id_s, projections: [], ....) -> {
/// capture_kind_expr: hir_id_L5,
/// path_expr_id: hir_id_L5,
/// capture_kind: ByValue
/// },
/// Place(base: hir_id_p, projections: [Field(0, 0)], ...) -> {
/// capture_kind_expr: hir_id_L2,
/// path_expr_id: hir_id_L2,
/// capture_kind: ByValue
/// },
/// Place(base: hir_id_p, projections: [Field(1, 0)], ...) -> {
/// capture_kind_expr: hir_id_L3,
/// path_expr_id: hir_id_L3,
/// capture_kind: ByValue
/// },
/// Place(base: hir_id_p, projections: [], ...) -> {
/// capture_kind_expr: hir_id_L4,
/// path_expr_id: hir_id_L4,
/// capture_kind: ByValue
/// },
/// }
/// ```
///
/// After the min capture analysis, we get:
/// ```ignore (illustrative)
/// {
/// hir_id_s -> [
/// Place(base: hir_id_s, projections: [], ....) -> {
/// capture_kind_expr: hir_id_L5,
/// path_expr_id: hir_id_L5,
/// capture_kind: ByValue
/// },
/// ],
/// hir_id_p -> [
/// Place(base: hir_id_p, projections: [], ...) -> {
/// capture_kind_expr: hir_id_L2,
/// path_expr_id: hir_id_L4,
/// capture_kind: ByValue
/// },
/// ],
/// }
/// ```
fn compute_min_captures(
&self,
closure_def_id: LocalDefId,
capture_information: InferredCaptureInformation<'tcx>,
closure_span: Span,
) {
if capture_information.is_empty() {
return;
}
let mut typeck_results = self.typeck_results.borrow_mut();
let mut root_var_min_capture_list =
typeck_results.closure_min_captures.remove(&closure_def_id).unwrap_or_default();
for (mut place, capture_info) in capture_information.into_iter() {
let var_hir_id = match place.base {
PlaceBase::Upvar(upvar_id) => upvar_id.var_path.hir_id,
base => bug!("Expected upvar, found={:?}", base),
};
let var_ident = self.tcx.hir().ident(var_hir_id);
let Some(min_cap_list) = root_var_min_capture_list.get_mut(&var_hir_id) else {
let mutability = self.determine_capture_mutability(&typeck_results, &place);
let min_cap_list =
vec![ty::CapturedPlace { var_ident, place, info: capture_info, mutability }];
root_var_min_capture_list.insert(var_hir_id, min_cap_list);
continue;
};
// Go through each entry in the current list of min_captures
// - if ancestor is found, update its capture kind to account for current place's
// capture information.
//
// - if descendant is found, remove it from the list, and update the current place's
// capture information to account for the descendant's capture kind.
//
// We can never be in a case where the list contains both an ancestor and a descendant.
// Also, there can only be one ancestor, but there might be multiple descendants.
let mut descendant_found = false;
let mut updated_capture_info = capture_info;
min_cap_list.retain(|possible_descendant| {
match determine_place_ancestry_relation(&place, &possible_descendant.place) {
// current place is ancestor of possible_descendant
PlaceAncestryRelation::Ancestor => {
descendant_found = true;
let mut possible_descendant = possible_descendant.clone();
let backup_path_expr_id = updated_capture_info.path_expr_id;
// Truncate the descendant (already in min_captures) to be the same as the ancestor to handle any
// possible change in capture mode.
truncate_place_to_len_and_update_capture_kind(
&mut possible_descendant.place,
&mut possible_descendant.info.capture_kind,
place.projections.len(),
);
updated_capture_info =
determine_capture_info(updated_capture_info, possible_descendant.info);
// we need to keep the ancestor's `path_expr_id`
updated_capture_info.path_expr_id = backup_path_expr_id;
false
}
_ => true,
}
});
let mut ancestor_found = false;
if !descendant_found {
for possible_ancestor in min_cap_list.iter_mut() {
match determine_place_ancestry_relation(&place, &possible_ancestor.place) {
PlaceAncestryRelation::SamePlace => {
ancestor_found = true;
possible_ancestor.info = determine_capture_info(
possible_ancestor.info,
updated_capture_info,
);
// Only one related place will be in the list.
break;
}
// current place is descendant of possible_ancestor
PlaceAncestryRelation::Descendant => {
ancestor_found = true;
let backup_path_expr_id = possible_ancestor.info.path_expr_id;
// Truncate the descendant (current place) to be the same as the ancestor to handle any
// possible change in capture mode.
truncate_place_to_len_and_update_capture_kind(
&mut place,
&mut updated_capture_info.capture_kind,
possible_ancestor.place.projections.len(),
);
possible_ancestor.info = determine_capture_info(
possible_ancestor.info,
updated_capture_info,
);
// we need to keep the ancestor's `path_expr_id`
possible_ancestor.info.path_expr_id = backup_path_expr_id;
// Only one related place will be in the list.
break;
}
_ => {}
}
}
}
// Only need to insert when we don't have an ancestor in the existing min capture list
if !ancestor_found {
let mutability = self.determine_capture_mutability(&typeck_results, &place);
let captured_place =
ty::CapturedPlace { var_ident, place, info: updated_capture_info, mutability };
min_cap_list.push(captured_place);
}
}
debug!(
"For closure={:?}, min_captures before sorting={:?}",
closure_def_id, root_var_min_capture_list
);
// Now that we have the minimized list of captures, sort the captures by field id.
// This causes the closure to capture the upvars in the same order as the fields are
// declared which is also the drop order. Thus, in situations where we capture all the
// fields of some type, the observable drop order will remain the same as it previously
// was even though we're dropping each capture individually.
// See https://github.com/rust-lang/project-rfc-2229/issues/42 and
// `tests/ui/closures/2229_closure_analysis/preserve_field_drop_order.rs`.
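// For example (illustrative): if a closure captures both `p.y` and `p.x` (discovered in
// that order), sorting by field index stores `p.x` before `p.y`, so `p.x` is dropped
// first, matching the declaration order of the fields.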
for (_, captures) in &mut root_var_min_capture_list {
captures.sort_by(|capture1, capture2| {
fn is_field<'a>(p: &&Projection<'a>) -> bool {
match p.kind {
ProjectionKind::Field(_, _) => true,
ProjectionKind::Deref | ProjectionKind::OpaqueCast => false,
p @ (ProjectionKind::Subslice | ProjectionKind::Index) => {
bug!("ProjectionKind {:?} was unexpected", p)
}
}
}
// Need to sort only by Field projections, so filter away others.
// A previous implementation considered other projection types too
// but that caused ICE #118144
let capture1_field_projections = capture1.place.projections.iter().filter(is_field);
let capture2_field_projections = capture2.place.projections.iter().filter(is_field);
for (p1, p2) in capture1_field_projections.zip(capture2_field_projections) {
// We do not need to look at the `Projection.ty` fields here because at each
// step of the iteration, the projections will either be the same and therefore
// the types must be as well or the current projection will be different and
// we will return the result of comparing the field indexes.
match (p1.kind, p2.kind) {
(ProjectionKind::Field(i1, _), ProjectionKind::Field(i2, _)) => {
// Compare only if paths are different.
// Otherwise continue to the next iteration
if i1 != i2 {
return i1.cmp(&i2);
}
}
// Given the filter above, this arm should never be hit
(l, r) => bug!("ProjectionKinds {:?} or {:?} were unexpected", l, r),
}
}
self.dcx().span_delayed_bug(
closure_span,
format!(
"two identical projections: ({:?}, {:?})",
capture1.place.projections, capture2.place.projections
),
);
std::cmp::Ordering::Equal
});
}
debug!(
"For closure={:?}, min_captures after sorting={:#?}",
closure_def_id, root_var_min_capture_list
);
typeck_results.closure_min_captures.insert(closure_def_id, root_var_min_capture_list);
}
/// Perform the migration analysis for RFC 2229, and emit the
/// `rust_2021_incompatible_closure_captures` lint if needed.
fn perform_2229_migration_analysis(
&self,
closure_def_id: LocalDefId,
body_id: hir::BodyId,
capture_clause: hir::CaptureBy,
span: Span,
) {
let (need_migrations, reasons) = self.compute_2229_migrations(
closure_def_id,
span,
capture_clause,
self.typeck_results.borrow().closure_min_captures.get(&closure_def_id),
);
if !need_migrations.is_empty() {
let (migration_string, migrated_variables_concat) =
migration_suggestion_for_2229(self.tcx, &need_migrations);
let closure_hir_id = self.tcx.local_def_id_to_hir_id(closure_def_id);
let closure_head_span = self.tcx.def_span(closure_def_id);
self.tcx.node_span_lint(
lint::builtin::RUST_2021_INCOMPATIBLE_CLOSURE_CAPTURES,
closure_hir_id,
closure_head_span,
|lint| {
lint.primary_message(reasons.migration_message());
for NeededMigration { var_hir_id, diagnostics_info } in &need_migrations {
// Label all the usages of the captured variable and explain why they are responsible
// for the migration being needed
for lint_note in diagnostics_info.iter() {
match &lint_note.captures_info {
UpvarMigrationInfo::CapturingPrecise { source_expr: Some(capture_expr_id), var_name: captured_name } => {
let cause_span = self.tcx.hir().span(*capture_expr_id);
lint.span_label(cause_span, format!("in Rust 2018, this closure captures all of `{}`, but in Rust 2021, it will only capture `{}`",
self.tcx.hir().name(*var_hir_id),
captured_name,
));
}
UpvarMigrationInfo::CapturingNothing { use_span } => {
lint.span_label(*use_span, format!("in Rust 2018, this causes the closure to capture `{}`, but in Rust 2021, it has no effect",
self.tcx.hir().name(*var_hir_id),
));
}
_ => { }
}
// Add a label pointing to where a captured variable affected by drop order
// is dropped
if lint_note.reason.drop_order {
let drop_location_span = drop_location_span(self.tcx, closure_hir_id);
match &lint_note.captures_info {
UpvarMigrationInfo::CapturingPrecise { var_name: captured_name, .. } => {
lint.span_label(drop_location_span, format!("in Rust 2018, `{}` is dropped here, but in Rust 2021, only `{}` will be dropped here as part of the closure",
self.tcx.hir().name(*var_hir_id),
captured_name,
));
}
UpvarMigrationInfo::CapturingNothing { use_span: _ } => {
lint.span_label(drop_location_span, format!("in Rust 2018, `{v}` is dropped here along with the closure, but in Rust 2021 `{v}` is not part of the closure",
v = self.tcx.hir().name(*var_hir_id),
));
}
}
}
// Add a label explaining why a closure no longer implements a trait
for &missing_trait in &lint_note.reason.auto_traits {
// not capturing something anymore cannot cause a trait to fail to be implemented:
match &lint_note.captures_info {
UpvarMigrationInfo::CapturingPrecise { var_name: captured_name, .. } => {
let var_name = self.tcx.hir().name(*var_hir_id);
lint.span_label(closure_head_span, format!("\
in Rust 2018, this closure implements {missing_trait} \
as `{var_name}` implements {missing_trait}, but in Rust 2021, \
this closure will no longer implement {missing_trait} \
because `{var_name}` is not fully captured \
and `{captured_name}` does not implement {missing_trait}"));
}
// Cannot happen: if we don't capture a variable, we impl strictly more traits
UpvarMigrationInfo::CapturingNothing { use_span } => span_bug!(*use_span, "missing trait from not capturing something"),
}
}
}
}
lint.note("for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2021/disjoint-capture-in-closures.html>");
let diagnostic_msg = format!(
"add a dummy let to cause {migrated_variables_concat} to be fully captured"
);
let closure_span = self.tcx.hir().span_with_body(closure_hir_id);
let mut closure_body_span = {
// If the body was entirely expanded from a macro
// invocation, i.e. the body is not contained inside the
// closure span, then we walk up the expansion until we
// find the span before the expansion.
let s = self.tcx.hir().span_with_body(body_id.hir_id);
s.find_ancestor_inside(closure_span).unwrap_or(s)
};
if let Ok(mut s) = self.tcx.sess.source_map().span_to_snippet(closure_body_span) {
if s.starts_with('$') {
// Looks like a macro fragment. Try to find the real block.
if let hir::Node::Expr(&hir::Expr {
kind: hir::ExprKind::Block(block, ..), ..
}) = self.tcx.hir_node(body_id.hir_id) {
// If the body is a block (with `{..}`), we use the span of that block.
// E.g. with a `|| $body` expanded from a `m!({ .. })`, we use `{ .. }`, and not `$body`.
// Since we know it's a block, we know we can insert the `let _ = ..` without
// breaking the macro syntax.
if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(block.span) {
closure_body_span = block.span;
s = snippet;
}
}
}
let mut lines = s.lines();
let line1 = lines.next().unwrap_or_default();
if line1.trim_end() == "{" {
// This is a multi-line closure with just a `{` on the first line,
// so we put the `let` on its own line.
// We take the indentation from the next non-empty line.
let line2 = lines.find(|line| !line.is_empty()).unwrap_or_default();
let indent = line2.split_once(|c: char| !c.is_whitespace()).unwrap_or_default().0;
lint.span_suggestion(
closure_body_span.with_lo(closure_body_span.lo() + BytePos::from_usize(line1.len())).shrink_to_lo(),
diagnostic_msg,
format!("\n{indent}{migration_string};"),
Applicability::MachineApplicable,
);
} else if line1.starts_with('{') {
// This is a closure with its body wrapped in
// braces, but with more than just the opening
// brace on the first line. We put the `let`
// directly after the `{`.
lint.span_suggestion(
closure_body_span.with_lo(closure_body_span.lo() + BytePos(1)).shrink_to_lo(),
diagnostic_msg,
format!(" {migration_string};"),
Applicability::MachineApplicable,
);
} else {
// This is a closure without braces around the body.
// We add braces to add the `let` before the body.
lint.multipart_suggestion(
diagnostic_msg,
vec![
(closure_body_span.shrink_to_lo(), format!("{{ {migration_string}; ")),
(closure_body_span.shrink_to_hi(), " }".to_string()),
],
Applicability::MachineApplicable
);
}
} else {
lint.span_suggestion(
closure_span,
diagnostic_msg,
migration_string,
Applicability::HasPlaceholders
);
}
},
);
}
}
/// Combines all the reasons for 2229 migrations
fn compute_2229_migrations_reasons(
&self,
auto_trait_reasons: UnordSet<&'static str>,
drop_order: bool,
) -> MigrationWarningReason {
MigrationWarningReason {
auto_traits: auto_trait_reasons.into_sorted_stable_ord(),
drop_order,
}
}
/// Figures out the list of root variables (and their types) that aren't completely
/// captured by the closure when `capture_disjoint_fields` is enabled and auto-traits
/// differ between the root variable and the captured paths.
///
/// Returns a map from each problematic capture (`UpvarMigrationInfo`) to the set of
/// auto-trait names that the root variable implements but the capture does not, if
/// migration is needed for traits for the provided `var_hir_id`; otherwise returns `None`.
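///
/// As an illustrative sketch (similar to the example in the Rust 2021 edition guide):
/// the root variable `p` below is `Send`, but the precisely captured path `p.0` is a
/// `*mut i32`, which is not `Send`, so migration is needed:
///
/// ```ignore (illustrative)
/// struct Ptr(*mut i32);
/// unsafe impl Send for Ptr {}
///
/// let mut n = 0;
/// let p = Ptr(&mut n as *mut i32);
/// let c = move || unsafe {
///     *p.0 = 1; // Rust 2018 captures all of `p` (which is `Send`); Rust 2021
///               // captures only `p.0`, which is not `Send`
/// };
/// ```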
fn compute_2229_migrations_for_trait(
&self,
min_captures: Option<&ty::RootVariableMinCaptureList<'tcx>>,
var_hir_id: HirId,
closure_clause: hir::CaptureBy,
) -> Option<FxIndexMap<UpvarMigrationInfo, UnordSet<&'static str>>> {
let auto_traits_def_id = [
self.tcx.lang_items().clone_trait(),
self.tcx.lang_items().sync_trait(),
self.tcx.get_diagnostic_item(sym::Send),
self.tcx.lang_items().unpin_trait(),
self.tcx.get_diagnostic_item(sym::unwind_safe_trait),
self.tcx.get_diagnostic_item(sym::ref_unwind_safe_trait),
];
const AUTO_TRAITS: [&str; 6] =
["`Clone`", "`Sync`", "`Send`", "`Unpin`", "`UnwindSafe`", "`RefUnwindSafe`"];
let root_var_min_capture_list = min_captures.and_then(|m| m.get(&var_hir_id))?;
let ty = self.resolve_vars_if_possible(self.node_ty(var_hir_id));
let ty = match closure_clause {
hir::CaptureBy::Value { .. } => ty, // For a move closure the capture kind should be by value
hir::CaptureBy::Ref => {
// For a non-move closure the capture kind is the max capture kind of all captures
// according to the ordering ImmBorrow < UniqueImmBorrow < MutBorrow < ByValue
let mut max_capture_info = root_var_min_capture_list.first().unwrap().info;
for capture in root_var_min_capture_list.iter() {
max_capture_info = determine_capture_info(max_capture_info, capture.info);
}
apply_capture_kind_on_capture_ty(
self.tcx,
ty,
max_capture_info.capture_kind,
self.tcx.lifetimes.re_erased,
)
}
};
let mut obligations_should_hold = Vec::new();
// Checks if a root variable implements any of the auto traits
for check_trait in auto_traits_def_id.iter() {
obligations_should_hold.push(check_trait.is_some_and(|check_trait| {
self.infcx
.type_implements_trait(check_trait, [ty], self.param_env)
.must_apply_modulo_regions()
}));
}
let mut problematic_captures = FxIndexMap::default();
// Check whether captured fields also implement the trait
for capture in root_var_min_capture_list.iter() {
let ty = apply_capture_kind_on_capture_ty(
self.tcx,
capture.place.ty(),
capture.info.capture_kind,
self.tcx.lifetimes.re_erased,
);
// Checks if a capture implements any of the auto traits
let mut obligations_holds_for_capture = Vec::new();
for check_trait in auto_traits_def_id.iter() {
obligations_holds_for_capture.push(check_trait.is_some_and(|check_trait| {
self.infcx
.type_implements_trait(check_trait, [ty], self.param_env)
.must_apply_modulo_regions()
}));
}
let mut capture_problems = UnordSet::default();
// Check whether, for any of the auto traits, the trait is implemented
// by the root variable but not by the capture
for (idx, _) in obligations_should_hold.iter().enumerate() {
if !obligations_holds_for_capture[idx] && obligations_should_hold[idx] {
capture_problems.insert(AUTO_TRAITS[idx]);
}
}
if !capture_problems.is_empty() {
problematic_captures.insert(
UpvarMigrationInfo::CapturingPrecise {
source_expr: capture.info.path_expr_id,
var_name: capture.to_string(self.tcx),
},
capture_problems,
);
}
}
if !problematic_captures.is_empty() {
return Some(problematic_captures);
}
None
}
/// Figures out the list of root variables (and their types) that aren't completely
/// captured by the closure when `capture_disjoint_fields` is enabled and drop order of
/// some path starting at that root variable **might** be affected.
///
/// The output list would include a root variable if:
/// - It would have been moved into the closure when `capture_disjoint_fields` wasn't
/// enabled, **and**
/// - It wasn't completely captured by the closure, **and**
/// - One of the paths starting at this root variable, that is not captured needs Drop.
///
/// This function only returns a set of `UpvarMigrationInfo` for significant drops. If there
/// are no significant drops, then `None` is returned.
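///
/// As an illustrative sketch: the field `s.b` below needs `Drop` but is not captured,
/// so the time at which it is dropped changes between editions (assuming `HasDrop`
/// implements `Drop`):
///
/// ```ignore (illustrative)
/// struct HasDrop; // assume `HasDrop` implements `Drop`
/// struct S { a: HasDrop, b: HasDrop }
///
/// let s = S { a: HasDrop, b: HasDrop };
/// let c = move || {
///     let _a = s.a; // Rust 2018 moves all of `s` into the closure, so `s.b` is dropped
///                   // when the closure is dropped; Rust 2021 moves only `s.a`, so `s.b`
///                   // is dropped at the end of the enclosing scope instead
/// };
/// ```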
#[instrument(level = "debug", skip(self))]
fn compute_2229_migrations_for_drop(
&self,
closure_def_id: LocalDefId,
closure_span: Span,
min_captures: Option<&ty::RootVariableMinCaptureList<'tcx>>,
closure_clause: hir::CaptureBy,
var_hir_id: HirId,
) -> Option<FxIndexSet<UpvarMigrationInfo>> {
let ty = self.resolve_vars_if_possible(self.node_ty(var_hir_id));
// FIXME(#132279): Using `non_body_analysis` here feels wrong.
if !ty.has_significant_drop(
self.tcx,
ty::TypingEnv::non_body_analysis(self.tcx, closure_def_id),
) {
debug!("does not have significant drop");
return None;
}
let Some(root_var_min_capture_list) = min_captures.and_then(|m| m.get(&var_hir_id)) else {
// The upvar is mentioned within the closure but no path starting from it is
// used. This occurs when you have (e.g.)
//
// ```
// let x = move || {
// let _ = y;
// };
// ```
debug!("no path starting from it is used");
match closure_clause {
// Only migrate if closure is a move closure
hir::CaptureBy::Value { .. } => {
let mut diagnostics_info = FxIndexSet::default();
let upvars =
self.tcx.upvars_mentioned(closure_def_id).expect("must be an upvar");
let upvar = upvars[&var_hir_id];
diagnostics_info
.insert(UpvarMigrationInfo::CapturingNothing { use_span: upvar.span });
return Some(diagnostics_info);
}
hir::CaptureBy::Ref => {}
}
return None;
};
debug!(?root_var_min_capture_list);
let mut projections_list = Vec::new();
let mut diagnostics_info = FxIndexSet::default();
for captured_place in root_var_min_capture_list.iter() {
match captured_place.info.capture_kind {
// Only care about captures that are moved into the closure
ty::UpvarCapture::ByValue => {
projections_list.push(captured_place.place.projections.as_slice());
diagnostics_info.insert(UpvarMigrationInfo::CapturingPrecise {
source_expr: captured_place.info.path_expr_id,
var_name: captured_place.to_string(self.tcx),
});
}
ty::UpvarCapture::ByRef(..) => {}
}
}
debug!(?projections_list);
debug!(?diagnostics_info);
let is_moved = !projections_list.is_empty();
debug!(?is_moved);
let is_not_completely_captured =
root_var_min_capture_list.iter().any(|capture| !capture.place.projections.is_empty());
debug!(?is_not_completely_captured);
if is_moved
&& is_not_completely_captured
&& self.has_significant_drop_outside_of_captures(
closure_def_id,
closure_span,
ty,
projections_list,
)
{
return Some(diagnostics_info);
}
None
}
/// Figures out the list of root variables (and their types) that aren't completely
/// captured by the closure when `capture_disjoint_fields` is enabled and either drop
/// order of some path starting at that root variable **might** be affected or auto-traits
/// differ between the root variable and the captured paths.
///
/// The output list would include a root variable if:
/// - It would have been moved into the closure when `capture_disjoint_fields` wasn't
/// enabled, **and**
/// - It wasn't completely captured by the closure, **and**
/// - One of the paths starting at this root variable, that is not captured needs Drop **or**
/// - One of the paths captured does not implement all the auto-traits its root variable
/// implements.
///
/// Returns a tuple containing a vector of `NeededMigration`, as well as a
/// `MigrationWarningReason` explaining why the root variables whose `HirId` is contained
/// in the vector should be fully captured.
#[instrument(level = "debug", skip(self))]
fn compute_2229_migrations(
&self,
closure_def_id: LocalDefId,
closure_span: Span,
closure_clause: hir::CaptureBy,
min_captures: Option<&ty::RootVariableMinCaptureList<'tcx>>,
) -> (Vec<NeededMigration>, MigrationWarningReason) {
let Some(upvars) = self.tcx.upvars_mentioned(closure_def_id) else {
return (Vec::new(), MigrationWarningReason::default());
};
let mut need_migrations = Vec::new();
let mut auto_trait_migration_reasons = UnordSet::default();
let mut drop_migration_needed = false;
// Perform auto-trait analysis
for (&var_hir_id, _) in upvars.iter() {
let mut diagnostics_info = Vec::new();
let auto_trait_diagnostic = self
.compute_2229_migrations_for_trait(min_captures, var_hir_id, closure_clause)
.unwrap_or_default();
let drop_reorder_diagnostic = if let Some(diagnostics_info) = self
.compute_2229_migrations_for_drop(
closure_def_id,
closure_span,
min_captures,
closure_clause,
var_hir_id,
) {
drop_migration_needed = true;
diagnostics_info
} else {
FxIndexSet::default()
};
// Combine all the captures responsible for needing migrations into one set
let mut capture_diagnostic = drop_reorder_diagnostic.clone();
for key in auto_trait_diagnostic.keys() {
capture_diagnostic.insert(key.clone());
}
let mut capture_diagnostic = capture_diagnostic.into_iter().collect::<Vec<_>>();
capture_diagnostic.sort();
for captures_info in capture_diagnostic {
// Get the auto trait reasons of why migration is needed because of that capture, if there are any
let capture_trait_reasons =
if let Some(reasons) = auto_trait_diagnostic.get(&captures_info) {
reasons.clone()
} else {
UnordSet::default()
};
// Check if migration is needed because of drop reorder as a result of that capture
let capture_drop_reorder_reason = drop_reorder_diagnostic.contains(&captures_info);
// Combine all the reasons of why the root variable should be captured as a result of
// auto trait implementation issues
auto_trait_migration_reasons.extend_unord(capture_trait_reasons.items().copied());
diagnostics_info.push(MigrationLintNote {
captures_info,
reason: self.compute_2229_migrations_reasons(
capture_trait_reasons,
capture_drop_reorder_reason,
),
});
}
if !diagnostics_info.is_empty() {
need_migrations.push(NeededMigration { var_hir_id, diagnostics_info });
}
}
(
need_migrations,
self.compute_2229_migrations_reasons(
auto_trait_migration_reasons,
drop_migration_needed,
),
)
}
/// This is a helper function to `compute_2229_migrations_for_drop`. Provided the type
/// of a root variable and a list of captured paths starting at this root variable (expressed
/// using a list of `Projection` slices), it returns true if there is a path starting at this
/// root variable that is not captured and that implements Drop.
///
/// The way this function works is that at a given call it looks at the type `base_path_ty` of
/// some base path, say P, and then at the list of projection slices, which represent the
/// different captures moved into the closure starting from P.
///
/// This will make more sense with an example:
///
/// ```rust,edition2021
///
/// struct FancyInteger(i32); // This implements Drop
///
/// struct Point { x: FancyInteger, y: FancyInteger }
/// struct Color;
///
/// struct Wrapper { p: Point, c: Color }
///
/// fn f(w: Wrapper) {
/// let c = || {
/// // Closure captures w.p.x and w.c by move.
/// };
///
/// c();
/// }
/// ```
///
/// If `capture_disjoint_fields` wasn't enabled, the closure would've moved `w` instead of the
/// precise paths. Looking closely, `w.p.y` isn't captured even though it implements Drop, so
/// the drop ordering would change and we want this function to return true.
///
/// Call stack to figure out if we need to migrate for `w` would look as follows:
///
/// Our initial base path is just `w`, and the paths captured from it are `w[p, x]` and
/// `w[c]`.
/// Notation:
/// - Ty(place): Type of place
/// - `(a, b)`: Represents the function parameters `base_path_ty` and `captured_by_move_projs`
/// respectively.
/// ```ignore (illustrative)
/// (Ty(w), [ &[p, x], &[c] ])
/// // |
/// // ----------------------------
/// // | |
/// // v v
/// (Ty(w.p), [ &[x] ]) (Ty(w.c), [ &[] ]) // IMP 1
/// // | |
/// // v v
/// (Ty(w.p), [ &[x] ]) false
/// // |
/// // |
/// // -------------------------------
/// // | |
/// // v v
/// (Ty((w.p).x), [ &[] ]) (Ty((w.p).y), []) // IMP 2
/// // | |
/// // v v
/// false NeedsSignificantDrop(Ty(w.p.y))
/// // |
/// // v
/// true
/// ```
///
/// IMP 1 `(Ty(w.c), [ &[] ])`: Notice the single empty slice inside `captured_by_move_projs`.
/// This implies that `w.c` is completely captured by the closure.
/// Since drop for this path will be called when the closure is
/// dropped we don't need to migrate for it.
///
/// IMP 2 `(Ty((w.p).y), [])`: Notice that `captured_by_move_projs` is empty. This implies that this
/// path wasn't captured by the closure. Also note that even
/// though we didn't capture this path, the function visits it,
/// which is kind of the point of this function. We then return
/// if the type of `w.p.y` implements Drop, which in this case is
/// true.
///
/// Consider another example:
///
/// ```ignore (pseudo-rust)
/// struct X;
/// impl Drop for X {}
///
/// struct Y(X);
/// impl Drop for Y {}
///
/// fn foo() {
/// let y = Y(X);
/// let c = || move(y.0);
/// }
/// ```
///
/// Note that `y.0` is captured by the closure. When this function is called for `y`, it will
/// return true, because even though all paths starting at `y` are captured, `y` itself
/// implements Drop, and its drop will be affected since `y` isn't completely captured.
fn has_significant_drop_outside_of_captures(
&self,
closure_def_id: LocalDefId,
closure_span: Span,
base_path_ty: Ty<'tcx>,
captured_by_move_projs: Vec<&[Projection<'tcx>]>,
) -> bool {
// FIXME(#132279): Using `non_body_analysis` here feels wrong.
let needs_drop = |ty: Ty<'tcx>| {
ty.has_significant_drop(
self.tcx,
ty::TypingEnv::non_body_analysis(self.tcx, closure_def_id),
)
};
let is_drop_defined_for_ty = |ty: Ty<'tcx>| {
let drop_trait = self.tcx.require_lang_item(hir::LangItem::Drop, Some(closure_span));
self.infcx
.type_implements_trait(drop_trait, [ty], self.tcx.param_env(closure_def_id))
.must_apply_modulo_regions()
};
let is_drop_defined_for_ty = is_drop_defined_for_ty(base_path_ty);
// If there is a case where no projection is applied on top of the current place
// then there must be exactly one capture corresponding to such a case. Note that this
// represents the case of the path being completely captured by the variable.
//
// e.g. If `a.b` is captured and we are processing `a.b`, then we can't have the closure also
// capture `a.b.c`, because that violates min capture.
let is_completely_captured = captured_by_move_projs.iter().any(|projs| projs.is_empty());
assert!(!is_completely_captured || (captured_by_move_projs.len() == 1));
if is_completely_captured {
// The place is captured entirely, so it doesn't matter if it needs a destructor; it will
// be dropped when the closure is dropped.
return false;
}
if captured_by_move_projs.is_empty() {
return needs_drop(base_path_ty);
}
if is_drop_defined_for_ty {
// If drop is implemented for this type then we need it to be fully captured,
// and we know it is not completely captured because of the previous checks.
// Note that this is a bug in the user code that will be reported by the
// borrow checker, since we can't move out of drop types.
// The bug exists in the user's code pre-migration, and we don't migrate here.
return false;
}
match base_path_ty.kind() {
// Observations:
// - `captured_by_move_projs` is not empty. Therefore we can call
// `captured_by_move_projs.first().unwrap()` safely.
// - All entries in `captured_by_move_projs` have at least one projection.
// Therefore we can call `captured_by_move_projs.first().unwrap().first().unwrap()` safely.
// We don't capture derefs in the case of move captures, which would have to be applied to
// access any further paths.
ty::Adt(def, _) if def.is_box() => unreachable!(),
ty::Ref(..) => unreachable!(),
ty::RawPtr(..) => unreachable!(),
ty::Adt(def, args) => {
// Multi-variant enums are captured in their entirety,
// which would've been handled by the single-empty-slice case in `captured_by_move_projs`.
assert_eq!(def.variants().len(), 1);
// Only Field projections can be applied to a non-box Adt.
assert!(
captured_by_move_projs.iter().all(|projs| matches!(
projs.first().unwrap().kind,
ProjectionKind::Field(..)
))
);
def.variants().get(FIRST_VARIANT).unwrap().fields.iter_enumerated().any(
|(i, field)| {
let paths_using_field = captured_by_move_projs
.iter()
.filter_map(|projs| {
if let ProjectionKind::Field(field_idx, _) =
projs.first().unwrap().kind
{
if field_idx == i { Some(&projs[1..]) } else { None }
} else {
unreachable!();
}
})
.collect();
let after_field_ty = field.ty(self.tcx, args);
self.has_significant_drop_outside_of_captures(
closure_def_id,
closure_span,
after_field_ty,
paths_using_field,
)
},
)
}
ty::Tuple(fields) => {
// Only Field projections can be applied to a tuple.
assert!(
captured_by_move_projs.iter().all(|projs| matches!(
projs.first().unwrap().kind,
ProjectionKind::Field(..)
))
);
fields.iter().enumerate().any(|(i, element_ty)| {
let paths_using_field = captured_by_move_projs
.iter()
.filter_map(|projs| {
if let ProjectionKind::Field(field_idx, _) = projs.first().unwrap().kind
{
if field_idx.index() == i { Some(&projs[1..]) } else { None }
} else {
unreachable!();
}
})
.collect();
self.has_significant_drop_outside_of_captures(
closure_def_id,
closure_span,
element_ty,
paths_using_field,
)
})
}
// Anything else would be completely captured and therefore handled already.
_ => unreachable!(),
}
}
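/// Decides the initial capture kind for `place` before any later adjustments are applied.
///
/// A hedged sketch of the distinction made below (illustrative only; all names are invented
/// for this example):
/// ```ignore (illustrative)
/// let v = vec![1, 2, 3];
/// let by_value = move || v.len(); // the access path does not go through a reference: ByValue
///
/// let w = vec![4, 5, 6];
/// let r = &w;
/// let by_ref = move || r.len(); // the access path goes through `*r`, so the initial kind is
/// // ByRef, allowing precise reborrows; `adjust_for_move_closure` later decides what is moved
/// ```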
fn init_capture_kind_for_place(
&self,
place: &Place<'tcx>,
capture_clause: hir::CaptureBy,
) -> ty::UpvarCapture {
match capture_clause {
// In the case of a move closure, if the data is accessed through a reference we
// want to capture by ref to allow precise capture using reborrows.
//
// If the data will be moved out of this place, then the place will be truncated
// at the first Deref in `adjust_for_move_closure` and then moved into the closure.
hir::CaptureBy::Value { .. } if !place.deref_tys().any(Ty::is_ref) => {
ty::UpvarCapture::ByValue
}
hir::CaptureBy::Value { .. } | hir::CaptureBy::Ref => {
ty::UpvarCapture::ByRef(BorrowKind::Immutable)
}
}
}
fn place_for_root_variable(
&self,
closure_def_id: LocalDefId,
var_hir_id: HirId,
) -> Place<'tcx> {
let upvar_id = ty::UpvarId::new(var_hir_id, closure_def_id);
Place {
base_ty: self.node_ty(var_hir_id),
base: PlaceBase::Upvar(upvar_id),
projections: Default::default(),
}
}
fn should_log_capture_analysis(&self, closure_def_id: LocalDefId) -> bool {
self.tcx.has_attr(closure_def_id, sym::rustc_capture_analysis)
}
fn log_capture_analysis_first_pass(
&self,
closure_def_id: LocalDefId,
capture_information: &InferredCaptureInformation<'tcx>,
closure_span: Span,
) {
if self.should_log_capture_analysis(closure_def_id) {
let mut diag =
self.dcx().struct_span_err(closure_span, "First Pass analysis includes:");
for (place, capture_info) in capture_information {
let capture_str = construct_capture_info_string(self.tcx, place, capture_info);
let output_str = format!("Capturing {capture_str}");
let span =
capture_info.path_expr_id.map_or(closure_span, |e| self.tcx.hir().span(e));
diag.span_note(span, output_str);
}
diag.emit();
}
}
fn log_closure_min_capture_info(&self, closure_def_id: LocalDefId, closure_span: Span) {
if self.should_log_capture_analysis(closure_def_id) {
if let Some(min_captures) =
self.typeck_results.borrow().closure_min_captures.get(&closure_def_id)
{
let mut diag =
self.dcx().struct_span_err(closure_span, "Min Capture analysis includes:");
for (_, min_captures_for_var) in min_captures {
for capture in min_captures_for_var {
let place = &capture.place;
let capture_info = &capture.info;
let capture_str =
construct_capture_info_string(self.tcx, place, capture_info);
let output_str = format!("Min Capture {capture_str}");
if capture.info.path_expr_id != capture.info.capture_kind_expr_id {
let path_span = capture_info
.path_expr_id
.map_or(closure_span, |e| self.tcx.hir().span(e));
let capture_kind_span = capture_info
.capture_kind_expr_id
.map_or(closure_span, |e| self.tcx.hir().span(e));
let mut multi_span: MultiSpan =
MultiSpan::from_spans(vec![path_span, capture_kind_span]);
let capture_kind_label =
construct_capture_kind_reason_string(self.tcx, place, capture_info);
let path_label = construct_path_string(self.tcx, place);
multi_span.push_span_label(path_span, path_label);
multi_span.push_span_label(capture_kind_span, capture_kind_label);
diag.span_note(multi_span, output_str);
} else {
let span = capture_info
.path_expr_id
.map_or(closure_span, |e| self.tcx.hir().span(e));
diag.span_note(span, output_str);
};
}
}
diag.emit();
}
}
}
/// A captured place is mutable if
/// 1. Projections don't include a Deref of an immut-borrow, **and**
/// 2. PlaceBase is mut or projections include a Deref of a mut-borrow.
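///
/// A hedged illustration of these rules (names are invented for this example):
/// ```ignore (illustrative)
/// let t = (&mut 0, &0);
/// let c = || {
///     *t.0 += 1; // deref of a mut-borrow: the captured place is mutable
///     let _ = *t.1; // deref of an immut-borrow: the captured place is not mutable
/// };
/// ```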
fn determine_capture_mutability(
&self,
typeck_results: &'a TypeckResults<'tcx>,
place: &Place<'tcx>,
) -> hir::Mutability {
let var_hir_id = match place.base {
PlaceBase::Upvar(upvar_id) => upvar_id.var_path.hir_id,
_ => unreachable!(),
};
let bm = *typeck_results.pat_binding_modes().get(var_hir_id).expect("missing binding mode");
let mut is_mutbl = bm.1;
for pointer_ty in place.deref_tys() {
match self.structurally_resolve_type(self.tcx.hir().span(var_hir_id), pointer_ty).kind()
{
// We don't capture derefs of raw ptrs
ty::RawPtr(_, _) => unreachable!(),
// Dereferencing a mut-ref allows us to mutate the Place as long as we don't deref
// an immut-ref on top of it afterwards.
ty::Ref(.., hir::Mutability::Mut) => is_mutbl = hir::Mutability::Mut,
// The place isn't mutable once we dereference an immutable reference.
ty::Ref(.., hir::Mutability::Not) => return hir::Mutability::Not,
// Dereferencing a box doesn't change mutability
ty::Adt(def, ..) if def.is_box() => {}
unexpected_ty => span_bug!(
self.tcx.hir().span(var_hir_id),
"deref of unexpected pointer type {:?}",
unexpected_ty
),
}
}
is_mutbl
}
}
/// Determines whether a child capture that is derived from a parent capture
/// should be borrowed with the lifetime of the parent coroutine-closure's env.
///
/// There are two cases when this needs to happen:
///
/// (1.) Are we borrowing data owned by the parent closure? We can determine if
/// that is the case by checking if the parent capture is by move, EXCEPT if we
/// apply a deref projection, which means we're reborrowing a reference that we
/// captured by move.
///
/// ```rust
/// let x = &1i32; // Let's call this lifetime `'1`.
/// let c = async move || {
/// println!("{:?}", *x);
/// // Even though the inner coroutine borrows by ref, we're only capturing `*x`,
/// // not `x`, so the inner closure is allowed to reborrow the data for `'1`.
/// };
/// ```
///
/// (2.) If a coroutine is mutably borrowing from a parent capture, then that
/// mutable borrow cannot live for longer than either the parent *or* the borrow
/// that we have on the original upvar. Therefore we always need to borrow the
/// child capture with the lifetime of the parent coroutine-closure's env.
///
/// ```rust
/// let mut x = 1i32;
/// let c = async || {
/// x = 1;
/// // The parent borrows `x` for some `&'1 mut i32`.
/// // However, when we call `c()`, we implicitly autoref for the signature of
/// // `AsyncFnMut::async_call_mut`. Let's call that lifetime `'call`. Since
/// // the maximum that `&'call mut &'1 mut i32` can be reborrowed is `&'call mut i32`,
/// // the inner coroutine should capture w/ the lifetime of the coroutine-closure.
/// };
/// ```
///
/// If either of these cases apply, then we should capture the borrow with the
/// lifetime of the parent coroutine-closure's env. Luckily, if this function is
/// not correct, then the program is not unsound, since we still borrowck and validate
/// the choices made from this function -- the only side-effect is that the user
/// may receive unnecessary borrowck errors.
fn should_reborrow_from_env_of_parent_coroutine_closure<'tcx>(
parent_capture: &ty::CapturedPlace<'tcx>,
child_capture: &ty::CapturedPlace<'tcx>,
) -> bool {
// (1.)
(!parent_capture.is_by_ref()
&& !matches!(
child_capture.place.projections.get(parent_capture.place.projections.len()),
Some(Projection { kind: ProjectionKind::Deref, .. })
))
// (2.)
|| matches!(child_capture.info.capture_kind, UpvarCapture::ByRef(ty::BorrowKind::Mutable))
}
/// Truncate the capture so that the place being borrowed is in accordance with RFC 1240,
/// which states that it's unsafe to take a reference into a struct marked `repr(packed)`.
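///
/// A hedged sketch of the effect (illustrative only; the struct is invented for this example):
/// ```ignore (illustrative)
/// #[repr(packed)]
/// struct P { f: u32, g: u8 }
/// let p = P { f: 1, g: 2 };
/// let c = || p.f; // a by-ref capture of `p.f` could be unaligned,
/// // so the captured place is truncated to `p` itself
/// ```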
fn restrict_repr_packed_field_ref_capture<'tcx>(
mut place: Place<'tcx>,
mut curr_borrow_kind: ty::UpvarCapture,
) -> (Place<'tcx>, ty::UpvarCapture) {
let pos = place.projections.iter().enumerate().position(|(i, p)| {
let ty = place.ty_before_projection(i);
// Return true for fields of packed structs.
match p.kind {
ProjectionKind::Field(..) => match ty.kind() {
ty::Adt(def, _) if def.repr().packed() => {
// We stop here regardless of field alignment. Field alignment can change as
// types change, including the types of private fields in other crates, and that
// shouldn't affect how we compute our captures.
true
}
_ => false,
},
_ => false,
}
});
if let Some(pos) = pos {
truncate_place_to_len_and_update_capture_kind(&mut place, &mut curr_borrow_kind, pos);
}
(place, curr_borrow_kind)
}
/// Returns a Ty that applies the specified capture kind on the provided capture Ty
fn apply_capture_kind_on_capture_ty<'tcx>(
tcx: TyCtxt<'tcx>,
ty: Ty<'tcx>,
capture_kind: UpvarCapture,
region: ty::Region<'tcx>,
) -> Ty<'tcx> {
match capture_kind {
ty::UpvarCapture::ByValue => ty,
ty::UpvarCapture::ByRef(kind) => Ty::new_ref(tcx, region, ty, kind.to_mutbl_lossy()),
}
}
/// Returns the Span of where the value with the provided HirId would be dropped
fn drop_location_span(tcx: TyCtxt<'_>, hir_id: HirId) -> Span {
let owner_id = tcx.hir().get_enclosing_scope(hir_id).unwrap();
let owner_node = tcx.hir_node(owner_id);
let owner_span = match owner_node {
hir::Node::Item(item) => match item.kind {
hir::ItemKind::Fn(_, _, owner_id) => tcx.hir().span(owner_id.hir_id),
_ => {
bug!("Drop location span error: need to handle more ItemKind '{:?}'", item.kind);
}
},
hir::Node::Block(block) => tcx.hir().span(block.hir_id),
hir::Node::TraitItem(item) => tcx.hir().span(item.hir_id()),
hir::Node::ImplItem(item) => tcx.hir().span(item.hir_id()),
_ => {
bug!("Drop location span error: need to handle more Node '{:?}'", owner_node);
}
};
tcx.sess.source_map().end_point(owner_span)
}
struct InferBorrowKind<'tcx> {
// The def-id of the closure whose kind and upvar accesses are being inferred.
closure_def_id: LocalDefId,
/// For each Place that is captured by the closure, we track the minimal kind of
/// access we need (ref, ref mut, move, etc) and the expression that resulted in such access.
///
/// Consider a closure where `s.str1` is captured via an ImmutableBorrow and
/// `s.str2` via a MutableBorrow:
///
/// ```rust,no_run
/// struct SomeStruct { str1: String, str2: String };
///
/// // Assume that the HirId for the variable definition is `V1`
/// let mut s = SomeStruct { str1: format!("s1"), str2: format!("s2") };
///
/// let fix_s = |new_s2| {
/// // Assume that the HirId for the expression `s.str1` is `E1`
/// println!("Updating SomeStruct with str1={0}", s.str1);
/// // Assume that the HirId for the expression `s.str2` is `E2`
/// s.str2 = new_s2;
/// };
/// ```
///
/// For closure `fix_s`, (at a high level) the map contains
///
/// ```ignore (illustrative)
/// Place { V1, [ProjectionKind::Field(Index=0, Variant=0)] } : CaptureKind { E1, ImmutableBorrow }
/// Place { V1, [ProjectionKind::Field(Index=1, Variant=0)] } : CaptureKind { E2, MutableBorrow }
/// ```
capture_information: InferredCaptureInformation<'tcx>,
fake_reads: Vec<(Place<'tcx>, FakeReadCause, HirId)>,
}
impl<'tcx> euv::Delegate<'tcx> for InferBorrowKind<'tcx> {
fn fake_read(
&mut self,
place: &PlaceWithHirId<'tcx>,
cause: FakeReadCause,
diag_expr_id: HirId,
) {
let PlaceBase::Upvar(_) = place.place.base else { return };
// We need to restrict Fake Read precision to avoid fake reading unsafe code,
// such as deref of a raw pointer.
let dummy_capture_kind = ty::UpvarCapture::ByRef(ty::BorrowKind::Immutable);
let (place, _) = restrict_capture_precision(place.place.clone(), dummy_capture_kind);
let (place, _) = restrict_repr_packed_field_ref_capture(place, dummy_capture_kind);
self.fake_reads.push((place, cause, diag_expr_id));
}
#[instrument(skip(self), level = "debug")]
fn consume(&mut self, place_with_id: &PlaceWithHirId<'tcx>, diag_expr_id: HirId) {
let PlaceBase::Upvar(upvar_id) = place_with_id.place.base else { return };
assert_eq!(self.closure_def_id, upvar_id.closure_expr_id);
self.capture_information.push((place_with_id.place.clone(), ty::CaptureInfo {
capture_kind_expr_id: Some(diag_expr_id),
path_expr_id: Some(diag_expr_id),
capture_kind: ty::UpvarCapture::ByValue,
}));
}
#[instrument(skip(self), level = "debug")]
fn borrow(
&mut self,
place_with_id: &PlaceWithHirId<'tcx>,
diag_expr_id: HirId,
bk: ty::BorrowKind,
) {
let PlaceBase::Upvar(upvar_id) = place_with_id.place.base else { return };
assert_eq!(self.closure_def_id, upvar_id.closure_expr_id);
// The region here will get discarded/ignored
let capture_kind = ty::UpvarCapture::ByRef(bk);
// We only want the repr(packed) restriction to apply when taking a reference into a packed
// struct, and not when the data is being moved. Therefore we call this method here instead
// of in `restrict_capture_precision`.
let (place, mut capture_kind) =
restrict_repr_packed_field_ref_capture(place_with_id.place.clone(), capture_kind);
// Raw pointers don't inherit mutability
if place_with_id.place.deref_tys().any(Ty::is_unsafe_ptr) {
capture_kind = ty::UpvarCapture::ByRef(ty::BorrowKind::Immutable);
}
self.capture_information.push((place, ty::CaptureInfo {
capture_kind_expr_id: Some(diag_expr_id),
path_expr_id: Some(diag_expr_id),
capture_kind,
}));
}
#[instrument(skip(self), level = "debug")]
fn mutate(&mut self, assignee_place: &PlaceWithHirId<'tcx>, diag_expr_id: HirId) {
self.borrow(assignee_place, diag_expr_id, ty::BorrowKind::Mutable);
}
}
/// Rust doesn't permit moving fields out of a type that implements `Drop`.
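///
/// A hedged sketch (illustrative only; the type is invented for this example):
/// ```ignore (illustrative)
/// struct Guard(String);
/// impl Drop for Guard {
///     fn drop(&mut self) {}
/// }
/// let g = Guard(String::new());
/// let c = move || {
///     let _s = g.0; // moving `g.0` out of a `Drop` type is not allowed,
///     // so the by-value capture is truncated to all of `g`
/// };
/// ```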
fn restrict_precision_for_drop_types<'a, 'tcx>(
fcx: &'a FnCtxt<'a, 'tcx>,
mut place: Place<'tcx>,
mut curr_mode: ty::UpvarCapture,
) -> (Place<'tcx>, ty::UpvarCapture) {
let is_copy_type = fcx.infcx.type_is_copy_modulo_regions(fcx.param_env, place.ty());
if let (false, UpvarCapture::ByValue) = (is_copy_type, curr_mode) {
for i in 0..place.projections.len() {
match place.ty_before_projection(i).kind() {
ty::Adt(def, _) if def.destructor(fcx.tcx).is_some() => {
truncate_place_to_len_and_update_capture_kind(&mut place, &mut curr_mode, i);
break;
}
_ => {}
}
}
}
(place, curr_mode)
}
/// Truncate `place` so that an `unsafe` block isn't required to capture it.
/// - No projections are applied to raw pointers, since these require unsafe blocks. We capture
/// them completely.
/// - No projections are applied on top of Union ADTs, since these require unsafe blocks.
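///
/// A hedged sketch of the union case (illustrative only; the union is invented for this example):
/// ```ignore (illustrative)
/// union U { a: u32, b: f32 }
/// let u = U { a: 1 };
/// let c = || unsafe { u.a }; // `u.a` is a union field, so the capture is truncated to `u`
/// ```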
fn restrict_precision_for_unsafe(
mut place: Place<'_>,
mut curr_mode: ty::UpvarCapture,
) -> (Place<'_>, ty::UpvarCapture) {
if place.base_ty.is_unsafe_ptr() {
truncate_place_to_len_and_update_capture_kind(&mut place, &mut curr_mode, 0);
}
if place.base_ty.is_union() {
truncate_place_to_len_and_update_capture_kind(&mut place, &mut curr_mode, 0);
}
for (i, proj) in place.projections.iter().enumerate() {
if proj.ty.is_unsafe_ptr() {
// Don't apply any projections on top of an unsafe ptr.
truncate_place_to_len_and_update_capture_kind(&mut place, &mut curr_mode, i + 1);
break;
}
if proj.ty.is_union() {
// Don't capture precise fields of a union.
truncate_place_to_len_and_update_capture_kind(&mut place, &mut curr_mode, i + 1);
break;
}
}
(place, curr_mode)
}
/// Truncate projections so that the following rules are obeyed by the captured `place`:
/// - No Index projections are captured, since arrays are captured completely.
/// - No unsafe block is required to capture `place`.
///
/// Returns the truncated place and the updated capture mode.
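///
/// A hedged illustration of the Index rule (illustrative only):
/// ```ignore (illustrative)
/// let arr = [String::new(), String::new()];
/// let c = || arr[0].len(); // the Index projection is dropped; `arr` is captured instead
/// ```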
fn restrict_capture_precision(
place: Place<'_>,
curr_mode: ty::UpvarCapture,
) -> (Place<'_>, ty::UpvarCapture) {
let (mut place, mut curr_mode) = restrict_precision_for_unsafe(place, curr_mode);
if place.projections.is_empty() {
// Nothing to do here
return (place, curr_mode);
}
for (i, proj) in place.projections.iter().enumerate() {
match proj.kind {
ProjectionKind::Index | ProjectionKind::Subslice => {
// Arrays are completely captured, so we drop Index and Subslice projections
truncate_place_to_len_and_update_capture_kind(&mut place, &mut curr_mode, i);
return (place, curr_mode);
}
ProjectionKind::Deref => {}
ProjectionKind::OpaqueCast => {}
ProjectionKind::Field(..) => {} // ignore
}
}
(place, curr_mode)
}
/// Truncate the place at the first deref of any reference, so that the move closure moves the
/// reference itself rather than the data behind it.
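///
/// A hedged sketch (illustrative only; names are invented for this example):
/// ```ignore (illustrative)
/// let s = String::new();
/// let r = &s;
/// let c = move || r.len(); // the accessed place is `(*r)`; it is truncated at the deref,
/// // so the move closure moves `r` (the reference) rather than the `String` behind it
/// ```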
fn adjust_for_move_closure(
mut place: Place<'_>,
mut kind: ty::UpvarCapture,
) -> (Place<'_>, ty::UpvarCapture) {
let first_deref = place.projections.iter().position(|proj| proj.kind == ProjectionKind::Deref);
if let Some(idx) = first_deref {
truncate_place_to_len_and_update_capture_kind(&mut place, &mut kind, idx);
}
(place, ty::UpvarCapture::ByValue)
}
/// Adjust the closure capture so that, when it takes ownership of data, it only moves data
/// owned by the enclosing stack frame (i.e. the by-value capture is truncated at the first deref).
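///
/// A hedged sketch (illustrative only):
/// ```ignore (illustrative)
/// let b = Box::new(String::new());
/// let c = || drop(*b); // consuming `*b` would move data from behind the box,
/// // so the by-value capture is truncated at the deref and `b` itself is moved
/// ```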
fn adjust_for_non_move_closure(
mut place: Place<'_>,
mut kind: ty::UpvarCapture,
) -> (Place<'_>, ty::UpvarCapture) {
let contains_deref =
place.projections.iter().position(|proj| proj.kind == ProjectionKind::Deref);
match kind {
ty::UpvarCapture::ByValue => {
if let Some(idx) = contains_deref {
truncate_place_to_len_and_update_capture_kind(&mut place, &mut kind, idx);
}
}
ty::UpvarCapture::ByRef(..) => {}
}
(place, kind)
}
fn construct_place_string<'tcx>(tcx: TyCtxt<'_>, place: &Place<'tcx>) -> String {
let variable_name = match place.base {
PlaceBase::Upvar(upvar_id) => var_name(tcx, upvar_id.var_path.hir_id).to_string(),
_ => bug!("Capture_information should only contain upvars"),
};
let mut projections_str = String::new();
for (i, item) in place.projections.iter().enumerate() {
let proj = match item.kind {
ProjectionKind::Field(a, b) => format!("({a:?}, {b:?})"),
ProjectionKind::Deref => String::from("Deref"),
ProjectionKind::Index => String::from("Index"),
ProjectionKind::Subslice => String::from("Subslice"),
ProjectionKind::OpaqueCast => String::from("OpaqueCast"),
};
if i != 0 {
projections_str.push(',');
}
projections_str.push_str(proj.as_str());
}
format!("{variable_name}[{projections_str}]")
}
fn construct_capture_kind_reason_string<'tcx>(
tcx: TyCtxt<'_>,
place: &Place<'tcx>,
capture_info: &ty::CaptureInfo,
) -> String {
let place_str = construct_place_string(tcx, place);
let capture_kind_str = match capture_info.capture_kind {
ty::UpvarCapture::ByValue => "ByValue".into(),
ty::UpvarCapture::ByRef(kind) => format!("{kind:?}"),
};
format!("{place_str} captured as {capture_kind_str} here")
}
fn construct_path_string<'tcx>(tcx: TyCtxt<'_>, place: &Place<'tcx>) -> String {
let place_str = construct_place_string(tcx, place);
format!("{place_str} used here")
}
fn construct_capture_info_string<'tcx>(
tcx: TyCtxt<'_>,
place: &Place<'tcx>,
capture_info: &ty::CaptureInfo,
) -> String {
let place_str = construct_place_string(tcx, place);
let capture_kind_str = match capture_info.capture_kind {
ty::UpvarCapture::ByValue => "ByValue".into(),
ty::UpvarCapture::ByRef(kind) => format!("{kind:?}"),
};
format!("{place_str} -> {capture_kind_str}")
}
fn var_name(tcx: TyCtxt<'_>, var_hir_id: HirId) -> Symbol {
tcx.hir().name(var_hir_id)
}
#[instrument(level = "debug", skip(tcx))]
fn should_do_rust_2021_incompatible_closure_captures_analysis(
tcx: TyCtxt<'_>,
closure_id: HirId,
) -> bool {
if tcx.sess.at_least_rust_2021() {
return false;
}
let (level, _) =
tcx.lint_level_at_node(lint::builtin::RUST_2021_INCOMPATIBLE_CLOSURE_CAPTURES, closure_id);
!matches!(level, lint::Level::Allow)
}
/// Returns a tuple of two strings (s1, s2):
/// - s1: Line of code that is needed for the migration: eg: `let _ = (&x, ...)`.
/// - s2: Comma separated names of the variables being migrated.
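///
/// For example (a hedged illustration): for two migrated variables `x` and `y`, s1 would be
/// `let _ = (&x, &y)` and s2 would list the names as "`x`, `y`".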
fn migration_suggestion_for_2229(
tcx: TyCtxt<'_>,
need_migrations: &[NeededMigration],
) -> (String, String) {
let need_migrations_variables = need_migrations
.iter()
.map(|NeededMigration { var_hir_id: v, .. }| var_name(tcx, *v))
.collect::<Vec<_>>();
let migration_ref_concat =
need_migrations_variables.iter().map(|v| format!("&{v}")).collect::<Vec<_>>().join(", ");
let migration_string = if 1 == need_migrations.len() {
format!("let _ = {migration_ref_concat}")
} else {
format!("let _ = ({migration_ref_concat})")
};
let migrated_variables_concat =
need_migrations_variables.iter().map(|v| format!("`{v}`")).collect::<Vec<_>>().join(", ");
(migration_string, migrated_variables_concat)
}
/// Helper function to determine whether we need to escalate the CaptureKind from
/// CaptureInfo A to B, returning the escalated CaptureInfo.
/// (Note: CaptureInfo contains the CaptureKind and the expression that led to capturing it in that way.)
///
/// If both `CaptureKind`s are considered equivalent, then the `CaptureInfo` that has an
/// associated `capture_kind_expr_id` is preferred.
///
/// It is the caller's duty to figure out which path_expr_id to use.
///
/// If both the CaptureKind and Expression are considered to be equivalent,
/// then `CaptureInfo` A is preferred. This can be useful in cases where we want to prioritize
/// expressions reported back to the user as part of diagnostics based on which appears earlier
/// in the closure. This can be achieved simply by calling
/// `determine_capture_info(existing_info, current_info)`. This works out because the
/// expressions that occur earlier in the closure body are processed before the current one.
/// Consider the following example
/// ```rust,no_run
/// struct Point { x: i32, y: i32 }
/// let mut p = Point { x: 10, y: 10 };
///
/// let c = || {
/// p.x += 10;
/// // ^ E1 ^
/// // ...
/// // More code
/// // ...
/// p.x += 10; // E2
/// // ^ E2 ^
/// };
/// ```
/// `CaptureKind` associated with both `E1` and `E2` will be ByRef(MutBorrow),
/// and both have an expression associated, however for diagnostics we prefer reporting
/// `E1` since it appears earlier in the closure body. When `E2` is being processed we
/// would've already handled `E1`, and have an existing capture_information for it.
/// Calling `determine_capture_info(existing_info_e1, current_info_e2)` will return
/// `existing_info_e1` in this case, allowing us to point to `E1` in case of diagnostics.
fn determine_capture_info(
capture_info_a: ty::CaptureInfo,
capture_info_b: ty::CaptureInfo,
) -> ty::CaptureInfo {
// If the capture kind is equivalent then, we don't need to escalate and can compare the
// expressions.
let eq_capture_kind = match (capture_info_a.capture_kind, capture_info_b.capture_kind) {
(ty::UpvarCapture::ByValue, ty::UpvarCapture::ByValue) => true,
(ty::UpvarCapture::ByRef(ref_a), ty::UpvarCapture::ByRef(ref_b)) => ref_a == ref_b,
(ty::UpvarCapture::ByValue, _) | (ty::UpvarCapture::ByRef(_), _) => false,
};
if eq_capture_kind {
match (capture_info_a.capture_kind_expr_id, capture_info_b.capture_kind_expr_id) {
(Some(_), _) | (None, None) => capture_info_a,
(None, Some(_)) => capture_info_b,
}
} else {
// We select the CaptureKind which ranks higher based on the following priority order:
// ByValue > MutBorrow > UniqueImmBorrow > ImmBorrow
match (capture_info_a.capture_kind, capture_info_b.capture_kind) {
(ty::UpvarCapture::ByValue, _) => capture_info_a,
(_, ty::UpvarCapture::ByValue) => capture_info_b,
(ty::UpvarCapture::ByRef(ref_a), ty::UpvarCapture::ByRef(ref_b)) => {
match (ref_a, ref_b) {
// Take LHS:
(BorrowKind::UniqueImmutable | BorrowKind::Mutable, BorrowKind::Immutable)
| (BorrowKind::Mutable, BorrowKind::UniqueImmutable) => capture_info_a,
// Take RHS:
(BorrowKind::Immutable, BorrowKind::UniqueImmutable | BorrowKind::Mutable)
| (BorrowKind::UniqueImmutable, BorrowKind::Mutable) => capture_info_b,
(BorrowKind::Immutable, BorrowKind::Immutable)
| (BorrowKind::UniqueImmutable, BorrowKind::UniqueImmutable)
| (BorrowKind::Mutable, BorrowKind::Mutable) => {
bug!("Expected unequal capture kinds");
}
}
}
}
}
}
/// Truncates `place` to have up to `len` projections.
/// `curr_mode` is the current required capture kind for the place.
/// Returns the truncated `place` and the updated required capture kind.
///
/// Note: Capture kind changes from `MutBorrow` to `UniqueImmBorrow` if the truncated part of the `place`
/// contained `Deref` of `&mut`.
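///
/// A hedged illustration of that conversion (illustrative only; names are invented for this example):
/// ```ignore (illustrative)
/// let mut x = 0;
/// let r = &mut x;
/// let c = || *r = 1; // the precise capture is a MutBorrow of `(*r)`; if the place is
/// // truncated to `r`, the deref of the `&mut` falls in the truncated part and the
/// // capture kind becomes UniqueImmBorrow of `r`
/// ```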
fn truncate_place_to_len_and_update_capture_kind<'tcx>(
place: &mut Place<'tcx>,
curr_mode: &mut ty::UpvarCapture,
len: usize,
) {
let is_mut_ref = |ty: Ty<'_>| matches!(ty.kind(), ty::Ref(.., hir::Mutability::Mut));
// If the truncated part of the place contains `Deref` of a `&mut` then convert MutBorrow ->
// UniqueImmBorrow
// Note that if the place contained a Deref of a raw pointer it would not have been MutBorrow, so
// we don't need to worry about that case here.
match curr_mode {
ty::UpvarCapture::ByRef(ty::BorrowKind::Mutable) => {
for i in len..place.projections.len() {
if place.projections[i].kind == ProjectionKind::Deref
&& is_mut_ref(place.ty_before_projection(i))
{
*curr_mode = ty::UpvarCapture::ByRef(ty::BorrowKind::UniqueImmutable);
break;
}
}
}
ty::UpvarCapture::ByRef(..) => {}
ty::UpvarCapture::ByValue => {}
}
place.projections.truncate(len);
}
/// Determines the Ancestry relationship of Place A relative to Place B
///
/// `PlaceAncestryRelation::Ancestor` implies Place A is ancestor of Place B
/// `PlaceAncestryRelation::Descendant` implies Place A is descendant of Place B
/// `PlaceAncestryRelation::Divergent` implies neither of them is the ancestor of the other.
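///
/// For example (a hedged illustration): `a.b` is an Ancestor of `a.b.c`, `a.b.c` is a
/// Descendant of `a.b`, two occurrences of `a.b` are the SamePlace, and `a.b` and `a.x`
/// are Divergent.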
fn determine_place_ancestry_relation<'tcx>(
place_a: &Place<'tcx>,
place_b: &Place<'tcx>,
) -> PlaceAncestryRelation {
// If Place A and Place B don't start off from the same root variable, they are divergent.
if place_a.base != place_b.base {
return PlaceAncestryRelation::Divergent;
}
// Assume the length of projections_a is n
let projections_a = &place_a.projections;
// Assume the length of projections_b is m
let projections_b = &place_b.projections;
let same_initial_projections =
iter::zip(projections_a, projections_b).all(|(proj_a, proj_b)| proj_a.kind == proj_b.kind);
if same_initial_projections {
use std::cmp::Ordering;
// First min(n, m) projections are the same
// Select Ancestor/Descendant
match projections_b.len().cmp(&projections_a.len()) {
Ordering::Greater => PlaceAncestryRelation::Ancestor,
Ordering::Equal => PlaceAncestryRelation::SamePlace,
Ordering::Less => PlaceAncestryRelation::Descendant,
}
} else {
PlaceAncestryRelation::Divergent
}
}
/// Reduces the precision of the captured place when the extra precision doesn't yield any
/// benefit from a borrow-checking perspective, allowing us to save on the size of the capture.
///
/// Fields that are read through a shared reference will always be read via a shared ref or a copy,
/// and therefore capturing precise paths yields no benefit. This optimization truncates the
/// rightmost deref of the capture if the deref is applied to a shared ref.
///
/// The reason we only drop the last deref is the following edge case:
///
/// ```
/// # struct A { field_of_a: Box<i32> }
/// # struct B {}
/// # struct C<'a>(&'a i32);
/// struct MyStruct<'a> {
/// a: &'static A,
/// b: B,
/// c: C<'a>,
/// }
///
/// fn foo<'a, 'b>(m: &'a MyStruct<'b>) -> impl FnMut() + 'static {
/// || drop(&*m.a.field_of_a)
/// // Here we really do want to capture `*m.a` because that outlives `'static`
///
/// // If we capture `m`, then the closure no longer outlives `'static`
/// // it is constrained to `'a`
/// }
/// ```
fn truncate_capture_for_optimization(
mut place: Place<'_>,
mut curr_mode: ty::UpvarCapture,
) -> (Place<'_>, ty::UpvarCapture) {
let is_shared_ref = |ty: Ty<'_>| matches!(ty.kind(), ty::Ref(.., hir::Mutability::Not));
// Find the rightmost deref (if any). All the projections that come after this
// are fields or other "in-place pointer adjustments"; these refer therefore to
// data owned by whatever pointer is being dereferenced here.
let idx = place.projections.iter().rposition(|proj| ProjectionKind::Deref == proj.kind);
match idx {
// If that pointer is a shared reference, then we don't need those fields.
Some(idx) if is_shared_ref(place.ty_before_projection(idx)) => {
truncate_place_to_len_and_update_capture_kind(&mut place, &mut curr_mode, idx + 1)
}
None | Some(_) => {}
}
(place, curr_mode)
}
/// Precise capture is enabled if the user is using Rust Edition 2021 or higher.
/// `span` is the span of the closure.
fn enable_precise_capture(span: Span) -> bool {
// We use the span here so that, if the closure was generated by a macro with a different
// edition, we respect that macro's edition rather than the crate's.
span.at_least_rust_2021()
}