// rustc_data_structures/marker.rs
1use std::alloc::Allocator;
2
/// Marker for values that may be sent to another thread when the compiler is
/// running in its dynamic thread-safe mode (see the `is_dyn_thread_safe()`
/// check in `FromDyn::from` below). Unlike `Send`, membership is curated
/// manually in this module via the positive/negative impls that follow.
///
/// # Safety
/// Implementors assert the type is actually safe to transfer across threads
/// whenever the dynamic thread-safe mode is active.
#[rustc_on_unimplemented(message = "`{Self}` doesn't implement `DynSend`. \
Add it to `rustc_data_structures::marker` or use `IntoDynSyncSend` if it's already `Send`")]
pub unsafe auto trait DynSend {}
9
/// Marker for values that may be shared between threads when the compiler is
/// running in its dynamic thread-safe mode. The `DynSync`/`Sync` relationship
/// mirrors the `DynSend`/`Send` one above: membership is curated manually in
/// this module rather than derived automatically from `Sync`.
///
/// # Safety
/// Implementors assert the type is actually safe to share across threads
/// whenever the dynamic thread-safe mode is active.
#[rustc_on_unimplemented(message = "`{Self}` doesn't implement `DynSync`. \
Add it to `rustc_data_structures::marker` or use `IntoDynSyncSend` if it's already `Sync`")]
pub unsafe auto trait DynSync {}
16
// SAFETY: a shared reference can be sent to another thread iff the referent
// may be accessed from both threads — the same rule std uses for
// `impl Send for &T where T: Sync`, translated to the Dyn* traits.
unsafe impl<T: DynSync + ?Sized> DynSend for &T {}
19
/// Expands each bracketed entry `[Type where Generics]` into a negative impl
/// `impl<Generics> !DynSend for Type {}`, opting the type out of the
/// `DynSend` auto trait. The `where` clause (and its generics) is optional.
macro_rules! impls_dyn_send_neg {
    ($([$ty: ty $(where $($generics: tt)*)?])*) => {
        $(impl$(<$($generics)*>)? !DynSend for $ty {})*
    };
}
25
// Types that must never cross threads even in dynamic thread-safe mode:
// std's process-environment iterators, raw pointers, `Rc`/`Weak`, the
// poisoning lock guards, and the stdio locks. These match the types std
// itself marks `!Send`.
impls_dyn_send_neg!(
    [std::env::Args]
    [std::env::ArgsOs]
    [*const T where T: ?Sized]
    [*mut T where T: ?Sized]
    [std::ptr::NonNull<T> where T: ?Sized]
    [std::rc::Rc<T, A> where T: ?Sized, A: Allocator]
    [std::rc::Weak<T, A> where T: ?Sized, A: Allocator]
    [std::sync::MutexGuard<'_, T> where T: ?Sized]
    [std::sync::RwLockReadGuard<'_, T> where T: ?Sized]
    [std::sync::RwLockWriteGuard<'_, T> where T: ?Sized]
    [std::io::StdoutLock<'_>]
    [std::io::StderrLock<'_>]
);
41
// NOTE(review): presumably `VarsOs` is `!Send` on exactly these targets in
// std, and this keeps `DynSend` in agreement — confirm against the std
// sources when updating the target list.
#[cfg(any(unix, target_os = "hermit", target_os = "wasi", target_os = "solid_asp3"))]
impl !DynSend for std::env::VarsOs {}
45
/// For each listed type, derives `DynSend` from the fact that the type is
/// already `Send`: the `where $t: Send` clause makes the compiler verify the
/// claim, so the unsafe impl cannot silently become wrong.
macro_rules! already_send {
    ($([$t: ty])*) => {
        $(unsafe impl DynSend for $t where $t: Send {})*
    };
}
51
// Types whose `Send`-ness carries over to `DynSend` unconditionally.
already_send!(
    [std::backtrace::Backtrace][std::io::Stdout][std::io::Stderr][std::io::Error][std::fs::File]
    [rustc_arena::DroplessArena][crate::memmap::Mmap][crate::profiling::SelfProfiler]
    [crate::owned_slice::OwnedSlice]
);
58
/// Expands each entry `[Type where Bounds]` into
/// `unsafe impl<Bounds> DynSend for Type {}`, for generic containers whose
/// send-ability is conditional on their type parameters (expressed in `Dyn*`
/// bounds rather than `Send`/`Sync`).
macro_rules! impl_dyn_send {
    ($($($attr: meta)* [$t: ty where $($bounds: tt)*])*) => {
        $(unsafe impl<$($bounds)*> DynSend for $t {})*
    };
}
64
// Conditional `DynSend` impls for generic containers: each mirrors the shape
// of the corresponding std/crate `Send` impl, with `Send`/`Sync` bounds
// replaced by `DynSend`/`DynSync` where appropriate.
impl_dyn_send!(
    [std::sync::atomic::AtomicPtr<T> where T]
    [std::sync::Mutex<T> where T: ?Sized+ DynSend]
    [std::sync::mpsc::Sender<T> where T: DynSend]
    [std::sync::Arc<T> where T: ?Sized + DynSync + DynSend]
    [std::sync::LazyLock<T, F> where T: DynSend, F: DynSend]
    [std::collections::HashSet<K, S> where K: DynSend, S: DynSend]
    [std::collections::HashMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
    [std::collections::BTreeMap<K, V, A> where K: DynSend, V: DynSend, A: std::alloc::Allocator + Clone + DynSend]
    [Vec<T, A> where T: DynSend, A: std::alloc::Allocator + DynSend]
    [Box<T, A> where T: ?Sized + DynSend, A: std::alloc::Allocator + DynSend]
    [crate::sync::RwLock<T> where T: DynSend]
    [crate::tagged_ptr::TaggedRef<'a, P, T> where 'a, P: Sync, T: Send + crate::tagged_ptr::Tag]
    [rustc_arena::TypedArena<T> where T: DynSend]
    [hashbrown::HashTable<T> where T: DynSend]
    [indexmap::IndexSet<V, S> where V: DynSend, S: DynSend]
    [indexmap::IndexMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
    [thin_vec::ThinVec<T> where T: DynSend]
    [smallvec::SmallVec<A> where A: smallvec::Array + DynSend]
);
85
/// `DynSync` counterpart of `impls_dyn_send_neg!`: each entry
/// `[Type where Generics]` becomes `impl<Generics> !DynSync for Type {}`,
/// opting the type out of the `DynSync` auto trait.
macro_rules! impls_dyn_sync_neg {
    ($([$ty: ty $(where $($generics: tt)*)?])*) => {
        $(impl$(<$($generics)*>)? !DynSync for $ty {})*
    };
}
91
// Types that must never be shared between threads: the env iterators, raw
// pointers, the interior-mutability cells, `Rc`/`Weak`, `OnceCell`, and the
// mpsc channel endpoints — matching the types std marks `!Sync`.
impls_dyn_sync_neg!(
    [std::env::Args]
    [std::env::ArgsOs]
    [*const T where T: ?Sized]
    [*mut T where T: ?Sized]
    [std::cell::Cell<T> where T: ?Sized]
    [std::cell::RefCell<T> where T: ?Sized]
    [std::cell::UnsafeCell<T> where T: ?Sized]
    [std::ptr::NonNull<T> where T: ?Sized]
    [std::rc::Rc<T, A> where T: ?Sized, A: Allocator]
    [std::rc::Weak<T, A> where T: ?Sized, A: Allocator]
    [std::cell::OnceCell<T> where T]
    [std::sync::mpsc::Receiver<T> where T]
    [std::sync::mpsc::Sender<T> where T]
);
108
// NOTE(review): same target list as the `!DynSend` impl for `VarsOs` above —
// presumably mirrors std's `!Sync` on these platforms; confirm when updating.
#[cfg(any(unix, target_os = "hermit", target_os = "wasi", target_os = "solid_asp3"))]
impl !DynSync for std::env::VarsOs {}
112
/// For each listed type, derives `DynSync` from the fact that the type is
/// already `Sync`; the `where $t: Sync` clause makes the compiler check the
/// claim rather than taking the unsafe impl on faith.
macro_rules! already_sync {
    ($([$t: ty])*) => {
        $(unsafe impl DynSync for $t where $t: Sync {})*
    };
}
118
// Types whose `Sync`-ness carries over to `DynSync` unconditionally.
already_sync!(
    [std::sync::atomic::AtomicBool][std::sync::atomic::AtomicUsize][std::sync::atomic::AtomicU8]
    [std::sync::atomic::AtomicU32][std::backtrace::Backtrace][std::io::Error][std::fs::File]
    [jobserver_crate::Client][crate::memmap::Mmap][crate::profiling::SelfProfiler]
    [crate::owned_slice::OwnedSlice]
);
126
// `std::sync::atomic::AtomicU64` only exists on targets with native 64-bit
// atomics; elsewhere the `portable_atomic` crate's drop-in replacement is
// used, so the marker impl follows the same cfg split.
#[cfg(target_has_atomic = "64")]
already_sync!([std::sync::atomic::AtomicU64]);

#[cfg(not(target_has_atomic = "64"))]
already_sync!([portable_atomic::AtomicU64]);
133
/// `DynSync` counterpart of `impl_dyn_send!`: each entry `[Type where Bounds]`
/// becomes `unsafe impl<Bounds> DynSync for Type {}`, for generic containers
/// whose shareability is conditional on their type parameters.
macro_rules! impl_dyn_sync {
    ($($($attr: meta)* [$t: ty where $($bounds: tt)*])*) => {
        $(unsafe impl<$($bounds)*> DynSync for $t {})*
    };
}
139
// Conditional `DynSync` impls for generic containers, mirroring the shape of
// the corresponding std/crate `Sync` impls (e.g. `Mutex<T>: Sync` needs only
// `T: Send`, hence `DynSend` here).
impl_dyn_sync!(
    [std::sync::atomic::AtomicPtr<T> where T]
    [std::sync::OnceLock<T> where T: DynSend + DynSync]
    [std::sync::Mutex<T> where T: ?Sized + DynSend]
    [std::sync::Arc<T> where T: ?Sized + DynSync + DynSend]
    [std::sync::LazyLock<T, F> where T: DynSend + DynSync, F: DynSend]
    [std::collections::HashSet<K, S> where K: DynSync, S: DynSync]
    [std::collections::HashMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
    [std::collections::BTreeMap<K, V, A> where K: DynSync, V: DynSync, A: std::alloc::Allocator + Clone + DynSync]
    [Vec<T, A> where T: DynSync, A: std::alloc::Allocator + DynSync]
    [Box<T, A> where T: ?Sized + DynSync, A: std::alloc::Allocator + DynSync]
    [crate::sync::RwLock<T> where T: DynSend + DynSync]
    [crate::sync::WorkerLocal<T> where T: DynSend]
    [crate::intern::Interned<'a, T> where 'a, T: DynSync]
    [crate::tagged_ptr::TaggedRef<'a, P, T> where 'a, P: Sync, T: Sync + crate::tagged_ptr::Tag]
    [parking_lot::lock_api::Mutex<R, T> where R: DynSync, T: ?Sized + DynSend]
    [parking_lot::lock_api::RwLock<R, T> where R: DynSync, T: ?Sized + DynSend + DynSync]
    [hashbrown::HashTable<T> where T: DynSync]
    [indexmap::IndexSet<V, S> where V: DynSync, S: DynSync]
    [indexmap::IndexMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
    [smallvec::SmallVec<A> where A: smallvec::Array + DynSync]
    [thin_vec::ThinVec<T> where T: DynSync]
);
163
/// Compile-time assertion that `T: DynSync`; the call itself does nothing.
pub fn assert_dyn_sync<T: ?Sized + DynSync>() {}
/// Compile-time assertion that `T: DynSend`.
pub fn assert_dyn_send<T: ?Sized + DynSend>() {}
/// Compile-time assertion that a value's type is `DynSend` (type inferred).
pub fn assert_dyn_send_val<T: ?Sized + DynSend>(_t: &T) {}
/// Compile-time assertion that a value's type is both `DynSync` and `DynSend`.
pub fn assert_dyn_send_sync_val<T: ?Sized + DynSync + DynSend>(_t: &T) {}
168
/// Wrapper that converts the `DynSend`/`DynSync` properties of its contents
/// into real `Send`/`Sync` (see the unsafe impls below). Construction is
/// guarded: `FromDyn::from` asserts that dynamic thread-safe mode is active.
#[derive(Copy, Clone)]
pub struct FromDyn<T>(T);
171
impl<T> FromDyn<T> {
    /// Wraps `val`. Panics (via the assert) unless the compiler is currently
    /// running in dynamic thread-safe mode — the only situation in which the
    /// `Send`/`Sync` impls derived from `DynSend`/`DynSync` are sound to use.
    #[inline(always)]
    pub fn from(val: T) -> Self {
        assert!(crate::sync::is_dyn_thread_safe());
        FromDyn(val)
    }

    /// Consumes the wrapper and returns the inner value.
    #[inline(always)]
    pub fn into_inner(self) -> T {
        self.0
    }
}
187
// SAFETY: `FromDyn` can only be created while dynamic thread-safe mode is on
// (checked in `FromDyn::from`), and in that mode `DynSend` is exactly the
// promise `Send` requires.
unsafe impl<T: DynSend> Send for FromDyn<T> {}

// SAFETY: as above, with `DynSync` standing in for `Sync`.
unsafe impl<T: DynSync> Sync for FromDyn<T> {}
193
194impl<T> std::ops::Deref for FromDyn<T> {
195 type Target = T;
196
197 #[inline(always)]
198 fn deref(&self) -> &Self::Target {
199 &self.0
200 }
201}
202
/// Wrapper in the opposite direction of `FromDyn`: grants `DynSend`/`DynSync`
/// to a value based on its ordinary `Send`/`Sync` bounds (see the unsafe
/// impls below). This is the escape hatch mentioned in the
/// `rustc_on_unimplemented` messages on the traits above.
#[derive(Copy, Clone)]
pub struct IntoDynSyncSend<T: ?Sized>(pub T);
208
// SAFETY: `DynSend` demands no more than `Send` does, so any `Send` type may
// be treated as `DynSend`; likewise for `Sync` and `DynSync`.
unsafe impl<T: ?Sized + Send> DynSend for IntoDynSyncSend<T> {}
unsafe impl<T: ?Sized + Sync> DynSync for IntoDynSyncSend<T> {}
211
/// Shared access to the wrapped value (field `0` is also `pub`, so this is a
/// convenience for method-call syntax).
impl<T> std::ops::Deref for IntoDynSyncSend<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &T {
        &self.0
    }
}
220
/// Mutable access to the wrapped value.
impl<T> std::ops::DerefMut for IntoDynSyncSend<T> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut T {
        &mut self.0
    }
}