rustc_data_structures/marker.rs

use std::alloc::Allocator;

#[diagnostic::on_unimplemented(message = "`{Self}` doesn't implement `DynSend`. \
            Add it to `rustc_data_structures::marker` or use `IntoDynSyncSend` if it's already `Send`")]
// This is an auto trait for types which can be sent across threads if `sync::is_dyn_thread_safe()`
// is true. These types can be wrapped in a `FromDyn` to get a `Send` type. Wrapping a
// `Send` type in `IntoDynSyncSend` will create a `DynSend` type.
pub unsafe auto trait DynSend {}

#[diagnostic::on_unimplemented(message = "`{Self}` doesn't implement `DynSync`. \
            Add it to `rustc_data_structures::marker` or use `IntoDynSyncSend` if it's already `Sync`")]
// This is an auto trait for types which can be shared across threads if `sync::is_dyn_thread_safe()`
// is true. These types can be wrapped in a `FromDyn` to get a `Sync` type. Wrapping a
// `Sync` type in `IntoDynSyncSend` will create a `DynSync` type.
pub unsafe auto trait DynSync {}

// Mirrors the `Send`/`Sync` relationship in `std`: `&T` is `DynSend` if `T` is `DynSync`.
unsafe impl<T: DynSync + ?Sized> DynSend for &T {}

macro_rules! impls_dyn_send_neg {
    ($([$t1: ty $(where $($generics1: tt)*)?])*) => {
        $(impl$(<$($generics1)*>)? !DynSend for $t1 {})*
    };
}

// Consistent with `std`
impls_dyn_send_neg!(
    [std::env::Args]
    [std::env::ArgsOs]
    [*const T where T: ?Sized]
    [*mut T where T: ?Sized]
    [std::ptr::NonNull<T> where T: ?Sized]
    [std::rc::Rc<T, A> where T: ?Sized, A: Allocator]
    [std::rc::Weak<T, A> where T: ?Sized, A: Allocator]
    [std::sync::MutexGuard<'_, T> where T: ?Sized]
    [std::sync::RwLockReadGuard<'_, T> where T: ?Sized]
    [std::sync::RwLockWriteGuard<'_, T> where T: ?Sized]
    [std::io::StdoutLock<'_>]
    [std::io::StderrLock<'_>]
);

#[cfg(any(
    unix,
    target_os = "hermit",
    all(target_vendor = "fortanix", target_env = "sgx"),
    target_os = "solid_asp3",
    target_os = "wasi",
    target_os = "xous"
))]
// Consistent with `std`: `env_imp::Env` is `!Sync` on these platforms
impl !DynSend for std::env::VarsOs {}

macro_rules! already_send {
    ($([$ty: ty])*) => {
        $(unsafe impl DynSend for $ty where $ty: Send {})*
    };
}

// These structures are already `Send`.
already_send!(
    [std::backtrace::Backtrace][std::io::Stdout][std::io::Stderr][std::io::Error][std::fs::File]
        [rustc_arena::DroplessArena][crate::memmap::Mmap][crate::profiling::SelfProfiler]
        [crate::owned_slice::OwnedSlice]
);

macro_rules! impl_dyn_send {
    ($($($attr: meta)* [$ty: ty where $($generics2: tt)*])*) => {
        $(unsafe impl<$($generics2)*> DynSend for $ty {})*
    };
}

impl_dyn_send!(
    [std::sync::atomic::AtomicPtr<T> where T]
    [std::sync::Mutex<T> where T: ?Sized + DynSend]
    [std::sync::mpsc::Sender<T> where T: DynSend]
    [std::sync::Arc<T> where T: ?Sized + DynSync + DynSend]
    [std::sync::LazyLock<T, F> where T: DynSend, F: DynSend]
    [std::collections::HashSet<K, S> where K: DynSend, S: DynSend]
    [std::collections::HashMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
    [std::collections::BTreeMap<K, V, A> where K: DynSend, V: DynSend, A: std::alloc::Allocator + Clone + DynSend]
    [Vec<T, A> where T: DynSend, A: std::alloc::Allocator + DynSend]
    [Box<T, A> where T: ?Sized + DynSend, A: std::alloc::Allocator + DynSend]
    [crate::sync::RwLock<T> where T: DynSend]
    [crate::tagged_ptr::TaggedRef<'a, P, T> where 'a, P: Sync, T: Send + crate::tagged_ptr::Tag]
    [rustc_arena::TypedArena<T> where T: DynSend]
    [hashbrown::HashTable<T> where T: DynSend]
    [indexmap::IndexSet<V, S> where V: DynSend, S: DynSend]
    [indexmap::IndexMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
    [thin_vec::ThinVec<T> where T: DynSend]
    [smallvec::SmallVec<A> where A: smallvec::Array + DynSend]
);
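
// Illustrative sketch (added for exposition; `ExampleQueryState` and the function
// below are hypothetical, not part of the original module): because `DynSend` is an
// auto trait, a struct whose fields are all `DynSend` (here via the `Vec` and `Arc`
// impls above) is `DynSend` without any manual impl.
#[allow(dead_code)]
struct ExampleQueryState {
    results: Vec<u32>,                // `DynSend` via the `Vec` impl above
    shared: std::sync::Arc<Vec<u32>>, // `DynSend` via the `Arc` impl above
}

#[allow(dead_code)]
fn _example_composed_type_is_dyn_send() {
    // `assert_dyn_send` is defined further down in this module.
    assert_dyn_send::<ExampleQueryState>();
}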

macro_rules! impls_dyn_sync_neg {
    ($([$t1: ty $(where $($generics1: tt)*)?])*) => {
        $(impl$(<$($generics1)*>)? !DynSync for $t1 {})*
    };
}

// Consistent with `std`
impls_dyn_sync_neg!(
    [std::env::Args]
    [std::env::ArgsOs]
    [*const T where T: ?Sized]
    [*mut T where T: ?Sized]
    [std::cell::Cell<T> where T: ?Sized]
    [std::cell::RefCell<T> where T: ?Sized]
    [std::cell::UnsafeCell<T> where T: ?Sized]
    [std::ptr::NonNull<T> where T: ?Sized]
    [std::rc::Rc<T, A> where T: ?Sized, A: Allocator]
    [std::rc::Weak<T, A> where T: ?Sized, A: Allocator]
    [std::cell::OnceCell<T> where T]
    [std::sync::mpsc::Receiver<T> where T]
    [std::sync::mpsc::Sender<T> where T]
);

#[cfg(any(
    unix,
    target_os = "hermit",
    all(target_vendor = "fortanix", target_env = "sgx"),
    target_os = "solid_asp3",
    target_os = "wasi",
    target_os = "xous"
))]
// Consistent with `std`: `env_imp::Env` is `!Sync` on these platforms
impl !DynSync for std::env::VarsOs {}

macro_rules! already_sync {
    ($([$ty: ty])*) => {
        $(unsafe impl DynSync for $ty where $ty: Sync {})*
    };
}

// These structures are already `Sync`.
already_sync!(
    [std::sync::atomic::AtomicBool][std::sync::atomic::AtomicUsize][std::sync::atomic::AtomicU8]
        [std::sync::atomic::AtomicU32][std::backtrace::Backtrace][std::io::Error][std::fs::File]
        [jobserver_crate::Client][crate::memmap::Mmap][crate::profiling::SelfProfiler]
        [crate::owned_slice::OwnedSlice]
);

// Use portable AtomicU64 for targets without native 64-bit atomics
#[cfg(target_has_atomic = "64")]
already_sync!([std::sync::atomic::AtomicU64]);

#[cfg(not(target_has_atomic = "64"))]
already_sync!([portable_atomic::AtomicU64]);

macro_rules! impl_dyn_sync {
    ($($($attr: meta)* [$ty: ty where $($generics2: tt)*])*) => {
        $(unsafe impl<$($generics2)*> DynSync for $ty {})*
    };
}

impl_dyn_sync!(
    [std::sync::atomic::AtomicPtr<T> where T]
    [std::sync::OnceLock<T> where T: DynSend + DynSync]
    [std::sync::Mutex<T> where T: ?Sized + DynSend]
    [std::sync::Arc<T> where T: ?Sized + DynSync + DynSend]
    [std::sync::LazyLock<T, F> where T: DynSend + DynSync, F: DynSend]
    [std::collections::HashSet<K, S> where K: DynSync, S: DynSync]
    [std::collections::HashMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
    [std::collections::BTreeMap<K, V, A> where K: DynSync, V: DynSync, A: std::alloc::Allocator + Clone + DynSync]
    [Vec<T, A> where T: DynSync, A: std::alloc::Allocator + DynSync]
    [Box<T, A> where T: ?Sized + DynSync, A: std::alloc::Allocator + DynSync]
    [crate::sync::RwLock<T> where T: DynSend + DynSync]
    [crate::sync::WorkerLocal<T> where T: DynSend]
    [crate::intern::Interned<'a, T> where 'a, T: DynSync]
    [crate::tagged_ptr::TaggedRef<'a, P, T> where 'a, P: Sync, T: Sync + crate::tagged_ptr::Tag]
    [parking_lot::lock_api::Mutex<R, T> where R: DynSync, T: ?Sized + DynSend]
    [parking_lot::lock_api::RwLock<R, T> where R: DynSync, T: ?Sized + DynSend + DynSync]
    [hashbrown::HashTable<T> where T: DynSync]
    [indexmap::IndexSet<V, S> where V: DynSync, S: DynSync]
    [indexmap::IndexMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
    [smallvec::SmallVec<A> where A: smallvec::Array + DynSync]
    [thin_vec::ThinVec<T> where T: DynSync]
);

pub fn assert_dyn_sync<T: ?Sized + DynSync>() {}
pub fn assert_dyn_send<T: ?Sized + DynSend>() {}
pub fn assert_dyn_send_val<T: ?Sized + DynSend>(_t: &T) {}
pub fn assert_dyn_send_sync_val<T: ?Sized + DynSync + DynSend>(_t: &T) {}
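
// Illustrative usage (added for exposition; the function below is hypothetical):
// these helpers are compile-time checks with empty bodies. For example, the impls
// above make the following instantiations well-formed:
#[allow(dead_code)]
fn _example_assertions() {
    assert_dyn_send::<Vec<String>>();
    assert_dyn_sync::<std::sync::Arc<String>>();
    // Mirrors `std`, where `Mutex<RefCell<T>>` is `Sync` because `RefCell<T>` is `Send`.
    assert_dyn_sync::<std::sync::Mutex<std::cell::RefCell<u8>>>();
}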

#[derive(Copy, Clone)]
pub struct FromDyn<T>(T);

impl<T> FromDyn<T> {
    #[inline(always)]
    pub fn from(val: T) -> Self {
        // Check that `sync::is_dyn_thread_safe()` is true on creation so we can
        // implement `Send` and `Sync` for this structure when `T`
        // implements `DynSend` and `DynSync` respectively.
        assert!(crate::sync::is_dyn_thread_safe());
        FromDyn(val)
    }

    #[inline(always)]
    pub fn derive<O>(&self, val: O) -> FromDyn<O> {
        // We already did the check for `sync::is_dyn_thread_safe()` when creating `Self`
        FromDyn(val)
    }

    #[inline(always)]
    pub fn into_inner(self) -> T {
        self.0
    }
}

// `FromDyn` is `Send` if `T` is `DynSend`, since it ensures that `sync::is_dyn_thread_safe()` is true.
unsafe impl<T: DynSend> Send for FromDyn<T> {}

// `FromDyn` is `Sync` if `T` is `DynSync`, since it ensures that `sync::is_dyn_thread_safe()` is true.
unsafe impl<T: DynSync> Sync for FromDyn<T> {}

impl<T> std::ops::Deref for FromDyn<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<T> std::ops::DerefMut for FromDyn<T> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
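
// Illustrative sketch (added for exposition; both functions below are hypothetical):
// once `is_dyn_thread_safe()` returns true, wrapping a `DynSend` value in `FromDyn`
// yields a `Send` value, e.g. to satisfy the `Send` bound of `std::thread::scope`'s
// `spawn` or of a work queue shared between worker threads.
#[allow(dead_code)]
fn _example_requires_send<T: Send>(_: T) {}

#[allow(dead_code)]
fn _example_wrap_for_send<T: DynSend>(value: T) {
    // `FromDyn::from` asserts `crate::sync::is_dyn_thread_safe()` at runtime.
    let wrapped = FromDyn::from(value);
    _example_requires_send(wrapped);
}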

// A wrapper for values that are already `Send` or `Sync`, providing `DynSend` and
// `DynSync` impls for them, since the compiler cannot infer those auto traits in
// some cases (e.g. trait objects such as `Box<dyn Send>` / `Box<dyn Sync>`).
#[derive(Copy, Clone)]
pub struct IntoDynSyncSend<T: ?Sized>(pub T);

unsafe impl<T: ?Sized + Send> DynSend for IntoDynSyncSend<T> {}
unsafe impl<T: ?Sized + Sync> DynSync for IntoDynSyncSend<T> {}

impl<T> std::ops::Deref for IntoDynSyncSend<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &T {
        &self.0
    }
}

impl<T> std::ops::DerefMut for IntoDynSyncSend<T> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut T {
        &mut self.0
    }
}
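
// Illustrative sketch (added for exposition; the function below is hypothetical):
// auto traits do not propagate through trait objects, so `Box<dyn std::any::Any + Send>`
// is not automatically `DynSend`. Wrapping it in `IntoDynSyncSend` provides the impl,
// because the contents are statically known to be `Send`.
#[allow(dead_code)]
fn _example_wrap_trait_object(boxed: Box<dyn std::any::Any + Send>) {
    let wrapped = IntoDynSyncSend(boxed);
    assert_dyn_send_val(&wrapped);
}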