use std::alloc::Allocator;
use std::marker::PointeeSized;

/// Marker analogue of `Send` used by the parallel compiler: a `T: DynSend`
/// value may only actually cross threads when dynamic thread-safety is
/// enabled (see `FromDyn` below, whose `unsafe impl Send` forwards this
/// trait to the real `Send`).
#[diagnostic::on_unimplemented(message = "`{Self}` doesn't implement `DynSend`. \
    Add it to `rustc_data_structures::marker` or use `IntoDynSyncSend` if it's already `Send`")]
pub unsafe auto trait DynSend {}
10
/// Marker analogue of `Sync` used by the parallel compiler: a `T: DynSync`
/// value may only actually be shared between threads when dynamic
/// thread-safety is enabled (see `FromDyn` below, whose `unsafe impl Sync`
/// forwards this trait to the real `Sync`).
#[diagnostic::on_unimplemented(message = "`{Self}` doesn't implement `DynSync`. \
    Add it to `rustc_data_structures::marker` or use `IntoDynSyncSend` if it's already `Sync`")]
pub unsafe auto trait DynSync {}
17
18unsafe impl<T: DynSync + ?Sized + PointeeSized> DynSend for &T {}
20
/// Expands each `[Type where Bounds]` entry into a negative impl
/// `impl<Bounds> !DynSend for Type {}`, opting the type out of the
/// `DynSend` auto trait.
macro_rules! impls_dyn_send_neg {
    ($([$t1: ty $(where $($generics1: tt)*)?])*) => {
        $(impl$(<$($generics1)*>)? !DynSend for $t1 {})*
    };
}
26
27impl !DynSend for std::io::StderrLock<'_> {}impls_dyn_send_neg!(
29 [std::env::Args]
30 [std::env::ArgsOs]
31 [*const T where T: ?Sized + PointeeSized]
32 [*mut T where T: ?Sized + PointeeSized]
33 [std::ptr::NonNull<T> where T: ?Sized + PointeeSized]
34 [std::rc::Rc<T, A> where T: ?Sized, A: Allocator]
35 [std::rc::Weak<T, A> where T: ?Sized, A: Allocator]
36 [std::sync::MutexGuard<'_, T> where T: ?Sized]
37 [std::sync::RwLockReadGuard<'_, T> where T: ?Sized]
38 [std::sync::RwLockWriteGuard<'_, T> where T: ?Sized]
39 [std::io::StdoutLock<'_>]
40 [std::io::StderrLock<'_>]
41);
42
// NOTE(review): presumably these are exactly the platforms on which std's
// `env::VarsOs` is not `Send`, so the negative impl mirrors std — confirm
// against the standard library's platform impls.
#[cfg(any(
    unix,
    target_os = "hermit",
    all(target_vendor = "fortanix", target_env = "sgx"),
    target_os = "solid_asp3",
    target_os = "wasi",
    target_os = "xous"
))]
impl !DynSend for std::env::VarsOs {}
53
/// For each listed type, implements `DynSend` conditional on the real
/// `Send` bound (`where Self: Send`): the type is `DynSend` exactly when
/// it is `Send`.
macro_rules! already_send {
    ($([$ty: ty])*) => {
        $(unsafe impl DynSend for $ty where Self: Send {})*
    };
}
59
60unsafe impl DynSend for crate::owned_slice::OwnedSlice where Self: Send {}already_send!(
62 [std::sync::atomic::AtomicBool][std::sync::atomic::AtomicUsize][std::sync::atomic::AtomicU8]
63 [std::sync::atomic::AtomicU32][std::backtrace::Backtrace][std::io::Stdout][std::io::Stderr]
64 [std::io::Error][std::fs::File][std::panic::Location<'_>][rustc_arena::DroplessArena]
65 [jobserver_crate::Client][jobserver_crate::HelperThread][crate::memmap::Mmap]
66 [crate::profiling::SelfProfiler][crate::owned_slice::OwnedSlice]
67);
68
69#[cfg(target_has_atomic = "64")]
70unsafe impl DynSend for std::sync::atomic::AtomicU64 where Self: Send {}already_send!([std::sync::atomic::AtomicU64]);
71
/// Expands each `[Type where Generics]` entry into
/// `unsafe impl<Generics> DynSend for Type {}`.
///
/// NOTE(review): the `$($attr: meta)*` fragment is matched but never
/// emitted in the expansion — confirm whether attributes were meant to be
/// forwarded onto the generated impls.
macro_rules! impl_dyn_send {
    ($($($attr: meta)* [$ty: ty where $($generics2: tt)*])*) => {
        $(unsafe impl<$($generics2)*> DynSend for $ty {})*
    };
}
77
78unsafe impl<A: smallvec::Array + DynSend> DynSend for smallvec::SmallVec<A> {}impl_dyn_send!(
79 [std::sync::atomic::AtomicPtr<T> where T]
80 [std::sync::Mutex<T> where T: ?Sized+ DynSend]
81 [std::sync::mpsc::Sender<T> where T: DynSend]
82 [std::sync::Arc<T> where T: ?Sized + DynSync + DynSend]
83 [std::sync::Weak<T> where T: ?Sized + DynSync + DynSend]
84 [std::sync::LazyLock<T, F> where T: DynSend, F: DynSend]
85 [std::collections::HashSet<K, S> where K: DynSend, S: DynSend]
86 [std::collections::HashMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
87 [std::collections::BTreeMap<K, V, A> where K: DynSend, V: DynSend, A: std::alloc::Allocator + Clone + DynSend]
88 [Vec<T, A> where T: DynSend, A: std::alloc::Allocator + DynSend]
89 [Box<T, A> where T: ?Sized + DynSend, A: std::alloc::Allocator + DynSend]
90 [crate::sync::RwLock<T> where T: DynSend]
91 [crate::tagged_ptr::TaggedRef<'a, P, T> where 'a, P: Sync, T: Send + crate::tagged_ptr::Tag]
92 [rustc_arena::TypedArena<T> where T: DynSend]
93 [hashbrown::HashTable<T> where T: DynSend]
94 [indexmap::IndexSet<V, S> where V: DynSend, S: DynSend]
95 [indexmap::IndexMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
96 [thin_vec::ThinVec<T> where T: DynSend]
97 [smallvec::SmallVec<A> where A: smallvec::Array + DynSend]
98);
99
/// Expands each `[Type where Bounds]` entry into a negative impl
/// `impl<Bounds> !DynSync for Type {}`, opting the type out of the
/// `DynSync` auto trait.
macro_rules! impls_dyn_sync_neg {
    ($([$t1: ty $(where $($generics1: tt)*)?])*) => {
        $(impl$(<$($generics1)*>)? !DynSync for $t1 {})*
    };
}
105
106impl<T> !DynSync for std::sync::mpsc::Sender<T> {}impls_dyn_sync_neg!(
108 [std::env::Args]
109 [std::env::ArgsOs]
110 [*const T where T: ?Sized + PointeeSized]
111 [*mut T where T: ?Sized + PointeeSized]
112 [std::cell::Cell<T> where T: ?Sized]
113 [std::cell::RefCell<T> where T: ?Sized]
114 [std::cell::UnsafeCell<T> where T: ?Sized]
115 [std::ptr::NonNull<T> where T: ?Sized + PointeeSized]
116 [std::rc::Rc<T, A> where T: ?Sized, A: Allocator]
117 [std::rc::Weak<T, A> where T: ?Sized, A: Allocator]
118 [std::cell::OnceCell<T> where T]
119 [std::sync::mpsc::Receiver<T> where T]
120 [std::sync::mpsc::Sender<T> where T]
121);
122
// NOTE(review): presumably these are exactly the platforms on which std's
// `env::VarsOs` is not `Sync`, so the negative impl mirrors std — confirm
// against the standard library's platform impls.
#[cfg(any(
    unix,
    target_os = "hermit",
    all(target_vendor = "fortanix", target_env = "sgx"),
    target_os = "solid_asp3",
    target_os = "wasi",
    target_os = "xous"
))]
impl !DynSync for std::env::VarsOs {}
133
/// For each listed type, implements `DynSync` conditional on the real
/// `Sync` bound (`where Self: Sync`): the type is `DynSync` exactly when
/// it is `Sync`.
macro_rules! already_sync {
    ($([$ty: ty])*) => {
        $(unsafe impl DynSync for $ty where Self: Sync {})*
    };
}
139
140unsafe impl DynSync for crate::owned_slice::OwnedSlice where Self: Sync {}already_sync!(
142 [std::sync::atomic::AtomicBool][std::sync::atomic::AtomicUsize][std::sync::atomic::AtomicU8]
143 [std::sync::atomic::AtomicU32][std::backtrace::Backtrace][std::io::Error][std::fs::File][std::panic::Location<'_>]
144 [jobserver_crate::Client][jobserver_crate::HelperThread][crate::memmap::Mmap]
145 [crate::profiling::SelfProfiler][crate::owned_slice::OwnedSlice]
146);
147
148#[cfg(target_has_atomic = "64")]
150unsafe impl DynSync for std::sync::atomic::AtomicU64 where Self: Sync {}already_sync!([std::sync::atomic::AtomicU64]);
151
// Fallback for targets without native 64-bit atomics: the `portable_atomic`
// crate's `AtomicU64` is used in place of `std::sync::atomic::AtomicU64`.
#[cfg(not(target_has_atomic = "64"))]
already_sync!([portable_atomic::AtomicU64]);
154
/// Expands each `[Type where Generics]` entry into
/// `unsafe impl<Generics> DynSync for Type {}`.
///
/// NOTE(review): the `$($attr: meta)*` fragment is matched but never
/// emitted in the expansion — confirm whether attributes were meant to be
/// forwarded onto the generated impls.
macro_rules! impl_dyn_sync {
    ($($($attr: meta)* [$ty: ty where $($generics2: tt)*])*) => {
        $(unsafe impl<$($generics2)*> DynSync for $ty {})*
    };
}
160
161unsafe impl<T: DynSync> DynSync for thin_vec::ThinVec<T> {}impl_dyn_sync!(
162 [std::sync::atomic::AtomicPtr<T> where T]
163 [std::sync::OnceLock<T> where T: DynSend + DynSync]
164 [std::sync::Mutex<T> where T: ?Sized + DynSend]
165 [std::sync::Arc<T> where T: ?Sized + DynSync + DynSend]
166 [std::sync::Weak<T> where T: ?Sized + DynSync + DynSend]
167 [std::sync::LazyLock<T, F> where T: DynSend + DynSync, F: DynSend]
168 [std::collections::HashSet<K, S> where K: DynSync, S: DynSync]
169 [std::collections::HashMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
170 [std::collections::BTreeMap<K, V, A> where K: DynSync, V: DynSync, A: std::alloc::Allocator + Clone + DynSync]
171 [Vec<T, A> where T: DynSync, A: std::alloc::Allocator + DynSync]
172 [Box<T, A> where T: ?Sized + DynSync, A: std::alloc::Allocator + DynSync]
173 [crate::sync::RwLock<T> where T: DynSend + DynSync]
174 [crate::sync::WorkerLocal<T> where T: DynSend]
175 [crate::intern::Interned<'a, T> where 'a, T: DynSync]
176 [crate::tagged_ptr::TaggedRef<'a, P, T> where 'a, P: Sync, T: Sync + crate::tagged_ptr::Tag]
177 [parking_lot::lock_api::Mutex<R, T> where R: DynSync, T: ?Sized + DynSend]
178 [parking_lot::lock_api::RwLock<R, T> where R: DynSync, T: ?Sized + DynSend + DynSync]
179 [hashbrown::HashTable<T> where T: DynSync]
180 [indexmap::IndexSet<V, S> where V: DynSync, S: DynSync]
181 [indexmap::IndexMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
182 [smallvec::SmallVec<A> where A: smallvec::Array + DynSync]
183 [thin_vec::ThinVec<T> where T: DynSync]
184);
185
/// Compile-time check: instantiating this function asserts `T: DynSync`.
pub fn assert_dyn_sync<T: ?Sized + PointeeSized + DynSync>() {}
/// Compile-time check: instantiating this function asserts `T: DynSend`.
pub fn assert_dyn_send<T: ?Sized + PointeeSized + DynSend>() {}
/// Compile-time check on a value: `_t`'s type must be `DynSend`.
pub fn assert_dyn_send_val<T: ?Sized + PointeeSized + DynSend>(_t: &T) {}
/// Compile-time check on a value: `_t`'s type must be both `DynSync` and `DynSend`.
pub fn assert_dyn_send_sync_val<T: ?Sized + PointeeSized + DynSync + DynSend>(_t: &T) {}
190
/// Wrapper that turns a `DynSend`/`DynSync` value into a genuinely
/// `Send`/`Sync` one (see the `unsafe impl`s below). It can only be
/// constructed while `crate::sync::is_dyn_thread_safe()` holds.
///
/// Fix: the `#[automatically_derived]` expansions of `Copy` and `Clone`
/// had been fused inside the `#[derive(...)]` attribute, making it invalid
/// syntax; reconstructed as a plain derive.
#[derive(Copy, Clone)]
pub struct FromDyn<T>(T);
193
194impl<T> FromDyn<T> {
195 #[inline(always)]
196 pub fn from(val: T) -> Self {
197 if !crate::sync::is_dyn_thread_safe() {
::core::panicking::panic("assertion failed: crate::sync::is_dyn_thread_safe()")
};assert!(crate::sync::is_dyn_thread_safe());
201 FromDyn(val)
202 }
203
204 #[inline(always)]
205 pub fn derive<O>(&self, val: O) -> FromDyn<O> {
206 FromDyn(val)
208 }
209
210 #[inline(always)]
211 pub fn into_inner(self) -> T {
212 self.0
213 }
214}
215
216unsafe impl<T: DynSend> Send for FromDyn<T> {}
218
219unsafe impl<T: DynSync> Sync for FromDyn<T> {}
221
222impl<T> std::ops::Deref for FromDyn<T> {
223 type Target = T;
224
225 #[inline(always)]
226 fn deref(&self) -> &Self::Target {
227 &self.0
228 }
229}
230
231impl<T> std::ops::DerefMut for FromDyn<T> {
232 #[inline(always)]
233 fn deref_mut(&mut self) -> &mut Self::Target {
234 &mut self.0
235 }
236}
237
238#[derive(#[automatically_derived]
impl<T: ::core::marker::Copy + ?Sized + PointeeSized> ::core::marker::Copy for
IntoDynSyncSend<T> {
}Copy, #[automatically_derived]
impl<T: ::core::clone::Clone + ?Sized + PointeeSized> ::core::clone::Clone for
IntoDynSyncSend<T> {
#[inline]
fn clone(&self) -> IntoDynSyncSend<T> {
IntoDynSyncSend(::core::clone::Clone::clone(&self.0))
}
}Clone)]
242pub struct IntoDynSyncSend<T: ?Sized + PointeeSized>(pub T);
243
// SAFETY: the wrapped value is genuinely `Send`/`Sync`, which is strictly
// stronger than what `DynSend`/`DynSync` require.
unsafe impl<T: ?Sized + PointeeSized + Send> DynSend for IntoDynSyncSend<T> {}
unsafe impl<T: ?Sized + PointeeSized + Sync> DynSync for IntoDynSyncSend<T> {}
246
247impl<T> std::ops::Deref for IntoDynSyncSend<T> {
248 type Target = T;
249
250 #[inline(always)]
251 fn deref(&self) -> &T {
252 &self.0
253 }
254}
255
256impl<T> std::ops::DerefMut for IntoDynSyncSend<T> {
257 #[inline(always)]
258 fn deref_mut(&mut self) -> &mut T {
259 &mut self.0
260 }
261}