// rustc_data_structures/sync.rs

use std::collections::HashMap;
use std::hash::{BuildHasher, Hash};

pub use parking_lot::{
    MappedRwLockReadGuard as MappedReadGuard, MappedRwLockWriteGuard as MappedWriteGuard,
    RwLockReadGuard as ReadGuard, RwLockWriteGuard as WriteGuard,
};

pub use self::atomic::AtomicU64;
pub use self::freeze::{FreezeLock, FreezeReadGuard, FreezeWriteGuard};
#[doc(no_inline)]
pub use self::lock::{Lock, LockGuard, Mode};
pub use self::mode::{is_dyn_thread_safe, set_dyn_thread_safe_mode};
pub use self::parallel::{
    broadcast, par_fns, par_for_each_in, par_join, par_map, parallel_guard, spawn,
    try_par_for_each_in,
};
pub use self::vec::{AppendOnlyIndexVec, AppendOnlyVec};
pub use self::worker_local::{Registry, WorkerLocal};
pub use crate::marker::*;

mod freeze;
mod lock;
mod parallel;
mod vec;
mod worker_local;

mod atomic {
    #[cfg(target_has_atomic = "64")]
    pub use std::sync::atomic::AtomicU64;

    #[cfg(not(target_has_atomic = "64"))]
    pub use portable_atomic::AtomicU64;
}
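
// A tiny sketch showing that the re-exported `AtomicU64` has the usual atomic
// API regardless of which branch above supplied it (`portable_atomic` reuses
// `core::sync::atomic::Ordering`). Assumes a test harness; the module name
// `atomic_usage` is illustrative only.
#[cfg(test)]
mod atomic_usage {
    use std::sync::atomic::Ordering;

    use super::AtomicU64;

    #[test]
    fn fetch_add_works() {
        let counter = AtomicU64::new(0);
        counter.fetch_add(5, Ordering::Relaxed);
        assert_eq!(counter.load(Ordering::Relaxed), 5);
    }
}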

mod mode {
    use std::sync::atomic::{AtomicU8, Ordering};

    const UNINITIALIZED: u8 = 0;
    const DYN_NOT_THREAD_SAFE: u8 = 1;
    const DYN_THREAD_SAFE: u8 = 2;

    static DYN_THREAD_SAFE_MODE: AtomicU8 = AtomicU8::new(UNINITIALIZED);

    #[inline]
    pub fn is_dyn_thread_safe() -> bool {
        match DYN_THREAD_SAFE_MODE.load(Ordering::Relaxed) {
            DYN_NOT_THREAD_SAFE => false,
            DYN_THREAD_SAFE => true,
            _ => panic!("uninitialized dyn_thread_safe mode!"),
        }
    }

    #[inline]
    pub(super) fn might_be_dyn_thread_safe() -> bool {
        DYN_THREAD_SAFE_MODE.load(Ordering::Relaxed) != DYN_NOT_THREAD_SAFE
    }

    pub fn set_dyn_thread_safe_mode(mode: bool) {
        let set: u8 = if mode { DYN_THREAD_SAFE } else { DYN_NOT_THREAD_SAFE };
        let previous = DYN_THREAD_SAFE_MODE.compare_exchange(
            UNINITIALIZED,
            set,
            Ordering::Relaxed,
            Ordering::Relaxed,
        );

        // The mode may only be set once, and only to a single consistent value.
        assert!(previous.is_ok() || previous == Err(set));
    }
}
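
// A minimal usage sketch of the dyn-thread-safe mode switch, assuming it runs
// in a test binary where no other code has already picked a conflicting mode
// (the mode can only be set once per process, so a conflicting earlier call
// would make the assertion above fire). The module name `mode_usage` is
// illustrative, not part of the upstream API.
#[cfg(test)]
mod mode_usage {
    use super::{is_dyn_thread_safe, set_dyn_thread_safe_mode};

    #[test]
    fn query_after_setting_mode() {
        // Setting the same value twice is allowed; a conflicting value would panic.
        set_dyn_thread_safe_mode(true);
        assert!(is_dyn_thread_safe());
    }
}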

#[derive(Debug, Default)]
pub struct MTLock<T>(Lock<T>);

impl<T> MTLock<T> {
    #[inline(always)]
    pub fn new(inner: T) -> Self {
        MTLock(Lock::new(inner))
    }

    #[inline(always)]
    pub fn into_inner(self) -> T {
        self.0.into_inner()
    }

    #[inline(always)]
    pub fn get_mut(&mut self) -> &mut T {
        self.0.get_mut()
    }

    #[inline(always)]
    pub fn lock(&self) -> LockGuard<'_, T> {
        self.0.lock()
    }

    #[inline(always)]
    pub fn lock_mut(&self) -> LockGuard<'_, T> {
        self.lock()
    }
}
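
// A short sketch of how `MTLock` is used: it wraps a `Lock` and hands out the
// same guard type for both `lock` and `lock_mut`. Assumes a test harness and
// that `LockGuard` derefs to the inner value; the module name `mtlock_usage`
// is illustrative only.
#[cfg(test)]
mod mtlock_usage {
    use super::MTLock;

    #[test]
    fn lock_and_unwrap() {
        let counter = MTLock::new(0_u32);
        *counter.lock() += 1;
        *counter.lock_mut() += 1;
        assert_eq!(counter.into_inner(), 2);
    }
}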

// Set to `true` to make `RwLock` use `try_read`/`try_write` and panic if the
// lock is already held, which helps surface unexpected lock reentrancy.
const ERROR_CHECKING: bool = false;

/// Aligns the wrapped value to a 64-byte boundary (a typical cache-line size)
/// to avoid false sharing between adjacent values.
#[derive(Default)]
#[repr(align(64))]
pub struct CacheAligned<T>(pub T);
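
// A quick check of what `CacheAligned` provides: the `repr(align(64))` wrapper
// forces 64-byte alignment while leaving the inner value accessible as field 0.
// Assumes a test harness; `cache_aligned_usage` is an illustrative name.
#[cfg(test)]
mod cache_aligned_usage {
    use super::CacheAligned;

    #[test]
    fn is_cache_line_aligned() {
        assert_eq!(std::mem::align_of::<CacheAligned<u8>>(), 64);
        let value = CacheAligned(7_u8);
        assert_eq!(value.0, 7);
    }
}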

pub trait HashMapExt<K, V> {
    /// Same as `HashMap::insert`, but it may panic if there's already an
    /// entry for `key` with a value not equal to `value`.
    fn insert_same(&mut self, key: K, value: V);
}

impl<K: Eq + Hash, V: Eq, S: BuildHasher> HashMapExt<K, V> for HashMap<K, V, S> {
    fn insert_same(&mut self, key: K, value: V) {
        self.entry(key).and_modify(|old| assert!(*old == value)).or_insert(value);
    }
}
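
// A brief sketch of `insert_same`: inserting the same key/value pair twice is
// a no-op, while inserting a different value for an existing key asserts.
// Assumes a test harness; `hashmap_ext_usage` is an illustrative name.
#[cfg(test)]
mod hashmap_ext_usage {
    use std::collections::HashMap;

    use super::HashMapExt;

    #[test]
    fn inserting_equal_values_is_idempotent() {
        let mut map: HashMap<&str, u32> = HashMap::new();
        map.insert_same("answer", 42);
        map.insert_same("answer", 42); // same value: allowed
        assert_eq!(map.len(), 1);
        assert_eq!(map["answer"], 42);
    }
}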

#[derive(Debug, Default)]
pub struct RwLock<T>(parking_lot::RwLock<T>);

impl<T> RwLock<T> {
    #[inline(always)]
    pub fn new(inner: T) -> Self {
        RwLock(parking_lot::RwLock::new(inner))
    }

    #[inline(always)]
    pub fn into_inner(self) -> T {
        self.0.into_inner()
    }

    #[inline(always)]
    pub fn get_mut(&mut self) -> &mut T {
        self.0.get_mut()
    }

    #[inline(always)]
    pub fn read(&self) -> ReadGuard<'_, T> {
        if ERROR_CHECKING {
            self.0.try_read().expect("lock was already held")
        } else {
            self.0.read()
        }
    }

    #[inline(always)]
    pub fn try_write(&self) -> Result<WriteGuard<'_, T>, ()> {
        self.0.try_write().ok_or(())
    }

    #[inline(always)]
    pub fn write(&self) -> WriteGuard<'_, T> {
        if ERROR_CHECKING {
            self.0.try_write().expect("lock was already held")
        } else {
            self.0.write()
        }
    }

    #[inline(always)]
    #[track_caller]
    pub fn borrow(&self) -> ReadGuard<'_, T> {
        self.read()
    }

    #[inline(always)]
    #[track_caller]
    pub fn borrow_mut(&self) -> WriteGuard<'_, T> {
        self.write()
    }
}
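
// A small usage sketch of this `RwLock` wrapper: `borrow`/`borrow_mut` are
// simply `read`/`write` under different names, and `try_write` surfaces
// contention as a `Result`. Assumes a test harness; `rwlock_usage` is an
// illustrative name.
#[cfg(test)]
mod rwlock_usage {
    use super::RwLock;

    #[test]
    fn read_write_and_try_write() {
        let values = RwLock::new(vec![1_u32]);

        values.borrow_mut().push(2);
        assert_eq!(values.borrow().len(), 2);

        // While a read guard is alive, `try_write` reports contention as `Err(())`.
        let guard = values.read();
        assert!(values.try_write().is_err());
        drop(guard);

        assert_eq!(values.into_inner(), vec![1, 2]);
    }
}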