cfg_if!(
    if #[cfg(not(parallel_compiler))] {
        pub auto trait DynSend {}
        pub auto trait DynSync {}

        impl<T> DynSend for T {}
        impl<T> DynSync for T {}
    } else {
        #[rustc_on_unimplemented(
            message = "`{Self}` doesn't implement `DynSend`. \
            Add it to `rustc_data_structures::marker` or use `IntoDyn` if it's already `Send`",
            label = "`{Self}` doesn't implement `DynSend`. \
            Add it to `rustc_data_structures::marker` or use `IntoDyn` if it's already `Send`"
        )]
        // A data structure implementing this trait is `Send` whenever `sync::active()`
        // is true. `sync::active()` should be checked before using such data structures.
        // Note: the data structure **must not break** thread safety after it has
        // been created.
        //
        // `sync::active()` should be checked when downcasting these data structures
        // to `Send` via `FromDyn`.
        pub unsafe auto trait DynSend {}

        #[rustc_on_unimplemented(
            message = "`{Self}` doesn't implement `DynSync`. \
            Add it to `rustc_data_structures::marker` or use `IntoDyn` if it's already `Sync`",
            label = "`{Self}` doesn't implement `DynSync`. \
            Add it to `rustc_data_structures::marker` or use `IntoDyn` if it's already `Sync`"
        )]
        // A data structure implementing this trait is `Sync` whenever `sync::active()`
        // is true. Note: the data structure **must not break** thread safety after it
        // has been checked.
        //
        // `sync::active()` should be checked when downcasting these data structures
        // to `Sync` via `FromDyn`.
        pub unsafe auto trait DynSync {}

        // Mirrors `std`: `&T` is `Send` when `T` is `Sync`.
        unsafe impl<T: DynSync + ?Sized> DynSend for &T {}

        macro_rules! impls_dyn_send_neg {
            ($([$t1: ty $(where $($generics1: tt)*)?])*) => {
                $(impl$(<$($generics1)*>)? !DynSend for $t1 {})*
            };
        }

        // Consistent with `std`
        impls_dyn_send_neg!(
            [std::env::Args]
            [std::env::ArgsOs]
            [*const T where T: ?Sized]
            [*mut T where T: ?Sized]
            [std::ptr::NonNull<T> where T: ?Sized]
            [std::rc::Rc<T> where T: ?Sized]
            [std::rc::Weak<T> where T: ?Sized]
            [std::sync::MutexGuard<'_, T> where T: ?Sized]
            [std::sync::RwLockReadGuard<'_, T> where T: ?Sized]
            [std::sync::RwLockWriteGuard<'_, T> where T: ?Sized]
            [std::io::StdoutLock<'_>]
            [std::io::StderrLock<'_>]
        );
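
        // For illustration only (not part of the original change): the invocation above
        // expands to negative impls such as the following, explicitly opting these `std`
        // types out of `DynSend`:
        //
        //     impl !DynSend for std::env::Args {}
        //     impl<T: ?Sized> !DynSend for *const T {}
        //     impl<T: ?Sized> !DynSend for std::rc::Rc<T> {}
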
        cfg_if!(
            // Consistent with `std`
            // `os_imp::Env` is `!Send` on these platforms
            if #[cfg(any(unix, target_os = "hermit", target_os = "wasi", target_os = "solid_asp3"))] {
                impl !DynSend for std::env::VarsOs {}
            }
        );

        macro_rules! already_send {
            ($([$ty: ty])*) => {
                $(unsafe impl DynSend for $ty where $ty: Send {})*
            };
        }

        // These structures are already `Send`.
        already_send!(
            [std::backtrace::Backtrace]
            [std::io::Stdout]
            [std::io::Stderr]
            [std::io::Error]
            [std::fs::File]
            [rustc_arena::DroplessArena]
            [crate::memmap::Mmap]
            [crate::profiling::SelfProfiler]
        );
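
        // For illustration only (not part of the original change): `already_send!` expands
        // to impls like `unsafe impl DynSend for std::fs::File where std::fs::File: Send {}`.
        // The `where $ty: Send` bound means each impl only exists because the type really is
        // `Send`, so these impls cannot accidentally weaken thread safety.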

        macro_rules! impl_dyn_send {
            ($($($attr: meta)* [$ty: ty where $($generics2: tt)*])*) => {
                $(unsafe impl<$($generics2)*> DynSend for $ty {})*
            };
        }

        impl_dyn_send!(
            [std::sync::atomic::AtomicPtr<T> where T]
            [std::sync::Mutex<T> where T: ?Sized + DynSend]
            [std::sync::mpsc::Sender<T> where T: DynSend]
            [std::sync::Arc<T> where T: ?Sized + DynSync + DynSend]
            [std::sync::LazyLock<T, F> where T: DynSend, F: DynSend]
            [std::collections::HashSet<K, S> where K: DynSend, S: DynSend]
            [std::collections::HashMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
            [std::collections::BTreeMap<K, V, A> where K: DynSend, V: DynSend, A: std::alloc::Allocator + Clone + DynSend]
            [Vec<T, A> where T: DynSend, A: std::alloc::Allocator + DynSend]
            [Box<T, A> where T: ?Sized + DynSend, A: std::alloc::Allocator + DynSend]
            [crate::sync::Lock<T> where T: DynSend]
            [crate::sync::RwLock<T> where T: DynSend]
            [rustc_arena::TypedArena<T> where T: DynSend]
            [indexmap::IndexSet<V, S> where V: DynSend, S: DynSend]
            [indexmap::IndexMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
            [thin_vec::ThinVec<T> where T: DynSend]
            [smallvec::SmallVec<A> where A: smallvec::Array + DynSend]

            // We use `Send` here to avoid some extra code, since these types are
            // currently only used in `Send` contexts.
            [crate::owning_ref::OwningRef<O, T> where O: Send, T: ?Sized + Send]
            [crate::owning_ref::OwningRefMut<O, T> where O: Send, T: ?Sized + Send]
        );
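
        // For illustration only (not part of the original change): each bracketed arm above
        // becomes one impl, e.g. `[crate::sync::Lock<T> where T: DynSend]` expands to
        //
        //     unsafe impl<T: DynSend> DynSend for crate::sync::Lock<T> {}
        //
        // The impls are `unsafe` because the author asserts the type is actually safe to
        // send between threads once `sync::active()` is true.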

        macro_rules! impls_dyn_sync_neg {
            ($([$t1: ty $(where $($generics1: tt)*)?])*) => {
                $(impl$(<$($generics1)*>)? !DynSync for $t1 {})*
            };
        }

        // Consistent with `std`
        impls_dyn_sync_neg!(
            [std::env::Args]
            [std::env::ArgsOs]
            [*const T where T: ?Sized]
            [*mut T where T: ?Sized]
            [std::cell::Cell<T> where T: ?Sized]
            [std::cell::RefCell<T> where T: ?Sized]
            [std::cell::UnsafeCell<T> where T: ?Sized]
            [std::ptr::NonNull<T> where T: ?Sized]
            [std::rc::Rc<T> where T: ?Sized]
            [std::rc::Weak<T> where T: ?Sized]
            [std::cell::OnceCell<T> where T]
            [std::sync::mpsc::Receiver<T> where T]
            [std::sync::mpsc::Sender<T> where T]
        );
        cfg_if!(
            // Consistent with `std`
            // `os_imp::Env` is `!Sync` on these platforms
            if #[cfg(any(unix, target_os = "hermit", target_os = "wasi", target_os = "solid_asp3"))] {
                impl !DynSync for std::env::VarsOs {}
            }
        );

        macro_rules! already_sync {
            ($([$ty: ty])*) => {
                $(unsafe impl DynSync for $ty where $ty: Sync {})*
            };
        }

        // These structures are already `Sync`.
        already_sync!(
            [std::sync::atomic::AtomicBool]
            [std::sync::atomic::AtomicUsize]
            [std::sync::atomic::AtomicU8]
            [std::sync::atomic::AtomicU32]
            [std::sync::atomic::AtomicU64]
            [std::backtrace::Backtrace]
            [std::io::Error]
            [std::fs::File]
            [jobserver_crate::Client]
            [crate::memmap::Mmap]
            [crate::profiling::SelfProfiler]
        );

        macro_rules! impl_dyn_sync {
            ($($($attr: meta)* [$ty: ty where $($generics2: tt)*])*) => {
                $(unsafe impl<$($generics2)*> DynSync for $ty {})*
            };
        }

        impl_dyn_sync!(
            [std::sync::atomic::AtomicPtr<T> where T]
            [std::sync::OnceLock<T> where T: DynSend + DynSync]
            [std::sync::Mutex<T> where T: ?Sized + DynSend]
            [std::sync::Arc<T> where T: ?Sized + DynSync + DynSend]
            [std::sync::LazyLock<T, F> where T: DynSend + DynSync, F: DynSend]
            [std::collections::HashSet<K, S> where K: DynSync, S: DynSync]
            [std::collections::HashMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
            [std::collections::BTreeMap<K, V, A> where K: DynSync, V: DynSync, A: std::alloc::Allocator + Clone + DynSync]
            [Vec<T, A> where T: DynSync, A: std::alloc::Allocator + DynSync]
            [Box<T, A> where T: ?Sized + DynSync, A: std::alloc::Allocator + DynSync]
            [crate::sync::Lock<T> where T: DynSend]
            [crate::sync::RwLock<T> where T: DynSend + DynSync]
            [crate::sync::OneThread<T> where T]
            [crate::sync::WorkerLocal<T> where T: DynSend]
            [crate::intern::Interned<'a, T> where 'a, T: DynSync]
            [parking_lot::lock_api::Mutex<R, T> where R: DynSync, T: ?Sized + DynSend]
            [parking_lot::lock_api::RwLock<R, T> where R: DynSync, T: ?Sized + DynSend + DynSync]
            [indexmap::IndexSet<V, S> where V: DynSync, S: DynSync]
            [indexmap::IndexMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
            [smallvec::SmallVec<A> where A: smallvec::Array + DynSync]
            [thin_vec::ThinVec<T> where T: DynSync]

            // We use `Sync` here to avoid some extra code, since these types are
            // currently only used in `Sync` contexts.
            [crate::owning_ref::OwningRef<O, T> where O: Sync, T: ?Sized + Sync]
            [crate::owning_ref::OwningRefMut<O, T> where O: Sync, T: ?Sized + Sync]
        );
    }
);

pub fn assert_dyn_sync<T: ?Sized + DynSync>() {}
pub fn assert_dyn_send<T: ?Sized + DynSend>() {}
pub fn assert_dyn_send_val<T: ?Sized + DynSend>(_t: &T) {}
pub fn assert_dyn_send_sync_val<T: ?Sized + DynSync + DynSend>(_t: &T) {}
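
// Illustrative sketch (not part of the original change): these helpers are empty
// functions used purely as compile-time checks; instantiating one with a type fails
// to compile unless the type satisfies the marker bound, e.g.:
//
//     fn _marker_checks() {
//         assert_dyn_send::<Vec<u32>>();
//         assert_dyn_sync::<std::sync::atomic::AtomicUsize>();
//     }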

#[derive(Copy, Clone)]
pub struct FromDyn<T>(T);

impl<T> FromDyn<T> {
    // `sync::active()` is checked when this structure is created, so the later
    // downcast to `Send`/`Sync` (via the unsafe impls below) is known to be
    // thread-safe.
    #[inline(always)]
    pub fn from(val: T) -> Self {
        #[cfg(parallel_compiler)]
        assert!(crate::sync::active());
        FromDyn(val)
    }

    #[inline(always)]
    pub fn into_inner(self) -> T {
        self.0
    }
}

// `FromDyn` is `Send` if `T` is `DynSend`, since `sync::active()` was checked when it was created.
#[cfg(parallel_compiler)]
unsafe impl<T: DynSend> Send for FromDyn<T> {}

// `FromDyn` is `Sync` if `T` is `DynSync`, since `sync::active()` was checked when it was created.
#[cfg(parallel_compiler)]
unsafe impl<T: DynSync> Sync for FromDyn<T> {}

impl<T> const std::ops::Deref for FromDyn<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
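
// Illustrative sketch (not part of the original change): `FromDyn` is used to move a
// `DynSend` value into a context that requires a real `Send` bound, such as a parallel
// scope. The function names below are hypothetical:
//
//     let wrapped = FromDyn::from(value); // asserts `sync::active()` on parallel builds
//     spawn_in_scope(move || use_it(wrapped.into_inner()));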

#[derive(Copy, Clone)]
pub struct IntoDyn<T: ?Sized>(pub T);

#[cfg(parallel_compiler)]
unsafe impl<T: ?Sized + Send> DynSend for IntoDyn<T> {}
#[cfg(parallel_compiler)]
unsafe impl<T: ?Sized + Sync> DynSync for IntoDyn<T> {}

impl<T> const std::ops::Deref for IntoDyn<T> {
    type Target = T;

    fn deref(&self) -> &T {
        &self.0
    }
}

impl<T> const std::ops::DerefMut for IntoDyn<T> {
    fn deref_mut(&mut self) -> &mut T {
        &mut self.0
    }
}
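
// Illustrative sketch (not part of the original change): `IntoDyn` goes the other way.
// It wraps a value whose type is already `Send`/`Sync` (but not listed in this module)
// so that it satisfies a `DynSend`/`DynSync` bound. `requires_dyn_send` is hypothetical:
//
//     fn requires_dyn_send<T: DynSend>(_: T) {}
//
//     fn forward<T: Send>(val: T) {
//         // `T` is `Send` but may not be `DynSend`; `IntoDyn` bridges the gap.
//         requires_dyn_send(IntoDyn(val));
//     }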