Commit f3c923a

Auto merge of #76645 - fusion-engineering-forks:windows-lock, r=kennytm
Small cleanups in Windows Mutex.

- Move `held` into the boxed part, since the SRW lock implementation does not use this. This makes the Mutex 50% smaller.
- Use `Cell` instead of `UnsafeCell` for `held`, such that `.replace()` can be used (sketched after the commit metadata below).
- Add some comments.
- Avoid creating multiple `&mut`s to the critical section object in `ReentrantMutex`.
2 parents 7bdb5de + 0bb96e7 commit f3c923a
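
To make the second bullet concrete, here is a minimal standalone sketch (not part of this commit; `HeldFlag` and its methods are hypothetical names) of how `Cell<bool>::replace` folds the old "read the flag, then set it" sequence into one call that also reports whether the lock was already held:

use std::cell::Cell;

// Hypothetical stand-in for the `held` flag that this commit moves into
// the boxed `Inner`.
struct HeldFlag {
    held: Cell<bool>,
}

impl HeldFlag {
    // Marks the lock as held; returns true if it was already held,
    // i.e. the caller is attempting a recursive lock.
    fn mark_held(&self) -> bool {
        // `replace` writes `true` and returns the previous value in a single
        // call, which is why `Cell` is more convenient here than `UnsafeCell`.
        self.held.replace(true)
    }

    fn release(&self) {
        self.held.set(false);
    }
}

fn main() {
    let flag = HeldFlag { held: Cell::new(false) };
    assert!(!flag.mark_held()); // first acquisition: was not held
    assert!(flag.mark_held()); // second acquisition: recursive, the mutex panics here
    flag.release();
}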

File tree

2 files changed: +42 -43 lines changed

- library/std/src/lib.rs
- library/std/src/sys/windows/mutex.rs

library/std/src/lib.rs (+1)

@@ -315,6 +315,7 @@
 #![feature(try_reserve)]
 #![feature(unboxed_closures)]
 #![feature(unsafe_block_in_unsafe_fn)]
+#![feature(unsafe_cell_raw_get)]
 #![feature(untagged_unions)]
 #![feature(unwind_attributes)]
 #![feature(vec_into_raw_parts)]
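
The `unsafe_cell_raw_get` gate added above enables `UnsafeCell::raw_get`, which the mutex change below relies on. As a rough illustration (a standalone sketch, not code from this commit), `raw_get` converts a `*const UnsafeCell<T>` into a `*mut T` without ever materializing a reference, which is what lets `ReentrantMutex` avoid creating `&mut`s to the critical section object:

use std::cell::UnsafeCell;
use std::mem::MaybeUninit;

fn main() {
    // Same shape as the patched ReentrantMutex field:
    // MaybeUninit<UnsafeCell<T>> rather than UnsafeCell<MaybeUninit<T>>.
    let slot: MaybeUninit<UnsafeCell<i32>> = MaybeUninit::uninit();

    // `slot.as_ptr()` is a `*const UnsafeCell<i32>`; `UnsafeCell::raw_get`
    // turns it into a `*mut i32` without going through `&` or `&mut`,
    // so no reference to uninitialized memory is ever created.
    let p: *mut i32 = UnsafeCell::raw_get(slot.as_ptr());

    unsafe {
        p.write(42); // initialize in place, as InitializeCriticalSection would
        assert_eq!(p.read(), 42);
    }
}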

library/std/src/sys/windows/mutex.rs (+41 -43)

@@ -19,20 +19,25 @@
 //! CriticalSection is used and we keep track of who's holding the mutex to
 //! detect recursive locks.
 
-use crate::cell::UnsafeCell;
+use crate::cell::{Cell, UnsafeCell};
 use crate::mem::{self, MaybeUninit};
 use crate::sync::atomic::{AtomicUsize, Ordering};
 use crate::sys::c;
 use crate::sys::compat;
 
 pub struct Mutex {
+    // This is either directly an SRWLOCK (if supported), or a Box<Inner> otherwise.
     lock: AtomicUsize,
-    held: UnsafeCell<bool>,
 }
 
 unsafe impl Send for Mutex {}
 unsafe impl Sync for Mutex {}
 
+struct Inner {
+    remutex: ReentrantMutex,
+    held: Cell<bool>,
+}
+
 #[derive(Clone, Copy)]
 enum Kind {
     SRWLock = 1,
@@ -51,7 +56,6 @@ impl Mutex {
             // This works because SRWLOCK_INIT is 0 (wrapped in a struct), so we are also properly
            // initializing an SRWLOCK here.
             lock: AtomicUsize::new(0),
-            held: UnsafeCell::new(false),
         }
     }
     #[inline]
@@ -60,10 +64,11 @@ impl Mutex {
         match kind() {
             Kind::SRWLock => c::AcquireSRWLockExclusive(raw(self)),
             Kind::CriticalSection => {
-                let re = self.remutex();
-                (*re).lock();
-                if !self.flag_locked() {
-                    (*re).unlock();
+                let inner = &*self.inner();
+                inner.remutex.lock();
+                if inner.held.replace(true) {
+                    // It was already locked, so we got a recursive lock which we do not want.
+                    inner.remutex.unlock();
                     panic!("cannot recursively lock a mutex");
                 }
             }
@@ -73,62 +78,55 @@ impl Mutex {
         match kind() {
             Kind::SRWLock => c::TryAcquireSRWLockExclusive(raw(self)) != 0,
             Kind::CriticalSection => {
-                let re = self.remutex();
-                if !(*re).try_lock() {
+                let inner = &*self.inner();
+                if !inner.remutex.try_lock() {
                     false
-                } else if self.flag_locked() {
-                    true
-                } else {
-                    (*re).unlock();
+                } else if inner.held.replace(true) {
+                    // It was already locked, so we got a recursive lock which we do not want.
+                    inner.remutex.unlock();
                     false
+                } else {
+                    true
                 }
             }
         }
     }
     pub unsafe fn unlock(&self) {
-        *self.held.get() = false;
         match kind() {
             Kind::SRWLock => c::ReleaseSRWLockExclusive(raw(self)),
-            Kind::CriticalSection => (*self.remutex()).unlock(),
+            Kind::CriticalSection => {
+                let inner = &*(self.lock.load(Ordering::SeqCst) as *const Inner);
+                inner.held.set(false);
+                inner.remutex.unlock();
+            }
         }
     }
     pub unsafe fn destroy(&self) {
         match kind() {
             Kind::SRWLock => {}
             Kind::CriticalSection => match self.lock.load(Ordering::SeqCst) {
                 0 => {}
-                n => {
-                    Box::from_raw(n as *mut ReentrantMutex).destroy();
-                }
+                n => Box::from_raw(n as *mut Inner).remutex.destroy(),
             },
         }
     }
 
-    unsafe fn remutex(&self) -> *mut ReentrantMutex {
+    unsafe fn inner(&self) -> *const Inner {
         match self.lock.load(Ordering::SeqCst) {
             0 => {}
-            n => return n as *mut _,
+            n => return n as *const _,
         }
-        let re = box ReentrantMutex::uninitialized();
-        re.init();
-        let re = Box::into_raw(re);
-        match self.lock.compare_and_swap(0, re as usize, Ordering::SeqCst) {
-            0 => re,
+        let inner = box Inner { remutex: ReentrantMutex::uninitialized(), held: Cell::new(false) };
+        inner.remutex.init();
+        let inner = Box::into_raw(inner);
+        match self.lock.compare_and_swap(0, inner as usize, Ordering::SeqCst) {
+            0 => inner,
             n => {
-                Box::from_raw(re).destroy();
-                n as *mut _
+                Box::from_raw(inner).remutex.destroy();
+                n as *const _
             }
         }
     }
-
-    unsafe fn flag_locked(&self) -> bool {
-        if *self.held.get() {
-            false
-        } else {
-            *self.held.get() = true;
-            true
-        }
-    }
 }
 
 fn kind() -> Kind {
@@ -150,35 +148,35 @@ fn kind() -> Kind {
 }
 
 pub struct ReentrantMutex {
-    inner: UnsafeCell<MaybeUninit<c::CRITICAL_SECTION>>,
+    inner: MaybeUninit<UnsafeCell<c::CRITICAL_SECTION>>,
 }
 
 unsafe impl Send for ReentrantMutex {}
 unsafe impl Sync for ReentrantMutex {}
 
 impl ReentrantMutex {
     pub const fn uninitialized() -> ReentrantMutex {
-        ReentrantMutex { inner: UnsafeCell::new(MaybeUninit::uninit()) }
+        ReentrantMutex { inner: MaybeUninit::uninit() }
     }
 
     pub unsafe fn init(&self) {
-        c::InitializeCriticalSection((&mut *self.inner.get()).as_mut_ptr());
+        c::InitializeCriticalSection(UnsafeCell::raw_get(self.inner.as_ptr()));
     }
 
     pub unsafe fn lock(&self) {
-        c::EnterCriticalSection((&mut *self.inner.get()).as_mut_ptr());
+        c::EnterCriticalSection(UnsafeCell::raw_get(self.inner.as_ptr()));
     }
 
     #[inline]
     pub unsafe fn try_lock(&self) -> bool {
-        c::TryEnterCriticalSection((&mut *self.inner.get()).as_mut_ptr()) != 0
+        c::TryEnterCriticalSection(UnsafeCell::raw_get(self.inner.as_ptr())) != 0
    }
 
     pub unsafe fn unlock(&self) {
-        c::LeaveCriticalSection((&mut *self.inner.get()).as_mut_ptr());
+        c::LeaveCriticalSection(UnsafeCell::raw_get(self.inner.as_ptr()));
     }
 
     pub unsafe fn destroy(&self) {
-        c::DeleteCriticalSection((&mut *self.inner.get()).as_mut_ptr());
+        c::DeleteCriticalSection(UnsafeCell::raw_get(self.inner.as_ptr()));
     }
 }
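
One pattern in the diff above deserves a note: `Mutex::inner` lazily allocates the `Box<Inner>` and publishes it with a single atomic compare-and-swap, so concurrent callers race to install it and the loser frees its own allocation. Here is a self-contained sketch of that pattern (my own example, not the commit's code; it uses the non-deprecated `compare_exchange` in place of the `compare_and_swap` seen in the diff, and a plain `u32` in place of `Inner`):

use std::sync::atomic::{AtomicUsize, Ordering};

struct Lazy {
    // 0 means "not yet allocated"; otherwise it holds a Box<u32> cast to usize.
    slot: AtomicUsize,
}

impl Lazy {
    const fn new() -> Lazy {
        Lazy { slot: AtomicUsize::new(0) }
    }

    fn get(&self) -> *const u32 {
        let cur = self.slot.load(Ordering::SeqCst);
        if cur != 0 {
            return cur as *const u32;
        }
        // Allocate optimistically; several threads may get here at once.
        let new = Box::into_raw(Box::new(42u32));
        match self.slot.compare_exchange(0, new as usize, Ordering::SeqCst, Ordering::SeqCst) {
            // We won the race: our allocation is now the shared one.
            Ok(_) => new,
            // Another thread won: free ours and use the published pointer.
            Err(existing) => {
                unsafe { drop(Box::from_raw(new)) };
                existing as *const u32
            }
        }
    }
}

fn main() {
    let lazy = Lazy::new();
    unsafe {
        assert_eq!(*lazy.get(), 42);
        assert_eq!(*lazy.get(), 42); // second call reuses the same allocation
    }
    // Like the Windows Mutex before destroy(), this sketch leaks the final box.
}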
