 //! CriticalSection is used and we keep track of who's holding the mutex to
 //! detect recursive locks.
 
-use crate::cell::UnsafeCell;
+use crate::cell::{Cell, UnsafeCell};
 use crate::mem::{self, MaybeUninit};
 use crate::sync::atomic::{AtomicUsize, Ordering};
 use crate::sys::c;
 use crate::sys::compat;
 
 pub struct Mutex {
+    // This is either directly an SRWLOCK (if supported), or a Box<Inner> otherwise.
     lock: AtomicUsize,
-    held: UnsafeCell<bool>,
 }
 
 unsafe impl Send for Mutex {}
 unsafe impl Sync for Mutex {}
 
+struct Inner {
+    remutex: ReentrantMutex,
+    held: Cell<bool>,
+}
+
 #[derive(Clone, Copy)]
 enum Kind {
     SRWLock = 1,
@@ -51,7 +56,6 @@ impl Mutex {
             // This works because SRWLOCK_INIT is 0 (wrapped in a struct), so we are also properly
             // initializing an SRWLOCK here.
             lock: AtomicUsize::new(0),
-            held: UnsafeCell::new(false),
         }
     }
     #[inline]
@@ -60,10 +64,11 @@ impl Mutex {
         match kind() {
             Kind::SRWLock => c::AcquireSRWLockExclusive(raw(self)),
             Kind::CriticalSection => {
-                let re = self.remutex();
-                (*re).lock();
-                if !self.flag_locked() {
-                    (*re).unlock();
+                let inner = &*self.inner();
+                inner.remutex.lock();
+                if inner.held.replace(true) {
+                    // It was already locked, so we got a recursive lock which we do not want.
+                    inner.remutex.unlock();
                     panic!("cannot recursively lock a mutex");
                 }
             }
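
A note on the hunk above: `Cell::replace(true)` does the recursion check in one step. It marks the mutex as held and returns the previous value, and since the underlying CriticalSection is reentrant, only the owning thread can reach that point while `held` is already `true`. A minimal standalone sketch of the pattern, with illustrative names (`Flag`, `acquire`, `release`) that are not part of the patch:

use std::cell::Cell;

// Stand-in for the held flag inside the boxed state: the OS lock is
// reentrant, so this flag is what actually detects a second lock()
// from the owning thread.
struct Flag {
    held: Cell<bool>,
}

impl Flag {
    // Err means the flag was already set, i.e. a recursive lock attempt.
    fn acquire(&self) -> Result<(), &'static str> {
        if self.held.replace(true) {
            // replace() returned the old value: it was already locked.
            Err("cannot recursively lock a mutex")
        } else {
            Ok(())
        }
    }

    fn release(&self) {
        self.held.set(false);
    }
}

fn main() {
    let f = Flag { held: Cell::new(false) };
    assert!(f.acquire().is_ok());
    assert!(f.acquire().is_err()); // second acquire by the same owner fails
    f.release();
    assert!(f.acquire().is_ok());
}
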
@@ -73,62 +78,55 @@ impl Mutex {
         match kind() {
             Kind::SRWLock => c::TryAcquireSRWLockExclusive(raw(self)) != 0,
             Kind::CriticalSection => {
-                let re = self.remutex();
-                if !(*re).try_lock() {
+                let inner = &*self.inner();
+                if !inner.remutex.try_lock() {
                     false
-                } else if self.flag_locked() {
-                    true
-                } else {
-                    (*re).unlock();
+                } else if inner.held.replace(true) {
+                    // It was already locked, so we got a recursive lock which we do not want.
+                    inner.remutex.unlock();
                     false
+                } else {
+                    true
                 }
             }
         }
     }
     pub unsafe fn unlock(&self) {
-        *self.held.get() = false;
         match kind() {
             Kind::SRWLock => c::ReleaseSRWLockExclusive(raw(self)),
-            Kind::CriticalSection => (*self.remutex()).unlock(),
+            Kind::CriticalSection => {
+                let inner = &*(self.lock.load(Ordering::SeqCst) as *const Inner);
+                inner.held.set(false);
+                inner.remutex.unlock();
+            }
         }
     }
     pub unsafe fn destroy(&self) {
         match kind() {
             Kind::SRWLock => {}
             Kind::CriticalSection => match self.lock.load(Ordering::SeqCst) {
                 0 => {}
-                n => {
-                    Box::from_raw(n as *mut ReentrantMutex).destroy();
-                }
+                n => Box::from_raw(n as *mut Inner).remutex.destroy(),
             },
         }
     }
 
-    unsafe fn remutex(&self) -> *mut ReentrantMutex {
+    unsafe fn inner(&self) -> *const Inner {
         match self.lock.load(Ordering::SeqCst) {
             0 => {}
-            n => return n as *mut _,
+            n => return n as *const _,
         }
-        let re = box ReentrantMutex::uninitialized();
-        re.init();
-        let re = Box::into_raw(re);
-        match self.lock.compare_and_swap(0, re as usize, Ordering::SeqCst) {
-            0 => re,
+        let inner = box Inner { remutex: ReentrantMutex::uninitialized(), held: Cell::new(false) };
+        inner.remutex.init();
+        let inner = Box::into_raw(inner);
+        match self.lock.compare_and_swap(0, inner as usize, Ordering::SeqCst) {
+            0 => inner,
             n => {
-                Box::from_raw(re).destroy();
-                n as *mut _
+                Box::from_raw(inner).remutex.destroy();
+                n as *const _
             }
         }
     }
-
-    unsafe fn flag_locked(&self) -> bool {
-        if *self.held.get() {
-            false
-        } else {
-            *self.held.get() = true;
-            true
-        }
-    }
 }
 
 fn kind() -> Kind {
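
The rewritten `inner()` in the hunk above is a hand-rolled once-allocation: load the pointer, and if it is still zero, allocate speculatively, then race a compare-and-swap to publish it; the loser destroys its own box and adopts the winner's pointer. A self-contained sketch of the same pattern, assuming a placeholder `State` type and using `compare_exchange` where the patch uses the older `compare_and_swap`:

use std::sync::atomic::{AtomicUsize, Ordering};

struct State {
    value: u32, // placeholder for the real ReentrantMutex + held flag
}

struct Lazy {
    ptr: AtomicUsize, // 0 = not yet initialized, otherwise a leaked Box<State>
}

impl Lazy {
    const fn new() -> Lazy {
        Lazy { ptr: AtomicUsize::new(0) }
    }

    fn get(&self) -> &State {
        match self.ptr.load(Ordering::SeqCst) {
            0 => {}
            n => return unsafe { &*(n as *const State) },
        }
        // Not initialized yet: allocate first, then race to install the pointer.
        let new = Box::into_raw(Box::new(State { value: 42 }));
        match self.ptr.compare_exchange(0, new as usize, Ordering::SeqCst, Ordering::SeqCst) {
            Ok(_) => unsafe { &*new },
            Err(winner) => {
                // Another thread published first: free our allocation, use theirs.
                unsafe { drop(Box::from_raw(new)) };
                unsafe { &*(winner as *const State) }
            }
        }
    }
}

fn main() {
    static LAZY: Lazy = Lazy::new();
    assert_eq!(LAZY.get().value, 42);
    assert_eq!(LAZY.get().value, 42); // second call reuses the same allocation
}
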
@@ -150,35 +148,35 @@ fn kind() -> Kind {
 }
 
 pub struct ReentrantMutex {
-    inner: UnsafeCell<MaybeUninit<c::CRITICAL_SECTION>>,
+    inner: MaybeUninit<UnsafeCell<c::CRITICAL_SECTION>>,
 }
 
 unsafe impl Send for ReentrantMutex {}
 unsafe impl Sync for ReentrantMutex {}
 
 impl ReentrantMutex {
     pub const fn uninitialized() -> ReentrantMutex {
-        ReentrantMutex { inner: UnsafeCell::new(MaybeUninit::uninit()) }
+        ReentrantMutex { inner: MaybeUninit::uninit() }
     }
 
     pub unsafe fn init(&self) {
-        c::InitializeCriticalSection((&mut *self.inner.get()).as_mut_ptr());
+        c::InitializeCriticalSection(UnsafeCell::raw_get(self.inner.as_ptr()));
     }
 
     pub unsafe fn lock(&self) {
-        c::EnterCriticalSection((&mut *self.inner.get()).as_mut_ptr());
+        c::EnterCriticalSection(UnsafeCell::raw_get(self.inner.as_ptr()));
     }
 
     #[inline]
     pub unsafe fn try_lock(&self) -> bool {
-        c::TryEnterCriticalSection((&mut *self.inner.get()).as_mut_ptr()) != 0
+        c::TryEnterCriticalSection(UnsafeCell::raw_get(self.inner.as_ptr())) != 0
     }
 
     pub unsafe fn unlock(&self) {
-        c::LeaveCriticalSection((&mut *self.inner.get()).as_mut_ptr());
+        c::LeaveCriticalSection(UnsafeCell::raw_get(self.inner.as_ptr()));
    }
 
     pub unsafe fn destroy(&self) {
-        c::DeleteCriticalSection((&mut *self.inner.get()).as_mut_ptr());
+        c::DeleteCriticalSection(UnsafeCell::raw_get(self.inner.as_ptr()));
     }
 }
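
The nesting swap in the last hunk, from `UnsafeCell<MaybeUninit<..>>` to `MaybeUninit<UnsafeCell<..>>`, is what lets every method drop the old `&mut *self.inner.get()` dance. With `MaybeUninit` outermost, `as_ptr()` yields a `*const UnsafeCell<..>` without forming a reference to the contents, and `UnsafeCell::raw_get` converts that straight to the `*mut` pointer the FFI calls need, so concurrent callers never materialize aliasing `&mut` references the way the old code did. A reduced sketch, with `u64` and the `Slot` name standing in for `c::CRITICAL_SECTION` and the real struct:

use std::cell::UnsafeCell;
use std::mem::MaybeUninit;

struct Slot {
    // Outer MaybeUninit: the contents may not be initialized yet.
    inner: MaybeUninit<UnsafeCell<u64>>,
}

impl Slot {
    const fn uninitialized() -> Slot {
        Slot { inner: MaybeUninit::uninit() }
    }

    // as_ptr() never reads the contents, and UnsafeCell::raw_get turns
    // *const UnsafeCell<u64> into *mut u64 without going through a
    // reference, so no reference to uninitialized memory is ever formed.
    fn as_mut_ptr(&self) -> *mut u64 {
        UnsafeCell::raw_get(self.inner.as_ptr())
    }
}

fn main() {
    let slot = Slot::uninitialized();
    unsafe {
        slot.as_mut_ptr().write(7); // stands in for InitializeCriticalSection
        assert_eq!(*slot.as_mut_ptr(), 7);
    }
}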