@@ -1,5 +1,6 @@
 use crate::cell::UnsafeCell;
 use crate::mem;
+use crate::mem::MaybeUninit;
 use crate::sync::atomic::{AtomicU32, Ordering};
 use crate::sys::cloudabi::abi;
 use crate::sys::rwlock::{self, RWLock};
@@ -47,24 +48,27 @@ impl Mutex {
 }
 
 pub struct ReentrantMutex {
-    lock: UnsafeCell<AtomicU32>,
-    recursion: UnsafeCell<u32>,
+    lock: UnsafeCell<MaybeUninit<AtomicU32>>,
+    recursion: UnsafeCell<MaybeUninit<u32>>,
 }
 
 impl ReentrantMutex {
     pub unsafe fn uninitialized() -> ReentrantMutex {
-        mem::uninitialized()
+        ReentrantMutex {
+            lock: UnsafeCell::new(MaybeUninit::uninit()),
+            recursion: UnsafeCell::new(MaybeUninit::uninit())
+        }
     }
 
     pub unsafe fn init(&mut self) {
-        self.lock = UnsafeCell::new(AtomicU32::new(abi::LOCK_UNLOCKED.0));
-        self.recursion = UnsafeCell::new(0);
+        self.lock = UnsafeCell::new(MaybeUninit::new(AtomicU32::new(abi::LOCK_UNLOCKED.0)));
+        self.recursion = UnsafeCell::new(MaybeUninit::new(0));
     }
 
     pub unsafe fn try_lock(&self) -> bool {
         // Attempt to acquire the lock.
-        let lock = self.lock.get();
-        let recursion = self.recursion.get();
+        let lock = (*self.lock.get()).as_mut_ptr();
+        let recursion = (*self.recursion.get()).as_mut_ptr();
         if let Err(old) = (*lock).compare_exchange(
             abi::LOCK_UNLOCKED.0,
             __pthread_thread_id.0 | abi::LOCK_WRLOCKED.0,
@@ -109,8 +113,8 @@ impl ReentrantMutex {
     }
 
     pub unsafe fn unlock(&self) {
-        let lock = self.lock.get();
-        let recursion = self.recursion.get();
+        let lock = (*self.lock.get()).as_mut_ptr();
+        let recursion = (*self.recursion.get()).as_mut_ptr();
         assert_eq!(
             (*lock).load(Ordering::Relaxed) & !abi::LOCK_KERNEL_MANAGED.0,
             __pthread_thread_id.0 | abi::LOCK_WRLOCKED.0,
@@ -136,8 +140,8 @@ impl ReentrantMutex {
     }
 
     pub unsafe fn destroy(&self) {
-        let lock = self.lock.get();
-        let recursion = self.recursion.get();
+        let lock = (*self.lock.get()).as_mut_ptr();
+        let recursion = (*self.recursion.get()).as_mut_ptr();
         assert_eq!(
             (*lock).load(Ordering::Relaxed),
             abi::LOCK_UNLOCKED.0,
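
Note (not part of the commit itself): the diff above applies the standard migration from the deprecated `mem::uninitialized()` to `MaybeUninit`. The fields are wrapped in `UnsafeCell<MaybeUninit<...>>`, `uninitialized()` builds the struct from `MaybeUninit::uninit()` values instead of conjuring uninitialized memory, `init()` writes real values via `MaybeUninit::new(...)`, and the accessors go through `(*cell.get()).as_mut_ptr()` to obtain a raw pointer without asserting initialization. The sketch below is a minimal, self-contained illustration of that pattern; the names `LazyLock`, `UNLOCKED`, and `is_unlocked` are invented for the example and are not the libstd CloudABI code.

    use std::cell::UnsafeCell;
    use std::mem::MaybeUninit;
    use std::sync::atomic::{AtomicU32, Ordering};

    const UNLOCKED: u32 = 0;

    struct LazyLock {
        state: UnsafeCell<MaybeUninit<AtomicU32>>,
        // Mirrors the `recursion` field in the diff; unused in this sketch.
        #[allow(dead_code)]
        recursion: UnsafeCell<MaybeUninit<u32>>,
    }

    impl LazyLock {
        // Creating `MaybeUninit::uninit()` is safe: the uninitialized bytes
        // stay behind the MaybeUninit wrapper until `init` overwrites them.
        fn uninitialized() -> LazyLock {
            LazyLock {
                state: UnsafeCell::new(MaybeUninit::uninit()),
                recursion: UnsafeCell::new(MaybeUninit::uninit()),
            }
        }

        // Unsafe by convention: callers promise to call this before any read.
        unsafe fn init(&mut self) {
            self.state = UnsafeCell::new(MaybeUninit::new(AtomicU32::new(UNLOCKED)));
            self.recursion = UnsafeCell::new(MaybeUninit::new(0));
        }

        // Caller must guarantee `init` ran first. `as_mut_ptr` yields a raw
        // pointer into the MaybeUninit without claiming it is initialized.
        unsafe fn is_unlocked(&self) -> bool {
            let state = (*self.state.get()).as_mut_ptr();
            (*state).load(Ordering::Relaxed) == UNLOCKED
        }
    }

    fn main() {
        let mut lock = LazyLock::uninitialized();
        unsafe {
            lock.init();
            assert!(lock.is_unlocked());
        }
    }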