// /rust/registry/src/index.crates.io-1949cf8c6b5b557f/dashmap-6.1.0/src/lock.rs
use core::sync::atomic::{AtomicUsize, Ordering};
use parking_lot_core::{ParkToken, SpinWait, UnparkToken};

pub type RwLock<T> = lock_api::RwLock<RawRwLock, T>;
pub type RwLockReadGuard<'a, T> = lock_api::RwLockReadGuard<'a, RawRwLock, T>;
pub type RwLockWriteGuard<'a, T> = lock_api::RwLockWriteGuard<'a, RawRwLock, T>;

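// State-word encoding, as defined by the constants below: bit 0 flags
// parked readers, bit 1 flags parked writers, and the remaining bits
// hold the reader count in units of ONE_READER. ONE_WRITER sets every
// count bit at once, a value no reader count can reach, so it doubles
// as the "held exclusively" sentinel.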
const READERS_PARKED: usize = 0b0001;
const WRITERS_PARKED: usize = 0b0010;
const ONE_READER: usize = 0b0100;
const ONE_WRITER: usize = !(READERS_PARKED | WRITERS_PARKED);

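/// A word-sized raw reader-writer lock that keeps all of its state in a
/// single `AtomicUsize` and parks contended threads through
/// `parking_lot_core` rather than spinning indefinitely.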
pub struct RawRwLock {
    state: AtomicUsize,
}

unsafe impl lock_api::RawRwLock for RawRwLock {
    #[allow(clippy::declare_interior_mutable_const)]
    const INIT: Self = Self {
        state: AtomicUsize::new(0),
    };

    type GuardMarker = lock_api::GuardNoSend;

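    // Exclusive acquisition: a single CAS from the all-zero word (no
    // readers, nothing parked) claims the lock. `try_lock_exclusive`
    // simply reports failure; `lock_exclusive` falls back to the
    // parking slow path instead.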
    #[inline]
    fn try_lock_exclusive(&self) -> bool {
        self.state
            .compare_exchange(0, ONE_WRITER, Ordering::Acquire, Ordering::Relaxed)
            .is_ok()
    }

    #[inline]
    fn lock_exclusive(&self) {
        if self
            .state
            .compare_exchange_weak(0, ONE_WRITER, Ordering::Acquire, Ordering::Relaxed)
            .is_err()
        {
            self.lock_exclusive_slow();
        }
    }

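    // Exclusive release: the common case is a CAS from "writer held,
    // nobody parked" straight back to zero. Failure means a parked bit
    // is set, so the slow path must wake someone.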
    #[inline]
    unsafe fn unlock_exclusive(&self) {
        if self
            .state
            .compare_exchange(ONE_WRITER, 0, Ordering::Release, Ordering::Relaxed)
            .is_err()
        {
            self.unlock_exclusive_slow();
        }
    }

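    // Shared acquisition: `try_lock_shared` retries the reader
    // increment in a CAS loop but never parks; `lock_shared` falls
    // back to the parking slow path on contention.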
    #[inline]
    fn try_lock_shared(&self) -> bool {
        self.try_lock_shared_fast() || self.try_lock_shared_slow()
    }

    #[inline]
    fn lock_shared(&self) {
        if !self.try_lock_shared_fast() {
            self.lock_shared_slow();
        }
    }

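    // Shared release: drop one reader. If this was the last reader and
    // a writer is parked, hand the lock off in the slow path.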
    #[inline]
    unsafe fn unlock_shared(&self) {
        let state = self.state.fetch_sub(ONE_READER, Ordering::Release);

        if state == (ONE_READER | WRITERS_PARKED) {
            self.unlock_shared_slow();
        }
    }
}

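// Downgrading atomically rewrites the writer sentinel into a single
// reader while preserving WRITERS_PARKED, then wakes every parked
// reader (readers park on the lock address plus one).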
unsafe impl lock_api::RawRwLockDowngrade for RawRwLock {
    #[inline]
    unsafe fn downgrade(&self) {
        let state = self
            .state
            .fetch_and(ONE_READER | WRITERS_PARKED, Ordering::Release);
        if state & READERS_PARKED != 0 {
            parking_lot_core::unpark_all((self as *const _ as usize) + 1, UnparkToken(0));
        }
    }
}

impl RawRwLock {
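    // Slow paths. Writers park on the lock's own address and readers
    // on that address plus one, giving the two groups independent wait
    // queues in parking_lot_core's global table.
    //
    // A writer that wakes from a park retries with `acquire_with` set
    // to WRITERS_PARKED, so the flag survives its own acquisition and
    // later unlocks keep waking the remaining sleepers.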
    #[cold]
    fn lock_exclusive_slow(&self) {
        let mut acquire_with = 0;
        loop {
            let mut spin = SpinWait::new();
            let mut state = self.state.load(Ordering::Relaxed);

            loop {
                while state & ONE_WRITER == 0 {
                    match self.state.compare_exchange_weak(
                        state,
                        state | ONE_WRITER | acquire_with,
                        Ordering::Acquire,
                        Ordering::Relaxed,
                    ) {
                        Ok(_) => return,
                        Err(e) => state = e,
                    }
                }

                if state & WRITERS_PARKED == 0 {
                    if spin.spin() {
                        state = self.state.load(Ordering::Relaxed);
                        continue;
                    }

                    if let Err(e) = self.state.compare_exchange_weak(
                        state,
                        state | WRITERS_PARKED,
                        Ordering::Relaxed,
                        Ordering::Relaxed,
                    ) {
                        state = e;
                        continue;
                    }
                }

                let _ = unsafe {
                    parking_lot_core::park(
                        self as *const _ as usize,
                        || {
                            let state = self.state.load(Ordering::Relaxed);
                            (state & ONE_WRITER != 0) && (state & WRITERS_PARKED != 0)
                        },
                        || {},
                        |_, _| {},
                        ParkToken(0),
                        None,
                    )
                };

                acquire_with = WRITERS_PARKED;
                break;
            }
        }
    }

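    // Exclusive-unlock slow path, entered with at least one parked bit
    // set. Parked readers are preferred: when both groups are waiting,
    // all readers are released and WRITERS_PARKED stays set, so the
    // last reader's unlock will wake a writer.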
    #[cold]
    fn unlock_exclusive_slow(&self) {
        let state = self.state.load(Ordering::Relaxed);
        assert_eq!(state & ONE_WRITER, ONE_WRITER);

        let mut parked = state & (READERS_PARKED | WRITERS_PARKED);
        assert_ne!(parked, 0);

        if parked != (READERS_PARKED | WRITERS_PARKED) {
            if let Err(new_state) =
                self.state
                    .compare_exchange(state, 0, Ordering::Release, Ordering::Relaxed)
            {
                assert_eq!(new_state, ONE_WRITER | READERS_PARKED | WRITERS_PARKED);
                parked = READERS_PARKED | WRITERS_PARKED;
            }
        }

        if parked == (READERS_PARKED | WRITERS_PARKED) {
            self.state.store(WRITERS_PARKED, Ordering::Release);
            parked = READERS_PARKED;
        }

        if parked == READERS_PARKED {
            return unsafe {
                parking_lot_core::unpark_all((self as *const _ as usize) + 1, UnparkToken(0));
            };
        }

        assert_eq!(parked, WRITERS_PARKED);
        unsafe {
            parking_lot_core::unpark_one(self as *const _ as usize, |_| UnparkToken(0));
        }
    }

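    // Optimistic reader increment. `checked_add` fails on a
    // writer-held word (whose count bits are already saturated), and
    // the ONE_WRITER comparison rejects a count that would become
    // indistinguishable from a writer.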
    #[inline(always)]
    fn try_lock_shared_fast(&self) -> bool {
        let state = self.state.load(Ordering::Relaxed);

        if let Some(new_state) = state.checked_add(ONE_READER) {
            if new_state & ONE_WRITER != ONE_WRITER {
                return self
                    .state
                    .compare_exchange_weak(state, new_state, Ordering::Acquire, Ordering::Relaxed)
                    .is_ok();
            }
        }

        false
    }

    #[cold]
    fn try_lock_shared_slow(&self) -> bool {
        let mut state = self.state.load(Ordering::Relaxed);

        while let Some(new_state) = state.checked_add(ONE_READER) {
            if new_state & ONE_WRITER == ONE_WRITER {
                break;
            }

            match self.state.compare_exchange_weak(
                state,
                new_state,
                Ordering::Acquire,
                Ordering::Relaxed,
            ) {
                Ok(_) => return true,
                Err(e) => state = e,
            }
        }

        false
    }

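    // Parking reader path: spin briefly, publish READERS_PARKED, then
    // sleep on the reader queue (lock address plus one). The validation
    // closure re-checks that a writer still holds the lock before the
    // thread actually goes to sleep.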
    #[cold]
    fn lock_shared_slow(&self) {
        loop {
            let mut spin = SpinWait::new();
            let mut state = self.state.load(Ordering::Relaxed);

            loop {
                let mut backoff = SpinWait::new();
                while let Some(new_state) = state.checked_add(ONE_READER) {
                    assert_ne!(
                        new_state & ONE_WRITER,
                        ONE_WRITER,
                        "reader count overflowed",
                    );

                    if self
                        .state
                        .compare_exchange_weak(
                            state,
                            new_state,
                            Ordering::Acquire,
                            Ordering::Relaxed,
                        )
                        .is_ok()
                    {
                        return;
                    }

                    backoff.spin_no_yield();
                    state = self.state.load(Ordering::Relaxed);
                }

                if state & READERS_PARKED == 0 {
                    if spin.spin() {
                        state = self.state.load(Ordering::Relaxed);
                        continue;
                    }

                    if let Err(e) = self.state.compare_exchange_weak(
                        state,
                        state | READERS_PARKED,
                        Ordering::Relaxed,
                        Ordering::Relaxed,
                    ) {
                        state = e;
                        continue;
                    }
                }

                let _ = unsafe {
                    parking_lot_core::park(
                        (self as *const _ as usize) + 1,
                        || {
                            let state = self.state.load(Ordering::Relaxed);
                            (state & ONE_WRITER == ONE_WRITER) && (state & READERS_PARKED != 0)
                        },
                        || {},
                        |_, _| {},
                        ParkToken(0),
                        None,
                    )
                };

                break;
            }
        }
    }

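    // Reached by the last reader while a writer is parked: if the word
    // still reads exactly WRITERS_PARKED, clear it and wake one writer.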
    #[cold]
    fn unlock_shared_slow(&self) {
        if self
            .state
            .compare_exchange(WRITERS_PARKED, 0, Ordering::Relaxed, Ordering::Relaxed)
            .is_ok()
        {
            unsafe {
                parking_lot_core::unpark_one(self as *const _ as usize, |_| UnparkToken(0));
            }
        }
    }
}
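
// A minimal usage sketch of the aliases above, assuming only the
// standard `lock_api` guard API (`new`, `read`, `write`). This test
// module is illustrative and not part of the original file.
#[cfg(test)]
mod tests {
    use super::RwLock;

    #[test]
    fn shared_then_exclusive() {
        let lock = RwLock::new(0usize);

        // Any number of shared guards may coexist.
        let r1 = lock.read();
        let r2 = lock.read();
        assert_eq!(*r1 + *r2, 0);
        drop((r1, r2));

        // An exclusive guard grants mutable access.
        *lock.write() += 1;
        assert_eq!(*lock.read(), 1);
    }
}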