/src/swift-nio/Sources/NIOConcurrencyHelpers/atomics.swift
Line | Count | Source |
1 | | //===----------------------------------------------------------------------===// |
2 | | // |
3 | | // This source file is part of the SwiftNIO open source project |
4 | | // |
5 | | // Copyright (c) 2017-2018 Apple Inc. and the SwiftNIO project authors |
6 | | // Licensed under Apache License v2.0 |
7 | | // |
8 | | // See LICENSE.txt for license information |
9 | | // See CONTRIBUTORS.txt for the list of SwiftNIO project authors |
10 | | // |
11 | | // SPDX-License-Identifier: Apache-2.0 |
12 | | // |
13 | | //===----------------------------------------------------------------------===// |
14 | | |
15 | | import CNIOAtomics |
16 | | |
#if canImport(Darwin)
import Darwin
// Darwin: cooperatively give up the remainder of this thread's time slice.
private func sys_sched_yield() {
    pthread_yield_np()
}
#elseif os(Windows)
import ucrt
import WinSDK
// Windows has no sched_yield; Sleep(0) relinquishes the rest of the current
// time slice to any other ready thread.
private func sys_sched_yield() {
    Sleep(0)
}
#else
#if canImport(Glibc)
@preconcurrency import Glibc
#elseif canImport(Musl)
@preconcurrency import Musl
#elseif canImport(Bionic)
@preconcurrency import Bionic
#elseif canImport(WASILibc)
@preconcurrency import WASILibc
#else
#error("The concurrency atomics module was unable to identify your C library.")
#endif

// POSIX-ish C libraries: use sched_yield(2); the return value is deliberately ignored.
private func sys_sched_yield() {
    _ = sched_yield()
}
#endif
45 | | |
/// An atomic primitive object.
///
/// Before reaching for `UnsafeEmbeddedAtomic`, consider whether `Atomic` meets your needs.
/// `UnsafeEmbeddedAtomic` is a value type, but the atomic storage itself is heap-allocated:
/// it is therefore only safe to use where you can guarantee the atomic is cleaned up
/// (by calling `destroy`). If you cannot make that guarantee, use `Atomic`, which manages
/// the lifetime for you.
///
/// Atomic objects support a wide range of atomic operations:
///
/// - Compare and swap
/// - Add
/// - Subtract
/// - Exchange
/// - Load current value
/// - Store current value
///
/// Atomic primitives are useful when building constructs that need to
/// communicate or cooperate across multiple threads. In the case of
/// SwiftNIO this usually involves communicating across multiple event loops.
@available(*, deprecated, message: "please use UnsafeAtomic from https://github.com/apple/swift-atomics instead")
public struct UnsafeEmbeddedAtomic<T: AtomicPrimitive> {
    // Opaque handle to the heap-allocated C atomic storage created by `T.atomic_create`.
    @usableFromInline
    internal let value: OpaquePointer

    /// Create an atomic object holding `value`.
    @inlinable
    public init(value: T) {
        self.value = T.atomic_create(value)
    }

    /// Compare-and-swap: if the current value equals `expected`, atomically replace
    /// it with `desired`.
    ///
    /// Semantics follow C11's `atomic_compare_exchange_strong` with a *sequentially
    /// consistent* ordering: the swap always succeeds when the stored value equals
    /// `expected`. See C11's `stdatomic.h` documentation for details on atomic
    /// memory models.
    ///
    /// - Parameter expected: The value this object must currently hold for the swap to happen.
    /// - Parameter desired: The value stored if the comparison succeeds.
    /// - Returns: `true` if the exchange occurred, `false` if `expected` did not match
    ///            the current value and nothing was stored.
    @inlinable
    public func compareAndExchange(expected: T, desired: T) -> Bool {
        return T.atomic_compare_and_exchange(self.value, expected, desired)
    }

    /// Atomically add `rhs` to the stored value.
    ///
    /// Uses a *relaxed* memory ordering: atomicity of this single operation is the
    /// only guarantee — no ordering with respect to other events is implied.
    ///
    /// - Parameter rhs: The amount to add.
    /// - Returns: The value held immediately before the addition.
    @discardableResult
    @inlinable
    public func add(_ rhs: T) -> T {
        return T.atomic_add(self.value, rhs)
    }

    /// Atomically subtract `rhs` from the stored value.
    ///
    /// Uses a *relaxed* memory ordering: atomicity of this single operation is the
    /// only guarantee — no ordering with respect to other events is implied.
    ///
    /// - Parameter rhs: The amount to subtract.
    /// - Returns: The value held immediately before the subtraction.
    @discardableResult
    @inlinable
    public func sub(_ rhs: T) -> T {
        return T.atomic_sub(self.value, rhs)
    }

    /// Atomically replace the stored value with `value`, returning the old value.
    ///
    /// Uses a *relaxed* memory ordering: atomicity of this single operation is the
    /// only guarantee — no ordering with respect to other events is implied.
    ///
    /// - Parameter value: The new value to store.
    /// - Returns: The value held immediately before the exchange.
    @inlinable
    public func exchange(with value: T) -> T {
        return T.atomic_exchange(self.value, value)
    }

    /// Atomically read the stored value.
    ///
    /// Uses a *relaxed* memory ordering: atomicity of this single operation is the
    /// only guarantee — no ordering with respect to other events is implied.
    ///
    /// - Returns: The current value of this object.
    @inlinable
    public func load() -> T {
        return T.atomic_load(self.value)
    }

    /// Atomically overwrite the stored value with `value`.
    ///
    /// Uses a *relaxed* memory ordering: atomicity of this single operation is the
    /// only guarantee — no ordering with respect to other events is implied.
    ///
    /// - Parameter value: The new value to store.
    @inlinable
    public func store(_ value: T) {
        T.atomic_store(self.value, value)
    }

    /// Destroy the atomic value.
    ///
    /// This method is the source of the unsafety of this structure: it *must* be
    /// called exactly once, otherwise the heap-allocated storage leaks.
    public func destroy() {
        T.atomic_destroy(self.value)
    }
}
169 | | |
// Safety: the shared storage behind `value` is only ever accessed through the
// atomic C operations, so concurrent use from multiple tasks is sound.
@available(*, deprecated)
extension UnsafeEmbeddedAtomic: @unchecked Sendable where T: Sendable {}
172 | | |
/// An encapsulation of an atomic primitive object.
///
/// Atomic objects support a wide range of atomic operations:
///
/// - Compare and swap
/// - Add
/// - Subtract
/// - Exchange
/// - Load current value
/// - Store current value
///
/// Atomic primitives are useful when building constructs that need to
/// communicate or cooperate across multiple threads. In the case of
/// SwiftNIO this usually involves communicating across multiple event loops.
///
/// By necessity, all atomic values are references: after all, it makes no
/// sense to talk about managing an atomic value when each time it's modified
/// the thread that modified it gets a local copy!
@available(*, deprecated, message: "please use ManagedAtomic from https://github.com/apple/swift-atomics instead")
public final class Atomic<T: AtomicPrimitive> {
    // The underlying unsafe atomic. Reference semantics of this class guarantee
    // `destroy()` runs exactly once, in `deinit`.
    @usableFromInline
    internal let embedded: UnsafeEmbeddedAtomic<T>

    /// Create an atomic object holding `value`.
    @inlinable
    public init(value: T) {
        self.embedded = UnsafeEmbeddedAtomic(value: value)
    }

    /// Compare-and-swap: if the current value equals `expected`, atomically replace
    /// it with `desired`.
    ///
    /// Semantics follow C11's `atomic_compare_exchange_strong` with a *sequentially
    /// consistent* ordering: the swap always succeeds when the stored value equals
    /// `expected`. See C11's `stdatomic.h` documentation for details on atomic
    /// memory models.
    ///
    /// - Parameter expected: The value this object must currently hold for the swap to happen.
    /// - Parameter desired: The value stored if the comparison succeeds.
    /// - Returns: `true` if the exchange occurred, `false` if `expected` did not match
    ///            the current value and nothing was stored.
    @inlinable
    public func compareAndExchange(expected: T, desired: T) -> Bool {
        return self.embedded.compareAndExchange(expected: expected, desired: desired)
    }

    /// Atomically add `rhs` to the stored value.
    ///
    /// Uses a *relaxed* memory ordering: atomicity of this single operation is the
    /// only guarantee — no ordering with respect to other events is implied.
    ///
    /// - Parameter rhs: The amount to add.
    /// - Returns: The value held immediately before the addition.
    @discardableResult
    @inlinable
    public func add(_ rhs: T) -> T {
        return self.embedded.add(rhs)
    }

    /// Atomically subtract `rhs` from the stored value.
    ///
    /// Uses a *relaxed* memory ordering: atomicity of this single operation is the
    /// only guarantee — no ordering with respect to other events is implied.
    ///
    /// - Parameter rhs: The amount to subtract.
    /// - Returns: The value held immediately before the subtraction.
    @discardableResult
    @inlinable
    public func sub(_ rhs: T) -> T {
        return self.embedded.sub(rhs)
    }

    /// Atomically replace the stored value with `value`, returning the old value.
    ///
    /// Uses a *relaxed* memory ordering: atomicity of this single operation is the
    /// only guarantee — no ordering with respect to other events is implied.
    ///
    /// - Parameter value: The new value to store.
    /// - Returns: The value held immediately before the exchange.
    @inlinable
    public func exchange(with value: T) -> T {
        return self.embedded.exchange(with: value)
    }

    /// Atomically read the stored value.
    ///
    /// Uses a *relaxed* memory ordering: atomicity of this single operation is the
    /// only guarantee — no ordering with respect to other events is implied.
    ///
    /// - Returns: The current value of this object.
    @inlinable
    public func load() -> T {
        return self.embedded.load()
    }

    /// Atomically overwrite the stored value with `value`.
    ///
    /// Uses a *relaxed* memory ordering: atomicity of this single operation is the
    /// only guarantee — no ordering with respect to other events is implied.
    ///
    /// - Parameter value: The new value to store.
    @inlinable
    public func store(_ value: T) {
        self.embedded.store(value)
    }

    deinit {
        // Single owner of the embedded atomic: release its heap storage here.
        self.embedded.destroy()
    }
}
291 | | |
// Safety: all access to the embedded storage goes through atomic C operations.
@available(*, deprecated)
extension Atomic: @unchecked Sendable where T: Sendable {}
294 | | |
/// The protocol that all types that can be made atomic must conform to.
///
/// **Do not add conformance to this protocol for arbitrary types**. Only a small range
/// of types have appropriate atomic operations supported by the CPU, and those types
/// already have conformances implemented.
@preconcurrency
public protocol AtomicPrimitive {
    /// Allocates atomic storage initialised to the given value and returns an opaque handle to it.
    static var atomic_create: @Sendable (Self) -> OpaquePointer { get }
    /// Frees storage previously returned by `atomic_create`.
    static var atomic_destroy: @Sendable (OpaquePointer) -> Void { get }
    /// Strong compare-and-exchange of (storage, expected, desired); returns whether the swap happened.
    static var atomic_compare_and_exchange: @Sendable (OpaquePointer, Self, Self) -> Bool { get }
    /// Atomic add; returns the previous value.
    static var atomic_add: @Sendable (OpaquePointer, Self) -> Self { get }
    /// Atomic subtract; returns the previous value.
    static var atomic_sub: @Sendable (OpaquePointer, Self) -> Self { get }
    /// Atomic exchange; returns the previous value.
    static var atomic_exchange: @Sendable (OpaquePointer, Self) -> Self { get }
    /// Atomic load of the current value.
    static var atomic_load: @Sendable (OpaquePointer) -> Self { get }
    /// Atomic store of a new value.
    static var atomic_store: @Sendable (OpaquePointer, Self) -> Void { get }
}
311 | | |
// `Bool` atomics backed by the CNIOAtomics `_Bool` C shims.
extension Bool: AtomicPrimitive {
    public static let atomic_create = catmc_atomic__Bool_create
    public static let atomic_destroy = catmc_atomic__Bool_destroy
    public static let atomic_compare_and_exchange = catmc_atomic__Bool_compare_and_exchange
    public static let atomic_add = catmc_atomic__Bool_add
    public static let atomic_sub = catmc_atomic__Bool_sub
    public static let atomic_exchange = catmc_atomic__Bool_exchange
    public static let atomic_load = catmc_atomic__Bool_load
    public static let atomic_store = catmc_atomic__Bool_store
}
322 | | |
// `Int8` atomics backed by the CNIOAtomics `int_least8_t` C shims.
extension Int8: AtomicPrimitive {
    public static let atomic_create = catmc_atomic_int_least8_t_create
    public static let atomic_destroy = catmc_atomic_int_least8_t_destroy
    public static let atomic_compare_and_exchange = catmc_atomic_int_least8_t_compare_and_exchange
    public static let atomic_add = catmc_atomic_int_least8_t_add
    public static let atomic_sub = catmc_atomic_int_least8_t_sub
    public static let atomic_exchange = catmc_atomic_int_least8_t_exchange
    public static let atomic_load = catmc_atomic_int_least8_t_load
    public static let atomic_store = catmc_atomic_int_least8_t_store
}
333 | | |
// `UInt8` atomics backed by the CNIOAtomics `uint_least8_t` C shims.
extension UInt8: AtomicPrimitive {
    public static let atomic_create = catmc_atomic_uint_least8_t_create
    public static let atomic_destroy = catmc_atomic_uint_least8_t_destroy
    public static let atomic_compare_and_exchange = catmc_atomic_uint_least8_t_compare_and_exchange
    public static let atomic_add = catmc_atomic_uint_least8_t_add
    public static let atomic_sub = catmc_atomic_uint_least8_t_sub
    public static let atomic_exchange = catmc_atomic_uint_least8_t_exchange
    public static let atomic_load = catmc_atomic_uint_least8_t_load
    public static let atomic_store = catmc_atomic_uint_least8_t_store
}
344 | | |
// `Int16` atomics backed by the CNIOAtomics `int_least16_t` C shims.
extension Int16: AtomicPrimitive {
    public static let atomic_create = catmc_atomic_int_least16_t_create
    public static let atomic_destroy = catmc_atomic_int_least16_t_destroy
    public static let atomic_compare_and_exchange = catmc_atomic_int_least16_t_compare_and_exchange
    public static let atomic_add = catmc_atomic_int_least16_t_add
    public static let atomic_sub = catmc_atomic_int_least16_t_sub
    public static let atomic_exchange = catmc_atomic_int_least16_t_exchange
    public static let atomic_load = catmc_atomic_int_least16_t_load
    public static let atomic_store = catmc_atomic_int_least16_t_store
}
355 | | |
// `UInt16` atomics backed by the CNIOAtomics `uint_least16_t` C shims.
extension UInt16: AtomicPrimitive {
    public static let atomic_create = catmc_atomic_uint_least16_t_create
    public static let atomic_destroy = catmc_atomic_uint_least16_t_destroy
    public static let atomic_compare_and_exchange = catmc_atomic_uint_least16_t_compare_and_exchange
    public static let atomic_add = catmc_atomic_uint_least16_t_add
    public static let atomic_sub = catmc_atomic_uint_least16_t_sub
    public static let atomic_exchange = catmc_atomic_uint_least16_t_exchange
    public static let atomic_load = catmc_atomic_uint_least16_t_load
    public static let atomic_store = catmc_atomic_uint_least16_t_store
}
366 | | |
// `Int32` atomics backed by the CNIOAtomics `int_least32_t` C shims.
extension Int32: AtomicPrimitive {
    public static let atomic_create = catmc_atomic_int_least32_t_create
    public static let atomic_destroy = catmc_atomic_int_least32_t_destroy
    public static let atomic_compare_and_exchange = catmc_atomic_int_least32_t_compare_and_exchange
    public static let atomic_add = catmc_atomic_int_least32_t_add
    public static let atomic_sub = catmc_atomic_int_least32_t_sub
    public static let atomic_exchange = catmc_atomic_int_least32_t_exchange
    public static let atomic_load = catmc_atomic_int_least32_t_load
    public static let atomic_store = catmc_atomic_int_least32_t_store
}
377 | | |
// `UInt32` atomics backed by the CNIOAtomics `uint_least32_t` C shims.
extension UInt32: AtomicPrimitive {
    public static let atomic_create = catmc_atomic_uint_least32_t_create
    public static let atomic_destroy = catmc_atomic_uint_least32_t_destroy
    public static let atomic_compare_and_exchange = catmc_atomic_uint_least32_t_compare_and_exchange
    public static let atomic_add = catmc_atomic_uint_least32_t_add
    public static let atomic_sub = catmc_atomic_uint_least32_t_sub
    public static let atomic_exchange = catmc_atomic_uint_least32_t_exchange
    public static let atomic_load = catmc_atomic_uint_least32_t_load
    public static let atomic_store = catmc_atomic_uint_least32_t_store
}
388 | | |
// `Int64` atomics backed by the CNIOAtomics C `long long` shims.
extension Int64: AtomicPrimitive {
    public static let atomic_create = catmc_atomic_long_long_create
    public static let atomic_destroy = catmc_atomic_long_long_destroy
    public static let atomic_compare_and_exchange = catmc_atomic_long_long_compare_and_exchange
    public static let atomic_add = catmc_atomic_long_long_add
    public static let atomic_sub = catmc_atomic_long_long_sub
    public static let atomic_exchange = catmc_atomic_long_long_exchange
    public static let atomic_load = catmc_atomic_long_long_load
    public static let atomic_store = catmc_atomic_long_long_store
}
399 | | |
// `UInt64` atomics backed by the CNIOAtomics C `unsigned long long` shims.
extension UInt64: AtomicPrimitive {
    public static let atomic_create = catmc_atomic_unsigned_long_long_create
    public static let atomic_destroy = catmc_atomic_unsigned_long_long_destroy
    public static let atomic_compare_and_exchange = catmc_atomic_unsigned_long_long_compare_and_exchange
    public static let atomic_add = catmc_atomic_unsigned_long_long_add
    public static let atomic_sub = catmc_atomic_unsigned_long_long_sub
    public static let atomic_exchange = catmc_atomic_unsigned_long_long_exchange
    public static let atomic_load = catmc_atomic_unsigned_long_long_load
    public static let atomic_store = catmc_atomic_unsigned_long_long_store
}
410 | | |
// Platform-dependent backing for Swift's word-sized integers: on Windows
// (LLP64, where C `long` is 32-bit) `Int`/`UInt` use the `intptr_t`/`uintptr_t`
// shims; everywhere else they use the C `long`/`unsigned long` shims.
#if os(Windows)
extension Int: AtomicPrimitive {
    public static let atomic_create = catmc_atomic_intptr_t_create
    public static let atomic_destroy = catmc_atomic_intptr_t_destroy
    public static let atomic_compare_and_exchange = catmc_atomic_intptr_t_compare_and_exchange
    public static let atomic_add = catmc_atomic_intptr_t_add
    public static let atomic_sub = catmc_atomic_intptr_t_sub
    public static let atomic_exchange = catmc_atomic_intptr_t_exchange
    public static let atomic_load = catmc_atomic_intptr_t_load
    public static let atomic_store = catmc_atomic_intptr_t_store
}

extension UInt: AtomicPrimitive {
    public static let atomic_create = catmc_atomic_uintptr_t_create
    public static let atomic_destroy = catmc_atomic_uintptr_t_destroy
    public static let atomic_compare_and_exchange = catmc_atomic_uintptr_t_compare_and_exchange
    public static let atomic_add = catmc_atomic_uintptr_t_add
    public static let atomic_sub = catmc_atomic_uintptr_t_sub
    public static let atomic_exchange = catmc_atomic_uintptr_t_exchange
    public static let atomic_load = catmc_atomic_uintptr_t_load
    public static let atomic_store = catmc_atomic_uintptr_t_store
}
#else
extension Int: AtomicPrimitive {
    public static let atomic_create = catmc_atomic_long_create
    public static let atomic_destroy = catmc_atomic_long_destroy
    public static let atomic_compare_and_exchange = catmc_atomic_long_compare_and_exchange
    public static let atomic_add = catmc_atomic_long_add
    public static let atomic_sub = catmc_atomic_long_sub
    public static let atomic_exchange = catmc_atomic_long_exchange
    public static let atomic_load = catmc_atomic_long_load
    public static let atomic_store = catmc_atomic_long_store
}

extension UInt: AtomicPrimitive {
    public static let atomic_create = catmc_atomic_unsigned_long_create
    public static let atomic_destroy = catmc_atomic_unsigned_long_destroy
    public static let atomic_compare_and_exchange = catmc_atomic_unsigned_long_compare_and_exchange
    public static let atomic_add = catmc_atomic_unsigned_long_add
    public static let atomic_sub = catmc_atomic_unsigned_long_sub
    public static let atomic_exchange = catmc_atomic_unsigned_long_exchange
    public static let atomic_load = catmc_atomic_unsigned_long_load
    public static let atomic_store = catmc_atomic_unsigned_long_store
}
#endif
456 | | |
/// `AtomicBox` is a heap-allocated box which allows lock-free access to an instance of a Swift class.
///
/// - warning: The use of `AtomicBox` should be avoided because it requires an implementation of a spin-lock
///            (more precisely a CAS loop) to operate correctly.
@available(
    *,
    deprecated,
    message: "AtomicBox is deprecated without replacement because the original implementation doesn't work."
)
@available(OpenBSD, unavailable, message: "malloc_size is unavailable.")
public final class AtomicBox<T: AnyObject> {
    // Holds the bit pattern of an unmanaged (+1 retained) reference to the boxed
    // object. The value 0 is a marker meaning "a reader currently owns the
    // reference" (see `load`), so other operations spin until it is non-zero.
    // NOTE(review): per the deprecation message above this scheme is known to be
    // broken; treat these comments as describing intent, not a sound contract.
    private let storage: NIOAtomic<UInt>

    /// Boxes `value`, taking a +1 retain that lives as long as the box.
    public init(value: T) {
        let ptr = Unmanaged<T>.passRetained(value)
        self.storage = NIOAtomic.makeAtomic(value: UInt(bitPattern: ptr.toOpaque()))
    }

    deinit {
        // Swap a poison sentinel (0xdeadbee) into storage so stale accesses are
        // recognisable, then balance the +1 retain held on the boxed object.
        let oldPtrBits = self.storage.exchange(with: 0xdeadbee)
        let oldPtr = Unmanaged<T>.fromOpaque(UnsafeRawPointer(bitPattern: oldPtrBits)!)
        oldPtr.release()
    }

    /// Atomically compares the value against `expected` and, if they are equal,
    /// replaces the value with `desired`.
    ///
    /// Comparison is by object *identity*: the raw pointer bits of `expected` are
    /// compared against the stored bits, not `==`.
    ///
    /// This implementation conforms to C11's `atomic_compare_exchange_strong`. This
    /// means that the compare-and-swap will always succeed if `expected` is equal to
    /// value. Additionally, it uses a *sequentially consistent ordering*. For more
    /// details on atomic memory models, check the documentation for C11's
    /// `stdatomic.h`.
    ///
    /// - warning: The implementation contains a _Compare and Exchange loop_, i.e. it may busy wait with
    ///            100% CPU load.
    ///
    /// - Parameter expected: The value that this object must currently hold for the
    ///                       compare-and-swap to succeed.
    /// - Parameter desired: The new value that this object will hold if the compare
    ///                      succeeds.
    /// - Returns: `True` if the exchange occurred, or `False` if `expected` did not
    ///            match the current value and so no exchange occurred.
    public func compareAndExchange(expected: T, desired: T) -> Bool {
        // Keep `desired` alive for the whole loop: we only retain it *after* the
        // CAS succeeds, so nothing else must be able to deallocate it meanwhile.
        withExtendedLifetime(desired) {
            let expectedPtr = Unmanaged<T>.passUnretained(expected)
            let desiredPtr = Unmanaged<T>.passUnretained(desired)
            let expectedPtrBits = UInt(bitPattern: expectedPtr.toOpaque())
            let desiredPtrBits = UInt(bitPattern: desiredPtr.toOpaque())

            while true {
                if self.storage.compareAndExchange(expected: expectedPtrBits, desired: desiredPtrBits) {
                    // Success: transfer the box's +1 from the old object to the new
                    // one (skipped when swapping an object for itself).
                    if desiredPtrBits != expectedPtrBits {
                        _ = desiredPtr.retain()
                        expectedPtr.release()
                    }
                    return true
                } else {
                    let currentPtrBits = self.storage.load()
                    // 0 means a concurrent `load`/`exchange` temporarily owns the
                    // reference; if the current bits equal `expected` the CAS lost a
                    // race and is worth retrying. In both cases yield and spin.
                    if currentPtrBits == 0 || currentPtrBits == expectedPtrBits {
                        sys_sched_yield()
                        continue
                    } else {
                        return false
                    }
                }
            }
        }
    }

    /// Atomically exchanges `value` for the current value of this object.
    ///
    /// This implementation uses a *relaxed* memory ordering. This guarantees nothing
    /// more than that this operation is atomic: there is no guarantee that any other
    /// event will be ordered before or after this one.
    ///
    /// - warning: The implementation of `exchange` contains a _Compare and Exchange loop_, ie. it may busy wait with
    ///            100% CPU load.
    ///
    /// - Parameter value: The new value to set this object to.
    /// - Returns: The value previously held by this object.
    public func exchange(with value: T) -> T {
        // Retain up front: the box takes ownership of `value` once the CAS lands.
        let newPtr = Unmanaged<T>.passRetained(value)
        let newPtrBits = UInt(bitPattern: newPtr.toOpaque())

        // step 1: We need to actually CAS loop here to swap out a non-0 value with the new one.
        // (0 marks the reference as temporarily owned by a concurrent `load`.)
        var oldPtrBits: UInt = 0
        while true {
            let speculativeVal = self.storage.load()
            guard speculativeVal != 0 else {
                sys_sched_yield()
                continue
            }
            if self.storage.compareAndExchange(expected: speculativeVal, desired: newPtrBits) {
                oldPtrBits = speculativeVal
                break
            }
        }

        // step 2: After having gained 'ownership' of the old value, we can release the Unmanaged.
        let oldPtr = Unmanaged<T>.fromOpaque(UnsafeRawPointer(bitPattern: oldPtrBits)!)
        return oldPtr.takeRetainedValue()
    }

    /// Atomically loads and returns the value of this object.
    ///
    /// This implementation uses a *relaxed* memory ordering. This guarantees nothing
    /// more than that this operation is atomic: there is no guarantee that any other
    /// event will be ordered before or after this one.
    ///
    /// - warning: The implementation contains a _Compare and Exchange loop_, ie. it may busy wait with
    ///            100% CPU load.
    ///
    /// - Returns: The value of this object
    public func load() -> T {
        // step 1: We need to gain ownership of the value by successfully swapping 0 (marker value) in.
        var ptrBits: UInt = 0
        while true {
            let speculativeVal = self.storage.load()
            guard speculativeVal != 0 else {
                // Another load/exchange currently owns the reference; spin.
                sys_sched_yield()
                continue
            }
            if self.storage.compareAndExchange(expected: speculativeVal, desired: 0) {
                ptrBits = speculativeVal
                break
            }
        }

        // step 2: We now consumed a +1'd version of val, so we have all the time in the world to retain it.
        let ptr = Unmanaged<T>.fromOpaque(UnsafeRawPointer(bitPattern: ptrBits)!)
        let value = ptr.takeUnretainedValue()

        // step 3: Now, let's exchange it back into the store
        let casWorked = self.storage.compareAndExchange(expected: 0, desired: ptrBits)
        precondition(casWorked) // this _has_ to work because `0` means we own it exclusively.
        return value
    }

    /// Atomically replaces the value of this object with `value`.
    ///
    /// This implementation uses a *relaxed* memory ordering. This guarantees nothing
    /// more than that this operation is atomic: there is no guarantee that any other
    /// event will be ordered before or after this one.
    ///
    /// - warning: The implementation contains a _Compare and Exchange loop_, ie. it may busy wait with
    ///            100% CPU load.
    ///
    /// - Parameter value: The new value to set the object to.
    public func store(_ value: T) {
        // Delegate to `exchange` and drop the returned old value, which releases
        // the box's former +1 retain on it.
        _ = self.exchange(with: value)
    }
}
610 | | |
// Safety: all shared state lives in the `NIOAtomic<UInt>` storage and is only
// manipulated via atomic operations (subject to the known deficiencies noted
// in the deprecation message).
@available(*, deprecated)
@available(OpenBSD, unavailable, message: "malloc_size is unavailable.")
extension AtomicBox: @unchecked Sendable where T: Sendable {}