/src/libzmq/src/atomic_ptr.hpp
Line | Count | Source |
1 | | /* SPDX-License-Identifier: MPL-2.0 */ |
2 | | |
3 | | #ifndef __ZMQ_ATOMIC_PTR_HPP_INCLUDED__ |
4 | | #define __ZMQ_ATOMIC_PTR_HPP_INCLUDED__ |
5 | | |
6 | | #include "macros.hpp" |
7 | | |
8 | | #if defined ZMQ_FORCE_MUTEXES |
9 | | #define ZMQ_ATOMIC_PTR_MUTEX |
10 | | #elif (defined __cplusplus && __cplusplus >= 201103L) \ |
11 | | || (defined _MSC_VER && _MSC_VER >= 1900) |
12 | | #define ZMQ_ATOMIC_PTR_CXX11 |
13 | | #elif defined ZMQ_HAVE_ATOMIC_INTRINSICS |
14 | | #define ZMQ_ATOMIC_PTR_INTRINSIC |
15 | | #elif (defined __i386__ || defined __x86_64__) && defined __GNUC__ |
16 | | #define ZMQ_ATOMIC_PTR_X86 |
17 | | #elif defined __ARM_ARCH_7A__ && defined __GNUC__ |
18 | | #define ZMQ_ATOMIC_PTR_ARM |
19 | | #elif defined __tile__ |
20 | | #define ZMQ_ATOMIC_PTR_TILE |
21 | | #elif defined ZMQ_HAVE_WINDOWS |
22 | | #define ZMQ_ATOMIC_PTR_WINDOWS |
23 | | #elif (defined ZMQ_HAVE_SOLARIS || defined ZMQ_HAVE_NETBSD \ |
24 | | || defined ZMQ_HAVE_GNU) |
25 | | #define ZMQ_ATOMIC_PTR_ATOMIC_H |
26 | | #else |
27 | | #define ZMQ_ATOMIC_PTR_MUTEX |
28 | | #endif |
29 | | |
30 | | #if defined ZMQ_ATOMIC_PTR_MUTEX |
31 | | #include "mutex.hpp" |
32 | | #elif defined ZMQ_ATOMIC_PTR_CXX11 |
33 | | #include <atomic> |
34 | | #elif defined ZMQ_ATOMIC_PTR_WINDOWS |
35 | | #include "windows.hpp" |
36 | | #elif defined ZMQ_ATOMIC_PTR_ATOMIC_H |
37 | | #include <atomic.h> |
38 | | #elif defined ZMQ_ATOMIC_PTR_TILE |
39 | | #include <arch/atomic.h> |
40 | | #endif |
41 | | |
42 | | namespace zmq |
43 | | { |
44 | | #if !defined ZMQ_ATOMIC_PTR_CXX11 |
//  Atomically exchange the pointer stored at *ptr_ with val_ and return
//  the value it held before the exchange.  Exactly one implementation is
//  selected at compile time by the ZMQ_ATOMIC_PTR_* macros defined at the
//  top of this file.  When the mutex fallback is in use, the caller must
//  also pass the mutex that guards the pointer (_sync).
inline void *atomic_xchg_ptr (void **ptr_,
                              void *const val_
#if defined ZMQ_ATOMIC_PTR_MUTEX
                              ,
                              mutex_t &_sync
#endif
                              ) ZMQ_NOEXCEPT
{
#if defined ZMQ_ATOMIC_PTR_WINDOWS
    //  Win32 interlocked primitive; full-barrier pointer exchange.
    return InterlockedExchangePointer ((PVOID *) ptr_, val_);
#elif defined ZMQ_ATOMIC_PTR_INTRINSIC
    //  GCC/Clang builtin with acquire-release ordering.
    return __atomic_exchange_n (ptr_, val_, __ATOMIC_ACQ_REL);
#elif defined ZMQ_ATOMIC_PTR_ATOMIC_H
    //  Solaris/NetBSD/GNU <atomic.h> flavour.
    return atomic_swap_ptr (ptr_, val_);
#elif defined ZMQ_ATOMIC_PTR_TILE
    //  Tilera architecture intrinsic.
    return arch_atomic_exchange (ptr_, val_);
#elif defined ZMQ_ATOMIC_PTR_X86
    void *old;
    //  xchg with a memory operand is implicitly locking on x86; the
    //  explicit "lock" prefix here is redundant but harmless.
    __asm__ volatile ("lock; xchg %0, %2"
                      : "=r"(old), "=m"(*ptr_)
                      : "m"(*ptr_), "0"(val_));
    return old;
#elif defined ZMQ_ATOMIC_PTR_ARM
    void *old;
    unsigned int flag;
    //  ARMv7-A LDREX/STREX retry loop: STREX sets 'flag' non-zero if the
    //  exclusive store failed, in which case the loop retries.  The DMB
    //  barriers before and after provide full ordering around the swap.
    __asm__ volatile ("    dmb     sy\n\t"
                      "1: ldrex   %1, [%3]\n\t"
                      "    strex   %0, %4, [%3]\n\t"
                      "    teq     %0, #0\n\t"
                      "    bne     1b\n\t"
                      "    dmb     sy\n\t"
                      : "=&r"(flag), "=&r"(old), "+Qo"(*ptr_)
                      : "r"(ptr_), "r"(val_)
                      : "cc");
    return old;
#elif defined ZMQ_ATOMIC_PTR_MUTEX
    //  Portable fallback: plain read-modify-write under the caller's mutex.
    _sync.lock ();
    void *old = *ptr_;
    *ptr_ = val_;
    _sync.unlock ();
    return old;
#else
#error atomic_ptr is not implemented for this platform
#endif
}
90 | | |
//  Atomic compare-and-swap on a pointer: if *ptr_ equals cmp_, store val_.
//  In all cases the value *ptr_ held before the operation is returned, so
//  the caller can detect success by comparing the result against cmp_.
//  Implementation is selected by the same ZMQ_ATOMIC_PTR_* macros as
//  atomic_xchg_ptr; the mutex fallback again needs the guarding _sync.
inline void *atomic_cas (void *volatile *ptr_,
                         void *cmp_,
                         void *val_
#if defined ZMQ_ATOMIC_PTR_MUTEX
                         ,
                         mutex_t &_sync
#endif
                         ) ZMQ_NOEXCEPT
{
#if defined ZMQ_ATOMIC_PTR_WINDOWS
    //  Returns the initial value of *ptr_, matching this function's contract.
    return InterlockedCompareExchangePointer ((volatile PVOID *) ptr_, val_,
                                              cmp_);
#elif defined ZMQ_ATOMIC_PTR_INTRINSIC
    //  On failure the builtin writes the observed value into 'old'; on
    //  success 'old' keeps cmp_ — either way 'old' is the prior value.
    //  NOTE(review): success order is RELEASE with ACQUIRE on failure,
    //  weaker than the ACQ_REL used elsewhere — presumably intentional
    //  upstream; confirm before tightening.
    void *old = cmp_;
    __atomic_compare_exchange_n (ptr_, &old, val_, false, __ATOMIC_RELEASE,
                                 __ATOMIC_ACQUIRE);
    return old;
#elif defined ZMQ_ATOMIC_PTR_ATOMIC_H
    return atomic_cas_ptr (ptr_, cmp_, val_);
#elif defined ZMQ_ATOMIC_PTR_TILE
    return arch_atomic_val_compare_and_exchange (ptr_, cmp_, val_);
#elif defined ZMQ_ATOMIC_PTR_X86
    //  cmpxchg compares eax/rax (here: cmp_) against *ptr_ and leaves the
    //  original memory value in eax/rax, which we return as 'old'.
    void *old;
    __asm__ volatile ("lock; cmpxchg %2, %3"
                      : "=a"(old), "=m"(*ptr_)
                      : "r"(val_), "m"(*ptr_), "0"(cmp_)
                      : "cc");
    return old;
#elif defined ZMQ_ATOMIC_PTR_ARM
    //  LDREX/STREXEQ loop: store only if the loaded value equals cmp_;
    //  retry while the conditional exclusive store reports failure.
    void *old;
    unsigned int flag;
    __asm__ volatile ("    dmb     sy\n\t"
                      "1: ldrex   %1, [%3]\n\t"
                      "    mov     %0, #0\n\t"
                      "    teq     %1, %4\n\t"
                      "    it      eq\n\t"
                      "    strexeq %0, %5, [%3]\n\t"
                      "    teq     %0, #0\n\t"
                      "    bne     1b\n\t"
                      "    dmb     sy\n\t"
                      : "=&r"(flag), "=&r"(old), "+Qo"(*ptr_)
                      : "r"(ptr_), "r"(cmp_), "r"(val_)
                      : "cc");
    return old;
#elif defined ZMQ_ATOMIC_PTR_MUTEX
    //  Portable fallback: compare and conditionally store under the mutex.
    _sync.lock ();
    void *old = *ptr_;
    if (*ptr_ == cmp_)
        *ptr_ = val_;
    _sync.unlock ();
    return old;
#else
#error atomic_ptr is not implemented for this platform
#endif
}
146 | | #endif |
147 | | |
148 | | // This class encapsulates several atomic operations on pointers. |
149 | | |
//  This class encapsulates several atomic operations on pointers.
//  On C++11-capable builds the storage is a std::atomic<T *>; otherwise a
//  volatile pointer is used together with the free functions above (plus a
//  per-object mutex when the mutex fallback is active).
template <typename T> class atomic_ptr_t
{
  public:
    //  Initialise atomic pointer
    atomic_ptr_t () ZMQ_NOEXCEPT { _ptr = NULL; }

    //  Set value of atomic pointer in a non-threadsafe way
    //  Use this function only when you are sure that at most one
    //  thread is accessing the pointer at the moment.
    void set (T *ptr_) ZMQ_NOEXCEPT { _ptr = ptr_; }

    //  Perform atomic 'exchange pointers' operation. Pointer is set
    //  to the 'val_' value. Old value is returned.
    T *xchg (T *val_) ZMQ_NOEXCEPT
    {
#if defined ZMQ_ATOMIC_PTR_CXX11
        return _ptr.exchange (val_, std::memory_order_acq_rel);
#else
        return (T *) atomic_xchg_ptr ((void **) &_ptr, val_
#if defined ZMQ_ATOMIC_PTR_MUTEX
                                      ,
                                      _sync
#endif
        );
#endif
    }

    //  Perform atomic 'compare and swap' operation on the pointer.
    //  The pointer is compared to 'cmp' argument and if they are
    //  equal, its value is set to 'val_'. Old value of the pointer
    //  is returned.
    T *cas (T *cmp_, T *val_) ZMQ_NOEXCEPT
    {
#if defined ZMQ_ATOMIC_PTR_CXX11
        //  compare_exchange_strong updates cmp_ to the observed value on
        //  failure and leaves it equal to the old value on success, so
        //  returning cmp_ yields the prior pointer in both cases.
        _ptr.compare_exchange_strong (cmp_, val_, std::memory_order_acq_rel);
        return cmp_;
#else
        return (T *) atomic_cas ((void **) &_ptr, cmp_, val_
#if defined ZMQ_ATOMIC_PTR_MUTEX
                                 ,
                                 _sync
#endif
        );
#endif
    }

  private:
#if defined ZMQ_ATOMIC_PTR_CXX11
    std::atomic<T *> _ptr;
#else
    volatile T *_ptr;
#endif

#if defined ZMQ_ATOMIC_PTR_MUTEX
    //  Guards _ptr when no native atomic operation is available.
    mutex_t _sync;
#endif

#if !defined ZMQ_ATOMIC_PTR_CXX11
    //  std::atomic members already make the class non-copyable on the
    //  CXX11 path; this macro enforces the same for the legacy paths.
    ZMQ_NON_COPYABLE_NOR_MOVABLE (atomic_ptr_t)
#endif
};
211 | | |
//  An atomically accessed int, built on the same platform dispatch as
//  atomic_ptr_t: std::atomic<int> on C++11 builds, otherwise a volatile
//  ptrdiff_t manipulated through atomic_xchg_ptr/atomic_cas (the value is
//  smuggled through a void * via ptrdiff_t casts).
struct atomic_value_t
{
    atomic_value_t (const int value_) ZMQ_NOEXCEPT : _value (value_) {}

    //  The source is read atomically, but the copy as a whole is not an
    //  atomic operation.
    atomic_value_t (const atomic_value_t &src_) ZMQ_NOEXCEPT
        : _value (src_.load ())
    {
    }

    //  Atomically replace the stored value (release semantics on the
    //  CXX11 path; the legacy path reuses the pointer-exchange helper).
    void store (const int value_) ZMQ_NOEXCEPT
    {
#if defined ZMQ_ATOMIC_PTR_CXX11
        _value.store (value_, std::memory_order_release);
#else
        atomic_xchg_ptr ((void **) &_value, (void *) (ptrdiff_t) value_
#if defined ZMQ_ATOMIC_PTR_MUTEX
                         ,
                         _sync
#endif
        );
#endif
    }

    //  Atomically read the stored value (acquire semantics on the CXX11
    //  path).  The legacy path uses CAS (0, 0): it never changes the value
    //  but always returns the current one.
    int load () const ZMQ_NOEXCEPT
    {
#if defined ZMQ_ATOMIC_PTR_CXX11
        return _value.load (std::memory_order_acquire);
#else
        return (int) (ptrdiff_t) atomic_cas ((void **) &_value, 0, 0
#if defined ZMQ_ATOMIC_PTR_MUTEX
                                             ,
#if defined __SUNPRO_CC
                                             //  SunPro rejects binding the
                                             //  mutable member through a
                                             //  const this without a cast.
                                             const_cast<mutex_t &> (_sync)
#else
                                             _sync
#endif
#endif
        );
#endif
    }

  private:
#if defined ZMQ_ATOMIC_PTR_CXX11
    std::atomic<int> _value;
#else
    volatile ptrdiff_t _value;
#endif

#if defined ZMQ_ATOMIC_PTR_MUTEX
    //  mutable so the const load () can lock it.
    mutable mutex_t _sync;
#endif

  private:
    //  Deliberately declared but not defined: pre-C++11 idiom to make the
    //  type non-assignable (copy-construction stays allowed above).
    atomic_value_t &operator= (const atomic_value_t &src_);
};
267 | | } |
268 | | |
269 | | // Remove macros local to this file. |
270 | | #undef ZMQ_ATOMIC_PTR_MUTEX |
271 | | #undef ZMQ_ATOMIC_PTR_INTRINSIC |
272 | | #undef ZMQ_ATOMIC_PTR_CXX11 |
273 | | #undef ZMQ_ATOMIC_PTR_X86 |
274 | | #undef ZMQ_ATOMIC_PTR_ARM |
275 | | #undef ZMQ_ATOMIC_PTR_TILE |
276 | | #undef ZMQ_ATOMIC_PTR_WINDOWS |
277 | | #undef ZMQ_ATOMIC_PTR_ATOMIC_H |
278 | | |
279 | | #endif |