/src/httpd/srclib/apr/atomic/unix/builtins.c
/* Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "apr_arch_atomic.h"

#ifdef USE_ATOMICS_BUILTINS

/* x86(-64) and s390(x) have strongly ordered hardware memory models, so
 * plain loads and stores suffice for 32-bit reads and writes.  Everything
 * else is treated as weakly ordered and must go through the builtins. */
#if defined(__i386__) || defined(__x86_64__) \
    || defined(__s390__) || defined(__s390x__)
#define WEAK_MEMORY_ORDERING 0
#else
#define WEAK_MEMORY_ORDERING 1
#endif

APR_DECLARE(apr_status_t) apr_atomic_init(apr_pool_t *p)
{
#if defined (NEED_ATOMICS_GENERIC64)
    return apr__atomic_generic64_init(p);
#else
    return APR_SUCCESS;
#endif
}

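/* Usage sketch (not part of this file): atomics are initialized once,
 * early in an APR program, e.g.
 *
 *     apr_pool_t *pool;
 *     apr_initialize();
 *     apr_pool_create(&pool, NULL);
 *     apr_atomic_init(pool);
 *
 * On this backend the call is a no-op unless the generic 64-bit fallback
 * (NEED_ATOMICS_GENERIC64) is in use.
 */
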
APR_DECLARE(apr_uint32_t) apr_atomic_read32(volatile apr_uint32_t *mem)
{
#if HAVE__ATOMIC_BUILTINS
    return __atomic_load_n(mem, __ATOMIC_SEQ_CST);
#elif WEAK_MEMORY_ORDERING
    /* No __sync_load() available => apr_atomic_add32(mem, 0) */
    return __sync_fetch_and_add(mem, 0);
#else
    return *mem;
#endif
}

APR_DECLARE(void) apr_atomic_set32(volatile apr_uint32_t *mem, apr_uint32_t val)
{
#if HAVE__ATOMIC_BUILTINS
    __atomic_store_n(mem, val, __ATOMIC_SEQ_CST);
#elif WEAK_MEMORY_ORDERING
    /* No __sync_store() available => apr_atomic_xchg32(mem, val).
     * __sync_lock_test_and_set() is only an acquire barrier, hence the
     * explicit full barrier before it. */
    __sync_synchronize();
    __sync_lock_test_and_set(mem, val);
#else
    *mem = val;
#endif
}

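/* Usage sketch (illustrative, not part of this file): a "ready" flag.
 * On the __atomic path both operations are sequentially consistent, so a
 * consumer that observes the flag also observes the producer's earlier
 * writes.  compute()/use() are hypothetical.
 *
 *     volatile apr_uint32_t ready = 0;
 *
 *     producer:   data = compute();
 *                 apr_atomic_set32(&ready, 1);
 *
 *     consumer:   while (apr_atomic_read32(&ready) == 0)
 *                     ;  // a real consumer should yield or sleep
 *                 use(data);
 */
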
APR_DECLARE(apr_uint32_t) apr_atomic_add32(volatile apr_uint32_t *mem, apr_uint32_t val)
{
#if HAVE__ATOMIC_BUILTINS
    return __atomic_fetch_add(mem, val, __ATOMIC_SEQ_CST);
#else
    return __sync_fetch_and_add(mem, val);
#endif
}

APR_DECLARE(void) apr_atomic_sub32(volatile apr_uint32_t *mem, apr_uint32_t val)
{
#if HAVE__ATOMIC_BUILTINS
    __atomic_fetch_sub(mem, val, __ATOMIC_SEQ_CST);
#else
    __sync_fetch_and_sub(mem, val);
#endif
}

/* Returns the value of *mem *before* the increment. */
APR_DECLARE(apr_uint32_t) apr_atomic_inc32(volatile apr_uint32_t *mem)
{
#if HAVE__ATOMIC_BUILTINS
    return __atomic_fetch_add(mem, 1, __ATOMIC_SEQ_CST);
#else
    return __sync_fetch_and_add(mem, 1);
#endif
}

/* Note the asymmetry with apr_atomic_inc32(): this returns the *new*
 * value, so a zero return means the count has just reached zero. */
APR_DECLARE(int) apr_atomic_dec32(volatile apr_uint32_t *mem)
{
#if HAVE__ATOMIC_BUILTINS
    return __atomic_sub_fetch(mem, 1, __ATOMIC_SEQ_CST);
#else
    return __sync_sub_and_fetch(mem, 1);
#endif
}

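/* Usage sketch (illustrative, not part of this file): reference counting.
 * Because apr_atomic_dec32() returns the new value (__atomic_sub_fetch /
 * __sync_sub_and_fetch), exactly one thread observes zero and performs
 * the cleanup.  my_obj_t/obj_destroy() are hypothetical.
 *
 *     static void obj_unref(my_obj_t *obj)
 *     {
 *         if (apr_atomic_dec32(&obj->refcount) == 0)
 *             obj_destroy(obj);
 *     }
 */
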
APR_DECLARE(apr_uint32_t) apr_atomic_cas32(volatile apr_uint32_t *mem, apr_uint32_t val,
                                           apr_uint32_t cmp)
{
#if HAVE__ATOMIC_BUILTINS
    __atomic_compare_exchange_n(mem, &cmp, val, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
    return cmp;
#else
    return __sync_val_compare_and_swap(mem, cmp, val);
#endif
}

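/* Usage sketch (illustrative, not part of this file): the classic CAS
 * retry loop, here computing an atomic maximum.  The swap took effect iff
 * the returned value equals the comparand; otherwise retry against the
 * value another thread installed.
 *
 *     static void atomic_max32(volatile apr_uint32_t *mem, apr_uint32_t val)
 *     {
 *         apr_uint32_t old = apr_atomic_read32(mem);
 *         while (old < val) {
 *             apr_uint32_t prev = apr_atomic_cas32(mem, val, old);
 *             if (prev == old)
 *                 break;      // swap succeeded
 *             old = prev;     // lost a race; retry with the fresh value
 *         }
 *     }
 */
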
APR_DECLARE(apr_uint32_t) apr_atomic_xchg32(volatile apr_uint32_t *mem, apr_uint32_t val)
{
#if HAVE__ATOMIC_BUILTINS
    return __atomic_exchange_n(mem, val, __ATOMIC_SEQ_CST);
#else
    __sync_synchronize();
    return __sync_lock_test_and_set(mem, val);
#endif
}

APR_DECLARE(void*) apr_atomic_casptr(void *volatile *mem, void *ptr, const void *cmp)
{
#if HAVE__ATOMIC_BUILTINS
    /* The cast drops const from cmp: on failure the builtin writes the
     * current value of *mem back through its "expected" argument, so cmp
     * always holds the old value by the time it is returned. */
    __atomic_compare_exchange_n(mem, (void *)&cmp, ptr, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
    return (void *)cmp;
#else
    return (void *)__sync_val_compare_and_swap(mem, (void *)cmp, ptr);
#endif
}

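/* Usage sketch (illustrative, not part of this file): race-safe lazy
 * initialization of a shared pointer.  Whichever thread installs its
 * object first wins; a loser destroys its copy and uses the winner's.
 * my_ctx_t/make_ctx()/destroy_ctx() are hypothetical.
 *
 *     static my_ctx_t *get_ctx(void *volatile *slot)
 *     {
 *         my_ctx_t *fresh = make_ctx();
 *         my_ctx_t *prev = apr_atomic_casptr(slot, fresh, NULL);
 *         if (prev != NULL) {     // another thread won the race
 *             destroy_ctx(fresh);
 *             return prev;
 *         }
 *         return fresh;
 *     }
 */
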
APR_DECLARE(void*) apr_atomic_xchgptr(void *volatile *mem, void *ptr)
{
#if HAVE__ATOMIC_BUILTINS
    return __atomic_exchange_n(mem, ptr, __ATOMIC_SEQ_CST);
#else
    __sync_synchronize();
    return __sync_lock_test_and_set(mem, ptr);
#endif
}

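/* Usage sketch (illustrative, not part of this file): single-slot handoff.
 * The exchange is atomic, so each previously published item is reclaimed
 * exactly once even with concurrent publishers.  item_t/item_free() are
 * hypothetical.
 *
 *     static void publish(void *volatile *slot, item_t *fresh)
 *     {
 *         item_t *stale = apr_atomic_xchgptr(slot, fresh);
 *         if (stale != NULL)
 *             item_free(stale);
 *     }
 */
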
#endif /* USE_ATOMICS_BUILTINS */