/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2009 Helge Bahmann
 * Copyright (c) 2012 Tim Blechmann
 * Copyright (c) 2014 Andrey Semashev
 */
/*!
 * \file atomic/detail/ops_msvc_arm.hpp
 *
 * This header contains an implementation of the \c operations template.
 */

#ifndef BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_

#include <intrin.h>
#include <boost/memory_order.hpp>
#include <boost/type_traits/make_signed.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/interlocked.hpp>
#include <boost/atomic/detail/storage_type.hpp>
#include <boost/atomic/detail/operations_fwd.hpp>
#include <boost/atomic/capabilities.hpp>
#include <boost/atomic/detail/ops_msvc_common.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

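// The __iso_volatile_* intrinsics perform plain ISO C++ volatile loads and stores with no
// implied hardware ordering, even under /volatile:ms. All acquire/release semantics are
// therefore added explicitly via the fence helpers defined further below.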
#define BOOST_ATOMIC_DETAIL_ARM_LOAD8(p) __iso_volatile_load8((const volatile __int8*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD16(p) __iso_volatile_load16((const volatile __int16*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD32(p) __iso_volatile_load32((const volatile __int32*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD64(p) __iso_volatile_load64((const volatile __int64*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_STORE8(p, v) __iso_volatile_store8((volatile __int8*)(p), (__int8)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE16(p, v) __iso_volatile_store16((volatile __int16*)(p), (__int16)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE32(p, v) __iso_volatile_store32((volatile __int32*)(p), (__int32)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE64(p, v) __iso_volatile_store64((volatile __int64*)(p), (__int64)(v))

namespace boost {
namespace atomics {
namespace detail {

// A note about memory_order_consume. Technically, this architecture allows us to avoid
// the memory barrier after a consume load, since it supports data dependency ordering.
// However, some compiler optimizations may break seemingly valid code that relies on data
// dependency tracking by injecting bogus branches to aid out-of-order execution.
// This may happen not only in Boost.Atomic code but also in the user's code, which we have
// no control over. See this thread: http://lists.boost.org/Archives/boost/2014/06/213890.php.
// For this reason we promote memory_order_consume to memory_order_acquire.

struct msvc_arm_operations_base
{
    static BOOST_FORCEINLINE void hardware_full_fence() BOOST_NOEXCEPT
    {
        __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
    }

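    // Note: the fence helpers below rely on boost::memory_order constants being encoded as
    // bit flags (e.g. memory_order_acq_rel combining the acquire and release bits, with
    // memory_order_seq_cst including both), which makes the bitwise order tests valid.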
    static BOOST_FORCEINLINE void fence_before_store(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if ((order & memory_order_release) != 0)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after_store(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if (order == memory_order_seq_cst)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after_load(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if ((order & (memory_order_consume | memory_order_acquire)) != 0)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

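    // The expression below relies on the flag encoding of boost::memory_order: OR-ing the
    // success and failure orders combines their constraints, and since the acquire bit is
    // the consume bit shifted left by one, the final term remaps consume onto acquire.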
    static BOOST_FORCEINLINE BOOST_CONSTEXPR memory_order cas_common_order(memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        // Combine order flags together and promote memory_order_consume to memory_order_acquire
        return static_cast< memory_order >(((failure_order | success_order) & ~memory_order_consume) | (((failure_order | success_order) & memory_order_consume) << 1u));
    }
};

template< typename T, typename Derived >
struct msvc_arm_operations :
    public msvc_arm_operations_base
{
    typedef T storage_type;

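    // Negation is performed on the signed representation, presumably so that the two's
    // complement negation is expressed without applying unary minus to an unsigned value,
    // which MSVC would otherwise flag with warning C4146.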
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        typedef typename make_signed< storage_type >::type signed_storage_type;
        return Derived::fetch_add(storage, static_cast< storage_type >(-static_cast< signed_storage_type >(v)), order);
    }

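    // The interlocked compare-exchange intrinsics never fail spuriously, so the weak CAS
    // can simply delegate to the strong one.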
    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        return Derived::compare_exchange_strong(storage, expected, desired, success_order, failure_order);
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!Derived::exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        Derived::store(storage, (storage_type)0, order);
    }

    static BOOST_FORCEINLINE bool is_lock_free(storage_type const volatile&) BOOST_NOEXCEPT
    {
        return true;
    }
};

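// Each operations< N, Signed > specialization below maps the generic atomic interface onto
// the N-byte MSVC interlocked intrinsics, selecting the _RELAXED/_ACQUIRE/_RELEASE flavor
// (or the fully fenced default) according to the requested memory order.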
template< bool Signed >
struct operations< 1u, Signed > :
    public msvc_arm_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > >
{
    typedef msvc_arm_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE8(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD8(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed >
struct operations< 2u, Signed > :
    public msvc_arm_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > >
{
    typedef msvc_arm_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE16(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD16(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed >
struct operations< 4u, Signed > :
    public msvc_arm_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > >
{
    typedef msvc_arm_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE32(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD32(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed >
struct operations< 8u, Signed > :
    public msvc_arm_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > >
{
    typedef msvc_arm_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE64(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD64(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64(&storage, v));
            break;
        }
        return v;
    }
};

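// Any thread fence stronger than relaxed is implemented with a full hardware barrier (DMB ISH).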
BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
{
    BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    if (order != memory_order_relaxed)
        msvc_arm_operations_base::hardware_full_fence();
    BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
}

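// A signal fence only needs to constrain the compiler, not the hardware, since a signal
// handler runs on the same core as the interrupted thread.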
BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
{
    if (order != memory_order_relaxed)
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
}

} // namespace detail
} // namespace atomics
} // namespace boost

#undef BOOST_ATOMIC_DETAIL_ARM_LOAD8
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD16
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD32
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD64
#undef BOOST_ATOMIC_DETAIL_ARM_STORE8
#undef BOOST_ATOMIC_DETAIL_ARM_STORE16
#undef BOOST_ATOMIC_DETAIL_ARM_STORE32
#undef BOOST_ATOMIC_DETAIL_ARM_STORE64

#endif // BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_