/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2009 Helge Bahmann
 * Copyright (c) 2012 Tim Blechmann
 * Copyright (c) 2014 Andrey Semashev
 */
/*!
 * \file   atomic/detail/ops_gcc_x86.hpp
 *
 * This header contains the implementation of the \c operations template.
 */

#ifndef BOOST_ATOMIC_DETAIL_OPS_GCC_X86_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_OPS_GCC_X86_HPP_INCLUDED_

#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/storage_type.hpp>
#include <boost/atomic/detail/operations_fwd.hpp>
#include <boost/atomic/capabilities.hpp>
#if defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG8B) || defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG16B)
#include <boost/atomic/detail/ops_gcc_x86_dcas.hpp>
#include <boost/atomic/detail/ops_cas_based.hpp>
#endif

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

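// BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER names the "d" register (rdx on x86-64,
// edx on 32-bit x86). The cmpxchg-based fetch_and/fetch_or/fetch_xor loops
// below use it as a scratch register and list it in their clobber lists.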
#if defined(__x86_64__)
#define BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER "rdx"
#else
#define BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER "edx"
#endif

namespace boost {
namespace atomics {
namespace detail {

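// Base class providing the compiler fences used by the size-specific
// specializations. x86 has a strongly ordered memory model, so acquire and
// release semantics only require that the compiler does not move memory
// accesses across the atomic operation; an empty asm statement with a
// "memory" clobber is enough for that. A real hardware fence (or an
// implicitly locked instruction) is needed only for seq_cst, see store()
// and thread_fence() below.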
struct gcc_x86_operations_base
{
    static BOOST_FORCEINLINE void fence_before(memory_order order) BOOST_NOEXCEPT
    {
        if ((order & memory_order_release) != 0)
            __asm__ __volatile__ ("" ::: "memory");
    }

    static BOOST_FORCEINLINE void fence_after(memory_order order) BOOST_NOEXCEPT
    {
        if ((order & memory_order_acquire) != 0)
            __asm__ __volatile__ ("" ::: "memory");
    }
};

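// CRTP base class for the size-specific specializations of the operations
// template. It provides the members that do not depend on the operand width:
// store/load (plain accesses bracketed by compiler fences), fetch_sub
// (forwarded to fetch_add with a negated argument), compare_exchange_weak
// (forwarded to the strong version, since x86 CAS does not fail spuriously),
// test_and_set, clear and is_lock_free. Note that a seq_cst store is routed
// through Derived::exchange(), so it compiles to a single xchg instruction,
// which is implicitly locked and acts as a full barrier.
//
// Illustrative sketch (not part of the original header): a seq_cst store on a
// 4-byte value,
//
//     operations< 4u, false >::store(s, v, memory_order_seq_cst);
//
// dispatches to operations< 4u, false >::exchange() and thus emits "xchgl".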
template< typename T, typename Derived >
struct gcc_x86_operations :
    public gcc_x86_operations_base
{
    typedef T storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        if (order != memory_order_seq_cst)
        {
            fence_before(order);
            storage = v;
            fence_after(order);
        }
        else
        {
            Derived::exchange(storage, v, order);
        }
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = storage;
        fence_after(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return Derived::fetch_add(storage, -v, order);
    }

    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        return Derived::compare_exchange_strong(storage, expected, desired, success_order, failure_order);
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!Derived::exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, (storage_type)0, order);
    }

    static BOOST_FORCEINLINE bool is_lock_free(storage_type const volatile&) BOOST_NOEXCEPT
    {
        return true;
    }
};

template< bool Signed >
struct operations< 1u, Signed > :
    public gcc_x86_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > >
{
    typedef gcc_x86_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; xaddb %0, %1"
            : "+q" (v), "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "xchgb %0, %1"
            : "+q" (v), "+m" (storage)
            :
            : "memory"
        );
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected;
        bool success;
        __asm__ __volatile__
        (
            "lock; cmpxchgb %3, %1\n\t"
            "sete %2"
            : "+a" (previous), "+m" (storage), "=q" (success)
            : "q" (desired)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        expected = previous;
        return success;
    }

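    // Helper macro for the fetch_and/fetch_or/fetch_xor operations below. It
    // implements the read-modify-write as a "lock cmpxchg" retry loop: the
    // accumulator register holds the current value (the [res] operand), the
    // scratch d register receives op(current, argument), and the loop repeats
    // until cmpxchg succeeds, i.e. until no other thread has modified the
    // storage in the meantime. The leading xor zeroes the scratch register
    // before entering the loop.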
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, result)\
    __asm__ __volatile__\
    (\
        "xor %%" BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER ", %%" BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER "\n\t"\
        ".align 16\n\t"\
        "1: movb %[arg], %%dl\n\t"\
        op " %%al, %%dl\n\t"\
        "lock; cmpxchgb %%dl, %[storage]\n\t"\
        "jne 1b"\
        : [res] "+a" (result), [storage] "+m" (storage)\
        : [arg] "q" (argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER, "memory"\
    )

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andb", v, res);
        return res;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orb", v, res);
        return res;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorb", v, res);
        return res;
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP
};

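// The 2-, 4- and (on x86-64) 8-byte specializations below follow the same
// pattern as the 1-byte one above, using the word/dword/qword forms of the
// instructions (xaddw/xaddl/xaddq, xchgw/xchgl/xchgq, cmpxchgw/cmpxchgl/
// cmpxchgq) and the correspondingly sized registers.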
template< bool Signed >
struct operations< 2u, Signed > :
    public gcc_x86_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > >
{
    typedef gcc_x86_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; xaddw %0, %1"
            : "+q" (v), "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "xchgw %0, %1"
            : "+q" (v), "+m" (storage)
            :
            : "memory"
        );
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected;
        bool success;
        __asm__ __volatile__
        (
            "lock; cmpxchgw %3, %1\n\t"
            "sete %2"
            : "+a" (previous), "+m" (storage), "=q" (success)
            : "q" (desired)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        expected = previous;
        return success;
    }

#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, result)\
    __asm__ __volatile__\
    (\
        "xor %%" BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER ", %%" BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER "\n\t"\
        ".align 16\n\t"\
        "1: movw %[arg], %%dx\n\t"\
        op " %%ax, %%dx\n\t"\
        "lock; cmpxchgw %%dx, %[storage]\n\t"\
        "jne 1b"\
        : [res] "+a" (result), [storage] "+m" (storage)\
        : [arg] "q" (argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER, "memory"\
    )

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andw", v, res);
        return res;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orw", v, res);
        return res;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorw", v, res);
        return res;
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP
};

template< bool Signed >
struct operations< 4u, Signed > :
    public gcc_x86_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > >
{
    typedef gcc_x86_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; xaddl %0, %1"
            : "+r" (v), "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "xchgl %0, %1"
            : "+r" (v), "+m" (storage)
            :
            : "memory"
        );
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected;
        bool success;
        __asm__ __volatile__
        (
            "lock; cmpxchgl %3, %1\n\t"
            "sete %2"
            : "+a" (previous), "+m" (storage), "=q" (success)
            : "r" (desired)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        expected = previous;
        return success;
    }

#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, result)\
    __asm__ __volatile__\
    (\
        "xor %%" BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER ", %%" BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER "\n\t"\
        ".align 16\n\t"\
        "1: movl %[arg], %%edx\n\t"\
        op " %%eax, %%edx\n\t"\
        "lock; cmpxchgl %%edx, %[storage]\n\t"\
        "jne 1b"\
        : [res] "+a" (result), [storage] "+m" (storage)\
        : [arg] "r" (argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER, "memory"\
    )

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andl", v, res);
        return res;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orl", v, res);
        return res;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorl", v, res);
        return res;
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP
};

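// 64-bit operations. When cmpxchg8b is available
// (BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG8B), they are synthesized from the
// double-width CAS backend (gcc_dcas_x86) via cas_based_operations; otherwise,
// on x86-64, the native 64-bit instructions are used directly.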
#if defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG8B)

template< bool Signed >
struct operations< 8u, Signed > :
    public cas_based_operations< gcc_dcas_x86< Signed > >
{
};

#elif defined(__x86_64__)

template< bool Signed >
struct operations< 8u, Signed > :
    public gcc_x86_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > >
{
    typedef gcc_x86_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; xaddq %0, %1"
            : "+r" (v), "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "xchgq %0, %1"
            : "+r" (v), "+m" (storage)
            :
            : "memory"
        );
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected;
        bool success;
        __asm__ __volatile__
        (
            "lock; cmpxchgq %3, %1\n\t"
            "sete %2"
            : "+a" (previous), "+m" (storage), "=q" (success)
            : "r" (desired)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        expected = previous;
        return success;
    }

#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, result)\
    __asm__ __volatile__\
    (\
        "xor %%" BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER ", %%" BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER "\n\t"\
        ".align 16\n\t"\
        "1: movq %[arg], %%rdx\n\t"\
        op " %%rax, %%rdx\n\t"\
        "lock; cmpxchgq %%rdx, %[storage]\n\t"\
        "jne 1b"\
        : [res] "+a" (result), [storage] "+m" (storage)\
        : [arg] "r" (argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER, "memory"\
    )

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andq", v, res);
        return res;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orq", v, res);
        return res;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorq", v, res);
        return res;
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP
};

#endif

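// When cmpxchg16b is available (BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG16B),
// 128-bit operations are likewise built from the double-width CAS backend
// (gcc_dcas_x86_64).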
#if defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG16B)

template< bool Signed >
struct operations< 16u, Signed > :
    public cas_based_operations< gcc_dcas_x86_64< Signed > >
{
};

#endif // defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG16B)

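// Fences. A seq_cst thread fence requires a serializing instruction: mfence
// where it is available (x86-64, or 32-bit builds with SSE2), otherwise a
// locked add to the top of the stack, which has the same ordering effect.
// Acquire/release thread fences and non-relaxed signal fences only need to
// prevent compiler reordering, hence the empty asm statement with a "memory"
// clobber.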
BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
{
    if (order == memory_order_seq_cst)
    {
        __asm__ __volatile__
        (
#if defined(__x86_64__) || defined(__SSE2__)
            "mfence\n"
#else
            "lock; addl $0, (%%esp)\n"
#endif
            ::: "memory"
        );
    }
    else if ((order & (memory_order_acquire | memory_order_release)) != 0)
    {
        __asm__ __volatile__ ("" ::: "memory");
    }
}

BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
{
    if (order != memory_order_relaxed)
        __asm__ __volatile__ ("" ::: "memory");
}

} // namespace detail
} // namespace atomics
} // namespace boost

#undef BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER

#endif // BOOST_ATOMIC_DETAIL_OPS_GCC_X86_HPP_INCLUDED_