DEPENDENCIES/generic/include/boost/atomic/detail/ops_windows.hpp @ 102:f46d142149f5

Whoops, finish that update
author Chris Cannam
date Mon, 07 Sep 2015 11:13:41 +0100
/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2009 Helge Bahmann
 * Copyright (c) 2012 Tim Blechmann
 * Copyright (c) 2014 Andrey Semashev
 */
/*!
 * \file   atomic/detail/ops_windows.hpp
 *
 * This header contains an implementation of the \c operations template.
 *
 * This implementation is the most basic version for Windows. It should
 * work with any non-MSVC-like compiler as long as the Interlocked WinAPI
 * functions are available. This version is also used for WinCE.
 *
 * Notably, this implementation is not as efficient as other
 * versions based on compiler intrinsics.
 */

#ifndef BOOST_ATOMIC_DETAIL_OPS_WINDOWS_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_OPS_WINDOWS_HPP_INCLUDED_

#include <boost/memory_order.hpp>
#include <boost/type_traits/make_signed.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/interlocked.hpp>
#include <boost/atomic/detail/storage_type.hpp>
#include <boost/atomic/detail/operations_fwd.hpp>
#include <boost/atomic/capabilities.hpp>
#include <boost/atomic/detail/ops_msvc_common.hpp>
#include <boost/atomic/detail/ops_extending_cas_based.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

namespace boost {
namespace atomics {
namespace detail {

struct windows_operations_base
{
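    // Issues a full hardware memory barrier. The Interlocked* functions act as
    // full fences on Windows, so an exchange on a dummy local is used here in
    // place of a dedicated fence intrinsic.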
    static BOOST_FORCEINLINE void hardware_full_fence() BOOST_NOEXCEPT
    {
        long tmp;
        BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&tmp, 0);
    }

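    // Only compiler barriers are needed around the Interlocked operations:
    // the operations themselves already provide the hardware ordering.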
    static BOOST_FORCEINLINE void fence_before(memory_order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after(memory_order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }
};

template< typename T, typename Derived >
struct windows_operations :
    public windows_operations_base
{
    typedef T storage_type;

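    // Stores are implemented as an exchange whose result is discarded; the
    // basic Interlocked API offers no plain atomic store.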
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        Derived::exchange(storage, v, order);
    }

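    // Loads are implemented as fetch_add(0): a read-modify-write that returns
    // the current value. Correct, but heavier than a plain atomic read.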
    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return Derived::fetch_add(const_cast< storage_type volatile& >(storage), (storage_type)0, order);
    }

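    // Subtraction is forwarded to fetch_add with the negated value; the value
    // is negated through the signed counterpart of the storage type,
    // presumably to avoid warnings about negating an unsigned quantity.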
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        typedef typename make_signed< storage_type >::type signed_storage_type;
        return Derived::fetch_add(storage, static_cast< storage_type >(-static_cast< signed_storage_type >(v)), order);
    }

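    // The Interlocked compare-exchange cannot fail spuriously, so the weak
    // form simply forwards to the strong one.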
    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        return Derived::compare_exchange_strong(storage, expected, desired, success_order, failure_order);
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!Derived::exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, (storage_type)0, order);
    }

    static BOOST_FORCEINLINE bool is_lock_free(storage_type const volatile&) BOOST_NOEXCEPT
    {
        return true;
    }
};

template< bool Signed >
struct operations< 4u, Signed > :
    public windows_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > >
{
    typedef windows_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

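    // The 32-bit operations map onto the InterlockedExchangeAdd,
    // InterlockedExchange and InterlockedCompareExchange WinAPI functions (or
    // the equivalent compiler intrinsics) via the BOOST_ATOMIC_INTERLOCKED_*
    // macros.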
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before(order);
        v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD(&storage, v));
        base_type::fence_after(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before(order);
        v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&storage, v));
        base_type::fence_after(order);
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected;
        base_type::fence_before(success_order);
        storage_type old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&storage, desired, previous));
        expected = old_val;
        // The success and failure fences are the same anyway
        base_type::fence_after(success_order);
        return (previous == old_val);
    }

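    // When the corresponding BOOST_ATOMIC_INTERLOCKED_* macro is not defined,
    // the bitwise operations fall back to a compare-exchange loop that retries
    // until the update is applied atomically.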
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
#if defined(BOOST_ATOMIC_INTERLOCKED_AND)
        base_type::fence_before(order);
        v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND(&storage, v));
        base_type::fence_after(order);
        return v;
#else
        storage_type res = storage;
        while (!compare_exchange_strong(storage, res, res & v, order, memory_order_relaxed)) {}
        return res;
#endif
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
#if defined(BOOST_ATOMIC_INTERLOCKED_OR)
        base_type::fence_before(order);
        v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR(&storage, v));
        base_type::fence_after(order);
        return v;
#else
        storage_type res = storage;
        while (!compare_exchange_strong(storage, res, res | v, order, memory_order_relaxed)) {}
        return res;
#endif
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
#if defined(BOOST_ATOMIC_INTERLOCKED_XOR)
        base_type::fence_before(order);
        v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR(&storage, v));
        base_type::fence_after(order);
        return v;
#else
        storage_type res = storage;
        while (!compare_exchange_strong(storage, res, res ^ v, order, memory_order_relaxed)) {}
        return res;
#endif
    }
};

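// 8- and 16-bit operations are emulated on top of the 32-bit ones:
// extending_cas_based_operations widens the value to the 4-byte storage type
// and performs the update through a compare-exchange loop.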
template< bool Signed >
struct operations< 1u, Signed > :
    public extending_cas_based_operations< operations< 4u, Signed >, 1u, Signed >
{
};

template< bool Signed >
struct operations< 2u, Signed > :
    public extending_cas_based_operations< operations< 4u, Signed >, 2u, Signed >
{
};

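// Only memory_order_seq_cst requires a hardware fence; the weaker orders only
// need to keep the compiler from reordering across the fence.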
BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
{
    BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    if (order == memory_order_seq_cst)
        windows_operations_base::hardware_full_fence();
    BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
}

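// A signal fence only has to constrain the compiler, not the hardware, so a
// compiler barrier is sufficient for anything stronger than relaxed.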
BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
{
    if (order != memory_order_relaxed)
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
}

} // namespace detail
} // namespace atomics
} // namespace boost

#endif // BOOST_ATOMIC_DETAIL_OPS_WINDOWS_HPP_INCLUDED_