1 | /* |
2 | * Distributed under the Boost Software License, Version 1.0. |
3 | * (See accompanying file LICENSE_1_0.txt or copy at |
4 | * http://www.boost.org/LICENSE_1_0.txt) |
5 | * |
6 | * Copyright (c) 2011 Helge Bahmann |
7 | * Copyright (c) 2013 Tim Blechmann |
8 | * Copyright (c) 2014 Andrey Semashev |
9 | */ |
10 | /*! |
11 | * \file atomic/detail/atomic_template.hpp |
12 | * |
 * This header contains the interface definition of the \c atomic template.
14 | */ |
15 | |
16 | #ifndef BOOST_ATOMIC_DETAIL_ATOMIC_TEMPLATE_HPP_INCLUDED_ |
17 | #define BOOST_ATOMIC_DETAIL_ATOMIC_TEMPLATE_HPP_INCLUDED_ |
18 | |
19 | #include <cstddef> |
20 | #include <boost/cstdint.hpp> |
21 | #include <boost/assert.hpp> |
22 | #include <boost/atomic/detail/config.hpp> |
23 | #include <boost/atomic/detail/bitwise_cast.hpp> |
24 | #include <boost/atomic/detail/operations_fwd.hpp> |
25 | #include <boost/atomic/detail/type_traits/is_signed.hpp> |
26 | #include <boost/atomic/detail/type_traits/is_integral.hpp> |
27 | #include <boost/atomic/detail/type_traits/is_function.hpp> |
28 | #include <boost/atomic/detail/type_traits/conditional.hpp> |
29 | |
30 | #ifdef BOOST_HAS_PRAGMA_ONCE |
31 | #pragma once |
32 | #endif |
33 | |
34 | #if defined(BOOST_MSVC) |
35 | #pragma warning(push) |
36 | // 'boost::atomics::atomic<T>' : multiple assignment operators specified |
37 | #pragma warning(disable: 4522) |
38 | #endif |
39 | |
40 | /* |
41 | * IMPLEMENTATION NOTE: All interface functions MUST be declared with BOOST_FORCEINLINE, |
42 | * see comment for convert_memory_order_to_gcc in ops_gcc_atomic.hpp. |
43 | */ |
44 | |
45 | namespace boost { |
46 | namespace atomics { |
47 | namespace detail { |
48 | |
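// Deduces the strongest failure ordering allowed for a compare_exchange call that was given a
// single (success) memory order: acq_rel degrades to acquire and release degrades to relaxed,
// since the failure ordering applies to a pure load and cannot contain a store.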
49 | BOOST_FORCEINLINE BOOST_CONSTEXPR memory_order deduce_failure_order(memory_order order) BOOST_NOEXCEPT |
50 | { |
51 | return order == memory_order_acq_rel ? memory_order_acquire : (order == memory_order_release ? memory_order_relaxed : order); |
52 | } |
53 | |
54 | BOOST_FORCEINLINE BOOST_CONSTEXPR bool cas_failure_order_must_not_be_stronger_than_success_order(memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT |
55 | { |
56 | // 15 == (memory_order_seq_cst | memory_order_consume), see memory_order.hpp |
57 | // Given the enum values we can test the strength of memory order requirements with this single condition. |
58 | return (failure_order & 15u) <= (success_order & 15u); |
59 | } |
60 | |
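// The classify and classify_pointer metafunctions choose the base_atomic specialization for T:
// integral types map to int, pointers to object types map to void*, and everything else
// (classes, enums, function pointers, member pointers and cv-qualified void pointers) maps to
// void, which selects the generic bitwise implementation.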
61 | template< typename T, bool IsFunction = boost::atomics::detail::is_function< T >::value > |
62 | struct classify_pointer |
63 | { |
64 | typedef void* type; |
65 | }; |
66 | |
67 | template< typename T > |
68 | struct classify_pointer< T, true > |
69 | { |
70 | typedef void type; |
71 | }; |
72 | |
73 | template< typename T, bool IsInt = boost::atomics::detail::is_integral< T >::value > |
74 | struct classify |
75 | { |
76 | typedef void type; |
77 | }; |
78 | |
79 | template< typename T > |
80 | struct classify< T, true > { typedef int type; }; |
81 | |
82 | template< typename T > |
83 | struct classify< T*, false > { typedef typename classify_pointer< T >::type type; }; |
84 | |
85 | template< > |
86 | struct classify< void*, false > { typedef void type; }; |
87 | |
88 | template< > |
89 | struct classify< const void*, false > { typedef void type; }; |
90 | |
91 | template< > |
92 | struct classify< volatile void*, false > { typedef void type; }; |
93 | |
94 | template< > |
95 | struct classify< const volatile void*, false > { typedef void type; }; |
96 | |
97 | template< typename T, typename U > |
98 | struct classify< T U::*, false > { typedef void type; }; |
99 | |
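// Tag types for compile-time overload selection; an implementation overload is chosen by
// passing boolean_constant< B >() as the last argument.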
100 | template< bool > |
101 | struct boolean_constant {}; |
102 | typedef boolean_constant< true > true_constant; |
103 | typedef boolean_constant< false > false_constant; |
104 | |
105 | |
106 | template< typename T, typename Kind > |
107 | class base_atomic; |
108 | |
109 | //! General template. Implementation for user-defined types, such as structs and enums, and pointers to non-object types |
110 | template< typename T > |
111 | class base_atomic< T, void > |
112 | { |
113 | public: |
114 | typedef T value_type; |
115 | |
116 | protected: |
117 | typedef atomics::detail::operations< storage_size_of< value_type >::value, false > operations; |
118 | typedef typename boost::atomics::detail::conditional< sizeof(value_type) <= sizeof(void*), value_type, value_type const& >::type value_arg_type; |
119 | |
120 | public: |
121 | typedef typename operations::storage_type storage_type; |
122 | |
123 | private: |
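    // When value_type and storage_type have the same size, the expected value passed to
    // compare_exchange can be reinterpreted as storage_type in place (provided the storage
    // type is allowed to alias), avoiding a conversion round trip.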
124 | typedef boolean_constant< sizeof(value_type) == sizeof(storage_type) > value_matches_storage; |
125 | |
126 | protected: |
127 | typename operations::aligned_storage_type m_storage; |
128 | |
129 | public: |
130 | BOOST_FORCEINLINE explicit base_atomic(value_arg_type v = value_type()) BOOST_NOEXCEPT : m_storage(atomics::detail::bitwise_cast< storage_type >(v)) |
131 | { |
132 | } |
133 | |
134 | BOOST_FORCEINLINE void store(value_arg_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
135 | { |
136 | BOOST_ASSERT(order != memory_order_consume); |
137 | BOOST_ASSERT(order != memory_order_acquire); |
138 | BOOST_ASSERT(order != memory_order_acq_rel); |
139 | |
140 | operations::store(m_storage.value, atomics::detail::bitwise_cast< storage_type >(v), order); |
141 | } |
142 | |
143 | BOOST_FORCEINLINE value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT |
144 | { |
145 | BOOST_ASSERT(order != memory_order_release); |
146 | BOOST_ASSERT(order != memory_order_acq_rel); |
147 | |
148 | return atomics::detail::bitwise_cast< value_type >(operations::load(m_storage.value, order)); |
149 | } |
150 | |
151 | BOOST_FORCEINLINE value_type exchange(value_arg_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
152 | { |
153 | return atomics::detail::bitwise_cast< value_type >(operations::exchange(m_storage.value, atomics::detail::bitwise_cast< storage_type >(v), order)); |
154 | } |
155 | |
156 | BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_arg_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT |
157 | { |
158 | BOOST_ASSERT(failure_order != memory_order_release); |
159 | BOOST_ASSERT(failure_order != memory_order_acq_rel); |
160 | BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order)); |
161 | |
162 | return compare_exchange_strong_impl(expected, desired, success_order, failure_order, value_matches_storage()); |
163 | } |
164 | |
165 | BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_arg_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
166 | { |
167 | return compare_exchange_strong(expected, desired, order, atomics::detail::deduce_failure_order(order)); |
168 | } |
169 | |
170 | BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_arg_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT |
171 | { |
172 | BOOST_ASSERT(failure_order != memory_order_release); |
173 | BOOST_ASSERT(failure_order != memory_order_acq_rel); |
174 | BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order)); |
175 | |
176 | return compare_exchange_weak_impl(expected, desired, success_order, failure_order, value_matches_storage()); |
177 | } |
178 | |
179 | BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_arg_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
180 | { |
181 | return compare_exchange_weak(expected, desired, order, atomics::detail::deduce_failure_order(order)); |
182 | } |
183 | |
184 | BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&)) |
185 | BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&)) |
186 | |
187 | private: |
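    // true_constant overloads: value_type and storage_type have the same size, so expected
    // may be reinterpreted directly when the storage type is allowed to alias; otherwise
    // control falls through to the false_constant overloads, which convert expected to
    // storage_type and back around the operation.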
188 | BOOST_FORCEINLINE bool compare_exchange_strong_impl(value_type& expected, value_arg_type desired, memory_order success_order, memory_order failure_order, true_constant) volatile BOOST_NOEXCEPT |
189 | { |
190 | #if defined(BOOST_ATOMIC_DETAIL_STORAGE_TYPE_MAY_ALIAS) |
191 | return operations::compare_exchange_strong(m_storage.value, reinterpret_cast< storage_type& >(expected), atomics::detail::bitwise_cast< storage_type >(desired), success_order, failure_order); |
192 | #else |
193 | return compare_exchange_strong_impl(expected, desired, success_order, failure_order, false_constant()); |
194 | #endif |
195 | } |
196 | |
197 | BOOST_FORCEINLINE bool compare_exchange_strong_impl(value_type& expected, value_arg_type desired, memory_order success_order, memory_order failure_order, false_constant) volatile BOOST_NOEXCEPT |
198 | { |
199 | storage_type old_value = atomics::detail::bitwise_cast< storage_type >(expected); |
200 | const bool res = operations::compare_exchange_strong(m_storage.value, old_value, atomics::detail::bitwise_cast< storage_type >(desired), success_order, failure_order); |
201 | expected = atomics::detail::bitwise_cast< value_type >(old_value); |
202 | return res; |
203 | } |
204 | |
205 | BOOST_FORCEINLINE bool compare_exchange_weak_impl(value_type& expected, value_arg_type desired, memory_order success_order, memory_order failure_order, true_constant) volatile BOOST_NOEXCEPT |
206 | { |
207 | #if defined(BOOST_ATOMIC_DETAIL_STORAGE_TYPE_MAY_ALIAS) |
208 | return operations::compare_exchange_weak(m_storage.value, reinterpret_cast< storage_type& >(expected), atomics::detail::bitwise_cast< storage_type >(desired), success_order, failure_order); |
209 | #else |
210 | return compare_exchange_weak_impl(expected, desired, success_order, failure_order, false_constant()); |
211 | #endif |
212 | } |
213 | |
214 | BOOST_FORCEINLINE bool compare_exchange_weak_impl(value_type& expected, value_arg_type desired, memory_order success_order, memory_order failure_order, false_constant) volatile BOOST_NOEXCEPT |
215 | { |
216 | storage_type old_value = atomics::detail::bitwise_cast< storage_type >(expected); |
217 | const bool res = operations::compare_exchange_weak(m_storage.value, old_value, atomics::detail::bitwise_cast< storage_type >(desired), success_order, failure_order); |
218 | expected = atomics::detail::bitwise_cast< value_type >(old_value); |
219 | return res; |
220 | } |
221 | }; |
222 | |
223 | |
224 | //! Implementation for integers |
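//! The storage operations are selected according to the size and signedness of \c T.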
225 | template< typename T > |
226 | class base_atomic< T, int > |
227 | { |
228 | public: |
229 | typedef T value_type; |
230 | typedef T difference_type; |
231 | |
232 | protected: |
233 | typedef atomics::detail::operations< storage_size_of< value_type >::value, boost::atomics::detail::is_signed< T >::value > operations; |
234 | typedef value_type value_arg_type; |
235 | |
236 | public: |
237 | typedef typename operations::storage_type storage_type; |
238 | |
239 | protected: |
240 | typename operations::aligned_storage_type m_storage; |
241 | |
242 | public: |
243 | BOOST_DEFAULTED_FUNCTION(base_atomic(), {}) |
244 | BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : m_storage(v) {} |
245 | |
246 | BOOST_FORCEINLINE void store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
247 | { |
248 | BOOST_ASSERT(order != memory_order_consume); |
249 | BOOST_ASSERT(order != memory_order_acquire); |
250 | BOOST_ASSERT(order != memory_order_acq_rel); |
251 | |
252 | operations::store(m_storage.value, static_cast< storage_type >(v), order); |
253 | } |
254 | |
255 | BOOST_FORCEINLINE value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT |
256 | { |
257 | BOOST_ASSERT(order != memory_order_release); |
258 | BOOST_ASSERT(order != memory_order_acq_rel); |
259 | |
260 | return static_cast< value_type >(operations::load(m_storage.value, order)); |
261 | } |
262 | |
263 | BOOST_FORCEINLINE value_type fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
264 | { |
265 | return static_cast< value_type >(operations::fetch_add(m_storage.value, static_cast< storage_type >(v), order)); |
266 | } |
267 | |
268 | BOOST_FORCEINLINE value_type fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
269 | { |
270 | return static_cast< value_type >(operations::fetch_sub(m_storage.value, static_cast< storage_type >(v), order)); |
271 | } |
272 | |
273 | BOOST_FORCEINLINE value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
274 | { |
275 | return static_cast< value_type >(operations::exchange(m_storage.value, static_cast< storage_type >(v), order)); |
276 | } |
277 | |
278 | BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT |
279 | { |
280 | BOOST_ASSERT(failure_order != memory_order_release); |
281 | BOOST_ASSERT(failure_order != memory_order_acq_rel); |
282 | BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order)); |
283 | |
284 | #if defined(BOOST_ATOMIC_DETAIL_STORAGE_TYPE_MAY_ALIAS) |
285 | return operations::compare_exchange_strong(m_storage.value, reinterpret_cast< storage_type& >(expected), static_cast< storage_type >(desired), success_order, failure_order); |
286 | #else |
287 | storage_type old_value = static_cast< storage_type >(expected); |
288 | const bool res = operations::compare_exchange_strong(m_storage.value, old_value, static_cast< storage_type >(desired), success_order, failure_order); |
289 | expected = static_cast< value_type >(old_value); |
290 | return res; |
291 | #endif |
292 | } |
293 | |
294 | BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
295 | { |
296 | return compare_exchange_strong(expected, desired, order, atomics::detail::deduce_failure_order(order)); |
297 | } |
298 | |
299 | BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT |
300 | { |
301 | BOOST_ASSERT(failure_order != memory_order_release); |
302 | BOOST_ASSERT(failure_order != memory_order_acq_rel); |
303 | BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order)); |
304 | |
305 | #if defined(BOOST_ATOMIC_DETAIL_STORAGE_TYPE_MAY_ALIAS) |
306 | return operations::compare_exchange_weak(m_storage.value, reinterpret_cast< storage_type& >(expected), static_cast< storage_type >(desired), success_order, failure_order); |
307 | #else |
308 | storage_type old_value = static_cast< storage_type >(expected); |
309 | const bool res = operations::compare_exchange_weak(m_storage.value, old_value, static_cast< storage_type >(desired), success_order, failure_order); |
310 | expected = static_cast< value_type >(old_value); |
311 | return res; |
312 | #endif |
313 | } |
314 | |
315 | BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
316 | { |
317 | return compare_exchange_weak(expected, desired, order, atomics::detail::deduce_failure_order(order)); |
318 | } |
319 | |
320 | BOOST_FORCEINLINE value_type fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
321 | { |
322 | return static_cast< value_type >(operations::fetch_and(m_storage.value, static_cast< storage_type >(v), order)); |
323 | } |
324 | |
325 | BOOST_FORCEINLINE value_type fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
326 | { |
327 | return static_cast< value_type >(operations::fetch_or(m_storage.value, static_cast< storage_type >(v), order)); |
328 | } |
329 | |
330 | BOOST_FORCEINLINE value_type fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
331 | { |
332 | return static_cast< value_type >(operations::fetch_xor(m_storage.value, static_cast< storage_type >(v), order)); |
333 | } |
334 | |
335 | BOOST_FORCEINLINE value_type operator++(int) volatile BOOST_NOEXCEPT |
336 | { |
337 | return fetch_add(1); |
338 | } |
339 | |
340 | BOOST_FORCEINLINE value_type operator++() volatile BOOST_NOEXCEPT |
341 | { |
342 | return fetch_add(1) + 1; |
343 | } |
344 | |
345 | BOOST_FORCEINLINE value_type operator--(int) volatile BOOST_NOEXCEPT |
346 | { |
347 | return fetch_sub(1); |
348 | } |
349 | |
350 | BOOST_FORCEINLINE value_type operator--() volatile BOOST_NOEXCEPT |
351 | { |
352 | return fetch_sub(1) - 1; |
353 | } |
354 | |
355 | BOOST_FORCEINLINE value_type operator+=(difference_type v) volatile BOOST_NOEXCEPT |
356 | { |
357 | return fetch_add(v) + v; |
358 | } |
359 | |
360 | BOOST_FORCEINLINE value_type operator-=(difference_type v) volatile BOOST_NOEXCEPT |
361 | { |
362 | return fetch_sub(v) - v; |
363 | } |
364 | |
365 | BOOST_FORCEINLINE value_type operator&=(value_type v) volatile BOOST_NOEXCEPT |
366 | { |
367 | return fetch_and(v) & v; |
368 | } |
369 | |
370 | BOOST_FORCEINLINE value_type operator|=(value_type v) volatile BOOST_NOEXCEPT |
371 | { |
372 | return fetch_or(v) | v; |
373 | } |
374 | |
375 | BOOST_FORCEINLINE value_type operator^=(value_type v) volatile BOOST_NOEXCEPT |
376 | { |
377 | return fetch_xor(v) ^ v; |
378 | } |
379 | |
380 | BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&)) |
381 | BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&)) |
382 | }; |
383 | |
384 | //! Implementation for bool |
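//! Values read back from storage are normalized to \c false or \c true with a double negation.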
385 | template< > |
386 | class base_atomic< bool, int > |
387 | { |
388 | public: |
389 | typedef bool value_type; |
390 | |
391 | protected: |
392 | typedef atomics::detail::operations< 1u, false > operations; |
393 | typedef value_type value_arg_type; |
394 | |
395 | public: |
396 | typedef operations::storage_type storage_type; |
397 | |
398 | protected: |
399 | operations::aligned_storage_type m_storage; |
400 | |
401 | public: |
402 | BOOST_DEFAULTED_FUNCTION(base_atomic(), {}) |
403 | BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : m_storage(v) {} |
404 | |
405 | BOOST_FORCEINLINE void store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
406 | { |
407 | BOOST_ASSERT(order != memory_order_consume); |
408 | BOOST_ASSERT(order != memory_order_acquire); |
409 | BOOST_ASSERT(order != memory_order_acq_rel); |
410 | |
411 | operations::store(m_storage.value, static_cast< storage_type >(v), order); |
412 | } |
413 | |
414 | BOOST_FORCEINLINE value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT |
415 | { |
416 | BOOST_ASSERT(order != memory_order_release); |
417 | BOOST_ASSERT(order != memory_order_acq_rel); |
418 | |
419 | return !!operations::load(m_storage.value, order); |
420 | } |
421 | |
422 | BOOST_FORCEINLINE value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
423 | { |
424 | return !!operations::exchange(m_storage.value, static_cast< storage_type >(v), order); |
425 | } |
426 | |
427 | BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT |
428 | { |
429 | BOOST_ASSERT(failure_order != memory_order_release); |
430 | BOOST_ASSERT(failure_order != memory_order_acq_rel); |
431 | BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order)); |
432 | |
433 | #if defined(BOOST_ATOMIC_DETAIL_STORAGE_TYPE_MAY_ALIAS) |
434 | return operations::compare_exchange_strong(m_storage.value, reinterpret_cast< storage_type& >(expected), static_cast< storage_type >(desired), success_order, failure_order); |
435 | #else |
436 | storage_type old_value = static_cast< storage_type >(expected); |
437 | const bool res = operations::compare_exchange_strong(m_storage.value, old_value, static_cast< storage_type >(desired), success_order, failure_order); |
438 | expected = !!old_value; |
439 | return res; |
440 | #endif |
441 | } |
442 | |
443 | BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
444 | { |
445 | return compare_exchange_strong(expected, desired, order, atomics::detail::deduce_failure_order(order)); |
446 | } |
447 | |
448 | BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT |
449 | { |
450 | BOOST_ASSERT(failure_order != memory_order_release); |
451 | BOOST_ASSERT(failure_order != memory_order_acq_rel); |
452 | BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order)); |
453 | |
454 | #if defined(BOOST_ATOMIC_DETAIL_STORAGE_TYPE_MAY_ALIAS) |
455 | return operations::compare_exchange_weak(m_storage.value, reinterpret_cast< storage_type& >(expected), static_cast< storage_type >(desired), success_order, failure_order); |
456 | #else |
457 | storage_type old_value = static_cast< storage_type >(expected); |
458 | const bool res = operations::compare_exchange_weak(m_storage.value, old_value, static_cast< storage_type >(desired), success_order, failure_order); |
459 | expected = !!old_value; |
460 | return res; |
461 | #endif |
462 | } |
463 | |
464 | BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
465 | { |
466 | return compare_exchange_weak(expected, desired, order, atomics::detail::deduce_failure_order(order)); |
467 | } |
468 | |
469 | BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&)) |
470 | BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&)) |
471 | }; |
472 | |
473 | |
474 | //! Implementation for pointers to object types |
475 | template< typename T > |
476 | class base_atomic< T*, void* > |
477 | { |
478 | public: |
479 | typedef T* value_type; |
480 | typedef std::ptrdiff_t difference_type; |
481 | |
482 | protected: |
483 | typedef atomics::detail::operations< storage_size_of< value_type >::value, false > operations; |
484 | typedef value_type value_arg_type; |
485 | |
486 | public: |
487 | typedef typename operations::storage_type storage_type; |
488 | |
489 | protected: |
490 | typename operations::aligned_storage_type m_storage; |
491 | |
492 | public: |
493 | BOOST_DEFAULTED_FUNCTION(base_atomic(), {}) |
494 | BOOST_FORCEINLINE explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : m_storage(atomics::detail::bitwise_cast< storage_type >(v)) |
495 | { |
496 | } |
497 | |
498 | BOOST_FORCEINLINE void store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
499 | { |
500 | BOOST_ASSERT(order != memory_order_consume); |
501 | BOOST_ASSERT(order != memory_order_acquire); |
502 | BOOST_ASSERT(order != memory_order_acq_rel); |
503 | |
504 | operations::store(m_storage.value, atomics::detail::bitwise_cast< storage_type >(v), order); |
505 | } |
506 | |
507 | BOOST_FORCEINLINE value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT |
508 | { |
509 | BOOST_ASSERT(order != memory_order_release); |
510 | BOOST_ASSERT(order != memory_order_acq_rel); |
511 | |
512 | return atomics::detail::bitwise_cast< value_type >(operations::load(m_storage.value, order)); |
513 | } |
514 | |
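    // fetch_add and fetch_sub follow pointer arithmetic: the difference is scaled by
    // sizeof(T) before it is applied to the stored address.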
515 | BOOST_FORCEINLINE value_type fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
516 | { |
517 | return atomics::detail::bitwise_cast< value_type >(operations::fetch_add(m_storage.value, static_cast< storage_type >(v * sizeof(T)), order)); |
518 | } |
519 | |
520 | BOOST_FORCEINLINE value_type fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
521 | { |
522 | return atomics::detail::bitwise_cast< value_type >(operations::fetch_sub(m_storage.value, static_cast< storage_type >(v * sizeof(T)), order)); |
523 | } |
524 | |
525 | BOOST_FORCEINLINE value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
526 | { |
527 | return atomics::detail::bitwise_cast< value_type >(operations::exchange(m_storage.value, atomics::detail::bitwise_cast< storage_type >(v), order)); |
528 | } |
529 | |
530 | BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT |
531 | { |
532 | BOOST_ASSERT(failure_order != memory_order_release); |
533 | BOOST_ASSERT(failure_order != memory_order_acq_rel); |
534 | BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order)); |
535 | |
536 | #if defined(BOOST_ATOMIC_DETAIL_STORAGE_TYPE_MAY_ALIAS) |
537 | return operations::compare_exchange_strong(m_storage.value, reinterpret_cast< storage_type& >(expected), atomics::detail::bitwise_cast< storage_type >(desired), success_order, failure_order); |
538 | #else |
539 | storage_type old_value = atomics::detail::bitwise_cast< storage_type >(expected); |
540 | const bool res = operations::compare_exchange_strong(m_storage.value, old_value, atomics::detail::bitwise_cast< storage_type >(desired), success_order, failure_order); |
541 | expected = atomics::detail::bitwise_cast< value_type >(old_value); |
542 | return res; |
543 | #endif |
544 | } |
545 | |
546 | BOOST_FORCEINLINE bool compare_exchange_strong(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
547 | { |
548 | return compare_exchange_strong(expected, desired, order, atomics::detail::deduce_failure_order(order)); |
549 | } |
550 | |
551 | BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order success_order, memory_order failure_order) volatile BOOST_NOEXCEPT |
552 | { |
553 | BOOST_ASSERT(failure_order != memory_order_release); |
554 | BOOST_ASSERT(failure_order != memory_order_acq_rel); |
555 | BOOST_ASSERT(cas_failure_order_must_not_be_stronger_than_success_order(success_order, failure_order)); |
556 | |
557 | #if defined(BOOST_ATOMIC_DETAIL_STORAGE_TYPE_MAY_ALIAS) |
558 | return operations::compare_exchange_weak(m_storage.value, reinterpret_cast< storage_type& >(expected), atomics::detail::bitwise_cast< storage_type >(desired), success_order, failure_order); |
559 | #else |
560 | storage_type old_value = atomics::detail::bitwise_cast< storage_type >(expected); |
561 | const bool res = operations::compare_exchange_weak(m_storage.value, old_value, atomics::detail::bitwise_cast< storage_type >(desired), success_order, failure_order); |
562 | expected = atomics::detail::bitwise_cast< value_type >(old_value); |
563 | return res; |
564 | #endif |
565 | } |
566 | |
567 | BOOST_FORCEINLINE bool compare_exchange_weak(value_type& expected, value_type desired, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT |
568 | { |
569 | return compare_exchange_weak(expected, desired, order, atomics::detail::deduce_failure_order(order)); |
570 | } |
571 | |
572 | BOOST_FORCEINLINE value_type operator++(int) volatile BOOST_NOEXCEPT |
573 | { |
574 | return fetch_add(1); |
575 | } |
576 | |
577 | BOOST_FORCEINLINE value_type operator++() volatile BOOST_NOEXCEPT |
578 | { |
579 | return fetch_add(1) + 1; |
580 | } |
581 | |
582 | BOOST_FORCEINLINE value_type operator--(int) volatile BOOST_NOEXCEPT |
583 | { |
584 | return fetch_sub(1); |
585 | } |
586 | |
587 | BOOST_FORCEINLINE value_type operator--() volatile BOOST_NOEXCEPT |
588 | { |
589 | return fetch_sub(1) - 1; |
590 | } |
591 | |
592 | BOOST_FORCEINLINE value_type operator+=(difference_type v) volatile BOOST_NOEXCEPT |
593 | { |
594 | return fetch_add(v) + v; |
595 | } |
596 | |
597 | BOOST_FORCEINLINE value_type operator-=(difference_type v) volatile BOOST_NOEXCEPT |
598 | { |
599 | return fetch_sub(v) - v; |
600 | } |
601 | |
602 | BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&)) |
603 | BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&)) |
604 | }; |
605 | |
606 | } // namespace detail |
607 | |
608 | template< typename T > |
609 | class atomic : |
610 | public atomics::detail::base_atomic< T, typename atomics::detail::classify< T >::type > |
611 | { |
612 | private: |
613 | typedef atomics::detail::base_atomic< T, typename atomics::detail::classify< T >::type > base_type; |
614 | typedef typename base_type::value_arg_type value_arg_type; |
615 | |
616 | public: |
617 | typedef typename base_type::value_type value_type; |
618 | typedef typename base_type::storage_type storage_type; |
619 | |
620 | public: |
621 | static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = base_type::operations::is_always_lock_free; |
622 | |
623 | public: |
624 | BOOST_DEFAULTED_FUNCTION(atomic(), BOOST_NOEXCEPT {}) |
625 | |
626 | // NOTE: The constructor is made explicit because gcc 4.7 complains that |
627 | // operator=(value_arg_type) is considered ambiguous with operator=(atomic const&) |
628 | // in assignment expressions, even though conversion to atomic<> is less preferred |
629 | // than conversion to value_arg_type. |
630 | BOOST_FORCEINLINE explicit BOOST_CONSTEXPR atomic(value_arg_type v) BOOST_NOEXCEPT : base_type(v) {} |
631 | |
632 | BOOST_FORCEINLINE value_type operator= (value_arg_type v) volatile BOOST_NOEXCEPT |
633 | { |
634 | this->store(v); |
635 | return v; |
636 | } |
637 | |
638 | BOOST_FORCEINLINE operator value_type() const volatile BOOST_NOEXCEPT |
639 | { |
640 | return this->load(); |
641 | } |
642 | |
643 | BOOST_FORCEINLINE bool is_lock_free() const volatile BOOST_NOEXCEPT |
644 | { |
        // C++17 requires that all instances of atomic<> return a value consistent with is_always_lock_free here
646 | return is_always_lock_free; |
647 | } |
648 | |
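    // Boost.Atomic extension: direct access to the underlying storage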
649 | BOOST_FORCEINLINE storage_type& storage() BOOST_NOEXCEPT { return this->m_storage.value; } |
650 | BOOST_FORCEINLINE storage_type volatile& storage() volatile BOOST_NOEXCEPT { return this->m_storage.value; } |
651 | BOOST_FORCEINLINE storage_type const& storage() const BOOST_NOEXCEPT { return this->m_storage.value; } |
652 | BOOST_FORCEINLINE storage_type const volatile& storage() const volatile BOOST_NOEXCEPT { return this->m_storage.value; } |
653 | |
654 | BOOST_DELETED_FUNCTION(atomic(atomic const&)) |
655 | BOOST_DELETED_FUNCTION(atomic& operator= (atomic const&)) |
656 | BOOST_DELETED_FUNCTION(atomic& operator= (atomic const&) volatile) |
657 | }; |
658 | |
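// Namespace-scope definition of the static data member; before C++17 an odr-used
// static constexpr member still requires such a definition.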
659 | template< typename T > |
660 | BOOST_CONSTEXPR_OR_CONST bool atomic< T >::is_always_lock_free; |
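
/*
 * Illustrative usage sketch (not part of this header's interface). The names below assume the
 * public boost/atomic.hpp header, which also exposes this class template as boost::atomic:
 *
 *   boost::atomics::atomic< int > counter(0);
 *   int expected = counter.load(boost::memory_order_relaxed);
 *   // On failure, compare_exchange_weak reloads expected with the current value, so the
 *   // loop retries with fresh data until the increment is published.
 *   while (!counter.compare_exchange_weak(expected, expected + 1, boost::memory_order_acq_rel, boost::memory_order_relaxed))
 *   {
 *   }
 */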
661 | |
662 | typedef atomic< char > atomic_char; |
663 | typedef atomic< unsigned char > atomic_uchar; |
664 | typedef atomic< signed char > atomic_schar; |
665 | typedef atomic< uint8_t > atomic_uint8_t; |
666 | typedef atomic< int8_t > atomic_int8_t; |
667 | typedef atomic< unsigned short > atomic_ushort; |
668 | typedef atomic< short > atomic_short; |
669 | typedef atomic< uint16_t > atomic_uint16_t; |
670 | typedef atomic< int16_t > atomic_int16_t; |
671 | typedef atomic< unsigned int > atomic_uint; |
672 | typedef atomic< int > atomic_int; |
673 | typedef atomic< uint32_t > atomic_uint32_t; |
674 | typedef atomic< int32_t > atomic_int32_t; |
675 | typedef atomic< unsigned long > atomic_ulong; |
676 | typedef atomic< long > atomic_long; |
677 | typedef atomic< uint64_t > atomic_uint64_t; |
678 | typedef atomic< int64_t > atomic_int64_t; |
679 | #ifdef BOOST_HAS_LONG_LONG |
680 | typedef atomic< boost::ulong_long_type > atomic_ullong; |
681 | typedef atomic< boost::long_long_type > atomic_llong; |
682 | #endif |
683 | typedef atomic< void* > atomic_address; |
684 | typedef atomic< bool > atomic_bool; |
685 | typedef atomic< wchar_t > atomic_wchar_t; |
686 | #if !defined(BOOST_NO_CXX11_CHAR16_T) |
687 | typedef atomic< char16_t > atomic_char16_t; |
688 | #endif |
689 | #if !defined(BOOST_NO_CXX11_CHAR32_T) |
690 | typedef atomic< char32_t > atomic_char32_t; |
691 | #endif |
692 | |
693 | typedef atomic< int_least8_t > atomic_int_least8_t; |
694 | typedef atomic< uint_least8_t > atomic_uint_least8_t; |
695 | typedef atomic< int_least16_t > atomic_int_least16_t; |
696 | typedef atomic< uint_least16_t > atomic_uint_least16_t; |
697 | typedef atomic< int_least32_t > atomic_int_least32_t; |
698 | typedef atomic< uint_least32_t > atomic_uint_least32_t; |
699 | typedef atomic< int_least64_t > atomic_int_least64_t; |
700 | typedef atomic< uint_least64_t > atomic_uint_least64_t; |
701 | typedef atomic< int_fast8_t > atomic_int_fast8_t; |
702 | typedef atomic< uint_fast8_t > atomic_uint_fast8_t; |
703 | typedef atomic< int_fast16_t > atomic_int_fast16_t; |
704 | typedef atomic< uint_fast16_t > atomic_uint_fast16_t; |
705 | typedef atomic< int_fast32_t > atomic_int_fast32_t; |
706 | typedef atomic< uint_fast32_t > atomic_uint_fast32_t; |
707 | typedef atomic< int_fast64_t > atomic_int_fast64_t; |
708 | typedef atomic< uint_fast64_t > atomic_uint_fast64_t; |
709 | typedef atomic< intmax_t > atomic_intmax_t; |
710 | typedef atomic< uintmax_t > atomic_uintmax_t; |
711 | |
712 | typedef atomic< std::size_t > atomic_size_t; |
713 | typedef atomic< std::ptrdiff_t > atomic_ptrdiff_t; |
714 | |
715 | #if defined(BOOST_HAS_INTPTR_T) |
716 | typedef atomic< intptr_t > atomic_intptr_t; |
717 | typedef atomic< uintptr_t > atomic_uintptr_t; |
718 | #endif |
719 | |
720 | } // namespace atomics |
721 | } // namespace boost |
722 | |
723 | #if defined(BOOST_MSVC) |
724 | #pragma warning(pop) |
725 | #endif |
726 | |
727 | #endif // BOOST_ATOMIC_DETAIL_ATOMIC_TEMPLATE_HPP_INCLUDED_ |
728 | |