/*
    Copyright (c) 2007-2016 Contributors as noted in the AUTHORS file

    This file is part of libzmq, the ZeroMQ core engine in C++.

    libzmq is free software; you can redistribute it and/or modify it under
    the terms of the GNU Lesser General Public License (LGPL) as published
    by the Free Software Foundation; either version 3 of the License, or
    (at your option) any later version.

    As a special exception, the Contributors give you permission to link
    this library with independent modules to produce an executable,
    regardless of the license terms of these independent modules, and to
    copy and distribute the resulting executable under terms of your choice,
    provided that you also meet, for each linked independent module, the
    terms and conditions of the license of that module. An independent
    module is a module which is not derived from or based on this library.
    If you modify this library, you must extend this exception to your
    version of the library.

    libzmq is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
    License for more details.

    You should have received a copy of the GNU Lesser General Public License
    along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

#ifndef __ZMQ_ATOMIC_PTR_HPP_INCLUDED__
#define __ZMQ_ATOMIC_PTR_HPP_INCLUDED__

#include "macros.hpp"

#if defined ZMQ_FORCE_MUTEXES
#define ZMQ_ATOMIC_PTR_MUTEX
#elif (defined __cplusplus && __cplusplus >= 201103L)                         \
  || (defined _MSC_VER && _MSC_VER >= 1900)
#define ZMQ_ATOMIC_PTR_CXX11
#elif defined ZMQ_HAVE_ATOMIC_INTRINSICS
#define ZMQ_ATOMIC_PTR_INTRINSIC
#elif (defined __i386__ || defined __x86_64__) && defined __GNUC__
#define ZMQ_ATOMIC_PTR_X86
#elif defined __ARM_ARCH_7A__ && defined __GNUC__
#define ZMQ_ATOMIC_PTR_ARM
#elif defined __tile__
#define ZMQ_ATOMIC_PTR_TILE
#elif defined ZMQ_HAVE_WINDOWS
#define ZMQ_ATOMIC_PTR_WINDOWS
#elif (defined ZMQ_HAVE_SOLARIS || defined ZMQ_HAVE_NETBSD                    \
       || defined ZMQ_HAVE_GNU)
#define ZMQ_ATOMIC_PTR_ATOMIC_H
#else
#define ZMQ_ATOMIC_PTR_MUTEX
#endif

#if defined ZMQ_ATOMIC_PTR_MUTEX
#include "mutex.hpp"
#elif defined ZMQ_ATOMIC_PTR_CXX11
#include <atomic>
#elif defined ZMQ_ATOMIC_PTR_WINDOWS
#include "windows.hpp"
#elif defined ZMQ_ATOMIC_PTR_ATOMIC_H
#include <atomic.h>
#elif defined ZMQ_ATOMIC_PTR_TILE
#include <arch/atomic.h>
#endif

namespace zmq
{
#if !defined ZMQ_ATOMIC_PTR_CXX11
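// Atomically exchange the pointer at '*ptr_' for 'val_' and return the
// previous value. With the mutex-based fallback the caller must also pass
// the mutex that guards the pointer.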
inline void *atomic_xchg_ptr (void **ptr_,
                              void *const val_
#if defined ZMQ_ATOMIC_PTR_MUTEX
                              ,
                              mutex_t &_sync
#endif
                              ) ZMQ_NOEXCEPT
{
#if defined ZMQ_ATOMIC_PTR_WINDOWS
    return InterlockedExchangePointer ((PVOID *) ptr_, val_);
#elif defined ZMQ_ATOMIC_PTR_INTRINSIC
    return __atomic_exchange_n (ptr_, val_, __ATOMIC_ACQ_REL);
#elif defined ZMQ_ATOMIC_PTR_ATOMIC_H
    return atomic_swap_ptr (ptr_, val_);
#elif defined ZMQ_ATOMIC_PTR_TILE
    return arch_atomic_exchange (ptr_, val_);
#elif defined ZMQ_ATOMIC_PTR_X86
    void *old;
    __asm__ volatile("lock; xchg %0, %2"
                     : "=r"(old), "=m"(*ptr_)
                     : "m"(*ptr_), "0"(val_));
    return old;
#elif defined ZMQ_ATOMIC_PTR_ARM
    void *old;
    unsigned int flag;
    __asm__ volatile(" dmb sy\n\t"
                     "1: ldrex %1, [%3]\n\t"
                     " strex %0, %4, [%3]\n\t"
                     " teq %0, #0\n\t"
                     " bne 1b\n\t"
                     " dmb sy\n\t"
                     : "=&r"(flag), "=&r"(old), "+Qo"(*ptr_)
                     : "r"(ptr_), "r"(val_)
                     : "cc");
    return old;
#elif defined ZMQ_ATOMIC_PTR_MUTEX
    _sync.lock ();
    void *old = *ptr_;
    *ptr_ = val_;
    _sync.unlock ();
    return old;
#else
#error atomic_ptr is not implemented for this platform
#endif
}

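// Atomically compare the pointer at '*ptr_' with 'cmp_' and, only if they
// are equal, replace it with 'val_'. The previous value is returned in
// either case, so the caller can detect success by comparing the result
// with 'cmp_'.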
inline void *atomic_cas (void *volatile *ptr_,
                         void *cmp_,
                         void *val_
#if defined ZMQ_ATOMIC_PTR_MUTEX
                         ,
                         mutex_t &_sync
#endif
                         ) ZMQ_NOEXCEPT
{
#if defined ZMQ_ATOMIC_PTR_WINDOWS
    return InterlockedCompareExchangePointer ((volatile PVOID *) ptr_, val_,
                                              cmp_);
#elif defined ZMQ_ATOMIC_PTR_INTRINSIC
    void *old = cmp_;
    __atomic_compare_exchange_n (ptr_, &old, val_, false, __ATOMIC_RELEASE,
                                 __ATOMIC_ACQUIRE);
    return old;
#elif defined ZMQ_ATOMIC_PTR_ATOMIC_H
    return atomic_cas_ptr (ptr_, cmp_, val_);
#elif defined ZMQ_ATOMIC_PTR_TILE
    return arch_atomic_val_compare_and_exchange (ptr_, cmp_, val_);
#elif defined ZMQ_ATOMIC_PTR_X86
    void *old;
    __asm__ volatile("lock; cmpxchg %2, %3"
                     : "=a"(old), "=m"(*ptr_)
                     : "r"(val_), "m"(*ptr_), "0"(cmp_)
                     : "cc");
    return old;
#elif defined ZMQ_ATOMIC_PTR_ARM
    void *old;
    unsigned int flag;
    __asm__ volatile(" dmb sy\n\t"
                     "1: ldrex %1, [%3]\n\t"
                     " mov %0, #0\n\t"
                     " teq %1, %4\n\t"
                     " it eq\n\t"
                     " strexeq %0, %5, [%3]\n\t"
                     " teq %0, #0\n\t"
                     " bne 1b\n\t"
                     " dmb sy\n\t"
                     : "=&r"(flag), "=&r"(old), "+Qo"(*ptr_)
                     : "r"(ptr_), "r"(cmp_), "r"(val_)
                     : "cc");
    return old;
#elif defined ZMQ_ATOMIC_PTR_MUTEX
    _sync.lock ();
    void *old = *ptr_;
    if (*ptr_ == cmp_)
        *ptr_ = val_;
    _sync.unlock ();
    return old;
#else
#error atomic_ptr is not implemented for this platform
#endif
}
#endif

// This class encapsulates several atomic operations on pointers.

template <typename T> class atomic_ptr_t
{
  public:
    // Initialise atomic pointer
    inline atomic_ptr_t () ZMQ_NOEXCEPT { _ptr = NULL; }

    // Set value of atomic pointer in a non-threadsafe way
    // Use this function only when you are sure that at most one
    // thread is accessing the pointer at the moment.
    inline void set (T *ptr_) ZMQ_NOEXCEPT { _ptr = ptr_; }

    // Perform atomic 'exchange pointers' operation. Pointer is set
    // to the 'val_' value. Old value is returned.
    inline T *xchg (T *val_) ZMQ_NOEXCEPT
    {
#if defined ZMQ_ATOMIC_PTR_CXX11
        return _ptr.exchange (val_, std::memory_order_acq_rel);
#else
        return (T *) atomic_xchg_ptr ((void **) &_ptr, val_
#if defined ZMQ_ATOMIC_PTR_MUTEX
                                      ,
                                      _sync
#endif
        );
#endif
    }

    // Perform atomic 'compare and swap' operation on the pointer.
    // The pointer is compared to the 'cmp_' argument and if they are
    // equal, its value is set to 'val_'. Old value of the pointer
    // is returned.
    inline T *cas (T *cmp_, T *val_) ZMQ_NOEXCEPT
    {
#if defined ZMQ_ATOMIC_PTR_CXX11
        _ptr.compare_exchange_strong (cmp_, val_, std::memory_order_acq_rel);
        return cmp_;
#else
        return (T *) atomic_cas ((void **) &_ptr, cmp_, val_
#if defined ZMQ_ATOMIC_PTR_MUTEX
                                 ,
                                 _sync
#endif
        );
#endif
    }

  private:
#if defined ZMQ_ATOMIC_PTR_CXX11
    std::atomic<T *> _ptr;
#else
    volatile T *_ptr;
#endif

#if defined ZMQ_ATOMIC_PTR_MUTEX
    mutex_t _sync;
#endif

#if !defined ZMQ_ATOMIC_PTR_CXX11
    ZMQ_NON_COPYABLE_NOR_MOVABLE (atomic_ptr_t)
#endif
};

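// Illustrative usage sketch (not part of this header; 'node_t' and the
// local variables are hypothetical placeholders):
//
//     zmq::atomic_ptr_t<node_t> slot;
//     slot.set (initial);              //  single-threaded initialisation
//     node_t *prev = slot.xchg (next); //  atomic swap, returns old pointer
//     if (slot.cas (expected, desired) == expected) {
//         //  the pointer held 'expected' and was replaced by 'desired'
//     }

// Lightweight atomic integer built on the same primitives as atomic_ptr_t:
// store() publishes a value, load() reads it back.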
struct atomic_value_t
{
    atomic_value_t (const int value_) ZMQ_NOEXCEPT : _value (value_) {}

    atomic_value_t (const atomic_value_t &src_) ZMQ_NOEXCEPT
        : _value (src_.load ())
    {
    }

    void store (const int value_) ZMQ_NOEXCEPT
    {
#if defined ZMQ_ATOMIC_PTR_CXX11
        _value.store (value_, std::memory_order_release);
#else
        atomic_xchg_ptr ((void **) &_value, (void *) (ptrdiff_t) value_
#if defined ZMQ_ATOMIC_PTR_MUTEX
                         ,
                         _sync
#endif
        );
#endif
    }

    int load () const ZMQ_NOEXCEPT
    {
#if defined ZMQ_ATOMIC_PTR_CXX11
        return _value.load (std::memory_order_acquire);
#else
        return (int) (ptrdiff_t) atomic_cas ((void **) &_value, 0, 0
#if defined ZMQ_ATOMIC_PTR_MUTEX
                                             ,
#if defined __SUNPRO_CC
                                             const_cast<mutex_t &> (_sync)
#else
                                             _sync
#endif
#endif
        );
#endif
    }

  private:
#if defined ZMQ_ATOMIC_PTR_CXX11
    std::atomic<int> _value;
#else
    volatile ptrdiff_t _value;
#endif

#if defined ZMQ_ATOMIC_PTR_MUTEX
    mutable mutex_t _sync;
#endif

  private:
    atomic_value_t &operator= (const atomic_value_t &src_);
};
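
// Illustrative usage sketch (hypothetical caller, not part of this header):
//
//     zmq::atomic_value_t ready (0);
//     ready.store (1);                 //  writer publishes the new value
//     const int seen = ready.load ();  //  reader observes 0 or 1, never a
//                                      //  torn or stale-register value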
}

// Remove macros local to this file.
#undef ZMQ_ATOMIC_PTR_MUTEX
#undef ZMQ_ATOMIC_PTR_INTRINSIC
#undef ZMQ_ATOMIC_PTR_CXX11
#undef ZMQ_ATOMIC_PTR_X86
#undef ZMQ_ATOMIC_PTR_ARM
#undef ZMQ_ATOMIC_PTR_TILE
#undef ZMQ_ATOMIC_PTR_WINDOWS
#undef ZMQ_ATOMIC_PTR_ATOMIC_H

#endif