1/*
2 * Copyright © 2007 Chris Wilson
3 * Copyright © 2009,2010 Red Hat, Inc.
4 * Copyright © 2011,2012 Google, Inc.
5 *
6 * This is part of HarfBuzz, a text shaping library.
7 *
8 * Permission is hereby granted, without written agreement and without
9 * license or royalty fees, to use, copy, modify, and distribute this
10 * software and its documentation for any purpose, provided that the
11 * above copyright notice and the following two paragraphs appear in
12 * all copies of this software.
13 *
14 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
15 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
16 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
17 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
18 * DAMAGE.
19 *
20 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
21 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
22 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
23 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
24 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
25 *
26 * Contributor(s):
27 * Chris Wilson <chris@chris-wilson.co.uk>
28 * Red Hat Author(s): Behdad Esfahbod
29 * Google Author(s): Behdad Esfahbod
30 */
31
32#ifndef HB_ATOMIC_HH
33#define HB_ATOMIC_HH
34
35#include "hb.hh"
36#include "hb-meta.hh"
37
38
39/*
40 * Atomic integers and pointers.
41 */
42
43
44/* We need external help for these */
45
46#if defined(hb_atomic_int_impl_add) \
47 && defined(hb_atomic_ptr_impl_get) \
48 && defined(hb_atomic_ptr_impl_cmpexch)
49
50/* Defined externally, i.e. in config.h. */
51
52
53#elif !defined(HB_NO_MT) && defined(__ATOMIC_ACQUIRE)
54
55/* C++11-style GCC primitives. */
56
/* Full two-way fence (legacy builtin; fine alongside the __atomic ops). */
#define _hb_memory_barrier() __sync_synchronize ()

/* Fetch-then-add: expands to the value *AI held BEFORE the addition,
 * with acquire+release ordering. */
#define hb_atomic_int_impl_add(AI, V) __atomic_fetch_add ((AI), (V), __ATOMIC_ACQ_REL)
#define hb_atomic_int_impl_set_relaxed(AI, V) __atomic_store_n ((AI), (V), __ATOMIC_RELAXED)
/* Ordered store: release on set... */
#define hb_atomic_int_impl_set(AI, V) __atomic_store_n ((AI), (V), __ATOMIC_RELEASE)
#define hb_atomic_int_impl_get_relaxed(AI) __atomic_load_n ((AI), __ATOMIC_RELAXED)
/* ...acquire on get, forming a release/acquire publish-subscribe pair. */
#define hb_atomic_int_impl_get(AI) __atomic_load_n ((AI), __ATOMIC_ACQUIRE)

/* Same three flavors for pointer-sized slots. */
#define hb_atomic_ptr_impl_set_relaxed(P, V) __atomic_store_n ((P), (V), __ATOMIC_RELAXED)
#define hb_atomic_ptr_impl_get_relaxed(P) __atomic_load_n ((P), __ATOMIC_RELAXED)
#define hb_atomic_ptr_impl_get(P) __atomic_load_n ((P), __ATOMIC_ACQUIRE)
static inline bool
_hb_atomic_ptr_impl_cmplexch (const void **P, const void *O_, const void *N)
{
  /* The builtin writes back the observed value into its "expected"
   * argument, so it needs a writable local rather than the parameter. */
  const void *expected = O_;
  return __atomic_compare_exchange_n ((void **) P,
				      (void **) &expected,
				      (void *) N,
				      true /* weak: may fail spuriously */,
				      __ATOMIC_ACQ_REL,
				      __ATOMIC_RELAXED);
}
#define hb_atomic_ptr_impl_cmpexch(P,O,N) _hb_atomic_ptr_impl_cmplexch ((const void **) (P), (O), (N))
75
76#elif !defined(HB_NO_MT) && __cplusplus >= 201103L
77
78/* C++11 atomics. */
79
80#include <atomic>
81
/* Full fence.  BUGFIX: this previously said std::memory_order_ack_rel,
 * which does not exist; the branch could never compile. */
#define _hb_memory_barrier() std::atomic_thread_fence(std::memory_order_acq_rel)
#define _hb_memory_r_barrier() std::atomic_thread_fence(std::memory_order_acquire)
#define _hb_memory_w_barrier() std::atomic_thread_fence(std::memory_order_release)

/* The raw int/pointer is reinterpreted as a std::atomic of the same width;
 * this relies on std::atomic<T> being layout-compatible with T, which holds
 * on the platforms HarfBuzz targets (lock-free int/pointer atomics).
 * fetch_add returns the PREVIOUS value. */
#define hb_atomic_int_impl_add(AI, V) (reinterpret_cast<std::atomic<int> *> (AI)->fetch_add ((V), std::memory_order_acq_rel))
#define hb_atomic_int_impl_set_relaxed(AI, V) (reinterpret_cast<std::atomic<int> *> (AI)->store ((V), std::memory_order_relaxed))
#define hb_atomic_int_impl_set(AI, V) (reinterpret_cast<std::atomic<int> *> (AI)->store ((V), std::memory_order_release))
#define hb_atomic_int_impl_get_relaxed(AI) (reinterpret_cast<std::atomic<int> const *> (AI)->load (std::memory_order_relaxed))
#define hb_atomic_int_impl_get(AI) (reinterpret_cast<std::atomic<int> const *> (AI)->load (std::memory_order_acquire))

/* Same flavors for pointer slots; release-set pairs with acquire-get. */
#define hb_atomic_ptr_impl_set_relaxed(P, V) (reinterpret_cast<std::atomic<void*> *> (P)->store ((V), std::memory_order_relaxed))
#define hb_atomic_ptr_impl_get_relaxed(P) (reinterpret_cast<std::atomic<void*> const *> (P)->load (std::memory_order_relaxed))
#define hb_atomic_ptr_impl_get(P) (reinterpret_cast<std::atomic<void*> *> (P)->load (std::memory_order_acquire))
static inline bool
_hb_atomic_ptr_impl_cmplexch (const void **P, const void *O_, const void *N)
{
  /* compare_exchange_weak wants a mutable "expected" lvalue that it can
   * update on failure; copy the by-value parameter into one. */
  const void *expected = O_;
  auto *slot = reinterpret_cast<std::atomic<const void *> *> (P);
  return slot->compare_exchange_weak (expected, N,
				      std::memory_order_acq_rel,
				      std::memory_order_relaxed);
}
#define hb_atomic_ptr_impl_cmpexch(P,O,N) _hb_atomic_ptr_impl_cmplexch ((const void **) (P), (O), (N))
102
103
104#elif !defined(HB_NO_MT) && defined(_WIN32)
105
106#include <windows.h>
107
/* Full fence for Windows.  Falls back to a dummy interlocked exchange
 * (itself a full barrier) where the MemoryBarrier macro is unavailable. */
static inline void _hb_memory_barrier ()
{
#if !defined(MemoryBarrier) && !defined(__MINGW32_VERSION)
  /* MinGW has a convoluted history of supporting MemoryBarrier. */
  LONG dummy = 0;
  InterlockedExchange (&dummy, 1);
#else
  MemoryBarrier ();
#endif
}
/* Self-referential define: makes the "#ifndef _hb_memory_barrier" checks at
 * the bottom of this file see the name as defined, while uses still resolve
 * to the function above (function-like macros do not expand recursively). */
#define _hb_memory_barrier() _hb_memory_barrier ()

/* InterlockedExchangeAdd returns the PREVIOUS value, matching the other backends. */
#define hb_atomic_int_impl_add(AI, V) InterlockedExchangeAdd ((LONG *) (AI), (V))
/* The (LONG *) cast above is only sound if LONG and int agree in size. */
static_assert ((sizeof (LONG) == sizeof (int)), "");

/* True iff *P was O and has been replaced with N. */
#define hb_atomic_ptr_impl_cmpexch(P,O,N) (InterlockedCompareExchangePointer ((P), (N), (O)) == (O))
124
125
126#elif !defined(HB_NO_MT) && defined(HAVE_INTEL_ATOMIC_PRIMITIVES)
127
/* Legacy GCC __sync primitives, for toolchains without __atomic support. */
#define _hb_memory_barrier() __sync_synchronize ()

/* Fetch-then-add: expands to the value *AI held BEFORE the addition. */
#define hb_atomic_int_impl_add(AI, V) __sync_fetch_and_add ((AI), (V))

/* Full-barrier CAS; true iff *P was O and is now N. */
#define hb_atomic_ptr_impl_cmpexch(P,O,N) __sync_bool_compare_and_swap ((P), (O), (N))
133
134
135#elif !defined(HB_NO_MT) && defined(HAVE_SOLARIS_ATOMIC_OPS)
136
137#include <atomic.h>
138#include <mbarrier.h>
139
#define _hb_memory_r_barrier() __machine_r_barrier ()
#define _hb_memory_w_barrier() __machine_w_barrier ()
#define _hb_memory_barrier() __machine_rw_barrier ()

/* Solaris atomic_add_int_nv returns the NEW value ("_nv"); subtract V to
 * report the old one like the other backends.  The explicit barriers give
 * the op release (before) / acquire (after) ordering. */
static inline int _hb_fetch_and_add (int *AI, int V)
{
  _hb_memory_w_barrier ();
  int result = atomic_add_int_nv ((uint_t *) AI, V) - V;
  _hb_memory_r_barrier ();
  return result;
}
/* atomic_cas_ptr returns the value observed in *P; the swap happened
 * exactly when that observed value equals O. */
static inline bool _hb_compare_and_swap_ptr (void **P, void *O, void *N)
{
  _hb_memory_w_barrier ();
  bool result = atomic_cas_ptr (P, O, N) == O;
  _hb_memory_r_barrier ();
  return result;
}

#define hb_atomic_int_impl_add(AI, V) _hb_fetch_and_add ((AI), (V))

#define hb_atomic_ptr_impl_cmpexch(P,O,N) _hb_compare_and_swap_ptr ((P), (O), (N))
162
163
164#elif !defined(HB_NO_MT) && defined(__APPLE__)
165
#include <libkern/OSAtomic.h>
/* NOTE(review): Apple's availability headers spell these
 * __MAC_OS_X_VERSION_MIN_REQUIRED / __IPHONE_OS_VERSION_MIN_REQUIRED; the
 * names tested below look truncated — verify against an Apple SDK.  If they
 * are wrong, the includes are skipped and the version test below evaluates
 * with undefined (== 0) macros, falling through to the arch-based CAS. */
#ifdef __MAC_OS_X_MIN_REQUIRED
#include <AvailabilityMacros.h>
#elif defined(__IPHONE_OS_MIN_REQUIRED)
#include <Availability.h>
#endif

#define _hb_memory_barrier() OSMemoryBarrier ()

/* OSAtomicAdd32Barrier returns the NEW value; subtract V to report the old one. */
#define hb_atomic_int_impl_add(AI, V) (OSAtomicAdd32Barrier ((V), (AI)) - (V))

/* Pointer-width CAS (per the 10.5 / iOS 2.1 check below); otherwise pick a
 * fixed-width CAS by architecture. */
#if (MAC_OS_X_VERSION_MIN_REQUIRED > MAC_OS_X_VERSION_10_4 || __IPHONE_VERSION_MIN_REQUIRED >= 20100)
#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwapPtrBarrier ((O), (N), (P))
#else
#if __ppc64__ || __x86_64__ || __aarch64__
#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwap64Barrier ((int64_t) (O), (int64_t) (N), (int64_t*) (P))
#else
#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwap32Barrier ((int32_t) (O), (int32_t) (N), (int32_t*) (P))
#endif
#endif
186
187
188#elif !defined(HB_NO_MT) && defined(_AIX) && (defined(__IBMCPP__) || defined(__ibmxl__))
189
190#include <builtins.h>
191
/* Lightweight sync is a full enough fence for these acq/rel brackets. */
#define _hb_memory_barrier() __lwsync ()

/* xlC __fetch_and_add returns the old value; bracket with lwsync for ordering. */
static inline int _hb_fetch_and_add (int *AI, int V)
{
  _hb_memory_barrier ();
  int result = __fetch_and_add (AI, V);
  _hb_memory_barrier ();
  return result;
}
/* __compare_and_swaplp reports whether the swap happened; it writes the
 * observed value back through its second argument, but O here is a by-value
 * copy so the caller never sees that update. */
static inline bool _hb_compare_and_swaplp (long *P, long O, long N)
{
  _hb_memory_barrier ();
  bool result = __compare_and_swaplp (P, &O, N);
  _hb_memory_barrier ();
  return result;
}

#define hb_atomic_int_impl_add(AI, V) _hb_fetch_and_add ((AI), (V))

/* Pointers are smuggled through long; the assert guarantees they fit. */
#define hb_atomic_ptr_impl_cmpexch(P,O,N) _hb_compare_and_swaplp ((long *) (P), (long) (O), (long) (N))
static_assert ((sizeof (long) == sizeof (void *)), "");
213
214
215#elif defined(HB_NO_MT)
216
/* Single-threaded build: plain, non-atomic operations with the same
 * result conventions as the real backends. */

/* Returns the value held BEFORE the addition. */
#define hb_atomic_int_impl_add(AI, V) ((*(AI) += (V)) - (V))

/* No threads, no ordering to enforce. */
#define _hb_memory_barrier() do {} while (0)

/* Non-atomic compare-and-set; true iff *P was O and is now N. */
#define hb_atomic_ptr_impl_cmpexch(P,O,N) (* (void **) (P) == (void *) (O) ? (* (void **) (P) = (void *) (N), true) : false)
222
223
224#else
225
226#error "Could not find any system to define atomic_int macros."
227#error "Check hb-atomic.hh for possible resolutions."
228
229#endif
230
231
/* Fill in whatever the selected backend above did not provide. */

/* Directional barriers default to the full barrier. */
#ifndef _hb_memory_r_barrier
#define _hb_memory_r_barrier() _hb_memory_barrier ()
#endif
#ifndef _hb_memory_w_barrier
#define _hb_memory_w_barrier() _hb_memory_barrier ()
#endif
/* Relaxed accessors degrade to plain loads/stores. */
#ifndef hb_atomic_int_impl_set_relaxed
#define hb_atomic_int_impl_set_relaxed(AI, V) (*(AI) = (V))
#endif
#ifndef hb_atomic_int_impl_get_relaxed
#define hb_atomic_int_impl_get_relaxed(AI) (*(AI))
#endif

#ifndef hb_atomic_ptr_impl_set_relaxed
#define hb_atomic_ptr_impl_set_relaxed(P, V) (*(P) = (V))
#endif
#ifndef hb_atomic_ptr_impl_get_relaxed
#define hb_atomic_ptr_impl_get_relaxed(P) (*(P))
#endif
/* Ordered accessors are built from a plain access plus the matching fence:
 * write barrier before a publishing store, read barrier after a consuming load. */
#ifndef hb_atomic_int_impl_set
inline void hb_atomic_int_impl_set (int *AI, int v) { _hb_memory_w_barrier (); *AI = v; }
#endif
#ifndef hb_atomic_int_impl_get
inline int hb_atomic_int_impl_get (const int *AI) { int v = *AI; _hb_memory_r_barrier (); return v; }
#endif
#ifndef hb_atomic_ptr_impl_get
inline void *hb_atomic_ptr_impl_get (void ** const P) { void *v = *P; _hb_memory_r_barrier (); return v; }
#endif
260
261
262#define HB_ATOMIC_INT_INIT(V) {V}
263struct hb_atomic_int_t
264{
265 void set_relaxed (int v_) { hb_atomic_int_impl_set_relaxed (&v, v_); }
266 void set (int v_) { hb_atomic_int_impl_set (&v, v_); }
267 int get_relaxed () const { return hb_atomic_int_impl_get_relaxed (&v); }
268 int get () const { return hb_atomic_int_impl_get (&v); }
269 int inc () { return hb_atomic_int_impl_add (&v, 1); }
270 int dec () { return hb_atomic_int_impl_add (&v, -1); }
271
272 int v;
273};
274
275
#define HB_ATOMIC_PTR_INIT(V) {V}
/* POD-style atomic pointer holder.  P is the pointer type; T its pointee. */
template <typename P>
struct hb_atomic_ptr_t
{
  typedef hb_remove_pointer<P> T;

  /* Relaxed initialization; not meant to race with concurrent access. */
  void init (T* v_ = nullptr) { set_relaxed (v_); }
  void set_relaxed (T* v_) { hb_atomic_ptr_impl_set_relaxed (&v, v_); }
  T *get_relaxed () const { return (T *) hb_atomic_ptr_impl_get_relaxed (&v); }
  /* Acquire load.  The (void **) cast also strips the const that this
   * const method places on &v. */
  T *get () const { return (T *) hb_atomic_ptr_impl_get ((void **) &v); }
  /* Compare-and-swap: true iff v was `old` and is now `new_`.  Declared
   * const although it mutates v (the cast drops const) — presumably so
   * lazily-initialized members of const objects can use it; verify intent. */
  bool cmpexch (const T *old, T *new_) const { return hb_atomic_ptr_impl_cmpexch ((void **) &v, (void *) old, (void *) new_); }

  /* Both conveniences go through the acquire-ordered get (). */
  T * operator -> () const { return get (); }
  template <typename C> operator C * () const { return get (); }

  T *v;
};
293
294
295#endif /* HB_ATOMIC_HH */
296