/*
 * Copyright © 2007 Chris Wilson
 * Copyright © 2009,2010 Red Hat, Inc.
 * Copyright © 2011,2012 Google, Inc.
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Contributor(s):
 *	Chris Wilson <chris@chris-wilson.co.uk>
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_ATOMIC_HH
#define HB_ATOMIC_HH

#include "hb.hh"


/*
 * Atomic integers and pointers.
 */


/* We need external help for these */
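/*
 * Every backend below must provide at least:
 *
 *   hb_atomic_int_impl_add(AI, V)      atomically adds V to *AI and returns
 *                                      the *old* value (fetch-and-add);
 *   hb_atomic_ptr_impl_get(P)          acquire/consume-ordered pointer load;
 *   hb_atomic_ptr_impl_cmpexch(P,O,N)  compare-and-swap: stores N into *P iff
 *                                      *P == O, returning true on success.
 *
 * Relaxed get/set variants and read/write barriers are optional; generic
 * fallbacks for them are defined near the end of this file.
 */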

#if defined(hb_atomic_int_impl_add) \
 && defined(hb_atomic_ptr_impl_get) \
 && defined(hb_atomic_ptr_impl_cmpexch)

/* Defined externally, i.e. in config.h. */


#elif !defined(HB_NO_MT) && defined(__ATOMIC_CONSUME)

/* C++11-style GCC primitives. */

#define hb_atomic_int_impl_add(AI, V) __atomic_fetch_add ((AI), (V), __ATOMIC_ACQ_REL)
#define hb_atomic_int_impl_set_relaxed(AI, V) __atomic_store_n ((AI), (V), __ATOMIC_RELAXED)
#define hb_atomic_int_impl_get_relaxed(AI) __atomic_load_n ((AI), __ATOMIC_RELAXED)

#define hb_atomic_ptr_impl_set_relaxed(P, V) __atomic_store_n ((P), (V), __ATOMIC_RELAXED)
#define hb_atomic_ptr_impl_get_relaxed(P) __atomic_load_n ((P), __ATOMIC_RELAXED)
#define hb_atomic_ptr_impl_get(P) __atomic_load_n ((P), __ATOMIC_CONSUME)
static inline bool
_hb_atomic_ptr_impl_cmpexch (const void **P, const void *O_, const void *N)
{
  const void *O = O_; // Need lvalue
  return __atomic_compare_exchange_n ((void **) P, (void **) &O, (void *) N, true, __ATOMIC_ACQ_REL, __ATOMIC_RELAXED);
}
#define hb_atomic_ptr_impl_cmpexch(P,O,N) _hb_atomic_ptr_impl_cmpexch ((const void **) (P), (O), (N))
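/* Note: the `true` argument to __atomic_compare_exchange_n selects the *weak*
 * compare-exchange, which is allowed to fail spuriously on some architectures;
 * callers are expected to retry in a loop. */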

#elif !defined(HB_NO_MT) && __cplusplus >= 201103L

/* C++11 atomics. */

#include <atomic>

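/* Note: this branch stores plain int / pointer values and reinterpret_casts
 * them to std::atomic<> on each access.  That assumes std::atomic<T> has the
 * same size and layout as T, which the standard does not guarantee but which
 * holds on the platforms HarfBuzz supports. */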
#define hb_atomic_int_impl_add(AI, V) (reinterpret_cast<std::atomic<int> *> (AI)->fetch_add ((V), std::memory_order_acq_rel))
#define hb_atomic_int_impl_set_relaxed(AI, V) (reinterpret_cast<std::atomic<int> *> (AI)->store ((V), std::memory_order_relaxed))
#define hb_atomic_int_impl_get_relaxed(AI) (reinterpret_cast<std::atomic<int> *> (AI)->load (std::memory_order_relaxed))

#define hb_atomic_ptr_impl_set_relaxed(P, V) (reinterpret_cast<std::atomic<void*> *> (P)->store ((V), std::memory_order_relaxed))
#define hb_atomic_ptr_impl_get_relaxed(P) (reinterpret_cast<std::atomic<void*> *> (P)->load (std::memory_order_relaxed))
#define hb_atomic_ptr_impl_get(P) (reinterpret_cast<std::atomic<void*> *> (P)->load (std::memory_order_consume))
static inline bool
_hb_atomic_ptr_impl_cmpexch (const void **P, const void *O_, const void *N)
{
  const void *O = O_; // Need lvalue
  return reinterpret_cast<std::atomic<const void*> *> (P)->compare_exchange_weak (O, N, std::memory_order_acq_rel, std::memory_order_relaxed);
}
#define hb_atomic_ptr_impl_cmpexch(P,O,N) _hb_atomic_ptr_impl_cmpexch ((const void **) (P), (O), (N))


#elif !defined(HB_NO_MT) && (defined(_WIN32) || defined(__CYGWIN__))

#include <windows.h>

static inline void _hb_memory_barrier (void)
{
#if !defined(MemoryBarrier)
  /* MinGW has a convoluted history of supporting MemoryBarrier. */
  LONG dummy = 0;
  InterlockedExchange (&dummy, 1);
#else
  MemoryBarrier ();
#endif
}
#define _hb_memory_barrier() _hb_memory_barrier ()

#define hb_atomic_int_impl_add(AI, V) InterlockedExchangeAdd ((LONG *) (AI), (V))
static_assert ((sizeof (LONG) == sizeof (int)), "");

#define hb_atomic_ptr_impl_cmpexch(P,O,N) (InterlockedCompareExchangePointer ((void **) (P), (void *) (N), (void *) (O)) == (void *) (O))
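/* No pointer load is defined here; this branch relies on the generic
 * hb_atomic_ptr_impl_get fallback near the end of this file, which pairs a
 * plain load with a read barrier.  InterlockedExchangeAdd and
 * InterlockedCompareExchangePointer are both full-barrier operations. */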


#elif !defined(HB_NO_MT) && defined(HAVE_INTEL_ATOMIC_PRIMITIVES)

#define _hb_memory_barrier() __sync_synchronize ()

#define hb_atomic_int_impl_add(AI, V) __sync_fetch_and_add ((AI), (V))

#define hb_atomic_ptr_impl_cmpexch(P,O,N) __sync_bool_compare_and_swap ((P), (O), (N))
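/* These are the legacy GCC __sync builtins (pre-GCC-4.7); each implies a full
 * memory barrier, so no explicit ordering arguments are needed. */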


#elif !defined(HB_NO_MT) && defined(HAVE_SOLARIS_ATOMIC_OPS)

#include <atomic.h>
#include <mbarrier.h>

#define _hb_memory_r_barrier() __machine_r_barrier ()
#define _hb_memory_w_barrier() __machine_w_barrier ()
#define _hb_memory_barrier() __machine_rw_barrier ()

static inline int _hb_fetch_and_add (int *AI, int V)
{
  _hb_memory_w_barrier ();
  /* atomic_add_int_nv returns the *new* value; subtract V to honor the
   * fetch-and-add (old-value) contract of hb_atomic_int_impl_add. */
  int result = atomic_add_int_nv ((uint_t *) AI, V) - V;
  _hb_memory_r_barrier ();
  return result;
}
static inline bool _hb_compare_and_swap_ptr (const void **P, const void *O, const void *N)
{
  _hb_memory_w_barrier ();
  bool result = atomic_cas_ptr ((void **) P, (void *) O, (void *) N) == (void *) O;
  _hb_memory_r_barrier ();
  return result;
}

#define hb_atomic_int_impl_add(AI, V) _hb_fetch_and_add ((AI), (V))

#define hb_atomic_ptr_impl_cmpexch(P,O,N) _hb_compare_and_swap_ptr ((const void **) (P), (O), (N))


#elif !defined(HB_NO_MT) && defined(__APPLE__)

#include <libkern/OSAtomic.h>
#ifdef __MAC_OS_X_MIN_REQUIRED
#include <AvailabilityMacros.h>
#elif defined(__IPHONE_OS_MIN_REQUIRED)
#include <Availability.h>
#endif

#define _hb_memory_barrier() OSMemoryBarrier ()

/* OSAtomicAdd32Barrier returns the new value; subtract V to return the old one. */
#define hb_atomic_int_impl_add(AI, V) (OSAtomicAdd32Barrier ((V), (AI)) - (V))

#if (MAC_OS_X_VERSION_MIN_REQUIRED > MAC_OS_X_VERSION_10_4 || __IPHONE_OS_VERSION_MIN_REQUIRED >= 20100)
#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwapPtrBarrier ((void *) (O), (void *) (N), (void **) (P))
#else
#if __ppc64__ || __x86_64__ || __aarch64__
#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwap64Barrier ((int64_t) (void *) (O), (int64_t) (void *) (N), (int64_t *) (P))
#else
#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwap32Barrier ((int32_t) (void *) (O), (int32_t) (void *) (N), (int32_t *) (P))
#endif
#endif
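/* Note: the OSAtomic API used above has been deprecated since macOS 10.12 in
 * favor of C11 <stdatomic.h>; it remains here for old Apple toolchains that
 * do not reach the C++11-style branches earlier in this file. */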


#elif !defined(HB_NO_MT) && defined(_AIX) && defined(__IBMCPP__)

#include <builtins.h>

#define _hb_memory_barrier() __lwsync ()

static inline int _hb_fetch_and_add (int *AI, int V)
{
  _hb_memory_barrier ();
  int result = __fetch_and_add (AI, V);
  _hb_memory_barrier ();
  return result;
}
static inline bool _hb_compare_and_swaplp (long *P, long O, long N)
{
  _hb_memory_barrier ();
  bool result = __compare_and_swaplp (P, &O, N);
  _hb_memory_barrier ();
  return result;
}

#define hb_atomic_int_impl_add(AI, V) _hb_fetch_and_add ((AI), (V))

#define hb_atomic_ptr_impl_cmpexch(P,O,N) _hb_compare_and_swaplp ((long *) (P), (long) (O), (long) (N))
static_assert ((sizeof (long) == sizeof (void *)), "");


#elif !defined(HB_NO_MT)

#define HB_ATOMIC_INT_NIL 1 /* Warn that fallback implementation is in use. */

#define _hb_memory_barrier()

#define hb_atomic_int_impl_add(AI, V) ((*(AI) += (V)) - (V))

#define hb_atomic_ptr_impl_cmpexch(P,O,N) (* (void **) (P) == (void *) (O) ? (* (void **) (P) = (void *) (N), true) : false)
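/* This fallback is *not* thread-safe: the read-modify-write sequences above
 * are plain C expressions with no atomicity or ordering guarantees.  It only
 * exists so the library still builds on platforms with no known primitives. */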


#else /* HB_NO_MT */

#define hb_atomic_int_impl_add(AI, V) ((*(AI) += (V)) - (V))

#define _hb_memory_barrier()

#define hb_atomic_ptr_impl_cmpexch(P,O,N) (* (void **) (P) == (void *) (O) ? (* (void **) (P) = (void *) (N), true) : false)


#endif


#ifndef _hb_memory_r_barrier
#define _hb_memory_r_barrier() _hb_memory_barrier ()
#endif
#ifndef _hb_memory_w_barrier
#define _hb_memory_w_barrier() _hb_memory_barrier ()
#endif
#ifndef hb_atomic_int_impl_set_relaxed
#define hb_atomic_int_impl_set_relaxed(AI, V) (*(AI) = (V))
#endif
#ifndef hb_atomic_int_impl_get_relaxed
#define hb_atomic_int_impl_get_relaxed(AI) (*(AI))
#endif

#ifndef hb_atomic_ptr_impl_set_relaxed
#define hb_atomic_ptr_impl_set_relaxed(P, V) (*(P) = (V))
#endif
#ifndef hb_atomic_ptr_impl_get_relaxed
#define hb_atomic_ptr_impl_get_relaxed(P) (*(P))
#endif
#ifndef hb_atomic_ptr_impl_get
static inline void *hb_atomic_ptr_impl_get (void **P) { void *v = *P; _hb_memory_r_barrier (); return v; }
#endif
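/* The generic pointer get above approximates an acquire load: a plain read
 * followed by a read barrier.  Backends that can do better (e.g. the GCC and
 * C++11 branches) define hb_atomic_ptr_impl_get themselves. */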


#define HB_ATOMIC_INT_INIT(V) {V}
struct hb_atomic_int_t
{
  inline void set_relaxed (int v_) const { hb_atomic_int_impl_set_relaxed (&v, v_); }
  inline int get_relaxed (void) const { return hb_atomic_int_impl_get_relaxed (&v); }
  inline int inc (void) { return hb_atomic_int_impl_add (&v, 1); }
  inline int dec (void) { return hb_atomic_int_impl_add (&v, -1); }

  mutable int v;
};
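/* Illustration only: since inc()/dec() return the value *before* the
 * addition, a typical reference-count pattern looks like the hypothetical
 * sketch below (names are not part of this header).
 *
 *   struct my_refcounted_t
 *   {
 *     hb_atomic_int_t ref_count;
 *
 *     void ref (void)   { ref_count.inc (); }
 *     bool unref (void) { return ref_count.dec () == 1; } // was 1, now 0: destroy
 *   };
 */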


/* Strips one level of pointer, so hb_atomic_ptr_t<T> and hb_atomic_ptr_t<T *>
 * both end up storing a T *. */
template <typename T> struct hb_remove_ptr_t { typedef T value; };
template <typename T> struct hb_remove_ptr_t<T *> { typedef T value; };

#define HB_ATOMIC_PTR_INIT(V) {V}
template <typename P>
struct hb_atomic_ptr_t
{
  typedef typename hb_remove_ptr_t<P>::value T;

  inline void init (T *v_ = nullptr) { set_relaxed (v_); }
  inline void set_relaxed (T *v_) const { hb_atomic_ptr_impl_set_relaxed (&v, v_); }
  inline T *get_relaxed (void) const { return hb_atomic_ptr_impl_get_relaxed (&v); }
  inline T *get (void) const { return (T *) hb_atomic_ptr_impl_get ((void **) &v); }
  inline bool cmpexch (const T *old, T *new_) const { return hb_atomic_ptr_impl_cmpexch (&v, old, new_); }

  mutable T *v;
};
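/* Illustration only: cmpexch() is typically used to publish a lazily created
 * object exactly once.  Names in this sketch are hypothetical; note the retry
 * loop, since the underlying compare-exchange may be weak and fail spuriously.
 *
 *   static hb_atomic_ptr_t<my_object_t> singleton;
 *
 *   static inline my_object_t *get_singleton (void)
 *   {
 *   retry:
 *     my_object_t *obj = singleton.get ();
 *     if (obj) return obj;
 *     obj = my_object_create ();
 *     if (!singleton.cmpexch (nullptr, obj))
 *     {
 *       my_object_destroy (obj); // Another thread won the race; use theirs.
 *       goto retry;
 *     }
 *     return obj;
 *   }
 */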


#endif /* HB_ATOMIC_HH */