1 | /* Copyright (c) 2013, Ben Noordhuis <info@bnoordhuis.nl> |
2 | * |
3 | * Permission to use, copy, modify, and/or distribute this software for any |
4 | * purpose with or without fee is hereby granted, provided that the above |
5 | * copyright notice and this permission notice appear in all copies. |
6 | * |
7 | * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES |
8 | * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF |
9 | * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR |
10 | * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES |
11 | * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN |
12 | * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF |
13 | * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. |
14 | */ |
15 | |
16 | #ifndef UV_ATOMIC_OPS_H_ |
17 | #define UV_ATOMIC_OPS_H_ |
18 | |
19 | #include "internal.h" /* UV_UNUSED */ |
20 | |
21 | #if defined(__SUNPRO_C) || defined(__SUNPRO_CC) |
22 | #include <atomic.h> |
23 | #endif |
24 | |
25 | UV_UNUSED(static int cmpxchgi(int* ptr, int oldval, int newval)); |
26 | UV_UNUSED(static void cpu_relax(void)); |
27 | |
28 | /* Prefer hand-rolled assembly over the gcc builtins because the latter also |
29 | * issue full memory barriers. |
30 | */ |
31 | UV_UNUSED(static int cmpxchgi(int* ptr, int oldval, int newval)) { |
32 | #if defined(__i386__) || defined(__x86_64__) |
33 | int out; |
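  /* cmpxchg compares EAX against the memory operand; the "0" constraint
   * places oldval in EAX and "=a" reads the previous value back out.  On a
   * match the instruction stores newval, otherwise it loads the current
   * value into EAX.  The lock prefix makes the read-modify-write atomic and
   * the "memory" clobber stops the compiler from reordering memory accesses
   * around it.
   */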
34 | __asm__ __volatile__ ("lock; cmpxchg %2, %1;" |
35 | : "=a" (out), "+m" (*(volatile int*) ptr) |
36 | : "r" (newval), "0" (oldval) |
37 | : "memory" ); |
38 | return out; |
39 | #elif defined(__MVS__) |
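  /* z/OS: __plo_CSST() wraps the PLO compare-and-swap-and-store instruction.
   * As used here, it returns nonzero when the compare matched and the swap
   * was performed (the old value is then oldval itself); otherwise op4
   * receives the value found in *ptr.
   */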
40 | unsigned int op4; |
41 | if (__plo_CSST(ptr, (unsigned int*) &oldval, newval, |
42 | (unsigned int*) ptr, *ptr, &op4)) |
43 | return oldval; |
44 | else |
45 | return op4; |
46 | #elif defined(__SUNPRO_C) || defined(__SUNPRO_CC) |
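  /* Solaris Studio: atomic_cas_uint() from <atomic.h> returns the value
   * that was in *ptr before the operation, same as the branches above.
   */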
47 | return atomic_cas_uint((uint_t *)ptr, (uint_t)oldval, (uint_t)newval); |
48 | #else |
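  /* Generic fallback: the gcc builtin returns the previous value of *ptr
   * and implies a full memory barrier.
   */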
49 | return __sync_val_compare_and_swap(ptr, oldval, newval); |
50 | #endif |
51 | } |
52 | |
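/* Pause briefly inside a spin-wait loop.  This hints to the CPU that we are
 * busy-waiting, which saves power and frees pipeline resources for the other
 * hardware thread on SMT cores.  On architectures without a suitable
 * instruction it expands to nothing, which is safe because the hint is
 * purely an optimization.
 */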
53 | UV_UNUSED(static void cpu_relax(void)) { |
54 | #if defined(__i386__) || defined(__x86_64__) |
55 | __asm__ __volatile__ ("rep; nop" ); /* a.k.a. PAUSE */ |
56 | #elif (defined(__arm__) && __ARM_ARCH >= 7) || defined(__aarch64__) |
57 | __asm__ volatile("yield" ); |
58 | #endif |
59 | } |
60 | |
61 | #endif /* UV_ATOMIC_OPS_H_ */ |
62 | |