1 | /* Copyright (c) 2013, Ben Noordhuis <info@bnoordhuis.nl> |
2 | * |
3 | * Permission to use, copy, modify, and/or distribute this software for any |
4 | * purpose with or without fee is hereby granted, provided that the above |
5 | * copyright notice and this permission notice appear in all copies. |
6 | * |
7 | * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES |
8 | * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF |
9 | * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR |
10 | * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES |
11 | * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN |
12 | * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF |
13 | * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. |
14 | */ |
15 | |
16 | #ifndef UV_SPINLOCK_H_ |
17 | #define UV_SPINLOCK_H_ |
18 | |
19 | #include "internal.h" /* ACCESS_ONCE, UV_UNUSED */ |
20 | #include "atomic-ops.h" |
21 | |
22 | #define UV_SPINLOCK_INITIALIZER { 0 } |
23 | |
typedef struct {
  int lock;  /* 0 when unlocked, non-zero while the lock is held. */
} uv_spinlock_t;
27 | |
28 | UV_UNUSED(static void uv_spinlock_init(uv_spinlock_t* spinlock)); |
29 | UV_UNUSED(static void uv_spinlock_lock(uv_spinlock_t* spinlock)); |
30 | UV_UNUSED(static void uv_spinlock_unlock(uv_spinlock_t* spinlock)); |
31 | UV_UNUSED(static int uv_spinlock_trylock(uv_spinlock_t* spinlock)); |
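
/* Usage sketch (illustrative only, not additional API): a counter protected
 * by a statically initialized spinlock.  The names counter_lock, counter and
 * increment are hypothetical.
 *
 *   static uv_spinlock_t counter_lock = UV_SPINLOCK_INITIALIZER;
 *   static unsigned int counter;
 *
 *   static void increment(void) {
 *     uv_spinlock_lock(&counter_lock);
 *     counter++;
 *     uv_spinlock_unlock(&counter_lock);
 *   }
 */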
32 | |
/* Put the lock in its initial, unlocked state. */
UV_UNUSED(static void uv_spinlock_init(uv_spinlock_t* spinlock)) {
  ACCESS_ONCE(int, spinlock->lock) = 0;
}
36 | |
/* Acquire the lock, spinning with cpu_relax() between attempts. */
UV_UNUSED(static void uv_spinlock_lock(uv_spinlock_t* spinlock)) {
  while (!uv_spinlock_trylock(spinlock)) cpu_relax();
}
40 | |
/* Release the lock by clearing the lock word. */
UV_UNUSED(static void uv_spinlock_unlock(uv_spinlock_t* spinlock)) {
  ACCESS_ONCE(int, spinlock->lock) = 0;
}
44 | |
/* Try to acquire the lock without spinning.  Returns non-zero if the lock
 * was acquired, zero if another thread already holds it.
 */
UV_UNUSED(static int uv_spinlock_trylock(uv_spinlock_t* spinlock)) {
  /* TODO(bnoordhuis) Maybe change to a ticket lock to guarantee fair queueing;
   * an illustrative sketch of that approach follows below.  Not really
   * critical until we have locks that are (frequently) contended for by
   * several threads.
   */
  return 0 == cmpxchgi(&spinlock->lock, 0, 1);
}
52 | |
53 | #endif /* UV_SPINLOCK_H_ */ |
54 | |