// shared_ptr atomic access -*- C++ -*-

// Copyright (C) 2014-2019 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file bits/shared_ptr_atomic.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{memory}
 */

#ifndef _SHARED_PTR_ATOMIC_H
#define _SHARED_PTR_ATOMIC_H 1

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @addtogroup pointer_abstractions
   * @{
   */

  struct _Sp_locker
  {
    _Sp_locker(const _Sp_locker&) = delete;
    _Sp_locker& operator=(const _Sp_locker&) = delete;

#ifdef __GTHREADS
    explicit
    _Sp_locker(const void*) noexcept;
    _Sp_locker(const void*, const void*) noexcept;
    ~_Sp_locker();

  private:
    unsigned char _M_key1;
    unsigned char _M_key2;
#else
    explicit _Sp_locker(const void*, const void* = nullptr) { }
#endif
  };
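
  // _Sp_locker serializes the shared_ptr atomic operations below when they
  // cannot be performed lock-free.  Its constructors and destructor are
  // defined in the compiled library, not in this header.  As a hedged sketch
  // of the intent (not the library's actual definition): each constructor
  // locks one or two mutexes picked from a small global pool based on the
  // address(es) it is given, recording the chosen pool slots in _M_key1 and
  // _M_key2 so the destructor can unlock the same mutexes again; when
  // __gthread_active_p() reports that no threads are active, no locking is
  // done at all.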

  /**
   * @brief  Report whether shared_ptr atomic operations are lock-free.
   * @param  __p A non-null pointer to a shared_ptr object.
   * @return True if atomic access to @c *__p is lock-free, false otherwise.
   * @{
   */
  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_is_lock_free(const __shared_ptr<_Tp, _Lp>* __p)
    {
#ifdef __GTHREADS
      return __gthread_active_p() == 0;
#else
      return true;
#endif
    }

  template<typename _Tp>
    inline bool
    atomic_is_lock_free(const shared_ptr<_Tp>* __p)
    { return std::atomic_is_lock_free<_Tp, __default_lock_policy>(__p); }

  // @}
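
  // A minimal usage sketch for the overloads above (illustrative only; the
  // variable names are made up).  With this implementation the answer does
  // not depend on _Tp: it is true only when the gthreads layer reports that
  // no threads are active, i.e. when locking would be unnecessary anyway.
  //
  //   std::shared_ptr<int> sp = std::make_shared<int>(1);
  //   const bool lock_free = std::atomic_is_lock_free(&sp);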

  /**
   * @brief  Atomic load for shared_ptr objects.
   * @param  __p A non-null pointer to a shared_ptr object.
   * @return @c *__p
   *
   * The memory order shall not be @c memory_order_release or
   * @c memory_order_acq_rel.
   * @{
   */
  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_load_explicit(const shared_ptr<_Tp>* __p, memory_order)
    {
      _Sp_locker __lock{__p};
      return *__p;
    }

  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_load(const shared_ptr<_Tp>* __p)
    { return std::atomic_load_explicit(__p, memory_order_seq_cst); }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_load_explicit(const __shared_ptr<_Tp, _Lp>* __p, memory_order)
    {
      _Sp_locker __lock{__p};
      return *__p;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_load(const __shared_ptr<_Tp, _Lp>* __p)
    { return std::atomic_load_explicit(__p, memory_order_seq_cst); }
  // @}
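
  // A minimal usage sketch (illustrative only; config and read_config are
  // hypothetical names).  atomic_load returns a copy of the shared_ptr, so
  // the caller holds its own reference even if another thread replaces the
  // pointed-to object immediately afterwards.  As the definitions above
  // show, the memory_order argument is accepted but the operation is
  // serialized through _Sp_locker.
  //
  //   std::shared_ptr<int> config;                // shared between threads
  //
  //   int read_config()
  //   {
  //     std::shared_ptr<int> local = std::atomic_load(&config);
  //     return local ? *local : 0;
  //   }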

  /**
   * @brief  Atomic store for shared_ptr objects.
   * @param  __p A non-null pointer to a shared_ptr object.
   * @param  __r The value to store.
   *
   * The memory order shall not be @c memory_order_acquire or
   * @c memory_order_acq_rel.
   * @{
   */
  template<typename _Tp>
    inline void
    atomic_store_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
                          memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r); // use swap so that **__p not destroyed while lock held
    }

  template<typename _Tp>
    inline void
    atomic_store(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
    { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }

  template<typename _Tp, _Lock_policy _Lp>
    inline void
    atomic_store_explicit(__shared_ptr<_Tp, _Lp>* __p,
                          __shared_ptr<_Tp, _Lp> __r,
                          memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r); // use swap so that **__p not destroyed while lock held
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline void
    atomic_store(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
    { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }
  // @}
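
  // A minimal usage sketch (illustrative only; config and publish are
  // hypothetical names).  The store swaps the new value into *__p under the
  // lock, so the previously stored object is destroyed only after the lock
  // has been released.
  //
  //   std::shared_ptr<int> config;                // shared between threads
  //
  //   void publish(int v)
  //   { std::atomic_store(&config, std::make_shared<int>(v)); }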

  /**
   * @brief  Atomic exchange for shared_ptr objects.
   * @param  __p A non-null pointer to a shared_ptr object.
   * @param  __r New value to store in @c *__p.
   * @return The original value of @c *__p
   * @{
   */
  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_exchange_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
                             memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r);
      return __r;
    }

  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_exchange(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
    {
      return std::atomic_exchange_explicit(__p, std::move(__r),
                                           memory_order_seq_cst);
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_exchange_explicit(__shared_ptr<_Tp, _Lp>* __p,
                             __shared_ptr<_Tp, _Lp> __r,
                             memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r);
      return __r;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_exchange(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
    {
      return std::atomic_exchange_explicit(__p, std::move(__r),
                                           memory_order_seq_cst);
    }
  // @}
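
  // A minimal usage sketch (illustrative only; current and replace_with are
  // hypothetical names).  atomic_exchange installs a new value and hands
  // back whatever *__p held before, all under one lock acquisition.
  //
  //   std::shared_ptr<int> current;               // shared between threads
  //
  //   std::shared_ptr<int> replace_with(std::shared_ptr<int> next)
  //   { return std::atomic_exchange(&current, std::move(next)); }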

  /**
   * @brief  Atomic compare-and-swap for shared_ptr objects.
   * @param  __p A non-null pointer to a shared_ptr object.
   * @param  __v A non-null pointer to a shared_ptr object holding the
   *             expected value.
   * @param  __w The value to store in @c *__p if @c *__p is equivalent
   *             to @c *__v.
   * @return True if @c *__p was equivalent to @c *__v, false otherwise.
   *
   * If the comparison fails, @c *__v is updated to the current value of
   * @c *__p.
   *
   * The memory order for failure shall not be @c memory_order_release or
   * @c memory_order_acq_rel, or stronger than the memory order for success.
   * @{
   */
  template<typename _Tp>
    bool
    atomic_compare_exchange_strong_explicit(shared_ptr<_Tp>* __p,
                                            shared_ptr<_Tp>* __v,
                                            shared_ptr<_Tp> __w,
                                            memory_order,
                                            memory_order)
    {
      shared_ptr<_Tp> __x; // goes out of scope after __lock
      _Sp_locker __lock{__p, __v};
      owner_less<shared_ptr<_Tp>> __less;
      if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
        {
          __x = std::move(*__p);
          *__p = std::move(__w);
          return true;
        }
      __x = std::move(*__v);
      *__v = *__p;
      return false;
    }

  template<typename _Tp>
    inline bool
    atomic_compare_exchange_strong(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
                                   shared_ptr<_Tp> __w)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  template<typename _Tp>
    inline bool
    atomic_compare_exchange_weak_explicit(shared_ptr<_Tp>* __p,
                                          shared_ptr<_Tp>* __v,
                                          shared_ptr<_Tp> __w,
                                          memory_order __success,
                                          memory_order __failure)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), __success, __failure);
    }

  template<typename _Tp>
    inline bool
    atomic_compare_exchange_weak(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
                                 shared_ptr<_Tp> __w)
    {
      return std::atomic_compare_exchange_weak_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  template<typename _Tp, _Lock_policy _Lp>
    bool
    atomic_compare_exchange_strong_explicit(__shared_ptr<_Tp, _Lp>* __p,
                                            __shared_ptr<_Tp, _Lp>* __v,
                                            __shared_ptr<_Tp, _Lp> __w,
                                            memory_order,
                                            memory_order)
    {
      __shared_ptr<_Tp, _Lp> __x; // goes out of scope after __lock
      _Sp_locker __lock{__p, __v};
      owner_less<__shared_ptr<_Tp, _Lp>> __less;
      if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
        {
          __x = std::move(*__p);
          *__p = std::move(__w);
          return true;
        }
      __x = std::move(*__v);
      *__v = *__p;
      return false;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_strong(__shared_ptr<_Tp, _Lp>* __p,
                                   __shared_ptr<_Tp, _Lp>* __v,
                                   __shared_ptr<_Tp, _Lp> __w)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_weak_explicit(__shared_ptr<_Tp, _Lp>* __p,
                                          __shared_ptr<_Tp, _Lp>* __v,
                                          __shared_ptr<_Tp, _Lp> __w,
                                          memory_order __success,
                                          memory_order __failure)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), __success, __failure);
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_weak(__shared_ptr<_Tp, _Lp>* __p,
                                 __shared_ptr<_Tp, _Lp>* __v,
                                 __shared_ptr<_Tp, _Lp> __w)
    {
      return std::atomic_compare_exchange_weak_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }
  // @}
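
  // A minimal usage sketch for the compare-exchange overloads above
  // (illustrative only; the Node type and push_front function are
  // hypothetical, not part of this header).  The usual pattern is a retry
  // loop: on failure *__v is updated with the value just observed in *__p,
  // so the next iteration starts from fresh data.
  //
  //   struct Node { int value; std::shared_ptr<Node> next; };
  //   std::shared_ptr<Node> head;                 // shared between threads
  //
  //   void push_front(std::shared_ptr<Node> n)
  //   {
  //     n->next = std::atomic_load(&head);
  //     while (!std::atomic_compare_exchange_weak(&head, &n->next, n))
  //       { }  // on failure n->next was refreshed with the current head
  //   }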

  // @} group pointer_abstractions

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // _SHARED_PTR_ATOMIC_H