/*
 * Copyright 2018-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16#pragma once
17
18#include <folly/Traits.h>
19
20#include <folly/synchronization/Hazptr-fwd.h>
21#include <folly/synchronization/HazptrDomain.h>
22#include <folly/synchronization/HazptrRec.h>
23#include <folly/synchronization/HazptrThrLocal.h>
24
25#include <folly/synchronization/AsymmetricMemoryBarrier.h>
26
27namespace folly {
28
///
/// Classes related to hazard pointer holders.
///
32
33/**
34 * hazptr_holder
35 *
36 * Class for automatic acquisition and release of hazard pointers,
37 * and interface for hazard pointer operations.
38 *
39 * Usage example:
40 * T* ptr;
41 * {
42 * hazptr_holder h;
43 * ptr = h.get_protected(src);
44 * // ... *ptr is protected ...
45 * h.reset();
46 * // ... *ptr is not protected ...
47 * ptr = src.load();
48 * while (!h.try_protect(ptr, src)) {}
49 * // ... *ptr is protected ...
50 * }
51 * // ... *ptr is not protected
52 */
53template <template <typename> class Atom>
54class hazptr_holder {
55 hazptr_rec<Atom>* hprec_;
56
57 public:
58 /** Constructor - automatically acquires a hazard pointer. */
59 FOLLY_ALWAYS_INLINE explicit hazptr_holder(
60 hazptr_domain<Atom>& domain = default_hazptr_domain<Atom>()) {
61#if FOLLY_HAZPTR_THR_LOCAL
62 if (LIKELY(&domain == &default_hazptr_domain<Atom>())) {
63 auto hprec = hazptr_tc_tls<Atom>().try_get();
64 if (LIKELY(hprec != nullptr)) {
65 hprec_ = hprec;
66 return;
67 }
68 }
69#endif
70 hprec_ = domain.hprec_acquire();
71 }
72
73 /** Empty constructor */
74 FOLLY_ALWAYS_INLINE explicit hazptr_holder(std::nullptr_t) noexcept
75 : hprec_(nullptr) {}
76
77 /** Move constructor */
78 FOLLY_ALWAYS_INLINE hazptr_holder(hazptr_holder&& rhs) noexcept {
79 hprec_ = rhs.hprec_;
80 rhs.hprec_ = nullptr;
81 }
82
83 hazptr_holder(const hazptr_holder&) = delete;
84 hazptr_holder& operator=(const hazptr_holder&) = delete;
85
86 /** Destructor */
87 FOLLY_ALWAYS_INLINE ~hazptr_holder() {
88 if (LIKELY(hprec_ != nullptr)) {
89 hprec_->reset_hazptr();
90 auto domain = hprec_->domain();
91#if FOLLY_HAZPTR_THR_LOCAL
92 if (LIKELY(domain == &default_hazptr_domain<Atom>())) {
93 if (LIKELY(hazptr_tc_tls<Atom>().try_put(hprec_))) {
94 return;
95 }
96 }
97#endif
98 domain->hprec_release(hprec_);
99 }
100 }
101
102 /** Move operator */
103 FOLLY_ALWAYS_INLINE hazptr_holder& operator=(hazptr_holder&& rhs) noexcept {
104 /* Self-move is a no-op. */
105 if (LIKELY(this != &rhs)) {
106 this->~hazptr_holder();
107 new (this) hazptr_holder(nullptr);
108 hprec_ = rhs.hprec_;
109 rhs.hprec_ = nullptr;
110 }
111 return *this;
112 }
113
114 /** Hazard pointer operations */
115
116 /** try_protect */
117 template <typename T>
118 FOLLY_ALWAYS_INLINE bool try_protect(T*& ptr, const Atom<T*>& src) noexcept {
119 return try_protect(ptr, src, [](T* t) { return t; });
120 }
121
122 template <typename T, typename Func>
123 FOLLY_ALWAYS_INLINE bool
124 try_protect(T*& ptr, const Atom<T*>& src, Func f) noexcept {
125 /* Filtering the protected pointer through function Func is useful
126 for stealing bits of the pointer word */
127 auto p = ptr;
128 reset(f(p));
129 /*** Full fence ***/ folly::asymmetricLightBarrier();
130 ptr = src.load(std::memory_order_acquire);
131 if (UNLIKELY(p != ptr)) {
132 reset();
133 return false;
134 }
135 return true;
136 }
137
138 /** get_protected */
139 template <typename T>
140 FOLLY_ALWAYS_INLINE T* get_protected(const Atom<T*>& src) noexcept {
141 return get_protected(src, [](T* t) { return t; });
142 }
143
144 template <typename T, typename Func>
145 FOLLY_ALWAYS_INLINE T* get_protected(const Atom<T*>& src, Func f) noexcept {
146 T* ptr = src.load(std::memory_order_relaxed);
147 while (!try_protect(ptr, src, f)) {
148 /* Keep trying */;
149 }
150 return ptr;
151 }
152
153 /** reset */
154 template <typename T>
155 FOLLY_ALWAYS_INLINE void reset(const T* ptr) noexcept {
156 auto p = static_cast<hazptr_obj<Atom>*>(const_cast<T*>(ptr));
157 DCHECK(hprec_); // UB if *this is empty
158 hprec_->reset_hazptr(p);
159 }
160
161 FOLLY_ALWAYS_INLINE void reset(std::nullptr_t = nullptr) noexcept {
162 DCHECK(hprec_); // UB if *this is empty
163 hprec_->reset_hazptr();
164 }
165
166 /* Swap ownership of hazard pointers between hazptr_holder-s. */
167 /* Note: The owned hazard pointers remain unmodified during the swap
168 * and continue to protect the respective objects that they were
169 * protecting before the swap, if any. */
170 FOLLY_ALWAYS_INLINE void swap(hazptr_holder<Atom>& rhs) noexcept {
171 std::swap(this->hprec_, rhs.hprec_);
172 }
173
174 /** Returns a pointer to the owned hazptr_rec */
175 FOLLY_ALWAYS_INLINE hazptr_rec<Atom>* hprec() const noexcept {
176 return hprec_;
177 }
178
179 /** Set the pointer to the owned hazptr_rec */
180 FOLLY_ALWAYS_INLINE void set_hprec(hazptr_rec<Atom>* hprec) noexcept {
181 hprec_ = hprec;
182 }
183}; // hazptr_holder
184
185/**
186 * Free function swap of hazptr_holder-s.
187 */
188template <template <typename> class Atom>
189FOLLY_ALWAYS_INLINE void swap(
190 hazptr_holder<Atom>& lhs,
191 hazptr_holder<Atom>& rhs) noexcept {
192 lhs.swap(rhs);
193}
194
195/**
196 * Type used by hazptr_array and hazptr_local.
197 */
198template <template <typename> class Atom>
199using aligned_hazptr_holder = aligned_storage_for_t<hazptr_holder<Atom>>;
200
201/**
202 * hazptr_array
203 *
204 * Optimized template for bulk construction and destruction of hazard
205 * pointers.
206 *
207 * WARNING: Do not move from or to individual hazptr_holder-s.
208 * Only move the whole hazptr_array.
209 *
210 * NOTE: It is allowed to swap an individual hazptr_holder that
211 * belongs to hazptr_array with (a) a hazptr_holder object, or (b) a
212 * hazptr_holder that is part of hazptr_array, under the conditions:
213 * (i) both hazptr_holder-s are either both empty or both nonempty
214 * and (ii) both belong to the same domain.
215 */
216template <uint8_t M, template <typename> class Atom>
217class hazptr_array {
218 static_assert(M > 0, "M must be a positive integer.");
219
220 aligned_hazptr_holder<Atom> raw_[M];
221 bool empty_{false};
222
223 public:
224 /** Constructor */
225 FOLLY_ALWAYS_INLINE hazptr_array() {
226 auto h = reinterpret_cast<hazptr_holder<Atom>*>(&raw_);
227#if FOLLY_HAZPTR_THR_LOCAL
228 static_assert(
229 M <= hazptr_tc<Atom>::capacity(),
230 "M must be within the thread cache capacity.");
231 auto& tc = hazptr_tc_tls<Atom>();
232 auto count = tc.count();
233 if (UNLIKELY(M > count)) {
234 tc.fill(M - count);
235 count = M;
236 }
237 uint8_t offset = count - M;
238 for (uint8_t i = 0; i < M; ++i) {
239 auto hprec = tc[offset + i].get();
240 DCHECK(hprec != nullptr);
241 new (&h[i]) hazptr_holder<Atom>(nullptr);
242 h[i].set_hprec(hprec);
243 }
244 tc.set_count(offset);
245#else
246 for (uint8_t i = 0; i < M; ++i) {
247 new (&h[i]) hazptr_holder<Atom>;
248 }
249#endif
250 }
251
252 /** Empty constructor */
253 FOLLY_ALWAYS_INLINE hazptr_array(std::nullptr_t) noexcept {
254 auto h = reinterpret_cast<hazptr_holder<Atom>*>(&raw_);
255 for (uint8_t i = 0; i < M; ++i) {
256 new (&h[i]) hazptr_holder<Atom>(nullptr);
257 }
258 empty_ = true;
259 }
260
261 /** Move constructor */
262 FOLLY_ALWAYS_INLINE hazptr_array(hazptr_array&& other) noexcept {
263 auto h = reinterpret_cast<hazptr_holder<Atom>*>(&raw_);
264 auto hother = reinterpret_cast<hazptr_holder<Atom>*>(&other.raw_);
265 for (uint8_t i = 0; i < M; ++i) {
266 new (&h[i]) hazptr_holder<Atom>(std::move(hother[i]));
267 }
268 empty_ = other.empty_;
269 other.empty_ = true;
270 }
271
272 hazptr_array(const hazptr_array&) = delete;
273 hazptr_array& operator=(const hazptr_array&) = delete;
274
275 /** Destructor */
276 FOLLY_ALWAYS_INLINE ~hazptr_array() {
277 if (empty_) {
278 return;
279 }
280 auto h = reinterpret_cast<hazptr_holder<Atom>*>(&raw_);
281#if FOLLY_HAZPTR_THR_LOCAL
282 auto& tc = hazptr_tc_tls<Atom>();
283 auto count = tc.count();
284 auto cap = hazptr_tc<Atom>::capacity();
285 if (UNLIKELY((M + count) > cap)) {
286 tc.evict((M + count) - cap);
287 count = cap - M;
288 }
289 for (uint8_t i = 0; i < M; ++i) {
290 h[i].reset();
291 tc[count + i].fill(h[i].hprec());
292 new (&h[i]) hazptr_holder<Atom>(nullptr);
293 }
294 tc.set_count(count + M);
295#else
296 for (uint8_t i = 0; i < M; ++i) {
297 h[i].~hazptr_holder();
298 }
299#endif
300 }
301
302 /** Move operator */
303 FOLLY_ALWAYS_INLINE hazptr_array& operator=(hazptr_array&& other) noexcept {
304 auto h = reinterpret_cast<hazptr_holder<Atom>*>(&raw_);
305 for (uint8_t i = 0; i < M; ++i) {
306 h[i] = std::move(other[i]);
307 }
308 empty_ = other.empty_;
309 other.empty_ = true;
310 return *this;
311 }
312
313 /** [] operator */
314 FOLLY_ALWAYS_INLINE hazptr_holder<Atom>& operator[](uint8_t i) noexcept {
315 auto h = reinterpret_cast<hazptr_holder<Atom>*>(&raw_);
316 DCHECK(i < M);
317 return h[i];
318 }
319}; // hazptr_array
320
321/**
322 * hazptr_local
323 *
324 * Optimized for construction and destruction of one or more
325 * hazptr_holder-s with local scope.
326 *
327 * WARNING 1: Do not move from or to individual hazptr_holder-s.
328 *
329 * WARNING 2: There can only be one hazptr_local active for the same
330 * thread at any time. This is not tracked and checked by the
331 * implementation (except in debug mode) because it would negate the
332 * performance gains of this class.
333 */
334template <uint8_t M, template <typename> class Atom>
335class hazptr_local {
336 static_assert(M > 0, "M must be a positive integer.");
337
338 aligned_hazptr_holder<Atom> raw_[M];
339
340 public:
341 /** Constructor */
342 FOLLY_ALWAYS_INLINE hazptr_local() {
343 auto h = reinterpret_cast<hazptr_holder<Atom>*>(&raw_);
344#if FOLLY_HAZPTR_THR_LOCAL
345 static_assert(
346 M <= hazptr_tc<Atom>::capacity(),
347 "M must be <= hazptr_tc::capacity().");
348 auto& tc = hazptr_tc_tls<Atom>();
349 auto count = tc.count();
350 if (UNLIKELY(M > count)) {
351 tc.fill(M - count);
352 }
353 if (kIsDebug) {
354 DCHECK(!tc.local());
355 tc.set_local(true);
356 }
357 for (uint8_t i = 0; i < M; ++i) {
358 auto hprec = tc[i].get();
359 DCHECK(hprec != nullptr);
360 new (&h[i]) hazptr_holder<Atom>(nullptr);
361 h[i].set_hprec(hprec);
362 }
363#else
364 for (uint8_t i = 0; i < M; ++i) {
365 new (&h[i]) hazptr_holder<Atom>;
366 }
367#endif
368 }
369
370 hazptr_local(const hazptr_local&) = delete;
371 hazptr_local& operator=(const hazptr_local&) = delete;
372 hazptr_local(hazptr_local&&) = delete;
373 hazptr_local& operator=(hazptr_local&&) = delete;
374
375 /** Destructor */
376 FOLLY_ALWAYS_INLINE ~hazptr_local() {
377 auto h = reinterpret_cast<hazptr_holder<Atom>*>(&raw_);
378#if FOLLY_HAZPTR_THR_LOCAL
379 if (kIsDebug) {
380 auto& tc = hazptr_tc_tls<Atom>();
381 DCHECK(tc.local());
382 tc.set_local(false);
383 }
384 for (uint8_t i = 0; i < M; ++i) {
385 h[i].reset();
386 }
387#else
388 for (uint8_t i = 0; i < M; ++i) {
389 h[i].~hazptr_holder();
390 }
391#endif
392 }
393
394 /** [] operator */
395 FOLLY_ALWAYS_INLINE hazptr_holder<Atom>& operator[](uint8_t i) noexcept {
396 auto h = reinterpret_cast<hazptr_holder<Atom>*>(&raw_);
397 DCHECK(i < M);
398 return h[i];
399 }
400}; // hazptr_local
401
402} // namespace folly
403