/*
 * Copyright 2018-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#pragma once

#include <folly/synchronization/Hazptr-fwd.h>
#include <folly/synchronization/detail/HazptrUtils.h>

#include <folly/CPortability.h>
#include <folly/Portability.h>
#include <folly/concurrency/CacheLocality.h>

#include <glog/logging.h>

#include <atomic>
#include <memory>

///
/// Classes related to objects protected by hazard pointers.
///

namespace folly {

/**
 * hazptr_obj
 *
 * Private base class for objects protected by hazard pointers.
 *
 * Data members:
 * - next_: link to the next object in private singly linked lists.
 * - reclaim_: reclamation function for this object.
 * - batch_tag_: a pointer to a batch (a linked list where the object
 *   is to be pushed when retired). Its lowest bit doubles as a tag;
 *   see details below.
 *
 * Batches, Tags, Tagged Objects, and Untagged Objects:
 *
 * - Batches: Batches (instances of hazptr_obj_batch) are sets of
 *   retired hazptr_obj-s. Batches are used to keep related objects
 *   together instead of being spread across thread-local structures
 *   and/or mixed with unrelated objects.
 *
 * - Tags: A tag is a unique identifier used for fast identification
 *   of related objects. Tags are implemented as the addresses of
 *   batches with the lowest bit set (both to save the space of
 *   separate batch and tag data members and to distinguish tagged
 *   objects from batches of untagged objects).
 *
 * - Tagged objects: Objects are tagged for fast identification. The
 *   primary use case is guaranteeing the destruction of all objects
 *   with a certain tag (e.g., the destruction of all Key and Value
 *   objects that were part of a Folly ConcurrentHashMap
 *   instance). The member function set_batch_tag makes an object
 *   tagged.
 *
 * - Untagged objects: Objects that do not need to be identified
 *   separately from unrelated objects are not tagged (to keep tagged
 *   objects uncluttered). Untagged objects may or may not be
 *   associated with batches. An example of untagged objects
 *   associated with batches is the Segment-s of Folly
 *   UnboundedQueue. Although such objects do not need to be tagged,
 *   keeping them in batches helps avoid cases where a few missing
 *   objects delay the reclamation of large numbers of link-counted
 *   objects. Objects are untagged either by default or after calling
 *   set_batch_no_tag.
 *
 * - Thread Safety: The member functions set_batch_tag and
 *   set_batch_no_tag are not thread-safe. Thread safety must be
 *   ensured by the calling thread.
 */
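/*
 * For illustration, a minimal sketch of tagged retirement. The type
 * MyNode and the owning structure are hypothetical, not part of this
 * header; users such as ConcurrentHashMap follow the same pattern:
 *
 *   struct MyNode : hazptr_obj_base<MyNode> {};
 *
 *   hazptr_obj_batch<std::atomic> batch; // one batch per owning structure
 *   auto* node = new MyNode;
 *   node->set_batch_tag(&batch); // tag the object with this batch
 *   // ... later, after node is unlinked from the structure ...
 *   node->retire(); // retired into the tagged batch
 */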
template <template <typename> class Atom>
class hazptr_obj {
  using ReclaimFnPtr = void (*)(hazptr_obj<Atom>*, hazptr_obj_list<Atom>&);

  template <template <typename> class>
  friend class hazptr_domain;
  template <typename, template <typename> class, typename>
  friend class hazptr_obj_base;
  template <typename, template <typename> class, typename>
  friend class hazptr_obj_base_linked;
  template <template <typename> class>
  friend class hazptr_obj_list;
  template <template <typename> class>
  friend class hazptr_priv;
  friend class hazptr_detail::linked_list<hazptr_obj<Atom>>;
  friend class hazptr_detail::shared_head_only_list<hazptr_obj<Atom>, Atom>;
  friend class hazptr_detail::shared_head_tail_list<hazptr_obj<Atom>, Atom>;

  static constexpr uintptr_t kTagBit = 1u;

  ReclaimFnPtr reclaim_;
  hazptr_obj<Atom>* next_;
  uintptr_t batch_tag_;

 public:
  /** Constructors */
  /* All constructors set next_ to this in order to catch misuse bugs
     such as double retire. By default, objects are untagged and not
     associated with a batch. */

  hazptr_obj() noexcept : next_(this), batch_tag_(0) {}

  hazptr_obj(const hazptr_obj<Atom>& o) noexcept
      : next_(this), batch_tag_(o.batch_tag_) {}

  hazptr_obj(hazptr_obj<Atom>&& o) noexcept
      : next_(this), batch_tag_(o.batch_tag_) {}

  /** Copy operator */
  hazptr_obj<Atom>& operator=(const hazptr_obj<Atom>&) noexcept {
    return *this;
  }

  /** Move operator */
  hazptr_obj<Atom>& operator=(hazptr_obj<Atom>&&) noexcept {
    return *this;
  }

  /** batch_tag */
  uintptr_t batch_tag() {
    return batch_tag_;
  }

  /** batch */
  hazptr_obj_batch<Atom>* batch() {
    uintptr_t btag = batch_tag_;
    btag -= btag & kTagBit; // clear the low tag bit to recover the address
    return reinterpret_cast<hazptr_obj_batch<Atom>*>(btag);
  }

  /** set_batch_tag: Set batch and make object tagged. */
  void set_batch_tag(hazptr_obj_batch<Atom>* batch) {
    batch_tag_ = reinterpret_cast<uintptr_t>(batch) + kTagBit;
  }

  /** set_batch_no_tag: Set batch and make object untagged. */
  void set_batch_no_tag(hazptr_obj_batch<Atom>* batch) {
    batch_tag_ = reinterpret_cast<uintptr_t>(batch);
  }

 private:
  friend class hazptr_domain<Atom>;
  template <typename, template <typename> class, typename>
  friend class hazptr_obj_base;
  template <typename, template <typename> class, typename>
  friend class hazptr_obj_base_refcounted;
  friend class hazptr_obj_batch<Atom>;
  friend class hazptr_priv<Atom>;

  hazptr_obj<Atom>* next() const noexcept {
    return next_;
  }

  void set_next(hazptr_obj* obj) noexcept {
    next_ = obj;
  }

  ReclaimFnPtr reclaim() noexcept {
    return reclaim_;
  }

  const void* raw_ptr() const {
    return this;
  }

  void pre_retire_check() noexcept {
    // Only for catching misuse bugs like double retire
    if (next_ != this) {
      pre_retire_check_fail();
    }
  }

  void push_obj(hazptr_domain<Atom>& domain) {
    auto b = batch();
    if (b) {
      b->push_obj(this, domain);
    } else {
      push_to_retired(domain);
    }
  }

  void push_to_retired(hazptr_domain<Atom>& domain) {
#if FOLLY_HAZPTR_THR_LOCAL
    if (&domain == &default_hazptr_domain<Atom>() && !domain.shutdown_) {
      hazptr_priv_tls<Atom>().push(this);
      return;
    }
#endif
    hazptr_obj_list<Atom> l(this);
    hazptr_domain_push_retired(l, true, domain);
  }

  FOLLY_NOINLINE void pre_retire_check_fail() noexcept {
    // Out-of-line slow path; reached only when next_ != this, so the
    // CHECK_EQ below fails and crashes with a diagnostic message.
    CHECK_EQ(next_, this);
  }
}; // hazptr_obj

/**
 * hazptr_obj_list
 *
 * List of hazptr_obj-s.
 */
template <template <typename> class Atom>
class hazptr_obj_list {
  using Obj = hazptr_obj<Atom>;
  using List = hazptr_detail::linked_list<Obj>;

  List l_;
  int count_;

 public:
  hazptr_obj_list() noexcept : l_(nullptr, nullptr), count_(0) {}

  explicit hazptr_obj_list(Obj* obj) noexcept : l_(obj, obj), count_(1) {
    obj->set_next(nullptr);
  }

  explicit hazptr_obj_list(Obj* head, Obj* tail, int count) noexcept
      : l_(head, tail), count_(count) {}

  Obj* head() const noexcept {
    return l_.head();
  }

  Obj* tail() const noexcept {
    return l_.tail();
  }

  int count() const noexcept {
    return count_;
  }

  void set_count(int val) {
    count_ = val;
  }

  bool empty() const noexcept {
    return head() == nullptr;
  }

  void push(Obj* obj) {
    l_.push(obj);
    ++count_;
  }

  void splice(hazptr_obj_list<Atom>& l) {
    if (l.count() == 0) {
      return;
    }
    l_.splice(l.l_);
    count_ += l.count();
    l.clear();
  }

  void clear() {
    l_.clear();
    count_ = 0;
  }
}; // hazptr_obj_list
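/*
 * A minimal sketch of how hazptr_obj_list composes (internal API;
 * obj1 and obj2 stand for pointers to objects derived from
 * hazptr_obj<std::atomic>):
 *
 *   hazptr_obj_list<std::atomic> a(obj1); // singleton list
 *   hazptr_obj_list<std::atomic> b(obj2);
 *   a.splice(b); // a now holds both objects and b is cleared
 *   assert(a.count() == 2 && b.empty());
 */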

/**
 * hazptr_obj_batch
 *
 * List of retired objects. For objects to be retired to a batch,
 * one of the hazptr_obj member functions set_batch_tag or
 * set_batch_no_tag needs to be called before the object is retired.
 *
 * See the description of hazptr_obj for notes on batches, tags, and
 * tagged and untagged objects.
 */
template <template <typename> class Atom>
class hazptr_obj_batch {
  using Obj = hazptr_obj<Atom>;
  using List = hazptr_detail::linked_list<Obj>;
  using SharedList = hazptr_detail::shared_head_tail_list<Obj, Atom>;

  static constexpr int kThreshold = 20;

  SharedList l_;
  Atom<int> count_;
  bool active_;

 public:
  /** Constructor */
  hazptr_obj_batch() noexcept : l_(), count_(0), active_(true) {}

  /** Not copyable or movable */
  hazptr_obj_batch(const hazptr_obj_batch& o) = delete;
  hazptr_obj_batch(hazptr_obj_batch&& o) = delete;
  hazptr_obj_batch& operator=(const hazptr_obj_batch& o) = delete;
  hazptr_obj_batch& operator=(hazptr_obj_batch&& o) = delete;

  /** Destructor */
  ~hazptr_obj_batch() {
    DCHECK(!active_);
    DCHECK(l_.empty());
  }

  /** shutdown_and_reclaim */
  void shutdown_and_reclaim() {
    DCHECK(active_);
    active_ = false;
    if (!l_.empty()) {
      List l = l_.pop_all();
      clear_count();
      Obj* obj = l.head();
      reclaim_list(obj);
    }
  }

 private:
  friend class hazptr_obj<Atom>;

  int count() const noexcept {
    return count_.load(std::memory_order_acquire);
  }

  void clear_count() noexcept {
    count_.store(0, std::memory_order_release);
  }

  void inc_count() noexcept {
    count_.fetch_add(1, std::memory_order_release);
  }

  bool cas_count(int& expected, int newval) noexcept {
    return count_.compare_exchange_weak(
        expected, newval, std::memory_order_acq_rel, std::memory_order_acquire);
  }

  /** push_obj */
  void push_obj(
      Obj* obj,
      hazptr_domain<Atom>& domain = default_hazptr_domain<Atom>()) {
    if (active_) {
      l_.push(obj);
      inc_count();
      check_threshold_push(domain);
    } else {
      // The batch is shutting down; reclaim the object immediately.
      obj->set_next(nullptr);
      reclaim_list(obj);
    }
  }

  /** reclaim_list */
  void reclaim_list(hazptr_obj<Atom>* obj) {
    while (obj) {
      // Reclaiming an object may release children (e.g., link-counted
      // objects) that also need reclamation; collect them and repeat.
      hazptr_obj_list<Atom> children;
      while (obj) {
        Obj* next = obj->next();
        (*(obj->reclaim()))(obj, children);
        obj = next;
      }
      obj = children.head();
    }
  }

  /** check_threshold_push */
  void check_threshold_push(hazptr_domain<Atom>& domain) {
    auto c = count();
    while (c >= kThreshold) {
      if (cas_count(c, 0)) {
        List ll = l_.pop_all();
        if (kIsDebug) {
          // Sanity check: object addresses must be 8-byte aligned.
          Obj* p = ll.head();
          for (int i = 1; p; ++i, p = p->next()) {
            DCHECK_EQ(reinterpret_cast<uintptr_t>(p) & 7, 0) << p << " " << i;
          }
        }
        hazptr_obj_list<Atom> l(ll.head(), ll.tail(), c);
        hazptr_domain_push_list<Atom>(l, domain);
        return;
      }
    }
  }
}; // hazptr_obj_batch
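/*
 * Sketch of a batch owner's teardown (Owner is illustrative). The
 * batch destructor DCHECKs that shutdown_and_reclaim has been called,
 * so an owning structure is expected to do roughly:
 *
 *   struct Owner {
 *     hazptr_obj_batch<std::atomic> batch_;
 *     ~Owner() {
 *       batch_.shutdown_and_reclaim(); // reclaim objects still in the
 *                                      // batch before destroying it
 *     }
 *   };
 */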

/**
 * hazptr_obj_retired_list
 *
 * Used for maintaining lists of retired objects in the domain
 * structure. Locked operations are used for lists of tagged
 * objects. Unlocked operations are used for the untagged list.
 */
template <template <typename> class Atom>
class hazptr_obj_retired_list {
  using Obj = hazptr_obj<Atom>;
  using List = hazptr_detail::linked_list<Obj>;
  using RetiredList = hazptr_detail::shared_head_only_list<Obj, Atom>;

  alignas(hardware_destructive_interference_size) RetiredList retired_;
  Atom<int> count_;

 public:
  static constexpr bool kAlsoLock = RetiredList::kAlsoLock;
  static constexpr bool kDontLock = RetiredList::kDontLock;
  static constexpr bool kMayBeLocked = RetiredList::kMayBeLocked;
  static constexpr bool kMayNotBeLocked = RetiredList::kMayNotBeLocked;

  hazptr_obj_retired_list() noexcept : count_(0) {}

  void push(hazptr_obj_list<Atom>& l, bool may_be_locked) noexcept {
    List ll(l.head(), l.tail());
    retired_.push(ll, may_be_locked);
    add_count(l.count());
  }

  void push_unlock(hazptr_obj_list<Atom>& l) noexcept {
    List ll(l.head(), l.tail());
    retired_.push_unlock(ll);
    auto count = l.count();
    if (count) {
      add_count(count);
    }
  }

  int count() const noexcept {
    return count_.load(std::memory_order_acquire);
  }

  bool empty() {
    return retired_.empty();
  }

  bool check_threshold_try_zero_count(int thresh) {
    auto oldval = count();
    while (oldval >= thresh) {
      if (cas_count(oldval, 0)) {
        return true;
      }
    }
    return false;
  }

  Obj* pop_all(bool lock) {
    return retired_.pop_all(lock);
  }

  bool check_lock() {
    return retired_.check_lock();
  }

 private:
  void add_count(int val) {
    count_.fetch_add(val, std::memory_order_release);
  }

  bool cas_count(int& expected, int newval) {
    return count_.compare_exchange_weak(
        expected, newval, std::memory_order_acq_rel, std::memory_order_acquire);
  }
}; // hazptr_obj_retired_list
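/*
 * Typical use by the domain (a sketch; the scan-and-reclaim step is
 * elided):
 *
 *   hazptr_obj_retired_list<std::atomic> retired;
 *   if (retired.check_threshold_try_zero_count(threshold)) {
 *     auto* head =
 *         retired.pop_all(hazptr_obj_retired_list<std::atomic>::kDontLock);
 *     // ... match the popped objects against protected hazard
 *     // pointers and reclaim the unprotected ones ...
 *   }
 */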

/**
 * hazptr_deleter
 *
 * For empty base optimization.
 */
template <typename T, typename D>
class hazptr_deleter {
  D deleter_;

 public:
  void set_deleter(D d = {}) {
    deleter_ = std::move(d);
  }

  void delete_obj(T* p) {
    deleter_(p);
  }
};

template <typename T>
class hazptr_deleter<T, std::default_delete<T>> {
 public:
  void set_deleter(std::default_delete<T> = {}) {}

  void delete_obj(T* p) {
    delete p;
  }
};
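/*
 * The std::default_delete specialization above is stateless, so a
 * hazptr_obj_base that inherits from it adds no storage for the
 * deleter. A hedged illustration (Node is hypothetical; exact sizes
 * are implementation-dependent):
 *
 *   struct Node : hazptr_obj_base<Node> {};
 *   // With the empty base optimization, sizeof(Node) is typically
 *   // just sizeof(hazptr_obj<std::atomic>), i.e., three words.
 */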

/**
 * hazptr_obj_base
 *
 * Base template for objects protected by hazard pointers.
 */
template <typename T, template <typename> class Atom, typename D>
class hazptr_obj_base : public hazptr_obj<Atom>, public hazptr_deleter<T, D> {
 public:
  /* Retire a removed object and pass the responsibility for
   * reclaiming it to the hazptr library */
  void retire(
      D deleter = {},
      hazptr_domain<Atom>& domain = default_hazptr_domain<Atom>()) {
    pre_retire(std::move(deleter));
    set_reclaim();
    this->push_obj(domain); // defined in hazptr_obj
  }

  void retire(hazptr_domain<Atom>& domain) {
    retire({}, domain);
  }

 private:
  void pre_retire(D deleter) {
    this->pre_retire_check(); // defined in hazptr_obj
    this->set_deleter(std::move(deleter));
  }

  void set_reclaim() {
    this->reclaim_ = [](hazptr_obj<Atom>* p, hazptr_obj_list<Atom>&) {
      auto hobp = static_cast<hazptr_obj_base<T, Atom, D>*>(p);
      auto obj = static_cast<T*>(hobp);
      hobp->delete_obj(obj);
    };
  }
}; // hazptr_obj_base
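/*
 * Example use (a minimal sketch; Node and the removal step are
 * illustrative):
 *
 *   class Node : public hazptr_obj_base<Node> {
 *    public:
 *     int value_;
 *   };
 *
 *   Node* n = remove_from_shared_structure(); // hypothetical removal
 *   n->retire(); // do not delete n directly; the default domain
 *                // reclaims it once no hazard pointer protects it
 */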

} // namespace folly