// Copyright (c) 2013-2014 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

#pragma once

#include "memory.h"

#if defined(__GNUC__) && !KJ_HEADER_WARNINGS
#pragma GCC system_header
#endif

#if _MSC_VER
#if _MSC_VER < 1910
#include <intrin.h>
#else
#include <intrin0.h>
#endif
#endif

namespace kj {

// =======================================================================================
// Non-atomic (thread-unsafe) refcounting
class Refcounted: private Disposer {
  // Subclass this to create a class that contains a reference count. Then, use
  // `kj::refcounted<T>()` to allocate a new refcounted pointer.
  //
  // Do NOT use this lightly. Refcounting is a crutch. Good designs should strive to make object
  // ownership clear, so that refcounting is not necessary. All that said, reference counting can
  // sometimes simplify code that would otherwise become convoluted with explicit ownership, even
  // when ownership relationships are clear at an abstract level.
  //
  // NOT THREADSAFE: This refcounting implementation assumes that an object's references are
  // manipulated only in one thread, because atomic (thread-safe) refcounting is surprisingly slow.
  //
  // In general, abstract classes should _not_ subclass this. The concrete class at the bottom
  // of the hierarchy should be the one to decide how it implements refcounting. Interfaces should
  // expose only an `addRef()` method that returns `Own<InterfaceType>`. There are two reasons for
  // this rule:
  // 1. Interfaces would need to virtually inherit Refcounted, otherwise two refcounted interfaces
  //    could not be inherited by the same subclass. Virtual inheritance is awkward and
  //    inefficient.
  // 2. An implementation may decide that it would rather return a copy than a refcount, or use
  //    some other strategy.
  //
  // TODO(cleanup): Rethink above. Virtual inheritance is not necessarily that bad. OTOH, a
  //   virtual function call for every refcount is sad in its own way. A Ref<T> type to replace
  //   Own<T> could also be nice.
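  //
  // For example, a minimal sketch of typical usage (the names `Node`, `node`, and `node2` are
  // illustrative, not part of this header):
  //
  //     class Node: public kj::Refcounted {
  //     public:
  //       Node(int value): value(value) {}
  //       int value;
  //     };
  //
  //     kj::Own<Node> node = kj::refcounted<Node>(123);  // allocate; refcount is now 1
  //     kj::Own<Node> node2 = kj::addRef(*node);         // refcount is now 2; both Owns share it
  //
  // When the last remaining Own is destroyed, the refcount reaches zero and the object is deleted.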

public:
  Refcounted() = default;
  virtual ~Refcounted() noexcept(false);
  KJ_DISALLOW_COPY(Refcounted);

  inline bool isShared() const { return refcount > 1; }
  // Check if there are multiple references to this object. This is sometimes useful for deciding
  // whether it's safe to modify the object vs. make a copy.
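  //
  // For example, a copy-on-write container might do (sketch; `node` and `deepCopy()` are
  // illustrative names, not part of this header):
  //
  //     if (node->isShared()) node = deepCopy(*node);  // don't mutate an object others can see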

private:
  mutable uint refcount = 0;
  // "mutable" because disposeImpl() is const. Bleh.

  void disposeImpl(void* pointer) const override;
  template <typename T>
  static Own<T> addRefInternal(T* object);

  template <typename T>
  friend Own<T> addRef(T& object);
  template <typename T, typename... Params>
  friend Own<T> refcounted(Params&&... params);
};

template <typename T, typename... Params>
inline Own<T> refcounted(Params&&... params) {
  // Allocate a new refcounted instance of T, passing `params` to its constructor. Returns an
  // initial reference to the object. More references can be created with `kj::addRef()`.

  return Refcounted::addRefInternal(new T(kj::fwd<Params>(params)...));
}

template <typename T>
Own<T> addRef(T& object) {
  // Return a new reference to `object`, which must subclass Refcounted and have been allocated
  // using `kj::refcounted<>()`. It is suggested that subclasses implement a non-static addRef()
  // method which wraps this and returns the appropriate type.
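  //
  // For example, the suggested wrapper might look like this (a sketch; `Node` is an illustrative
  // name, not part of this header):
  //
  //     class Node: public kj::Refcounted {
  //     public:
  //       kj::Own<Node> addRef() { return kj::addRef(*this); }
  //     };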

  KJ_IREQUIRE(object.Refcounted::refcount > 0, "Object not allocated with kj::refcounted().");
  return Refcounted::addRefInternal(&object);
}

template <typename T>
Own<T> Refcounted::addRefInternal(T* object) {
  Refcounted* refcounted = object;
  ++refcounted->refcount;
  return Own<T>(object, *refcounted);
}

// =======================================================================================
// Atomic (thread-safe) refcounting
//
// Warning: Atomic ops are SLOW.

#if _MSC_VER
#if _M_ARM
#define KJ_MSVC_INTERLOCKED(OP, MEM) _Interlocked##OP##_##MEM
#else
#define KJ_MSVC_INTERLOCKED(OP, MEM) _Interlocked##OP
#endif
#endif

class AtomicRefcounted: private kj::Disposer {
public:
  AtomicRefcounted() = default;
  virtual ~AtomicRefcounted() noexcept(false);
  KJ_DISALLOW_COPY(AtomicRefcounted);

  inline bool isShared() const {
#if _MSC_VER
    return KJ_MSVC_INTERLOCKED(Or, acq)(&refcount, 0) > 1;
#else
    return __atomic_load_n(&refcount, __ATOMIC_ACQUIRE) > 1;
#endif
  }

private:
#if _MSC_VER
  mutable volatile long refcount = 0;
#else
  mutable volatile uint refcount = 0;
#endif

  bool addRefWeakInternal() const;

  void disposeImpl(void* pointer) const override;
  template <typename T>
  static kj::Own<T> addRefInternal(T* object);
  template <typename T>
  static kj::Own<const T> addRefInternal(const T* object);

  template <typename T>
  friend kj::Own<T> atomicAddRef(T& object);
  template <typename T>
  friend kj::Own<const T> atomicAddRef(const T& object);
  template <typename T>
  friend kj::Maybe<kj::Own<const T>> atomicAddRefWeak(const T& object);
  template <typename T, typename... Params>
  friend kj::Own<T> atomicRefcounted(Params&&... params);
};

template <typename T, typename... Params>
inline kj::Own<T> atomicRefcounted(Params&&... params) {
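  // Allocate a new atomically-refcounted instance of T, passing `params` to its constructor.
  // Returns an initial reference to the object. Additional references can be created with
  // `kj::atomicAddRef()`.
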
  return AtomicRefcounted::addRefInternal(new T(kj::fwd<Params>(params)...));
}

template <typename T>
kj::Own<T> atomicAddRef(T& object) {
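  // Return an additional reference to `object`, which must subclass AtomicRefcounted and have
  // been allocated using `kj::atomicRefcounted<>()`.
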
  KJ_IREQUIRE(object.AtomicRefcounted::refcount > 0,
      "Object not allocated with kj::atomicRefcounted().");
  return AtomicRefcounted::addRefInternal(&object);
}

template <typename T>
kj::Own<const T> atomicAddRef(const T& object) {
  KJ_IREQUIRE(object.AtomicRefcounted::refcount > 0,
      "Object not allocated with kj::atomicRefcounted().");
  return AtomicRefcounted::addRefInternal(&object);
}

template <typename T>
kj::Maybe<kj::Own<const T>> atomicAddRefWeak(const T& object) {
  // Try to addref an object whose refcount could have already reached zero in another thread, and
  // whose destructor could therefore already have started executing. The destructor must contain
  // some synchronization that guarantees that said destructor has not yet completed when
  // atomicAddRefWeak() is called (so that the object is still valid). Since the destructor cannot
  // be canceled once it has started, in the case that it has already started, this function
  // returns nullptr.
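  //
  // For example, a minimal sketch of the intended pattern (illustrative only; `Widget`,
  // `WeakSlot`, and `tryAddRef()` are not part of this header, and `kj::MutexGuarded` comes
  // from kj/mutex.h):
  //
  //     struct WeakSlot {
  //       kj::MutexGuarded<const Widget*> ptr;
  //       // ~Widget() locks `ptr` and sets it to nullptr before the destructor completes.
  //     };
  //
  //     kj::Maybe<kj::Own<const Widget>> tryAddRef(WeakSlot& slot) {
  //       auto locked = slot.ptr.lockExclusive();
  //       if (*locked == nullptr) return nullptr;   // destructor already cleaned up
  //       return kj::atomicAddRefWeak(**locked);    // still null if refcount already hit zero
  //     }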

  const AtomicRefcounted* refcounted = &object;
  if (refcounted->addRefWeakInternal()) {
    return kj::Own<const T>(&object, *refcounted);
  } else {
    return nullptr;
  }
}

template <typename T>
kj::Own<T> AtomicRefcounted::addRefInternal(T* object) {
  AtomicRefcounted* refcounted = object;
#if _MSC_VER
  KJ_MSVC_INTERLOCKED(Increment, nf)(&refcounted->refcount);
#else
  __atomic_add_fetch(&refcounted->refcount, 1, __ATOMIC_RELAXED);
#endif
  return kj::Own<T>(object, *refcounted);
}

template <typename T>
kj::Own<const T> AtomicRefcounted::addRefInternal(const T* object) {
  const AtomicRefcounted* refcounted = object;
#if _MSC_VER
  KJ_MSVC_INTERLOCKED(Increment, nf)(&refcounted->refcount);
#else
  __atomic_add_fetch(&refcounted->refcount, 1, __ATOMIC_RELAXED);
#endif
  return kj::Own<const T>(object, *refcounted);
}

}  // namespace kj