comparison CSP2/CSP2_env/env-d9b9114564458d9d-741b3de822f2aaca6c6caa4325c4afce/include/kj/refcount.h @ 69:33d812a61356

planemo upload commit 2e9511a184a1ca667c7be0c6321a36dc4e3d116d
author jpayne
date Tue, 18 Mar 2025 17:55:14 -0400
parents
children
comparison
equal deleted inserted replaced
67:0e9998148a16 69:33d812a61356
1 // Copyright (c) 2013-2014 Sandstorm Development Group, Inc. and contributors
2 // Licensed under the MIT License:
3 //
4 // Permission is hereby granted, free of charge, to any person obtaining a copy
5 // of this software and associated documentation files (the "Software"), to deal
6 // in the Software without restriction, including without limitation the rights
7 // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8 // copies of the Software, and to permit persons to whom the Software is
9 // furnished to do so, subject to the following conditions:
10 //
11 // The above copyright notice and this permission notice shall be included in
12 // all copies or substantial portions of the Software.
13 //
14 // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19 // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
20 // THE SOFTWARE.
21
22 #pragma once
23
24 #include "memory.h"
25
26 #if _MSC_VER
27 #if _MSC_VER < 1910
28 #include <intrin.h>
29 #else
30 #include <intrin0.h>
31 #endif
32 #endif
33
34 KJ_BEGIN_HEADER
35
36 namespace kj {
37
38 // =======================================================================================
39 // Non-atomic (thread-unsafe) refcounting
40
41 class Refcounted: private Disposer {
42 // Subclass this to create a class that contains a reference count. Then, use
43 // `kj::refcounted<T>()` to allocate a new refcounted pointer.
44 //
45 // Do NOT use this lightly. Refcounting is a crutch. Good designs should strive to make object
46 // ownership clear, so that refcounting is not necessary. All that said, reference counting can
47 // sometimes simplify code that would otherwise become convoluted with explicit ownership, even
48 // when ownership relationships are clear at an abstract level.
49 //
50 // NOT THREADSAFE: This refcounting implementation assumes that an object's references are
51 // manipulated only in one thread, because atomic (thread-safe) refcounting is surprisingly slow.
52 //
53 // In general, abstract classes should _not_ subclass this. The concrete class at the bottom
54 // of the hierarchy should be the one to decide how it implements refcounting. Interfaces should
55 // expose only an `addRef()` method that returns `Own<InterfaceType>`. There are two reasons for
56 // this rule:
57 // 1. Interfaces would need to virtually inherit Refcounted, otherwise two refcounted interfaces
58 // could not be inherited by the same subclass. Virtual inheritance is awkward and
59 // inefficient.
60 // 2. An implementation may decide that it would rather return a copy than a refcount, or use
61 // some other strategy.
62 //
63 // TODO(cleanup): Rethink above. Virtual inheritance is not necessarily that bad. OTOH, a
64 // virtual function call for every refcount is sad in its own way. A Ref<T> type to replace
65 // Own<T> could also be nice.
66
67 public:
68 Refcounted() = default;
69 virtual ~Refcounted() noexcept(false);
70 KJ_DISALLOW_COPY_AND_MOVE(Refcounted);
71
72 inline bool isShared() const { return refcount > 1; }
73 // Check if there are multiple references to this object. This is sometimes useful for deciding
74 // whether it's safe to modify the object vs. make a copy.
75
76 private:
77 mutable uint refcount = 0;
78 // "mutable" because disposeImpl() is const. Bleh.
// Starts at 0; addRefInternal() (below) performs every increment, so the initial
// kj::refcounted<T>() call leaves it at 1.
79
80 void disposeImpl(void* pointer) const override;
// Disposer hook invoked when an Own<T> reference is dropped. Body lives in refcount.c++ --
// presumably decrements refcount and deletes the object when it reaches zero (confirm there).
81 template <typename T>
82 static Own<T> addRefInternal(T* object);
// Shared helper used by refcounted() and addRef(); defined later in this header.
83
84 template <typename T>
85 friend Own<T> addRef(T& object);
86 template <typename T, typename... Params>
87 friend Own<T> refcounted(Params&&... params);
// The friends above need direct access to `refcount` and addRefInternal().
88
89 template <typename T>
90 friend class RefcountedWrapper;
91 };
92
93 template <typename T, typename... Params>
94 inline Own<T> refcounted(Params&&... params) {
95 // Allocate a new refcounted instance of T, passing `params` to its constructor. Returns an
96 // initial reference to the object. More references can be created with `kj::addRef()`.
97
98 return Refcounted::addRefInternal(new T(kj::fwd<Params>(params)...));  // refcount goes 0 -> 1 here
99 }
100
101 template <typename T>
102 Own<T> addRef(T& object) {
103 // Return a new reference to `object`, which must subclass Refcounted and have been allocated
104 // using `kj::refcounted<>()`. It is suggested that subclasses implement a non-static addRef()
105 // method which wraps this and returns the appropriate type.
106
107 KJ_IREQUIRE(object.Refcounted::refcount > 0, "Object not allocated with kj::refcounted().");  // a zero refcount means the object never went through kj::refcounted()
108 return Refcounted::addRefInternal(&object);
109 }
110
111 template <typename T>
112 Own<T> Refcounted::addRefInternal(T* object) {
// Common helper for refcounted() and addRef(): bump the count and wrap `object` in an
// Own<T> whose Disposer is the Refcounted base itself.
113 Refcounted* refcounted = object;  // implicit upcast; T must derive from Refcounted
114 ++refcounted->refcount;  // plain (non-atomic) increment -- see NOT THREADSAFE note on Refcounted
115 return Own<T>(object, *refcounted);
116 }
117
118 template <typename T>
119 class RefcountedWrapper: public Refcounted {
120 // Adds refcounting as a wrapper around an existing type, allowing you to construct references
121 // with type Own<T> that appears to point directly to the underlying object.
122
123 public:
124 template <typename... Params>
125 RefcountedWrapper(Params&&... params): wrapped(kj::fwd<Params>(params)...) {}
// Constructs the wrapped T in place, forwarding all arguments.
126
127 T& getWrapped() { return wrapped; }
128 const T& getWrapped() const { return wrapped; }
129
130 Own<T> addWrappedRef() {
131 // Return an owned reference to the wrapped object that is backed by a refcount.
132 ++refcount;  // direct access via friendship; mirrors Refcounted::addRefInternal()
133 return Own<T>(&wrapped, *this);  // Own points at the inner T but disposes through this wrapper
134 }
135
136 private:
137 T wrapped;
138 };
139
140 template <typename T>
141 class RefcountedWrapper<Own<T>>: public Refcounted {
142 // Specialization for when the wrapped type is itself Own<T>. We don't want this to result in
143 // Own<Own<T>>.
144
145 public:
146 RefcountedWrapper(Own<T> wrapped): wrapped(kj::mv(wrapped)) {}
// Takes ownership of an existing Own<T>.
147
148 T& getWrapped() { return *wrapped; }
149 const T& getWrapped() const { return *wrapped; }
150
151 Own<T> addWrappedRef() {
152 // Return an owned reference to the wrapped object that is backed by a refcount.
153 ++refcount;  // direct access via friendship; mirrors Refcounted::addRefInternal()
154 return Own<T>(wrapped.get(), *this);  // aliases the inner T; this wrapper keeps the Own<T> alive
155 }
156
157 private:
158 Own<T> wrapped;
159 };
160
161 template <typename T, typename... Params>
162 Own<RefcountedWrapper<T>> refcountedWrapper(Params&&... params) {
// Convenience factory: construct a T in place inside a new refcounted wrapper.
163 return refcounted<RefcountedWrapper<T>>(kj::fwd<Params>(params)...);
164 }
165
166 template <typename T>
167 Own<RefcountedWrapper<Own<T>>> refcountedWrapper(Own<T>&& wrapped) {
// Convenience factory: take ownership of an existing Own<T> and make it refcounted
// (selects the Own<T> specialization above to avoid Own<Own<T>>).
168 return refcounted<RefcountedWrapper<Own<T>>>(kj::mv(wrapped));
169 }
170
171 // =======================================================================================
172 // Atomic (thread-safe) refcounting
173 //
174 // Warning: Atomic ops are SLOW.
175
176 #if _MSC_VER && !defined(__clang__)
177 #if _M_ARM
// On ARM, MSVC's interlocked intrinsics come in memory-order-suffixed variants
// (_acq / _rel / _nf = no fence); the MEM argument selects one.
178 #define KJ_MSVC_INTERLOCKED(OP, MEM) _Interlocked##OP##_##MEM
179 #else
// On x86/x64 the intrinsics are always full-barrier and have no suffixed forms,
// so the MEM argument is ignored.
180 #define KJ_MSVC_INTERLOCKED(OP, MEM) _Interlocked##OP
181 #endif
182 #endif
183
184 class AtomicRefcounted: private kj::Disposer {
// Thread-safe counterpart of Refcounted: references may be added and dropped from any
// thread. See the warning above -- atomic ops are slow; prefer plain Refcounted when
// single-threaded.
185 public:
186 AtomicRefcounted() = default;
187 virtual ~AtomicRefcounted() noexcept(false);
188 KJ_DISALLOW_COPY_AND_MOVE(AtomicRefcounted);
189
190 inline bool isShared() const {
// True if more than one reference currently exists.
191 #if _MSC_VER && !defined(__clang__)
192 return KJ_MSVC_INTERLOCKED(Or, acq)(&refcount, 0) > 1;  // OR with 0 == atomic load, acquire ordering
193 #else
194 return __atomic_load_n(&refcount, __ATOMIC_ACQUIRE) > 1;
195 #endif
196 }
197
198 private:
199 #if _MSC_VER && !defined(__clang__)
200 mutable volatile long refcount = 0;  // MSVC interlocked intrinsics operate on volatile long
201 #else
202 mutable volatile uint refcount = 0;  // GCC/Clang __atomic builtins
203 #endif
204
205 bool addRefWeakInternal() const;
// Defined in refcount.c++; used by atomicAddRefWeak() below. Presumably increments the
// count only if it is still nonzero -- confirm in the implementation.
206
207 void disposeImpl(void* pointer) const override;
208 template <typename T>
209 static kj::Own<T> addRefInternal(T* object);
210 template <typename T>
211 static kj::Own<const T> addRefInternal(const T* object);
212
213 template <typename T>
214 friend kj::Own<T> atomicAddRef(T& object);
215 template <typename T>
216 friend kj::Own<const T> atomicAddRef(const T& object);
217 template <typename T>
218 friend kj::Maybe<kj::Own<const T>> atomicAddRefWeak(const T& object);
219 template <typename T, typename... Params>
220 friend kj::Own<T> atomicRefcounted(Params&&... params);
// The friends above need direct access to `refcount` and the internal helpers.
221 };
222
223 template <typename T, typename... Params>
224 inline kj::Own<T> atomicRefcounted(Params&&... params) {
// Atomic counterpart of kj::refcounted(): allocate a new T and return the initial reference.
225 return AtomicRefcounted::addRefInternal(new T(kj::fwd<Params>(params)...));
226 }
227
228 template <typename T>
229 kj::Own<T> atomicAddRef(T& object) {
// Atomic counterpart of kj::addRef(). NOTE(review): this precondition check reads refcount
// non-atomically; presumably the caller already holds a strong reference, which keeps the
// count nonzero -- confirm that assumption holds at all call sites.
230 KJ_IREQUIRE(object.AtomicRefcounted::refcount > 0,
231 "Object not allocated with kj::atomicRefcounted().");
232 return AtomicRefcounted::addRefInternal(&object);
233 }
234
235 template <typename T>
236 kj::Own<const T> atomicAddRef(const T& object) {
// Const overload of atomicAddRef(); returns Own<const T>.
237 KJ_IREQUIRE(object.AtomicRefcounted::refcount > 0,
238 "Object not allocated with kj::atomicRefcounted().");
239 return AtomicRefcounted::addRefInternal(&object);
240 }
241
242 template <typename T>
243 kj::Maybe<kj::Own<const T>> atomicAddRefWeak(const T& object) {
244 // Try to addref an object whose refcount could have already reached zero in another thread, and
245 // whose destructor could therefore already have started executing. The destructor must contain
246 // some synchronization that guarantees that said destructor has not yet completed when
247 // atomicAddRefWeak() is called (so that the object is still valid). Since the destructor cannot
248 // be canceled once it has started, in the case that it has already started, this function
249 // returns nullptr.
250
251 const AtomicRefcounted* refcounted = &object;
252 if (refcounted->addRefWeakInternal()) {  // succeeds only if the object was still alive; see above
253 return kj::Own<const T>(&object, *refcounted);
254 } else {
255 return nullptr;
256 }
257 }
258
259 template <typename T>
260 kj::Own<T> AtomicRefcounted::addRefInternal(T* object) {
// Atomically bump the count and wrap `object` in an Own whose Disposer is the
// AtomicRefcounted base.
261 AtomicRefcounted* refcounted = object;  // implicit upcast; T must derive from AtomicRefcounted
262 #if _MSC_VER && !defined(__clang__)
263 KJ_MSVC_INTERLOCKED(Increment, nf)(&refcounted->refcount);  // nf = no-fence variant on ARM; plain _InterlockedIncrement on x86
264 #else
265 __atomic_add_fetch(&refcounted->refcount, 1, __ATOMIC_RELAXED);  // relaxed-order increment
266 #endif
267 return kj::Own<T>(object, *refcounted);
268 }
269
270 template <typename T>
271 kj::Own<const T> AtomicRefcounted::addRefInternal(const T* object) {
// Const overload: identical to the non-const version above but produces Own<const T>.
272 const AtomicRefcounted* refcounted = object;
273 #if _MSC_VER && !defined(__clang__)
274 KJ_MSVC_INTERLOCKED(Increment, nf)(&refcounted->refcount);  // nf = no-fence variant on ARM; plain _InterlockedIncrement on x86
275 #else
276 __atomic_add_fetch(&refcounted->refcount, 1, __ATOMIC_RELAXED);  // relaxed-order increment
277 #endif
278 return kj::Own<const T>(object, *refcounted);
279 }
280
281 } // namespace kj
282
283 KJ_END_HEADER