// Copyright (c) 2013-2014 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
jpayne@69: jpayne@69: #pragma once jpayne@69: jpayne@69: #include "memory.h" jpayne@69: jpayne@69: #if _MSC_VER jpayne@69: #if _MSC_VER < 1910 jpayne@69: #include jpayne@69: #else jpayne@69: #include jpayne@69: #endif jpayne@69: #endif jpayne@69: jpayne@69: KJ_BEGIN_HEADER jpayne@69: jpayne@69: namespace kj { jpayne@69: jpayne@69: // ======================================================================================= jpayne@69: // Non-atomic (thread-unsafe) refcounting jpayne@69: jpayne@69: class Refcounted: private Disposer { jpayne@69: // Subclass this to create a class that contains a reference count. Then, use jpayne@69: // `kj::refcounted()` to allocate a new refcounted pointer. jpayne@69: // jpayne@69: // Do NOT use this lightly. Refcounting is a crutch. Good designs should strive to make object jpayne@69: // ownership clear, so that refcounting is not necessary. All that said, reference counting can jpayne@69: // sometimes simplify code that would otherwise become convoluted with explicit ownership, even jpayne@69: // when ownership relationships are clear at an abstract level. jpayne@69: // jpayne@69: // NOT THREADSAFE: This refcounting implementation assumes that an object's references are jpayne@69: // manipulated only in one thread, because atomic (thread-safe) refcounting is surprisingly slow. jpayne@69: // jpayne@69: // In general, abstract classes should _not_ subclass this. The concrete class at the bottom jpayne@69: // of the hierarchy should be the one to decide how it implements refcounting. Interfaces should jpayne@69: // expose only an `addRef()` method that returns `Own`. There are two reasons for jpayne@69: // this rule: jpayne@69: // 1. Interfaces would need to virtually inherit Refcounted, otherwise two refcounted interfaces jpayne@69: // could not be inherited by the same subclass. Virtual inheritance is awkward and jpayne@69: // inefficient. jpayne@69: // 2. 
An implementation may decide that it would rather return a copy than a refcount, or use jpayne@69: // some other strategy. jpayne@69: // jpayne@69: // TODO(cleanup): Rethink above. Virtual inheritance is not necessarily that bad. OTOH, a jpayne@69: // virtual function call for every refcount is sad in its own way. A Ref type to replace jpayne@69: // Own could also be nice. jpayne@69: jpayne@69: public: jpayne@69: Refcounted() = default; jpayne@69: virtual ~Refcounted() noexcept(false); jpayne@69: KJ_DISALLOW_COPY_AND_MOVE(Refcounted); jpayne@69: jpayne@69: inline bool isShared() const { return refcount > 1; } jpayne@69: // Check if there are multiple references to this object. This is sometimes useful for deciding jpayne@69: // whether it's safe to modify the object vs. make a copy. jpayne@69: jpayne@69: private: jpayne@69: mutable uint refcount = 0; jpayne@69: // "mutable" because disposeImpl() is const. Bleh. jpayne@69: jpayne@69: void disposeImpl(void* pointer) const override; jpayne@69: template jpayne@69: static Own addRefInternal(T* object); jpayne@69: jpayne@69: template jpayne@69: friend Own addRef(T& object); jpayne@69: template jpayne@69: friend Own refcounted(Params&&... params); jpayne@69: jpayne@69: template jpayne@69: friend class RefcountedWrapper; jpayne@69: }; jpayne@69: jpayne@69: template jpayne@69: inline Own refcounted(Params&&... params) { jpayne@69: // Allocate a new refcounted instance of T, passing `params` to its constructor. Returns an jpayne@69: // initial reference to the object. More references can be created with `kj::addRef()`. jpayne@69: jpayne@69: return Refcounted::addRefInternal(new T(kj::fwd(params)...)); jpayne@69: } jpayne@69: jpayne@69: template jpayne@69: Own addRef(T& object) { jpayne@69: // Return a new reference to `object`, which must subclass Refcounted and have been allocated jpayne@69: // using `kj::refcounted<>()`. 
It is suggested that subclasses implement a non-static addRef() jpayne@69: // method which wraps this and returns the appropriate type. jpayne@69: jpayne@69: KJ_IREQUIRE(object.Refcounted::refcount > 0, "Object not allocated with kj::refcounted()."); jpayne@69: return Refcounted::addRefInternal(&object); jpayne@69: } jpayne@69: jpayne@69: template jpayne@69: Own Refcounted::addRefInternal(T* object) { jpayne@69: Refcounted* refcounted = object; jpayne@69: ++refcounted->refcount; jpayne@69: return Own(object, *refcounted); jpayne@69: } jpayne@69: jpayne@69: template jpayne@69: class RefcountedWrapper: public Refcounted { jpayne@69: // Adds refcounting as a wrapper around an existing type, allowing you to construct references jpayne@69: // with type Own that appears to point directly to the underlying object. jpayne@69: jpayne@69: public: jpayne@69: template jpayne@69: RefcountedWrapper(Params&&... params): wrapped(kj::fwd(params)...) {} jpayne@69: jpayne@69: T& getWrapped() { return wrapped; } jpayne@69: const T& getWrapped() const { return wrapped; } jpayne@69: jpayne@69: Own addWrappedRef() { jpayne@69: // Return an owned reference to the wrapped object that is backed by a refcount. jpayne@69: ++refcount; jpayne@69: return Own(&wrapped, *this); jpayne@69: } jpayne@69: jpayne@69: private: jpayne@69: T wrapped; jpayne@69: }; jpayne@69: jpayne@69: template jpayne@69: class RefcountedWrapper>: public Refcounted { jpayne@69: // Specialization for when the wrapped type is itself Own. We don't want this to result in jpayne@69: // Own>. jpayne@69: jpayne@69: public: jpayne@69: RefcountedWrapper(Own wrapped): wrapped(kj::mv(wrapped)) {} jpayne@69: jpayne@69: T& getWrapped() { return *wrapped; } jpayne@69: const T& getWrapped() const { return *wrapped; } jpayne@69: jpayne@69: Own addWrappedRef() { jpayne@69: // Return an owned reference to the wrapped object that is backed by a refcount. 
jpayne@69: ++refcount; jpayne@69: return Own(wrapped.get(), *this); jpayne@69: } jpayne@69: jpayne@69: private: jpayne@69: Own wrapped; jpayne@69: }; jpayne@69: jpayne@69: template jpayne@69: Own> refcountedWrapper(Params&&... params) { jpayne@69: return refcounted>(kj::fwd(params)...); jpayne@69: } jpayne@69: jpayne@69: template jpayne@69: Own>> refcountedWrapper(Own&& wrapped) { jpayne@69: return refcounted>>(kj::mv(wrapped)); jpayne@69: } jpayne@69: jpayne@69: // ======================================================================================= jpayne@69: // Atomic (thread-safe) refcounting jpayne@69: // jpayne@69: // Warning: Atomic ops are SLOW. jpayne@69: jpayne@69: #if _MSC_VER && !defined(__clang__) jpayne@69: #if _M_ARM jpayne@69: #define KJ_MSVC_INTERLOCKED(OP, MEM) _Interlocked##OP##_##MEM jpayne@69: #else jpayne@69: #define KJ_MSVC_INTERLOCKED(OP, MEM) _Interlocked##OP jpayne@69: #endif jpayne@69: #endif jpayne@69: jpayne@69: class AtomicRefcounted: private kj::Disposer { jpayne@69: public: jpayne@69: AtomicRefcounted() = default; jpayne@69: virtual ~AtomicRefcounted() noexcept(false); jpayne@69: KJ_DISALLOW_COPY_AND_MOVE(AtomicRefcounted); jpayne@69: jpayne@69: inline bool isShared() const { jpayne@69: #if _MSC_VER && !defined(__clang__) jpayne@69: return KJ_MSVC_INTERLOCKED(Or, acq)(&refcount, 0) > 1; jpayne@69: #else jpayne@69: return __atomic_load_n(&refcount, __ATOMIC_ACQUIRE) > 1; jpayne@69: #endif jpayne@69: } jpayne@69: jpayne@69: private: jpayne@69: #if _MSC_VER && !defined(__clang__) jpayne@69: mutable volatile long refcount = 0; jpayne@69: #else jpayne@69: mutable volatile uint refcount = 0; jpayne@69: #endif jpayne@69: jpayne@69: bool addRefWeakInternal() const; jpayne@69: jpayne@69: void disposeImpl(void* pointer) const override; jpayne@69: template jpayne@69: static kj::Own addRefInternal(T* object); jpayne@69: template jpayne@69: static kj::Own addRefInternal(const T* object); jpayne@69: jpayne@69: template jpayne@69: friend kj::Own 
atomicAddRef(T& object); jpayne@69: template jpayne@69: friend kj::Own atomicAddRef(const T& object); jpayne@69: template jpayne@69: friend kj::Maybe> atomicAddRefWeak(const T& object); jpayne@69: template jpayne@69: friend kj::Own atomicRefcounted(Params&&... params); jpayne@69: }; jpayne@69: jpayne@69: template jpayne@69: inline kj::Own atomicRefcounted(Params&&... params) { jpayne@69: return AtomicRefcounted::addRefInternal(new T(kj::fwd(params)...)); jpayne@69: } jpayne@69: jpayne@69: template jpayne@69: kj::Own atomicAddRef(T& object) { jpayne@69: KJ_IREQUIRE(object.AtomicRefcounted::refcount > 0, jpayne@69: "Object not allocated with kj::atomicRefcounted()."); jpayne@69: return AtomicRefcounted::addRefInternal(&object); jpayne@69: } jpayne@69: jpayne@69: template jpayne@69: kj::Own atomicAddRef(const T& object) { jpayne@69: KJ_IREQUIRE(object.AtomicRefcounted::refcount > 0, jpayne@69: "Object not allocated with kj::atomicRefcounted()."); jpayne@69: return AtomicRefcounted::addRefInternal(&object); jpayne@69: } jpayne@69: jpayne@69: template jpayne@69: kj::Maybe> atomicAddRefWeak(const T& object) { jpayne@69: // Try to addref an object whose refcount could have already reached zero in another thread, and jpayne@69: // whose destructor could therefore already have started executing. The destructor must contain jpayne@69: // some synchronization that guarantees that said destructor has not yet completed when jpayne@69: // attomicAddRefWeak() is called (so that the object is still valid). Since the destructor cannot jpayne@69: // be canceled once it has started, in the case that it has already started, this function jpayne@69: // returns nullptr. 
jpayne@69: jpayne@69: const AtomicRefcounted* refcounted = &object; jpayne@69: if (refcounted->addRefWeakInternal()) { jpayne@69: return kj::Own(&object, *refcounted); jpayne@69: } else { jpayne@69: return nullptr; jpayne@69: } jpayne@69: } jpayne@69: jpayne@69: template jpayne@69: kj::Own AtomicRefcounted::addRefInternal(T* object) { jpayne@69: AtomicRefcounted* refcounted = object; jpayne@69: #if _MSC_VER && !defined(__clang__) jpayne@69: KJ_MSVC_INTERLOCKED(Increment, nf)(&refcounted->refcount); jpayne@69: #else jpayne@69: __atomic_add_fetch(&refcounted->refcount, 1, __ATOMIC_RELAXED); jpayne@69: #endif jpayne@69: return kj::Own(object, *refcounted); jpayne@69: } jpayne@69: jpayne@69: template jpayne@69: kj::Own AtomicRefcounted::addRefInternal(const T* object) { jpayne@69: const AtomicRefcounted* refcounted = object; jpayne@69: #if _MSC_VER && !defined(__clang__) jpayne@69: KJ_MSVC_INTERLOCKED(Increment, nf)(&refcounted->refcount); jpayne@69: #else jpayne@69: __atomic_add_fetch(&refcounted->refcount, 1, __ATOMIC_RELAXED); jpayne@69: #endif jpayne@69: return kj::Own(object, *refcounted); jpayne@69: } jpayne@69: jpayne@69: } // namespace kj jpayne@69: jpayne@69: KJ_END_HEADER