Commit fc620c73 authored by Kenton Varda

Add atomic refcounting implementation.

parent cec589a9
@@ -25,6 +25,9 @@
namespace kj {
// =======================================================================================
// Non-atomic (thread-unsafe) refcounting
Refcounted::~Refcounted() noexcept(false) {
KJ_ASSERT(refcount == 0, "Refcounted object deleted with non-zero refcount.");
}
@@ -35,4 +38,34 @@ void Refcounted::disposeImpl(void* pointer) const {
}
}
// =======================================================================================
// Atomic (thread-safe) refcounting
AtomicRefcounted::~AtomicRefcounted() noexcept(false) {
KJ_ASSERT(refcount == 0, "Refcounted object deleted with non-zero refcount.");
}
void AtomicRefcounted::disposeImpl(void* pointer) const {
if (__atomic_sub_fetch(&refcount, 1, __ATOMIC_RELEASE) == 0) {
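// The release ordering on the decrement publishes this thread's writes to the object,
// and the acquire fence below ensures that the thread which drops the last reference
// observes all writes made by other threads before they released theirs, so the
// destructor runs against a fully up-to-date object.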
__atomic_thread_fence(__ATOMIC_ACQUIRE);
delete this;
}
}
bool AtomicRefcounted::addRefWeakInternal() const {
for (;;) {
uint orig = __atomic_load_n(&refcount, __ATOMIC_RELAXED);
if (orig == 0) {
// Refcount already hit zero. Destructor is already running so we can't revive the object.
return false;
}
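// Try to bump the observed count by one. A weak compare-exchange may fail spuriously
// even when the value matches, but the enclosing loop simply retries in that case.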
if (__atomic_compare_exchange_n(&refcount, &orig, orig + 1, true,
__ATOMIC_RELAXED, __ATOMIC_RELAXED)) {
// Successfully incremented refcount without letting it hit zero.
return true;
}
}
}
} // namespace kj
@@ -30,6 +30,9 @@
namespace kj {
// =======================================================================================
// Non-atomic (thread-unsafe) refcounting
class Refcounted: private Disposer {
// Subclass this to create a class that contains a reference count. Then, use
// `kj::refcounted<T>()` to allocate a new refcounted pointer.
@@ -102,6 +105,86 @@ Own<T> Refcounted::addRefInternal(T* object) {
return Own<T>(object, *refcounted);
}
// =======================================================================================
// Atomic (thread-safe) refcounting
//
// Warning: Atomic ops are SLOW.
class AtomicRefcounted: private kj::Disposer {
public:
virtual ~AtomicRefcounted() noexcept(false);
inline bool isShared() const { return __atomic_load_n(&refcount, __ATOMIC_ACQUIRE) > 1; }
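// Returns true if more than one reference to this object currently exists. The acquire
// load pairs with the release decrement in disposeImpl(), so a false result also means
// this thread sees all writes made by former co-owners before they dropped their refs.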
private:
mutable uint refcount = 0;
bool addRefWeakInternal() const;
void disposeImpl(void* pointer) const override;
template <typename T>
static kj::Own<T> addRefInternal(T* object);
template <typename T>
static kj::Own<const T> addRefInternal(const T* object);
template <typename T>
friend kj::Own<T> atomicAddRef(T& object);
template <typename T>
friend kj::Own<const T> atomicAddRef(const T& object);
template <typename T>
friend kj::Maybe<kj::Own<const T>> atomicAddRefWeak(const T& object);
template <typename T, typename... Params>
friend kj::Own<T> atomicRefcounted(Params&&... params);
};
template <typename T, typename... Params>
inline kj::Own<T> atomicRefcounted(Params&&... params) {
return AtomicRefcounted::addRefInternal(new T(kj::fwd<Params>(params)...));
}
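// Illustrative sketch (hypothetical `MyService`, not part of this header): subclass
// AtomicRefcounted publicly, construct instances with atomicRefcounted<T>(), and take
// additional references with atomicAddRef(). Each Own may be dropped from any thread;
// the object is destroyed when the last one goes away.
//
//   class MyService: public kj::AtomicRefcounted {
//   public:
//     kj::Own<const MyService> addRefToThis() const {
//       return kj::atomicAddRef(*this);
//     }
//   };
//
//   kj::Own<MyService> service = kj::atomicRefcounted<MyService>();
//   kj::Own<const MyService> ref = service->addRefToThis();  // may be used on another thread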
template <typename T>
kj::Own<T> atomicAddRef(T& object) {
KJ_IREQUIRE(object.AtomicRefcounted::refcount > 0, "Object not allocated with kj::atomicRefcounted().");
return AtomicRefcounted::addRefInternal(&object);
}
template <typename T>
kj::Own<const T> atomicAddRef(const T& object) {
KJ_IREQUIRE(object.AtomicRefcounted::refcount > 0, "Object not allocated with kj::atomicRefcounted().");
return AtomicRefcounted::addRefInternal(&object);
}
template <typename T>
kj::Maybe<kj::Own<const T>> atomicAddRefWeak(const T& object) {
// Try to addref an object whose refcount could have already reached zero in another thread, and
// whose destructor could therefore already have started executing. The destructor must contain
// some synchronization that guarantees it has not yet completed when atomicAddRefWeak() is
// called (so that the object is still valid). Since the destructor cannot be canceled once it
// has started, this function returns nullptr in the case that it has already started. One
// possible synchronization pattern is sketched after this function.
const AtomicRefcounted* refcounted = &object;
if (refcounted->addRefWeakInternal()) {
return kj::Own<const T>(&object, *refcounted);
} else {
return nullptr;
}
}
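// Illustrative sketch of the contract above (hypothetical `Cache`/`Entry`, not part of this
// header; assumes <mutex> is included): the destructor removes the object from a registry under
// the same mutex that readers hold while calling atomicAddRefWeak(), so the object is still
// valid at the moment of the call even if its refcount has already reached zero.
//
//   class Cache;
//
//   class Entry: public kj::AtomicRefcounted {
//   public:
//     Entry(Cache& cache): cache(cache) {}
//     ~Entry() noexcept(false);  // defined below; removes itself from the cache
//   private:
//     Cache& cache;
//   };
//
//   class Cache {
//   public:
//     kj::Own<Entry> add() {
//       auto entry = kj::atomicRefcounted<Entry>(*this);
//       std::lock_guard<std::mutex> lock(mutex);
//       weakEntry = entry.get();
//       return entry;
//     }
//     kj::Maybe<kj::Own<const Entry>> find() {
//       std::lock_guard<std::mutex> lock(mutex);
//       if (weakEntry == nullptr) return nullptr;
//       // The refcount may already be zero, but ~Entry() cannot finish while we hold `mutex`,
//       // so the object is still valid here; atomicAddRefWeak() returns nullptr if the count
//       // already hit zero (the destructor is committed to run).
//       return kj::atomicAddRefWeak(*weakEntry);
//     }
//   private:
//     friend class Entry;
//     std::mutex mutex;
//     const Entry* weakEntry = nullptr;
//   };
//
//   Entry::~Entry() noexcept(false) {
//     std::lock_guard<std::mutex> lock(cache.mutex);
//     cache.weakEntry = nullptr;
//   }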
template <typename T>
kj::Own<T> AtomicRefcounted::addRefInternal(T* object) {
AtomicRefcounted* refcounted = object;
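// A relaxed increment is sufficient: the caller already owns a reference, so the count cannot
// concurrently reach zero, and taking another reference needs no ordering with other memory
// operations.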
__atomic_add_fetch(&refcounted->refcount, 1, __ATOMIC_RELAXED);
return kj::Own<T>(object, *refcounted);
}
template <typename T>
kj::Own<const T> AtomicRefcounted::addRefInternal(const T* object) {
const AtomicRefcounted* refcounted = object;
__atomic_add_fetch(&refcounted->refcount, 1, __ATOMIC_RELAXED);
return kj::Own<const T>(object, *refcounted);
}
} // namespace kj
#endif // KJ_REFCOUNT_H_