Commit 49efe2d7 authored by Kenton Varda

Blindly attempt to implement AtomicRefcount for MSVC.

parent 44cc0500
refcount.c++
@@ -21,7 +21,13 @@
#include "refcount.h"
#include "debug.h"
#include <memory>
#if _MSC_VER
// Annoyingly, MSVC only implements the C++ atomic libs, not the C libs, so the only useful
// thing we can get from <atomic> seems to be atomic_thread_fence... but that one function is
// indeed not implemented by the intrinsics, so...
#include <atomic>
#endif
namespace kj {
@@ -46,15 +52,39 @@ AtomicRefcounted::~AtomicRefcounted() noexcept(false) {
}
void AtomicRefcounted::disposeImpl(void* pointer) const {
#if _MSC_VER
if (KJ_MSVC_INTERLOCKED(Decrement, rel)(&refcount) == 0) {
std::atomic_thread_fence(std::memory_order_acquire);
delete this;
}
#else
if (__atomic_sub_fetch(&refcount, 1, __ATOMIC_RELEASE) == 0) {
__atomic_thread_fence(__ATOMIC_ACQUIRE);
delete this;
}
#endif
}
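For reference only (not part of this commit): both branches above implement the same release-decrement / acquire-fence pattern, which can be written against portable std::atomic roughly as follows. Note that std::atomic's fetch_sub returns the old value, whereas the intrinsic and __atomic_sub_fetch return the new one, hence the == 1 check here versus the == 0 checks above.

#include <atomic>

class RefcountSketch {
public:
  void release() const {
    // Release ordering publishes this thread's writes to the object before
    // the count can reach zero in another thread.
    if (refcount.fetch_sub(1, std::memory_order_release) == 1) {
      // The acquire fence pairs with the release decrements of the other
      // owners, so the destructor sees all of their writes.
      std::atomic_thread_fence(std::memory_order_acquire);
      delete this;
    }
  }
private:
  mutable std::atomic<unsigned> refcount{1};
};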
bool AtomicRefcounted::addRefWeakInternal() const {
#if _MSC_VER
long orig = refcount;
for (;;) {
if (orig == 0) {
// Refcount already hit zero. Destructor is already running so we can't revive the object.
return false;
}
unsigned long old = KJ_MSVC_INTERLOCKED(CompareExchange, nf)(&refcount, orig + 1, orig);
if (old == orig) {
return true;
}
orig = old;
}
#else
uint orig = __atomic_load_n(&refcount, __ATOMIC_RELAXED);
for (;;) {
if (orig == 0) {
// Refcount already hit zero. Destructor is already running so we can't revive the object.
return false;
@@ -66,6 +96,7 @@ bool AtomicRefcounted::addRefWeakInternal() const {
return true;
}
}
#endif
}
} // namespace kj
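For reference only (not part of this commit): both branches of addRefWeakInternal() use a compare-exchange loop instead of a plain increment because a weak ref must not resurrect an object whose count has already reached zero, since its destructor may already be running. A portable sketch of the same loop:

#include <atomic>

// Returns true if a new reference was taken, false if the count had already
// dropped to zero and the object can no longer be revived.
bool tryAddRefSketch(std::atomic<unsigned>& refcount) {
  unsigned orig = refcount.load(std::memory_order_relaxed);
  for (;;) {
    if (orig == 0) return false;
    // On failure, compare_exchange_weak reloads `orig` with the current count
    // and we retry; on success the count went from orig to orig + 1 atomically.
    if (refcount.compare_exchange_weak(orig, orig + 1, std::memory_order_relaxed)) {
      return true;
    }
  }
}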
refcount.h
@@ -19,15 +19,19 @@
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
#include "memory.h"
#ifndef KJ_REFCOUNT_H_
#define KJ_REFCOUNT_H_
#include "memory.h"
#if defined(__GNUC__) && !KJ_HEADER_WARNINGS
#pragma GCC system_header
#endif
#if _MSC_VER
#include <intrin0.h>
#endif
namespace kj {
// =======================================================================================
@@ -110,14 +114,32 @@ Own<T> Refcounted::addRefInternal(T* object) {
//
// Warning: Atomic ops are SLOW.
#if _MSC_VER
#if _M_ARM
#define KJ_MSVC_INTERLOCKED(OP, MEM) _Interlocked##OP##_##MEM
#else
#define KJ_MSVC_INTERLOCKED(OP, MEM) _Interlocked##OP
#endif
#endif
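For reference only (not part of this commit): on ARM, MSVC ships variants of the Interlocked intrinsics with explicit ordering suffixes (_acq, _rel, and _nf for "no fence"), while on x86/x64 only the full-barrier forms exist, so the macro simply drops the suffix there. Assuming the macro above, the expansions look like this:

// With _M_ARM defined:
//   KJ_MSVC_INTERLOCKED(Decrement, rel)(&refcount)       expands to  _InterlockedDecrement_rel(&refcount)
//   KJ_MSVC_INTERLOCKED(CompareExchange, nf)(&rc, n, o)  expands to  _InterlockedCompareExchange_nf(&rc, n, o)
// Otherwise (x86/x64, where every Interlocked op is a full barrier):
//   KJ_MSVC_INTERLOCKED(Decrement, rel)(&refcount)       expands to  _InterlockedDecrement(&refcount)
//   KJ_MSVC_INTERLOCKED(CompareExchange, nf)(&rc, n, o)  expands to  _InterlockedCompareExchange(&rc, n, o)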
class AtomicRefcounted: private kj::Disposer {
public:
virtual ~AtomicRefcounted() noexcept(false);
inline bool isShared() const {
#if _MSC_VER
return KJ_MSVC_INTERLOCKED(Or, acq)(&refcount, 0) > 1;
#else
return __atomic_load_n(&refcount, __ATOMIC_ACQUIRE) > 1;
#endif
}
private:
#if _MSC_VER
mutable volatile long refcount = 0;
#else
mutable volatile uint refcount = 0;
#endif
bool addRefWeakInternal() const;
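For reference only (not part of this commit): MSVC has no standalone atomic-load intrinsic, so isShared() above reads the count by OR-ing in zero; _InterlockedOr returns the value the variable held before the (no-op) update, which makes it act as an atomic acquire load. The equivalent in portable terms:

#include <atomic>

bool isSharedSketch(std::atomic<long>& refcount) {
  // fetch_or(0, acquire) leaves the value unchanged and returns the previous
  // value: the same trick as the _InterlockedOr(&refcount, 0) call above.
  return refcount.fetch_or(0, std::memory_order_acquire) > 1;
}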
@@ -174,14 +196,22 @@ kj::Maybe<kj::Own<const T>> atomicAddRefWeak(const T& object) {
template <typename T>
kj::Own<T> AtomicRefcounted::addRefInternal(T* object) {
AtomicRefcounted* refcounted = object;
#if _MSC_VER
KJ_MSVC_INTERLOCKED(Increment, nf)(&refcounted->refcount);
#else
__atomic_add_fetch(&refcounted->refcount, 1, __ATOMIC_RELAXED);
#endif
return kj::Own<T>(object, *refcounted);
}
template <typename T>
kj::Own<const T> AtomicRefcounted::addRefInternal(const T* object) {
const AtomicRefcounted* refcounted = object;
#if _MSC_VER
KJ_MSVC_INTERLOCKED(Increment, nf)(&refcounted->refcount);
#else
__atomic_add_fetch(&refcounted->refcount, 1, __ATOMIC_RELAXED);
#endif
return kj::Own<const T>(object, *refcounted);
}
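For reference only (not part of this commit), a sketch of how the class is meant to be used, assuming the companion helpers kj::atomicRefcounted<T>() and kj::atomicAddRef() that live elsewhere in this header and are not visible in this diff:

#include <kj/refcount.h>

class Widget: public kj::AtomicRefcounted {
public:
  int value = 0;
};

void example() {
  // Hypothetical usage; the helper names are assumptions, only AtomicRefcounted
  // itself appears in this diff.
  kj::Own<Widget> a = kj::atomicRefcounted<Widget>();
  kj::Own<Widget> b = kj::atomicAddRef(*a);  // bumps the atomic refcount
  // `a` and `b` may now be dropped from different threads; whichever drop
  // takes the count to zero runs disposeImpl() and deletes the Widget.
}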