Skip to content
Projects
Groups
Snippets
Help
Loading...
Sign in / Register
Toggle navigation
C
capnproto
Project
Project
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Packages
Packages
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
submodule
capnproto
Commits
49efe2d7
Commit
49efe2d7
authored
Dec 22, 2017
by
Kenton Varda
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
Blindly attempt to implement AtomicRefcount for MSVC.
parent
44cc0500
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
67 additions
and
6 deletions
+67
-6
refcount.c++
c++/src/kj/refcount.c++
+33
-2
refcount.h
c++/src/kj/refcount.h
+34
-4
No files found.
c++/src/kj/refcount.c++
View file @
49efe2d7
...
@@ -21,7 +21,13 @@
...
@@ -21,7 +21,13 @@
#include "refcount.h"
#include "refcount.h"
#include "debug.h"
#include "debug.h"
#include <memory>
#if _MSC_VER
// Annoyingly, MSVC only implements the C++ atomic libs, not the C libs, so the only useful
// thing we can get from <atomic> seems to be atomic_thread_fence... but that one function is
// indeed not implemented by the intrinsics, so...
#include <atomic>
#endif
namespace
kj
{
namespace
kj
{
...
@@ -46,15 +52,39 @@ AtomicRefcounted::~AtomicRefcounted() noexcept(false) {
...
@@ -46,15 +52,39 @@ AtomicRefcounted::~AtomicRefcounted() noexcept(false) {
}
}
void
AtomicRefcounted
::
disposeImpl
(
void
*
pointer
)
const
{
void
AtomicRefcounted
::
disposeImpl
(
void
*
pointer
)
const
{
#if _MSC_VER
if
(
KJ_MSVC_INTERLOCKED
(
Decrement
,
rel
)(
&
refcount
)
==
0
)
{
std
::
atomic_thread_fence
(
std
::
memory_order_acquire
);
delete
this
;
}
#else
if
(
__atomic_sub_fetch
(
&
refcount
,
1
,
__ATOMIC_RELEASE
)
==
0
)
{
if
(
__atomic_sub_fetch
(
&
refcount
,
1
,
__ATOMIC_RELEASE
)
==
0
)
{
__atomic_thread_fence
(
__ATOMIC_ACQUIRE
);
__atomic_thread_fence
(
__ATOMIC_ACQUIRE
);
delete
this
;
delete
this
;
}
}
#endif
}
}
// AtomicRefcounted::addRefWeakInternal(): compare-and-swap loop that
// increments the refcount only if it has not already reached zero — i.e. an
// attempt to upgrade a weak reference; returns false once destruction began.
// NOTE(review): this span is a side-by-side diff rendering (one token per
// line, old and new columns duplicated); part of the non-MSVC branch is
// elided at the "@@" markers below, so the function is not fully visible.
bool
AtomicRefcounted
::
addRefWeakInternal
()
const
{
bool
AtomicRefcounted
::
addRefWeakInternal
()
const
{
#if _MSC_VER
long
orig
=
refcount
;
for
(;;)
{
if
(
orig
==
0
)
{
// Refcount already hit zero. Destructor is already running so we can't revive the object.
return
false
;
}
// NOTE(review): `old` is declared `unsigned long` while `orig` is `long`;
// the `old == orig` comparison mixes signedness — presumably harmless for
// realistic refcounts, but confirm intended.  "nf" = no-fence variant.
unsigned
long
old
=
KJ_MSVC_INTERLOCKED
(
CompareExchange
,
nf
)(
&
refcount
,
orig
+
1
,
orig
);
if
(
old
==
orig
)
{
return
true
;
}
orig
=
old
;
}
#else
uint
orig
=
__atomic_load_n
(
&
refcount
,
__ATOMIC_RELAXED
);
for
(;;)
{
for
(;;)
{
uint
orig
=
__atomic_load_n
(
&
refcount
,
__ATOMIC_RELAXED
);
if
(
orig
==
0
)
{
if
(
orig
==
0
)
{
// Refcount already hit zero. Destructor is already running so we can't revive the object.
// Refcount already hit zero. Destructor is already running so we can't revive the object.
return
false
;
return
false
;
// NOTE(review): the diff elides the body of the non-MSVC CAS loop here;
// the success/retry logic between this point and `return true` is hidden.
...
@@ -66,6 +96,7 @@ bool AtomicRefcounted::addRefWeakInternal() const {
...
@@ -66,6 +96,7 @@ bool AtomicRefcounted::addRefWeakInternal() const {
return
true
;
return
true
;
}
}
}
}
#endif
}
}
}
// namespace kj
}
// namespace kj
c++/src/kj/refcount.h
View file @
49efe2d7
...
@@ -19,15 +19,19 @@
...
@@ -19,15 +19,19 @@
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
// THE SOFTWARE.
#include "memory.h"
#ifndef KJ_REFCOUNT_H_
#ifndef KJ_REFCOUNT_H_
#define KJ_REFCOUNT_H_
#define KJ_REFCOUNT_H_
#include "memory.h"
#if defined(__GNUC__) && !KJ_HEADER_WARNINGS
#if defined(__GNUC__) && !KJ_HEADER_WARNINGS
#pragma GCC system_header
#pragma GCC system_header
#endif
#endif
#if _MSC_VER
#include <intrin0.h>
#endif
namespace
kj
{
namespace
kj
{
// =======================================================================================
// =======================================================================================
...
@@ -110,14 +114,32 @@ Own<T> Refcounted::addRefInternal(T* object) {
...
@@ -110,14 +114,32 @@ Own<T> Refcounted::addRefInternal(T* object) {
//
//
// Warning: Atomic ops are SLOW.
// Warning: Atomic ops are SLOW.
// Maps (OP, MEM) to the matching MSVC Interlocked* intrinsic name, e.g.
// KJ_MSVC_INTERLOCKED(Decrement, rel) -> _InterlockedDecrement_rel on ARM.
#if _MSC_VER
#if _M_ARM
// ARM MSVC provides explicitly-ordered intrinsic variants (suffixes such as
// _acq / _rel / _nf), so the requested memory order is honored.
#define KJ_MSVC_INTERLOCKED(OP, MEM) _Interlocked##OP##_##MEM
#else
// On other targets only the plain intrinsics exist; the MEM argument is
// dropped by this macro, so the requested ordering is ignored there.
#define KJ_MSVC_INTERLOCKED(OP, MEM) _Interlocked##OP
#endif
#endif
class
AtomicRefcounted
:
private
kj
::
Disposer
{
class
AtomicRefcounted
:
private
kj
::
Disposer
{
public
:
public
:
virtual
~
AtomicRefcounted
()
noexcept
(
false
);
virtual
~
AtomicRefcounted
()
noexcept
(
false
);
// Returns true if the refcount is greater than one, i.e. references other
// than the caller's currently exist.  Uses an acquire-ordered atomic read.
inline bool isShared() const {
#if _MSC_VER
  // FIX: the `return` keyword was missing in this branch, so the comparison
  // result was discarded and control fell off the end of a non-void
  // function — undefined behavior.
  // Interlocked-OR with 0 serves as an atomic acquire load, since MSVC does
  // not expose a plain atomic-load intrinsic here.
  return KJ_MSVC_INTERLOCKED(Or, acq)(&refcount, 0) > 1;
#else
  return __atomic_load_n(&refcount, __ATOMIC_ACQUIRE) > 1;
#endif
}
private
:
private
:
mutable
uint
refcount
=
0
;
#if _MSC_VER
mutable
volatile
long
refcount
=
0
;
#else
mutable
volatile
uint
refcount
=
0
;
#endif
bool
addRefWeakInternal
()
const
;
bool
addRefWeakInternal
()
const
;
...
@@ -174,14 +196,22 @@ kj::Maybe<kj::Own<const T>> atomicAddRefWeak(const T& object) {
...
@@ -174,14 +196,22 @@ kj::Maybe<kj::Own<const T>> atomicAddRefWeak(const T& object) {
// Atomically bumps `object`'s refcount and wraps it in an Own<T> whose
// disposer is the object itself (AtomicRefcounted privately inherits
// kj::Disposer, so disposeImpl() will run when the Own is dropped).
template <typename T>
kj::Own<T> AtomicRefcounted::addRefInternal(T* object) {
  AtomicRefcounted* base = object;
#if _MSC_VER
  // No-fence increment, mirroring the __ATOMIC_RELAXED add below.
  KJ_MSVC_INTERLOCKED(Increment, nf)(&base->refcount);
#else
  __atomic_add_fetch(&base->refcount, 1, __ATOMIC_RELAXED);
#endif
  return kj::Own<T>(object, *base);
}
// Const overload: atomically bumps `object`'s refcount and wraps it in an
// Own<const T> disposed through the object's own Disposer implementation.
// (refcount is `mutable`, so incrementing through a const pointer is fine.)
template <typename T>
kj::Own<const T> AtomicRefcounted::addRefInternal(const T* object) {
  const AtomicRefcounted* base = object;
#if _MSC_VER
  // No-fence increment, mirroring the __ATOMIC_RELAXED add below.
  KJ_MSVC_INTERLOCKED(Increment, nf)(&base->refcount);
#else
  __atomic_add_fetch(&base->refcount, 1, __ATOMIC_RELAXED);
#endif
  return kj::Own<const T>(object, *base);
}
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment