Commit 52ef3f3f authored by Martin Lucina

Revert commit 7cb076e5, atomic ops cleanup

Reverted to using atomic.h on NetBSD
Removed GNU builtins (see http://lists.zeromq.org/pipermail/zeromq-dev/2010-May/003485.html)
Removed SPARC native atomic ops as they are untested and have been commented out for years
Add "memory" to asm clobber for X86 atomic_counter::sub()
parent 9fbb9141
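
For context on the atomic.h revert: Solaris and NetBSD expose their native atomics through <atomic.h>, and atomic_add_32_nv() returns the new value rather than the old one, which is why the ZMQ_ATOMIC_COUNTER_ATOMIC_H branch below subtracts the increment back out. A minimal sketch of that pattern, assuming a Solaris or NetBSD host where <atomic.h> is available; demo_fetch_add is an illustrative name, not code from this commit:

#include <atomic.h>      //  Solaris/NetBSD native atomic operations
#include <inttypes.h>

//  Fetch-and-add built on atomic_add_32_nv (), which returns the new value;
//  the old value is recovered by subtracting the increment, mirroring the
//  ZMQ_ATOMIC_COUNTER_ATOMIC_H branch of atomic_counter::add ().
static inline uint32_t demo_fetch_add (volatile uint32_t *counter_,
    uint32_t increment_)
{
    uint32_t new_value = atomic_add_32_nv (counter_, increment_);
    return new_value - increment_;
}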
src/atomic_counter.hpp

@@ -27,14 +27,10 @@
#define ZMQ_ATOMIC_COUNTER_MUTEX
#elif (defined __i386__ || defined __x86_64__) && defined __GNUC__
#define ZMQ_ATOMIC_COUNTER_X86
#elif 0 && defined __sparc__ && defined __GNUC__
#define ZMQ_ATOMIC_COUNTER_SPARC
#elif defined ZMQ_HAVE_WINDOWS
#define ZMQ_ATOMIC_COUNTER_WINDOWS
#elif defined sun
#define ZMQ_ATOMIC_COUNTER_SUN
#elif defined( __GNUC__ ) && ( __GNUC__ * 100 + __GNUC_MINOR__ >= 401 )
#define ZMQ_ATOMIC_COUNTER_GNU
#elif (defined ZMQ_HAVE_SOLARIS || defined ZMQ_HAVE_NETBSD)
#define ZMQ_ATOMIC_COUNTER_ATOMIC_H
#else
#define ZMQ_ATOMIC_COUNTER_MUTEX
#endif
@@ -43,7 +39,7 @@
#include "mutex.hpp"
#elif defined ZMQ_ATOMIC_COUNTER_WINDOWS
#include "windows.hpp"
#elif defined ZMQ_ATOMIC_COUNTER_SUN
#elif defined ZMQ_ATOMIC_COUNTER_ATOMIC_H
#include <atomic.h>
#endif
@@ -81,9 +77,7 @@ namespace zmq
#if defined ZMQ_ATOMIC_COUNTER_WINDOWS
old_value = InterlockedExchangeAdd ((LONG*) &value, increment_);
#elif defined ZMQ_ATOMIC_COUNTER_GNU
old_value = __sync_fetch_and_add (&value, increment_);
#elif defined ZMQ_ATOMIC_COUNTER_SUN
#elif defined ZMQ_ATOMIC_COUNTER_ATOMIC_H
integer_t new_value = atomic_add_32_nv (&value, increment_);
old_value = new_value - increment_;
#elif defined ZMQ_ATOMIC_COUNTER_X86
@@ -92,26 +86,13 @@ namespace zmq
: "=r" (old_value), "=m" (value)
: "0" (increment_), "m" (value)
: "cc", "memory");
#elif defined ZMQ_ATOMIC_COUNTER_SPARC
integer_t tmp;
__asm__ volatile (
"ld [%4], %0 \n\t"
"1: \n\t"
"add %0, %3, %1 \n\t"
"cas [%4], %0, %1 \n\t"
"cmp %0, %1 \n\t"
"bne,a,pn %%icc, 1b \n\t"
"mov %1, %0 \n\t"
: "=&r" (old_value), "=&r" (tmp), "=m" (value)
: "r" (increment_), "r" (&value)
: "cc", "memory");
#elif defined ZMQ_ATOMIC_COUNTER_MUTEX
sync.lock ();
old_value = value;
value += increment_;
sync.unlock ();
#else
#error
#error atomic_counter is not implemented for this platform
#endif
return old_value;
}
@@ -123,11 +104,7 @@ namespace zmq
LONG delta = - ((LONG) decrement);
integer_t old = InterlockedExchangeAdd ((LONG*) &value, delta);
return old - decrement != 0;
#elif defined ZMQ_ATOMIC_COUNTER_GNU
int32_t delta = - ((int32_t) decrement);
integer_t nv = __sync_fetch_and_add (&value, delta);
return nv != 0;
#elif defined ZMQ_ATOMIC_COUNTER_SUN
#elif defined ZMQ_ATOMIC_COUNTER_ATOMIC_H
int32_t delta = - ((int32_t) decrement);
integer_t nv = atomic_add_32_nv (&value, delta);
return nv != 0;
@@ -137,24 +114,8 @@ namespace zmq
__asm__ volatile ("lock; xaddl %0,%1"
: "=r" (oldval), "=m" (*val)
: "0" (oldval), "m" (*val)
: "cc");
: "cc", "memory");
return oldval != decrement;
#elif defined ZMQ_ATOMIC_COUNTER_SPARC
volatile integer_t *val = &value;
integer_t tmp;
integer_t result;
__asm__ volatile(
"ld [%4], %1\n\t"
"1:\n\t"
"add %1, %0, %2\n\t"
"cas [%4], %1, %2\n\t"
"cmp %1, %2\n\t"
"bne,a,pn %%icc, 1b\n\t"
"mov %2, %1\n\t"
: "+r" (-decrement), "=&r" (tmp), "=&r" (result), "+m" (*val)
: "r" (val)
: "cc");
return result <= decrement;
#elif defined ZMQ_ATOMIC_COUNTER_MUTEX
sync.lock ();
value -= decrement;
@@ -162,7 +123,7 @@ namespace zmq
sync.unlock ();
return result;
#else
#error
#error atomic_counter is not implemented for this platform
#endif
}
@@ -188,18 +149,12 @@ namespace zmq
#if defined ZMQ_ATOMIC_COUNTER_WINDOWS
#undef ZMQ_ATOMIC_COUNTER_WINDOWS
#endif
#if defined ZMQ_ATOMIC_COUNTER_GNU
#undef ZMQ_ATOMIC_COUNTER_GNU
#endif
#if defined ZMQ_ATOMIC_COUNTER_SUN
#undef ZMQ_ATOMIC_COUNTER_SUN
#if defined ZMQ_ATOMIC_COUNTER_ATOMIC_H
#undef ZMQ_ATOMIC_COUNTER_ATOMIC_H
#endif
#if defined ZMQ_ATOMIC_COUNTER_X86
#undef ZMQ_ATOMIC_COUNTER_X86
#endif
#if defined ZMQ_ATOMIC_COUNTER_SPARC
#undef ZMQ_ATOMIC_COUNTER_SPARC
#endif
#if defined ZMQ_ATOMIC_COUNTER_MUTEX
#undef ZMQ_ATOMIC_COUNTER_MUTEX
#endif
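
The one change in atomic_counter.hpp that is not part of the revert is the "memory" clobber added to the lock; xadd in sub() (the add() path already had it). The clobber tells the compiler that the asm statement may touch memory, so it cannot keep the counter cached in a register or reorder ordinary loads and stores across the instruction. A standalone sketch of the same idiom, assuming GCC-style inline asm on i386/x86_64; demo_xadd is an illustrative name, not code from this commit:

#include <inttypes.h>

//  Atomically adds delta_ to *target_ and returns the previous value.
//  The "memory" clobber makes the statement a compiler-level barrier:
//  GCC may not move memory accesses across it or assume *target_ is
//  unchanged afterwards.
static inline uint32_t demo_xadd (volatile uint32_t *target_, uint32_t delta_)
{
    uint32_t old_value = delta_;
    __asm__ volatile ("lock; xaddl %0, %1"
        : "+r" (old_value), "+m" (*target_)
        :
        : "cc", "memory");
    return old_value;
}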
src/atomic_ptr.hpp

@@ -26,14 +26,10 @@
#define ZMQ_ATOMIC_PTR_MUTEX
#elif (defined __i386__ || defined __x86_64__) && defined __GNUC__
#define ZMQ_ATOMIC_PTR_X86
#elif 0 && defined __sparc__ && defined __GNUC__
#define ZMQ_ATOMIC_PTR_SPARC
#elif defined ZMQ_HAVE_WINDOWS
#define ZMQ_ATOMIC_PTR_WINDOWS
#elif defined sun
#define ZMQ_ATOMIC_COUNTER_SUN
#elif defined( __GNUC__ ) && ( __GNUC__ * 100 + __GNUC_MINOR__ >= 401 )
#define ZMQ_ATOMIC_COUNTER_GNU
#elif (defined ZMQ_HAVE_SOLARIS || defined ZMQ_HAVE_NETBSD)
#define ZMQ_ATOMIC_COUNTER_ATOMIC_H
#else
#define ZMQ_ATOMIC_PTR_MUTEX
#endif
@@ -42,7 +38,7 @@
#include "mutex.hpp"
#elif defined ZMQ_ATOMIC_PTR_WINDOWS
#include "windows.hpp"
#elif defined ZMQ_ATOMIC_PTR_SUN
#elif defined ZMQ_ATOMIC_PTR_ATOMIC_H
#include <atomic.h>
#endif
@@ -80,9 +76,7 @@ namespace zmq
{
#if defined ZMQ_ATOMIC_PTR_WINDOWS
return (T*) InterlockedExchangePointer ((PVOID*) &ptr, val_);
#elif defined ZMQ_ATOMIC_PTR_GNU
return (T*) __sync_lock_test_and_set (&ptr, val_);
#elif defined ZMQ_ATOMIC_PTR_SUN
#elif defined ZMQ_ATOMIC_PTR_ATOMIC_H
return (T*) atomic_swap_ptr (&ptr, val_);
#elif defined ZMQ_ATOMIC_PTR_X86
T *old;
@@ -91,23 +85,6 @@ namespace zmq
: "=r" (old), "=m" (ptr)
: "m" (ptr), "0" (val_));
return old;
#elif defined ZMQ_ATOMIC_PTR_SPARC
T* newptr = val_;
volatile T** ptrin = &ptr;
T* tmp;
T* prev;
__asm__ __volatile__(
"ld [%4], %1\n\t"
"1:\n\t"
"mov %0, %2\n\t"
"cas [%4], %1, %2\n\t"
"cmp %1, %2\n\t"
"bne,a,pn %%icc, 1b\n\t"
"mov %2, %1\n\t"
: "+r" (newptr), "=&r" (tmp), "=&r" (prev), "+m" (*ptrin)
: "r" (ptrin)
: "cc");
return prev;
#elif defined ZMQ_ATOMIC_PTR_MUTEX
sync.lock ();
T *old = (T*) ptr;
@@ -115,7 +92,7 @@ namespace zmq
sync.unlock ();
return old;
#else
#error
#error atomic_ptr is not implemented for this platform
#endif
}
@@ -128,9 +105,7 @@ namespace zmq
#if defined ZMQ_ATOMIC_PTR_WINDOWS
return (T*) InterlockedCompareExchangePointer (
(volatile PVOID*) &ptr, val_, cmp_);
#elif defined ZMQ_ATOMIC_PTR_GNU
return (T*) __sync_val_compare_and_swap (&ptr, cmp_, val_);
#elif defined ZMQ_ATOMIC_PTR_SUN
#elif defined ZMQ_ATOMIC_PTR_ATOMIC_H
return (T*) atomic_cas_ptr (&ptr, cmp_, val_);
#elif defined ZMQ_ATOMIC_PTR_X86
T *old;
@@ -140,15 +115,6 @@ namespace zmq
: "r" (val_), "m" (ptr), "0" (cmp_)
: "cc");
return old;
#elif defined ZMQ_ATOMIC_PTR_SPARC
volatile T** ptrin = &ptr;
volatile T* prev = ptr;
__asm__ __volatile__(
"cas [%3], %1, %2\n\t"
: "+m" (*ptrin)
: "r" (cmp_), "r" (val_), "r" (ptrin)
: "cc");
return prev;
#elif defined ZMQ_ATOMIC_PTR_MUTEX
sync.lock ();
T *old = (T*) ptr;
@@ -157,7 +123,7 @@ namespace zmq
sync.unlock ();
return old;
#else
#error
#error atomic_ptr is not implemented for this platform
#endif
}
@@ -178,18 +144,12 @@ namespace zmq
#if defined ZMQ_ATOMIC_PTR_WINDOWS
#undef ZMQ_ATOMIC_PTR_WINDOWS
#endif
#if defined ZMQ_ATOMIC_PTR_GNU
#undef ZMQ_ATOMIC_PTR_GNU
#endif
#if defined ZMQ_ATOMIC_PTR_SUN
#undef ZMQ_ATOMIC_PTR_SUN
#if defined ZMQ_ATOMIC_PTR_ATOMIC_H
#undef ZMQ_ATOMIC_PTR_ATOMIC_H
#endif
#if defined ZMQ_ATOMIC_PTR_X86
#undef ZMQ_ATOMIC_PTR_X86
#endif
#if defined ZMQ_ATOMIC_PTR_SPARC
#undef ZMQ_ATOMIC_PTR_SPARC
#endif
#if defined ZMQ_ATOMIC_PTR_MUTEX
#undef ZMQ_ATOMIC_PTR_MUTEX
#endif
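
On the pointer side, <atomic.h> likewise provides atomic_swap_ptr() and atomic_cas_ptr(), matching the exchange and compare-and-swap semantics of the x86 and Windows branches. As a usage sketch (not code from this commit), the usual retry loop built on zmq::atomic_ptr_t<T>::cas() looks like this; node_t and push_node are illustrative names:

#include "atomic_ptr.hpp"
#include <stddef.h>

struct node_t
{
    node_t *next;
    int value;
};

//  Lock-free push onto a singly linked stack whose head is guarded by
//  zmq::atomic_ptr_t. cas () returns the pointer the head actually held,
//  so the loop retries until the head it read is the one it replaced.
void push_node (zmq::atomic_ptr_t <node_t> &head_, node_t *node_)
{
    node_t *old_head = head_.cas (NULL, NULL);    //  Read current head.
    while (true) {
        node_->next = old_head;
        node_t *seen = head_.cas (old_head, node_);
        if (seen == old_head)
            break;                                //  Swap succeeded.
        old_head = seen;                          //  Lost a race; retry.
    }
}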