atomics: add atomic_fetch_and/atomic_fetch_or

As usual, we prefer plain C11 names and semantics, and have to emulate
them if C11 atomics are not available.

For the non-atomic fallback (which is just there to make code compile in
situations where the atomic property is not overly important), we require
a gross hack to make the generic macros work without using compiler-
specific extensions.
wm4 2015-05-11 23:20:45 +02:00
parent e5573728c7
commit cc24ec5b3c
1 changed file with 20 additions and 7 deletions

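The added operations follow plain C11 semantics: atomic_fetch_and and
atomic_fetch_or atomically apply the bitwise operation and return the value
the object held before the update. A minimal usage sketch (hypothetical code,
not taken from mpv; assumes C11 <stdatomic.h>, or this emulation header where
C11 atomics are unavailable):

    // Hypothetical flag word manipulated with the fetch-and/fetch-or macros.
    #include <stdatomic.h>   // or mpv's emulation header on pre-C11 toolchains
    #include <stdio.h>

    #define FLAG_EOF   1UL
    #define FLAG_ERROR 2UL

    static atomic_ulong flags = ATOMIC_VAR_INIT(0);

    int main(void)
    {
        // Set the EOF bit; the return value tells us whether it was already set.
        unsigned long old = atomic_fetch_or(&flags, FLAG_EOF);
        if (!(old & FLAG_EOF))
            printf("EOF newly signalled\n");

        // Clear the error bit by AND-ing with its complement.
        atomic_fetch_and(&flags, ~FLAG_ERROR);
        return 0;
    }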

@@ -30,13 +30,14 @@
 // Emulate the parts of C11 stdatomic.h needed by mpv.
 // Still relies on gcc/clang atomic builtins.
+// The t field is a hack to make the non-atomic fallback macro mess work.
-typedef struct { volatile unsigned long v; } atomic_ulong;
-typedef struct { volatile int v; } atomic_int;
-typedef struct { volatile _Bool v; } atomic_bool;
-typedef struct { volatile long long v; } atomic_llong;
-typedef struct { volatile uint_least32_t v; } atomic_uint_least32_t;
-typedef struct { volatile unsigned long long v; } atomic_ullong;
+typedef struct { volatile unsigned long v, t; } atomic_ulong;
+typedef struct { volatile int v, t; } atomic_int;
+typedef struct { volatile _Bool v, t; } atomic_bool;
+typedef struct { volatile long long v, t; } atomic_llong;
+typedef struct { volatile uint_least32_t v, t; } atomic_uint_least32_t;
+typedef struct { volatile unsigned long long v, t; } atomic_ullong;
 #define ATOMIC_VAR_INIT(x) \
     {.v = (x)}
@@ -54,6 +55,10 @@ typedef struct { volatile unsigned long long v; } atomic_ullong;
     __atomic_store_n(&(p)->v, val, __ATOMIC_SEQ_CST)
 #define atomic_fetch_add(a, b) \
     __atomic_fetch_add(&(a)->v, b, __ATOMIC_SEQ_CST)
+#define atomic_fetch_and(a, b) \
+    __atomic_fetch_and(&(a)->v, b, __ATOMIC_SEQ_CST)
+#define atomic_fetch_or(a, b) \
+    __atomic_fetch_or(&(a)->v, b, __ATOMIC_SEQ_CST)
 #define atomic_compare_exchange_strong(a, b, c) \
     __atomic_compare_exchange_n(&(a)->v, b, c, 0, __ATOMIC_SEQ_CST, \
     __ATOMIC_SEQ_CST)
@@ -66,6 +71,10 @@ typedef struct { volatile unsigned long long v; } atomic_ullong;
     (__sync_synchronize(), (p)->v = (val), __sync_synchronize())
 #define atomic_fetch_add(a, b) \
     __sync_fetch_and_add(&(a)->v, b)
+#define atomic_fetch_and(a, b) \
+    __sync_fetch_and_and(&(a)->v, b)
+#define atomic_fetch_or(a, b) \
+    __sync_fetch_and_or(&(a)->v, b)
 // Assumes __sync_val_compare_and_swap is "strong" (using the C11 meaning).
 #define atomic_compare_exchange_strong(p, old, new) \
     ({ __typeof__((p)->v) val_ = __sync_val_compare_and_swap(&(p)->v, *(old), new); \
@@ -79,7 +88,11 @@ typedef struct { volatile unsigned long long v; } atomic_ullong;
 // a serious dependency on working atomics, so this is barely ok.
 #define atomic_load(p) ((p)->v)
 #define atomic_store(p, val) ((p)->v = (val))
-#define atomic_fetch_add(a, b) (((a)->v += (b)) - (b))
+#define atomic_fetch_op_(a, b, op) \
+    ((a)->t = (a)->v, (a)->v = (a)->v op (b), (a)->t)
+#define atomic_fetch_add(a, b) atomic_fetch_op_(a, b, +)
+#define atomic_fetch_and(a, b) atomic_fetch_op_(a, b, &)
+#define atomic_fetch_or(a, b) atomic_fetch_op_(a, b, |)
 #define atomic_compare_exchange_strong(p, old, new) \
     ((p)->v == *(old) ? ((p)->v = (new), 1) : (*(old) = (p)->v, 0))
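
The "gross hack" mentioned in the commit message is the scratch member t:
without compiler-specific extensions such as __typeof__ or statement
expressions, a generic fetch-op macro has no portable way to declare a
temporary of the correct type for the pre-update value, so it stashes that
value in the second struct field and yields it as the result of a comma
expression. A standalone sketch of the trick (hypothetical names and values,
mirroring the fallback path above, deliberately without any atomicity):

    #include <stdio.h>

    typedef struct { volatile unsigned long v, t; } fake_atomic_ulong;

    // Store the old value in t, update v, and evaluate to t, i.e. the
    // pre-update value - matching C11 fetch-and-op return semantics.
    #define fake_fetch_op_(a, b, op) \
        ((a)->t = (a)->v, (a)->v = (a)->v op (b), (a)->t)
    #define fake_fetch_or(a, b)  fake_fetch_op_(a, b, |)
    #define fake_fetch_and(a, b) fake_fetch_op_(a, b, &)

    int main(void)
    {
        fake_atomic_ulong flags = { .v = 0x3 };
        unsigned long old = fake_fetch_or(&flags, 0x4);   // old == 0x3, v == 0x7
        printf("old=%#lx new=%#lx\n", old, (unsigned long)flags.v);
        old = fake_fetch_and(&flags, ~0x1UL);             // old == 0x7, v == 0x6
        printf("old=%#lx new=%#lx\n", old, (unsigned long)flags.v);
        return 0;
    }

The gcc/clang paths above do not need any of this, since the __atomic_fetch_*
and __sync_fetch_and_* builtins already return the old value directly.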