Update rbimpl_atomic_* to all take a memory order

commit 1d9f76096e
parent 2f95eb4e80
Author: John Hawthorn
Date: 2025-07-28 16:13:37 -07:00


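Every rbimpl_atomic_* helper now takes an explicit memory-order argument (the compare-and-swap helpers take separate success and failure orders), and the public RUBY_ATOMIC_* macros pass RBIMPL_ATOMIC_SEQ_CST everywhere, so their observable behavior is unchanged. As a rough sketch of what the new parameter enables for internal callers — the `counter` variable and the relaxed/acquire orderings below are illustrative assumptions, not part of this commit:

    #include "ruby/atomic.h"

    static rb_atomic_t counter;

    static rb_atomic_t
    bump_then_read(void)
    {
        /* plain atomic increment; no ordering needed beyond atomicity */
        rbimpl_atomic_inc(&counter, RBIMPL_ATOMIC_RELAXED);

        /* read the counter back with acquire ordering */
        return rbimpl_atomic_load(&counter, RBIMPL_ATOMIC_ACQUIRE);
    }

On platforms without the GCC atomic builtins or <stdatomic.h>, the new argument is accepted but ignored (`(void)memory_order;`), so those paths stay at least as strong as before.
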
@@ -84,6 +84,28 @@ typedef unsigned int rb_atomic_t;
 # error No atomic operation found
 #endif
 
+/* Memory ordering constants */
+#if defined(HAVE_GCC_ATOMIC_BUILTINS)
+# define RBIMPL_ATOMIC_RELAXED __ATOMIC_RELAXED
+# define RBIMPL_ATOMIC_ACQUIRE __ATOMIC_ACQUIRE
+# define RBIMPL_ATOMIC_RELEASE __ATOMIC_RELEASE
+# define RBIMPL_ATOMIC_ACQ_REL __ATOMIC_ACQ_REL
+# define RBIMPL_ATOMIC_SEQ_CST __ATOMIC_SEQ_CST
+#elif defined(HAVE_STDATOMIC_H)
+# define RBIMPL_ATOMIC_RELAXED memory_order_relaxed
+# define RBIMPL_ATOMIC_ACQUIRE memory_order_acquire
+# define RBIMPL_ATOMIC_RELEASE memory_order_release
+# define RBIMPL_ATOMIC_ACQ_REL memory_order_acq_rel
+# define RBIMPL_ATOMIC_SEQ_CST memory_order_seq_cst
+#else
+/* Dummy values for unsupported platforms */
+# define RBIMPL_ATOMIC_RELAXED 0
+# define RBIMPL_ATOMIC_ACQUIRE 1
+# define RBIMPL_ATOMIC_RELEASE 2
+# define RBIMPL_ATOMIC_ACQ_REL 3
+# define RBIMPL_ATOMIC_SEQ_CST 4
+#endif
+
 /**
  * Atomically replaces the value pointed by `var` with the result of addition
  * of `val` to the old value of `var`.
@@ -93,7 +115,7 @@ typedef unsigned int rb_atomic_t;
  * @return What was stored in `var` before the addition.
  * @post `var` holds `var + val`.
  */
-#define RUBY_ATOMIC_FETCH_ADD(var, val) rbimpl_atomic_fetch_add(&(var), (val))
+#define RUBY_ATOMIC_FETCH_ADD(var, val) rbimpl_atomic_fetch_add(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Atomically replaces the value pointed by `var` with the result of
@@ -104,7 +126,7 @@ typedef unsigned int rb_atomic_t;
  * @return What was stored in `var` before the subtraction.
  * @post `var` holds `var - val`.
  */
-#define RUBY_ATOMIC_FETCH_SUB(var, val) rbimpl_atomic_fetch_sub(&(var), (val))
+#define RUBY_ATOMIC_FETCH_SUB(var, val) rbimpl_atomic_fetch_sub(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Atomically replaces the value pointed by `var` with the result of
@@ -116,7 +138,7 @@ typedef unsigned int rb_atomic_t;
  * @post `var` holds `var | val`.
  * @note For portability, this macro can return void.
  */
-#define RUBY_ATOMIC_OR(var, val) rbimpl_atomic_or(&(var), (val))
+#define RUBY_ATOMIC_OR(var, val) rbimpl_atomic_or(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Atomically replaces the value pointed by `var` with `val`. This is just an
@@ -127,7 +149,7 @@ typedef unsigned int rb_atomic_t;
  * @return What was stored in `var` before the assignment.
  * @post `var` holds `val`.
  */
-#define RUBY_ATOMIC_EXCHANGE(var, val) rbimpl_atomic_exchange(&(var), (val))
+#define RUBY_ATOMIC_EXCHANGE(var, val) rbimpl_atomic_exchange(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Atomic compare-and-swap. This stores `val` to `var` if and only if the
@@ -141,7 +163,7 @@ typedef unsigned int rb_atomic_t;
  * @retval otherwise Something else is at `var`; not updated.
  */
 #define RUBY_ATOMIC_CAS(var, oldval, newval) \
-    rbimpl_atomic_cas(&(var), (oldval), (newval))
+    rbimpl_atomic_cas(&(var), (oldval), (newval), RBIMPL_ATOMIC_SEQ_CST, RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Atomic load. This loads `var` with an atomic intrinsic and returns
@@ -150,7 +172,7 @@ typedef unsigned int rb_atomic_t;
  * @param var A variable of ::rb_atomic_t
  * @return What was stored in `var`.
  */
-#define RUBY_ATOMIC_LOAD(var) rbimpl_atomic_load(&(var))
+#define RUBY_ATOMIC_LOAD(var) rbimpl_atomic_load(&(var), RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Identical to #RUBY_ATOMIC_EXCHANGE, except for the return type.
@@ -160,7 +182,7 @@ typedef unsigned int rb_atomic_t;
  * @return void
  * @post `var` holds `val`.
  */
-#define RUBY_ATOMIC_SET(var, val) rbimpl_atomic_store(&(var), (val))
+#define RUBY_ATOMIC_SET(var, val) rbimpl_atomic_store(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Identical to #RUBY_ATOMIC_FETCH_ADD, except for the return type.
@@ -170,7 +192,7 @@ typedef unsigned int rb_atomic_t;
  * @return void
  * @post `var` holds `var + val`.
  */
-#define RUBY_ATOMIC_ADD(var, val) rbimpl_atomic_add(&(var), (val))
+#define RUBY_ATOMIC_ADD(var, val) rbimpl_atomic_add(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Identical to #RUBY_ATOMIC_FETCH_SUB, except for the return type.
@@ -180,7 +202,7 @@ typedef unsigned int rb_atomic_t;
  * @return void
  * @post `var` holds `var - val`.
  */
-#define RUBY_ATOMIC_SUB(var, val) rbimpl_atomic_sub(&(var), (val))
+#define RUBY_ATOMIC_SUB(var, val) rbimpl_atomic_sub(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Atomically increments the value pointed by `var`.
@@ -189,7 +211,7 @@ typedef unsigned int rb_atomic_t;
  * @return void
  * @post `var` holds `var + 1`.
  */
-#define RUBY_ATOMIC_INC(var) rbimpl_atomic_inc(&(var))
+#define RUBY_ATOMIC_INC(var) rbimpl_atomic_inc(&(var), RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Atomically decrements the value pointed by `var`.
@@ -198,7 +220,7 @@ typedef unsigned int rb_atomic_t;
  * @return void
  * @post `var` holds `var - 1`.
  */
-#define RUBY_ATOMIC_DEC(var) rbimpl_atomic_dec(&(var))
+#define RUBY_ATOMIC_DEC(var) rbimpl_atomic_dec(&(var), RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Identical to #RUBY_ATOMIC_FETCH_ADD, except it expects its arguments to be `size_t`.
@@ -210,7 +232,7 @@ typedef unsigned int rb_atomic_t;
  * @return What was stored in `var` before the addition.
  * @post `var` holds `var + val`.
  */
-#define RUBY_ATOMIC_SIZE_FETCH_ADD(var, val) rbimpl_atomic_size_fetch_add(&(var), (val))
+#define RUBY_ATOMIC_SIZE_FETCH_ADD(var, val) rbimpl_atomic_size_fetch_add(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Identical to #RUBY_ATOMIC_INC, except it expects its argument is `size_t`.
@@ -221,7 +243,7 @@ typedef unsigned int rb_atomic_t;
  * @return void
  * @post `var` holds `var + 1`.
  */
-#define RUBY_ATOMIC_SIZE_INC(var) rbimpl_atomic_size_inc(&(var))
+#define RUBY_ATOMIC_SIZE_INC(var) rbimpl_atomic_size_inc(&(var), RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Identical to #RUBY_ATOMIC_DEC, except it expects its argument is `size_t`.
@@ -232,7 +254,7 @@ typedef unsigned int rb_atomic_t;
  * @return void
  * @post `var` holds `var - 1`.
  */
-#define RUBY_ATOMIC_SIZE_DEC(var) rbimpl_atomic_size_dec(&(var))
+#define RUBY_ATOMIC_SIZE_DEC(var) rbimpl_atomic_size_dec(&(var), RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Identical to #RUBY_ATOMIC_EXCHANGE, except it expects its arguments are
@@ -246,7 +268,7 @@ typedef unsigned int rb_atomic_t;
  * @post `var` holds `val`.
  */
 #define RUBY_ATOMIC_SIZE_EXCHANGE(var, val) \
-    rbimpl_atomic_size_exchange(&(var), (val))
+    rbimpl_atomic_size_exchange(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Identical to #RUBY_ATOMIC_CAS, except it expects its arguments are `size_t`.
@@ -260,7 +282,7 @@ typedef unsigned int rb_atomic_t;
  * @retval otherwise Something else is at `var`; not updated.
  */
 #define RUBY_ATOMIC_SIZE_CAS(var, oldval, newval) \
-    rbimpl_atomic_size_cas(&(var), (oldval), (newval))
+    rbimpl_atomic_size_cas(&(var), (oldval), (newval), RBIMPL_ATOMIC_SEQ_CST, RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Identical to #RUBY_ATOMIC_ADD, except it expects its arguments are `size_t`.
@@ -272,7 +294,7 @@ typedef unsigned int rb_atomic_t;
  * @return void
  * @post `var` holds `var + val`.
  */
-#define RUBY_ATOMIC_SIZE_ADD(var, val) rbimpl_atomic_size_add(&(var), (val))
+#define RUBY_ATOMIC_SIZE_ADD(var, val) rbimpl_atomic_size_add(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Identical to #RUBY_ATOMIC_SUB, except it expects its arguments are `size_t`.
@@ -284,7 +306,7 @@ typedef unsigned int rb_atomic_t;
  * @return void
  * @post `var` holds `var - val`.
  */
-#define RUBY_ATOMIC_SIZE_SUB(var, val) rbimpl_atomic_size_sub(&(var), (val))
+#define RUBY_ATOMIC_SIZE_SUB(var, val) rbimpl_atomic_size_sub(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Identical to #RUBY_ATOMIC_EXCHANGE, except it expects its arguments are
@@ -303,7 +325,7 @@ typedef unsigned int rb_atomic_t;
  * some pointers, most notably function pointers.
  */
 #define RUBY_ATOMIC_PTR_EXCHANGE(var, val) \
-    RBIMPL_CAST(rbimpl_atomic_ptr_exchange((void **)&(var), (void *)val))
+    RBIMPL_CAST(rbimpl_atomic_ptr_exchange((void **)&(var), (void *)val, RBIMPL_ATOMIC_SEQ_CST))
 
 /**
  * Identical to #RUBY_ATOMIC_LOAD, except it expects its arguments are `void*`.
@@ -314,7 +336,7 @@ typedef unsigned int rb_atomic_t;
  * @return The value of `var` (without tearing)
  */
 #define RUBY_ATOMIC_PTR_LOAD(var) \
-    RBIMPL_CAST(rbimpl_atomic_ptr_load((void **)&var))
+    RBIMPL_CAST(rbimpl_atomic_ptr_load((void **)&var, RBIMPL_ATOMIC_SEQ_CST))
 
 /**
  * Identical to #RUBY_ATOMIC_SET, except it expects its arguments are
@@ -327,7 +349,7 @@ typedef unsigned int rb_atomic_t;
  * @post `var` holds `val`.
  */
 #define RUBY_ATOMIC_PTR_SET(var, val) \
-    rbimpl_atomic_ptr_store((volatile void **)&(var), (val))
+    rbimpl_atomic_ptr_store((volatile void **)&(var), (val), RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Identical to #RUBY_ATOMIC_CAS, except it expects its arguments are `void*`.
@@ -341,7 +363,7 @@ typedef unsigned int rb_atomic_t;
  * @retval otherwise Something else is at `var`; not updated.
  */
 #define RUBY_ATOMIC_PTR_CAS(var, oldval, newval) \
-    RBIMPL_CAST(rbimpl_atomic_ptr_cas((void **)&(var), (void *)(oldval), (void *)(newval)))
+    RBIMPL_CAST(rbimpl_atomic_ptr_cas((void **)&(var), (void *)(oldval), (void *)(newval), RBIMPL_ATOMIC_SEQ_CST, RBIMPL_ATOMIC_SEQ_CST))
 
 /**
  * Identical to #RUBY_ATOMIC_SET, except it expects its arguments are
@@ -354,7 +376,7 @@ typedef unsigned int rb_atomic_t;
  * @post `var` holds `val`.
  */
 #define RUBY_ATOMIC_VALUE_SET(var, val) \
-    rbimpl_atomic_value_store(&(var), (val))
+    rbimpl_atomic_value_store(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Identical to #RUBY_ATOMIC_EXCHANGE, except it expects its arguments are
@@ -368,7 +390,7 @@ typedef unsigned int rb_atomic_t;
  * @post `var` holds `val`.
  */
 #define RUBY_ATOMIC_VALUE_EXCHANGE(var, val) \
-    rbimpl_atomic_value_exchange(&(var), (val))
+    rbimpl_atomic_value_exchange(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)
 
 /**
  * Identical to #RUBY_ATOMIC_CAS, except it expects its arguments are ::VALUE.
@@ -382,19 +404,20 @@ typedef unsigned int rb_atomic_t;
  * @retval otherwise Something else is at `var`; not updated.
  */
 #define RUBY_ATOMIC_VALUE_CAS(var, oldval, newval) \
-    rbimpl_atomic_value_cas(&(var), (oldval), (newval))
+    rbimpl_atomic_value_cas(&(var), (oldval), (newval), RBIMPL_ATOMIC_SEQ_CST, RBIMPL_ATOMIC_SEQ_CST)
 
 /** @cond INTERNAL_MACRO */
 RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline rb_atomic_t
-rbimpl_atomic_fetch_add(volatile rb_atomic_t *ptr, rb_atomic_t val)
+rbimpl_atomic_fetch_add(volatile rb_atomic_t *ptr, rb_atomic_t val, int memory_order)
 {
+    (void)memory_order;
 #if 0
 #elif defined(HAVE_GCC_ATOMIC_BUILTINS)
-    return __atomic_fetch_add(ptr, val, __ATOMIC_SEQ_CST);
+    return __atomic_fetch_add(ptr, val, memory_order);
 #elif defined(HAVE_GCC_SYNC_BUILTINS)
     return __sync_fetch_and_add(ptr, val);
@@ -412,7 +435,7 @@ rbimpl_atomic_fetch_add(volatile rb_atomic_t *ptr, rb_atomic_t val)
     return atomic_add_int_nv(ptr, val) - val;
 
 #elif defined(HAVE_STDATOMIC_H)
-    return atomic_fetch_add((_Atomic volatile rb_atomic_t *)ptr, val);
+    return atomic_fetch_add_explicit((_Atomic volatile rb_atomic_t *)ptr, val, memory_order);
 
 #else
 # error Unsupported platform.
@@ -424,12 +447,13 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline size_t
-rbimpl_atomic_size_fetch_add(volatile size_t *ptr, size_t val)
+rbimpl_atomic_size_fetch_add(volatile size_t *ptr, size_t val, int memory_order)
 {
+    (void)memory_order;
 #if 0
 #elif defined(HAVE_GCC_ATOMIC_BUILTINS)
-    return __atomic_fetch_add(ptr, val, __ATOMIC_SEQ_CST);
+    return __atomic_fetch_add(ptr, val, memory_order);
 #elif defined(HAVE_GCC_SYNC_BUILTINS)
     return __sync_fetch_and_add(ptr, val);
@@ -446,10 +470,10 @@ rbimpl_atomic_size_fetch_add(volatile size_t *ptr, size_t val)
     RBIMPL_STATIC_ASSERT(size_of_rb_atomic_t, sizeof *ptr == sizeof(rb_atomic_t));
 
     volatile rb_atomic_t *const tmp = RBIMPL_CAST((volatile rb_atomic_t *)ptr);
-    rbimpl_atomic_fetch_add(tmp, val);
+    rbimpl_atomic_fetch_add(tmp, val, memory_order);
 
 #elif defined(HAVE_STDATOMIC_H)
-    return atomic_fetch_add((_Atomic volatile size_t *)ptr, val);
+    return atomic_fetch_add_explicit((_Atomic volatile size_t *)ptr, val, memory_order);
 
 #else
 # error Unsupported platform.
@@ -460,8 +484,9 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline void
-rbimpl_atomic_add(volatile rb_atomic_t *ptr, rb_atomic_t val)
+rbimpl_atomic_add(volatile rb_atomic_t *ptr, rb_atomic_t val, int memory_order)
 {
+    (void)memory_order;
 #if 0
 #elif defined(HAVE_GCC_ATOMIC_BUILTINS)
@@ -470,7 +495,7 @@ rbimpl_atomic_add(volatile rb_atomic_t *ptr, rb_atomic_t val)
      * return value is not used, then compiles it into single `LOCK ADD`
      * instruction.
      */
-    __atomic_add_fetch(ptr, val, __ATOMIC_SEQ_CST);
+    __atomic_add_fetch(ptr, val, memory_order);
 
 #elif defined(HAVE_GCC_SYNC_BUILTINS)
     __sync_add_and_fetch(ptr, val);
@@ -500,12 +525,13 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline void
-rbimpl_atomic_size_add(volatile size_t *ptr, size_t val)
+rbimpl_atomic_size_add(volatile size_t *ptr, size_t val, int memory_order)
 {
+    (void)memory_order;
 #if 0
 #elif defined(HAVE_GCC_ATOMIC_BUILTINS)
-    __atomic_add_fetch(ptr, val, __ATOMIC_SEQ_CST);
+    __atomic_add_fetch(ptr, val, memory_order);
 #elif defined(HAVE_GCC_SYNC_BUILTINS)
     __sync_add_and_fetch(ptr, val);
@@ -523,7 +549,7 @@ rbimpl_atomic_size_add(volatile size_t *ptr, size_t val)
     RBIMPL_STATIC_ASSERT(size_of_rb_atomic_t, sizeof *ptr == sizeof(rb_atomic_t));
 
     volatile rb_atomic_t *const tmp = RBIMPL_CAST((volatile rb_atomic_t *)ptr);
-    rbimpl_atomic_add(tmp, val);
+    rbimpl_atomic_add(tmp, val, memory_order);
 
 #elif defined(HAVE_STDATOMIC_H)
     *(_Atomic volatile size_t *)ptr += val;
@@ -537,12 +563,13 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline void
-rbimpl_atomic_inc(volatile rb_atomic_t *ptr)
+rbimpl_atomic_inc(volatile rb_atomic_t *ptr, int memory_order)
 {
+    (void)memory_order;
 #if 0
 #elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
-    rbimpl_atomic_add(ptr, 1);
+    rbimpl_atomic_add(ptr, 1, memory_order);
 
 #elif defined(_WIN32)
     InterlockedIncrement(ptr);
@@ -551,7 +578,7 @@ rbimpl_atomic_inc(volatile rb_atomic_t *ptr)
     atomic_inc_uint(ptr);
 
 #elif defined(HAVE_STDATOMIC_H)
-    rbimpl_atomic_add(ptr, 1);
+    rbimpl_atomic_add(ptr, 1, memory_order);
 
 #else
 # error Unsupported platform.
@@ -562,12 +589,13 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline void
-rbimpl_atomic_size_inc(volatile size_t *ptr)
+rbimpl_atomic_size_inc(volatile size_t *ptr, int memory_order)
 {
+    (void)memory_order;
 #if 0
 #elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
-    rbimpl_atomic_size_add(ptr, 1);
+    rbimpl_atomic_size_add(ptr, 1, memory_order);
 
 #elif defined(_WIN64)
     InterlockedIncrement64(ptr);
@@ -578,10 +606,10 @@ rbimpl_atomic_size_inc(volatile size_t *ptr)
 #elif defined(_WIN32) || (defined(__sun) && defined(HAVE_ATOMIC_H))
     RBIMPL_STATIC_ASSERT(size_of_size_t, sizeof *ptr == sizeof(rb_atomic_t));
 
-    rbimpl_atomic_size_add(ptr, 1);
+    rbimpl_atomic_size_add(ptr, 1, memory_order);
 
 #elif defined(HAVE_STDATOMIC_H)
-    rbimpl_atomic_size_add(ptr, 1);
+    rbimpl_atomic_size_add(ptr, 1, memory_order);
 
 #else
 # error Unsupported platform.
@@ -592,12 +620,13 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline rb_atomic_t
-rbimpl_atomic_fetch_sub(volatile rb_atomic_t *ptr, rb_atomic_t val)
+rbimpl_atomic_fetch_sub(volatile rb_atomic_t *ptr, rb_atomic_t val, int memory_order)
 {
+    (void)memory_order;
 #if 0
 #elif defined(HAVE_GCC_ATOMIC_BUILTINS)
-    return __atomic_fetch_sub(ptr, val, __ATOMIC_SEQ_CST);
+    return __atomic_fetch_sub(ptr, val, memory_order);
 #elif defined(HAVE_GCC_SYNC_BUILTINS)
     return __sync_fetch_and_sub(ptr, val);
@@ -613,7 +642,7 @@ rbimpl_atomic_fetch_sub(volatile rb_atomic_t *ptr, rb_atomic_t val)
     return atomic_add_int_nv(ptr, neg * val) + val;
 
 #elif defined(HAVE_STDATOMIC_H)
-    return atomic_fetch_sub((_Atomic volatile rb_atomic_t *)ptr, val);
+    return atomic_fetch_sub_explicit((_Atomic volatile rb_atomic_t *)ptr, val, memory_order);
 
 #else
 # error Unsupported platform.
@@ -624,12 +653,13 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline void
-rbimpl_atomic_sub(volatile rb_atomic_t *ptr, rb_atomic_t val)
+rbimpl_atomic_sub(volatile rb_atomic_t *ptr, rb_atomic_t val, int memory_order)
 {
+    (void)memory_order;
 #if 0
 #elif defined(HAVE_GCC_ATOMIC_BUILTINS)
-    __atomic_sub_fetch(ptr, val, __ATOMIC_SEQ_CST);
+    __atomic_sub_fetch(ptr, val, memory_order);
 #elif defined(HAVE_GCC_SYNC_BUILTINS)
     __sync_sub_and_fetch(ptr, val);
@@ -654,12 +684,13 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline void
-rbimpl_atomic_size_sub(volatile size_t *ptr, size_t val)
+rbimpl_atomic_size_sub(volatile size_t *ptr, size_t val, int memory_order)
 {
+    (void)memory_order;
 #if 0
 #elif defined(HAVE_GCC_ATOMIC_BUILTINS)
-    __atomic_sub_fetch(ptr, val, __ATOMIC_SEQ_CST);
+    __atomic_sub_fetch(ptr, val, memory_order);
 #elif defined(HAVE_GCC_SYNC_BUILTINS)
     __sync_sub_and_fetch(ptr, val);
@@ -677,7 +708,7 @@ rbimpl_atomic_size_sub(volatile size_t *ptr, size_t val)
     RBIMPL_STATIC_ASSERT(size_of_rb_atomic_t, sizeof *ptr == sizeof(rb_atomic_t));
 
     volatile rb_atomic_t *const tmp = RBIMPL_CAST((volatile rb_atomic_t *)ptr);
-    rbimpl_atomic_sub(tmp, val);
+    rbimpl_atomic_sub(tmp, val, memory_order);
 
 #elif defined(HAVE_STDATOMIC_H)
     *(_Atomic volatile size_t *)ptr -= val;
@@ -691,12 +722,13 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline void
-rbimpl_atomic_dec(volatile rb_atomic_t *ptr)
+rbimpl_atomic_dec(volatile rb_atomic_t *ptr, int memory_order)
 {
+    (void)memory_order;
 #if 0
 #elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
-    rbimpl_atomic_sub(ptr, 1);
+    rbimpl_atomic_sub(ptr, 1, memory_order);
 
 #elif defined(_WIN32)
     InterlockedDecrement(ptr);
@@ -705,7 +737,7 @@ rbimpl_atomic_dec(volatile rb_atomic_t *ptr)
     atomic_dec_uint(ptr);
 
 #elif defined(HAVE_STDATOMIC_H)
-    rbimpl_atomic_sub(ptr, 1);
+    rbimpl_atomic_sub(ptr, 1, memory_order);
 
 #else
 # error Unsupported platform.
@@ -716,12 +748,13 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline void
-rbimpl_atomic_size_dec(volatile size_t *ptr)
+rbimpl_atomic_size_dec(volatile size_t *ptr, int memory_order)
 {
+    (void)memory_order;
 #if 0
 #elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
-    rbimpl_atomic_size_sub(ptr, 1);
+    rbimpl_atomic_size_sub(ptr, 1, memory_order);
 
 #elif defined(_WIN64)
     InterlockedDecrement64(ptr);
@@ -732,10 +765,10 @@ rbimpl_atomic_size_dec(volatile size_t *ptr)
 #elif defined(_WIN32) || (defined(__sun) && defined(HAVE_ATOMIC_H))
     RBIMPL_STATIC_ASSERT(size_of_size_t, sizeof *ptr == sizeof(rb_atomic_t));
 
-    rbimpl_atomic_size_sub(ptr, 1);
+    rbimpl_atomic_size_sub(ptr, 1, memory_order);
 
 #elif defined(HAVE_STDATOMIC_H)
-    rbimpl_atomic_size_sub(ptr, 1);
+    rbimpl_atomic_size_sub(ptr, 1, memory_order);
 
 #else
 # error Unsupported platform.
@@ -746,12 +779,13 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline void
-rbimpl_atomic_or(volatile rb_atomic_t *ptr, rb_atomic_t val)
+rbimpl_atomic_or(volatile rb_atomic_t *ptr, rb_atomic_t val, int memory_order)
 {
+    (void)memory_order;
 #if 0
 #elif defined(HAVE_GCC_ATOMIC_BUILTINS)
-    __atomic_or_fetch(ptr, val, __ATOMIC_SEQ_CST);
+    __atomic_or_fetch(ptr, val, memory_order);
 #elif defined(HAVE_GCC_SYNC_BUILTINS)
     __sync_or_and_fetch(ptr, val);
@@ -796,12 +830,13 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline rb_atomic_t
-rbimpl_atomic_exchange(volatile rb_atomic_t *ptr, rb_atomic_t val)
+rbimpl_atomic_exchange(volatile rb_atomic_t *ptr, rb_atomic_t val, int memory_order)
 {
+    (void)memory_order;
 #if 0
 #elif defined(HAVE_GCC_ATOMIC_BUILTINS)
-    return __atomic_exchange_n(ptr, val, __ATOMIC_SEQ_CST);
+    return __atomic_exchange_n(ptr, val, memory_order);
 #elif defined(HAVE_GCC_SYNC_BUILTINS)
     return __sync_lock_test_and_set(ptr, val);
@@ -813,7 +848,7 @@ rbimpl_atomic_exchange(volatile rb_atomic_t *ptr, rb_atomic_t val)
     return atomic_swap_uint(ptr, val);
 
 #elif defined(HAVE_STDATOMIC_H)
-    return atomic_exchange((_Atomic volatile rb_atomic_t *)ptr, val);
+    return atomic_exchange_explicit((_Atomic volatile rb_atomic_t *)ptr, val, memory_order);
 
 #else
 # error Unsupported platform.
@@ -824,12 +859,13 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline size_t
-rbimpl_atomic_size_exchange(volatile size_t *ptr, size_t val)
+rbimpl_atomic_size_exchange(volatile size_t *ptr, size_t val, int memory_order)
 {
+    (void)memory_order;
 #if 0
 #elif defined(HAVE_GCC_ATOMIC_BUILTINS)
-    return __atomic_exchange_n(ptr, val, __ATOMIC_SEQ_CST);
+    return __atomic_exchange_n(ptr, val, memory_order);
 #elif defined(HAVE_GCC_SYNC_BUILTINS)
     return __sync_lock_test_and_set(ptr, val);
@@ -844,11 +880,11 @@ rbimpl_atomic_size_exchange(volatile size_t *ptr, size_t val)
     RBIMPL_STATIC_ASSERT(size_of_size_t, sizeof *ptr == sizeof(rb_atomic_t));
 
     volatile rb_atomic_t *const tmp = RBIMPL_CAST((volatile rb_atomic_t *)ptr);
-    const rb_atomic_t ret = rbimpl_atomic_exchange(tmp, val);
+    const rb_atomic_t ret = rbimpl_atomic_exchange(tmp, val, memory_order);
     return RBIMPL_CAST((size_t)ret);
 
 #elif defined(HAVE_STDATOMIC_H)
-    return atomic_exchange((_Atomic volatile size_t *)ptr, val);
+    return atomic_exchange_explicit((_Atomic volatile size_t *)ptr, val, memory_order);
 
 #else
 # error Unsupported platform.
@@ -859,15 +895,16 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline void
-rbimpl_atomic_size_store(volatile size_t *ptr, size_t val)
+rbimpl_atomic_size_store(volatile size_t *ptr, size_t val, int memory_order)
 {
+    (void)memory_order;
 #if 0
 #elif defined(HAVE_GCC_ATOMIC_BUILTINS)
-    __atomic_store_n(ptr, val, __ATOMIC_SEQ_CST);
+    __atomic_store_n(ptr, val, memory_order);
 
 #else
-    rbimpl_atomic_size_exchange(ptr, val);
+    rbimpl_atomic_size_exchange(ptr, val, memory_order);
 
 #endif
 }
@@ -876,8 +913,9 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline void *
-rbimpl_atomic_ptr_exchange(void *volatile *ptr, const void *val)
+rbimpl_atomic_ptr_exchange(void *volatile *ptr, const void *val, int memory_order)
 {
+    (void)memory_order;
 #if 0
 #elif defined(InterlockedExchangePointer)
@@ -894,7 +932,7 @@ rbimpl_atomic_ptr_exchange(void *volatile *ptr, const void *val)
     const size_t sval = RBIMPL_CAST((size_t)val);
     volatile size_t *const sptr = RBIMPL_CAST((volatile size_t *)ptr);
-    const size_t sret = rbimpl_atomic_size_exchange(sptr, sval);
+    const size_t sret = rbimpl_atomic_size_exchange(sptr, sval, memory_order);
     return RBIMPL_CAST((void *)sret);
 
 #endif
@@ -904,26 +942,26 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline void
-rbimpl_atomic_ptr_store(volatile void **ptr, void *val)
+rbimpl_atomic_ptr_store(volatile void **ptr, void *val, int memory_order)
 {
     RBIMPL_STATIC_ASSERT(sizeof_value, sizeof *ptr == sizeof(size_t));
     const size_t sval = RBIMPL_CAST((size_t)val);
     volatile size_t *const sptr = RBIMPL_CAST((volatile size_t *)ptr);
-    rbimpl_atomic_size_store(sptr, sval);
+    rbimpl_atomic_size_store(sptr, sval, memory_order);
 }
 
 RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline VALUE
-rbimpl_atomic_value_exchange(volatile VALUE *ptr, VALUE val)
+rbimpl_atomic_value_exchange(volatile VALUE *ptr, VALUE val, int memory_order)
 {
     RBIMPL_STATIC_ASSERT(sizeof_value, sizeof *ptr == sizeof(size_t));
     const size_t sval = RBIMPL_CAST((size_t)val);
     volatile size_t *const sptr = RBIMPL_CAST((volatile size_t *)ptr);
-    const size_t sret = rbimpl_atomic_size_exchange(sptr, sval);
+    const size_t sret = rbimpl_atomic_size_exchange(sptr, sval, memory_order);
     return RBIMPL_CAST((VALUE)sret);
 }
@@ -931,27 +969,28 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline void
-rbimpl_atomic_value_store(volatile VALUE *ptr, VALUE val)
+rbimpl_atomic_value_store(volatile VALUE *ptr, VALUE val, int memory_order)
 {
     RBIMPL_STATIC_ASSERT(sizeof_value, sizeof *ptr == sizeof(size_t));
     const size_t sval = RBIMPL_CAST((size_t)val);
     volatile size_t *const sptr = RBIMPL_CAST((volatile size_t *)ptr);
-    rbimpl_atomic_size_store(sptr, sval);
+    rbimpl_atomic_size_store(sptr, sval, memory_order);
 }
 
 RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline rb_atomic_t
-rbimpl_atomic_load(volatile rb_atomic_t *ptr)
+rbimpl_atomic_load(volatile rb_atomic_t *ptr, int memory_order)
 {
+    (void)memory_order;
 #if 0
 #elif defined(HAVE_GCC_ATOMIC_BUILTINS)
-    return __atomic_load_n(ptr, __ATOMIC_SEQ_CST);
+    return __atomic_load_n(ptr, memory_order);
 #else
-    return rbimpl_atomic_fetch_add(ptr, 0);
+    return rbimpl_atomic_fetch_add(ptr, 0, memory_order);
 #endif
 }
@@ -959,16 +998,17 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline void
-rbimpl_atomic_store(volatile rb_atomic_t *ptr, rb_atomic_t val)
+rbimpl_atomic_store(volatile rb_atomic_t *ptr, rb_atomic_t val, int memory_order)
 {
+    (void)memory_order;
 #if 0
 #elif defined(HAVE_GCC_ATOMIC_BUILTINS)
-    __atomic_store_n(ptr, val, __ATOMIC_SEQ_CST);
+    __atomic_store_n(ptr, val, memory_order);
 #else
     /* Maybe std::atomic<rb_atomic_t>::store can be faster? */
-    rbimpl_atomic_exchange(ptr, val);
+    rbimpl_atomic_exchange(ptr, val, memory_order);
 #endif
 }
@@ -977,13 +1017,15 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline rb_atomic_t
-rbimpl_atomic_cas(volatile rb_atomic_t *ptr, rb_atomic_t oldval, rb_atomic_t newval)
+rbimpl_atomic_cas(volatile rb_atomic_t *ptr, rb_atomic_t oldval, rb_atomic_t newval, int success_memorder, int failure_memorder)
 {
+    (void)success_memorder;
+    (void)failure_memorder;
 #if 0
 #elif defined(HAVE_GCC_ATOMIC_BUILTINS)
     __atomic_compare_exchange_n(
-        ptr, &oldval, newval, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
+        ptr, &oldval, newval, 0, success_memorder, failure_memorder);
     return oldval;
 
 #elif defined(HAVE_GCC_SYNC_BUILTINS)
@@ -1003,8 +1045,8 @@ rbimpl_atomic_cas(volatile rb_atomic_t *ptr, rb_atomic_t oldval, rb_atomic_t new
     return atomic_cas_uint(ptr, oldval, newval);
 
 #elif defined(HAVE_STDATOMIC_H)
-    atomic_compare_exchange_strong(
-        (_Atomic volatile rb_atomic_t *)ptr, &oldval, newval);
+    atomic_compare_exchange_strong_explicit(
+        (_Atomic volatile rb_atomic_t *)ptr, &oldval, newval, success_memorder, failure_memorder);
     return oldval;
 
 #else
@@ -1025,13 +1067,15 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline size_t
-rbimpl_atomic_size_cas(volatile size_t *ptr, size_t oldval, size_t newval)
+rbimpl_atomic_size_cas(volatile size_t *ptr, size_t oldval, size_t newval, int success_memorder, int failure_memorder)
 {
+    (void)success_memorder;
+    (void)failure_memorder;
 #if 0
 #elif defined(HAVE_GCC_ATOMIC_BUILTINS)
     __atomic_compare_exchange_n(
-        ptr, &oldval, newval, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
+        ptr, &oldval, newval, 0, success_memorder, failure_memorder);
     return oldval;
 
 #elif defined(HAVE_GCC_SYNC_BUILTINS)
@@ -1047,11 +1091,11 @@ rbimpl_atomic_size_cas(volatile size_t *ptr, size_t oldval, size_t newval)
     RBIMPL_STATIC_ASSERT(size_of_size_t, sizeof *ptr == sizeof(rb_atomic_t));
 
     volatile rb_atomic_t *tmp = RBIMPL_CAST((volatile rb_atomic_t *)ptr);
-    return rbimpl_atomic_cas(tmp, oldval, newval);
+    return rbimpl_atomic_cas(tmp, oldval, newval, success_memorder, failure_memorder);
 
 #elif defined(HAVE_STDATOMIC_H)
-    atomic_compare_exchange_strong(
-        (_Atomic volatile size_t *)ptr, &oldval, newval);
+    atomic_compare_exchange_strong_explicit(
+        (_Atomic volatile size_t *)ptr, &oldval, newval, success_memorder, failure_memorder);
     return oldval;
 
 #else
@@ -1063,8 +1107,10 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline void *
-rbimpl_atomic_ptr_cas(void **ptr, const void *oldval, const void *newval)
+rbimpl_atomic_ptr_cas(void **ptr, const void *oldval, const void *newval, int success_memorder, int failure_memorder)
 {
+    (void)success_memorder;
+    (void)failure_memorder;
 #if 0
 #elif defined(InterlockedExchangePointer)
@@ -1087,7 +1133,7 @@ rbimpl_atomic_ptr_cas(void **ptr, const void *oldval, const void *newval)
     const size_t snew = RBIMPL_CAST((size_t)newval);
     const size_t sold = RBIMPL_CAST((size_t)oldval);
     volatile size_t *const sptr = RBIMPL_CAST((volatile size_t *)ptr);
-    const size_t sret = rbimpl_atomic_size_cas(sptr, sold, snew);
+    const size_t sret = rbimpl_atomic_size_cas(sptr, sold, snew, success_memorder, failure_memorder);
     return RBIMPL_CAST((void *)sret);
 
 #endif
@@ -1097,15 +1143,16 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline void *
-rbimpl_atomic_ptr_load(void **ptr)
+rbimpl_atomic_ptr_load(void **ptr, int memory_order)
 {
+    (void)memory_order;
 #if 0
 #elif defined(HAVE_GCC_ATOMIC_BUILTINS)
-    return __atomic_load_n(ptr, __ATOMIC_SEQ_CST);
+    return __atomic_load_n(ptr, memory_order);
 #else
     void *val = *ptr;
-    return rbimpl_atomic_ptr_cas(ptr, val, val);
+    return rbimpl_atomic_ptr_cas(ptr, val, val, memory_order, memory_order);
 #endif
 }
@@ -1113,14 +1160,14 @@ RBIMPL_ATTR_ARTIFICIAL()
 RBIMPL_ATTR_NOALIAS()
 RBIMPL_ATTR_NONNULL((1))
 static inline VALUE
-rbimpl_atomic_value_cas(volatile VALUE *ptr, VALUE oldval, VALUE newval)
+rbimpl_atomic_value_cas(volatile VALUE *ptr, VALUE oldval, VALUE newval, int success_memorder, int failure_memorder)
 {
     RBIMPL_STATIC_ASSERT(sizeof_value, sizeof *ptr == sizeof(size_t));
     const size_t snew = RBIMPL_CAST((size_t)newval);
     const size_t sold = RBIMPL_CAST((size_t)oldval);
     volatile size_t *const sptr = RBIMPL_CAST((volatile size_t *)ptr);
-    const size_t sret = rbimpl_atomic_size_cas(sptr, sold, snew);
+    const size_t sret = rbimpl_atomic_size_cas(sptr, sold, snew, success_memorder, failure_memorder);
     return RBIMPL_CAST((VALUE)sret);
 }
 /** @endcond */