#ifndef _I386_ATOMIC_H_
#define _I386_ATOMIC_H_
#include <i386/smp.h>
/*
 * "_smp"-suffixed memory-order aliases.
 *
 * On SMP kernels (__SMP__ set by <i386/smp.h>) each alias maps to the
 * real C11 memory order.  On uniprocessor builds every alias weakens to
 * memory_order_relaxed: with a single CPU there is no other processor
 * whose loads/stores need ordering, so the fences can be elided.
 *
 * NOTE(review): this assumes callers use the _smp orders only for
 * CPU-vs-CPU synchronization, not for device/MMIO ordering — confirm
 * at the call sites.
 */
#if __SMP__
#define memory_order_consume_smp memory_order_consume
#define memory_order_acquire_smp memory_order_acquire
#define memory_order_release_smp memory_order_release
#define memory_order_acq_rel_smp memory_order_acq_rel
#define memory_order_seq_cst_smp memory_order_seq_cst
#else
#define memory_order_consume_smp memory_order_relaxed
#define memory_order_acquire_smp memory_order_relaxed
#define memory_order_release_smp memory_order_relaxed
#define memory_order_acq_rel_smp memory_order_relaxed
#define memory_order_seq_cst_smp memory_order_relaxed
#endif
#ifdef ATOMIC_PRIVATE
/*
 * Strong compare-and-swap on a native word.
 *
 * Atomically replaces *target with newval iff *target == oldval, using
 * memory order 'ord' on success and relaxed ordering on failure.
 * Returns TRUE when the exchange happened, FALSE otherwise.
 *
 * 'wait' is ignored on this architecture (no waiting CAS variant here).
 */
static boolean_t
atomic_compare_exchange(uintptr_t *target, uintptr_t oldval, uintptr_t newval,
    enum memory_order ord, boolean_t wait)
{
	_Atomic uintptr_t *atomic_target = (_Atomic uintptr_t *)target;
	uintptr_t expected = oldval;

	(void)wait;

	return __c11_atomic_compare_exchange_strong(atomic_target, &expected,
	    newval, ord, memory_order_relaxed);
}
#endif // ATOMIC_PRIVATE
/*
 * os_atomic_rmw_loop(p, ov, nv, m, ...)
 *
 * Generic read-modify-write retry loop.  Loads *p into 'ov', runs the
 * caller-supplied statement(s) in __VA_ARGS__ (which compute 'nv' from
 * 'ov'), then attempts a weak CAS with memory_order_##m on success and
 * relaxed ordering on failure.  On CAS failure 'ov' is refreshed with
 * the value observed in memory and the loop retries.  The whole
 * statement expression evaluates to true once the CAS succeeds.
 *
 *   p  - pointer to an _Atomic object
 *   ov - lvalue receiving the observed old value (readable in __VA_ARGS__)
 *   nv - lvalue the caller sets to the desired new value
 *   m  - memory-order suffix for the success ordering (e.g. acq_rel)
 *
 * Fixes vs. previous revision:
 *  - (nv) and the initial (ov) assignment are parenthesized, so
 *    expression arguments expand safely (matches the existing (ov) use).
 *  - __typeof__ instead of typeof: 'typeof' is only a keyword in GNU
 *    language modes (or C23); __typeof__ works in strict ISO modes too.
 */
#define os_atomic_rmw_loop(p, ov, nv, m, ...) ({ \
	bool _result = false; \
	__typeof__(p) _p = (p); \
	(ov) = atomic_load_explicit(_p, memory_order_relaxed); \
	do { \
		__VA_ARGS__; \
		__typeof__(ov) _r = (ov); \
		_result = atomic_compare_exchange_weak_explicit(_p, &_r, (nv), \
		    memory_order_##m, memory_order_relaxed); \
		(ov) = _r; \
	} while (__builtin_expect(!_result, 0)); \
	_result; \
})
/*
 * Bail-out hook for use inside an os_atomic_rmw_loop body: evaluates
 * 'expr' and then traps via __builtin_trap().
 *
 * NOTE(review): this implementation makes giving up fatal — it does not
 * break out of the retry loop.  Confirm that call sites on this
 * architecture expect a trap rather than a loop exit.
 */
#define os_atomic_rmw_loop_give_up(expr) ({ expr; __builtin_trap(); })
#endif // _I386_ATOMIC_H_