#ifndef Atomics_h
#define Atomics_h

#include <atomic>
#include <wtf/StdLibExtras.h>

#if OS(WINDOWS)
#if !COMPILER(GCC_OR_CLANG)
extern "C" void _ReadWriteBarrier(void);
#pragma intrinsic(_ReadWriteBarrier)
#endif
#include <windows.h>
#endif

namespace WTF {

// Atomic is a thin wrapper around std::atomic whose compare-exchange operations take
// 'expected' by value and never write the observed value back into it; callers that
// need the current value simply reload it. The struct has no constructors, so
// instances can be statically initialized.
template<typename T>
struct Atomic {
    T load(std::memory_order order = std::memory_order_seq_cst) const { return value.load(order); }

    void store(T desired, std::memory_order order = std::memory_order_seq_cst) { value.store(desired, order); }

    bool compareExchangeWeak(T expected, T desired, std::memory_order order = std::memory_order_seq_cst)
    {
#if OS(WINDOWS)
        // Force sequentially consistent ordering on Windows: MSVC's std::atomic is picky
        // about weaker orderings for compare-exchange, and seq_cst is cheap on x86 anyway.
        order = std::memory_order_seq_cst;
#endif
        T expectedOrActual = expected;
        return value.compare_exchange_weak(expectedOrActual, desired, order);
    }

    bool compareExchangeStrong(T expected, T desired, std::memory_order order = std::memory_order_seq_cst)
    {
#if OS(WINDOWS)
        order = std::memory_order_seq_cst;
#endif
        T expectedOrActual = expected;
        return value.compare_exchange_strong(expectedOrActual, desired, order);
    }

    template<typename U>
    T exchangeAndAdd(U addend, std::memory_order order = std::memory_order_seq_cst)
    {
#if OS(WINDOWS)
        order = std::memory_order_seq_cst;
#endif
        return value.fetch_add(addend, order);
    }

    T exchange(T newValue, std::memory_order order = std::memory_order_seq_cst)
    {
#if OS(WINDOWS)
        order = std::memory_order_seq_cst;
#endif
        return value.exchange(newValue, order);
    }

    std::atomic<T> value;
};
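
// Illustrative only, not part of this header: a minimal CAS-increment loop using
// Atomic<T>. The name 'counter' is hypothetical. Because compareExchangeWeak() does
// not write the observed value back into 'expected', the loop reloads on failure.
//
//     static Atomic<unsigned> counter;
//
//     unsigned old;
//     do {
//         old = counter.load();
//     } while (!counter.compareExchangeWeak(old, old + 1));
//
// For a plain increment, exchangeAndAdd(1) is simpler; the loop form generalizes to
// arbitrary read-modify-write updates.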

// Convenience wrapper for doing a weak CAS directly on a raw memory location. Note
// that it uses relaxed ordering; callers that need ordering must add fences themselves.
template<typename T>
inline bool weakCompareAndSwap(volatile T* location, T expected, T newValue)
{
    return bitwise_cast<Atomic<T>*>(location)->compareExchangeWeak(expected, newValue, std::memory_order_relaxed);
}
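
// Illustrative only: because weakCompareAndSwap() is relaxed, it suits bit-flag
// updates that need atomicity but no ordering. 'm_flags' and 'bit' are hypothetical.
//
//     unsigned oldFlags, newFlags;
//     do {
//         oldFlags = m_flags;
//         newFlags = oldFlags | bit;
//     } while (!weakCompareAndSwap(&m_flags, oldFlags, newFlags));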

inline void compilerFence()
{
#if OS(WINDOWS) && !COMPILER(GCC_OR_CLANG)
    _ReadWriteBarrier();
#else
    asm volatile("" ::: "memory");
#endif
}
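
// compilerFence() only prevents the compiler from reordering memory accesses across
// it; it emits no hardware barrier. Illustrative use (hypothetical 'data' and 'ready'
// variables) on hardware such as x86 that already keeps stores in program order:
//
//     data = computeValue();
//     compilerFence(); // keep the compiler from sinking the store to 'data' below 'ready'
//     ready = true;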

#if CPU(ARM_THUMB2) || CPU(ARM64)

// "dmb sy" is a full data memory barrier and "dmb st" a store-store barrier. The same
// mnemonics assemble on both ARMv7 (Thumb-2) and ARM64, so the armV7_* helpers serve
// both targets.
inline void armV7_dmb()
{
    asm volatile("dmb sy" ::: "memory");
}

inline void armV7_dmb_st()
{
    asm volatile("dmb st" ::: "memory");
}

inline void loadLoadFence() { armV7_dmb(); }
inline void loadStoreFence() { armV7_dmb(); }
inline void storeLoadFence() { armV7_dmb(); }
inline void storeStoreFence() { armV7_dmb_st(); }
inline void memoryBarrierAfterLock() { armV7_dmb(); }
inline void memoryBarrierBeforeUnlock() { armV7_dmb(); }

#elif CPU(X86) || CPU(X86_64)

inline void x86_mfence()
{
#if OS(WINDOWS)
    // MemoryBarrier() is the Win32 full memory barrier macro.
    MemoryBarrier();
#else
    asm volatile("mfence" ::: "memory");
#endif
}

// x86's memory model (TSO) only reorders stores with later loads, so storeLoadFence()
// is the one fence that needs a real mfence; the rest just restrict the compiler.
inline void loadLoadFence() { compilerFence(); }
inline void loadStoreFence() { compilerFence(); }
inline void storeLoadFence() { x86_mfence(); }
inline void storeStoreFence() { compilerFence(); }
inline void memoryBarrierAfterLock() { compilerFence(); }
inline void memoryBarrierBeforeUnlock() { compilerFence(); }

#else

// Fallback for other architectures: compiler-only fences. These are sufficient only
// on targets whose hardware does not reorder the corresponding accesses.
inline void loadLoadFence() { compilerFence(); }
inline void loadStoreFence() { compilerFence(); }
inline void storeLoadFence() { compilerFence(); }
inline void storeStoreFence() { compilerFence(); }
inline void memoryBarrierAfterLock() { compilerFence(); }
inline void memoryBarrierBeforeUnlock() { compilerFence(); }

#endif
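
// Illustrative only: a typical pairing of the fences above for publishing data.
// 'data' and 'ready' are hypothetical variables, not part of this header.
//
//     // Producer:
//     data = value;
//     storeStoreFence();
//     ready = true;
//
//     // Consumer:
//     if (ready) {
//         loadLoadFence();
//         consume(data);
//     }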

} // namespace WTF

using WTF::Atomic;

#endif // Atomics_h