#ifndef V8_ATOMICOPS_INTERNALS_X86_GCC_H_
#define V8_ATOMICOPS_INTERNALS_X86_GCC_H_

// Features of this x86.  Values may not be correct before main() is run,
// but are set conservatively.
struct AtomicOps_x86CPUFeatureStruct {
  bool has_amd_lock_mb_bug;  // Processor has AMD memory-barrier bug; do lfence
                             // after acquire compare-and-swap.
  bool has_sse2;             // Processor has SSE2.
};
extern struct AtomicOps_x86CPUFeatureStruct AtomicOps_Internalx86CPUFeatures;

#define ATOMICOPS_COMPILER_BARRIER() __asm__ __volatile__("" : : : "memory")
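// The empty asm body above emits no instructions; the "memory" clobber only
// stops the *compiler* from caching values in registers or reordering memory
// accesses across this point.  It does not constrain the CPU; for a hardware
// fence, see MemoryBarrier() below.  Illustrative sketch (|data| and |ready|
// are hypothetical variables, not part of this header):
//
//   data = 42;
//   ATOMICOPS_COMPILER_BARRIER();  // compiler may not sink this store...
//   ready = 1;                     // ...past this one.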
// 32-bit low-level operations on any platform.

inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  Atomic32 prev;
  __asm__ __volatile__("lock; cmpxchgl %1,%2"
                       : "=a" (prev)
                       : "q" (new_value), "m" (*ptr), "0" (old_value)
                       : "memory");
  return prev;
}
inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
                                         Atomic32 new_value) {
  __asm__ __volatile__("xchgl %1,%0"  // The lock prefix is implicit for xchg.
                       : "=r" (new_value)
                       : "m" (*ptr), "0" (new_value)
                       : "memory");
  return new_value;  // Now it's the previous value.
}
inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
                                          Atomic32 increment) {
  Atomic32 temp = increment;
  __asm__ __volatile__("lock; xaddl %0,%1"
                       : "+r" (temp), "+m" (*ptr)
                       : : "memory");
  // xadd leaves the old value of *ptr in temp, so temp + increment is the
  // new value of *ptr.
  return temp + increment;
}
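// Usage sketch, not part of this header: a reference count (|refcount| is a
// hypothetical Atomic32).  Real code would typically use
// Barrier_AtomicIncrement below for the decrement, so the destruction is
// ordered after all prior accesses.
//
//   NoBarrier_AtomicIncrement(&refcount, 1);   // take a reference
//   if (NoBarrier_AtomicIncrement(&refcount, -1) == 0) {
//     // last reference dropped; safe to destroy
//   }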
inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
                                        Atomic32 increment) {
  Atomic32 temp = increment;
  __asm__ __volatile__("lock; xaddl %0,%1"
                       : "+r" (temp), "+m" (*ptr)
                       : : "memory");
  // temp now holds the old value of *ptr.
  if (AtomicOps_Internalx86CPUFeatures.has_amd_lock_mb_bug) {
    __asm__ __volatile__("lfence" : : : "memory");
  }
  return temp + increment;
}
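// The conditional lfence above works around the AMD memory-barrier bug named
// in AtomicOps_x86CPUFeatureStruct: on affected parts a locked instruction
// does not also act as an acquire barrier, so an explicit lfence is issued
// after it.  has_amd_lock_mb_bug is set at startup for those processors.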
inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 x = NoBarrier_CompareAndSwap(ptr, old_value, new_value);
  if (AtomicOps_Internalx86CPUFeatures.has_amd_lock_mb_bug) {
    __asm__ __volatile__("lfence" : : : "memory");
  }
  return x;
}

inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
}
#if defined(__x86_64__)

// 64-bit implementations of memory barrier can be simpler, because
// "mfence" is guaranteed to exist.
inline void MemoryBarrier() {
  __asm__ __volatile__("mfence" : : : "memory");
}

inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
  MemoryBarrier();
}

#else

inline void MemoryBarrier() {
  if (AtomicOps_Internalx86CPUFeatures.has_sse2) {
    __asm__ __volatile__("mfence" : : : "memory");
  } else {  // mfence is faster but not present on PIII
    Atomic32 x = 0;
    NoBarrier_AtomicExchange(&x, 0);  // acts as a barrier on PIII
  }
}

inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
  if (AtomicOps_Internalx86CPUFeatures.has_sse2) {
    *ptr = value;
    __asm__ __volatile__("mfence" : : : "memory");
  } else {
    NoBarrier_AtomicExchange(ptr, value);  // acts as a barrier on PIII
  }
}

#endif
#if defined(__x86_64__)

// 64-bit low-level operations on 64-bit platform.

inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr,
                                         Atomic64 old_value,
                                         Atomic64 new_value) {
  Atomic64 prev;
  __asm__ __volatile__("lock; cmpxchgq %1,%2"
                       : "=a" (prev)
                       : "q" (new_value), "m" (*ptr), "0" (old_value)
                       : "memory");
  return prev;
}
inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr,
                                         Atomic64 new_value) {
  __asm__ __volatile__("xchgq %1,%0"  // The lock prefix is implicit for xchg.
                       : "=r" (new_value)
                       : "m" (*ptr), "0" (new_value)
                       : "memory");
  return new_value;  // Now it's the previous value.
}
inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr,
                                          Atomic64 increment) {
  Atomic64 temp = increment;
  __asm__ __volatile__("lock; xaddq %0,%1"
                       : "+r" (temp), "+m" (*ptr)
                       : : "memory");
  // temp now contains the previous value of *ptr.
  return temp + increment;
}
inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64* ptr,
                                        Atomic64 increment) {
  Atomic64 temp = increment;
  __asm__ __volatile__("lock; xaddq %0,%1"
                       : "+r" (temp), "+m" (*ptr)
                       : : "memory");
  // temp now contains the previous value of *ptr.
  if (AtomicOps_Internalx86CPUFeatures.has_amd_lock_mb_bug) {
    __asm__ __volatile__("lfence" : : : "memory");
  }
  return temp + increment;
}
inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) {
  *ptr = value;
  MemoryBarrier();
}
inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
  ATOMICOPS_COMPILER_BARRIER();
  *ptr = value;  // An x86 store acts as a release barrier.
}
inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
  Atomic64 value = *ptr;  // An x86 load acts as an acquire barrier.
  ATOMICOPS_COMPILER_BARRIER();
  return value;
}
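// Usage sketch, not part of this header: the release-store/acquire-load
// pair above is what publishes data safely between threads.  |payload|,
// |ready|, ComputePayload() and Use() are hypothetical.
//
//   // Producer thread:
//   payload = ComputePayload();  // plain store
//   Release_Store(&ready, 1);    // nothing above may sink below this
//
//   // Consumer thread:
//   if (Acquire_Load(&ready) == 1) {
//     Use(payload);              // guaranteed to see the full payload
//   }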
inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
  MemoryBarrier();
  return *ptr;
}
inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 x = NoBarrier_CompareAndSwap(ptr, old_value, new_value);
  if (AtomicOps_Internalx86CPUFeatures.has_amd_lock_mb_bug) {
    __asm__ __volatile__("lfence" : : : "memory");
  }
  return x;
}
inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
}
#endif  // defined(__x86_64__)
#undef ATOMICOPS_COMPILER_BARRIER

#endif  // V8_ATOMICOPS_INTERNALS_X86_GCC_H_