V8 3.25.30 (bundled with Node.js 0.11.13)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
atomicops_internals_mac.h
Go to the documentation of this file.
1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 // This file is an internal atomic implementation, use atomicops.h instead.
29 
30 #ifndef V8_ATOMICOPS_INTERNALS_MAC_H_
31 #define V8_ATOMICOPS_INTERNALS_MAC_H_
32 
33 #include <libkern/OSAtomic.h>
34 
35 namespace v8 {
36 namespace internal {
37 
38 inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
39  Atomic32 old_value,
40  Atomic32 new_value) {
41  Atomic32 prev_value;
42  do {
43  if (OSAtomicCompareAndSwap32(old_value, new_value,
44  const_cast<Atomic32*>(ptr))) {
45  return old_value;
46  }
47  prev_value = *ptr;
48  } while (prev_value == old_value);
49  return prev_value;
50 }
51 
52 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
53  Atomic32 new_value) {
54  Atomic32 old_value;
55  do {
56  old_value = *ptr;
57  } while (!OSAtomicCompareAndSwap32(old_value, new_value,
58  const_cast<Atomic32*>(ptr)));
59  return old_value;
60 }
61 
62 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
63  Atomic32 increment) {
64  return OSAtomicAdd32(increment, const_cast<Atomic32*>(ptr));
65 }
66 
67 inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
68  Atomic32 increment) {
69  return OSAtomicAdd32Barrier(increment, const_cast<Atomic32*>(ptr));
70 }
71 
// Issues a full memory barrier using the libkern primitive.
inline void MemoryBarrier() {
  OSMemoryBarrier();
}
75 
76 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
77  Atomic32 old_value,
78  Atomic32 new_value) {
79  Atomic32 prev_value;
80  do {
81  if (OSAtomicCompareAndSwap32Barrier(old_value, new_value,
82  const_cast<Atomic32*>(ptr))) {
83  return old_value;
84  }
85  prev_value = *ptr;
86  } while (prev_value == old_value);
87  return prev_value;
88 }
89 
// Compare-and-swap with release semantics.
inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  // The lib kern interface does not distinguish between Acquire and Release
  // memory barriers, so the Acquire variant is reused here (same as the
  // 64-bit implementation below).
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}
95 
// Plain store with no ordering guarantees.
inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
}
99 
// Store followed by a full barrier. NOTE: the statement order (store, then
// barrier) is the contract of this function — do not reorder.
inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
  MemoryBarrier();
}
104 
// Full barrier followed by the store, so prior writes are visible before
// *ptr changes. The barrier/store order is the contract — do not reorder.
inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
  MemoryBarrier();
  *ptr = value;
}
109 
// Plain load with no ordering guarantees.
inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
  return *ptr;
}
113 
// Load followed by a full barrier: later accesses cannot move before this
// load. The load/barrier order is the contract — do not reorder.
inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
  Atomic32 value = *ptr;
  MemoryBarrier();
  return value;
}
119 
// Full barrier followed by the load. The barrier/load order is the
// contract — do not reorder.
inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
  MemoryBarrier();
  return *ptr;
}
124 
125 #ifdef __LP64__
126 
127 // 64-bit implementation on 64-bit platform
128 
129 inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr,
130  Atomic64 old_value,
131  Atomic64 new_value) {
132  Atomic64 prev_value;
133  do {
134  if (OSAtomicCompareAndSwap64(old_value, new_value,
135  reinterpret_cast<volatile int64_t*>(ptr))) {
136  return old_value;
137  }
138  prev_value = *ptr;
139  } while (prev_value == old_value);
140  return prev_value;
141 }
142 
143 inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr,
144  Atomic64 new_value) {
145  Atomic64 old_value;
146  do {
147  old_value = *ptr;
148  } while (!OSAtomicCompareAndSwap64(old_value, new_value,
149  reinterpret_cast<volatile int64_t*>(ptr)));
150  return old_value;
151 }
152 
153 inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr,
154  Atomic64 increment) {
155  return OSAtomicAdd64(increment, reinterpret_cast<volatile int64_t*>(ptr));
156 }
157 
158 inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64* ptr,
159  Atomic64 increment) {
160  return OSAtomicAdd64Barrier(increment,
161  reinterpret_cast<volatile int64_t*>(ptr));
162 }
163 
164 inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
165  Atomic64 old_value,
166  Atomic64 new_value) {
167  Atomic64 prev_value;
168  do {
169  if (OSAtomicCompareAndSwap64Barrier(
170  old_value, new_value, reinterpret_cast<volatile int64_t*>(ptr))) {
171  return old_value;
172  }
173  prev_value = *ptr;
174  } while (prev_value == old_value);
175  return prev_value;
176 }
177 
// 64-bit compare-and-swap with release semantics.
inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  // The lib kern interface does not distinguish between
  // Acquire and Release memory barriers; they are equivalent.
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}
185 
// Plain 64-bit store with no ordering guarantees.
inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
  *ptr = value;
}
189 
// Store followed by a full barrier. The statement order (store, then
// barrier) is the contract — do not reorder.
inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) {
  *ptr = value;
  MemoryBarrier();
}
194 
// Full barrier followed by the store, so prior writes are visible before
// *ptr changes. The barrier/store order is the contract — do not reorder.
inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
  MemoryBarrier();
  *ptr = value;
}
199 
// Plain 64-bit load with no ordering guarantees.
inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
  return *ptr;
}
203 
// Load followed by a full barrier: later accesses cannot move before this
// load. The load/barrier order is the contract — do not reorder.
inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
  Atomic64 value = *ptr;
  MemoryBarrier();
  return value;
}
209 
// Full barrier followed by the load. The barrier/load order is the
// contract — do not reorder.
inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
  MemoryBarrier();
  return *ptr;
}
214 
215 #endif // defined(__LP64__)
216 
217 } } // namespace v8::internal
218 
219 #endif // V8_ATOMICOPS_INTERNALS_MAC_H_
void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value)
Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr, Atomic32 old_value, Atomic32 new_value)
Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr, Atomic32 old_value, Atomic32 new_value)
void NoBarrier_Store(volatile Atomic32 *ptr, Atomic32 value)
Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr, Atomic32 new_value)
Atomic32 Release_Load(volatile const Atomic32 *ptr)
Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32 *ptr, Atomic32 increment)
void Release_Store(volatile Atomic32 *ptr, Atomic32 value)
Atomic32 Barrier_AtomicIncrement(volatile Atomic32 *ptr, Atomic32 increment)
Atomic32 NoBarrier_Load(volatile const Atomic32 *ptr)
Atomic32 Acquire_Load(volatile const Atomic32 *ptr)
int32_t Atomic32
Definition: atomicops.h:66
Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr, Atomic32 old_value, Atomic32 new_value)