atomicops_internals_x86_macosx.h revision 3345a6884c488ff3a535c2c9acdd33d74b37e311
// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is an internal atomic implementation, use base/atomicops.h instead.
7#ifndef BASE_ATOMICOPS_INTERNALS_X86_MACOSX_H_
8#define BASE_ATOMICOPS_INTERNALS_X86_MACOSX_H_
9#pragma once
10
11#include <libkern/OSAtomic.h>
12
13namespace base {
14namespace subtle {
15
16inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr,
17                                         Atomic32 old_value,
18                                         Atomic32 new_value) {
19  Atomic32 prev_value;
20  do {
21    if (OSAtomicCompareAndSwap32(old_value, new_value,
22                                 const_cast<Atomic32*>(ptr))) {
23      return old_value;
24    }
25    prev_value = *ptr;
26  } while (prev_value == old_value);
27  return prev_value;
28}
29
30inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr,
31                                         Atomic32 new_value) {
32  Atomic32 old_value;
33  do {
34    old_value = *ptr;
35  } while (!OSAtomicCompareAndSwap32(old_value, new_value,
36                                     const_cast<Atomic32*>(ptr)));
37  return old_value;
38}
39
40inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32 *ptr,
41                                          Atomic32 increment) {
42  return OSAtomicAdd32(increment, const_cast<Atomic32*>(ptr));
43}
44
45inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32 *ptr,
46                                          Atomic32 increment) {
47  return OSAtomicAdd32Barrier(increment, const_cast<Atomic32*>(ptr));
48}
49
// Full hardware memory barrier; delegates to libkern's OSMemoryBarrier.
inline void MemoryBarrier() {
  OSMemoryBarrier();
}
53
54inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr,
55                                       Atomic32 old_value,
56                                       Atomic32 new_value) {
57  Atomic32 prev_value;
58  do {
59    if (OSAtomicCompareAndSwap32Barrier(old_value, new_value,
60                                        const_cast<Atomic32*>(ptr))) {
61      return old_value;
62    }
63    prev_value = *ptr;
64  } while (prev_value == old_value);
65  return prev_value;
66}
67
inline Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  // The lib kern interface does not distinguish between Acquire and Release
  // memory barriers; they are equivalent, so delegate to the Acquire flavor.
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}
73
// Plain store through a volatile pointer; no memory barrier is issued.
inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
}
77
// Store |value| first, then issue a full memory barrier so no later memory
// operation can be reordered before the store.
inline void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value) {
  *ptr = value;
  MemoryBarrier();
}
82
// Issue a full memory barrier first, then store |value|, so all earlier
// memory operations complete before the store becomes visible.
inline void Release_Store(volatile Atomic32 *ptr, Atomic32 value) {
  MemoryBarrier();
  *ptr = value;
}
87
// Plain load through a volatile pointer; no memory barrier is issued.
inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
  return *ptr;
}
91
92inline Atomic32 Acquire_Load(volatile const Atomic32 *ptr) {
93  Atomic32 value = *ptr;
94  MemoryBarrier();
95  return value;
96}
97
// Issue a full memory barrier first, then load, so all earlier memory
// operations complete before the value is read.
inline Atomic32 Release_Load(volatile const Atomic32 *ptr) {
  MemoryBarrier();
  return *ptr;
}
102
103#ifdef __LP64__
104
105// 64-bit implementation on 64-bit platform
106
107inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64 *ptr,
108                                         Atomic64 old_value,
109                                         Atomic64 new_value) {
110  Atomic64 prev_value;
111  do {
112    if (OSAtomicCompareAndSwap64(old_value, new_value,
113                                 const_cast<Atomic64*>(ptr))) {
114      return old_value;
115    }
116    prev_value = *ptr;
117  } while (prev_value == old_value);
118  return prev_value;
119}
120
121inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64 *ptr,
122                                         Atomic64 new_value) {
123  Atomic64 old_value;
124  do {
125    old_value = *ptr;
126  } while (!OSAtomicCompareAndSwap64(old_value, new_value,
127                                     const_cast<Atomic64*>(ptr)));
128  return old_value;
129}
130
131inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64 *ptr,
132                                          Atomic64 increment) {
133  return OSAtomicAdd64(increment, const_cast<Atomic64*>(ptr));
134}
135
136inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64 *ptr,
137                                        Atomic64 increment) {
138  return OSAtomicAdd64Barrier(increment, const_cast<Atomic64*>(ptr));
139}
140
141inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64 *ptr,
142                                       Atomic64 old_value,
143                                       Atomic64 new_value) {
144  Atomic64 prev_value;
145  do {
146    if (OSAtomicCompareAndSwap64Barrier(old_value, new_value,
147                                        const_cast<Atomic64*>(ptr))) {
148      return old_value;
149    }
150    prev_value = *ptr;
151  } while (prev_value == old_value);
152  return prev_value;
153}
154
inline Atomic64 Release_CompareAndSwap(volatile Atomic64 *ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  // The lib kern interface does not distinguish between
  // Acquire and Release memory barriers; they are equivalent,
  // so delegate to the Acquire flavor.
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}
162
// Plain 64-bit store through a volatile pointer; no memory barrier.
inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
  *ptr = value;
}
166
// Store |value| first, then issue a full memory barrier (64-bit twin of the
// Atomic32 overload above).
inline void Acquire_Store(volatile Atomic64 *ptr, Atomic64 value) {
  *ptr = value;
  MemoryBarrier();
}
171
// Issue a full memory barrier first, then store |value| (64-bit twin of the
// Atomic32 overload above).
inline void Release_Store(volatile Atomic64 *ptr, Atomic64 value) {
  MemoryBarrier();
  *ptr = value;
}
176
// Plain 64-bit load through a volatile pointer; no memory barrier.
inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
  return *ptr;
}
180
181inline Atomic64 Acquire_Load(volatile const Atomic64 *ptr) {
182  Atomic64 value = *ptr;
183  MemoryBarrier();
184  return value;
185}
186
// Issue a full memory barrier first, then load (64-bit twin of the Atomic32
// overload above).
inline Atomic64 Release_Load(volatile const Atomic64 *ptr) {
  MemoryBarrier();
  return *ptr;
}
191
192#endif  // defined(__LP64__)
193
194// MacOS uses long for intptr_t, AtomicWord and Atomic32 are always different
195// on the Mac, even when they are the same size.  We need to explicitly cast
196// from AtomicWord to Atomic32/64 to implement the AtomicWord interface.
197#ifdef __LP64__
198#define AtomicWordCastType Atomic64
199#else
200#define AtomicWordCastType Atomic32
201#endif
202
203inline AtomicWord NoBarrier_CompareAndSwap(volatile AtomicWord* ptr,
204                                           AtomicWord old_value,
205                                           AtomicWord new_value) {
206  return NoBarrier_CompareAndSwap(
207      reinterpret_cast<volatile AtomicWordCastType*>(ptr),
208      old_value, new_value);
209}
210
211inline AtomicWord NoBarrier_AtomicExchange(volatile AtomicWord* ptr,
212                                           AtomicWord new_value) {
213  return NoBarrier_AtomicExchange(
214      reinterpret_cast<volatile AtomicWordCastType*>(ptr), new_value);
215}
216
217inline AtomicWord NoBarrier_AtomicIncrement(volatile AtomicWord* ptr,
218                                            AtomicWord increment) {
219  return NoBarrier_AtomicIncrement(
220      reinterpret_cast<volatile AtomicWordCastType*>(ptr), increment);
221}
222
223inline AtomicWord Barrier_AtomicIncrement(volatile AtomicWord* ptr,
224                                          AtomicWord increment) {
225  return Barrier_AtomicIncrement(
226      reinterpret_cast<volatile AtomicWordCastType*>(ptr), increment);
227}
228
229inline AtomicWord Acquire_CompareAndSwap(volatile AtomicWord* ptr,
230                                         AtomicWord old_value,
231                                         AtomicWord new_value) {
232  return base::subtle::Acquire_CompareAndSwap(
233      reinterpret_cast<volatile AtomicWordCastType*>(ptr),
234      old_value, new_value);
235}
236
237inline AtomicWord Release_CompareAndSwap(volatile AtomicWord* ptr,
238                                         AtomicWord old_value,
239                                         AtomicWord new_value) {
240  return base::subtle::Release_CompareAndSwap(
241      reinterpret_cast<volatile AtomicWordCastType*>(ptr),
242      old_value, new_value);
243}
244
245inline void NoBarrier_Store(volatile AtomicWord *ptr, AtomicWord value) {
246  NoBarrier_Store(
247      reinterpret_cast<volatile AtomicWordCastType*>(ptr), value);
248}
249
250inline void Acquire_Store(volatile AtomicWord* ptr, AtomicWord value) {
251  return base::subtle::Acquire_Store(
252      reinterpret_cast<volatile AtomicWordCastType*>(ptr), value);
253}
254
255inline void Release_Store(volatile AtomicWord* ptr, AtomicWord value) {
256  return base::subtle::Release_Store(
257      reinterpret_cast<volatile AtomicWordCastType*>(ptr), value);
258}
259
260inline AtomicWord NoBarrier_Load(volatile const AtomicWord *ptr) {
261  return NoBarrier_Load(
262      reinterpret_cast<volatile const AtomicWordCastType*>(ptr));
263}
264
265inline AtomicWord Acquire_Load(volatile const AtomicWord* ptr) {
266  return base::subtle::Acquire_Load(
267      reinterpret_cast<volatile const AtomicWordCastType*>(ptr));
268}
269
270inline AtomicWord Release_Load(volatile const AtomicWord* ptr) {
271  return base::subtle::Release_Load(
272      reinterpret_cast<volatile const AtomicWordCastType*>(ptr));
273}
274
275#undef AtomicWordCastType
276
277}   // namespace base::subtle
278}   // namespace base
279
280#endif  // BASE_ATOMICOPS_INTERNALS_X86_MACOSX_H_
281