// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is an internal atomic implementation, use base/atomicops.h instead.

#ifndef BASE_ATOMICOPS_INTERNALS_X86_MACOSX_H_
#define BASE_ATOMICOPS_INTERNALS_X86_MACOSX_H_
#pragma once

#include <libkern/OSAtomic.h>

namespace base {
namespace subtle {

// Atomically compares *ptr against old_value and, if they match, stores
// new_value. Returns old_value when the swap happened; otherwise returns a
// freshly observed value of *ptr.
inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  Atomic32 prev_value;
  do {
    if (OSAtomicCompareAndSwap32(old_value, new_value,
                                 const_cast<Atomic32*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}
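
// Illustrative sketch (not part of this header): callers typically wrap
// NoBarrier_CompareAndSwap in a retry loop of their own. The helper below,
// with a hypothetical name, atomically records a running maximum:
//
//   void NoBarrier_StoreMax(volatile Atomic32* ptr, Atomic32 value) {
//     Atomic32 observed = NoBarrier_Load(ptr);  // declared later in this file
//     while (observed < value) {
//       Atomic32 prev = NoBarrier_CompareAndSwap(ptr, observed, value);
//       if (prev == observed)
//         break;               // Our value was stored.
//       observed = prev;       // Lost the race; retry against the new value.
//     }
//   }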

// Atomically stores new_value into *ptr and returns the previous value.
// No memory barriers are implied.
inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr,
                                         Atomic32 new_value) {
  Atomic32 old_value;
  do {
    old_value = *ptr;
  } while (!OSAtomicCompareAndSwap32(old_value, new_value,
                                     const_cast<Atomic32*>(ptr)));
  return old_value;
}
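
// Illustrative sketch: an exchange hands a slot's contents to exactly one
// thread, e.g. claiming a pending token (names hypothetical):
//
//   Atomic32 token = NoBarrier_AtomicExchange(&pending_token, 0);
//   if (token != 0) {
//     // Only the thread whose exchange observed the token processes it.
//     Process(token);
//   }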

// Atomically adds increment to *ptr and returns the new value, without
// (NoBarrier_) or with (Barrier_) a full memory barrier.
inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32 *ptr,
                                          Atomic32 increment) {
  return OSAtomicAdd32(increment, const_cast<Atomic32*>(ptr));
}

inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32 *ptr,
                                        Atomic32 increment) {
  return OSAtomicAdd32Barrier(increment, const_cast<Atomic32*>(ptr));
}
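
// Illustrative sketch: because the increment returns the *new* value, a
// hypothetical reference-count release can detect the final decrement:
//
//   if (Barrier_AtomicIncrement(&ref_count, -1) == 0) {
//     // Last reference gone; the barrier orders earlier writes to the
//     // object before the decrement, so destruction is safe here.
//     delete object;
//   }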

// Issues a full memory barrier, ordering both loads and stores.
inline void MemoryBarrier() {
  OSMemoryBarrier();
}
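
// Illustrative sketch: a full barrier is what store-load ordering requires,
// which acquire/release semantics alone do not give on x86 (flag names are
// hypothetical):
//
//   NoBarrier_Store(&flag_a, 1);
//   MemoryBarrier();                    // store-load fence
//   Atomic32 b = NoBarrier_Load(&flag_b);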

inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 prev_value;
  do {
    if (OSAtomicCompareAndSwap32Barrier(old_value, new_value,
                                        const_cast<Atomic32*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

inline Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  // The libkern interface does not distinguish between
  // Acquire and Release memory barriers; they are equivalent.
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}

inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
}

inline void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value) {
  *ptr = value;
  MemoryBarrier();
}

inline void Release_Store(volatile Atomic32 *ptr, Atomic32 value) {
  MemoryBarrier();
  *ptr = value;
}

inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
  return *ptr;
}

inline Atomic32 Acquire_Load(volatile const Atomic32 *ptr) {
  Atomic32 value = *ptr;
  MemoryBarrier();
  return value;
}

inline Atomic32 Release_Load(volatile const Atomic32 *ptr) {
  MemoryBarrier();
  return *ptr;
}
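
// Illustrative sketch: Release_Store pairs with Acquire_Load to publish data
// between threads (payload and ready_flag are hypothetical):
//
//   // Producer:
//   payload = Compute();               // plain writes...
//   Release_Store(&ready_flag, 1);     // ...ordered before the flag store
//
//   // Consumer:
//   if (Acquire_Load(&ready_flag)) {   // flag load ordered before...
//     Use(payload);                    // ...the reads of the payload
//   }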

#ifdef __LP64__

// 64-bit implementations, used only on 64-bit platforms.

inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64 *ptr,
                                         Atomic64 old_value,
                                         Atomic64 new_value) {
  Atomic64 prev_value;
  do {
    if (OSAtomicCompareAndSwap64(old_value, new_value,
                                 reinterpret_cast<volatile int64_t*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64 *ptr,
                                         Atomic64 new_value) {
  Atomic64 old_value;
  do {
    old_value = *ptr;
  } while (!OSAtomicCompareAndSwap64(old_value, new_value,
                                     reinterpret_cast<volatile int64_t*>(ptr)));
  return old_value;
}

inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64 *ptr,
                                          Atomic64 increment) {
  return OSAtomicAdd64(increment, reinterpret_cast<volatile int64_t*>(ptr));
}

inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64 *ptr,
                                        Atomic64 increment) {
  return OSAtomicAdd64Barrier(increment,
                              reinterpret_cast<volatile int64_t*>(ptr));
}

inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64 *ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 prev_value;
  do {
    if (OSAtomicCompareAndSwap64Barrier(
        old_value, new_value, reinterpret_cast<volatile int64_t*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

inline Atomic64 Release_CompareAndSwap(volatile Atomic64 *ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  // The libkern interface does not distinguish between
  // Acquire and Release memory barriers; they are equivalent.
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}

inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
  *ptr = value;
}

inline void Acquire_Store(volatile Atomic64 *ptr, Atomic64 value) {
  *ptr = value;
  MemoryBarrier();
}

inline void Release_Store(volatile Atomic64 *ptr, Atomic64 value) {
  MemoryBarrier();
  *ptr = value;
}

inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
  return *ptr;
}

inline Atomic64 Acquire_Load(volatile const Atomic64 *ptr) {
  Atomic64 value = *ptr;
  MemoryBarrier();
  return value;
}

inline Atomic64 Release_Load(volatile const Atomic64 *ptr) {
  MemoryBarrier();
  return *ptr;
}

#endif  // defined(__LP64__)

}  // namespace subtle
}  // namespace base

#endif  // BASE_ATOMICOPS_INTERNALS_X86_MACOSX_H_