// Copyright (c) 2009 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is an internal atomic implementation, use base/atomicops.h instead.
//
// LinuxKernelCmpxchg and Barrier_AtomicIncrement are from Google Gears.
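//
// A minimal usage sketch (illustrative only; these functions are normally
// reached through base/atomicops.h, which declares them):
//
//   base::subtle::Atomic32 count = 0;
//   base::subtle::Barrier_AtomicIncrement(&count, 1);          // count == 1
//   base::subtle::Atomic32 prev =
//       base::subtle::NoBarrier_CompareAndSwap(&count, 1, 2);  // prev == 1, count == 2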

#ifndef BASE_ATOMICOPS_INTERNALS_ARM_GCC_H_
#define BASE_ATOMICOPS_INTERNALS_ARM_GCC_H_
#pragma once

namespace base {
namespace subtle {

// 0xffff0fc0 is the hard coded address of a function provided by
// the kernel which implements an atomic compare-exchange. On older
// ARM architecture revisions (pre-v6) this may be implemented using
// a syscall. This address is stable, and in active use (hard coded)
// by at least glibc-2.7 and the Android C library.
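// The helper follows the ARM "kernel user helpers" ABI: it returns zero when
// *ptr equalled old_value and the swap was performed, and non-zero otherwise.
// The retry loops below rely on a zero return meaning success.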
typedef Atomic32 (*LinuxKernelCmpxchgFunc)(Atomic32 old_value,
                                           Atomic32 new_value,
                                           volatile Atomic32* ptr);
LinuxKernelCmpxchgFunc pLinuxKernelCmpxchg __attribute__((weak)) =
    (LinuxKernelCmpxchgFunc) 0xffff0fc0;

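// 0xffff0fa0 is the hard coded address of the kernel user helper that issues
// a full memory barrier (effectively a no-op on uniprocessor kernels).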
typedef void (*LinuxKernelMemoryBarrierFunc)(void);
LinuxKernelMemoryBarrierFunc pLinuxKernelMemoryBarrier __attribute__((weak)) =
    (LinuxKernelMemoryBarrierFunc) 0xffff0fa0;

inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  Atomic32 prev_value = *ptr;
  do {
    // A zero return from the kernel helper means the swap succeeded.
    if (!pLinuxKernelCmpxchg(old_value, new_value,
                             const_cast<Atomic32*>(ptr))) {
      return old_value;
    }
    // The helper can fail spuriously (e.g. if the thread was preempted), so
    // only give up once *ptr is observed to differ from old_value.
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
                                         Atomic32 new_value) {
  Atomic32 old_value;
  do {
    // Keep retrying until the kernel helper reports a successful swap.
    old_value = *ptr;
  } while (pLinuxKernelCmpxchg(old_value, new_value,
                               const_cast<Atomic32*>(ptr)));
  return old_value;
}

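// Note: Barrier_AtomicIncrement is defined further down in this file; the
// forward call compiles because base/atomicops.h declares all of these
// functions before including this internal header.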
inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
                                          Atomic32 increment) {
  return Barrier_AtomicIncrement(ptr, increment);
}

inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
                                        Atomic32 increment) {
  for (;;) {
    // Atomically exchange the old value with an incremented one.
    Atomic32 old_value = *ptr;
    Atomic32 new_value = old_value + increment;
    if (pLinuxKernelCmpxchg(old_value, new_value,
                            const_cast<Atomic32*>(ptr)) == 0) {
      // The exchange took place as expected.
      return new_value;
    }
    // Otherwise, *ptr changed mid-loop and we need to retry.
  }
}

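// The kernel cmpxchg helper is documented to include the memory barriers it
// needs, so the barrier-free compare-and-swap above is reused for the acquire
// and release variants below.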
inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
}

inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
}

inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
}

inline void MemoryBarrier() {
  pLinuxKernelMemoryBarrier();
}

inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
  MemoryBarrier();
}

inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
  MemoryBarrier();
  *ptr = value;
}

inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
  return *ptr;
}

inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
  Atomic32 value = *ptr;
  MemoryBarrier();
  return value;
}

inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
  MemoryBarrier();
  return *ptr;
}

}  // namespace base::subtle
}  // namespace base

#endif  // BASE_ATOMICOPS_INTERNALS_ARM_GCC_H_