/* atomic-arm.h — revision 464431e65fbede57b0d41d230fe6f6dc465c20f8 */
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ANDROID_CUTILS_ATOMIC_ARM_H
#define ANDROID_CUTILS_ATOMIC_ARM_H

#include <stdint.h>
#include <machine/cpu-features.h>

/*
 * Compiler-only barrier: the "memory" clobber stops the compiler from
 * reordering or caching memory accesses across this point.  It emits no
 * machine instructions, so it imposes no ordering as seen by other CPUs.
 */
extern inline void android_compiler_barrier(void)
{
    __asm__ __volatile__ ("" : : : "memory");
}

#if ANDROID_SMP == 0
/*
 * Uniprocessor build: there is no other CPU to order against, so a
 * compiler barrier is sufficient and no barrier instruction is emitted.
 */
extern inline void android_memory_barrier(void)
{
    android_compiler_barrier();
}
extern inline void android_memory_store_barrier(void)
{
    android_compiler_barrier();
}
#elif defined(__ARM_HAVE_DMB)
/* SMP with a DMB instruction available (ARMv7+): use it directly. */
extern inline void android_memory_barrier(void)
{
    __asm__ __volatile__ ("dmb" : : : "memory");
}
extern inline void android_memory_store_barrier(void)
{
    /* TODO: use "dmb st" once the toolchain understands it */
    __asm__ __volatile__ ("dmb" : : : "memory");
}
#elif defined(__ARM_HAVE_LDREX_STREX)
/*
 * SMP without DMB (ARMv6): the data memory barrier is the CP15
 * c7, c10, 5 register write; the written value (0) is ignored.
 */
extern inline void android_memory_barrier(void)
{
    __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" : : "r" (0) : "memory");
}
extern inline void android_memory_store_barrier(void)
{
    android_memory_barrier();
}
#else
/*
 * Older ARM: call the kernel-provided user helper at the fixed address
 * 0xffff0fa0 (kuser_memory_barrier), which performs whatever barrier
 * the running CPU requires.
 */
extern inline void android_memory_barrier(void)
{
    typedef void (kuser_memory_barrier)(void);
    (*(kuser_memory_barrier *)0xffff0fa0)();
}
extern inline void android_memory_store_barrier(void)
{
    android_memory_barrier();
}
#endif

/*
 * Load *ptr, then issue a full barrier so the load completes before any
 * memory access that follows this call in program order (acquire).
 */
extern inline int32_t android_atomic_acquire_load(volatile int32_t *ptr)
{
    int32_t result = *ptr;
    android_memory_barrier();   /* keep later accesses after the load */
    return result;
}

/*
 * Issue a full barrier, then load *ptr: all accesses before this call
 * complete before the load itself is performed.
 */
extern inline int32_t android_atomic_release_load(volatile int32_t *ptr)
{
    int32_t result;
    android_memory_barrier();   /* order prior accesses before the load */
    result = *ptr;
    return result;
}

/*
 * Store value to *ptr, then issue a full barrier so the store completes
 * before any memory access that follows this call in program order.
 */
extern inline void android_atomic_acquire_store(int32_t value,
                                                volatile int32_t *ptr)
{
    *ptr = value;
    android_memory_barrier();
}

/*
 * Issue a full barrier, then store value to *ptr: all prior accesses
 * complete before the store becomes visible (release semantics).
 */
extern inline void android_atomic_release_store(int32_t value,
                                                volatile int32_t *ptr)
{
    android_memory_barrier();
    *ptr = value;
}

#if defined(__thumb__)
/*
 * The inline assembly below is ARM-mode only; Thumb builds call an
 * out-of-line implementation instead.
 */
extern int android_atomic_cas(int32_t old_value, int32_t new_value,
                              volatile int32_t *ptr);
#elif defined(__ARM_HAVE_LDREX_STREX)
/*
 * Compare-and-swap with no barrier of its own (the acquire/release
 * wrappers below add ordering).  Returns 0 if *ptr held old_value and
 * was replaced with new_value, nonzero otherwise.
 */
extern inline int android_atomic_cas(int32_t old_value, int32_t new_value,
                                     volatile int32_t *ptr)
{
    int32_t prev, status;
    do {
        /*
         * strexeq stores new_value only when the loaded value equals
         * old_value.  status becomes nonzero only when that conditional
         * store was attempted and lost the exclusive monitor — in which
         * case the whole sequence is retried.
         */
        __asm__ __volatile__ ("ldrex %0, [%3]\n"
                              "mov %1, #0\n"
                              "teq %0, %4\n"
                              "strexeq %1, %5, [%3]"
                              : "=&r" (prev), "=&r" (status), "+m"(*ptr)
                              : "r" (ptr), "Ir" (old_value), "r" (new_value)
                              : "cc");
    } while (__builtin_expect(status != 0, 0));
    return prev != old_value;
}
#else
/*
 * Older ARM: use the kernel cmpxchg user helper at the fixed address
 * 0xffff0fc0, which returns 0 when the swap succeeded.
 */
extern inline int android_atomic_cas(int32_t old_value, int32_t new_value,
                                     volatile int32_t *ptr)
{
    typedef int (kuser_cmpxchg)(int32_t, int32_t, volatile int32_t *);
    int32_t prev, status;
    prev = *ptr;
    do {
        status = (*(kuser_cmpxchg *)0xffff0fc0)(old_value, new_value, ptr);
        if (__builtin_expect(status == 0, 1))
            return 0;
        /* Helper failed; it may have been a spurious failure, so retry
         * for as long as *ptr still holds the expected old_value. */
        prev = *ptr;
    } while (prev == old_value);
    return 1;
}
#endif

/*
 * Compare-and-swap followed by a full barrier (acquire ordering).
 * Returns 0 on success, nonzero on failure.
 */
extern inline int android_atomic_acquire_cas(int32_t old_value,
                                             int32_t new_value,
                                             volatile int32_t *ptr)
{
    int failed = android_atomic_cas(old_value, new_value, ptr);
    android_memory_barrier();   /* barrier after the CAS: acquire */
    return failed;
}

/*
 * Full barrier followed by a compare-and-swap (release ordering).
 * Returns 0 on success, nonzero on failure.
 */
extern inline int android_atomic_release_cas(int32_t old_value,
                                             int32_t new_value,
                                             volatile int32_t *ptr)
{
    int failed;
    android_memory_barrier();   /* barrier before the CAS: release */
    failed = android_atomic_cas(old_value, new_value, ptr);
    return failed;
}


#if defined(__thumb__)
/* ARM-mode assembly below; Thumb builds use an out-of-line version. */
extern int32_t android_atomic_swap(int32_t new_value,
                                   volatile int32_t *ptr);
#elif defined(__ARM_HAVE_LDREX_STREX)
/*
 * Atomically store new_value to *ptr and return the previous value.
 * The barrier after the exchange gives the operation acquire ordering.
 */
extern inline int32_t android_atomic_swap(int32_t new_value,
                                          volatile int32_t *ptr)
{
    int32_t prev, status;
    do {
        /* status is nonzero when the exclusive store lost the monitor;
         * retry the ldrex/strex pair in that case. */
        __asm__ __volatile__ ("ldrex %0, [%3]\n"
                              "strex %1, %4, [%3]"
                              : "=&r" (prev), "=&r" (status), "+m" (*ptr)
                              : "r" (ptr), "r" (new_value)
                              : "cc");
    } while (__builtin_expect(status != 0, 0));
    android_memory_barrier();
    return prev;
}
#else
/*
 * Older ARM without ldrex/strex: the (deprecated) SWP instruction does
 * the exchange as a single atomic operation.
 */
extern inline int32_t android_atomic_swap(int32_t new_value,
                                          volatile int32_t *ptr)
{
    int32_t prev;
    __asm__ __volatile__ ("swp %0, %2, [%3]"
                          : "=&r" (prev), "+m" (*ptr)
                          : "r" (new_value), "r" (ptr)
                          : "cc");
    android_memory_barrier();
    return prev;
}
#endif

#if defined(__thumb__)
/* ARM-mode assembly below; Thumb builds use an out-of-line version. */
extern int32_t android_atomic_add(int32_t increment,
                                  volatile int32_t *ptr);
#elif defined(__ARM_HAVE_LDREX_STREX)
/*
 * Atomically add increment to *ptr; returns the PREVIOUS value.
 * The barrier precedes the update, ordering earlier accesses before it.
 */
extern inline int32_t android_atomic_add(int32_t increment,
                                         volatile int32_t *ptr)
{
    int32_t prev, tmp, status;
    android_memory_barrier();
    do {
        /* status is nonzero when the exclusive store failed; retry. */
        __asm__ __volatile__ ("ldrex %0, [%4]\n"
                              "add %1, %0, %5\n"
                              "strex %2, %1, [%4]"
                              : "=&r" (prev), "=&r" (tmp),
                                "=&r" (status), "+m" (*ptr)
                              : "r" (ptr), "Ir" (increment)
                              : "cc");
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
#else
/* Fallback: build the read-modify-write out of the CAS primitive. */
extern inline int32_t android_atomic_add(int32_t increment,
                                         volatile int32_t *ptr)
{
    int32_t prev, status;
    android_memory_barrier();
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev + increment, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
#endif

/* Atomically add 1 to *addr; returns the previous value. */
extern inline int32_t android_atomic_inc(volatile int32_t *addr)
{
    return android_atomic_add(1, addr);
}

/* Atomically subtract 1 from *addr; returns the previous value. */
extern inline int32_t android_atomic_dec(volatile int32_t *addr)
{
    return android_atomic_add(-1, addr);
}

#if defined(__thumb__)
/* ARM-mode assembly below; Thumb builds use an out-of-line version. */
extern int32_t android_atomic_and(int32_t value, volatile int32_t *ptr);
#elif defined(__ARM_HAVE_LDREX_STREX)
/*
 * Atomically AND value into *ptr; returns the PREVIOUS value.
 * The barrier precedes the update, ordering earlier accesses before it.
 */
extern inline int32_t android_atomic_and(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, tmp, status;
    android_memory_barrier();
    do {
        /* status is nonzero when the exclusive store failed; retry. */
        __asm__ __volatile__ ("ldrex %0, [%4]\n"
                              "and %1, %0, %5\n"
                              "strex %2, %1, [%4]"
                              : "=&r" (prev), "=&r" (tmp),
                                "=&r" (status), "+m" (*ptr)
                              : "r" (ptr), "Ir" (value)
                              : "cc");
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
#else
/* Fallback: build the read-modify-write out of the CAS primitive. */
extern inline int32_t android_atomic_and(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, status;
    android_memory_barrier();
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev & value, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
#endif

#if defined(__thumb__)
/* ARM-mode assembly below; Thumb builds use an out-of-line version. */
extern int32_t android_atomic_or(int32_t value, volatile int32_t *ptr);
#elif defined(__ARM_HAVE_LDREX_STREX)
/*
 * Atomically OR value into *ptr; returns the PREVIOUS value.
 * The barrier precedes the update, ordering earlier accesses before it.
 */
extern inline int32_t android_atomic_or(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, tmp, status;
    android_memory_barrier();
    do {
        /* status is nonzero when the exclusive store failed; retry. */
        __asm__ __volatile__ ("ldrex %0, [%4]\n"
                              "orr %1, %0, %5\n"
                              "strex %2, %1, [%4]"
                              : "=&r" (prev), "=&r" (tmp),
                                "=&r" (status), "+m" (*ptr)
                              : "r" (ptr), "Ir" (value)
                              : "cc");
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
#else
/* Fallback: build the read-modify-write out of the CAS primitive. */
extern inline int32_t android_atomic_or(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, status;
    android_memory_barrier();
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev | value, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
#endif

#endif /* ANDROID_CUTILS_ATOMIC_ARM_H */
