/* atomic-x86_64.h revision 4bc2f8db0ad28604d6e2c394ca1b27736a0941ca */
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 * All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 *  * Redistributions of source code must retain the above copyright
9 *    notice, this list of conditions and the following disclaimer.
10 *  * Redistributions in binary form must reproduce the above copyright
11 *    notice, this list of conditions and the following disclaimer in
12 *    the documentation and/or other materials provided with the
13 *    distribution.
14 *
15 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
16 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
17 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
18 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
19 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
20 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
21 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
22 * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
23 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
25 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
26 * SUCH DAMAGE.
27 */
28
29#ifndef ANDROID_CUTILS_ATOMIC_X86_64_H
30#define ANDROID_CUTILS_ATOMIC_X86_64_H
31
32#include <stdint.h>
33
34#ifndef ANDROID_ATOMIC_INLINE
35#define ANDROID_ATOMIC_INLINE inline __attribute__((always_inline))
36#endif
37
/*
 * Compiler-only barrier: prevents the compiler from reordering memory
 * accesses across this point.  Emits no machine instructions; the empty
 * asm with a "memory" clobber forces the compiler to assume all of
 * memory may have been read or written here.
 */
extern ANDROID_ATOMIC_INLINE
void android_compiler_barrier(void)
{
    __asm__ __volatile__ ("" : : : "memory");
}
43
#if ANDROID_SMP == 0
/*
 * Uniprocessor configuration: no other core can observe this CPU's
 * accesses out of order, so a compiler-only barrier suffices for both
 * the full barrier and the store barrier.
 */
extern ANDROID_ATOMIC_INLINE
void android_memory_barrier(void)
{
    android_compiler_barrier();
}
extern ANDROID_ATOMIC_INLINE
void android_memory_store_barrier(void)
{
    android_compiler_barrier();
}
#else
/*
 * SMP configuration: a full barrier requires a real fence instruction
 * (mfence), which also acts as a compiler barrier via the clobber.
 */
extern ANDROID_ATOMIC_INLINE
void android_memory_barrier(void)
{
    __asm__ __volatile__ ("mfence" : : : "memory");
}
/*
 * Stores are not reordered with other stores on x86, so preventing
 * compiler reordering is sufficient for a store/store barrier even
 * on SMP.
 */
extern ANDROID_ATOMIC_INLINE
void android_memory_store_barrier(void)
{
    android_compiler_barrier();
}
#endif
67
68extern ANDROID_ATOMIC_INLINE
69int32_t android_atomic_acquire_load(volatile const int32_t *ptr)
70{
71    int32_t value = *ptr;
72    android_compiler_barrier();
73    return value;
74}
75
76extern ANDROID_ATOMIC_INLINE
77int64_t android_atomic_acquire_load64(volatile const int64_t *ptr)
78{
79    int64_t value = *ptr;
80    android_compiler_barrier();
81    return value;
82}
83
84extern ANDROID_ATOMIC_INLINE
85int32_t android_atomic_release_load(volatile const int32_t *ptr)
86{
87    android_memory_barrier();
88    return *ptr;
89}
90
91extern ANDROID_ATOMIC_INLINE
92int64_t android_atomic_release_load64(volatile const int64_t *ptr)
93{
94    android_memory_barrier();
95    return *ptr;
96}
97
/*
 * Store value to *ptr, then issue a full memory barrier so the store
 * is ordered before any subsequent accesses.
 */
extern ANDROID_ATOMIC_INLINE
void android_atomic_acquire_store(int32_t value, volatile int32_t *ptr)
{
    *ptr = value;
    android_memory_barrier();
}
104
/*
 * 64-bit variant of android_atomic_acquire_store.
 */
extern ANDROID_ATOMIC_INLINE
void android_atomic_acquire_store64(int64_t value, volatile int64_t *ptr)
{
    *ptr = value;
    android_memory_barrier();
}
111
/*
 * Release store: earlier accesses may not be moved below the store by
 * the compiler.  Stores are not reordered with other stores on x86, so
 * no fence instruction is needed.
 */
extern ANDROID_ATOMIC_INLINE
void android_atomic_release_store(int32_t value, volatile int32_t *ptr)
{
    android_compiler_barrier();
    *ptr = value;
}
118
/*
 * 64-bit variant of android_atomic_release_store.
 */
extern ANDROID_ATOMIC_INLINE
void android_atomic_release_store64(int64_t value, volatile int64_t *ptr)
{
    android_compiler_barrier();
    *ptr = value;
}
125
/*
 * Atomic compare-and-swap: if *ptr == old_value, store new_value into
 * *ptr.  Returns 0 if the swap was performed, nonzero if it was not
 * (legacy android_atomic convention — the inverse of a boolean
 * "success").
 *
 * cmpxchg implicitly uses %eax: the "0" (old_value) constraint loads
 * old_value into %eax, and "=a" (prev) reads back what the instruction
 * left there.  *ptr appears only as an input "m" constraint even though
 * it is written; the "memory" clobber makes that safe and also turns
 * the CAS into a compiler barrier.
 */
extern ANDROID_ATOMIC_INLINE
int android_atomic_cas(int32_t old_value, int32_t new_value,
                       volatile int32_t *ptr)
{
    int32_t prev;
    __asm__ __volatile__ ("lock; cmpxchgl %1, %2"
                          : "=a" (prev)
                          : "q" (new_value), "m" (*ptr), "0" (old_value)
                          : "memory");
    return prev != old_value;
}
137
/*
 * 64-bit variant of android_atomic_cas: if *ptr == old_value, store
 * new_value.  Returns 0 on success, nonzero on failure.
 *
 * NOTE(review): the return type is int64_t even though the result is
 * only ever 0 or 1, unlike the 32-bit variant which returns int.  Kept
 * as-is because callers may depend on the declared type.
 */
extern ANDROID_ATOMIC_INLINE
int64_t android_atomic_cas64(int64_t old_value, int64_t new_value,
                             volatile int64_t *ptr)
{
    int64_t prev;
    __asm__ __volatile__ ("lock; cmpxchgq %1, %2"
                          : "=a" (prev)
                          : "q" (new_value), "m" (*ptr), "0" (old_value)
                          : "memory");
    return prev != old_value;
}
149
150extern ANDROID_ATOMIC_INLINE
151int android_atomic_acquire_cas(int32_t old_value, int32_t new_value,
152                               volatile int32_t *ptr)
153{
154    /* Loads are not reordered with other loads. */
155    return android_atomic_cas(old_value, new_value, ptr);
156}
157
158extern ANDROID_ATOMIC_INLINE
159int64_t android_atomic_acquire_cas64(int64_t old_value, int64_t new_value,
160                                     volatile int64_t *ptr)
161{
162    /* Loads are not reordered with other loads. */
163    return android_atomic_cas64(old_value, new_value, ptr);
164}
165
166extern ANDROID_ATOMIC_INLINE
167int android_atomic_release_cas(int32_t old_value, int32_t new_value,
168                               volatile int32_t *ptr)
169{
170    /* Stores are not reordered with other stores. */
171    return android_atomic_cas(old_value, new_value, ptr);
172}
173
174extern ANDROID_ATOMIC_INLINE
175int64_t android_atomic_release_cas64(int64_t old_value, int64_t new_value,
176                                     volatile int64_t *ptr)
177{
178    /* Stores are not reordered with other stores. */
179    return android_atomic_cas64(old_value, new_value, ptr);
180}
181
/*
 * Atomically add `increment` to *ptr and return the PREVIOUS value of
 * *ptr.  xadd exchanges its operands and then adds, so the register
 * operand ("+r" increment) ends up holding the old memory contents.
 * The lock prefix makes the read-modify-write atomic; the "memory"
 * clobber also makes this a compiler barrier.
 */
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_add(int32_t increment, volatile int32_t *ptr)
{
    __asm__ __volatile__ ("lock; xaddl %0, %1"
                          : "+r" (increment), "+m" (*ptr)
                          : : "memory");
    /* increment now holds the old value of *ptr */
    return increment;
}
191
192extern ANDROID_ATOMIC_INLINE
193int32_t android_atomic_inc(volatile int32_t *addr)
194{
195    return android_atomic_add(1, addr);
196}
197
198extern ANDROID_ATOMIC_INLINE
199int32_t android_atomic_dec(volatile int32_t *addr)
200{
201    return android_atomic_add(-1, addr);
202}
203
204extern ANDROID_ATOMIC_INLINE
205int32_t android_atomic_and(int32_t value, volatile int32_t *ptr)
206{
207    int32_t prev, status;
208    do {
209        prev = *ptr;
210        status = android_atomic_cas(prev, prev & value, ptr);
211    } while (__builtin_expect(status != 0, 0));
212    return prev;
213}
214
215extern ANDROID_ATOMIC_INLINE
216int32_t android_atomic_or(int32_t value, volatile int32_t *ptr)
217{
218    int32_t prev, status;
219    do {
220        prev = *ptr;
221        status = android_atomic_cas(prev, prev | value, ptr);
222    } while (__builtin_expect(status != 0, 0));
223    return prev;
224}
225
226#endif /* ANDROID_CUTILS_ATOMIC_X86_64_H */
227