/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ANDROID_CUTILS_ATOMIC_X86_H
#define ANDROID_CUTILS_ATOMIC_X86_H

#include <stdint.h>

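/*
 * Compiler-only barrier: the "memory" clobber keeps the compiler from
 * reordering or caching memory accesses across this point, but it emits
 * no machine instruction.
 */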
extern inline void android_compiler_barrier(void)
{
    __asm__ __volatile__ ("" : : : "memory");
}

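/*
 * On uniprocessor builds (ANDROID_SMP == 0) a compiler barrier is enough.
 * On SMP, a full barrier requires mfence, while a store/store barrier still
 * only needs to constrain the compiler, because x86 does not reorder stores
 * with other stores.
 */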
#if ANDROID_SMP == 0
extern inline void android_memory_barrier(void)
{
    android_compiler_barrier();
}
extern inline void android_memory_store_barrier(void)
{
    android_compiler_barrier();
}
#else
extern inline void android_memory_barrier(void)
{
    __asm__ __volatile__ ("mfence" : : : "memory");
}
extern inline void android_memory_store_barrier(void)
{
    android_compiler_barrier();
}
#endif

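/*
 * On x86 a plain load already has acquire semantics and a plain store
 * already has release semantics, so acquire loads and release stores only
 * need a compiler barrier.  The one hardware reordering to guard against is
 * a store followed by a later load, which is why the release-load and
 * acquire-store variants use a full memory barrier.
 */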
extern inline int32_t android_atomic_acquire_load(volatile const int32_t *ptr)
{
    int32_t value = *ptr;
    android_compiler_barrier();
    return value;
}

extern inline int32_t android_atomic_release_load(volatile const int32_t *ptr)
{
    android_memory_barrier();
    return *ptr;
}

extern inline void android_atomic_acquire_store(int32_t value,
                                                volatile int32_t *ptr)
{
    *ptr = value;
    android_memory_barrier();
}

extern inline void android_atomic_release_store(int32_t value,
                                                volatile int32_t *ptr)
{
    android_compiler_barrier();
    *ptr = value;
}

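/*
 * Compare-and-swap via lock cmpxchgl: if *ptr equals old_value, *ptr is set
 * to new_value.  Returns 0 if the swap happened, nonzero otherwise.
 */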
extern inline int android_atomic_cas(int32_t old_value, int32_t new_value,
                                     volatile int32_t *ptr)
{
    int32_t prev;
    __asm__ __volatile__ ("lock; cmpxchgl %1, %2"
                          : "=a" (prev)
                          : "q" (new_value), "m" (*ptr), "0" (old_value)
                          : "memory");
    return prev != old_value;
}

extern inline int android_atomic_acquire_cas(int32_t old_value,
                                             int32_t new_value,
                                             volatile int32_t *ptr)
{
    /* Loads are not reordered with other loads. */
    return android_atomic_cas(old_value, new_value, ptr);
}

extern inline int android_atomic_release_cas(int32_t old_value,
                                             int32_t new_value,
                                             volatile int32_t *ptr)
{
    /* Stores are not reordered with other stores. */
    return android_atomic_cas(old_value, new_value, ptr);
}

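/*
 * Atomic exchange.  xchgl with a memory operand implies the lock prefix, so
 * no explicit "lock" is needed and the exchange acts as a full barrier.
 */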
extern inline int32_t android_atomic_swap(int32_t new_value,
                                          volatile int32_t *ptr)
{
    __asm__ __volatile__ ("xchgl %1, %0"
                          : "=r" (new_value)
                          : "m" (*ptr), "0" (new_value)
                          : "memory");
    /* new_value now holds the old value of *ptr */
    return new_value;
}

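/*
 * Atomic fetch-and-add using lock xaddl; returns the value *ptr held before
 * the addition.
 */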
extern inline int32_t android_atomic_add(int32_t increment,
                                         volatile int32_t *ptr)
{
    __asm__ __volatile__ ("lock; xaddl %0, %1"
                          : "+r" (increment), "+m" (*ptr)
                          : : "memory");
    /* increment now holds the old value of *ptr */
    return increment;
}

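/* Convenience wrappers around android_atomic_add; both return the old value. */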
extern inline int32_t android_atomic_inc(volatile int32_t *addr)
{
    return android_atomic_add(1, addr);
}

extern inline int32_t android_atomic_dec(volatile int32_t *addr)
{
    return android_atomic_add(-1, addr);
}

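/*
 * Bitwise and/or implemented as compare-and-swap loops: read the current
 * value, attempt the CAS, and retry (expected to be rare) if another thread
 * changed *ptr in between.  Both return the previous value.
 */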
extern inline int32_t android_atomic_and(int32_t value,
                                         volatile int32_t *ptr)
{
    int32_t prev, status;
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev & value, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}

extern inline int32_t android_atomic_or(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, status;
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev | value, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}

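/*
 * Illustrative usage sketch: a simple spin lock built from the primitives
 * above (no back-off or fairness; for illustration only).  Acquiring spins
 * until the 0 -> 1 transition succeeds with acquire semantics; releasing
 * stores 0 with release semantics:
 *
 *     static volatile int32_t lock = 0;
 *
 *     while (android_atomic_acquire_cas(0, 1, &lock) != 0) { }
 *     ... critical section ...
 *     android_atomic_release_store(0, &lock);
 */
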
#endif /* ANDROID_CUTILS_ATOMIC_X86_H */