/* atomic-x86.h revision 464431e65fbede57b0d41d230fe6f6dc465c20f8 */
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ANDROID_CUTILS_ATOMIC_X86_H
#define ANDROID_CUTILS_ATOMIC_X86_H

#include <stdint.h>

/*
 * x86 implementations of the Android atomic operations and memory barriers.
 *
 * All functions operate on aligned 32-bit values.  The CAS-style functions
 * return 0 on success and nonzero on failure (see android_atomic_cas).
 *
 * NOTE(review): these use `extern inline`, which has different semantics
 * under GNU89 vs C99 inline rules — presumably the build compiles with
 * gnu89 inline semantics so a single out-of-line copy is emitted in one
 * translation unit; confirm against the corresponding .c file / build flags.
 */

/* Compiler-only barrier: prevents the compiler from reordering memory
 * accesses across this point.  Emits no CPU fence instruction. */
extern inline void android_compiler_barrier(void)
{
    __asm__ __volatile__ ("" : : : "memory");
}

#if ANDROID_SMP == 0
/* Uniprocessor build: a compiler barrier is all that is needed, since
 * there is no other CPU to observe a different ordering. */
extern inline void android_memory_barrier(void)
{
    android_compiler_barrier();
}
extern inline void android_memory_store_barrier(void)
{
    android_compiler_barrier();
}
#else
/* SMP build: full hardware fence (mfence) plus compiler barrier. */
extern inline void android_memory_barrier(void)
{
    __asm__ __volatile__ ("mfence" : : : "memory");
}
/* Store barrier is compiler-only even on SMP — relies on x86 not
 * reordering stores with other stores (same rationale as the comment on
 * android_atomic_release_cas below). */
extern inline void android_memory_store_barrier(void)
{
    android_compiler_barrier();
}
#endif

/* Load *ptr with acquire semantics: the load may not be reordered with
 * subsequent accesses.  On x86 a compiler barrier after the load suffices. */
extern inline int32_t android_atomic_acquire_load(volatile int32_t *ptr) {
    int32_t value = *ptr;
    android_compiler_barrier();
    return value;
}

/* Load *ptr with release semantics: prior accesses complete before the load.
 * NOTE(review): this issues a full memory barrier (mfence on SMP), unlike
 * the compiler-only barrier in acquire_load — presumably intentional for
 * x86; confirm against the generic atomic.h contract. */
extern inline int32_t android_atomic_release_load(volatile int32_t *ptr) {
    android_memory_barrier();
    return *ptr;
}

/* Store value to *ptr with acquire semantics: the store may not be
 * reordered with subsequent accesses (full barrier after the store). */
extern inline void android_atomic_acquire_store(int32_t value,
                                                volatile int32_t *ptr) {
    *ptr = value;
    android_memory_barrier();
}

/* Store value to *ptr with release semantics: prior accesses may not be
 * reordered past the store.  Compiler barrier only — see the store-barrier
 * rationale above. */
extern inline void android_atomic_release_store(int32_t value,
                                                volatile int32_t *ptr) {
    android_compiler_barrier();
    *ptr = value;
}

/* Atomic compare-and-swap: if *ptr == old_value, set *ptr = new_value.
 * Returns 0 if the swap was performed, nonzero otherwise.
 * Uses `lock cmpxchgl`; eax ("=a"/"0") carries old_value in and the
 * previous *ptr value out. */
extern inline int android_atomic_cas(int32_t old_value, int32_t new_value,
                                     volatile int32_t *ptr)
{
    int32_t prev;
    __asm__ __volatile__ ("lock; cmpxchgl %1, %2"
                          : "=a" (prev)
                          : "q" (new_value), "m" (*ptr), "0" (old_value)
                          : "memory");
    return prev != old_value;
}

extern inline int android_atomic_acquire_cas(int32_t old_value,
                                             int32_t new_value,
                                             volatile int32_t *ptr)
{
    /* Loads are not reordered with other loads. */
    return android_atomic_cas(old_value, new_value, ptr);
}

extern inline int android_atomic_release_cas(int32_t old_value,
                                             int32_t new_value,
                                             volatile int32_t *ptr)
{
    /* Stores are not reordered with other stores. */
    return android_atomic_cas(old_value, new_value, ptr);
}

/* Atomically exchange *ptr with new_value; returns the previous *ptr.
 * No explicit `lock` prefix: xchg with a memory operand is implicitly
 * locked on x86. */
extern inline int32_t android_atomic_swap(int32_t new_value,
                                          volatile int32_t *ptr)
{
    __asm__ __volatile__ ("xchgl %1, %0"
                          : "=r" (new_value)
                          : "m" (*ptr), "0" (new_value)
                          : "memory");
    /* new_value now holds the old value of *ptr */
    return new_value;
}

/* Atomically add increment to *ptr; returns the value *ptr held before
 * the addition (lock xadd writes the old value back into the register). */
extern inline int32_t android_atomic_add(int32_t increment,
                                         volatile int32_t *ptr)
{
    __asm__ __volatile__ ("lock; xaddl %0, %1"
                          : "+r" (increment), "+m" (*ptr)
                          : : "memory");
    /* increment now holds the old value of *ptr */
    return increment;
}

/* Atomic increment; returns the pre-increment value. */
extern inline int32_t android_atomic_inc(volatile int32_t *addr) {
    return android_atomic_add(1, addr);
}

/* Atomic decrement; returns the pre-decrement value. */
extern inline int32_t android_atomic_dec(volatile int32_t *addr) {
    return android_atomic_add(-1, addr);
}

/* Atomically AND value into *ptr via a CAS retry loop; returns the value
 * *ptr held before the operation.  The loop is expected to succeed on the
 * first try (__builtin_expect marks retry as unlikely). */
extern inline int32_t android_atomic_and(int32_t value,
                                         volatile int32_t *ptr)
{
    int32_t prev, status;
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev & value, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}

/* Atomically OR value into *ptr via a CAS retry loop; returns the value
 * *ptr held before the operation. */
extern inline int32_t android_atomic_or(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, status;
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev | value, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}

#endif /* ANDROID_CUTILS_ATOMIC_X86_H */