// Support for atomic operations -*- C++ -*-

// Copyright (C) 2004, 2005, 2006, 2008, 2009 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file atomicity.h
 *  This is an internal header file, included by other library headers.
 *  You should not attempt to use it directly.
 */

#ifndef _GLIBCXX_ATOMICITY_H
#define _GLIBCXX_ATOMICITY_H 1

#include <bits/c++config.h>
#include <bits/gthr.h>
#include <bits/atomic_word.h>

_GLIBCXX_BEGIN_NAMESPACE(__gnu_cxx)

  // Functions for portable atomic access.
  // To abstract locking primitives across all thread policies, use:
  // __exchange_and_add_dispatch
  // __atomic_add_dispatch
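  //
  // Illustrative sketch only (not part of this header): a reference-
  // counted handle could use the dispatch layer so that the atomic
  // path is taken only when threads are active.  The names _Ref and
  // _M_count below are hypothetical.
  //
  //   void _Ref::_M_add_reference()
  //   { __gnu_cxx::__atomic_add_dispatch(&_M_count, 1); }
  //
  //   void _Ref::_M_remove_reference()
  //   {
  //     if (__gnu_cxx::__exchange_and_add_dispatch(&_M_count, -1) == 1)
  //       delete this;
  //   }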
#ifdef _GLIBCXX_ATOMIC_BUILTINS_4
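  // _GLIBCXX_ATOMIC_BUILTINS_4 is defined by the configure machinery
  // when the target supports the __sync builtins on 4-byte quantities,
  // so the primitives map directly onto __sync_fetch_and_add.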
  static inline _Atomic_word
  __exchange_and_add(volatile _Atomic_word* __mem, int __val)
  { return __sync_fetch_and_add(__mem, __val); }

  static inline void
  __atomic_add(volatile _Atomic_word* __mem, int __val)
  { __sync_fetch_and_add(__mem, __val); }
#else
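  // Otherwise the primitives are provided out of line by the library,
  // typically from a per-architecture atomicity implementation.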
  _Atomic_word
  __attribute__ ((__unused__))
  __exchange_and_add(volatile _Atomic_word*, int);

  void
  __attribute__ ((__unused__))
  __atomic_add(volatile _Atomic_word*, int);
#endif

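  // Non-atomic versions for the single-threaded case: when no other
  // threads can be running, plain reads and writes of *__mem suffice.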
  static inline _Atomic_word
  __exchange_and_add_single(_Atomic_word* __mem, int __val)
  {
    _Atomic_word __result = *__mem;
    *__mem += __val;
    return __result;
  }

  static inline void
  __atomic_add_single(_Atomic_word* __mem, int __val)
  { *__mem += __val; }

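  // Dispatch layer: use the atomic primitive only when the gthreads
  // layer reports that threads may be running; otherwise fall back to
  // the cheaper non-atomic version.  Returns the value *__mem held
  // before the addition.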
  static inline _Atomic_word
  __attribute__ ((__unused__))
  __exchange_and_add_dispatch(_Atomic_word* __mem, int __val)
  {
#ifdef __GTHREADS
    if (__gthread_active_p())
      return __exchange_and_add(__mem, __val);
    else
      return __exchange_and_add_single(__mem, __val);
#else
    return __exchange_and_add_single(__mem, __val);
#endif
  }

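  // As above, but for callers that do not need the previous value.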
  static inline void
  __attribute__ ((__unused__))
  __atomic_add_dispatch(_Atomic_word* __mem, int __val)
  {
#ifdef __GTHREADS
    if (__gthread_active_p())
      __atomic_add(__mem, __val);
    else
      __atomic_add_single(__mem, __val);
#else
    __atomic_add_single(__mem, __val);
#endif
  }

_GLIBCXX_END_NAMESPACE

// Even if the CPU doesn't need a memory barrier, we need to ensure
// that the compiler doesn't reorder memory accesses across the
// barriers.
#ifndef _GLIBCXX_READ_MEM_BARRIER
#define _GLIBCXX_READ_MEM_BARRIER __asm __volatile ("":::"memory")
#endif
#ifndef _GLIBCXX_WRITE_MEM_BARRIER
#define _GLIBCXX_WRITE_MEM_BARRIER __asm __volatile ("":::"memory")
#endif
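
// Illustrative sketch only: a writer can publish a lazily-initialized
// object by storing its fields, issuing the write barrier, and then
// setting a flag; a reader that sees the flag issues the read barrier
// before touching the fields.  _M_obj, _M_ready, __build and __use are
// hypothetical names.
//
//   _M_obj = __build();          // writer
//   _GLIBCXX_WRITE_MEM_BARRIER;
//   _M_ready = 1;
//
//   if (_M_ready)                // reader
//     {
//       _GLIBCXX_READ_MEM_BARRIER;
//       __use(_M_obj);
//     }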

#endif