// -*- Mode: C++; c-basic-offset: 2; indent-tabs-mode: nil -*-
// Copyright (c) 2014, Linaro
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// ---
//
// Author: Riku Voipio, riku.voipio@linaro.org
//
// atomic primitives implemented with gcc atomic intrinsics:
// http://gcc.gnu.org/onlinedocs/gcc/_005f_005fatomic-Builtins.html
//
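// A minimal usage sketch of these primitives (illustrative only; the
// spinlock below is a hypothetical caller, not part of this package):
//
//   static Atomic32 lock_word = 0;
//
//   void Lock() {
//     // Spin until the word is swapped from 0 to 1; acquire semantics
//     // order the critical section after a successful swap.
//     while (base::subtle::Acquire_CompareAndSwap(&lock_word, 0, 1) != 0) { }
//   }
//
//   void Unlock() {
//     // Release_Store issues a barrier before clearing the lock word, so
//     // writes made inside the critical section become visible first.
//     base::subtle::Release_Store(&lock_word, 0);
//   }
//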

#ifndef BASE_ATOMICOPS_INTERNALS_GCC_GENERIC_H_
#define BASE_ATOMICOPS_INTERNALS_GCC_GENERIC_H_

#include <stdio.h>
#include <stdlib.h>
#include "base/basictypes.h"

typedef int32_t Atomic32;

namespace base {
namespace subtle {

typedef int64_t Atomic64;

inline void MemoryBarrier() {
  __sync_synchronize();
}

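// Note: on failure, __atomic_compare_exchange_n writes the value actually
// found at ptr back into prev_value, so the CompareAndSwap routines below
// return the previous contents of *ptr whether or not the swap happened.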
inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  Atomic32 prev_value = old_value;
  __atomic_compare_exchange_n(ptr, &prev_value, new_value,
                              0, __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  return prev_value;
}

inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
                                         Atomic32 new_value) {
  return __atomic_exchange_n(const_cast<Atomic32*>(ptr), new_value, __ATOMIC_RELAXED);
}

inline Atomic32 Acquire_AtomicExchange(volatile Atomic32* ptr,
                                       Atomic32 new_value) {
  return __atomic_exchange_n(const_cast<Atomic32*>(ptr), new_value, __ATOMIC_ACQUIRE);
}

inline Atomic32 Release_AtomicExchange(volatile Atomic32* ptr,
                                       Atomic32 new_value) {
  return __atomic_exchange_n(const_cast<Atomic32*>(ptr), new_value, __ATOMIC_RELEASE);
}

inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 prev_value = old_value;
  __atomic_compare_exchange_n(ptr, &prev_value, new_value,
                              0, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
  return prev_value;
}

inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 prev_value = old_value;
  __atomic_compare_exchange_n(ptr, &prev_value, new_value,
                              0, __ATOMIC_RELEASE, __ATOMIC_RELAXED);
  return prev_value;
}

inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
}

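// Note the barrier placement in the store/load routines below: Acquire_Store
// performs the store and then a full barrier, while Release_Store performs a
// full barrier and then the store; Acquire_Load and Release_Load mirror this
// pattern for loads.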
inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
  MemoryBarrier();
}

inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
  MemoryBarrier();
  *ptr = value;
}

inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
  return *ptr;
}

inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
  Atomic32 value = *ptr;
  MemoryBarrier();
  return value;
}

inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
  MemoryBarrier();
  return *ptr;
}

// 64-bit versions

inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr,
                                         Atomic64 old_value,
                                         Atomic64 new_value) {
  Atomic64 prev_value = old_value;
  __atomic_compare_exchange_n(ptr, &prev_value, new_value,
                              0, __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  return prev_value;
}

inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr,
                                         Atomic64 new_value) {
  return __atomic_exchange_n(const_cast<Atomic64*>(ptr), new_value, __ATOMIC_RELAXED);
}

inline Atomic64 Acquire_AtomicExchange(volatile Atomic64* ptr,
                                       Atomic64 new_value) {
  return __atomic_exchange_n(const_cast<Atomic64*>(ptr), new_value, __ATOMIC_ACQUIRE);
}

inline Atomic64 Release_AtomicExchange(volatile Atomic64* ptr,
                                       Atomic64 new_value) {
  return __atomic_exchange_n(const_cast<Atomic64*>(ptr), new_value, __ATOMIC_RELEASE);
}

inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 prev_value = old_value;
  __atomic_compare_exchange_n(ptr, &prev_value, new_value,
                              0, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
  return prev_value;
}

inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 prev_value = old_value;
  __atomic_compare_exchange_n(ptr, &prev_value, new_value,
                              0, __ATOMIC_RELEASE, __ATOMIC_RELAXED);
  return prev_value;
}

inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
  *ptr = value;
}

inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) {
  *ptr = value;
  MemoryBarrier();
}

inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
  MemoryBarrier();
  *ptr = value;
}

inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
  return *ptr;
}

inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
  Atomic64 value = *ptr;
  MemoryBarrier();
  return value;
}

inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
  MemoryBarrier();
  return *ptr;
}

} // namespace base::subtle
} // namespace base

#endif // BASE_ATOMICOPS_INTERNALS_GCC_GENERIC_H_