/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ATOMIC_H_
#define ART_RUNTIME_ATOMIC_H_

#include <stdint.h>
#include <vector>

#include "base/macros.h"

namespace art {

class Mutex;

template<typename T>
class Atomic {
 public:
  Atomic<T>() : value_(0) { }

  explicit Atomic<T>(T value) : value_(value) { }

  Atomic<T>& operator=(T desired) {
    Store(desired);
    return *this;
  }

  T Load() const {
    return value_;
  }

  operator T() const {
    return Load();
  }

  T FetchAndAdd(const T value) {
    return __sync_fetch_and_add(&value_, value);  // Return old value.
  }

  T FetchAndSub(const T value) {
    return __sync_fetch_and_sub(&value_, value);  // Return old value.
  }

  T operator++() {  // Prefix operator.
    return __sync_add_and_fetch(&value_, 1);  // Return new value.
  }

  T operator++(int) {  // Postfix operator.
    return __sync_fetch_and_add(&value_, 1);  // Return old value.
  }

  T operator--() {  // Prefix operator.
    return __sync_sub_and_fetch(&value_, 1);  // Return new value.
  }

  T operator--(int) {  // Postfix operator.
    return __sync_fetch_and_sub(&value_, 1);  // Return old value.
  }

  bool CompareAndSwap(T expected_value, T desired_value) {
    return __sync_bool_compare_and_swap(&value_, expected_value, desired_value);
  }

  volatile T* Address() {
    return &value_;
  }

 private:
  // Non-atomic store backing operator=; not safe in the face of concurrent accesses.
  void Store(T desired) {
    value_ = desired;
  }

  volatile T value_;
};

typedef Atomic<int32_t> AtomicInteger;
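
// Example (illustrative only, not part of this header): a shared counter
// built on Atomic<T>. "hits" is a hypothetical variable name.
//
//   AtomicInteger hits(0);
//   hits++;                      // Atomic increment; returns the old value.
//   int32_t seen = hits.Load();
//   if (hits.CompareAndSwap(seen, 0)) {
//     // Reset succeeded: no other thread changed "hits" since the Load.
//   }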

// NOTE: Two "quasiatomic" operations on the exact same memory address
// are guaranteed to operate atomically with respect to each other,
// but no guarantees are made about quasiatomic operations mixed with
// non-quasiatomic operations on the same address, nor about
// quasiatomic operations that are performed on partially-overlapping
// memory.
class QuasiAtomic {
#if defined(__mips__) && !defined(__LP64__)
  static constexpr bool kNeedSwapMutexes = true;
#else
  static constexpr bool kNeedSwapMutexes = false;
#endif

 public:
  static void Startup();

  static void Shutdown();

  // Reads the 64-bit value at "addr" without tearing.
  static int64_t Read64(volatile const int64_t* addr) {
    if (!kNeedSwapMutexes) {
      return *addr;
    } else {
      return SwapMutexRead64(addr);
    }
  }

  // Writes to the 64-bit value at "addr" without tearing.
  static void Write64(volatile int64_t* addr, int64_t val) {
    if (!kNeedSwapMutexes) {
      *addr = val;
    } else {
      SwapMutexWrite64(addr, val);
    }
  }

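  // Example (illustrative): accessing a shared 64-bit field without tearing,
  // including on 32-bit targets. "counter" is a hypothetical variable.
  //
  //   volatile int64_t counter = 0;
  //   QuasiAtomic::Write64(&counter, 0x100000001LL);
  //   int64_t v = QuasiAtomic::Read64(&counter);  // Never sees a half-written value.
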
  // Atomically compares the value at "addr" to "old_value"; if they are equal, replaces it with
  // "new_value" and returns true. Otherwise leaves the value unchanged and returns false.
  static bool Cas64(int64_t old_value, int64_t new_value, volatile int64_t* addr) {
    if (!kNeedSwapMutexes) {
      return __sync_bool_compare_and_swap(addr, old_value, new_value);
    } else {
      return SwapMutexCas64(old_value, new_value, addr);
    }
  }
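
  // Example (illustrative): the usual lock-free update loop built on Cas64.
  // "addr" is the shared location and "Update" is a placeholder for whatever
  // computes the new value.
  //
  //   int64_t old_value;
  //   do {
  //     old_value = QuasiAtomic::Read64(addr);
  //   } while (!QuasiAtomic::Cas64(old_value, Update(old_value), addr));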

  // Does the architecture provide reasonable atomic long operations or do we fall back on mutexes?
  static bool LongAtomicsUseMutexes() {
    return kNeedSwapMutexes;  // True when 64-bit accesses must be guarded by swap mutexes.
  }

  static void MembarLoadStore() {
  #if defined(__arm__) || defined(__aarch64__)
    __asm__ __volatile__("dmb ish" : : : "memory");
  #elif defined(__i386__) || defined(__x86_64__)
    __asm__ __volatile__("" : : : "memory");
  #elif defined(__mips__)
    __asm__ __volatile__("sync" : : : "memory");
  #else
  #error Unexpected architecture
  #endif
  }

  static void MembarLoadLoad() {
  #if defined(__arm__) || defined(__aarch64__)
    __asm__ __volatile__("dmb ish" : : : "memory");
  #elif defined(__i386__) || defined(__x86_64__)
    __asm__ __volatile__("" : : : "memory");
  #elif defined(__mips__)
    __asm__ __volatile__("sync" : : : "memory");
  #else
  #error Unexpected architecture
  #endif
  }

  static void MembarStoreStore() {
  #if defined(__arm__) || defined(__aarch64__)
    __asm__ __volatile__("dmb ishst" : : : "memory");
  #elif defined(__i386__) || defined(__x86_64__)
    __asm__ __volatile__("" : : : "memory");
  #elif defined(__mips__)
    __asm__ __volatile__("sync" : : : "memory");
  #else
  #error Unexpected architecture
  #endif
  }

  static void MembarStoreLoad() {
  #if defined(__arm__) || defined(__aarch64__)
    __asm__ __volatile__("dmb ish" : : : "memory");
  #elif defined(__i386__) || defined(__x86_64__)
    __asm__ __volatile__("mfence" : : : "memory");
  #elif defined(__mips__)
    __asm__ __volatile__("sync" : : : "memory");
  #else
  #error Unexpected architecture
  #endif
  }
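
  // Example (illustrative): MembarStoreLoad orders a store before a later load
  // of a *different* location, as in Dekker-style flag handshakes. "flag_a" and
  // "flag_b" are hypothetical shared variables.
  //
  //   flag_a = 1;
  //   QuasiAtomic::MembarStoreLoad();
  //   if (flag_b == 0) {
  //     // The other thread cannot also have passed its check without one of
  //     // the two stores being visible.
  //   }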

 private:
  static Mutex* GetSwapMutex(const volatile int64_t* addr);
  static int64_t SwapMutexRead64(volatile const int64_t* addr);
  static void SwapMutexWrite64(volatile int64_t* addr, int64_t val);
  static bool SwapMutexCas64(int64_t old_value, int64_t new_value, volatile int64_t* addr);

  // We stripe across a bunch of different mutexes to reduce contention.
  static constexpr size_t kSwapMutexCount = 32;
  static std::vector<Mutex*>* gSwapMutexes;
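
  // Illustrative sketch only (the real definition lives in atomic.cc, and the
  // hash shift below is an assumption): GetSwapMutex maps an address onto one
  // of the striped mutexes, e.g.
  //
  //   Mutex* QuasiAtomic::GetSwapMutex(const volatile int64_t* addr) {
  //     return (*gSwapMutexes)[(reinterpret_cast<uintptr_t>(addr) >> 3) % kSwapMutexCount];
  //   }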

  DISALLOW_COPY_AND_ASSIGN(QuasiAtomic);
};

}  // namespace art

#endif  // ART_RUNTIME_ATOMIC_H_