Elliott Hughes | 2faa5f1 | 2012-01-30 14:42:07 -0800 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2011 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 16 | |
Brian Carlstrom | fc0e321 | 2013-07-17 14:40:12 -0700 | [diff] [blame] | 17 | #ifndef ART_RUNTIME_MIRROR_DEX_CACHE_H_ |
| 18 | #define ART_RUNTIME_MIRROR_DEX_CACHE_H_ |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 19 | |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 20 | #include "array.h" |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 21 | #include "base/bit_utils.h" |
Andreas Gampe | 5794381 | 2017-12-06 21:39:13 -0800 | [diff] [blame^] | 22 | #include "base/mutex.h" |
Andreas Gampe | a5b09a6 | 2016-11-17 15:21:22 -0800 | [diff] [blame] | 23 | #include "dex_file_types.h" |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 24 | #include "object.h" |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 25 | #include "object_array.h" |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 26 | |
| 27 | namespace art { |
| 28 | |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 29 | class ArtField; |
Alex Light | dba6148 | 2016-12-21 08:20:29 -0800 | [diff] [blame] | 30 | class ArtMethod; |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 31 | struct DexCacheOffsets; |
| 32 | class DexFile; |
| 33 | class ImageWriter; |
| 34 | union JValue; |
Andreas Gampe | cc1b535 | 2016-12-01 16:58:38 -0800 | [diff] [blame] | 35 | class LinearAlloc; |
| 36 | class Thread; |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 37 | |
| 38 | namespace mirror { |
| 39 | |
Orion Hodson | c069a30 | 2017-01-18 09:23:12 +0000 | [diff] [blame] | 40 | class CallSite; |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 41 | class Class; |
Narayan Kamath | 25352fc | 2016-08-03 12:46:58 +0100 | [diff] [blame] | 42 | class MethodType; |
Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 43 | class String; |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 44 | |
Narayan Kamath | c38a6f8 | 2016-09-29 17:07:20 +0100 | [diff] [blame] | 45 | template <typename T> struct PACKED(8) DexCachePair { |
| 46 | GcRoot<T> object; |
| 47 | uint32_t index; |
Christina Wadsworth | bf44e0e | 2016-08-18 10:37:42 -0700 | [diff] [blame] | 48 | // The array is initially [ {0,0}, {0,0}, {0,0} ... ] |
| 49 | // We maintain the invariant that once a dex cache entry is populated, |
| 50 | // the pointer is always non-0 |
| 51 | // Any given entry would thus be: |
| 52 | // {non-0, non-0} OR {0,0} |
| 53 | // |
| 54 | // It's generally sufficiently enough then to check if the |
Narayan Kamath | c38a6f8 | 2016-09-29 17:07:20 +0100 | [diff] [blame] | 55 | // lookup index matches the stored index (for a >0 lookup index) |
Christina Wadsworth | bf44e0e | 2016-08-18 10:37:42 -0700 | [diff] [blame] | 56 | // because if it's true the pointer is also non-null. |
| 57 | // |
| 58 | // For the 0th entry which is a special case, the value is either |
| 59 | // {0,0} (initial state) or {non-0, 0} which indicates |
Narayan Kamath | c38a6f8 | 2016-09-29 17:07:20 +0100 | [diff] [blame] | 60 | // that a valid object is stored at that index for a dex section id of 0. |
Christina Wadsworth | bf44e0e | 2016-08-18 10:37:42 -0700 | [diff] [blame] | 61 | // |
Narayan Kamath | c38a6f8 | 2016-09-29 17:07:20 +0100 | [diff] [blame] | 62 | // As an optimization, we want to avoid branching on the object pointer since |
| 63 | // it's always non-null if the id branch succeeds (except for the 0th id). |
Christina Wadsworth | bf44e0e | 2016-08-18 10:37:42 -0700 | [diff] [blame] | 64 | // Set the initial state for the 0th entry to be {0,1} which is guaranteed to fail |
Narayan Kamath | c38a6f8 | 2016-09-29 17:07:20 +0100 | [diff] [blame] | 65 | // the lookup id == stored id branch. |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 66 | DexCachePair(ObjPtr<T> object, uint32_t index) |
Narayan Kamath | c38a6f8 | 2016-09-29 17:07:20 +0100 | [diff] [blame] | 67 | : object(object), |
| 68 | index(index) {} |
Andreas Gampe | d9911ee | 2017-03-27 13:27:24 -0700 | [diff] [blame] | 69 | DexCachePair() : index(0) {} |
Narayan Kamath | c38a6f8 | 2016-09-29 17:07:20 +0100 | [diff] [blame] | 70 | DexCachePair(const DexCachePair<T>&) = default; |
| 71 | DexCachePair& operator=(const DexCachePair<T>&) = default; |
Mathieu Chartier | bb816d6 | 2016-09-07 10:17:46 -0700 | [diff] [blame] | 72 | |
Narayan Kamath | c38a6f8 | 2016-09-29 17:07:20 +0100 | [diff] [blame] | 73 | static void Initialize(std::atomic<DexCachePair<T>>* dex_cache) { |
| 74 | DexCachePair<T> first_elem; |
| 75 | first_elem.object = GcRoot<T>(nullptr); |
| 76 | first_elem.index = InvalidIndexForSlot(0); |
| 77 | dex_cache[0].store(first_elem, std::memory_order_relaxed); |
Christina Wadsworth | bf44e0e | 2016-08-18 10:37:42 -0700 | [diff] [blame] | 78 | } |
Mathieu Chartier | bb816d6 | 2016-09-07 10:17:46 -0700 | [diff] [blame] | 79 | |
Narayan Kamath | c38a6f8 | 2016-09-29 17:07:20 +0100 | [diff] [blame] | 80 | static uint32_t InvalidIndexForSlot(uint32_t slot) { |
Mathieu Chartier | bb816d6 | 2016-09-07 10:17:46 -0700 | [diff] [blame] | 81 | // Since the cache size is a power of two, 0 will always map to slot 0. |
| 82 | // Use 1 for slot 0 and 0 for all other slots. |
| 83 | return (slot == 0) ? 1u : 0u; |
| 84 | } |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 85 | |
| 86 | T* GetObjectForIndex(uint32_t idx) REQUIRES_SHARED(Locks::mutator_lock_) { |
| 87 | if (idx != index) { |
| 88 | return nullptr; |
| 89 | } |
| 90 | DCHECK(!object.IsNull()); |
| 91 | return object.Read(); |
| 92 | } |
Christina Wadsworth | bf44e0e | 2016-08-18 10:37:42 -0700 | [diff] [blame] | 93 | }; |
Narayan Kamath | c38a6f8 | 2016-09-29 17:07:20 +0100 | [diff] [blame] | 94 | |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 95 | template <typename T> struct PACKED(2 * __SIZEOF_POINTER__) NativeDexCachePair { |
| 96 | T* object; |
| 97 | size_t index; |
| 98 | // This is similar to DexCachePair except that we're storing a native pointer |
| 99 | // instead of a GC root. See DexCachePair for the details. |
| 100 | NativeDexCachePair(T* object, uint32_t index) |
| 101 | : object(object), |
| 102 | index(index) {} |
| 103 | NativeDexCachePair() : object(nullptr), index(0u) { } |
| 104 | NativeDexCachePair(const NativeDexCachePair<T>&) = default; |
| 105 | NativeDexCachePair& operator=(const NativeDexCachePair<T>&) = default; |
| 106 | |
| 107 | static void Initialize(std::atomic<NativeDexCachePair<T>>* dex_cache, PointerSize pointer_size); |
| 108 | |
| 109 | static uint32_t InvalidIndexForSlot(uint32_t slot) { |
| 110 | // Since the cache size is a power of two, 0 will always map to slot 0. |
| 111 | // Use 1 for slot 0 and 0 for all other slots. |
| 112 | return (slot == 0) ? 1u : 0u; |
| 113 | } |
| 114 | |
| 115 | T* GetObjectForIndex(uint32_t idx) REQUIRES_SHARED(Locks::mutator_lock_) { |
| 116 | if (idx != index) { |
| 117 | return nullptr; |
| 118 | } |
| 119 | DCHECK(object != nullptr); |
| 120 | return object; |
| 121 | } |
| 122 | }; |
| 123 | |
// Atomic cache-pair aliases, one per kind of dex cache array. The kinds
// whose payload is a managed object (Class, String, MethodType) use
// DexCachePair (GcRoot payload); ArtField and ArtMethod are native objects
// and use NativeDexCachePair (raw pointer payload).
using TypeDexCachePair = DexCachePair<Class>;
using TypeDexCacheType = std::atomic<TypeDexCachePair>;

using StringDexCachePair = DexCachePair<String>;
using StringDexCacheType = std::atomic<StringDexCachePair>;

using FieldDexCachePair = NativeDexCachePair<ArtField>;
using FieldDexCacheType = std::atomic<FieldDexCachePair>;

using MethodDexCachePair = NativeDexCachePair<ArtMethod>;
using MethodDexCacheType = std::atomic<MethodDexCachePair>;

using MethodTypeDexCachePair = DexCachePair<MethodType>;
using MethodTypeDexCacheType = std::atomic<MethodTypeDexCachePair>;
| 138 | |
Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 139 | // C++ mirror of java.lang.DexCache. |
| 140 | class MANAGED DexCache FINAL : public Object { |
Brian Carlstrom | 83db772 | 2011-08-26 17:32:56 -0700 | [diff] [blame] | 141 | public: |
Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 142 | // Size of java.lang.DexCache.class. |
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 143 | static uint32_t ClassSize(PointerSize pointer_size); |
Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 144 | |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 145 | // Size of type dex cache. Needs to be a power of 2 for entrypoint assumptions to hold. |
| 146 | static constexpr size_t kDexCacheTypeCacheSize = 1024; |
| 147 | static_assert(IsPowerOfTwo(kDexCacheTypeCacheSize), |
| 148 | "Type dex cache size is not a power of 2."); |
| 149 | |
Christina Wadsworth | bf44e0e | 2016-08-18 10:37:42 -0700 | [diff] [blame] | 150 | // Size of string dex cache. Needs to be a power of 2 for entrypoint assumptions to hold. |
| 151 | static constexpr size_t kDexCacheStringCacheSize = 1024; |
| 152 | static_assert(IsPowerOfTwo(kDexCacheStringCacheSize), |
| 153 | "String dex cache size is not a power of 2."); |
| 154 | |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 155 | // Size of field dex cache. Needs to be a power of 2 for entrypoint assumptions to hold. |
| 156 | static constexpr size_t kDexCacheFieldCacheSize = 1024; |
| 157 | static_assert(IsPowerOfTwo(kDexCacheFieldCacheSize), |
| 158 | "Field dex cache size is not a power of 2."); |
| 159 | |
Vladimir Marko | 07bfbac | 2017-07-06 14:55:02 +0100 | [diff] [blame] | 160 | // Size of method dex cache. Needs to be a power of 2 for entrypoint assumptions to hold. |
| 161 | static constexpr size_t kDexCacheMethodCacheSize = 1024; |
| 162 | static_assert(IsPowerOfTwo(kDexCacheMethodCacheSize), |
| 163 | "Method dex cache size is not a power of 2."); |
| 164 | |
Narayan Kamath | 25352fc | 2016-08-03 12:46:58 +0100 | [diff] [blame] | 165 | // Size of method type dex cache. Needs to be a power of 2 for entrypoint assumptions |
| 166 | // to hold. |
| 167 | static constexpr size_t kDexCacheMethodTypeCacheSize = 1024; |
| 168 | static_assert(IsPowerOfTwo(kDexCacheMethodTypeCacheSize), |
| 169 | "MethodType dex cache size is not a power of 2."); |
| 170 | |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 171 | static constexpr size_t StaticTypeSize() { |
| 172 | return kDexCacheTypeCacheSize; |
| 173 | } |
| 174 | |
Christina Wadsworth | bf44e0e | 2016-08-18 10:37:42 -0700 | [diff] [blame] | 175 | static constexpr size_t StaticStringSize() { |
| 176 | return kDexCacheStringCacheSize; |
| 177 | } |
| 178 | |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 179 | static constexpr size_t StaticArtFieldSize() { |
| 180 | return kDexCacheFieldCacheSize; |
| 181 | } |
| 182 | |
Vladimir Marko | 07bfbac | 2017-07-06 14:55:02 +0100 | [diff] [blame] | 183 | static constexpr size_t StaticMethodSize() { |
| 184 | return kDexCacheMethodCacheSize; |
| 185 | } |
| 186 | |
Narayan Kamath | 25352fc | 2016-08-03 12:46:58 +0100 | [diff] [blame] | 187 | static constexpr size_t StaticMethodTypeSize() { |
| 188 | return kDexCacheMethodTypeCacheSize; |
| 189 | } |
| 190 | |
Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 191 | // Size of an instance of java.lang.DexCache not including referenced values. |
| 192 | static constexpr uint32_t InstanceSize() { |
| 193 | return sizeof(DexCache); |
| 194 | } |
| 195 | |
Andreas Gampe | cc1b535 | 2016-12-01 16:58:38 -0800 | [diff] [blame] | 196 | static void InitializeDexCache(Thread* self, |
| 197 | ObjPtr<mirror::DexCache> dex_cache, |
| 198 | ObjPtr<mirror::String> location, |
| 199 | const DexFile* dex_file, |
| 200 | LinearAlloc* linear_alloc, |
| 201 | PointerSize image_pointer_size) |
| 202 | REQUIRES_SHARED(Locks::mutator_lock_) |
| 203 | REQUIRES(Locks::dex_lock_); |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 204 | |
Mathieu Chartier | 60bc39c | 2016-01-27 18:37:48 -0800 | [diff] [blame] | 205 | template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor> |
Christina Wadsworth | bf44e0e | 2016-08-18 10:37:42 -0700 | [diff] [blame] | 206 | void FixupStrings(StringDexCacheType* dest, const Visitor& visitor) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 207 | REQUIRES_SHARED(Locks::mutator_lock_); |
Mathieu Chartier | 4b00d34 | 2015-11-13 10:42:08 -0800 | [diff] [blame] | 208 | |
Mathieu Chartier | 60bc39c | 2016-01-27 18:37:48 -0800 | [diff] [blame] | 209 | template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor> |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 210 | void FixupResolvedTypes(TypeDexCacheType* dest, const Visitor& visitor) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 211 | REQUIRES_SHARED(Locks::mutator_lock_); |
Mathieu Chartier | 4b00d34 | 2015-11-13 10:42:08 -0800 | [diff] [blame] | 212 | |
Narayan Kamath | 7fe5658 | 2016-10-14 18:49:12 +0100 | [diff] [blame] | 213 | template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor> |
| 214 | void FixupResolvedMethodTypes(MethodTypeDexCacheType* dest, const Visitor& visitor) |
| 215 | REQUIRES_SHARED(Locks::mutator_lock_); |
| 216 | |
Orion Hodson | c069a30 | 2017-01-18 09:23:12 +0000 | [diff] [blame] | 217 | template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor> |
| 218 | void FixupResolvedCallSites(GcRoot<mirror::CallSite>* dest, const Visitor& visitor) |
| 219 | REQUIRES_SHARED(Locks::mutator_lock_); |
| 220 | |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 221 | String* GetLocation() REQUIRES_SHARED(Locks::mutator_lock_) { |
Ian Rogers | b0fa5dc | 2014-04-28 16:47:08 -0700 | [diff] [blame] | 222 | return GetFieldObject<String>(OFFSET_OF_OBJECT_MEMBER(DexCache, location_)); |
Brian Carlstrom | a663ea5 | 2011-08-19 23:33:41 -0700 | [diff] [blame] | 223 | } |
| 224 | |
buzbee | 5cd2180 | 2011-08-26 10:40:14 -0700 | [diff] [blame] | 225 | static MemberOffset StringsOffset() { |
Mathieu Chartier | 66f1925 | 2012-09-18 08:57:04 -0700 | [diff] [blame] | 226 | return OFFSET_OF_OBJECT_MEMBER(DexCache, strings_); |
buzbee | c5ef046 | 2011-08-25 18:44:49 -0700 | [diff] [blame] | 227 | } |
| 228 | |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 229 | static MemberOffset ResolvedTypesOffset() { |
| 230 | return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_types_); |
| 231 | } |
| 232 | |
Brian Carlstrom | 1caa2c2 | 2011-08-28 13:02:33 -0700 | [diff] [blame] | 233 | static MemberOffset ResolvedFieldsOffset() { |
Mathieu Chartier | 66f1925 | 2012-09-18 08:57:04 -0700 | [diff] [blame] | 234 | return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_fields_); |
buzbee | c5ef046 | 2011-08-25 18:44:49 -0700 | [diff] [blame] | 235 | } |
| 236 | |
Brian Carlstrom | 1caa2c2 | 2011-08-28 13:02:33 -0700 | [diff] [blame] | 237 | static MemberOffset ResolvedMethodsOffset() { |
Mathieu Chartier | 66f1925 | 2012-09-18 08:57:04 -0700 | [diff] [blame] | 238 | return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_methods_); |
buzbee | c5ef046 | 2011-08-25 18:44:49 -0700 | [diff] [blame] | 239 | } |
| 240 | |
Narayan Kamath | 25352fc | 2016-08-03 12:46:58 +0100 | [diff] [blame] | 241 | static MemberOffset ResolvedMethodTypesOffset() { |
| 242 | return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_method_types_); |
| 243 | } |
| 244 | |
Orion Hodson | c069a30 | 2017-01-18 09:23:12 +0000 | [diff] [blame] | 245 | static MemberOffset ResolvedCallSitesOffset() { |
| 246 | return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_call_sites_); |
| 247 | } |
| 248 | |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 249 | static MemberOffset NumStringsOffset() { |
| 250 | return OFFSET_OF_OBJECT_MEMBER(DexCache, num_strings_); |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 251 | } |
| 252 | |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 253 | static MemberOffset NumResolvedTypesOffset() { |
| 254 | return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_types_); |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 255 | } |
| 256 | |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 257 | static MemberOffset NumResolvedFieldsOffset() { |
| 258 | return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_fields_); |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 259 | } |
| 260 | |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 261 | static MemberOffset NumResolvedMethodsOffset() { |
| 262 | return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_methods_); |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 263 | } |
| 264 | |
Narayan Kamath | 25352fc | 2016-08-03 12:46:58 +0100 | [diff] [blame] | 265 | static MemberOffset NumResolvedMethodTypesOffset() { |
| 266 | return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_method_types_); |
| 267 | } |
| 268 | |
Orion Hodson | c069a30 | 2017-01-18 09:23:12 +0000 | [diff] [blame] | 269 | static MemberOffset NumResolvedCallSitesOffset() { |
| 270 | return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_call_sites_); |
| 271 | } |
| 272 | |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 273 | String* GetResolvedString(dex::StringIndex string_idx) ALWAYS_INLINE |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 274 | REQUIRES_SHARED(Locks::mutator_lock_); |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 275 | |
Andreas Gampe | 8a0128a | 2016-11-28 07:38:35 -0800 | [diff] [blame] | 276 | void SetResolvedString(dex::StringIndex string_idx, ObjPtr<mirror::String> resolved) ALWAYS_INLINE |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 277 | REQUIRES_SHARED(Locks::mutator_lock_); |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 278 | |
Mathieu Chartier | bb816d6 | 2016-09-07 10:17:46 -0700 | [diff] [blame] | 279 | // Clear a string for a string_idx, used to undo string intern transactions to make sure |
| 280 | // the string isn't kept live. |
Andreas Gampe | 8a0128a | 2016-11-28 07:38:35 -0800 | [diff] [blame] | 281 | void ClearString(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_); |
Mathieu Chartier | bb816d6 | 2016-09-07 10:17:46 -0700 | [diff] [blame] | 282 | |
Andreas Gampe | a5b09a6 | 2016-11-17 15:21:22 -0800 | [diff] [blame] | 283 | Class* GetResolvedType(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_); |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 284 | |
Andreas Gampe | a5b09a6 | 2016-11-17 15:21:22 -0800 | [diff] [blame] | 285 | void SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved) |
Mathieu Chartier | 31e8822 | 2016-10-14 18:43:19 -0700 | [diff] [blame] | 286 | REQUIRES_SHARED(Locks::mutator_lock_); |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 287 | |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 288 | void ClearResolvedType(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_); |
| 289 | |
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 290 | ALWAYS_INLINE ArtMethod* GetResolvedMethod(uint32_t method_idx, PointerSize ptr_size) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 291 | REQUIRES_SHARED(Locks::mutator_lock_); |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 292 | |
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 293 | ALWAYS_INLINE void SetResolvedMethod(uint32_t method_idx, |
| 294 | ArtMethod* resolved, |
| 295 | PointerSize ptr_size) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 296 | REQUIRES_SHARED(Locks::mutator_lock_); |
Vladimir Marko | 07bfbac | 2017-07-06 14:55:02 +0100 | [diff] [blame] | 297 | ALWAYS_INLINE void ClearResolvedMethod(uint32_t method_idx, PointerSize ptr_size) |
| 298 | REQUIRES_SHARED(Locks::mutator_lock_); |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 299 | |
Mathieu Chartier | c785344 | 2015-03-27 14:35:38 -0700 | [diff] [blame] | 300 | // Pointer sized variant, used for patching. |
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 301 | ALWAYS_INLINE ArtField* GetResolvedField(uint32_t idx, PointerSize ptr_size) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 302 | REQUIRES_SHARED(Locks::mutator_lock_); |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 303 | |
| 304 | // Pointer sized variant, used for patching. |
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 305 | ALWAYS_INLINE void SetResolvedField(uint32_t idx, ArtField* field, PointerSize ptr_size) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 306 | REQUIRES_SHARED(Locks::mutator_lock_); |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 307 | ALWAYS_INLINE void ClearResolvedField(uint32_t idx, PointerSize ptr_size) |
| 308 | REQUIRES_SHARED(Locks::mutator_lock_); |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 309 | |
Narayan Kamath | 25352fc | 2016-08-03 12:46:58 +0100 | [diff] [blame] | 310 | MethodType* GetResolvedMethodType(uint32_t proto_idx) REQUIRES_SHARED(Locks::mutator_lock_); |
| 311 | |
Orion Hodson | c069a30 | 2017-01-18 09:23:12 +0000 | [diff] [blame] | 312 | void SetResolvedMethodType(uint32_t proto_idx, MethodType* resolved) |
| 313 | REQUIRES_SHARED(Locks::mutator_lock_); |
| 314 | |
| 315 | CallSite* GetResolvedCallSite(uint32_t call_site_idx) REQUIRES_SHARED(Locks::mutator_lock_); |
| 316 | |
| 317 | // Attempts to bind |call_site_idx| to the call site |resolved|. The |
| 318 | // caller must use the return value in place of |resolved|. This is |
| 319 | // because multiple threads can invoke the bootstrap method each |
| 320 | // producing a call site, but the method handle invocation on the |
| 321 | // call site must be on a common agreed value. |
| 322 | CallSite* SetResolvedCallSite(uint32_t call_site_idx, CallSite* resolved) WARN_UNUSED |
| 323 | REQUIRES_SHARED(Locks::mutator_lock_); |
Narayan Kamath | 25352fc | 2016-08-03 12:46:58 +0100 | [diff] [blame] | 324 | |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 325 | StringDexCacheType* GetStrings() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) { |
Christina Wadsworth | bf44e0e | 2016-08-18 10:37:42 -0700 | [diff] [blame] | 326 | return GetFieldPtr64<StringDexCacheType*>(StringsOffset()); |
Brian Carlstrom | 1caa2c2 | 2011-08-28 13:02:33 -0700 | [diff] [blame] | 327 | } |
Brian Carlstrom | 83db772 | 2011-08-26 17:32:56 -0700 | [diff] [blame] | 328 | |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 329 | void SetStrings(StringDexCacheType* strings) ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) { |
Mathieu Chartier | fbc3108 | 2016-01-24 11:59:56 -0800 | [diff] [blame] | 330 | SetFieldPtr<false>(StringsOffset(), strings); |
| 331 | } |
| 332 | |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 333 | TypeDexCacheType* GetResolvedTypes() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) { |
| 334 | return GetFieldPtr<TypeDexCacheType*>(ResolvedTypesOffset()); |
Mathieu Chartier | 66f1925 | 2012-09-18 08:57:04 -0700 | [diff] [blame] | 335 | } |
| 336 | |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 337 | void SetResolvedTypes(TypeDexCacheType* resolved_types) |
Mathieu Chartier | fbc3108 | 2016-01-24 11:59:56 -0800 | [diff] [blame] | 338 | ALWAYS_INLINE |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 339 | REQUIRES_SHARED(Locks::mutator_lock_) { |
Mathieu Chartier | fbc3108 | 2016-01-24 11:59:56 -0800 | [diff] [blame] | 340 | SetFieldPtr<false>(ResolvedTypesOffset(), resolved_types); |
| 341 | } |
| 342 | |
Vladimir Marko | 07bfbac | 2017-07-06 14:55:02 +0100 | [diff] [blame] | 343 | MethodDexCacheType* GetResolvedMethods() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) { |
| 344 | return GetFieldPtr<MethodDexCacheType*>(ResolvedMethodsOffset()); |
Mathieu Chartier | 66f1925 | 2012-09-18 08:57:04 -0700 | [diff] [blame] | 345 | } |
| 346 | |
Vladimir Marko | 07bfbac | 2017-07-06 14:55:02 +0100 | [diff] [blame] | 347 | void SetResolvedMethods(MethodDexCacheType* resolved_methods) |
Mathieu Chartier | fbc3108 | 2016-01-24 11:59:56 -0800 | [diff] [blame] | 348 | ALWAYS_INLINE |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 349 | REQUIRES_SHARED(Locks::mutator_lock_) { |
Mathieu Chartier | fbc3108 | 2016-01-24 11:59:56 -0800 | [diff] [blame] | 350 | SetFieldPtr<false>(ResolvedMethodsOffset(), resolved_methods); |
| 351 | } |
| 352 | |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 353 | FieldDexCacheType* GetResolvedFields() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) { |
| 354 | return GetFieldPtr<FieldDexCacheType*>(ResolvedFieldsOffset()); |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 355 | } |
| 356 | |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 357 | void SetResolvedFields(FieldDexCacheType* resolved_fields) |
Mathieu Chartier | fbc3108 | 2016-01-24 11:59:56 -0800 | [diff] [blame] | 358 | ALWAYS_INLINE |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 359 | REQUIRES_SHARED(Locks::mutator_lock_) { |
Mathieu Chartier | fbc3108 | 2016-01-24 11:59:56 -0800 | [diff] [blame] | 360 | SetFieldPtr<false>(ResolvedFieldsOffset(), resolved_fields); |
| 361 | } |
| 362 | |
Narayan Kamath | 25352fc | 2016-08-03 12:46:58 +0100 | [diff] [blame] | 363 | MethodTypeDexCacheType* GetResolvedMethodTypes() |
| 364 | ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) { |
Narayan Kamath | 7fe5658 | 2016-10-14 18:49:12 +0100 | [diff] [blame] | 365 | return GetFieldPtr64<MethodTypeDexCacheType*>(ResolvedMethodTypesOffset()); |
Narayan Kamath | 25352fc | 2016-08-03 12:46:58 +0100 | [diff] [blame] | 366 | } |
| 367 | |
| 368 | void SetResolvedMethodTypes(MethodTypeDexCacheType* resolved_method_types) |
| 369 | ALWAYS_INLINE |
| 370 | REQUIRES_SHARED(Locks::mutator_lock_) { |
| 371 | SetFieldPtr<false>(ResolvedMethodTypesOffset(), resolved_method_types); |
| 372 | } |
| 373 | |
Orion Hodson | c069a30 | 2017-01-18 09:23:12 +0000 | [diff] [blame] | 374 | GcRoot<CallSite>* GetResolvedCallSites() |
| 375 | ALWAYS_INLINE |
| 376 | REQUIRES_SHARED(Locks::mutator_lock_) { |
| 377 | return GetFieldPtr<GcRoot<CallSite>*>(ResolvedCallSitesOffset()); |
| 378 | } |
| 379 | |
| 380 | void SetResolvedCallSites(GcRoot<CallSite>* resolved_call_sites) |
| 381 | ALWAYS_INLINE |
| 382 | REQUIRES_SHARED(Locks::mutator_lock_) { |
| 383 | SetFieldPtr<false>(ResolvedCallSitesOffset(), resolved_call_sites); |
| 384 | } |
| 385 | |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 386 | size_t NumStrings() REQUIRES_SHARED(Locks::mutator_lock_) { |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 387 | return GetField32(NumStringsOffset()); |
| 388 | } |
| 389 | |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 390 | size_t NumResolvedTypes() REQUIRES_SHARED(Locks::mutator_lock_) { |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 391 | return GetField32(NumResolvedTypesOffset()); |
| 392 | } |
| 393 | |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 394 | size_t NumResolvedMethods() REQUIRES_SHARED(Locks::mutator_lock_) { |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 395 | return GetField32(NumResolvedMethodsOffset()); |
| 396 | } |
| 397 | |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 398 | size_t NumResolvedFields() REQUIRES_SHARED(Locks::mutator_lock_) { |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 399 | return GetField32(NumResolvedFieldsOffset()); |
Mathieu Chartier | 66f1925 | 2012-09-18 08:57:04 -0700 | [diff] [blame] | 400 | } |
| 401 | |
Narayan Kamath | 25352fc | 2016-08-03 12:46:58 +0100 | [diff] [blame] | 402 | size_t NumResolvedMethodTypes() REQUIRES_SHARED(Locks::mutator_lock_) { |
| 403 | return GetField32(NumResolvedMethodTypesOffset()); |
| 404 | } |
| 405 | |
Orion Hodson | c069a30 | 2017-01-18 09:23:12 +0000 | [diff] [blame] | 406 | size_t NumResolvedCallSites() REQUIRES_SHARED(Locks::mutator_lock_) { |
| 407 | return GetField32(NumResolvedCallSitesOffset()); |
| 408 | } |
| 409 | |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 410 | const DexFile* GetDexFile() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) { |
Ian Rogers | b0fa5dc | 2014-04-28 16:47:08 -0700 | [diff] [blame] | 411 | return GetFieldPtr<const DexFile*>(OFFSET_OF_OBJECT_MEMBER(DexCache, dex_file_)); |
Mathieu Chartier | 66f1925 | 2012-09-18 08:57:04 -0700 | [diff] [blame] | 412 | } |
| 413 | |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 414 | void SetDexFile(const DexFile* dex_file) REQUIRES_SHARED(Locks::mutator_lock_) { |
Mathieu Chartier | 7617216 | 2016-01-26 14:54:06 -0800 | [diff] [blame] | 415 | SetFieldPtr<false>(OFFSET_OF_OBJECT_MEMBER(DexCache, dex_file_), dex_file); |
Brian Carlstrom | 4b620ff | 2011-09-11 01:11:01 -0700 | [diff] [blame] | 416 | } |
Brian Carlstrom | c4fa2c0 | 2011-08-21 03:00:12 -0700 | [diff] [blame] | 417 | |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 418 | void SetLocation(ObjPtr<String> location) REQUIRES_SHARED(Locks::mutator_lock_); |
Mathieu Chartier | 7617216 | 2016-01-26 14:54:06 -0800 | [diff] [blame] | 419 | |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 420 | // NOTE: Get/SetElementPtrSize() are intended for working with ArtMethod** and ArtField** |
| 421 | // provided by GetResolvedMethods/Fields() and ArtMethod::GetDexCacheResolvedMethods(), |
| 422 | // so they need to be public. |
| 423 | |
| 424 | template <typename PtrType> |
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 425 | static PtrType GetElementPtrSize(PtrType* ptr_array, size_t idx, PointerSize ptr_size); |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 426 | |
| 427 | template <typename PtrType> |
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 428 | static void SetElementPtrSize(PtrType* ptr_array, size_t idx, PtrType ptr, PointerSize ptr_size); |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 429 | |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 430 | template <typename T> |
| 431 | static NativeDexCachePair<T> GetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array, |
| 432 | size_t idx, |
| 433 | PointerSize ptr_size); |
| 434 | |
| 435 | template <typename T> |
| 436 | static void SetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array, |
| 437 | size_t idx, |
| 438 | NativeDexCachePair<T> pair, |
| 439 | PointerSize ptr_size); |
| 440 | |
Vladimir Marko | f25cc73 | 2017-03-16 16:18:15 +0000 | [diff] [blame] | 441 | uint32_t StringSlotIndex(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_); |
| 442 | uint32_t TypeSlotIndex(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_); |
| 443 | uint32_t FieldSlotIndex(uint32_t field_idx) REQUIRES_SHARED(Locks::mutator_lock_); |
Vladimir Marko | 07bfbac | 2017-07-06 14:55:02 +0100 | [diff] [blame] | 444 | uint32_t MethodSlotIndex(uint32_t method_idx) REQUIRES_SHARED(Locks::mutator_lock_); |
Vladimir Marko | f25cc73 | 2017-03-16 16:18:15 +0000 | [diff] [blame] | 445 | uint32_t MethodTypeSlotIndex(uint32_t proto_idx) REQUIRES_SHARED(Locks::mutator_lock_); |
| 446 | |
Brian Carlstrom | c4fa2c0 | 2011-08-21 03:00:12 -0700 | [diff] [blame] | 447 | private: |
Andreas Gampe | cc1b535 | 2016-12-01 16:58:38 -0800 | [diff] [blame] | 448 | void Init(const DexFile* dex_file, |
| 449 | ObjPtr<String> location, |
| 450 | StringDexCacheType* strings, |
| 451 | uint32_t num_strings, |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 452 | TypeDexCacheType* resolved_types, |
Andreas Gampe | cc1b535 | 2016-12-01 16:58:38 -0800 | [diff] [blame] | 453 | uint32_t num_resolved_types, |
Vladimir Marko | 07bfbac | 2017-07-06 14:55:02 +0100 | [diff] [blame] | 454 | MethodDexCacheType* resolved_methods, |
Andreas Gampe | cc1b535 | 2016-12-01 16:58:38 -0800 | [diff] [blame] | 455 | uint32_t num_resolved_methods, |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 456 | FieldDexCacheType* resolved_fields, |
Andreas Gampe | cc1b535 | 2016-12-01 16:58:38 -0800 | [diff] [blame] | 457 | uint32_t num_resolved_fields, |
Orion Hodson | c069a30 | 2017-01-18 09:23:12 +0000 | [diff] [blame] | 458 | MethodTypeDexCacheType* resolved_method_types, |
| 459 | uint32_t num_resolved_method_types, |
| 460 | GcRoot<CallSite>* resolved_call_sites, |
Vladimir Marko | 07bfbac | 2017-07-06 14:55:02 +0100 | [diff] [blame] | 461 | uint32_t num_resolved_call_sites) |
Andreas Gampe | cc1b535 | 2016-12-01 16:58:38 -0800 | [diff] [blame] | 462 | REQUIRES_SHARED(Locks::mutator_lock_); |
| 463 | |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 464 | // std::pair<> is not trivially copyable and as such it is unsuitable for atomic operations, |
| 465 | // so we use a custom pair class for loading and storing the NativeDexCachePair<>. |
| 466 | template <typename IntType> |
| 467 | struct PACKED(2 * sizeof(IntType)) ConversionPair { |
| 468 | ConversionPair(IntType f, IntType s) : first(f), second(s) { } |
| 469 | ConversionPair(const ConversionPair&) = default; |
| 470 | ConversionPair& operator=(const ConversionPair&) = default; |
| 471 | IntType first; |
| 472 | IntType second; |
| 473 | }; |
| 474 | using ConversionPair32 = ConversionPair<uint32_t>; |
| 475 | using ConversionPair64 = ConversionPair<uint64_t>; |
| 476 | |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 477 | // Visit instance fields of the dex cache as well as its associated arrays. |
Mathieu Chartier | fbc3108 | 2016-01-24 11:59:56 -0800 | [diff] [blame] | 478 | template <bool kVisitNativeRoots, |
| 479 | VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, |
| 480 | ReadBarrierOption kReadBarrierOption = kWithReadBarrier, |
| 481 | typename Visitor> |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 482 | void VisitReferences(ObjPtr<Class> klass, const Visitor& visitor) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 483 | REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_); |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 484 | |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 485 | // Due to lack of 16-byte atomics support, we use hand-crafted routines. |
Alexey Frunze | 279cfba | 2017-07-22 00:24:43 -0700 | [diff] [blame] | 486 | #if defined(__aarch64__) || defined(__mips__) |
| 487 | // 16-byte atomics are supported on aarch64, mips and mips64. |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 488 | ALWAYS_INLINE static ConversionPair64 AtomicLoadRelaxed16B( |
| 489 | std::atomic<ConversionPair64>* target) { |
| 490 | return target->load(std::memory_order_relaxed); |
| 491 | } |
| 492 | |
| 493 | ALWAYS_INLINE static void AtomicStoreRelease16B( |
| 494 | std::atomic<ConversionPair64>* target, ConversionPair64 value) { |
| 495 | target->store(value, std::memory_order_release); |
| 496 | } |
| 497 | #elif defined(__x86_64__) |
| 498 | ALWAYS_INLINE static ConversionPair64 AtomicLoadRelaxed16B( |
| 499 | std::atomic<ConversionPair64>* target) { |
| 500 | uint64_t first, second; |
| 501 | __asm__ __volatile__( |
| 502 | "lock cmpxchg16b (%2)" |
| 503 | : "=&a"(first), "=&d"(second) |
| 504 | : "r"(target), "a"(0), "d"(0), "b"(0), "c"(0) |
| 505 | : "cc"); |
| 506 | return ConversionPair64(first, second); |
| 507 | } |
| 508 | |
| 509 | ALWAYS_INLINE static void AtomicStoreRelease16B( |
| 510 | std::atomic<ConversionPair64>* target, ConversionPair64 value) { |
| 511 | uint64_t first, second; |
| 512 | __asm__ __volatile__ ( |
| 513 | "movq (%2), %%rax\n\t" |
| 514 | "movq 8(%2), %%rdx\n\t" |
| 515 | "1:\n\t" |
| 516 | "lock cmpxchg16b (%2)\n\t" |
| 517 | "jnz 1b" |
| 518 | : "=&a"(first), "=&d"(second) |
| 519 | : "r"(target), "b"(value.first), "c"(value.second) |
| 520 | : "cc"); |
| 521 | } |
| 522 | #else |
| 523 | static ConversionPair64 AtomicLoadRelaxed16B(std::atomic<ConversionPair64>* target); |
| 524 | static void AtomicStoreRelease16B(std::atomic<ConversionPair64>* target, ConversionPair64 value); |
| 525 | #endif |
| 526 | |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 527 | HeapReference<String> location_; |
Narayan Kamath | 6b2dc31 | 2017-03-14 13:26:12 +0000 | [diff] [blame] | 528 | // Number of elements in the call_sites_ array. Note that this appears here |
| 529 | // because of our packing logic for 32 bit fields. |
| 530 | uint32_t num_resolved_call_sites_; |
| 531 | |
Narayan Kamath | 25352fc | 2016-08-03 12:46:58 +0100 | [diff] [blame] | 532 | uint64_t dex_file_; // const DexFile* |
Orion Hodson | c069a30 | 2017-01-18 09:23:12 +0000 | [diff] [blame] | 533 | uint64_t resolved_call_sites_; // GcRoot<CallSite>* array with num_resolved_call_sites_ |
| 534 | // elements. |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 535 | uint64_t resolved_fields_; // std::atomic<FieldDexCachePair>*, array with |
| 536 | // num_resolved_fields_ elements. |
Narayan Kamath | 25352fc | 2016-08-03 12:46:58 +0100 | [diff] [blame] | 537 | uint64_t resolved_method_types_; // std::atomic<MethodTypeDexCachePair>* array with |
| 538 | // num_resolved_method_types_ elements. |
| 539 | uint64_t resolved_methods_; // ArtMethod*, array with num_resolved_methods_ elements. |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 540 | uint64_t resolved_types_; // TypeDexCacheType*, array with num_resolved_types_ elements. |
Narayan Kamath | 25352fc | 2016-08-03 12:46:58 +0100 | [diff] [blame] | 541 | uint64_t strings_; // std::atomic<StringDexCachePair>*, array with num_strings_ |
| 542 | // elements. |
| 543 | |
| 544 | uint32_t num_resolved_fields_; // Number of elements in the resolved_fields_ array. |
| 545 | uint32_t num_resolved_method_types_; // Number of elements in the resolved_method_types_ array. |
| 546 | uint32_t num_resolved_methods_; // Number of elements in the resolved_methods_ array. |
| 547 | uint32_t num_resolved_types_; // Number of elements in the resolved_types_ array. |
| 548 | uint32_t num_strings_; // Number of elements in the strings_ array. |
Brian Carlstrom | 83db772 | 2011-08-26 17:32:56 -0700 | [diff] [blame] | 549 | |
Brian Carlstrom | 7934ac2 | 2013-07-26 10:54:15 -0700 | [diff] [blame] | 550 | friend struct art::DexCacheOffsets; // for verifying offset information |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 551 | friend class Object; // For VisitReferences |
Brian Carlstrom | c4fa2c0 | 2011-08-21 03:00:12 -0700 | [diff] [blame] | 552 | DISALLOW_IMPLICIT_CONSTRUCTORS(DexCache); |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 553 | }; |
| 554 | |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 555 | } // namespace mirror |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 556 | } // namespace art |
| 557 | |
Brian Carlstrom | fc0e321 | 2013-07-17 14:40:12 -0700 | [diff] [blame] | 558 | #endif // ART_RUNTIME_MIRROR_DEX_CACHE_H_ |