Hiroshi Yamauchi | 624468c | 2014-03-31 15:14:47 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2014 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #ifndef ART_RUNTIME_READ_BARRIER_H_ |
| 18 | #define ART_RUNTIME_READ_BARRIER_H_ |
| 19 | |
Andreas Gampe | 5794381 | 2017-12-06 21:39:13 -0800 | [diff] [blame] | 20 | #include <android-base/logging.h> |
| 21 | |
Hiroshi Yamauchi | 800ac2d | 2014-04-02 17:32:54 -0700 | [diff] [blame] | 22 | #include "base/macros.h" |
Andreas Gampe | 8cf9cb3 | 2017-07-19 09:28:38 -0700 | [diff] [blame] | 23 | #include "base/mutex.h" |
Andreas Gampe | dcc528d | 2017-12-07 13:37:10 -0800 | [diff] [blame] | 24 | #include "base/runtime_debug.h" |
Hiroshi Yamauchi | 3f64f25 | 2015-06-12 18:35:06 -0700 | [diff] [blame] | 25 | #include "gc_root.h" |
Hiroshi Yamauchi | 2cd334a | 2015-01-09 14:03:35 -0800 | [diff] [blame] | 26 | #include "jni.h" |
Mathieu Chartier | bb87e0f | 2015-04-03 11:21:55 -0700 | [diff] [blame] | 27 | #include "mirror/object_reference.h" |
Hiroshi Yamauchi | 800ac2d | 2014-04-02 17:32:54 -0700 | [diff] [blame] | 28 | #include "offsets.h" |
Andreas Gampe | 217488a | 2017-09-18 08:34:42 -0700 | [diff] [blame] | 29 | #include "read_barrier_config.h" |
Hiroshi Yamauchi | 624468c | 2014-03-31 15:14:47 -0700 | [diff] [blame] | 30 | |
Hiroshi Yamauchi | 800ac2d | 2014-04-02 17:32:54 -0700 | [diff] [blame] | 31 | namespace art { |
| 32 | namespace mirror { |
Igor Murashkin | 2ffb703 | 2017-11-08 13:35:21 -0800 | [diff] [blame] | 33 | class Object; |
| 34 | template<typename MirrorType> class HeapReference; |
Hiroshi Yamauchi | 800ac2d | 2014-04-02 17:32:54 -0700 | [diff] [blame] | 35 | } // namespace mirror |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 36 | class ArtMethod; |
Hiroshi Yamauchi | 624468c | 2014-03-31 15:14:47 -0700 | [diff] [blame] | 37 | |
// Static entry points implementing the read barrier used by the garbage
// collector (primarily the concurrent copying collector). Definitions live in
// read_barrier.cc / read_barrier-inl.h; this header only declares the API.
// All methods that touch heap references require the mutator lock to be held
// (shared) by the calling thread, as expressed by REQUIRES_SHARED below.
class ReadBarrier {
 public:
  // Enable the to-space invariant checks. This is slow and happens very often. Do not enable in
  // fast-debug environment.
  DECLARE_RUNTIME_DEBUG_FLAG(kEnableToSpaceInvariantChecks);

  // Enable the read barrier checks. This is slow and happens very often. Do not enable in
  // fast-debug environment.
  DECLARE_RUNTIME_DEBUG_FLAG(kEnableReadBarrierInvariantChecks);

  // Return the reference at ref_addr, invoking read barrier as appropriate.
  // Ref_addr is an address within obj (the field at `offset`).
  // It's up to the implementation whether the given field gets updated whereas the return value
  // must be an updated reference unless kAlwaysUpdateField is true.
  template <typename MirrorType,
            bool kIsVolatile,
            ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
            bool kAlwaysUpdateField = false>
  ALWAYS_INLINE static MirrorType* Barrier(
      mirror::Object* obj, MemberOffset offset, mirror::HeapReference<MirrorType>* ref_addr)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Read barrier for a GC root stored as a raw mirror pointer.
  // It's up to the implementation whether the given root gets updated
  // whereas the return value must be an updated reference.
  template <typename MirrorType, ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE static MirrorType* BarrierForRoot(MirrorType** root,
                                                  GcRootSource* gc_root_source = nullptr)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Read barrier for a GC root stored as a compressed reference.
  // It's up to the implementation whether the given root gets updated
  // whereas the return value must be an updated reference.
  template <typename MirrorType, ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE static MirrorType* BarrierForRoot(mirror::CompressedReference<MirrorType>* root,
                                                  GcRootSource* gc_root_source = nullptr)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return the mirror Object if it is marked, or null if not.
  template <typename MirrorType>
  ALWAYS_INLINE static MirrorType* IsMarked(MirrorType* ref)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Whether the runtime is still starting up (defined in read_barrier.cc;
  // used to skip checks that are not meaningful during startup).
  static bool IsDuringStartup();

  // Assert that `ref` satisfies the to-space invariant, without the holder object.
  // Delegates to the holder-object overload with a null holder.
  static void AssertToSpaceInvariant(mirror::Object* ref)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    AssertToSpaceInvariant(nullptr, MemberOffset(0), ref);
  }
  // With the holder object `obj` and the field `offset` the reference was loaded from,
  // for better diagnostics on failure.
  static void AssertToSpaceInvariant(mirror::Object* obj, MemberOffset offset,
                                     mirror::Object* ref)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // With GcRootSource describing where the root came from, for better diagnostics.
  static void AssertToSpaceInvariant(GcRootSource* gc_root_source, mirror::Object* ref)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Without the holder object, and only with the read barrier configuration (no-op otherwise).
  static void MaybeAssertToSpaceInvariant(mirror::Object* ref)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (kUseReadBarrier) {
      AssertToSpaceInvariant(ref);
    }
  }

  // Mark `obj` through the collector. Deliberately not ALWAYS_INLINE:
  // ALWAYS_INLINE on this caused a performance regression b/26744236.
  static mirror::Object* Mark(mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_);

  // The encoded non-gray read barrier state (see kNonGrayState below).
  static constexpr uint32_t NonGrayState() {
    return kNonGrayState;
  }
  // The encoded gray read barrier state (see kGrayState below).
  static constexpr uint32_t GrayState() {
    return kGrayState;
  }

  // Return whether `obj`'s read barrier state is gray.
  // *fake_address_dependency will be set to 0. It should be bitwise-or'ed with the address of
  // the subsequent load to create an artificial address dependency, which prevents the hardware
  // from reordering the read barrier bit load before the subsequent object reference load
  // (from one of `obj`'s fields).
  ALWAYS_INLINE static bool IsGray(mirror::Object* obj, uintptr_t* fake_address_dependency)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return whether `obj`'s read barrier state is gray.
  // This uses a load-acquire to load the read barrier bit internally to prevent the reordering of
  // the read barrier bit load and the subsequent load.
  ALWAYS_INLINE static bool IsGray(mirror::Object* obj)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Whether rb_state is one of the two valid encoded states (non-gray or gray).
  static bool IsValidReadBarrierState(uint32_t rb_state) {
    return rb_state == kNonGrayState || rb_state == kGrayState;
  }

 private:
  static constexpr uint32_t kNonGrayState = 0x0;  // White (not marked) or black (marked through).
  static constexpr uint32_t kGrayState = 0x1;     // Marked, but not marked through. On mark stack.
  static constexpr uint32_t kRBStateMask = 0x1;   // The low bits for non-gray|gray.
};
Hiroshi Yamauchi | 624468c | 2014-03-31 15:14:47 -0700 | [diff] [blame] | 133 | |
Hiroshi Yamauchi | 800ac2d | 2014-04-02 17:32:54 -0700 | [diff] [blame] | 134 | } // namespace art |
Hiroshi Yamauchi | 624468c | 2014-03-31 15:14:47 -0700 | [diff] [blame] | 135 | |
| 136 | #endif // ART_RUNTIME_READ_BARRIER_H_ |