blob: 553f3d64187fc25bfccecea2997ddd98cd117388 [file] [log] [blame]
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_RUNTIME_GC_ROOT_H_
18#define ART_RUNTIME_GC_ROOT_H_
19
Andreas Gampe7fbc4a52018-11-28 08:26:47 -080020#include "base/locks.h" // For Locks::mutator_lock_.
Mathieu Chartierbad02672014-08-25 13:08:22 -070021#include "base/macros.h"
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -070022#include "mirror/object_reference.h"
Andreas Gampe217488a2017-09-18 08:34:42 -070023#include "read_barrier_option.h"
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -070024
25namespace art {
Hiroshi Yamauchi3f64f252015-06-12 18:35:06 -070026class ArtField;
27class ArtMethod;
Andreas Gampec73cb642017-02-22 10:11:30 -080028template<class MirrorType> class ObjPtr;
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -070029
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -080030namespace mirror {
31class Object;
32} // namespace mirror
33
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -070034template <size_t kBufferSize>
35class BufferedRootVisitor;
36
// Default buffer capacity for BufferedRootVisitor.
// Dependent on pointer size so that we don't have frames that are too big on 64 bit.
// constexpr (not just const) makes the compile-time nature explicit and usable
// in constant expressions; the file already relies on C++11 features.
static constexpr size_t kDefaultBufferedRootCount = 1024 / sizeof(void*);
39
// Source category of a GC root, reported to root visitors (e.g. by hprof).
// NOTE(review): kRootUnknown is pinned to 0 and several entries are consumed by
// HPROF's HprofHeapTag conversion — the enumerator order/values are part of the
// interface; do not reorder.
enum RootType {
  kRootUnknown = 0,
  kRootJNIGlobal,
  kRootJNILocal,
  kRootJavaFrame,
  kRootNativeStack,
  kRootStickyClass,
  kRootThreadBlock,
  kRootMonitorUsed,
  kRootThreadObject,
  kRootInternedString,
  kRootFinalizing,  // used for HPROF's conversion to HprofHeapTag
  kRootDebugger,
  kRootReferenceCleanup,  // used for HPROF's conversion to HprofHeapTag
  kRootVMInternal,
  kRootJNIMonitor,
};
// Pretty-printer for RootType (defined elsewhere).
std::ostream& operator<<(std::ostream& os, RootType root_type);
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -080058
Mathieu Chartierd3ed9a32015-04-10 14:23:35 -070059// Only used by hprof. thread_id_ and type_ are only used by hprof.
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -080060class RootInfo {
61 public:
62 // Thread id 0 is for non thread roots.
63 explicit RootInfo(RootType type, uint32_t thread_id = 0)
64 : type_(type), thread_id_(thread_id) {
65 }
Andreas Gampe758a8012015-04-03 21:28:42 -070066 RootInfo(const RootInfo&) = default;
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -080067 virtual ~RootInfo() {
68 }
69 RootType GetType() const {
70 return type_;
71 }
72 uint32_t GetThreadId() const {
73 return thread_id_;
74 }
75 virtual void Describe(std::ostream& os) const {
76 os << "Type=" << type_ << " thread_id=" << thread_id_;
77 }
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -070078 std::string ToString() const;
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -080079
80 private:
81 const RootType type_;
82 const uint32_t thread_id_;
83};
84
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -070085inline std::ostream& operator<<(std::ostream& os, const RootInfo& root_info) {
86 root_info.Describe(os);
87 return os;
88}
89
// Not all combinations of flags are valid. You may not visit all roots as well as the new roots
// (no logical reason to do this). You also may not start logging new roots and stop logging new
// roots (also no logical reason to do this).
//
// The precise flag ensures that more metadata is supplied. An example is vreg data for compiled
// method frames.
// NOTE(review): these are bit flags combined with bitwise-or; the underlying
// type is uint8_t, so (1 << 7) is the last available bit.
enum VisitRootFlags : uint8_t {
  kVisitRootFlagAllRoots = (1 << 0),
  kVisitRootFlagNewRoots = (1 << 1),
  kVisitRootFlagStartLoggingNewRoots = (1 << 2),
  kVisitRootFlagStopLoggingNewRoots = (1 << 3),
  kVisitRootFlagClearRootLog = (1 << 4),
  kVisitRootFlagClassLoader = (1 << 5),
  // There is no (1 << 6).
  kVisitRootFlagPrecise = (1 << 7),
};
106
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700107class RootVisitor {
108 public:
109 virtual ~RootVisitor() { }
110
Mathieu Chartierd3ed9a32015-04-10 14:23:35 -0700111 // Single root version, not overridable.
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -0700112 ALWAYS_INLINE void VisitRoot(mirror::Object** root, const RootInfo& info)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700113 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -0700114 VisitRoots(&root, 1, info);
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700115 }
116
Mathieu Chartierd3ed9a32015-04-10 14:23:35 -0700117 // Single root version, not overridable.
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -0700118 ALWAYS_INLINE void VisitRootIfNonNull(mirror::Object** root, const RootInfo& info)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700119 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -0700120 if (*root != nullptr) {
121 VisitRoot(root, info);
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700122 }
123 }
124
125 virtual void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700126 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700127
128 virtual void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
129 const RootInfo& info)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700130 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700131};
132
133// Only visits roots one at a time, doesn't handle updating roots. Used when performance isn't
134// critical.
135class SingleRootVisitor : public RootVisitor {
136 private:
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100137 void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info) override
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700138 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700139 for (size_t i = 0; i < count; ++i) {
140 VisitRoot(*roots[i], info);
141 }
142 }
143
144 void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100145 const RootInfo& info) override
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700146 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700147 for (size_t i = 0; i < count; ++i) {
148 VisitRoot(roots[i]->AsMirrorPtr(), info);
149 }
150 }
151
152 virtual void VisitRoot(mirror::Object* root, const RootInfo& info) = 0;
153};
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -0800154
Hiroshi Yamauchi3f64f252015-06-12 18:35:06 -0700155class GcRootSource {
156 public:
157 GcRootSource()
158 : field_(nullptr), method_(nullptr) {
159 }
160 explicit GcRootSource(ArtField* field)
161 : field_(field), method_(nullptr) {
162 }
163 explicit GcRootSource(ArtMethod* method)
164 : field_(nullptr), method_(method) {
165 }
166 ArtField* GetArtField() const {
167 return field_;
168 }
169 ArtMethod* GetArtMethod() const {
170 return method_;
171 }
172 bool HasArtField() const {
173 return field_ != nullptr;
174 }
175 bool HasArtMethod() const {
176 return method_ != nullptr;
177 }
178
179 private:
180 ArtField* const field_;
181 ArtMethod* const method_;
182
183 DISALLOW_COPY_AND_ASSIGN(GcRootSource);
184};
185
// A GC-visible root slot holding a (compressed) reference to a mirror object.
// Reads go through Read(), which can apply a read barrier; raw slot access for
// visitors bypasses the barrier via AddressWithoutBarrier().
template<class MirrorType>
class GcRoot {
 public:
  // Returns the referent. kReadBarrierOption selects whether a read barrier is
  // applied; gc_root_source optionally identifies the field/method holding
  // this root. Defined out of line.
  template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE MirrorType* Read(GcRootSource* gc_root_source = nullptr) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Reports this root (which must be non-null) to the visitor. root_ is
  // mutable and passed by non-const pointer, so the visitor may update the
  // slot in place; it must not null it (checked by the trailing DCHECK).
  void VisitRoot(RootVisitor* visitor, const RootInfo& info) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsNull());
    mirror::CompressedReference<mirror::Object>* roots[1] = { &root_ };
    visitor->VisitRoots(roots, 1u, info);
    DCHECK(!IsNull());
  }

  // Like VisitRoot(), but silently skips a null root.
  void VisitRootIfNonNull(RootVisitor* visitor, const RootInfo& info) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsNull()) {
      VisitRoot(visitor, info);
    }
  }

  // Raw access to the slot, bypassing the read barrier. For root visitors.
  ALWAYS_INLINE mirror::CompressedReference<mirror::Object>* AddressWithoutBarrier() {
    return &root_;
  }

  ALWAYS_INLINE bool IsNull() const {
    // It's safe to null-check it without a read barrier.
    return root_.IsNull();
  }

  // Default constructor leaves root_ default-initialized; the converting
  // constructors (defined out of line) require the mutator lock.
  ALWAYS_INLINE GcRoot() {}
  explicit ALWAYS_INLINE GcRoot(MirrorType* ref)
      REQUIRES_SHARED(Locks::mutator_lock_);
  explicit ALWAYS_INLINE GcRoot(ObjPtr<MirrorType> ref)
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  // Root visitors take pointers to root_ and place them in CompressedReference** arrays. We use a
  // CompressedReference<mirror::Object> here since it violates strict aliasing requirements to
  // cast CompressedReference<MirrorType>* to CompressedReference<mirror::Object>*.
  // mutable: VisitRoot() is const but hands out a non-const pointer so
  // visitors can update the root.
  mutable mirror::CompressedReference<mirror::Object> root_;

  // BufferedRootVisitor reaches into root_ via AddressWithoutBarrier().
  template <size_t kBufferSize> friend class BufferedRootVisitor;
};
231
// Simple data structure for buffered root visiting to avoid virtual dispatch overhead. Currently
// only for CompressedReferences since these are more common than the Object** roots which are only
// for thread local roots.
template <size_t kBufferSize>
class BufferedRootVisitor {
 public:
  BufferedRootVisitor(RootVisitor* visitor, const RootInfo& root_info)
      : visitor_(visitor), root_info_(root_info), buffer_pos_(0) {
  }

  // Flushes whatever is still buffered; requires the mutator lock to be held
  // at destruction time (Flush() is REQUIRES_SHARED).
  ~BufferedRootVisitor() {
    Flush();
  }

  // Buffers the root if non-null (GcRoot overload).
  template <class MirrorType>
  ALWAYS_INLINE void VisitRootIfNonNull(GcRoot<MirrorType>& root)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!root.IsNull()) {
      VisitRoot(root);
    }
  }

  // Buffers the root if non-null (raw CompressedReference overload).
  template <class MirrorType>
  ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<MirrorType>* root)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!root->IsNull()) {
      VisitRoot(root);
    }
  }

  template <class MirrorType>
  void VisitRoot(GcRoot<MirrorType>& root) REQUIRES_SHARED(Locks::mutator_lock_) {
    VisitRoot(root.AddressWithoutBarrier());
  }

  // Appends the root to the buffer, flushing first if the buffer is full.
  template <class MirrorType>
  void VisitRoot(mirror::CompressedReference<MirrorType>* root)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(buffer_pos_ >= kBufferSize)) {
      Flush();
    }
    roots_[buffer_pos_++] = root;
  }

  // Hands all buffered roots to the underlying visitor in one virtual call and
  // resets the buffer. Called with buffer_pos_ == 0 this still makes the
  // (harmless) zero-count VisitRoots call.
  void Flush() REQUIRES_SHARED(Locks::mutator_lock_) {
    visitor_->VisitRoots(roots_, buffer_pos_, root_info_);
    buffer_pos_ = 0;
  }

 private:
  RootVisitor* const visitor_;
  RootInfo root_info_;
  // Fixed-size buffer; kBufferSize bounds the stack frame (see
  // kDefaultBufferedRootCount).
  mirror::CompressedReference<mirror::Object>* roots_[kBufferSize];
  size_t buffer_pos_;
};
287
// Adapter with the same visiting interface as BufferedRootVisitor but no
// buffering: each root is forwarded to the underlying RootVisitor immediately,
// one virtual call per root.
class UnbufferedRootVisitor {
 public:
  UnbufferedRootVisitor(RootVisitor* visitor, const RootInfo& root_info)
      : visitor_(visitor), root_info_(root_info) {}

  // Visits the root if non-null (GcRoot overload).
  template <class MirrorType>
  ALWAYS_INLINE void VisitRootIfNonNull(GcRoot<MirrorType>& root) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!root.IsNull()) {
      VisitRoot(root);
    }
  }

  // Visits the root if non-null (raw CompressedReference overload).
  template <class MirrorType>
  ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<MirrorType>* root) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!root->IsNull()) {
      VisitRoot(root);
    }
  }

  template <class MirrorType>
  void VisitRoot(GcRoot<MirrorType>& root) const REQUIRES_SHARED(Locks::mutator_lock_) {
    VisitRoot(root.AddressWithoutBarrier());
  }

  // Forwards a single root as a one-element batch to the underlying visitor.
  template <class MirrorType>
  void VisitRoot(mirror::CompressedReference<MirrorType>* root) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    visitor_->VisitRoots(&root, 1, root_info_);
  }

 private:
  RootVisitor* const visitor_;
  RootInfo root_info_;
};
324
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -0700325} // namespace art
326
327#endif // ART_RUNTIME_GC_ROOT_H_