blob: 3aa9e5221df28789b264968506272856d03ffd9d [file] [log] [blame]
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_RUNTIME_HANDLE_SCOPE_INL_H_
18#define ART_RUNTIME_HANDLE_SCOPE_INL_H_
19
Ian Rogers22d5e732014-07-15 22:23:51 -070020#include "handle_scope.h"
Mathieu Chartiereb8167a2014-05-07 15:43:14 -070021
Mathieu Chartiered150002015-08-28 11:16:54 -070022#include "base/mutex.h"
Mathieu Chartiereb8167a2014-05-07 15:43:14 -070023#include "handle.h"
Andreas Gampea1ffdba2019-01-04 16:08:51 -080024#include "handle_wrapper.h"
Vladimir Markoabedfca2019-05-23 14:07:47 +010025#include "mirror/object_reference-inl.h"
Mathieu Chartier0795f232016-09-27 18:43:30 -070026#include "obj_ptr-inl.h"
Andreas Gampeb486a982017-06-01 13:45:54 -070027#include "thread-current-inl.h"
Andreas Gampe90b936d2017-01-31 08:58:55 -080028#include "verify_object.h"
Mathieu Chartiereb8167a2014-05-07 15:43:14 -070029
30namespace art {
31
// Constructs a fixed-size scope chained onto `link` and pre-fills every slot
// with `fill_value` (typically null) so no slot holds a stale reference.
template<size_t kNumReferences>
inline FixedSizeHandleScope<kNumReferences>::FixedSizeHandleScope(BaseHandleScope* link,
                                                                  ObjPtr<mirror::Object> fill_value)
    : HandleScope(link, kNumReferences) {
  // Handle scopes may only be manipulated while sharing the mutator lock.
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
  }
  static_assert(kNumReferences >= 1, "FixedSizeHandleScope must contain at least 1 reference");
  // Check that the inline storage is where GetReferences() expects it to be.
  DCHECK_EQ(&storage_[0], GetReferences());  // TODO: Figure out how to use a compile assert.
  for (size_t i = 0; i < kNumReferences; ++i) {
    SetReference(i, fill_value);
  }
}
45
// Constructs a stack-allocated scope and pushes it onto `self`'s handle scope
// stack, linking it to the previous top scope.
template<size_t kNumReferences>
inline StackHandleScope<kNumReferences>::StackHandleScope(Thread* self,
                                                          ObjPtr<mirror::Object> fill_value)
    : FixedSizeHandleScope<kNumReferences>(self->GetTopHandleScope(), fill_value),
      self_(self) {
  // The scope must be created on the thread that owns it.
  DCHECK_EQ(self, Thread::Current());
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
  }
  self_->PushHandleScope(this);
}
57
// Pops this scope from the owning thread. Scopes are strictly LIFO: this one
// must be the current top of the thread's handle scope stack.
template<size_t kNumReferences>
inline StackHandleScope<kNumReferences>::~StackHandleScope() {
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
  }
  BaseHandleScope* top_handle_scope = self_->PopHandleScope();
  DCHECK_EQ(top_handle_scope, this);
}
66
Mathieu Chartier3e0acf62015-01-08 09:41:25 -080067inline size_t HandleScope::SizeOf(uint32_t num_references) {
68 size_t header_size = sizeof(HandleScope);
69 size_t data_size = sizeof(StackReference<mirror::Object>) * num_references;
70 return header_size + data_size;
71}
72
Andreas Gampe542451c2016-07-26 09:02:02 -070073inline size_t HandleScope::SizeOf(PointerSize pointer_size, uint32_t num_references) {
Mathieu Chartier3e0acf62015-01-08 09:41:25 -080074 // Assume that the layout is packed.
Andreas Gampe542451c2016-07-26 09:02:02 -070075 size_t header_size = ReferencesOffset(pointer_size);
Mathieu Chartier3e0acf62015-01-08 09:41:25 -080076 size_t data_size = sizeof(StackReference<mirror::Object>) * num_references;
77 return header_size + data_size;
78}
79
Vladimir Markoabedfca2019-05-23 14:07:47 +010080inline ObjPtr<mirror::Object> HandleScope::GetReference(size_t i) const {
Mathieu Chartiere8a3c572016-10-11 16:52:17 -070081 DCHECK_LT(i, NumberOfReferences());
Mathieu Chartiered150002015-08-28 11:16:54 -070082 if (kDebugLocking) {
83 Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
84 }
Mathieu Chartier3e0acf62015-01-08 09:41:25 -080085 return GetReferences()[i].AsMirrorPtr();
86}
87
88inline Handle<mirror::Object> HandleScope::GetHandle(size_t i) {
Mathieu Chartiere8a3c572016-10-11 16:52:17 -070089 DCHECK_LT(i, NumberOfReferences());
Mathieu Chartier3e0acf62015-01-08 09:41:25 -080090 return Handle<mirror::Object>(&GetReferences()[i]);
91}
92
93inline MutableHandle<mirror::Object> HandleScope::GetMutableHandle(size_t i) {
Mathieu Chartiere8a3c572016-10-11 16:52:17 -070094 DCHECK_LT(i, NumberOfReferences());
Mathieu Chartier3e0acf62015-01-08 09:41:25 -080095 return MutableHandle<mirror::Object>(&GetReferences()[i]);
96}
97
Vladimir Markoabedfca2019-05-23 14:07:47 +010098inline void HandleScope::SetReference(size_t i, ObjPtr<mirror::Object> object) {
Mathieu Chartiered150002015-08-28 11:16:54 -070099 if (kDebugLocking) {
100 Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
101 }
Mathieu Chartiere8a3c572016-10-11 16:52:17 -0700102 DCHECK_LT(i, NumberOfReferences());
Mathieu Chartier3e0acf62015-01-08 09:41:25 -0800103 GetReferences()[i].Assign(object);
104}
105
106inline bool HandleScope::Contains(StackReference<mirror::Object>* handle_scope_entry) const {
107 // A HandleScope should always contain something. One created by the
108 // jni_compiler should have a jobject/jclass as a native method is
109 // passed in a this pointer or a class
Mathieu Chartiere8a3c572016-10-11 16:52:17 -0700110 DCHECK_GT(NumberOfReferences(), 0U);
Mathieu Chartier3e0acf62015-01-08 09:41:25 -0800111 return &GetReferences()[0] <= handle_scope_entry &&
112 handle_scope_entry <= &GetReferences()[number_of_references_ - 1];
113}
114
Andreas Gampea1ffdba2019-01-04 16:08:51 -0800115template <typename Visitor>
116inline void HandleScope::VisitRoots(Visitor& visitor) {
117 for (size_t i = 0, count = NumberOfReferences(); i < count; ++i) {
118 // GetReference returns a pointer to the stack reference within the handle scope. If this
119 // needs to be updated, it will be done by the root visitor.
120 visitor.VisitRootIfNonNull(GetHandle(i).GetReference());
121 }
122}
123
Mathieu Chartier3e0acf62015-01-08 09:41:25 -0800124template<size_t kNumReferences> template<class T>
Mathieu Chartiere8a3c572016-10-11 16:52:17 -0700125inline MutableHandle<T> FixedSizeHandleScope<kNumReferences>::NewHandle(T* object) {
Vladimir Markoabedfca2019-05-23 14:07:47 +0100126 return NewHandle(ObjPtr<T>(object));
Mathieu Chartier3e0acf62015-01-08 09:41:25 -0800127}
128
Andreas Gampec73cb642017-02-22 10:11:30 -0800129template<size_t kNumReferences> template<class MirrorType>
Mathieu Chartiere8a3c572016-10-11 16:52:17 -0700130inline MutableHandle<MirrorType> FixedSizeHandleScope<kNumReferences>::NewHandle(
Andreas Gampec73cb642017-02-22 10:11:30 -0800131 ObjPtr<MirrorType> object) {
Vladimir Markoabedfca2019-05-23 14:07:47 +0100132 SetReference(pos_, object);
133 MutableHandle<MirrorType> h(GetHandle<MirrorType>(pos_));
134 ++pos_;
135 return h;
Mathieu Chartier0795f232016-09-27 18:43:30 -0700136}
137
Mathieu Chartier3e0acf62015-01-08 09:41:25 -0800138template<size_t kNumReferences> template<class T>
Mathieu Chartiere8a3c572016-10-11 16:52:17 -0700139inline HandleWrapper<T> FixedSizeHandleScope<kNumReferences>::NewHandleWrapper(T** object) {
Mathieu Chartier0795f232016-09-27 18:43:30 -0700140 return HandleWrapper<T>(object, NewHandle(*object));
141}
142
143template<size_t kNumReferences> template<class T>
Mathieu Chartiere8a3c572016-10-11 16:52:17 -0700144inline HandleWrapperObjPtr<T> FixedSizeHandleScope<kNumReferences>::NewHandleWrapper(
Mathieu Chartier0795f232016-09-27 18:43:30 -0700145 ObjPtr<T>* object) {
146 return HandleWrapperObjPtr<T>(object, NewHandle(*object));
Mathieu Chartier3e0acf62015-01-08 09:41:25 -0800147}
148
149template<size_t kNumReferences>
Vladimir Markoabedfca2019-05-23 14:07:47 +0100150inline void FixedSizeHandleScope<kNumReferences>::SetReference(size_t i,
151 ObjPtr<mirror::Object> object) {
Mathieu Chartiered150002015-08-28 11:16:54 -0700152 if (kDebugLocking) {
153 Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
154 }
Mathieu Chartier3e0acf62015-01-08 09:41:25 -0800155 DCHECK_LT(i, kNumReferences);
156 VerifyObject(object);
157 GetReferences()[i].Assign(object);
158}
159
Mathieu Chartiere8a3c572016-10-11 16:52:17 -0700160// Number of references contained within this handle scope.
161inline uint32_t BaseHandleScope::NumberOfReferences() const {
162 return LIKELY(!IsVariableSized())
163 ? AsHandleScope()->NumberOfReferences()
164 : AsVariableSized()->NumberOfReferences();
165}
166
167inline bool BaseHandleScope::Contains(StackReference<mirror::Object>* handle_scope_entry) const {
168 return LIKELY(!IsVariableSized())
169 ? AsHandleScope()->Contains(handle_scope_entry)
170 : AsVariableSized()->Contains(handle_scope_entry);
171}
172
173template <typename Visitor>
174inline void BaseHandleScope::VisitRoots(Visitor& visitor) {
175 if (LIKELY(!IsVariableSized())) {
176 AsHandleScope()->VisitRoots(visitor);
177 } else {
178 AsVariableSized()->VisitRoots(visitor);
179 }
180}
181
182inline VariableSizedHandleScope* BaseHandleScope::AsVariableSized() {
183 DCHECK(IsVariableSized());
184 return down_cast<VariableSizedHandleScope*>(this);
185}
186
187inline HandleScope* BaseHandleScope::AsHandleScope() {
188 DCHECK(!IsVariableSized());
189 return down_cast<HandleScope*>(this);
190}
191
192inline const VariableSizedHandleScope* BaseHandleScope::AsVariableSized() const {
193 DCHECK(IsVariableSized());
194 return down_cast<const VariableSizedHandleScope*>(this);
195}
196
197inline const HandleScope* BaseHandleScope::AsHandleScope() const {
198 DCHECK(!IsVariableSized());
199 return down_cast<const HandleScope*>(this);
200}
201
202template<class T>
Vladimir Markoabedfca2019-05-23 14:07:47 +0100203inline MutableHandle<T> VariableSizedHandleScope::NewHandle(T* object) {
204 return NewHandle(ObjPtr<T>(object));
Mathieu Chartiere8a3c572016-10-11 16:52:17 -0700205}
206
Andreas Gampec73cb642017-02-22 10:11:30 -0800207template<class MirrorType>
208inline MutableHandle<MirrorType> VariableSizedHandleScope::NewHandle(ObjPtr<MirrorType> ptr) {
Vladimir Markoabedfca2019-05-23 14:07:47 +0100209 if (current_scope_->RemainingSlots() == 0) {
210 current_scope_ = new LocalScopeType(current_scope_);
211 }
212 return current_scope_->NewHandle(ptr);
Mathieu Chartier3398c782016-09-30 10:27:43 -0700213}
214
// Constructs a variable-sized scope whose first local scope is embedded
// inline (first_scope_), then pushes this scope onto `self`'s stack.
// NOTE: members are initialized in declaration order, so first_scope_ is
// ready before current_scope_ is read by any later code.
inline VariableSizedHandleScope::VariableSizedHandleScope(Thread* const self)
    : BaseHandleScope(self->GetTopHandleScope()),
      self_(self),
      current_scope_(&first_scope_),
      first_scope_(/*link=*/ nullptr) {
  // Must be created on the owning thread.
  DCHECK_EQ(self, Thread::Current());
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
  }
  self_->PushHandleScope(this);
}
226
// Pops this scope from the owning thread (scopes are strictly LIFO), then
// frees every heap-allocated local scope in the chain.
inline VariableSizedHandleScope::~VariableSizedHandleScope() {
  if (kDebugLocking) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
  }
  BaseHandleScope* top_handle_scope = self_->PopHandleScope();
  DCHECK_EQ(top_handle_scope, this);
  // Don't delete first_scope_ since it is not heap allocated.
  while (current_scope_ != &first_scope_) {
    LocalScopeType* next = down_cast<LocalScopeType*>(current_scope_->GetLink());
    delete current_scope_;
    current_scope_ = next;
  }
}
240
241inline uint32_t VariableSizedHandleScope::NumberOfReferences() const {
242 uint32_t sum = 0;
243 const LocalScopeType* cur = current_scope_;
244 while (cur != nullptr) {
245 sum += cur->NumberOfReferences();
246 cur = reinterpret_cast<const LocalScopeType*>(cur->GetLink());
247 }
248 return sum;
249}
250
251inline bool VariableSizedHandleScope::Contains(StackReference<mirror::Object>* handle_scope_entry)
252 const {
253 const LocalScopeType* cur = current_scope_;
254 while (cur != nullptr) {
255 if (cur->Contains(handle_scope_entry)) {
256 return true;
257 }
258 cur = reinterpret_cast<const LocalScopeType*>(cur->GetLink());
259 }
260 return false;
261}
262
263template <typename Visitor>
264inline void VariableSizedHandleScope::VisitRoots(Visitor& visitor) {
265 LocalScopeType* cur = current_scope_;
266 while (cur != nullptr) {
267 cur->VisitRoots(visitor);
268 cur = reinterpret_cast<LocalScopeType*>(cur->GetLink());
269 }
270}
271
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700272} // namespace art
273
274#endif // ART_RUNTIME_HANDLE_SCOPE_INL_H_