blob: cb0be04d5c486313617e07278f02b4147902efc5 [file] [log] [blame]
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_
18#define ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_
19
20#include "entrypoint_utils.h"
21
22#include "class_linker-inl.h"
23#include "common_throws.h"
24#include "dex_file.h"
25#include "indirect_reference_table.h"
26#include "invoke_type.h"
27#include "jni_internal.h"
28#include "mirror/art_method.h"
29#include "mirror/array.h"
30#include "mirror/class-inl.h"
31#include "mirror/object-inl.h"
32#include "mirror/throwable.h"
Mingyao Yang98d1cc82014-05-15 17:02:16 -070033#include "handle_scope-inl.h"
34#include "thread.h"
35
36namespace art {
37
38// TODO: Fix no thread safety analysis when GCC can handle template specialization.
39template <const bool kAccessCheck>
Ian Rogerse5877a12014-07-16 12:06:35 -070040static inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
41 mirror::ArtMethod* method,
42 Thread* self, bool* slow_path) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -070043 mirror::Class* klass = method->GetDexCacheResolvedTypes()->GetWithoutChecks(type_idx);
44 if (UNLIKELY(klass == NULL)) {
45 klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
46 *slow_path = true;
47 if (klass == NULL) {
48 DCHECK(self->IsExceptionPending());
49 return nullptr; // Failure
50 }
51 }
52 if (kAccessCheck) {
53 if (UNLIKELY(!klass->IsInstantiable())) {
54 ThrowLocation throw_location = self->GetCurrentLocationForThrow();
55 self->ThrowNewException(throw_location, "Ljava/lang/InstantiationError;",
56 PrettyDescriptor(klass).c_str());
57 *slow_path = true;
58 return nullptr; // Failure
59 }
60 mirror::Class* referrer = method->GetDeclaringClass();
61 if (UNLIKELY(!referrer->CanAccess(klass))) {
62 ThrowIllegalAccessErrorClass(referrer, klass);
63 *slow_path = true;
64 return nullptr; // Failure
65 }
66 }
67 if (UNLIKELY(!klass->IsInitialized())) {
68 StackHandleScope<1> hs(self);
69 Handle<mirror::Class> h_klass(hs.NewHandle(klass));
70 // EnsureInitialized (the class initializer) might cause a GC.
71 // may cause us to suspend meaning that another thread may try to
72 // change the allocator while we are stuck in the entrypoints of
73 // an old allocator. Also, the class initialization may fail. To
74 // handle these cases we mark the slow path boolean as true so
75 // that the caller knows to check the allocator type to see if it
76 // has changed and to null-check the return value in case the
77 // initialization fails.
78 *slow_path = true;
79 if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(h_klass, true, true)) {
80 DCHECK(self->IsExceptionPending());
81 return nullptr; // Failure
82 }
83 return h_klass.Get();
84 }
85 return klass;
86}
87
88// TODO: Fix no thread safety analysis when annotalysis is smarter.
Ian Rogerse5877a12014-07-16 12:06:35 -070089static inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
90 Thread* self,
91 bool* slow_path) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -070092 if (UNLIKELY(!klass->IsInitialized())) {
93 StackHandleScope<1> hs(self);
94 Handle<mirror::Class> h_class(hs.NewHandle(klass));
95 // EnsureInitialized (the class initializer) might cause a GC.
96 // may cause us to suspend meaning that another thread may try to
97 // change the allocator while we are stuck in the entrypoints of
98 // an old allocator. Also, the class initialization may fail. To
99 // handle these cases we mark the slow path boolean as true so
100 // that the caller knows to check the allocator type to see if it
101 // has changed and to null-check the return value in case the
102 // initialization fails.
103 *slow_path = true;
104 if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(h_class, true, true)) {
105 DCHECK(self->IsExceptionPending());
106 return nullptr; // Failure
107 }
108 return h_class.Get();
109 }
110 return klass;
111}
112
113// Given the context of a calling Method, use its DexCache to resolve a type to a Class. If it
114// cannot be resolved, throw an error. If it can, use it to create an instance.
115// When verification/compiler hasn't been able to verify access, optionally perform an access
116// check.
117// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
118template <bool kAccessCheck, bool kInstrumented>
Ian Rogerse5877a12014-07-16 12:06:35 -0700119static inline mirror::Object* AllocObjectFromCode(uint32_t type_idx,
120 mirror::ArtMethod* method,
121 Thread* self,
122 gc::AllocatorType allocator_type) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700123 bool slow_path = false;
124 mirror::Class* klass = CheckObjectAlloc<kAccessCheck>(type_idx, method, self, &slow_path);
125 if (UNLIKELY(slow_path)) {
126 if (klass == nullptr) {
127 return nullptr;
128 }
129 return klass->Alloc<kInstrumented>(self, Runtime::Current()->GetHeap()->GetCurrentAllocator());
130 }
131 DCHECK(klass != nullptr);
132 return klass->Alloc<kInstrumented>(self, allocator_type);
133}
134
135// Given the context of a calling Method and a resolved class, create an instance.
136// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
137template <bool kInstrumented>
Ian Rogerse5877a12014-07-16 12:06:35 -0700138static inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
139 mirror::ArtMethod* method,
140 Thread* self,
141 gc::AllocatorType allocator_type) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700142 DCHECK(klass != nullptr);
143 bool slow_path = false;
144 klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path);
145 if (UNLIKELY(slow_path)) {
146 if (klass == nullptr) {
147 return nullptr;
148 }
149 gc::Heap* heap = Runtime::Current()->GetHeap();
150 // Pass in false since the object can not be finalizable.
151 return klass->Alloc<kInstrumented, false>(self, heap->GetCurrentAllocator());
152 }
153 // Pass in false since the object can not be finalizable.
154 return klass->Alloc<kInstrumented, false>(self, allocator_type);
155}
156
157// Given the context of a calling Method and an initialized class, create an instance.
158// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
159template <bool kInstrumented>
Ian Rogerse5877a12014-07-16 12:06:35 -0700160static inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
161 mirror::ArtMethod* method,
162 Thread* self,
163 gc::AllocatorType allocator_type) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700164 DCHECK(klass != nullptr);
165 // Pass in false since the object can not be finalizable.
166 return klass->Alloc<kInstrumented, false>(self, allocator_type);
167}
168
169
170// TODO: Fix no thread safety analysis when GCC can handle template specialization.
171template <bool kAccessCheck>
Ian Rogerse5877a12014-07-16 12:06:35 -0700172static inline mirror::Class* CheckArrayAlloc(uint32_t type_idx,
173 mirror::ArtMethod* method,
174 int32_t component_count,
175 bool* slow_path) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700176 if (UNLIKELY(component_count < 0)) {
177 ThrowNegativeArraySizeException(component_count);
178 *slow_path = true;
179 return nullptr; // Failure
180 }
181 mirror::Class* klass = method->GetDexCacheResolvedTypes()->GetWithoutChecks(type_idx);
182 if (UNLIKELY(klass == nullptr)) { // Not in dex cache so try to resolve
183 klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
184 *slow_path = true;
185 if (klass == nullptr) { // Error
186 DCHECK(Thread::Current()->IsExceptionPending());
187 return nullptr; // Failure
188 }
189 CHECK(klass->IsArrayClass()) << PrettyClass(klass);
190 }
191 if (kAccessCheck) {
192 mirror::Class* referrer = method->GetDeclaringClass();
193 if (UNLIKELY(!referrer->CanAccess(klass))) {
194 ThrowIllegalAccessErrorClass(referrer, klass);
195 *slow_path = true;
196 return nullptr; // Failure
197 }
198 }
199 return klass;
200}
201
202// Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If
203// it cannot be resolved, throw an error. If it can, use it to create an array.
204// When verification/compiler hasn't been able to verify access, optionally perform an access
205// check.
206// TODO: Fix no thread safety analysis when GCC can handle template specialization.
207template <bool kAccessCheck, bool kInstrumented>
Ian Rogerse5877a12014-07-16 12:06:35 -0700208static inline mirror::Array* AllocArrayFromCode(uint32_t type_idx,
209 mirror::ArtMethod* method,
210 int32_t component_count,
211 Thread* self,
212 gc::AllocatorType allocator_type) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700213 bool slow_path = false;
214 mirror::Class* klass = CheckArrayAlloc<kAccessCheck>(type_idx, method, component_count,
215 &slow_path);
216 if (UNLIKELY(slow_path)) {
217 if (klass == nullptr) {
218 return nullptr;
219 }
220 gc::Heap* heap = Runtime::Current()->GetHeap();
221 return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
222 klass->GetComponentSize(),
223 heap->GetCurrentAllocator());
224 }
225 return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
226 klass->GetComponentSize(), allocator_type);
227}
228
229template <bool kAccessCheck, bool kInstrumented>
Ian Rogerse5877a12014-07-16 12:06:35 -0700230static inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
231 mirror::ArtMethod* method,
232 int32_t component_count,
233 Thread* self,
234 gc::AllocatorType allocator_type) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700235 DCHECK(klass != nullptr);
236 if (UNLIKELY(component_count < 0)) {
237 ThrowNegativeArraySizeException(component_count);
238 return nullptr; // Failure
239 }
240 if (kAccessCheck) {
241 mirror::Class* referrer = method->GetDeclaringClass();
242 if (UNLIKELY(!referrer->CanAccess(klass))) {
243 ThrowIllegalAccessErrorClass(referrer, klass);
244 return nullptr; // Failure
245 }
246 }
247 // No need to retry a slow-path allocation as the above code won't cause a GC or thread
248 // suspension.
249 return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
250 klass->GetComponentSize(), allocator_type);
251}
252
// Slow-path field resolution for the interpreter/compiled-code entrypoints.
// Resolves the field for |field_idx| from |referrer|'s dex file, optionally performing
// access, static-ness, and size checks, and (for static fields) ensuring the declaring
// class is initialized. Returns the field, or nullptr with a pending exception.
// |expected_size| is the field width in bytes the caller intends to read/write.
template<FindFieldType type, bool access_check>
static inline mirror::ArtField* FindFieldFromCode(uint32_t field_idx, mirror::ArtMethod* referrer,
                                                  Thread* self, size_t expected_size) {
  // Decompose the compile-time access type into its three properties.
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead:     is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite:    is_primitive = false; is_set = true;  is_static = false; break;
    case InstancePrimitiveRead:  is_primitive = true;  is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true;  is_set = true;  is_static = false; break;
    case StaticObjectRead:       is_primitive = false; is_set = false; is_static = true;  break;
    case StaticObjectWrite:      is_primitive = false; is_set = true;  is_static = true;  break;
    case StaticPrimitiveRead:    is_primitive = true;  is_set = false; is_static = true;  break;
    case StaticPrimitiveWrite:   // Keep GCC happy by having a default handler, fall-through.
    default:                     is_primitive = true;  is_set = true;  is_static = true;  break;
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::ArtField* resolved_field = class_linker->ResolveField(field_idx, referrer, is_static);
  if (UNLIKELY(resolved_field == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (access_check) {
    // Static-ness of the resolved field must match the opcode used.
    if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
      ThrowIncompatibleClassChangeErrorField(resolved_field, is_static, referrer);
      return nullptr;
    }
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CheckResolvedFieldAccess(fields_class, resolved_field,
                                                            field_idx))) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
    // Writes to final fields are only allowed from the declaring class (i.e. <clinit>/<init>).
    if (UNLIKELY(is_set && resolved_field->IsFinal() && (fields_class != referring_class))) {
      ThrowIllegalAccessErrorFinalField(referrer, resolved_field);
      return nullptr;  // Failure.
    } else {
      if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
                   resolved_field->FieldSize() != expected_size)) {
        ThrowLocation throw_location = self->GetCurrentLocationForThrow();
        DCHECK(throw_location.GetMethod() == referrer);
        // NOTE(review): the message always says "read" even when is_set is true — confirm
        // whether writes should say "write" here.
        self->ThrowNewExceptionF(throw_location, "Ljava/lang/NoSuchFieldError;",
                                 "Attempted read of %zd-bit %s on field '%s'",
                                 // 32 / sizeof(int32_t) == 8: converts byte count to bits.
                                 expected_size * (32 / sizeof(int32_t)),
                                 is_primitive ? "primitive" : "non-primitive",
                                 PrettyField(resolved_field, true).c_str());
        return nullptr;  // Failure.
      }
    }
  }
  if (!is_static) {
    // instance fields must be being accessed on an initialized class
    return resolved_field;
  } else {
    // If the class is initialized we're done.
    if (LIKELY(fields_class->IsInitialized())) {
      return resolved_field;
    } else {
      // Handle the class: EnsureInitialized may trigger GC/suspension.
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(fields_class));
      if (LIKELY(class_linker->EnsureInitialized(h_class, true, true))) {
        // Otherwise let's ensure the class is initialized before resolving the field.
        return resolved_field;
      }
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind
      return nullptr;  // Failure.
    }
  }
}
324
// Explicit template declarations of FindFieldFromCode for all field access types.
// These force instantiation of every <type, access_check> combination in this translation
// unit so the assembly entrypoints can link against them.
#define EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \
mirror::ArtField* FindFieldFromCode<_type, _access_check>(uint32_t field_idx, \
                                                          mirror::ArtMethod* referrer, \
                                                          Thread* self, size_t expected_size) \

// Instantiates both the access-checked and unchecked variant for a given access type.
#define EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveWrite);

#undef EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL
347
// Slow-path method resolution for the invoke entrypoints.
// Resolves the method for |method_idx| from (*referrer)'s dex file, throws
// NullPointerException for a null receiver on non-static invokes, optionally performs
// incompatible-class-change and access checks, then selects the actual target according
// to the invoke |type| (direct, vtable, super-vtable, or IMT/interface lookup).
// |this_object| and |referrer| are passed by pointer so they can be updated if a GC
// moves the underlying objects during resolution. Returns nullptr with a pending
// exception on failure.
template<InvokeType type, bool access_check>
static inline mirror::ArtMethod* FindMethodFromCode(uint32_t method_idx,
                                                    mirror::Object** this_object,
                                                    mirror::ArtMethod** referrer, Thread* self) {
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  mirror::ArtMethod* resolved_method = class_linker->GetResolvedMethod(method_idx, *referrer, type);
  if (resolved_method == nullptr) {
    // ResolveMethod may allocate and suspend; wrap the receiver in a handle so it is
    // updated if moved. Static invokes have no receiver, so wrap a dummy null instead.
    StackHandleScope<1> hs(self);
    mirror::Object* null_this = nullptr;
    HandleWrapper<mirror::Object> h_this(
        hs.NewHandleWrapper(type == kStatic ? &null_this : this_object));
    resolved_method = class_linker->ResolveMethod(self, method_idx, referrer, type);
  }
  if (UNLIKELY(resolved_method == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  } else if (UNLIKELY(*this_object == nullptr && type != kStatic)) {
    // Maintain interpreter-like semantics where NullPointerException is thrown
    // after potential NoSuchMethodError from class linker.
    ThrowLocation throw_location = self->GetCurrentLocationForThrow();
    DCHECK_EQ(*referrer, throw_location.GetMethod());
    ThrowNullPointerExceptionForMethodAccess(throw_location, method_idx, type);
    return nullptr;  // Failure.
  } else if (access_check) {
    // Incompatible class change should have been handled in resolve method.
    if (UNLIKELY(resolved_method->CheckIncompatibleClassChange(type))) {
      ThrowIncompatibleClassChangeError(type, resolved_method->GetInvokeType(), resolved_method,
                                        *referrer);
      return nullptr;  // Failure.
    }
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    mirror::Class* referring_class = (*referrer)->GetDeclaringClass();
    bool can_access_resolved_method =
        referring_class->CheckResolvedMethodAccess<type>(methods_class, resolved_method,
                                                         method_idx);
    if (UNLIKELY(!can_access_resolved_method)) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
  }
  // Dispatch on the invoke type to pick the concrete target method.
  switch (type) {
    case kStatic:
    case kDirect:
      return resolved_method;
    case kVirtual: {
      // Virtual dispatch: index into the receiver's vtable.
      mirror::Class* klass = (*this_object)->GetClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check &&
          (!klass->HasVTable() ||
           vtable_index >= static_cast<uint32_t>(klass->GetVTableLength()))) {
        // Behavior to agree with that of the verifier.
        ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                               resolved_method->GetName(), resolved_method->GetSignature());
        return nullptr;  // Failure.
      }
      DCHECK(klass->HasVTable()) << PrettyClass(klass);
      return klass->GetVTableEntry(vtable_index);
    }
    case kSuper: {
      // Super dispatch: index into the *caller's* superclass vtable.
      mirror::Class* super_class = (*referrer)->GetDeclaringClass()->GetSuperClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check) {
        // Check existence of super class.
        if (super_class == nullptr || !super_class->HasVTable() ||
            vtable_index >= static_cast<uint32_t>(super_class->GetVTableLength())) {
          // Behavior to agree with that of the verifier.
          ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                                 resolved_method->GetName(), resolved_method->GetSignature());
          return nullptr;  // Failure.
        }
      } else {
        // Super class must exist.
        DCHECK(super_class != nullptr);
      }
      DCHECK(super_class->HasVTable());
      return super_class->GetVTableEntry(vtable_index);
    }
    case kInterface: {
      // Interface dispatch: try the embedded IMT first; on a conflict entry, fall back
      // to a full interface-method search on the receiver's class.
      uint32_t imt_index = resolved_method->GetDexMethodIndex() % mirror::Class::kImtSize;
      mirror::ArtMethod* imt_method = (*this_object)->GetClass()->GetEmbeddedImTableEntry(imt_index);
      if (!imt_method->IsImtConflictMethod()) {
        return imt_method;
      } else {
        mirror::ArtMethod* interface_method =
            (*this_object)->GetClass()->FindVirtualMethodForInterface(resolved_method);
        if (UNLIKELY(interface_method == nullptr)) {
          ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(resolved_method,
                                                                     *this_object, *referrer);
          return nullptr;  // Failure.
        }
        return interface_method;
      }
    }
    default:
      LOG(FATAL) << "Unknown invoke type " << type;
      return nullptr;  // Failure.
  }
}
446
// Explicit template declarations of FindMethodFromCode for all invoke types.
// These force instantiation of every <type, access_check> combination in this translation
// unit so the assembly entrypoints can link against them.
#define EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
  template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \
  mirror::ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx, \
                                                              mirror::Object** this_object, \
                                                              mirror::ArtMethod** referrer, \
                                                              Thread* self)
// Instantiates both the access-checked and unchecked variant for a given invoke type.
#define EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kStatic);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kDirect);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kVirtual);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kSuper);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kInterface);

#undef EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL
466
// Fast path field resolution that can't initialize classes or throw exceptions.
// Only succeeds if the field is already in |referrer|'s dex cache and passes the
// static-ness, initialization, access, and size checks; returns nullptr on any
// mismatch so the caller falls back to the slow path (FindFieldFromCode).
static inline mirror::ArtField* FindFieldFast(uint32_t field_idx,
                                              mirror::ArtMethod* referrer,
                                              FindFieldType type, size_t expected_size) {
  mirror::ArtField* resolved_field =
      referrer->GetDeclaringClass()->GetDexCache()->GetResolvedField(field_idx);
  if (UNLIKELY(resolved_field == nullptr)) {
    return nullptr;
  }
  // Check for incompatible class change.
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead:     is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite:    is_primitive = false; is_set = true;  is_static = false; break;
    case InstancePrimitiveRead:  is_primitive = true;  is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true;  is_set = true;  is_static = false; break;
    case StaticObjectRead:       is_primitive = false; is_set = false; is_static = true;  break;
    case StaticObjectWrite:      is_primitive = false; is_set = true;  is_static = true;  break;
    case StaticPrimitiveRead:    is_primitive = true;  is_set = false; is_static = true;  break;
    case StaticPrimitiveWrite:   is_primitive = true;  is_set = true;  is_static = true;  break;
    default:
      LOG(FATAL) << "UNREACHABLE";  // Assignment below to avoid GCC warnings.
      is_primitive = true;
      is_set = true;
      is_static = true;
      break;
  }
  if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
    // Incompatible class change.
    return nullptr;
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (is_static) {
    // Check class is initialized else fail so that we can contend to initialize the class with
    // other threads that may be racing to do this.
    if (UNLIKELY(!fields_class->IsInitialized())) {
      return nullptr;
    }
  }
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (UNLIKELY(!referring_class->CanAccess(fields_class) ||
               !referring_class->CanAccessMember(fields_class,
                                                 resolved_field->GetAccessFlags()) ||
               (is_set && resolved_field->IsFinal() && (fields_class != referring_class)))) {
    // Illegal access.
    return nullptr;
  }
  // Primitive-ness and width must match what the call site expects.
  if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
               resolved_field->FieldSize() != expected_size)) {
    return nullptr;
  }
  return resolved_field;
}
522
523// Fast path method resolution that can't throw exceptions.
524static inline mirror::ArtMethod* FindMethodFast(uint32_t method_idx,
525 mirror::Object* this_object,
526 mirror::ArtMethod* referrer,
Ian Rogerse5877a12014-07-16 12:06:35 -0700527 bool access_check, InvokeType type) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700528 bool is_direct = type == kStatic || type == kDirect;
529 if (UNLIKELY(this_object == NULL && !is_direct)) {
530 return NULL;
531 }
532 mirror::ArtMethod* resolved_method =
533 referrer->GetDeclaringClass()->GetDexCache()->GetResolvedMethod(method_idx);
534 if (UNLIKELY(resolved_method == NULL)) {
535 return NULL;
536 }
537 if (access_check) {
538 // Check for incompatible class change errors and access.
539 bool icce = resolved_method->CheckIncompatibleClassChange(type);
540 if (UNLIKELY(icce)) {
541 return NULL;
542 }
543 mirror::Class* methods_class = resolved_method->GetDeclaringClass();
544 mirror::Class* referring_class = referrer->GetDeclaringClass();
545 if (UNLIKELY(!referring_class->CanAccess(methods_class) ||
546 !referring_class->CanAccessMember(methods_class,
547 resolved_method->GetAccessFlags()))) {
548 // Potential illegal access, may need to refine the method's class.
549 return NULL;
550 }
551 }
552 if (type == kInterface) { // Most common form of slow path dispatch.
553 return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method);
554 } else if (is_direct) {
555 return resolved_method;
556 } else if (type == kSuper) {
Mingyao Yang2cdbad72014-07-16 10:44:41 -0700557 return referrer->GetDeclaringClass()->GetSuperClass()
558 ->GetVTableEntry(resolved_method->GetMethodIndex());
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700559 } else {
560 DCHECK(type == kVirtual);
Mingyao Yang2cdbad72014-07-16 10:44:41 -0700561 return this_object->GetClass()->GetVTableEntry(resolved_method->GetMethodIndex());
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700562 }
563}
564
// Resolves the class for |type_idx|, optionally verifies that |referrer| may access it,
// and (unless |can_run_clinit| is false, e.g. for const-class) ensures it is initialized.
// Returns the class, or nullptr with a pending exception on failure.
static inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx,
                                                    mirror::ArtMethod* referrer,
                                                    Thread* self, bool can_run_clinit,
                                                    bool verify_access) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::Class* klass = class_linker->ResolveType(type_idx, referrer);
  if (UNLIKELY(klass == nullptr)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // Perform access check if necessary.
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (verify_access && UNLIKELY(!referring_class->CanAccess(klass))) {
    ThrowIllegalAccessErrorClass(referring_class, klass);
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // If we're just implementing const-class, we shouldn't call <clinit>.
  if (!can_run_clinit) {
    return klass;
  }
  // If we are the <clinit> of this class, just return our storage.
  //
  // Do not set the DexCache InitializedStaticStorage, since that implies <clinit> has finished
  // running.
  if (klass == referring_class && referrer->IsConstructor() && referrer->IsStatic()) {
    return klass;
  }
  // Handle the class: EnsureInitialized may trigger GC/suspension and move it.
  StackHandleScope<1> hs(self);
  Handle<mirror::Class> h_class(hs.NewHandle(klass));
  if (!class_linker->EnsureInitialized(h_class, true, true)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  return h_class.Get();
}
600
601static inline mirror::String* ResolveStringFromCode(mirror::ArtMethod* referrer,
Ian Rogerse5877a12014-07-16 12:06:35 -0700602 uint32_t string_idx) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700603 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
604 return class_linker->ResolveString(string_idx, referrer);
605}
606
Ian Rogerse5877a12014-07-16 12:06:35 -0700607static inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700608 // Save any pending exception over monitor exit call.
609 mirror::Throwable* saved_exception = NULL;
610 ThrowLocation saved_throw_location;
611 bool is_exception_reported = self->IsExceptionReportedToInstrumentation();
612 if (UNLIKELY(self->IsExceptionPending())) {
613 saved_exception = self->GetException(&saved_throw_location);
614 self->ClearException();
615 }
616 // Decode locked object and unlock, before popping local references.
617 self->DecodeJObject(locked)->MonitorExit(self);
618 if (UNLIKELY(self->IsExceptionPending())) {
619 LOG(FATAL) << "Synchronized JNI code returning with an exception:\n"
620 << saved_exception->Dump()
621 << "\nEncountered second exception during implicit MonitorExit:\n"
622 << self->GetException(NULL)->Dump();
623 }
624 // Restore pending exception.
625 if (saved_exception != NULL) {
626 self->SetException(saved_throw_location, saved_exception);
627 self->SetExceptionReportedToInstrumentation(is_exception_reported);
628 }
629}
630
Ian Rogerse5877a12014-07-16 12:06:35 -0700631static inline void CheckSuspend(Thread* thread) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700632 for (;;) {
633 if (thread->ReadFlag(kCheckpointRequest)) {
634 thread->RunCheckpointFunction();
635 } else if (thread->ReadFlag(kSuspendRequest)) {
636 thread->FullSuspendCheck();
637 } else {
638 break;
639 }
640 }
641}
642
// Saturating floating-point to integer conversion: values at or beyond the integer
// range clamp to the min/max representable value, and NaN converts to 0.
template <typename INT_TYPE, typename FLOAT_TYPE>
static inline INT_TYPE art_float_to_integral(FLOAT_TYPE f) {
  const INT_TYPE kMaxInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::max());
  const INT_TYPE kMinInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::min());
  const FLOAT_TYPE kMaxIntAsFloat = static_cast<FLOAT_TYPE>(kMaxInt);
  const FLOAT_TYPE kMinIntAsFloat = static_cast<FLOAT_TYPE>(kMinInt);
  if (f != f) {
    return 0;  // NaN compares unequal to itself.
  }
  if (f <= kMinIntAsFloat) {
    return kMinInt;  // Saturate below.
  }
  if (f >= kMaxIntAsFloat) {
    return kMaxInt;  // Saturate above.
  }
  return static_cast<INT_TYPE>(f);  // In range: plain truncating cast.
}
659
660} // namespace art
661
662#endif // ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_