/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "callee_save_frame.h"
#include "common_throws.h"
#include "dex_file-inl.h"
#include "dex_instruction-inl.h"
#include "entrypoints/entrypoint_utils.h"
#include "gc/accounting/card_table-inl.h"
#include "interpreter/interpreter.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "object_utils.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"

namespace art {

// Visits the arguments as saved to the stack by a Runtime::kRefAndArgs callee save frame.
class QuickArgumentVisitor {
  // Number of bytes for each out register in the caller method's frame.
  static constexpr size_t kBytesStackArgLocation = 4;
#if defined(__arm__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | ...        |    callee saves
  // | R3         |    arg3
  // | R2         |    arg2
  // | R1         |    arg1
  // | R0         |    padding
  // | Method*    |  <- sp
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 44;  // Offset of return address.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize = 48;  // Frame size.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
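  // Worked example, derived purely from the constants above: in an ARM
  // kRefsAndArgs frame the spilled GPR args sit at sp + 8 (R1), sp + 12 (R2)
  // and sp + 16 (R3), the return address at sp + 44, and the caller's frame
  // begins at sp + 48 (the frame size).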
#elif defined(__aarch64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | X28        |
  // | :          |
  // | X19        |
  // | X7         |
  // | :          |
  // | X1         |
  // | D15        |
  // | :          |
  // | D0         |
  // |            |    padding
  // | Method*    |  <- sp
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 144;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 296;  // Offset of return address.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize = 304;  // Frame size.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // | A0/Method* |  <- sp
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 60;  // Offset of return address.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize = 64;  // Frame size.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__i386__)
  // The callee save frame is pointed to by SP.
  // | argN        |  |
  // | ...         |  |
  // | arg4        |  |
  // | arg3 spill  |  |  Caller's frame
  // | arg2 spill  |  |
  // | arg1 spill  |  |
  // | Method*     | ---
  // | Return      |
  // | EBP,ESI,EDI |    callee saves
  // | EBX         |    arg3
  // | EDX         |    arg2
  // | ECX         |    arg1
  // | EAX/Method* |  <- sp
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 28;  // Offset of return address.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize = 32;  // Frame size.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__x86_64__)
  // The callee save frame is pointed to by SP.
  // | argN            |  |
  // | ...             |  |
  // | reg. arg spills |  |  Caller's frame
  // | Method*         | ---
  // | Return          |
  // | R15             |    callee save
  // | R14             |    callee save
  // | R13             |    callee save
  // | R12             |    callee save
  // | R9              |    arg5
  // | R8              |    arg4
  // | RSI/R6          |    arg1
  // | RBP/R5          |    callee save
  // | RBX/R3          |    callee save
  // | RDX/R2          |    arg2
  // | RCX/R1          |    arg3
  // | XMM7            |    float arg 8
  // | XMM6            |    float arg 7
  // | XMM5            |    float arg 6
  // | XMM4            |    float arg 5
  // | XMM3            |    float arg 4
  // | XMM2            |    float arg 3
  // | XMM1            |    float arg 2
  // | XMM0            |    float arg 1
  // | Padding         |
  // | RDI/Method*     |  <- sp
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
#ifdef TARGET_REX_SUPPORT
  static constexpr size_t kNumQuickGprArgs = 5;  // 5 arguments passed in GPRs.
#else
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs if r8..r15 not enabled.
#endif
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 168;  // Offset of return address.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize = 176;  // Frame size.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    switch (gpr_index) {
      case 0: return (4 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 1: return (1 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 2: return (0 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 3: return (5 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 4: return (6 * GetBytesPerGprSpillLocation(kRuntimeISA));
      default:
        LOG(FATAL) << "Unexpected GPR index: " << gpr_index;
        return 0;
    }
  }
#else
#error "Unsupported architecture"
#endif

 public:
  static mirror::ArtMethod* GetCallingMethod(StackReference<mirror::ArtMethod>* sp)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(sp->AsMirrorPtr()->IsCalleeSaveMethod());
    byte* previous_sp = reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize;
    return reinterpret_cast<StackReference<mirror::ArtMethod>*>(previous_sp)->AsMirrorPtr();
  }

  // For the given quick ref and args quick frame, return the caller's PC.
  static uintptr_t GetCallingPc(StackReference<mirror::ArtMethod>* sp)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(sp->AsMirrorPtr()->IsCalleeSaveMethod());
    byte* lr = reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_LrOffset;
    return *reinterpret_cast<uintptr_t*>(lr);
  }

  QuickArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
                       const char* shorty, uint32_t shorty_len)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) :
      is_static_(is_static), shorty_(shorty), shorty_len_(shorty_len),
      gpr_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset),
      fpr_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset),
      stack_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize
                  + StackArgumentStartFromShorty(is_static, shorty, shorty_len)),
      gpr_index_(0), fpr_index_(0), stack_index_(0), cur_type_(Primitive::kPrimVoid),
      is_split_long_or_double_(false) {
    DCHECK_EQ(kQuickCalleeSaveFrame_RefAndArgs_FrameSize,
              Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes());
  }

  virtual ~QuickArgumentVisitor() {}

  virtual void Visit() = 0;

  Primitive::Type GetParamPrimitiveType() const {
    return cur_type_;
  }

  byte* GetParamAddress() const {
    if (!kQuickSoftFloatAbi) {
      Primitive::Type type = GetParamPrimitiveType();
      if (UNLIKELY((type == Primitive::kPrimDouble) || (type == Primitive::kPrimFloat))) {
        if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
          return fpr_args_ + (fpr_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
        }
        return stack_args_ + (stack_index_ * kBytesStackArgLocation);
      }
    }
    if (gpr_index_ < kNumQuickGprArgs) {
      return gpr_args_ + GprIndexToGprOffset(gpr_index_);
    }
    return stack_args_ + (stack_index_ * kBytesStackArgLocation);
  }

  bool IsSplitLongOrDouble() const {
    if ((GetBytesPerGprSpillLocation(kRuntimeISA) == 4) ||
        (GetBytesPerFprSpillLocation(kRuntimeISA) == 4)) {
      return is_split_long_or_double_;
    } else {
      return false;  // An optimization for when GPR and FPRs are 64bit.
    }
  }

  bool IsParamAReference() const {
    return GetParamPrimitiveType() == Primitive::kPrimNot;
  }

  bool IsParamALongOrDouble() const {
    Primitive::Type type = GetParamPrimitiveType();
    return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
  }

  uint64_t ReadSplitLongParam() const {
    DCHECK(IsSplitLongOrDouble());
    uint64_t low_half = *reinterpret_cast<uint32_t*>(GetParamAddress());
    uint64_t high_half = *reinterpret_cast<uint32_t*>(stack_args_);
    return (low_half & 0xffffffffULL) | (high_half << 32);
  }
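  // For example, on 32-bit ARM a long whose low word was passed in the last
  // GPR has its high word in the first out slot of the caller's frame:
  // GetParamAddress() yields the register half, stack_args_ the stack half,
  // and the two 32-bit halves are recombined above.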

  void VisitArguments() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // This implementation doesn't support a reg-spill area for hard float
    // ABI targets such as x86_64 and aarch64. So, for those targets, whose
    // 'kQuickSoftFloatAbi' is 'false':
    // (a) 'stack_args_' should point to the method's first argument, and
    // (b) whatever the argument type, 'stack_index_' should be advanced
    //     on every visit.
    gpr_index_ = 0;
    fpr_index_ = 0;
    stack_index_ = 0;
    if (!is_static_) {  // Handle this.
      cur_type_ = Primitive::kPrimNot;
      is_split_long_or_double_ = false;
      Visit();
      if (!kQuickSoftFloatAbi || kNumQuickGprArgs == 0) {
        stack_index_++;
      }
      if (kNumQuickGprArgs > 0) {
        gpr_index_++;
      }
    }
    for (uint32_t shorty_index = 1; shorty_index < shorty_len_; ++shorty_index) {
      cur_type_ = Primitive::GetType(shorty_[shorty_index]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          is_split_long_or_double_ = false;
          Visit();
          if (!kQuickSoftFloatAbi || kNumQuickGprArgs == gpr_index_) {
            stack_index_++;
          }
          if (gpr_index_ < kNumQuickGprArgs) {
            gpr_index_++;
          }
          break;
        case Primitive::kPrimFloat:
          is_split_long_or_double_ = false;
          Visit();
          if (kQuickSoftFloatAbi) {
            if (gpr_index_ < kNumQuickGprArgs) {
              gpr_index_++;
            } else {
              stack_index_++;
            }
          } else {
            if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
              fpr_index_++;
            }
            stack_index_++;
          }
          break;
        case Primitive::kPrimDouble:
        case Primitive::kPrimLong:
          if (kQuickSoftFloatAbi || (cur_type_ == Primitive::kPrimLong)) {
            is_split_long_or_double_ = (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) &&
                ((gpr_index_ + 1) == kNumQuickGprArgs);
            Visit();
            if (!kQuickSoftFloatAbi || kNumQuickGprArgs == gpr_index_) {
              if (kBytesStackArgLocation == 4) {
                stack_index_ += 2;
              } else {
                CHECK_EQ(kBytesStackArgLocation, 8U);
                stack_index_++;
              }
            }
            if (gpr_index_ < kNumQuickGprArgs) {
              gpr_index_++;
              if (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) {
                if (gpr_index_ < kNumQuickGprArgs) {
                  gpr_index_++;
                } else if (kQuickSoftFloatAbi) {
                  stack_index_++;
                }
              }
            }
          } else {
            is_split_long_or_double_ = (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) &&
                ((fpr_index_ + 1) == kNumQuickFprArgs);
            Visit();
            if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
              fpr_index_++;
              if (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) {
                if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
                  fpr_index_++;
                }
              }
            }
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
          }
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty_;
      }
    }
  }
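  // Worked example: for a virtual method with shorty "VJI" ('V' is the return
  // type, so the arguments are a long then an int) on 32-bit ARM, the visits
  // see 'this' in R1, the long in the {R2, R3} pair, and the int in the first
  // stack slot of the caller's frame; on this soft-float ABI stack_index_
  // only starts advancing once the GPR arguments are exhausted.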

 private:
  static size_t StackArgumentStartFromShorty(bool is_static, const char* shorty,
                                             uint32_t shorty_len) {
    if (kQuickSoftFloatAbi) {
      CHECK_EQ(kNumQuickFprArgs, 0U);
      return (kNumQuickGprArgs * GetBytesPerGprSpillLocation(kRuntimeISA))
          + sizeof(StackReference<mirror::ArtMethod>) /* StackReference<ArtMethod> */;
    } else {
      // For now, there is no reg-spill area for the targets with a hard
      // float ABI, so the offset of the method's first parameter ('this'
      // for non-static methods) should be returned.
      return sizeof(StackReference<mirror::ArtMethod>);  // Skip StackReference<ArtMethod>.
    }
  }

  const bool is_static_;
  const char* const shorty_;
  const uint32_t shorty_len_;
  byte* const gpr_args_;  // Address of GPR arguments in callee save frame.
  byte* const fpr_args_;  // Address of FPR arguments in callee save frame.
  byte* const stack_args_;  // Address of stack arguments in caller's frame.
  uint32_t gpr_index_;  // Index into spilled GPRs.
  uint32_t fpr_index_;  // Index into spilled FPRs.
  uint32_t stack_index_;  // Index into arguments on the stack.
  // The current type of argument during VisitArguments.
  Primitive::Type cur_type_;
  // Does a 64bit parameter straddle the register and stack arguments?
  bool is_split_long_or_double_;
};

// Visits arguments on the stack placing them into the shadow frame.
class BuildQuickShadowFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickShadowFrameVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
                               const char* shorty, uint32_t shorty_len, ShadowFrame* sf,
                               size_t first_arg_reg) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), sf_(sf), cur_reg_(first_arg_reg) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

 private:
  ShadowFrame* const sf_;
  uint32_t cur_reg_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickShadowFrameVisitor);
};

void BuildQuickShadowFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        sf_->SetVRegLong(cur_reg_, ReadSplitLongParam());
      } else {
        sf_->SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
      }
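      // A long/double occupies two vregs: advance cur_reg_ once here and once
      // more at the end of the switch.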
      ++cur_reg_;
      break;
    case Primitive::kPrimNot: {
        StackReference<mirror::Object>* stack_ref =
            reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
        sf_->SetVRegReference(cur_reg_, stack_ref->AsMirrorPtr());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:  // Fall-through.
    case Primitive::kPrimChar:  // Fall-through.
    case Primitive::kPrimShort:  // Fall-through.
    case Primitive::kPrimInt:  // Fall-through.
    case Primitive::kPrimFloat:
      sf_->SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      break;
  }
  ++cur_reg_;
}

extern "C" uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
                                                StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Ensure we don't get thread suspension until the object arguments are safely in the shadow
  // frame.
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);

  if (method->IsAbstract()) {
    ThrowAbstractMethodError(method);
    return 0;
  } else {
    DCHECK(!method->IsNative()) << PrettyMethod(method);
    const char* old_cause = self->StartAssertNoThreadSuspension("Building interpreter shadow frame");
    const DexFile::CodeItem* code_item = method->GetCodeItem();
    DCHECK(code_item != nullptr) << PrettyMethod(method);
    uint16_t num_regs = code_item->registers_size_;
    void* memory = alloca(ShadowFrame::ComputeSize(num_regs));
    ShadowFrame* shadow_frame(ShadowFrame::Create(num_regs, NULL,  // No last shadow coming from quick.
                                                  method, 0, memory));
    size_t first_arg_reg = code_item->registers_size_ - code_item->ins_size_;
    uint32_t shorty_len = 0;
    const char* shorty = method->GetShorty(&shorty_len);
    BuildQuickShadowFrameVisitor shadow_frame_builder(sp, method->IsStatic(), shorty, shorty_len,
                                                      shadow_frame, first_arg_reg);
    shadow_frame_builder.VisitArguments();
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);
    self->PushShadowFrame(shadow_frame);
    self->EndAssertNoThreadSuspension(old_cause);

    if (method->IsStatic() && !method->GetDeclaringClass()->IsInitializing()) {
      // Ensure static method's class is initialized.
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(method->GetDeclaringClass()));
      if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(h_class, true, true)) {
        DCHECK(Thread::Current()->IsExceptionPending()) << PrettyMethod(method);
        self->PopManagedStackFragment(fragment);
        return 0;
      }
    }

    StackHandleScope<1> hs(self);
    MethodHelper mh(hs.NewHandle(method));
    JValue result = interpreter::EnterInterpreterFromStub(self, mh, code_item, *shadow_frame);
    // Pop transition.
    self->PopManagedStackFragment(fragment);
    // No need to restore the args since the method has already been run by the interpreter.
    return result.GetJ();
  }
}
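
// Note on the vreg numbering above: a dex method's incoming arguments occupy
// the highest-numbered registers of its frame, so with, e.g., registers_size_
// = 5 and ins_size_ = 2, first_arg_reg = 3 and the two arguments land in v3
// and v4 of the shadow frame.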

// Visits arguments on the stack placing them into the args vector, Object* arguments are converted
// to jobjects.
class BuildQuickArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
                            const char* shorty, uint32_t shorty_len,
                            ScopedObjectAccessUnchecked* soa, std::vector<jvalue>* args) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa), args_(args) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  std::vector<jvalue>* const args_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*>> references_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickArgumentVisitor);
};

void BuildQuickArgumentVisitor::Visit() {
  jvalue val;
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      val.l = soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
      references_.push_back(std::make_pair(val.l, stack_ref));
      break;
    }
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        val.j = ReadSplitLongParam();
      } else {
        val.j = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:  // Fall-through.
    case Primitive::kPrimChar:  // Fall-through.
    case Primitive::kPrimShort:  // Fall-through.
    case Primitive::kPrimInt:  // Fall-through.
    case Primitive::kPrimFloat:
      val.i = *reinterpret_cast<jint*>(GetParamAddress());
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      val.j = 0;
      break;
  }
  args_->push_back(val);
}

void BuildQuickArgumentVisitor::FixupReferences() {
  // Fixup any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
// which is responsible for recording callee save registers. We explicitly place into jobjects the
// incoming reference arguments (so they survive GC). We invoke the invocation handler, which is a
// field within the proxy object, which will box the primitive arguments and deal with error cases.
extern "C" uint64_t artQuickProxyInvokeHandler(mirror::ArtMethod* proxy_method,
                                               mirror::Object* receiver,
                                               Thread* self, StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  DCHECK(proxy_method->IsProxyMethod()) << PrettyMethod(proxy_method);
  DCHECK(receiver->GetClass()->IsProxyClass()) << PrettyMethod(proxy_method);
  // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
  const char* old_cause =
      self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
  // Register the top of the managed stack, making stack crawlable.
  DCHECK_EQ(sp->AsMirrorPtr(), proxy_method) << PrettyMethod(proxy_method);
  self->SetTopOfStack(sp, 0);
  DCHECK_EQ(proxy_method->GetFrameSizeInBytes(),
            Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes())
      << PrettyMethod(proxy_method);
  self->VerifyStack();
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  // Create local ref. copies of proxy method and the receiver.
  jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);

  // Place the arguments into the args vector and remove the receiver.
  mirror::ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy();
  CHECK(!non_proxy_method->IsStatic()) << PrettyMethod(proxy_method) << " "
                                       << PrettyMethod(non_proxy_method);
  std::vector<jvalue> args;
  uint32_t shorty_len = 0;
  const char* shorty = proxy_method->GetShorty(&shorty_len);
  BuildQuickArgumentVisitor local_ref_visitor(sp, false, shorty, shorty_len, &soa, &args);

  local_ref_visitor.VisitArguments();
  DCHECK_GT(args.size(), 0U) << PrettyMethod(proxy_method);
  args.erase(args.begin());

  // Convert proxy method into expected interface method.
  mirror::ArtMethod* interface_method = proxy_method->FindOverriddenMethod();
  DCHECK(interface_method != NULL) << PrettyMethod(proxy_method);
  DCHECK(!interface_method->IsProxyMethod()) << PrettyMethod(interface_method);
  jobject interface_method_jobj = soa.AddLocalReference<jobject>(interface_method);

  // All naked Object*s should now be in jobjects, so it's safe to go into the main invoke code
  // that performs allocations.
  self->EndAssertNoThreadSuspension(old_cause);
  JValue result = InvokeProxyInvocationHandler(soa, shorty, rcvr_jobj, interface_method_jobj, args);
  // Restore references which might have moved.
  local_ref_visitor.FixupReferences();
  return result.GetJ();
}

// Read object references held in arguments from quick frames and place them in JNI local
// references, so they don't get garbage collected.
class RememberForGcArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  RememberForGcArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
                               const char* shorty, uint32_t shorty_len,
                               ScopedObjectAccessUnchecked* soa) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*>> references_;
  DISALLOW_COPY_AND_ASSIGN(RememberForGcArgumentVisitor);
};

void RememberForGcArgumentVisitor::Visit() {
  if (IsParamAReference()) {
    StackReference<mirror::Object>* stack_ref =
        reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
    jobject reference =
        soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
    references_.push_back(std::make_pair(reference, stack_ref));
  }
}

void RememberForGcArgumentVisitor::FixupReferences() {
  // Fixup any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

// Lazily resolve a method for quick. Called by stub code.
extern "C" const void* artQuickResolutionTrampoline(mirror::ArtMethod* called,
                                                    mirror::Object* receiver,
                                                    Thread* self,
                                                    StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  const char* old_cause = self->StartAssertNoThreadSuspension("Quick method resolution set up");

  // Compute details about the called method (avoid GCs).
  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  mirror::ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
  InvokeType invoke_type;
  const DexFile* dex_file;
  uint32_t dex_method_idx;
  if (called->IsRuntimeMethod()) {
    uint32_t dex_pc = caller->ToDexPc(QuickArgumentVisitor::GetCallingPc(sp));
    const DexFile::CodeItem* code;
    dex_file = caller->GetDexFile();
    code = caller->GetCodeItem();
    CHECK_LT(dex_pc, code->insns_size_in_code_units_);
    const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
    Instruction::Code instr_code = instr->Opcode();
    bool is_range;
    switch (instr_code) {
      case Instruction::INVOKE_DIRECT:
        invoke_type = kDirect;
        is_range = false;
        break;
      case Instruction::INVOKE_DIRECT_RANGE:
        invoke_type = kDirect;
        is_range = true;
        break;
      case Instruction::INVOKE_STATIC:
        invoke_type = kStatic;
        is_range = false;
        break;
      case Instruction::INVOKE_STATIC_RANGE:
        invoke_type = kStatic;
        is_range = true;
        break;
      case Instruction::INVOKE_SUPER:
        invoke_type = kSuper;
        is_range = false;
        break;
      case Instruction::INVOKE_SUPER_RANGE:
        invoke_type = kSuper;
        is_range = true;
        break;
      case Instruction::INVOKE_VIRTUAL:
        invoke_type = kVirtual;
        is_range = false;
        break;
      case Instruction::INVOKE_VIRTUAL_RANGE:
        invoke_type = kVirtual;
        is_range = true;
        break;
      case Instruction::INVOKE_INTERFACE:
        invoke_type = kInterface;
        is_range = false;
        break;
      case Instruction::INVOKE_INTERFACE_RANGE:
        invoke_type = kInterface;
        is_range = true;
        break;
      default:
        LOG(FATAL) << "Unexpected call into trampoline: " << instr->DumpString(NULL);
        // Avoid used uninitialized warnings.
        invoke_type = kDirect;
        is_range = false;
    }
    dex_method_idx = (is_range) ? instr->VRegB_3rc() : instr->VRegB_35c();
  } else {
    invoke_type = kStatic;
    dex_file = called->GetDexFile();
    dex_method_idx = called->GetDexMethodIndex();
  }
  uint32_t shorty_len;
  const char* shorty =
      dex_file->GetMethodShorty(dex_file->GetMethodId(dex_method_idx), &shorty_len);
  RememberForGcArgumentVisitor visitor(sp, invoke_type == kStatic, shorty, shorty_len, &soa);
  visitor.VisitArguments();
  self->EndAssertNoThreadSuspension(old_cause);
  bool virtual_or_interface = invoke_type == kVirtual || invoke_type == kInterface;
  // Resolve method filling in dex cache.
  if (UNLIKELY(called->IsRuntimeMethod())) {
    StackHandleScope<1> hs(self);
    mirror::Object* dummy = nullptr;
    HandleWrapper<mirror::Object> h_receiver(
        hs.NewHandleWrapper(virtual_or_interface ? &receiver : &dummy));
    called = linker->ResolveMethod(self, dex_method_idx, &caller, invoke_type);
  }
  const void* code = NULL;
  if (LIKELY(!self->IsExceptionPending())) {
    // Incompatible class change should have been handled in resolve method.
    CHECK(!called->CheckIncompatibleClassChange(invoke_type))
        << PrettyMethod(called) << " " << invoke_type;
    if (virtual_or_interface) {
      // Refine called method based on receiver.
      CHECK(receiver != nullptr) << invoke_type;

      mirror::ArtMethod* orig_called = called;
      if (invoke_type == kVirtual) {
        called = receiver->GetClass()->FindVirtualMethodForVirtual(called);
      } else {
        called = receiver->GetClass()->FindVirtualMethodForInterface(called);
      }

      CHECK(called != nullptr) << PrettyMethod(orig_called) << " "
                               << PrettyTypeOf(receiver) << " "
                               << invoke_type << " " << orig_called->GetVtableIndex();

      // We came here because of sharpening. Ensure the dex cache is up-to-date on the method index
      // of the sharpened method.
      if (called->GetDexCacheResolvedMethods() == caller->GetDexCacheResolvedMethods()) {
        caller->GetDexCacheResolvedMethods()->Set<false>(called->GetDexMethodIndex(), called);
      } else {
        // Calling from one dex file to another, need to compute the method index appropriate to
        // the caller's dex file. Since we get here only if the original called was a runtime
        // method, we've got the correct dex_file and a dex_method_idx from above.
        DCHECK_EQ(caller->GetDexFile(), dex_file);
        StackHandleScope<1> hs(self);
        MethodHelper mh(hs.NewHandle(called));
        uint32_t method_index = mh.FindDexMethodIndexInOtherDexFile(*dex_file, dex_method_idx);
        if (method_index != DexFile::kDexNoIndex) {
          caller->GetDexCacheResolvedMethods()->Set<false>(method_index, called);
        }
      }
    }
    // Ensure that the called method's class is initialized.
    StackHandleScope<1> hs(soa.Self());
    Handle<mirror::Class> called_class(hs.NewHandle(called->GetDeclaringClass()));
    linker->EnsureInitialized(called_class, true, true);
    if (LIKELY(called_class->IsInitialized())) {
      code = called->GetEntryPointFromQuickCompiledCode();
    } else if (called_class->IsInitializing()) {
      if (invoke_type == kStatic) {
        // Class is still initializing, go to oat and grab code (trampoline must be left in place
        // until class is initialized to stop races between threads).
        code = linker->GetQuickOatCodeFor(called);
      } else {
        // No trampoline for non-static methods.
        code = called->GetEntryPointFromQuickCompiledCode();
      }
    } else {
      DCHECK(called_class->IsErroneous());
    }
  }
  CHECK_EQ(code == NULL, self->IsExceptionPending());
  // Fixup any locally saved objects that may have moved during a GC.
  visitor.FixupReferences();
  // Place called method in callee-save frame to be placed as first argument to quick method.
  sp->Assign(called);
  return code;
}
835
Andreas Gampec147b002014-03-06 18:11:06 -0800836
837
838/*
839 * This class uses a couple of observations to unite the different calling conventions through
840 * a few constants.
841 *
842 * 1) Number of registers used for passing is normally even, so counting down has no penalty for
843 * possible alignment.
844 * 2) Known 64b architectures store 8B units on the stack, both for integral and floating point
845 * types, so using uintptr_t is OK. Also means that we can use kRegistersNeededX to denote
846 * when we have to split things
847 * 3) The only soft-float, Arm, is 32b, so no widening needs to be taken into account for floats
848 * and we can use Int handling directly.
849 * 4) Only 64b architectures widen, and their stack is aligned 8B anyways, so no padding code
850 * necessary when widening. Also, widening of Ints will take place implicitly, and the
851 * extension should be compatible with Aarch64, which mandates copying the available bits
852 * into LSB and leaving the rest unspecified.
853 * 5) Aligning longs and doubles is necessary on arm only, and it's the same in registers and on
854 * the stack.
855 * 6) There is only little endian.
856 *
857 *
858 * Actual work is supposed to be done in a delegate of the template type. The interface is as
859 * follows:
860 *
861 * void PushGpr(uintptr_t): Add a value for the next GPR
862 *
863 * void PushFpr4(float): Add a value for the next FPR of size 32b. Is only called if we need
864 * padding, that is, think the architecture is 32b and aligns 64b.
865 *
866 * void PushFpr8(uint64_t): Push a double. We _will_ call this on 32b, it's the callee's job to
867 * split this if necessary. The current state will have aligned, if
868 * necessary.
869 *
870 * void PushStack(uintptr_t): Push a value to the stack.
871 *
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700872 * uintptr_t PushHandleScope(mirror::Object* ref): Add a reference to the HandleScope. This _will_ have nullptr,
Andreas Gampe36fea8d2014-03-10 13:37:40 -0700873 * as this might be important for null initialization.
Andreas Gampec147b002014-03-06 18:11:06 -0800874 * Must return the jobject, that is, the reference to the
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700875 * entry in the HandleScope (nullptr if necessary).
Andreas Gampec147b002014-03-06 18:11:06 -0800876 *
877 */
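// A minimal sketch of a conforming delegate (illustrative only; this class is
// hypothetical and not used by the runtime). A delegate that merely counts the
// pushes is enough to pre-compute how much native frame space a call needs:
class CountingPushDelegate {
 public:
  CountingPushDelegate() : gprs_(0), fprs_(0), stack_(0), handles_(0) {}
  void PushGpr(uintptr_t) { gprs_++; }
  void PushFpr4(float) { fprs_++; }
  void PushFpr8(uint64_t) { fprs_++; }
  void PushStack(uintptr_t) { stack_++; }
  // Per the contract above, this may legitimately receive nullptr.
  uintptr_t PushHandle(mirror::Object*) { handles_++; return 0u; }

  uint32_t gprs_, fprs_, stack_, handles_;
};
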
878template <class T> class BuildGenericJniFrameStateMachine {
879 public:
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800880#if defined(__arm__)
881 // TODO: These are all dummy values!
Andreas Gampec147b002014-03-06 18:11:06 -0800882 static constexpr bool kNativeSoftFloatAbi = true;
883 static constexpr size_t kNumNativeGprArgs = 4; // 4 arguments passed in GPRs, r0-r3
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800884 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs.
885
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800886 static constexpr size_t kRegistersNeededForLong = 2;
887 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec147b002014-03-06 18:11:06 -0800888 static constexpr bool kMultiRegistersAligned = true;
889 static constexpr bool kMultiRegistersWidened = false;
890 static constexpr bool kAlignLongOnStack = true;
891 static constexpr bool kAlignDoubleOnStack = true;
#elif defined(__aarch64__)
  static constexpr bool kNativeSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumNativeGprArgs = 8;  // 8 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 8;  // 8 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 1;
  static constexpr size_t kRegistersNeededForDouble = 1;
  static constexpr bool kMultiRegistersAligned = false;
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__mips__)
  // TODO: These are all dummy values!
  static constexpr bool kNativeSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumNativeGprArgs = 0;  // 0 arguments passed in GPRs (dummy).
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs (dummy).

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = true;
  static constexpr bool kMultiRegistersWidened = true;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__i386__)
  // TODO: Check these!
  static constexpr bool kNativeSoftFloatAbi = false;  // Not using int registers for fp.
  static constexpr size_t kNumNativeGprArgs = 0;  // 0 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = false;  // x86 not using regs, anyways.
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__x86_64__)
  static constexpr bool kNativeSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumNativeGprArgs = 6;  // 6 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 8;  // 8 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 1;
  static constexpr size_t kRegistersNeededForDouble = 1;
  static constexpr bool kMultiRegistersAligned = false;
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#else
#error "Unsupported architecture"
#endif

 public:
  explicit BuildGenericJniFrameStateMachine(T* delegate) : gpr_index_(kNumNativeGprArgs),
                                                           fpr_index_(kNumNativeFprArgs),
                                                           stack_entries_(0),
                                                           delegate_(delegate) {
    // For register alignment, we want to assume that counters (gpr_index_, fpr_index_) are even
    // iff the next register is even; counting down is just to make the compiler happy...
    CHECK_EQ(kNumNativeGprArgs % 2, 0U);
    CHECK_EQ(kNumNativeFprArgs % 2, 0U);
  }

  virtual ~BuildGenericJniFrameStateMachine() {}

  bool HavePointerGpr() {
    return gpr_index_ > 0;
  }

  void AdvancePointer(void* val) {
    if (HavePointerGpr()) {
      gpr_index_--;
      PushGpr(reinterpret_cast<uintptr_t>(val));
    } else {
      stack_entries_++;  // TODO: have a field for pointer length as multiple of 32b.
      PushStack(reinterpret_cast<uintptr_t>(val));
      gpr_index_ = 0;
    }
  }


  bool HaveHandleScopeGpr() {
    return gpr_index_ > 0;
  }

  void AdvanceHandleScope(mirror::Object* ptr) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uintptr_t handle = PushHandle(ptr);
    if (HaveHandleScopeGpr()) {
      gpr_index_--;
      PushGpr(handle);
    } else {
      stack_entries_++;
      PushStack(handle);
      gpr_index_ = 0;
    }
  }


  bool HaveIntGpr() {
    return gpr_index_ > 0;
  }

  void AdvanceInt(uint32_t val) {
    if (HaveIntGpr()) {
      gpr_index_--;
      PushGpr(val);
    } else {
      stack_entries_++;
      PushStack(val);
      gpr_index_ = 0;
    }
  }


  bool HaveLongGpr() {
    return gpr_index_ >= kRegistersNeededForLong + (LongGprNeedsPadding() ? 1 : 0);
  }

  bool LongGprNeedsPadding() {
    return kRegistersNeededForLong > 1 &&  // only pad when using multiple registers
        kAlignLongOnStack &&               // and when it needs alignment
        (gpr_index_ & 1) == 1;             // counter is odd, see constructor
  }

  bool LongStackNeedsPadding() {
    return kRegistersNeededForLong > 1 &&  // only pad when using multiple registers
        kAlignLongOnStack &&               // and when it needs 8B alignment
        (stack_entries_ & 1) == 1;         // counter is odd
  }

  void AdvanceLong(uint64_t val) {
    if (HaveLongGpr()) {
      if (LongGprNeedsPadding()) {
        PushGpr(0);
        gpr_index_--;
      }
      if (kRegistersNeededForLong == 1) {
        PushGpr(static_cast<uintptr_t>(val));
      } else {
        PushGpr(static_cast<uintptr_t>(val & 0xFFFFFFFF));
        PushGpr(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
      }
      gpr_index_ -= kRegistersNeededForLong;
    } else {
      if (LongStackNeedsPadding()) {
        PushStack(0);
        stack_entries_++;
      }
      if (kRegistersNeededForLong == 1) {
        PushStack(static_cast<uintptr_t>(val));
        stack_entries_++;
      } else {
        PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
        PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
        stack_entries_ += 2;
      }
      gpr_index_ = 0;
    }
  }
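  // Example of the padding rules above: on 32-bit ARM (two registers per long,
  // kAlignLongOnStack set), a long arriving while r1..r3 are still free gets
  // even-register alignment: a padding word consumes r1 and the value lands in
  // the {r2, r3} pair.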


  bool HaveFloatFpr() {
    return fpr_index_ > 0;
  }

  template <typename U, typename V> V convert(U in) {
    CHECK_LE(sizeof(U), sizeof(V));
    union { U u; V v; } tmp;
    tmp.u = in;
    return tmp.v;
  }
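  // The union copy is deliberate: it reinterprets the bits without the
  // strict-aliasing trouble that casting pointers between float and integer
  // types would invite.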

  void AdvanceFloat(float val) {
    if (kNativeSoftFloatAbi) {
      AdvanceInt(convert<float, uint32_t>(val));
    } else {
      if (HaveFloatFpr()) {
        fpr_index_--;
        if (kRegistersNeededForDouble == 1) {
          if (kMultiRegistersWidened) {
            PushFpr8(convert<double, uint64_t>(val));
          } else {
            // No widening, just use the bits.
            PushFpr8(convert<float, uint64_t>(val));
          }
        } else {
          PushFpr4(val);
        }
      } else {
        stack_entries_++;
        if (kRegistersNeededForDouble == 1 && kMultiRegistersWidened) {
          // Need to widen before storing: Note the "double" in the template instantiation.
          PushStack(convert<double, uintptr_t>(val));
        } else {
          PushStack(convert<float, uintptr_t>(val));
        }
        fpr_index_ = 0;
      }
    }
  }


  bool HaveDoubleFpr() {
    return fpr_index_ >= kRegistersNeededForDouble + (DoubleFprNeedsPadding() ? 1 : 0);
  }

  bool DoubleFprNeedsPadding() {
    return kRegistersNeededForDouble > 1 &&  // only pad when using multiple registers
        kAlignDoubleOnStack &&               // and when it needs alignment
        (fpr_index_ & 1) == 1;               // counter is odd, see constructor
  }

  bool DoubleStackNeedsPadding() {
    return kRegistersNeededForDouble > 1 &&  // only pad when using multiple registers
        kAlignDoubleOnStack &&               // and when it needs 8B alignment
        (stack_entries_ & 1) == 1;           // counter is odd
  }

  void AdvanceDouble(uint64_t val) {
    if (kNativeSoftFloatAbi) {
      AdvanceLong(val);
    } else {
      if (HaveDoubleFpr()) {
        if (DoubleFprNeedsPadding()) {
          PushFpr4(0);
          fpr_index_--;
        }
        PushFpr8(val);
        fpr_index_ -= kRegistersNeededForDouble;
      } else {
        if (DoubleStackNeedsPadding()) {
          PushStack(0);
          stack_entries_++;
        }
        if (kRegistersNeededForDouble == 1) {
          PushStack(static_cast<uintptr_t>(val));
          stack_entries_++;
        } else {
          PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
          PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
          stack_entries_ += 2;
        }
        fpr_index_ = 0;
      }
    }
  }

  uint32_t getStackEntries() {
    return stack_entries_;
  }

  uint32_t getNumberOfUsedGprs() {
    return kNumNativeGprArgs - gpr_index_;
  }

  uint32_t getNumberOfUsedFprs() {
    return kNumNativeFprArgs - fpr_index_;
  }

 private:
  void PushGpr(uintptr_t val) {
    delegate_->PushGpr(val);
  }
  void PushFpr4(float val) {
    delegate_->PushFpr4(val);
  }
  void PushFpr8(uint64_t val) {
    delegate_->PushFpr8(val);
  }
  void PushStack(uintptr_t val) {
    delegate_->PushStack(val);
  }
  uintptr_t PushHandle(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return delegate_->PushHandle(ref);
  }

  uint32_t gpr_index_;      // Number of free GPRs
  uint32_t fpr_index_;      // Number of free FPRs
  uint32_t stack_entries_;  // Stack entries are in multiples of 32b, as floats are usually not
                            // extended
  T* delegate_;             // What Push implementation gets called
};
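
// A minimal usage sketch (illustrative, not part of the original file): the
// state machine is parameterized on a delegate that receives the Push
// callbacks. ComputeGenericJniFrameSize below only counts what would be
// pushed, while BuildGenericJniFrameVisitor actually writes the values.
//
//   ComputeGenericJniFrameSize counter;
//   BuildGenericJniFrameStateMachine<ComputeGenericJniFrameSize> sm(&counter);
//   sm.AdvancePointer(nullptr);  // JNIEnv* slot
//   sm.AdvanceInt(0);            // a jint argument
//   sm.AdvanceDouble(0);         // a jdouble argument
//   uint32_t spilled = sm.getStackEntries();  // entries that did not fit in registers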

class ComputeGenericJniFrameSize FINAL {
 public:
  ComputeGenericJniFrameSize() : num_handle_scope_references_(0), num_stack_entries_(0) {}

  uint32_t GetStackSize() {
    return num_stack_entries_ * sizeof(uintptr_t);
  }

  // WARNING: After this, *sp won't be pointing to the method anymore!
  void ComputeLayout(StackReference<mirror::ArtMethod>** m, bool is_static, const char* shorty,
                     uint32_t shorty_len, void* sp, HandleScope** table,
                     uint32_t* handle_scope_entries, uintptr_t** start_stack, uintptr_t** start_gpr,
                     uint32_t** start_fpr, void** code_return, size_t* overall_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    ComputeAll(is_static, shorty, shorty_len);

    mirror::ArtMethod* method = (*m)->AsMirrorPtr();

    uint8_t* sp8 = reinterpret_cast<uint8_t*>(sp);

    // First, fix up the layout of the callee-save frame.
    // We have to squeeze in the HandleScope, and relocate the method pointer.

    // "Free" the slot for the method.
    sp8 += kPointerSize;  // In the callee-save frame we use a full pointer.

    // Under the callee saves, put the handle scope and the new method stack reference.
    *handle_scope_entries = num_handle_scope_references_;

    size_t handle_scope_size = HandleScope::SizeOf(num_handle_scope_references_);
    size_t scope_and_method = handle_scope_size + sizeof(StackReference<mirror::ArtMethod>);

    sp8 -= scope_and_method;
    // Align by kStackAlignment
    uintptr_t sp_to_align = reinterpret_cast<uintptr_t>(sp8);
    sp_to_align = RoundDown(sp_to_align, kStackAlignment);
    sp8 = reinterpret_cast<uint8_t*>(sp_to_align);

    uint8_t* sp8_table = sp8 + sizeof(StackReference<mirror::ArtMethod>);
    *table = reinterpret_cast<HandleScope*>(sp8_table);
    (*table)->SetNumberOfReferences(num_handle_scope_references_);

    // Add a slot for the method pointer, and fill it. Fix the pointer-pointer given to us.
    uint8_t* method_pointer = sp8;
    StackReference<mirror::ArtMethod>* new_method_ref =
        reinterpret_cast<StackReference<mirror::ArtMethod>*>(method_pointer);
    new_method_ref->Assign(method);
    *m = new_method_ref;

    // Reference cookie and padding
    sp8 -= 8;
    // Store HandleScope size
    *reinterpret_cast<uint32_t*>(sp8) = static_cast<uint32_t>(handle_scope_size & 0xFFFFFFFF);

    // Next comes the native call stack.
    sp8 -= GetStackSize();
    // Now align the call stack below. This aligns by 16, as AArch64 seems to require.
    uintptr_t mask = ~0x0F;
    sp8 = reinterpret_cast<uint8_t*>(reinterpret_cast<uintptr_t>(sp8) & mask);
    *start_stack = reinterpret_cast<uintptr_t*>(sp8);

    // put fprs and gprs below
    // Assumption is OK right now, as we have soft-float arm
    size_t fregs = BuildGenericJniFrameStateMachine<ComputeGenericJniFrameSize>::kNumNativeFprArgs;
    sp8 -= fregs * sizeof(uintptr_t);
    *start_fpr = reinterpret_cast<uint32_t*>(sp8);
    size_t iregs = BuildGenericJniFrameStateMachine<ComputeGenericJniFrameSize>::kNumNativeGprArgs;
    sp8 -= iregs * sizeof(uintptr_t);
    *start_gpr = reinterpret_cast<uintptr_t*>(sp8);

    // reserve space for the code pointer
    sp8 -= kPointerSize;
    *code_return = reinterpret_cast<void*>(sp8);

    *overall_size = reinterpret_cast<uint8_t*>(sp) - sp8;

    // The new SP is stored at the end of the alloca, so it can be immediately popped
    sp8 = reinterpret_cast<uint8_t*>(sp) - 5 * KB;
    *(reinterpret_cast<uint8_t**>(sp8)) = method_pointer;
  }

  void ComputeHandleScopeOffset() { }  // nothing to do, static right now

  void ComputeAll(bool is_static, const char* shorty, uint32_t shorty_len)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    BuildGenericJniFrameStateMachine<ComputeGenericJniFrameSize> sm(this);

    // JNIEnv
    sm.AdvancePointer(nullptr);

    // Class object or this as first argument
    sm.AdvanceHandleScope(reinterpret_cast<mirror::Object*>(0x12345678));

    for (uint32_t i = 1; i < shorty_len; ++i) {
      Primitive::Type cur_type_ = Primitive::GetType(shorty[i]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
          sm.AdvanceHandleScope(reinterpret_cast<mirror::Object*>(0x12345678));
          break;

        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          sm.AdvanceInt(0);
          break;
        case Primitive::kPrimFloat:
          sm.AdvanceFloat(0);
          break;
        case Primitive::kPrimDouble:
          sm.AdvanceDouble(0);
          break;
        case Primitive::kPrimLong:
          sm.AdvanceLong(0);
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty;
      }
    }

    num_stack_entries_ = sm.getStackEntries();
  }

  void PushGpr(uintptr_t /* val */) {
    // not optimizing registers, yet
  }

  void PushFpr4(float /* val */) {
    // not optimizing registers, yet
  }

  void PushFpr8(uint64_t /* val */) {
    // not optimizing registers, yet
  }

  void PushStack(uintptr_t /* val */) {
    // counting is already done in the state machine
  }

  uintptr_t PushHandle(mirror::Object* /* ptr */) {
    num_handle_scope_references_++;
    return reinterpret_cast<uintptr_t>(nullptr);
  }

 private:
  uint32_t num_handle_scope_references_;
  uint32_t num_stack_entries_;
};
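
// Worked example (illustrative, not part of the original file): sizing the
// native out-arguments for a static native method with shorty "IJD"
// (returns int, takes a jlong and a jdouble):
//   ComputeGenericJniFrameSize fsc;
//   fsc.ComputeAll(true, "IJD", 3u);
//   size_t out_bytes = fsc.GetStackSize();  // bytes needed below the register area
// On a register-rich target everything fits in registers and out_bytes is 0;
// on a 32-bit soft-float target the long/double halves spill to the stack
// once the native argument GPRs are exhausted.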

// Visits arguments on the stack placing them into a region lower down the stack for the benefit
// of transitioning into native code.
class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildGenericJniFrameVisitor(StackReference<mirror::ArtMethod>** sp, bool is_static,
                              const char* shorty, uint32_t shorty_len, Thread* self) :
      QuickArgumentVisitor(*sp, is_static, shorty, shorty_len), sm_(this) {
    ComputeGenericJniFrameSize fsc;
    fsc.ComputeLayout(sp, is_static, shorty, shorty_len, *sp, &handle_scope_,
                      &handle_scope_expected_refs_, &cur_stack_arg_, &cur_gpr_reg_, &cur_fpr_reg_,
                      &code_return_, &alloca_used_size_);
    handle_scope_number_of_references_ = 0;
    cur_hs_entry_ = GetFirstHandleScopeEntry();

    // jni environment is always first argument
    sm_.AdvancePointer(self->GetJniEnv());

    if (is_static) {
      sm_.AdvanceHandleScope((*sp)->AsMirrorPtr()->GetDeclaringClass());
    }
  }

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FinalizeHandleScope(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  StackReference<mirror::Object>* GetFirstHandleScopeEntry()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return handle_scope_->GetHandle(0).GetReference();
  }

  jobject GetFirstHandleScopeJObject()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return handle_scope_->GetHandle(0).ToJObject();
  }

  void PushGpr(uintptr_t val) {
    *cur_gpr_reg_ = val;
    cur_gpr_reg_++;
  }

  void PushFpr4(float val) {
    *cur_fpr_reg_ = val;
    cur_fpr_reg_++;
  }

  void PushFpr8(uint64_t val) {
    uint64_t* tmp = reinterpret_cast<uint64_t*>(cur_fpr_reg_);
    *tmp = val;
    cur_fpr_reg_ += 2;
  }

  void PushStack(uintptr_t val) {
    *cur_stack_arg_ = val;
    cur_stack_arg_++;
  }

  uintptr_t PushHandle(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uintptr_t tmp;
    if (ref == nullptr) {
      *cur_hs_entry_ = StackReference<mirror::Object>();
      tmp = reinterpret_cast<uintptr_t>(nullptr);
    } else {
      *cur_hs_entry_ = StackReference<mirror::Object>::FromMirrorPtr(ref);
      tmp = reinterpret_cast<uintptr_t>(cur_hs_entry_);
    }
    cur_hs_entry_++;
    handle_scope_number_of_references_++;
    return tmp;
  }

  // Size of the part of the alloca that we actually need.
  size_t GetAllocaUsedSize() {
    return alloca_used_size_;
  }

  void* GetCodeReturn() {
    return code_return_;
  }

 private:
  uint32_t handle_scope_number_of_references_;
  StackReference<mirror::Object>* cur_hs_entry_;
  HandleScope* handle_scope_;
  uint32_t handle_scope_expected_refs_;
  uintptr_t* cur_gpr_reg_;
  uint32_t* cur_fpr_reg_;
  uintptr_t* cur_stack_arg_;
  // StackReference<mirror::Object>* top_of_handle_scope_;
  void* code_return_;
  size_t alloca_used_size_;

  BuildGenericJniFrameStateMachine<BuildGenericJniFrameVisitor> sm_;

  DISALLOW_COPY_AND_ASSIGN(BuildGenericJniFrameVisitor);
};

void BuildGenericJniFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong: {
      jlong long_arg;
      if (IsSplitLongOrDouble()) {
        long_arg = ReadSplitLongParam();
      } else {
        long_arg = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      sm_.AdvanceLong(long_arg);
      break;
    }
    case Primitive::kPrimDouble: {
      uint64_t double_arg;
      if (IsSplitLongOrDouble()) {
        // Read into union so that we don't cast to a double.
        double_arg = ReadSplitLongParam();
      } else {
        double_arg = *reinterpret_cast<uint64_t*>(GetParamAddress());
      }
      sm_.AdvanceDouble(double_arg);
      break;
    }
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      sm_.AdvanceHandleScope(stack_ref->AsMirrorPtr());
      break;
    }
    case Primitive::kPrimFloat:
      sm_.AdvanceFloat(*reinterpret_cast<float*>(GetParamAddress()));
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:
      sm_.AdvanceInt(*reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      break;
  }
}
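
// Illustrative trace (an assumption about a concrete case, not from the
// original source): for a non-static native method int f(Object o, long l),
// the constructor has already advanced the JNIEnv* slot; VisitArguments()
// then visits the receiver and each argument in turn, roughly:
//   sm_.AdvanceHandleScope(this_ref);  // 'this', visited as kPrimNot
//   sm_.AdvanceHandleScope(o_ref);     // the Object argument
//   sm_.AdvanceLong(l);                // the jlong argument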

void BuildGenericJniFrameVisitor::FinalizeHandleScope(Thread* self) {
  // Initialize padding entries.
  while (handle_scope_number_of_references_ < handle_scope_expected_refs_) {
    *cur_hs_entry_ = StackReference<mirror::Object>();
    cur_hs_entry_++;
    handle_scope_number_of_references_++;
  }
  handle_scope_->SetNumberOfReferences(handle_scope_expected_refs_);
  DCHECK_NE(handle_scope_expected_refs_, 0U);
  // Install HandleScope.
  self->PushHandleScope(handle_scope_);
}

extern "C" void* artFindNativeMethod();

uint64_t artQuickGenericJniEndJNIRef(Thread* self, uint32_t cookie, jobject l, jobject lock) {
  if (lock != nullptr) {
    return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceSynchronized(l, cookie, lock, self));
  } else {
    return reinterpret_cast<uint64_t>(JniMethodEndWithReference(l, cookie, self));
  }
}

void artQuickGenericJniEndJNINonRef(Thread* self, uint32_t cookie, jobject lock) {
  if (lock != nullptr) {
    JniMethodEndSynchronized(cookie, lock, self);
  } else {
    JniMethodEnd(cookie, self);
  }
}

/*
 * Initializes an alloca region assumed to be directly below sp for a native call:
 * Create a HandleScope and call stack and fill a mini stack with values to be pushed to registers.
 * The final element on the stack is a pointer to the native code.
 *
 * On entry, the stack has a standard callee-save frame above sp, and an alloca below it.
 * We need to fix this, as the handle scope needs to go into the callee-save frame.
 *
 * The return of this function denotes:
 * 1) How many bytes of the alloca can be released, if the value is non-negative.
 * 2) An error, if the value is negative.
 */
extern "C" ssize_t artQuickGenericJniTrampoline(Thread* self, StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  mirror::ArtMethod* called = sp->AsMirrorPtr();
  DCHECK(called->IsNative()) << PrettyMethod(called, true);

  // run the visitor
  uint32_t shorty_len = 0;
  const char* shorty = called->GetShorty(&shorty_len);
  BuildGenericJniFrameVisitor visitor(&sp, called->IsStatic(), shorty, shorty_len, self);
  visitor.VisitArguments();
  visitor.FinalizeHandleScope(self);

  // fix up managed-stack things in Thread
  self->SetTopOfStack(sp, 0);

  self->VerifyStack();

  // Start JNI, save the cookie.
  uint32_t cookie;
  if (called->IsSynchronized()) {
    cookie = JniMethodStartSynchronized(visitor.GetFirstHandleScopeJObject(), self);
    if (self->IsExceptionPending()) {
      self->PopHandleScope();
      // A negative value denotes an error.
      return -1;
    }
  } else {
    cookie = JniMethodStart(self);
  }
  uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
  *(sp32 - 1) = cookie;

  // Retrieve the stored native code.
  const void* nativeCode = called->GetNativeMethod();

  // There are two cases for the content of nativeCode:
  // 1) Pointer to the native function.
  // 2) Pointer to the trampoline for native code binding.
  // In the second case, we need to execute the binding and continue with the actual native function
  // pointer.
  DCHECK(nativeCode != nullptr);
  if (nativeCode == GetJniDlsymLookupStub()) {
    nativeCode = artFindNativeMethod();

    if (nativeCode == nullptr) {
      DCHECK(self->IsExceptionPending());  // There should be an exception pending now.

      // End JNI, as the assembly will move to deliver the exception.
      jobject lock = called->IsSynchronized() ? visitor.GetFirstHandleScopeJObject() : nullptr;
      if (shorty[0] == 'L') {
        artQuickGenericJniEndJNIRef(self, cookie, nullptr, lock);
      } else {
        artQuickGenericJniEndJNINonRef(self, cookie, lock);
      }

      return -1;
    }
    // Note that the native code pointer will be automatically set by artFindNativeMethod().
  }

  // Store the native code pointer in the stack at the right location.
  uintptr_t* code_pointer = reinterpret_cast<uintptr_t*>(visitor.GetCodeReturn());
  *code_pointer = reinterpret_cast<uintptr_t>(nativeCode);

  // 5K reserved, window_size + frame pointer used.
  size_t window_size = visitor.GetAllocaUsedSize();
  return (5 * KB) - window_size - kPointerSize;
}
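
// Illustrative pseudo-code (assumption, not from the original source): the
// generic JNI assembly stub consumes the return value roughly as
//   ssize_t released = artQuickGenericJniTrampoline(self, sp);
//   if (released < 0) goto deliver_exception;
//   sp += released;        // pop the unused part of the 5K alloca
//   call *code_pointer;    // loaded from the reserved slot at the region's end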

/*
 * Is called after the native JNI code. Responsible for cleanup (handle scope, saved state) and
 * unlocking.
 */
extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self,
                                                    StackReference<mirror::ArtMethod>* sp,
                                                    jvalue result, uint64_t result_f)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
  mirror::ArtMethod* called = sp->AsMirrorPtr();
  uint32_t cookie = *(sp32 - 1);

  jobject lock = nullptr;
  if (called->IsSynchronized()) {
    HandleScope* table = reinterpret_cast<HandleScope*>(
        reinterpret_cast<uint8_t*>(sp) + sizeof(StackReference<mirror::ArtMethod>));
    lock = table->GetHandle(0).ToJObject();
  }

  char return_shorty_char = called->GetShorty()[0];

  if (return_shorty_char == 'L') {
    return artQuickGenericJniEndJNIRef(self, cookie, result.l, lock);
  } else {
    artQuickGenericJniEndJNINonRef(self, cookie, lock);

    switch (return_shorty_char) {
      case 'F':  // Fall-through.
      case 'D':
        return result_f;
      case 'Z':
        return result.z;
      case 'B':
        return result.b;
      case 'C':
        return result.c;
      case 'S':
        return result.s;
      case 'I':
        return result.i;
      case 'J':
        return result.j;
      case 'V':
        return 0;
      default:
        LOG(FATAL) << "Unexpected return shorty character " << return_shorty_char;
        return 0;
    }
  }
}

// The following definitions create return types for two word-sized entities that will be passed
// in registers so that memory operations for the interface trampolines can be avoided. The entities
// are the resolved method and the pointer to the code to be invoked.
//
// On x86, ARM32 and MIPS, this is given for a *scalar* 64bit value. The definition thus *must* be
// uint64_t or long long int. We use the upper 32b for code, and the lower 32b for the method.
//
// On x86_64 and ARM64, structs are decomposed for register allocation, so we can use a struct of
// two size_t-sized values.
//
// We need two operations:
//
// 1) A flag value that signals failure. The assembly stubs expect the method part to be "0".
//    GetFailureValue() will return a value that has method == 0.
//
// 2) A value that combines a code pointer and a method pointer.
//    GetSuccessValue() constructs this.

#if defined(__i386__) || defined(__arm__) || defined(__mips__)
typedef uint64_t MethodAndCode;

// Encodes method_ptr==nullptr and code_ptr==nullptr
static constexpr MethodAndCode GetFailureValue() {
  return 0;
}

// Use the lower 32b for the method pointer and the upper 32b for the code pointer.
static MethodAndCode GetSuccessValue(const void* code, mirror::ArtMethod* method) {
  uint32_t method_uint = reinterpret_cast<uint32_t>(method);
  uint64_t code_uint = reinterpret_cast<uint32_t>(code);
  return ((code_uint << 32) | method_uint);
}

#elif defined(__x86_64__) || defined(__aarch64__)
struct MethodAndCode {
  uintptr_t method;
  uintptr_t code;
};

// Encodes method_ptr==nullptr. Leaves random value in code pointer.
static MethodAndCode GetFailureValue() {
  MethodAndCode ret;
  ret.method = 0;
  return ret;
}

// Write values into their respective members.
static MethodAndCode GetSuccessValue(const void* code, mirror::ArtMethod* method) {
  MethodAndCode ret;
  ret.method = reinterpret_cast<uintptr_t>(method);
  ret.code = reinterpret_cast<uintptr_t>(code);
  return ret;
}
#else
#error "Unsupported architecture"
#endif
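
// Illustrative helpers (a sketch, not part of the original file): taking the
// packed value apart again in C++, mirroring what the assembly stubs do with
// the returned register pair.
#if defined(__i386__) || defined(__arm__) || defined(__mips__)
static inline mirror::ArtMethod* MethodFrom(MethodAndCode value) {
  // The lower 32 bits hold the method pointer; 0 denotes failure.
  return reinterpret_cast<mirror::ArtMethod*>(static_cast<uintptr_t>(value & 0xFFFFFFFF));
}
static inline const void* CodeFrom(MethodAndCode value) {
  // The upper 32 bits hold the code entry point.
  return reinterpret_cast<const void*>(static_cast<uintptr_t>(value >> 32));
}
#else
static inline mirror::ArtMethod* MethodFrom(MethodAndCode value) {
  return reinterpret_cast<mirror::ArtMethod*>(value.method);
}
static inline const void* CodeFrom(MethodAndCode value) {
  return reinterpret_cast<const void*>(value.code);
}
#endif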

template<InvokeType type, bool access_check>
static MethodAndCode artInvokeCommon(uint32_t method_idx, mirror::Object* this_object,
                                     mirror::ArtMethod* caller_method,
                                     Thread* self, StackReference<mirror::ArtMethod>* sp);

template<InvokeType type, bool access_check>
static MethodAndCode artInvokeCommon(uint32_t method_idx, mirror::Object* this_object,
                                     mirror::ArtMethod* caller_method,
                                     Thread* self, StackReference<mirror::ArtMethod>* sp) {
  mirror::ArtMethod* method = FindMethodFast(method_idx, this_object, caller_method, access_check,
                                             type);
  if (UNLIKELY(method == nullptr)) {
    FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
    const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
    uint32_t shorty_len;
    const char* shorty =
        dex_file->GetMethodShorty(dex_file->GetMethodId(method_idx), &shorty_len);
    {
      // Remember the args in case a GC happens in FindMethodFromCode.
      ScopedObjectAccessUnchecked soa(self->GetJniEnv());
      RememberForGcArgumentVisitor visitor(sp, type == kStatic, shorty, shorty_len, &soa);
      visitor.VisitArguments();
      method = FindMethodFromCode<type, access_check>(method_idx, &this_object, &caller_method,
                                                      self);
      visitor.FixupReferences();
    }

    if (UNLIKELY(method == NULL)) {
      CHECK(self->IsExceptionPending());
      return GetFailureValue();  // Failure.
    }
  }
  DCHECK(!self->IsExceptionPending());
  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was NULL in method: " << PrettyMethod(method) << " location: "
                          << method->GetDexFile()->GetLocation();

  return GetSuccessValue(code, method);
}

// Explicit artInvokeCommon template function declarations to please analysis tool.
#define EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(type, access_check)                        \
  template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)                                  \
  MethodAndCode artInvokeCommon<type, access_check>(uint32_t method_idx,                \
                                                    mirror::Object* this_object,        \
                                                    mirror::ArtMethod* caller_method,   \
                                                    Thread* self,                       \
                                                    StackReference<mirror::ArtMethod>* sp)

EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, true);
#undef EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL
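
// Illustrative pseudo-code (assumption, not from the original source): an
// assembly invoke stub uses an instantiation above roughly as
//   MethodAndCode mc = artInvokeCommon<kVirtual, true>(idx, receiver, caller, self, sp);
//   if (MethodFrom(mc) == nullptr) { deliver the pending exception; }
//   else { place MethodFrom(mc) in the method register and jump to CodeFrom(mc); }
// using the illustrative MethodFrom()/CodeFrom() helpers sketched earlier.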


// See comments in runtime_support_asm.S
extern "C" MethodAndCode artInvokeInterfaceTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, mirror::ArtMethod* caller_method,
    Thread* self, StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kInterface, true>(method_idx, this_object, caller_method, self, sp);
}


extern "C" MethodAndCode artInvokeDirectTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, mirror::ArtMethod* caller_method,
    Thread* self, StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kDirect, true>(method_idx, this_object, caller_method, self, sp);
}

extern "C" MethodAndCode artInvokeStaticTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, mirror::ArtMethod* caller_method,
    Thread* self, StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kStatic, true>(method_idx, this_object, caller_method, self, sp);
}

extern "C" MethodAndCode artInvokeSuperTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, mirror::ArtMethod* caller_method,
    Thread* self, StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kSuper, true>(method_idx, this_object, caller_method, self, sp);
}

extern "C" MethodAndCode artInvokeVirtualTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, mirror::ArtMethod* caller_method,
    Thread* self, StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kVirtual, true>(method_idx, this_object, caller_method, self, sp);
}

// Determine target of interface dispatch. This object is known non-null.
extern "C" MethodAndCode artInvokeInterfaceTrampoline(mirror::ArtMethod* interface_method,
                                                      mirror::Object* this_object,
                                                      mirror::ArtMethod* caller_method,
                                                      Thread* self,
                                                      StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  mirror::ArtMethod* method;
  if (LIKELY(interface_method->GetDexMethodIndex() != DexFile::kDexNoIndex)) {
    method = this_object->GetClass()->FindVirtualMethodForInterface(interface_method);
    if (UNLIKELY(method == NULL)) {
      FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
      ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(interface_method, this_object,
                                                                 caller_method);
      return GetFailureValue();  // Failure.
    }
  } else {
    FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
    DCHECK(interface_method == Runtime::Current()->GetResolutionMethod());
    // Determine method index from calling dex instruction.
#if defined(__arm__)
    // On entry the stack pointed by sp is:
    // | argN        |  |
    // | ...         |  |
    // | arg4        |  |
    // | arg3 spill  |  |  Caller's frame
    // | arg2 spill  |  |
    // | arg1 spill  |  |
    // | Method*     | ---
    // | LR          |
    // | ...         |    callee saves
    // | R3          |    arg3
    // | R2          |    arg2
    // | R1          |    arg1
    // | R0          |
    // | Method*     |  <- sp
    DCHECK_EQ(48U, Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes());
    uintptr_t* regs = reinterpret_cast<uintptr_t*>(reinterpret_cast<byte*>(sp) + kPointerSize);
    uintptr_t caller_pc = regs[10];
#elif defined(__i386__)
    // On entry the stack pointed by sp is:
    // | argN        |  |
    // | ...         |  |
    // | arg4        |  |
    // | arg3 spill  |  |  Caller's frame
    // | arg2 spill  |  |
    // | arg1 spill  |  |
    // | Method*     | ---
    // | Return      |
    // | EBP,ESI,EDI |    callee saves
    // | EBX         |    arg3
    // | EDX         |    arg2
    // | ECX         |    arg1
    // | EAX/Method* |  <- sp
    DCHECK_EQ(32U, Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes());
    uintptr_t* regs = reinterpret_cast<uintptr_t*>(reinterpret_cast<byte*>(sp));
    uintptr_t caller_pc = regs[7];
#elif defined(__mips__)
    // On entry the stack pointed by sp is:
    // | argN        |  |
    // | ...         |  |
    // | arg4        |  |
    // | arg3 spill  |  |  Caller's frame
    // | arg2 spill  |  |
    // | arg1 spill  |  |
    // | Method*     | ---
    // | RA          |
    // | ...         |    callee saves
    // | A3          |    arg3
    // | A2          |    arg2
    // | A1          |    arg1
    // | A0/Method*  |  <- sp
    DCHECK_EQ(64U, Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes());
    uintptr_t* regs = reinterpret_cast<uintptr_t*>(reinterpret_cast<byte*>(sp));
    uintptr_t caller_pc = regs[15];
#else
    UNIMPLEMENTED(FATAL);
    uintptr_t caller_pc = 0;
#endif
    uint32_t dex_pc = caller_method->ToDexPc(caller_pc);
    const DexFile::CodeItem* code = caller_method->GetCodeItem();
    CHECK_LT(dex_pc, code->insns_size_in_code_units_);
    const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
    Instruction::Code instr_code = instr->Opcode();
    CHECK(instr_code == Instruction::INVOKE_INTERFACE ||
          instr_code == Instruction::INVOKE_INTERFACE_RANGE)
        << "Unexpected call into interface trampoline: " << instr->DumpString(NULL);
    uint32_t dex_method_idx;
    if (instr_code == Instruction::INVOKE_INTERFACE) {
      dex_method_idx = instr->VRegB_35c();
    } else {
      DCHECK_EQ(instr_code, Instruction::INVOKE_INTERFACE_RANGE);
      dex_method_idx = instr->VRegB_3rc();
    }

    const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
    uint32_t shorty_len;
    const char* shorty =
        dex_file->GetMethodShorty(dex_file->GetMethodId(dex_method_idx), &shorty_len);
    {
      // Remember the args in case a GC happens in FindMethodFromCode.
      ScopedObjectAccessUnchecked soa(self->GetJniEnv());
      RememberForGcArgumentVisitor visitor(sp, false, shorty, shorty_len, &soa);
      visitor.VisitArguments();
      method = FindMethodFromCode<kInterface, false>(dex_method_idx, &this_object, &caller_method,
                                                     self);
      visitor.FixupReferences();
    }

    if (UNLIKELY(method == nullptr)) {
      CHECK(self->IsExceptionPending());
      return GetFailureValue();  // Failure.
    }
  }
  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was NULL in method: " << PrettyMethod(method) << " location: "
                          << method->GetDexFile()->GetLocation();

  return GetSuccessValue(code, method);
}

}  // namespace art