blob: 9e75cbabd74345caeeae5e0538fd0f758eec83d6 [file] [log] [blame]
Andreas Gampe525cde22014-04-22 15:44:50 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Mathieu Chartierc7853442015-03-27 14:35:38 -070019#include "art_field-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method-inl.h"
Andreas Gampe542451c2016-07-26 09:02:02 -070021#include "base/enums.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010022#include "class_linker-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070023#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070024#include "entrypoints/quick/quick_entrypoints_enum.h"
Andreas Gampe75a7db62016-09-26 12:04:26 -070025#include "imt_conflict_table.h"
Andreas Gampe13b27842016-11-07 16:48:23 -080026#include "jni_internal.h"
Nicolas Geoffray1004faa2016-03-23 14:28:30 +000027#include "linear_alloc.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070028#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070029#include "mirror/string-inl.h"
Mathieu Chartier0795f232016-09-27 18:43:30 -070030#include "scoped_thread_state_change-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070031
32namespace art {
33
34
// Test fixture for exercising the quick-code entrypoint stubs directly. It sets up the
// runtime with callee-save methods and provides Invoke3* helpers that hand-craft a call
// into a stub via per-architecture inline assembly.
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    // Force interpreter-only mode; the stubs under test are invoked manually below.
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Invoke a stub with three arguments and no referrer (referrer == nullptr).
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  // As Invoke3WithReferrerAndHidden, but with hidden == 0.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             ArtMethod* referrer) {
    return Invoke3WithReferrerAndHidden(arg0, arg1, arg2, code, self, referrer, 0);
  }

  // TODO: Set up a frame according to referrer's specs.
  // Core dispatcher: calls the stub at 'code' with (arg0, arg1, arg2), the referrer pushed on
  // the stack where the stubs expect it, and 'hidden' in the architecture's hidden-argument
  // register. Per-architecture inline assembly saves/restores registers around the call; the
  // stub's return value is returned. On arm64 the callee-saved FP registers d8-d15 are also
  // verified across the call (see fp_result).
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
#define PUSH(reg) "push " # reg "\n\t .cfi_adjust_cfa_offset 4\n\t"
#define POP(reg) "pop " # reg "\n\t .cfi_adjust_cfa_offset -4\n\t"
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"  // This is a memory op, so do this early. If it is off of
                                      // esp, then we won't be able to access it after spilling.

        // Spill 6 registers.
        PUSH(%%ebx)
        PUSH(%%ecx)
        PUSH(%%edx)
        PUSH(%%esi)
        PUSH(%%edi)
        PUSH(%%ebp)

        // Store the inputs to the stack, but keep the referrer up top, less work.
        PUSH(%[referrer])  // Align stack.
        PUSH(%[referrer])  // Store referrer

        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[code])
        // Now read them back into the required registers.
        POP(%%edi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%eax)
        // Call is prepared now.

        "call *%%edi\n\t"  // Call the stub
        "addl $8, %%esp\n\t"  // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -8\n\t"

        // Restore 6 registers.
        POP(%%ebp)
        POP(%%edi)
        POP(%%esi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%ebx)

        : "=a" (result)
          // Use the result from eax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory", "xmm7");  // clobber.
#undef PUSH
#undef POP
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #80\n\t"
        ".cfi_adjust_cfa_offset 80\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"
        // To be extra defensive, store x20. We do this because some of the stubs might make a
        // transition into the runtime via the blr instruction below and *not* save x20.
        "str x20, [sp, #64]\n\t"
        // 8 byte buffer

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x19, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "ldr x20, [sp, #64]\n\t"
        "add sp, sp, #80\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -80\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
          // Leave one register unclobbered, which is needed for compiling with
          // -fstack-protector-strong. According to AAPCS64 registers x9-x15 are caller-saved,
          // which means we should unclobber one of the callee-saved registers that are unused.
          // Here we use x20.
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -24\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"
        "sw %[hidden], 20($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"
        "lw $s1, 16($sp)\n\t"
        "lw $t7, 20($sp)\n\t"
        "addiu $sp, $sp, 24\n\t"

        "jalr $t9\n\t"             // Call the stub.
        "nop\n\t"                  // Branch delay slot.
        "addiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"   // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -48\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"
        "sd %[hidden], 40($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"
        "ld $s1, 32($sp)\n\t"
        "ld $t0, 40($sp)\n\t"
        "daddiu $sp, $sp, 48\n\t"

        "jalr $t9\n\t"              // Call the stub.
        "nop\n\t"                   // Branch delay slot.
        "daddiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"   // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        // Instead aliases t0-t3, register names $12-$15 has been used in the clobber list because
        // t0-t3 are ambiguous.
        : "at", "v0", "v1", "$12", "$13", "$14", "$15", "s0", "s1", "s2", "s3", "s4", "s5", "s6",
          "s7", "t8", "t9", "k0", "k1", "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
#define PUSH(reg) "pushq " # reg "\n\t .cfi_adjust_cfa_offset 8\n\t"
#define POP(reg) "popq " # reg "\n\t .cfi_adjust_cfa_offset -8\n\t"
    // Note: Uses the native convention. We do a callee-save regimen by manually spilling and
    // restoring almost all registers.
    // TODO: Set the thread?
    __asm__ __volatile__(
        // Spill almost everything (except rax, rsp). 14 registers.
        PUSH(%%rbx)
        PUSH(%%rcx)
        PUSH(%%rdx)
        PUSH(%%rsi)
        PUSH(%%rdi)
        PUSH(%%rbp)
        PUSH(%%r8)
        PUSH(%%r9)
        PUSH(%%r10)
        PUSH(%%r11)
        PUSH(%%r12)
        PUSH(%%r13)
        PUSH(%%r14)
        PUSH(%%r15)

        PUSH(%[referrer])              // Push referrer & 16B alignment padding
        PUSH(%[referrer])

        // Now juggle the input registers.
        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[hidden])
        PUSH(%[code])
        POP(%%r8)
        POP(%%rax)
        POP(%%rdx)
        POP(%%rsi)
        POP(%%rdi)

        "call *%%r8\n\t"               // Call the stub
        "addq $16, %%rsp\n\t"          // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"

        POP(%%r15)
        POP(%%r14)
        POP(%%r13)
        POP(%%r12)
        POP(%%r11)
        POP(%%r10)
        POP(%%r9)
        POP(%%r8)
        POP(%%rbp)
        POP(%%rdi)
        POP(%%rsi)
        POP(%%rdx)
        POP(%%rcx)
        POP(%%rbx)

        : "=a" (result)
          // Use the result from rax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into some other
          // register. We can't use "b" (rbx), as ASAN uses this for the frame pointer.
        : "memory");  // We spill and restore (almost) all registers, so only mention memory here.
#undef PUSH
#undef POP
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    // The callee-saved FP registers (d8-d15 on arm64) must survive the stub call; on
    // architectures without the check fpr_result stays 0.
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Reads the entrypoint pointer for 'entrypoint' out of the thread-local entrypoint table:
  // computes the offset via GetThreadOffset and dereferences it relative to 'self'.
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
    offset = GetThreadOffset<kRuntimePointerSize>(entrypoint).Int32Value();
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  // Result of the FP-register-preservation check of the last Invoke3WithReferrerAndHidden
  // call (nonzero means a callee-saved FP register was clobbered; only set on arm64).
  size_t fp_result;
};
542
543
Andreas Gampe525cde22014-04-22 15:44:50 -0700544TEST_F(StubTest, Memcpy) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200545#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
Andreas Gampe525cde22014-04-22 15:44:50 -0700546 Thread* self = Thread::Current();
547
548 uint32_t orig[20];
549 uint32_t trg[20];
550 for (size_t i = 0; i < 20; ++i) {
551 orig[i] = i;
552 trg[i] = 0;
553 }
554
555 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700556 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700557
558 EXPECT_EQ(orig[0], trg[0]);
559
560 for (size_t i = 1; i < 4; ++i) {
561 EXPECT_NE(orig[i], trg[i]);
562 }
563
564 for (size_t i = 4; i < 14; ++i) {
565 EXPECT_EQ(orig[i], trg[i]);
566 }
567
568 for (size_t i = 14; i < 20; ++i) {
569 EXPECT_NE(orig[i], trg[i]);
570 }
571
572 // TODO: Test overlapping?
573
574#else
575 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
576 // Force-print to std::cout so it's also outside the logcat.
577 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
578#endif
579}
580
Andreas Gampe525cde22014-04-22 15:44:50 -0700581TEST_F(StubTest, LockObject) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200582#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
583 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700584 static constexpr size_t kThinLockLoops = 100;
585
Andreas Gampe525cde22014-04-22 15:44:50 -0700586 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700587
588 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
589
Andreas Gampe525cde22014-04-22 15:44:50 -0700590 // Create an object
591 ScopedObjectAccess soa(self);
592 // garbage is created during ClassLinker::Init
593
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700594 StackHandleScope<2> hs(soa.Self());
595 Handle<mirror::String> obj(
596 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700597 LockWord lock = obj->GetLockWord(false);
598 LockWord::LockState old_state = lock.GetState();
599 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
600
Andreas Gampe29b38412014-08-13 00:15:43 -0700601 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700602
603 LockWord lock_after = obj->GetLockWord(false);
604 LockWord::LockState new_state = lock_after.GetState();
605 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700606 EXPECT_EQ(lock_after.ThinLockCount(), 0U); // Thin lock starts count at zero
607
608 for (size_t i = 1; i < kThinLockLoops; ++i) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700609 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700610
611 // Check we're at lock count i
612
613 LockWord l_inc = obj->GetLockWord(false);
614 LockWord::LockState l_inc_state = l_inc.GetState();
615 EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
616 EXPECT_EQ(l_inc.ThinLockCount(), i);
617 }
618
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700619 // Force a fat lock by running identity hashcode to fill up lock word.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700620 Handle<mirror::String> obj2(hs.NewHandle(
621 mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700622
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700623 obj2->IdentityHashCode();
624
Andreas Gampe29b38412014-08-13 00:15:43 -0700625 Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700626
627 LockWord lock_after2 = obj2->GetLockWord(false);
628 LockWord::LockState new_state2 = lock_after2.GetState();
629 EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
630 EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));
631
632 // Test done.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700633#else
634 LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
635 // Force-print to std::cout so it's also outside the logcat.
636 std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
637#endif
638}
639
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700640
// Small deterministic pseudo-random generator for the lock stress test. Not a proper LCG;
// just a repeatable scrambling of the seed. All arithmetic is unsigned 32-bit.
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  // Advances the state and returns the new value.
  uint32_t next() {
    const uint32_t scrambled = val_ * 48271u;   // Wraps mod 2^32, intentionally.
    val_ = scrambled % 2147483647u + 13u;
    return val_;
  }

  uint32_t val_;
};
652
653
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700654// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
655static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200656#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
657 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700658 static constexpr size_t kThinLockLoops = 100;
659
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700660 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700661
662 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
663 const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700664 // Create an object
665 ScopedObjectAccess soa(self);
666 // garbage is created during ClassLinker::Init
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700667 static constexpr size_t kNumberOfLocks = 10; // Number of objects = lock
668 StackHandleScope<kNumberOfLocks + 1> hs(self);
669 Handle<mirror::String> obj(
670 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700671 LockWord lock = obj->GetLockWord(false);
672 LockWord::LockState old_state = lock.GetState();
673 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
674
Andreas Gampe29b38412014-08-13 00:15:43 -0700675 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700676 // This should be an illegal monitor state.
677 EXPECT_TRUE(self->IsExceptionPending());
678 self->ClearException();
679
680 LockWord lock_after = obj->GetLockWord(false);
681 LockWord::LockState new_state = lock_after.GetState();
682 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);
Andreas Gampe525cde22014-04-22 15:44:50 -0700683
Andreas Gampe29b38412014-08-13 00:15:43 -0700684 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700685
686 LockWord lock_after2 = obj->GetLockWord(false);
687 LockWord::LockState new_state2 = lock_after2.GetState();
688 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);
689
Andreas Gampe29b38412014-08-13 00:15:43 -0700690 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700691
692 LockWord lock_after3 = obj->GetLockWord(false);
693 LockWord::LockState new_state3 = lock_after3.GetState();
694 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);
695
696 // Stress test:
697 // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
698 // each step.
699
700 RandGen r(0x1234);
701
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700702 constexpr size_t kIterations = 10000; // Number of iterations
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700703 constexpr size_t kMoveToFat = 1000; // Chance of 1:kMoveFat to make a lock fat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700704
705 size_t counts[kNumberOfLocks];
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700706 bool fat[kNumberOfLocks]; // Whether a lock should be thin or fat.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700707 Handle<mirror::String> objects[kNumberOfLocks];
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700708
709 // Initialize = allocate.
710 for (size_t i = 0; i < kNumberOfLocks; ++i) {
711 counts[i] = 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700712 fat[i] = false;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700713 objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700714 }
715
716 for (size_t i = 0; i < kIterations; ++i) {
717 // Select which lock to update.
718 size_t index = r.next() % kNumberOfLocks;
719
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700720 // Make lock fat?
721 if (!fat[index] && (r.next() % kMoveToFat == 0)) {
722 fat[index] = true;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700723 objects[index]->IdentityHashCode();
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700724
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700725 LockWord lock_iter = objects[index]->GetLockWord(false);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700726 LockWord::LockState iter_state = lock_iter.GetState();
727 if (counts[index] == 0) {
728 EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
729 } else {
730 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
731 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700732 } else {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800733 bool take_lock; // Whether to lock or unlock in this step.
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700734 if (counts[index] == 0) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800735 take_lock = true;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700736 } else if (counts[index] == kThinLockLoops) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800737 take_lock = false;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700738 } else {
739 // Randomly.
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800740 take_lock = r.next() % 2 == 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700741 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700742
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800743 if (take_lock) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700744 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
745 self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700746 counts[index]++;
747 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700748 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700749 art_quick_unlock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700750 counts[index]--;
751 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700752
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700753 EXPECT_FALSE(self->IsExceptionPending());
754
755 // Check the new state.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700756 LockWord lock_iter = objects[index]->GetLockWord(true);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700757 LockWord::LockState iter_state = lock_iter.GetState();
758 if (fat[index]) {
759 // Abuse MonitorInfo.
760 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700761 MonitorInfo info(objects[index].Get());
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700762 EXPECT_EQ(counts[index], info.entry_count_) << index;
763 } else {
764 if (counts[index] > 0) {
765 EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
766 EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
767 } else {
768 EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
769 }
770 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700771 }
772 }
773
774 // Unlock the remaining count times and then check it's unlocked. Then deallocate.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700775 // Go reverse order to correctly handle Handles.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700776 for (size_t i = 0; i < kNumberOfLocks; ++i) {
777 size_t index = kNumberOfLocks - 1 - i;
778 size_t count = counts[index];
779 while (count > 0) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700780 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
781 self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700782 count--;
783 }
784
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700785 LockWord lock_after4 = objects[index]->GetLockWord(false);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700786 LockWord::LockState new_state4 = lock_after4.GetState();
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700787 EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
788 || LockWord::LockState::kFatLocked == new_state4);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700789 }
790
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700791 // Test done.
Andreas Gampe525cde22014-04-22 15:44:50 -0700792#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -0800793 UNUSED(test);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700794 LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe525cde22014-04-22 15:44:50 -0700795 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700796 std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe525cde22014-04-22 15:44:50 -0700797#endif
798}
799
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700800TEST_F(StubTest, UnlockObject) {
Andreas Gampe369810a2015-01-14 19:53:31 -0800801 // This will lead to monitor error messages in the log.
802 ScopedLogSeverity sls(LogSeverity::FATAL);
803
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700804 TestUnlockObject(this);
805}
Andreas Gampe525cde22014-04-22 15:44:50 -0700806
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200807#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
808 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800809extern "C" void art_quick_check_instance_of(void);
Andreas Gampe525cde22014-04-22 15:44:50 -0700810#endif
811
// Exercises the kQuickCheckInstanceOf entrypoint. A successful type check
// leaves no exception pending; a failed check raises an exception, which the
// test then clears before continuing.
TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  const uintptr_t art_quick_check_instance_of =
      StubTest::GetEntrypoint(self, kQuickCheckInstanceOf);

  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  VariableSizedHandleScope hs(soa.Self());
  Handle<mirror::Class> klass_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> klass_str(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/String;")));
  Handle<mirror::Class> klass_list(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/util/List;")));
  Handle<mirror::Class> klass_cloneable(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Cloneable;")));
  Handle<mirror::Class> klass_array_list(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/util/ArrayList;")));
  Handle<mirror::Object> obj(hs.NewHandle(klass_obj->AllocObject(soa.Self())));
  Handle<mirror::String> string(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "ABCD")));
  Handle<mirror::Object> array_list(hs.NewHandle(klass_array_list->AllocObject(soa.Self())));

  EXPECT_FALSE(self->IsExceptionPending());

  // Expected true: Test object instance of java.lang.Object.
  Invoke3(reinterpret_cast<size_t>(obj.Get()),
          reinterpret_cast<size_t>(klass_obj.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected true: Test string instance of java.lang.String.
  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_str.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected true: Test string instance of java.lang.Object.
  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_obj.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected false: Test object instance of java.lang.String.
  Invoke3(reinterpret_cast<size_t>(obj.Get()),
          reinterpret_cast<size_t>(klass_str.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Expected true: Test ArrayList instance of the java.util.List interface.
  Invoke3(reinterpret_cast<size_t>(array_list.Get()),
          reinterpret_cast<size_t>(klass_list.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected true: Test ArrayList instance of the java.lang.Cloneable interface.
  Invoke3(reinterpret_cast<size_t>(array_list.Get()),
          reinterpret_cast<size_t>(klass_cloneable.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected false: Test string instance of java.util.ArrayList.
  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_array_list.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Expected false: Test string instance of java.lang.Cloneable.
  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_cloneable.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
910
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700911TEST_F(StubTest, AllocObject) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200912#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
913 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe369810a2015-01-14 19:53:31 -0800914 // This will lead to OOM error messages in the log.
915 ScopedLogSeverity sls(LogSeverity::FATAL);
916
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700917 // TODO: Check the "Unresolved" allocation stubs
918
919 Thread* self = Thread::Current();
920 // Create an object
921 ScopedObjectAccess soa(self);
922 // garbage is created during ClassLinker::Init
923
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700924 StackHandleScope<2> hs(soa.Self());
925 Handle<mirror::Class> c(
926 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700927
928 // Play with it...
929
930 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700931 {
Nicolas Geoffray0d3998b2017-01-12 15:35:12 +0000932 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
933 StubTest::GetEntrypoint(self, kQuickAllocObjectWithChecks),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700934 self);
935
936 EXPECT_FALSE(self->IsExceptionPending());
937 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
938 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700939 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700940 VerifyObject(obj);
941 }
942
943 {
Mathieu Chartiere401d142015-04-22 13:56:20 -0700944 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700945 StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700946 self);
947
948 EXPECT_FALSE(self->IsExceptionPending());
949 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
950 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700951 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700952 VerifyObject(obj);
953 }
954
955 {
Mathieu Chartiere401d142015-04-22 13:56:20 -0700956 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700957 StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700958 self);
959
960 EXPECT_FALSE(self->IsExceptionPending());
961 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
962 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700963 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700964 VerifyObject(obj);
965 }
966
967 // Failure tests.
968
969 // Out-of-memory.
970 {
971 Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);
972
973 // Array helps to fill memory faster.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700974 Handle<mirror::Class> ca(
975 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
976
977 // Use arbitrary large amount for now.
978 static const size_t kMaxHandles = 1000000;
Ian Rogers700a4022014-05-19 16:49:03 -0700979 std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700980
981 std::vector<Handle<mirror::Object>> handles;
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700982 // Start allocating with 128K
983 size_t length = 128 * KB / 4;
984 while (length > 10) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700985 Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
986 mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
987 if (self->IsExceptionPending() || h.Get() == nullptr) {
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700988 self->ClearException();
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700989
990 // Try a smaller length
991 length = length / 8;
992 // Use at most half the reported free space.
993 size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
994 if (length * 8 > mem) {
995 length = mem / 8;
996 }
997 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700998 handles.push_back(h);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700999 }
1000 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001001 LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001002
1003 // Allocate simple objects till it fails.
1004 while (!self->IsExceptionPending()) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001005 Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
1006 if (!self->IsExceptionPending() && h.Get() != nullptr) {
1007 handles.push_back(h);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001008 }
1009 }
1010 self->ClearException();
1011
Mathieu Chartiere401d142015-04-22 13:56:20 -07001012 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001013 StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001014 self);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001015 EXPECT_TRUE(self->IsExceptionPending());
1016 self->ClearException();
1017 EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001018 }
1019
1020 // Tests done.
1021#else
1022 LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
1023 // Force-print to std::cout so it's also outside the logcat.
1024 std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
1025#endif
1026}
1027
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001028TEST_F(StubTest, AllocObjectArray) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001029#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1030 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001031 // TODO: Check the "Unresolved" allocation stubs
1032
Andreas Gampe369810a2015-01-14 19:53:31 -08001033 // This will lead to OOM error messages in the log.
1034 ScopedLogSeverity sls(LogSeverity::FATAL);
1035
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001036 Thread* self = Thread::Current();
1037 // Create an object
1038 ScopedObjectAccess soa(self);
1039 // garbage is created during ClassLinker::Init
1040
Nicolas Geoffray8d91ac32017-01-18 18:07:15 +00001041 StackHandleScope<1> hs(self);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001042 Handle<mirror::Class> c(
1043 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001044
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001045 // Play with it...
1046
1047 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001048
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001049 {
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001050 // We can use null in the second argument as we do not need a method here (not used in
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001051 // resolved/initialized cases)
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001052 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
1053 reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -07001054 StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001055 self);
David Sehr709b0702016-10-13 09:12:37 -07001056 EXPECT_FALSE(self->IsExceptionPending()) << mirror::Object::PrettyTypeOf(self->GetException());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001057 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1058 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
1059 EXPECT_TRUE(obj->IsArrayInstance());
1060 EXPECT_TRUE(obj->IsObjectArray());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001061 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001062 VerifyObject(obj);
1063 mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
1064 EXPECT_EQ(array->GetLength(), 10);
1065 }
1066
1067 // Failure tests.
1068
1069 // Out-of-memory.
1070 {
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001071 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001072 GB, // that should fail...
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001073 reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -07001074 StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001075 self);
1076
1077 EXPECT_TRUE(self->IsExceptionPending());
1078 self->ClearException();
1079 EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
1080 }
1081
1082 // Tests done.
1083#else
1084 LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
1085 // Force-print to std::cout so it's also outside the logcat.
1086 std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
1087#endif
1088}
1089
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001090
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001091TEST_F(StubTest, StringCompareTo) {
jessicahandojo3aaa37b2016-07-29 14:46:37 -07001092 TEST_DISABLED_FOR_STRING_COMPRESSION();
Scott Wakelingc25cbf12016-04-18 09:00:11 +01001093 // There is no StringCompareTo runtime entrypoint for __arm__ or __aarch64__.
1094#if defined(__i386__) || defined(__mips__) || \
1095 (defined(__x86_64__) && !defined(__APPLE__))
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001096 // TODO: Check the "Unresolved" allocation stubs
1097
1098 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -07001099
1100 const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);
1101
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001102 ScopedObjectAccess soa(self);
1103 // garbage is created during ClassLinker::Init
1104
1105 // Create some strings
1106 // Use array so we can index into it and use a matrix for expected results
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001107 // Setup: The first half is standard. The second half uses a non-zero offset.
1108 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001109 const char* c[] = { "", "", "a", "aa", "ab",
Serban Constantinescu86797a72014-06-19 16:17:56 +01001110 "aacaacaacaacaacaac", // This one's under the default limit to go to __memcmp16.
1111 "aacaacaacaacaacaacaacaacaacaacaacaac", // This one's over.
1112 "aacaacaacaacaacaacaacaacaacaacaacaaca" }; // As is this one. We need a separate one to
1113 // defeat object-equal optimizations.
Jeff Hao848f70a2014-01-15 13:49:50 -08001114 static constexpr size_t kStringCount = arraysize(c);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001115
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001116 StackHandleScope<kStringCount> hs(self);
1117 Handle<mirror::String> s[kStringCount];
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001118
Jeff Hao848f70a2014-01-15 13:49:50 -08001119 for (size_t i = 0; i < kStringCount; ++i) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001120 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001121 }
1122
1123 // TODO: wide characters
1124
1125 // Matrix of expectations. First component is first parameter. Note we only check against the
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001126 // sign, not the value. As we are testing random offsets, we need to compute this and need to
1127 // rely on String::CompareTo being correct.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001128 int32_t expected[kStringCount][kStringCount];
1129 for (size_t x = 0; x < kStringCount; ++x) {
1130 for (size_t y = 0; y < kStringCount; ++y) {
1131 expected[x][y] = s[x]->CompareTo(s[y].Get());
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001132 }
1133 }
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001134
1135 // Play with it...
1136
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001137 for (size_t x = 0; x < kStringCount; ++x) {
1138 for (size_t y = 0; y < kStringCount; ++y) {
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001139 // Test string_compareto x y
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001140 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
1141 reinterpret_cast<size_t>(s[y].Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001142 art_quick_string_compareto, self);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001143
1144 EXPECT_FALSE(self->IsExceptionPending());
1145
1146 // The result is a 32b signed integer
1147 union {
1148 size_t r;
1149 int32_t i;
1150 } conv;
1151 conv.r = result;
1152 int32_t e = expected[x][y];
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001153 EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1154 conv.r;
1155 EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1156 conv.r;
1157 EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1158 conv.r;
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001159 }
1160 }
1161
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001162 // TODO: Deallocate things.
1163
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001164 // Tests done.
1165#else
1166 LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
1167 // Force-print to std::cout so it's also outside the logcat.
1168 std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
1169 std::endl;
1170#endif
1171}
1172
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001173
Mathieu Chartierc7853442015-03-27 14:35:38 -07001174static void GetSetBooleanStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001175 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001176 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001177#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1178 (defined(__x86_64__) && !defined(__APPLE__))
Fred Shih37f05ef2014-07-16 18:38:08 -07001179 constexpr size_t num_values = 5;
1180 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1181
1182 for (size_t i = 0; i < num_values; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001183 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001184 static_cast<size_t>(values[i]),
1185 0U,
1186 StubTest::GetEntrypoint(self, kQuickSet8Static),
1187 self,
1188 referrer);
1189
Mathieu Chartierc7853442015-03-27 14:35:38 -07001190 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001191 0U, 0U,
1192 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1193 self,
1194 referrer);
1195 // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
1196 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1197 }
1198#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001199 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001200 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1201 // Force-print to std::cout so it's also outside the logcat.
1202 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1203#endif
1204}
Mathieu Chartiere401d142015-04-22 13:56:20 -07001205static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Mathieu Chartierc7853442015-03-27 14:35:38 -07001206 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001207 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001208#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1209 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001210 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001211
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001212 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001213 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001214 static_cast<size_t>(values[i]),
1215 0U,
1216 StubTest::GetEntrypoint(self, kQuickSet8Static),
1217 self,
1218 referrer);
1219
Mathieu Chartierc7853442015-03-27 14:35:38 -07001220 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001221 0U, 0U,
1222 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1223 self,
1224 referrer);
1225 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1226 }
1227#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001228 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001229 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1230 // Force-print to std::cout so it's also outside the logcat.
1231 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1232#endif
1233}
1234
1235
Mathieu Chartierc7853442015-03-27 14:35:38 -07001236static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001237 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001238 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001239#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1240 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001241 uint8_t values[] = { 0, true, 2, 128, 0xFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001242
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001243 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001244 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001245 reinterpret_cast<size_t>(obj->Get()),
1246 static_cast<size_t>(values[i]),
1247 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1248 self,
1249 referrer);
1250
Mathieu Chartierc7853442015-03-27 14:35:38 -07001251 uint8_t res = f->GetBoolean(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001252 EXPECT_EQ(values[i], res) << "Iteration " << i;
1253
Mathieu Chartierc7853442015-03-27 14:35:38 -07001254 f->SetBoolean<false>(obj->Get(), res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001255
Mathieu Chartierc7853442015-03-27 14:35:38 -07001256 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001257 reinterpret_cast<size_t>(obj->Get()),
1258 0U,
1259 StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
1260 self,
1261 referrer);
1262 EXPECT_EQ(res, static_cast<uint8_t>(res2));
1263 }
1264#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001265 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001266 LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
1267 // Force-print to std::cout so it's also outside the logcat.
1268 std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1269#endif
1270}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001271static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001272 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001273 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001274#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1275 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001276 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001277
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001278 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001279 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001280 reinterpret_cast<size_t>(obj->Get()),
1281 static_cast<size_t>(values[i]),
1282 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1283 self,
1284 referrer);
1285
Mathieu Chartierc7853442015-03-27 14:35:38 -07001286 int8_t res = f->GetByte(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001287 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001288 f->SetByte<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001289
Mathieu Chartierc7853442015-03-27 14:35:38 -07001290 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001291 reinterpret_cast<size_t>(obj->Get()),
1292 0U,
1293 StubTest::GetEntrypoint(self, kQuickGetByteInstance),
1294 self,
1295 referrer);
1296 EXPECT_EQ(res, static_cast<int8_t>(res2));
1297 }
1298#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001299 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001300 LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
1301 // Force-print to std::cout so it's also outside the logcat.
1302 std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1303#endif
1304}
1305
Mathieu Chartiere401d142015-04-22 13:56:20 -07001306static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001307 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001308 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001309#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1310 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001311 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001312
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001313 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001314 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001315 static_cast<size_t>(values[i]),
1316 0U,
1317 StubTest::GetEntrypoint(self, kQuickSet16Static),
1318 self,
1319 referrer);
1320
Mathieu Chartierc7853442015-03-27 14:35:38 -07001321 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001322 0U, 0U,
1323 StubTest::GetEntrypoint(self, kQuickGetCharStatic),
1324 self,
1325 referrer);
1326
1327 EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
1328 }
1329#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001330 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001331 LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
1332 // Force-print to std::cout so it's also outside the logcat.
1333 std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1334#endif
1335}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001336static void GetSetShortStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001337 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001338 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001339#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1340 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001341 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001342
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001343 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001344 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001345 static_cast<size_t>(values[i]),
1346 0U,
1347 StubTest::GetEntrypoint(self, kQuickSet16Static),
1348 self,
1349 referrer);
1350
Mathieu Chartierc7853442015-03-27 14:35:38 -07001351 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001352 0U, 0U,
1353 StubTest::GetEntrypoint(self, kQuickGetShortStatic),
1354 self,
1355 referrer);
1356
1357 EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
1358 }
1359#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001360 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001361 LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
1362 // Force-print to std::cout so it's also outside the logcat.
1363 std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1364#endif
1365}
1366
Mathieu Chartierc7853442015-03-27 14:35:38 -07001367static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001368 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001369 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001370#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1371 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001372 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001373
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001374 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001375 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001376 reinterpret_cast<size_t>(obj->Get()),
1377 static_cast<size_t>(values[i]),
1378 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1379 self,
1380 referrer);
1381
Mathieu Chartierc7853442015-03-27 14:35:38 -07001382 uint16_t res = f->GetChar(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001383 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001384 f->SetChar<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001385
Mathieu Chartierc7853442015-03-27 14:35:38 -07001386 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001387 reinterpret_cast<size_t>(obj->Get()),
1388 0U,
1389 StubTest::GetEntrypoint(self, kQuickGetCharInstance),
1390 self,
1391 referrer);
1392 EXPECT_EQ(res, static_cast<uint16_t>(res2));
1393 }
1394#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001395 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001396 LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
1397 // Force-print to std::cout so it's also outside the logcat.
1398 std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1399#endif
1400}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001401static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001402 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001403 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001404#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1405 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001406 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001407
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001408 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001409 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001410 reinterpret_cast<size_t>(obj->Get()),
1411 static_cast<size_t>(values[i]),
1412 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1413 self,
1414 referrer);
1415
Mathieu Chartierc7853442015-03-27 14:35:38 -07001416 int16_t res = f->GetShort(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001417 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001418 f->SetShort<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001419
Mathieu Chartierc7853442015-03-27 14:35:38 -07001420 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001421 reinterpret_cast<size_t>(obj->Get()),
1422 0U,
1423 StubTest::GetEntrypoint(self, kQuickGetShortInstance),
1424 self,
1425 referrer);
1426 EXPECT_EQ(res, static_cast<int16_t>(res2));
1427 }
1428#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001429 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001430 LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
1431 // Force-print to std::cout so it's also outside the logcat.
1432 std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1433#endif
1434}
1435
// Round-trips representative 32-bit values through the quick set32/get32
// static-field stubs and verifies each read-back.
static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
                           StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Covers zero, small values, and the full-width pattern 0xFFFFFFFF.
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the quick 32-bit static setter stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet32Static),
                              self,
                              referrer);

    // Read back through the quick 32-bit static getter stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet32Static),
                                           self,
                                           referrer);

#if defined(__mips__) && defined(__LP64__)
    // On MIPS64 only the low 32 bits of the returned word are compared —
    // presumably the upper half carries ABI sign-extension; confirm against
    // the n64 calling convention.
    EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
#else
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
#endif
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1470
1471
Mathieu Chartierc7853442015-03-27 14:35:38 -07001472static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001473 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001474 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001475#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1476 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001477 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001478
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001479 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001480 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001481 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001482 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001483 StubTest::GetEntrypoint(self, kQuickSet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001484 self,
1485 referrer);
1486
Mathieu Chartierc7853442015-03-27 14:35:38 -07001487 int32_t res = f->GetInt(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001488 EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;
1489
1490 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001491 f->SetInt<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001492
Mathieu Chartierc7853442015-03-27 14:35:38 -07001493 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001494 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001495 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001496 StubTest::GetEntrypoint(self, kQuickGet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001497 self,
1498 referrer);
1499 EXPECT_EQ(res, static_cast<int32_t>(res2));
1500 }
1501#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001502 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001503 LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
1504 // Force-print to std::cout so it's also outside the logcat.
1505 std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1506#endif
1507}
1508
1509
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))

// Helper: stores 'val' into the static object field with dex index 'f_idx'
// via the quick set-object stub, reads it back via the quick get-object
// stub, and expects the identical reference.
static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Store the reference through the object static setter stub.
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickSetObjStatic),
                            self,
                            referrer);

  // Read it back through the object static getter stub.
  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjStatic),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif
1532
Mathieu Chartiere401d142015-04-22 13:56:20 -07001533static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001534 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001535 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001536#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1537 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001538 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001539
1540 // Allocate a string object for simplicity.
1541 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartierc7853442015-03-27 14:35:38 -07001542 set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001543
Mathieu Chartierc7853442015-03-27 14:35:38 -07001544 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001545#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001546 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001547 LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
1548 // Force-print to std::cout so it's also outside the logcat.
1549 std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
1550#endif
1551}
1552
1553
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
// Helper: stores 'val' into instance field 'f' of object 'trg' via the quick
// set-object stub, reads it back via the quick get-object stub, and also
// cross-checks against a direct (non-stub) field read.
static void set_and_check_instance(ArtField* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, ArtMethod* referrer,
                                   StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Store the reference through the object instance setter stub.
  test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            StubTest::GetEntrypoint(self, kQuickSetObjInstance),
                            self,
                            referrer);

  // Read it back through the object instance getter stub.
  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjInstance),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  // Independent check via direct field access.
  EXPECT_OBJ_PTR_EQ(val, f->GetObj(trg));
}
#endif
1579
Mathieu Chartierc7853442015-03-27 14:35:38 -07001580static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001581 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001582 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001583#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1584 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001585 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001586
1587 // Allocate a string object for simplicity.
1588 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001589 set_and_check_instance(f, obj->Get(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001590
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001591 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001592#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001593 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001594 LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
1595 // Force-print to std::cout so it's also outside the logcat.
1596 std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
1597#endif
1598}
1599
1600
Calin Juravle872ab3f2015-10-02 07:27:51 +01001601// TODO: Complete these tests for 32b architectures
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001602
// Round-trips representative 64-bit values through the quick set64/get64
// static-field stubs. Only built for 64-bit targets (see TODO above about
// completing these tests for 32b architectures).
static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
                           StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) \
    || defined(__aarch64__)
  // Covers zero, small values, and wide bit patterns.
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // 64 bit FieldSet stores the set value in the second register.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              values[i],
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet64Static),
                              self,
                              referrer);

    // Read back through the quick 64-bit static getter stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet64Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1634
1635
Mathieu Chartierc7853442015-03-27 14:35:38 -07001636static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001637 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001638 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001639#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
1640 defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001641 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001642
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001643 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001644 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001645 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001646 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001647 StubTest::GetEntrypoint(self, kQuickSet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001648 self,
1649 referrer);
1650
Mathieu Chartierc7853442015-03-27 14:35:38 -07001651 int64_t res = f->GetLong(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001652 EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;
1653
1654 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001655 f->SetLong<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001656
Mathieu Chartierc7853442015-03-27 14:35:38 -07001657 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001658 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001659 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001660 StubTest::GetEntrypoint(self, kQuickGet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001661 self,
1662 referrer);
1663 EXPECT_EQ(res, static_cast<int64_t>(res2));
1664 }
1665#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001666 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001667 LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
1668 // Force-print to std::cout so it's also outside the logcat.
1669 std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1670#endif
1671}
1672
// Drives the get/set stub tests for every field of primitive type
// 'test_type' declared by the "AllFields" test class — first the static
// fields, then the instance fields. Fields of other types are skipped, so
// each TEST_F below exercises exactly one primitive width per call.
static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // garbage is created during ClassLinker::Init

  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);

  ScopedObjectAccess soa(self);
  StackHandleScope<3> hs(self);
  // Handle-wrap the test object and its class so they survive GC.
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer
  ArtMethod* m = c->GetDirectMethod(0, kRuntimePointerSize);

  // Play with it...

  // Static fields.
  for (ArtField& f : c->GetSFields()) {
    Primitive::Type type = f.GetTypeAsPrimitiveType();
    // Only exercise fields of the requested type on this pass.
    if (test_type != type) {
      continue;
    }
    switch (type) {
      case Primitive::Type::kPrimBoolean:
        GetSetBooleanStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimByte:
        GetSetByteStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimChar:
        GetSetCharStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimShort:
        GetSetShortStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimInt:
        GetSet32Static(&f, self, m, test);
        break;
      case Primitive::Type::kPrimLong:
        GetSet64Static(&f, self, m, test);
        break;
      case Primitive::Type::kPrimNot:
        // Don't try array.
        if (f.GetTypeDescriptor()[0] != '[') {
          GetSetObjStatic(&f, self, m, test);
        }
        break;
      default:
        break;  // Skip.
    }
  }

  // Instance fields.
  for (ArtField& f : c->GetIFields()) {
    Primitive::Type type = f.GetTypeAsPrimitiveType();
    // Only exercise fields of the requested type on this pass.
    if (test_type != type) {
      continue;
    }
    switch (type) {
      case Primitive::Type::kPrimBoolean:
        GetSetBooleanInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimByte:
        GetSetByteInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimChar:
        GetSetCharInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimShort:
        GetSetShortInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimInt:
        GetSet32Instance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimLong:
        GetSet64Instance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimNot:
        // Don't try array.
        if (f.GetTypeDescriptor()[0] != '[') {
          GetSetObjInstance(&obj, &f, self, m, test);
        }
        break;
      default:
        break;  // Skip.
    }
  }

  // TODO: Deallocate things.
}
1765
// Exercises the 8-bit field stubs (boolean and byte, static and instance).
TEST_F(StubTest, Fields8) {
  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  // Start the runtime with the AllFields test dex so the stubs can resolve
  // real fields.
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimBoolean);
  TestFields(self, this, Primitive::Type::kPrimByte);
}
1777
// Exercises the 16-bit field stubs (char and short, static and instance).
TEST_F(StubTest, Fields16) {
  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  // Start the runtime with the AllFields test dex so the stubs can resolve
  // real fields.
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimChar);
  TestFields(self, this, Primitive::Type::kPrimShort);
}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001789
// Exercises the 32-bit (int) field stubs, static and instance.
TEST_F(StubTest, Fields32) {
  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  // Start the runtime with the AllFields test dex so the stubs can resolve
  // real fields.
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimInt);
}
1800
// Exercises the object-reference field stubs, static and instance.
TEST_F(StubTest, FieldsObj) {
  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  // Start the runtime with the AllFields test dex so the stubs can resolve
  // real fields.
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimNot);
}
1811
// Exercises the 64-bit (long) field stubs, static and instance.
TEST_F(StubTest, Fields64) {
  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  // Start the runtime with the AllFields test dex so the stubs can resolve
  // real fields.
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimLong);
}
1822
// Disabled, b/27991555.
// FIXME: Hacking the entry point to point to art_quick_to_interpreter_bridge is broken.
// The bridge calls through to GetCalleeSaveMethodCaller() which looks up the pre-header
// and gets a bogus OatQuickMethodHeader* pointing into our assembly code just before
// the bridge and uses that to check for inlined frames, crashing in the process.
//
// Exercises two invocation paths through the stubs, using List.contains() /
// ArrayList.contains() as the test methods:
//   1. the IMT conflict trampoline, driven by a hand-built ImtConflictTable, and
//   2. the interface invocation trampoline (with access check).
TEST_F(StubTest, DISABLED_IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(
      arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get representation.
  ArtMethod* contains_amethod = jni::DecodeArtMethod(contains_jmethod);

  // Patch up ArrayList.contains: if it has no compiled code, point its entry point at the
  // quick-to-interpreter bridge so the trampolines below have something to call.
  // NOTE(review): this is the hack the FIXME above (b/27991555) refers to.
  if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(
      list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get representation of the interface method.
  ArtMethod* inf_contains = jni::DecodeArtMethod(inf_contains_jmethod);

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains.

  // We construct the ImtConflictTable ourselves, as we cannot go into the runtime stub
  // that will create it: the runtime stub expects to be called by compiled code.
  LinearAlloc* linear_alloc = Runtime::Current()->GetLinearAlloc();
  ArtMethod* conflict_method = Runtime::Current()->CreateImtConflictMethod(linear_alloc);
  ImtConflictTable* empty_conflict_table =
      Runtime::Current()->GetClassLinker()->CreateImtConflictTable(/*count*/0u, linear_alloc);
  // Grow the empty table by one entry mapping the interface method (List.contains) to the
  // implementation (ArrayList.contains), placement-new'd into linear-alloc memory.
  void* data = linear_alloc->Alloc(
      self,
      ImtConflictTable::ComputeSizeWithOneMoreEntry(empty_conflict_table, kRuntimePointerSize));
  ImtConflictTable* new_table = new (data) ImtConflictTable(
      empty_conflict_table, inf_contains, contains_amethod, kRuntimePointerSize);
  conflict_method->SetImtConflictTable(new_table, kRuntimePointerSize);

  // Invoke the conflict trampoline; the hidden argument is the interface method's dex
  // method index. The fresh ArrayList does not contain obj, so this must return false.
  size_t result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << mirror::Object::PrettyTypeOf(self->GetException());

  // Contains: after the add, the same call must now report true.

  result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline

  // Resolve-and-invoke path: pass the dex method index directly; the list contains obj.
  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                                       kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // The list does not contain itself, so this must return false.
  result = Invoke3WithReferrer(
      static_cast<size_t>(inf_contains->GetDexMethodIndex()),
      reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
      StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
      contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1966
Andreas Gampe6aac3552014-06-09 14:55:53 -07001967TEST_F(StubTest, StringIndexOf) {
Chris Larsencf283da2016-01-19 16:45:35 -08001968#if defined(__arm__) || defined(__aarch64__) || defined(__mips__)
Andreas Gampe6aac3552014-06-09 14:55:53 -07001969 Thread* self = Thread::Current();
1970 ScopedObjectAccess soa(self);
1971 // garbage is created during ClassLinker::Init
1972
1973 // Create some strings
1974 // Use array so we can index into it and use a matrix for expected results
1975 // Setup: The first half is standard. The second half uses a non-zero offset.
1976 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001977 const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
1978 static constexpr size_t kStringCount = arraysize(c_str);
1979 const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
1980 static constexpr size_t kCharCount = arraysize(c_char);
Andreas Gampe6aac3552014-06-09 14:55:53 -07001981
1982 StackHandleScope<kStringCount> hs(self);
1983 Handle<mirror::String> s[kStringCount];
1984
1985 for (size_t i = 0; i < kStringCount; ++i) {
1986 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
1987 }
1988
1989 // Matrix of expectations. First component is first parameter. Note we only check against the
1990 // sign, not the value. As we are testing random offsets, we need to compute this and need to
1991 // rely on String::CompareTo being correct.
1992 static constexpr size_t kMaxLen = 9;
1993 DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";
1994
1995 // Last dimension: start, offset by 1.
1996 int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
1997 for (size_t x = 0; x < kStringCount; ++x) {
1998 for (size_t y = 0; y < kCharCount; ++y) {
1999 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2000 expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
2001 }
2002 }
2003 }
2004
2005 // Play with it...
2006
2007 for (size_t x = 0; x < kStringCount; ++x) {
2008 for (size_t y = 0; y < kCharCount; ++y) {
2009 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2010 int32_t start = static_cast<int32_t>(z) - 1;
2011
2012 // Test string_compareto x y
2013 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
Andreas Gampe29b38412014-08-13 00:15:43 -07002014 StubTest::GetEntrypoint(self, kQuickIndexOf), self);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002015
2016 EXPECT_FALSE(self->IsExceptionPending());
2017
2018 // The result is a 32b signed integer
2019 union {
2020 size_t r;
2021 int32_t i;
2022 } conv;
2023 conv.r = result;
2024
2025 EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
2026 c_char[y] << " @ " << start;
2027 }
2028 }
2029 }
2030
2031 // TODO: Deallocate things.
2032
2033 // Tests done.
2034#else
2035 LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
2036 // Force-print to std::cout so it's also outside the logcat.
2037 std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe51f76352014-05-21 08:28:48 -07002038#endif
2039}
2040
Roland Levillain02b75802016-07-13 11:54:35 +01002041// TODO: Exercise the ReadBarrierMarkRegX entry points.
2042
Man Cao1aee9002015-07-14 22:31:42 -07002043TEST_F(StubTest, ReadBarrier) {
2044#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2045 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2046 Thread* self = Thread::Current();
2047
2048 const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);
2049
2050 // Create an object
2051 ScopedObjectAccess soa(self);
2052 // garbage is created during ClassLinker::Init
2053
2054 StackHandleScope<2> hs(soa.Self());
2055 Handle<mirror::Class> c(
2056 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
2057
2058 // Build an object instance
2059 Handle<mirror::Object> obj(hs.NewHandle(c->AllocObject(soa.Self())));
2060
2061 EXPECT_FALSE(self->IsExceptionPending());
2062
2063 size_t result = Invoke3(0U, reinterpret_cast<size_t>(obj.Get()),
2064 mirror::Object::ClassOffset().SizeValue(), readBarrierSlow, self);
2065
2066 EXPECT_FALSE(self->IsExceptionPending());
2067 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2068 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2069 EXPECT_EQ(klass, obj->GetClass());
2070
2071 // Tests done.
2072#else
2073 LOG(INFO) << "Skipping read_barrier_slow";
2074 // Force-print to std::cout so it's also outside the logcat.
2075 std::cout << "Skipping read_barrier_slow" << std::endl;
2076#endif
2077}
2078
Roland Levillain0d5a2812015-11-13 10:07:31 +00002079TEST_F(StubTest, ReadBarrierForRoot) {
2080#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2081 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2082 Thread* self = Thread::Current();
2083
2084 const uintptr_t readBarrierForRootSlow =
2085 StubTest::GetEntrypoint(self, kQuickReadBarrierForRootSlow);
2086
2087 // Create an object
2088 ScopedObjectAccess soa(self);
2089 // garbage is created during ClassLinker::Init
2090
2091 StackHandleScope<1> hs(soa.Self());
2092
2093 Handle<mirror::String> obj(
2094 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
2095
2096 EXPECT_FALSE(self->IsExceptionPending());
2097
2098 GcRoot<mirror::Class>& root = mirror::String::java_lang_String_;
2099 size_t result = Invoke3(reinterpret_cast<size_t>(&root), 0U, 0U, readBarrierForRootSlow, self);
2100
2101 EXPECT_FALSE(self->IsExceptionPending());
2102 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2103 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2104 EXPECT_EQ(klass, obj->GetClass());
2105
2106 // Tests done.
2107#else
2108 LOG(INFO) << "Skipping read_barrier_for_root_slow";
2109 // Force-print to std::cout so it's also outside the logcat.
2110 std::cout << "Skipping read_barrier_for_root_slow" << std::endl;
2111#endif
2112}
2113
Andreas Gampe525cde22014-04-22 15:44:50 -07002114} // namespace art