blob: bd51809c22f79d09f6c7513b971d035107db2a3b [file] [log] [blame]
Andreas Gampe525cde22014-04-22 15:44:50 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Mathieu Chartierc7853442015-03-27 14:35:38 -070019#include "art_field-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method-inl.h"
Andreas Gampe8228cdf2017-05-30 15:03:54 -070021#include "base/callee_save_type.h"
Andreas Gampe542451c2016-07-26 09:02:02 -070022#include "base/enums.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010023#include "class_linker-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070024#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070025#include "entrypoints/quick/quick_entrypoints_enum.h"
Andreas Gampe75a7db62016-09-26 12:04:26 -070026#include "imt_conflict_table.h"
Andreas Gampe13b27842016-11-07 16:48:23 -080027#include "jni_internal.h"
Nicolas Geoffray1004faa2016-03-23 14:28:30 +000028#include "linear_alloc.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070029#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070030#include "mirror/string-inl.h"
Mathieu Chartier0795f232016-09-27 18:43:30 -070031#include "scoped_thread_state_change-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070032
33namespace art {
34
35
// Test fixture for exercising the quick entrypoint stubs directly. It invokes a stub's
// machine code through hand-written per-architecture inline-assembly trampolines that
// mimic the quick calling convention (args in the right registers, referrer on the stack).
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      // Install a callee-save method for every save type the runtime does not have yet.
      for (uint32_t i = 0; i < static_cast<uint32_t>(CalleeSaveType::kLastCalleeSaveType); ++i) {
        CalleeSaveType type = CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  // Shrink the heap and force interpreter-only mode before the runtime starts.
  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Invoke a stub with three arguments and no referrer (referrer slot holds nullptr).
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             ArtMethod* referrer) {
    return Invoke3WithReferrerAndHidden(arg0, arg1, arg2, code, self, referrer, 0);
  }

  // TODO: Set up a frame according to referrer's specs.
  // Core trampoline: spills registers, builds a minimal quick frame with the referrer,
  // loads the args/self/hidden argument into the convention's registers, calls `code`,
  // and returns the stub's return value. The arm64 variant additionally verifies that
  // the callee preserved d8-d15; the outcome lands in fp_result (0 == preserved).
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
#define PUSH(reg) "push " # reg "\n\t .cfi_adjust_cfa_offset 4\n\t"
#define POP(reg) "pop " # reg "\n\t .cfi_adjust_cfa_offset -4\n\t"
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"  // This is a memory op, so do this early. If it is off of
                                      // esp, then we won't be able to access it after spilling.

        // Spill 6 registers.
        PUSH(%%ebx)
        PUSH(%%ecx)
        PUSH(%%edx)
        PUSH(%%esi)
        PUSH(%%edi)
        PUSH(%%ebp)

        // Store the inputs to the stack, but keep the referrer up top, less work.
        PUSH(%[referrer])     // Align stack.
        PUSH(%[referrer])     // Store referrer

        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[code])
        // Now read them back into the required registers.
        POP(%%edi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%eax)
        // Call is prepared now.

        "call *%%edi\n\t"     // Call the stub
        "addl $8, %%esp\n\t"  // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -8\n\t"

        // Restore 6 registers.
        POP(%%ebp)
        POP(%%edi)
        POP(%%esi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%ebx)

        : "=a" (result)
        // Use the result from eax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer]"r"(referrer), [hidden]"m"(hidden)
        // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory", "xmm7");  // clobber.
#undef PUSH
#undef POP
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"  // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"  // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"              // Call the stub
        "add sp, sp, #12\n\t"     // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"    // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"   // Save the result
        : [result] "=r" (result)
        // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #80\n\t"
        ".cfi_adjust_cfa_offset 80\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"
        // To be extra defensive, store x20. We do this because some of the stubs might make a
        // transition into the runtime via the blr instruction below and *not* save x20.
        "str x20, [sp, #64]\n\t"
        // 8 byte buffer

        "sub sp, sp, #16\n\t"        // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"  // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x19, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"           // Call the stub
        "mov x8, x0\n\t"       // Store result
        "add sp, sp, #16\n\t"  // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"  // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"  // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "ldr x20, [sp, #64]\n\t"
        "add sp, sp, #80\n\t"  // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -80\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"  // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"  // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
        // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
        // Leave one register unclobbered, which is needed for compiling with
        // -fstack-protector-strong. According to AAPCS64 registers x9-x15 are caller-saved,
        // which means we should unclobber one of the callee-saved registers that are unused.
        // Here we use x20.
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -24\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"
        "sw %[hidden], 20($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"
        "lw $s1, 16($sp)\n\t"
        "lw $t7, 20($sp)\n\t"
        "addiu $sp, $sp, 24\n\t"

        "jalr $t9\n\t"             // Call the stub.
        "nop\n\t"                  // Branch delay slot.
        "addiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"   // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -48\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"
        "sd %[hidden], 40($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"
        "ld $s1, 32($sp)\n\t"
        "ld $t0, 40($sp)\n\t"
        "daddiu $sp, $sp, 48\n\t"

        "jalr $t9\n\t"              // Call the stub.
        "nop\n\t"                   // Branch delay slot.
        "daddiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"   // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        // Instead of the aliases t0-t3, the register names $12-$15 are used in the clobber list
        // because t0-t3 are ambiguous.
        : "at", "v0", "v1", "$12", "$13", "$14", "$15", "s0", "s1", "s2", "s3", "s4", "s5", "s6",
          "s7", "t8", "t9", "k0", "k1", "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
#define PUSH(reg) "pushq " # reg "\n\t .cfi_adjust_cfa_offset 8\n\t"
#define POP(reg) "popq " # reg "\n\t .cfi_adjust_cfa_offset -8\n\t"
    // Note: Uses the native convention. We do a callee-save regimen by manually spilling and
    // restoring almost all registers.
    // TODO: Set the thread?
    __asm__ __volatile__(
        // Spill almost everything (except rax, rsp). 14 registers.
        PUSH(%%rbx)
        PUSH(%%rcx)
        PUSH(%%rdx)
        PUSH(%%rsi)
        PUSH(%%rdi)
        PUSH(%%rbp)
        PUSH(%%r8)
        PUSH(%%r9)
        PUSH(%%r10)
        PUSH(%%r11)
        PUSH(%%r12)
        PUSH(%%r13)
        PUSH(%%r14)
        PUSH(%%r15)

        PUSH(%[referrer])      // Push referrer & 16B alignment padding
        PUSH(%[referrer])

        // Now juggle the input registers.
        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[hidden])
        PUSH(%[code])
        POP(%%r8)
        POP(%%rax)
        POP(%%rdx)
        POP(%%rsi)
        POP(%%rdi)

        "call *%%r8\n\t"       // Call the stub
        "addq $16, %%rsp\n\t"  // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"

        POP(%%r15)
        POP(%%r14)
        POP(%%r13)
        POP(%%r12)
        POP(%%r11)
        POP(%%r10)
        POP(%%r9)
        POP(%%r8)
        POP(%%rbp)
        POP(%%rdi)
        POP(%%rsi)
        POP(%%rdx)
        POP(%%rcx)
        POP(%%rbx)

        : "=a" (result)
        // Use the result from rax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into some other
        // register. We can't use "b" (rbx), as ASAN uses this for the frame pointer.
        : "memory");  // We spill and restore (almost) all registers, so only mention memory here.
#undef PUSH
#undef POP
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // Publish the FPR-preservation check result (only set on arm64; 0 elsewhere).
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Read the entrypoint's code pointer out of the thread's quick entrypoint table.
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
    offset = GetThreadOffset<kRuntimePointerSize>(entrypoint).Int32Value();
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  // Result of the callee-saved-FPR verification from the last invoke; 0 means preserved.
  size_t fp_result;
};
543
544
Andreas Gampe525cde22014-04-22 15:44:50 -0700545TEST_F(StubTest, Memcpy) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200546#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
Andreas Gampe525cde22014-04-22 15:44:50 -0700547 Thread* self = Thread::Current();
548
549 uint32_t orig[20];
550 uint32_t trg[20];
551 for (size_t i = 0; i < 20; ++i) {
552 orig[i] = i;
553 trg[i] = 0;
554 }
555
556 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700557 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700558
559 EXPECT_EQ(orig[0], trg[0]);
560
561 for (size_t i = 1; i < 4; ++i) {
562 EXPECT_NE(orig[i], trg[i]);
563 }
564
565 for (size_t i = 4; i < 14; ++i) {
566 EXPECT_EQ(orig[i], trg[i]);
567 }
568
569 for (size_t i = 14; i < 20; ++i) {
570 EXPECT_NE(orig[i], trg[i]);
571 }
572
573 // TODO: Test overlapping?
574
575#else
576 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
577 // Force-print to std::cout so it's also outside the logcat.
578 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
579#endif
580}
581
Andreas Gampe525cde22014-04-22 15:44:50 -0700582TEST_F(StubTest, LockObject) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200583#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
584 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700585 static constexpr size_t kThinLockLoops = 100;
586
Andreas Gampe525cde22014-04-22 15:44:50 -0700587 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700588
589 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
590
Andreas Gampe525cde22014-04-22 15:44:50 -0700591 // Create an object
592 ScopedObjectAccess soa(self);
593 // garbage is created during ClassLinker::Init
594
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700595 StackHandleScope<2> hs(soa.Self());
596 Handle<mirror::String> obj(
597 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700598 LockWord lock = obj->GetLockWord(false);
599 LockWord::LockState old_state = lock.GetState();
600 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
601
Andreas Gampe29b38412014-08-13 00:15:43 -0700602 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700603
604 LockWord lock_after = obj->GetLockWord(false);
605 LockWord::LockState new_state = lock_after.GetState();
606 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700607 EXPECT_EQ(lock_after.ThinLockCount(), 0U); // Thin lock starts count at zero
608
609 for (size_t i = 1; i < kThinLockLoops; ++i) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700610 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700611
612 // Check we're at lock count i
613
614 LockWord l_inc = obj->GetLockWord(false);
615 LockWord::LockState l_inc_state = l_inc.GetState();
616 EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
617 EXPECT_EQ(l_inc.ThinLockCount(), i);
618 }
619
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700620 // Force a fat lock by running identity hashcode to fill up lock word.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700621 Handle<mirror::String> obj2(hs.NewHandle(
622 mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700623
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700624 obj2->IdentityHashCode();
625
Andreas Gampe29b38412014-08-13 00:15:43 -0700626 Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700627
628 LockWord lock_after2 = obj2->GetLockWord(false);
629 LockWord::LockState new_state2 = lock_after2.GetState();
630 EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
631 EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));
632
633 // Test done.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700634#else
635 LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
636 // Force-print to std::cout so it's also outside the logcat.
637 std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
638#endif
639}
640
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700641
// Tiny deterministic pseudo-random generator (multiplicative-congruential style)
// used to drive the lock/unlock stress test reproducibly.
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  // Advance the state and return it: (val_ * 48271) % 2147483647 + 13,
  // with ordinary uint32_t wrap-around on the multiply.
  uint32_t next() {
    const uint32_t scaled = val_ * 48271;
    val_ = scaled % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;  // Current state; public so tests could seed/inspect it directly.
};
653
654
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700655// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
656static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200657#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
658 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700659 static constexpr size_t kThinLockLoops = 100;
660
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700661 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700662
663 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
664 const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700665 // Create an object
666 ScopedObjectAccess soa(self);
667 // garbage is created during ClassLinker::Init
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700668 static constexpr size_t kNumberOfLocks = 10; // Number of objects = lock
669 StackHandleScope<kNumberOfLocks + 1> hs(self);
670 Handle<mirror::String> obj(
671 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700672 LockWord lock = obj->GetLockWord(false);
673 LockWord::LockState old_state = lock.GetState();
674 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
675
Andreas Gampe29b38412014-08-13 00:15:43 -0700676 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700677 // This should be an illegal monitor state.
678 EXPECT_TRUE(self->IsExceptionPending());
679 self->ClearException();
680
681 LockWord lock_after = obj->GetLockWord(false);
682 LockWord::LockState new_state = lock_after.GetState();
683 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);
Andreas Gampe525cde22014-04-22 15:44:50 -0700684
Andreas Gampe29b38412014-08-13 00:15:43 -0700685 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700686
687 LockWord lock_after2 = obj->GetLockWord(false);
688 LockWord::LockState new_state2 = lock_after2.GetState();
689 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);
690
Andreas Gampe29b38412014-08-13 00:15:43 -0700691 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700692
693 LockWord lock_after3 = obj->GetLockWord(false);
694 LockWord::LockState new_state3 = lock_after3.GetState();
695 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);
696
697 // Stress test:
698 // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
699 // each step.
700
701 RandGen r(0x1234);
702
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700703 constexpr size_t kIterations = 10000; // Number of iterations
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700704 constexpr size_t kMoveToFat = 1000; // Chance of 1:kMoveFat to make a lock fat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700705
706 size_t counts[kNumberOfLocks];
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700707 bool fat[kNumberOfLocks]; // Whether a lock should be thin or fat.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700708 Handle<mirror::String> objects[kNumberOfLocks];
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700709
710 // Initialize = allocate.
711 for (size_t i = 0; i < kNumberOfLocks; ++i) {
712 counts[i] = 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700713 fat[i] = false;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700714 objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700715 }
716
717 for (size_t i = 0; i < kIterations; ++i) {
718 // Select which lock to update.
719 size_t index = r.next() % kNumberOfLocks;
720
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700721 // Make lock fat?
722 if (!fat[index] && (r.next() % kMoveToFat == 0)) {
723 fat[index] = true;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700724 objects[index]->IdentityHashCode();
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700725
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700726 LockWord lock_iter = objects[index]->GetLockWord(false);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700727 LockWord::LockState iter_state = lock_iter.GetState();
728 if (counts[index] == 0) {
729 EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
730 } else {
731 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
732 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700733 } else {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800734 bool take_lock; // Whether to lock or unlock in this step.
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700735 if (counts[index] == 0) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800736 take_lock = true;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700737 } else if (counts[index] == kThinLockLoops) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800738 take_lock = false;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700739 } else {
740 // Randomly.
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800741 take_lock = r.next() % 2 == 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700742 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700743
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800744 if (take_lock) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700745 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
746 self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700747 counts[index]++;
748 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700749 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700750 art_quick_unlock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700751 counts[index]--;
752 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700753
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700754 EXPECT_FALSE(self->IsExceptionPending());
755
756 // Check the new state.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700757 LockWord lock_iter = objects[index]->GetLockWord(true);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700758 LockWord::LockState iter_state = lock_iter.GetState();
759 if (fat[index]) {
760 // Abuse MonitorInfo.
761 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700762 MonitorInfo info(objects[index].Get());
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700763 EXPECT_EQ(counts[index], info.entry_count_) << index;
764 } else {
765 if (counts[index] > 0) {
766 EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
767 EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
768 } else {
769 EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
770 }
771 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700772 }
773 }
774
775 // Unlock the remaining count times and then check it's unlocked. Then deallocate.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700776 // Go reverse order to correctly handle Handles.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700777 for (size_t i = 0; i < kNumberOfLocks; ++i) {
778 size_t index = kNumberOfLocks - 1 - i;
779 size_t count = counts[index];
780 while (count > 0) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700781 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
782 self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700783 count--;
784 }
785
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700786 LockWord lock_after4 = objects[index]->GetLockWord(false);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700787 LockWord::LockState new_state4 = lock_after4.GetState();
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700788 EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
789 || LockWord::LockState::kFatLocked == new_state4);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700790 }
791
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700792 // Test done.
Andreas Gampe525cde22014-04-22 15:44:50 -0700793#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -0800794 UNUSED(test);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700795 LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe525cde22014-04-22 15:44:50 -0700796 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700797 std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe525cde22014-04-22 15:44:50 -0700798#endif
799}
800
// Driver for the lock/unlock stress test defined above; exercises the
// art_quick_lock_object / art_quick_unlock_object entrypoints.
TEST_F(StubTest, UnlockObject) {
  // This will lead to monitor error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  TestUnlockObject(this);
}
Andreas Gampe525cde22014-04-22 15:44:50 -0700807
// Assembly entrypoint, only available on the ISAs listed below.
// NOTE(review): CheckCast below obtains the entrypoint via GetEntrypoint() and declares a local
// of the same name that shadows this symbol — this forward declaration appears unused; confirm.
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_check_instance_of(void);
#endif
812
// Exercises the check-instance-of stub: a failing check is expected to raise an exception,
// a passing check leaves no exception pending.
TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  const uintptr_t art_quick_check_instance_of =
      StubTest::GetEntrypoint(self, kQuickCheckInstanceOf);

  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  VariableSizedHandleScope hs(soa.Self());
  Handle<mirror::Class> klass_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> klass_str(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/String;")));
  Handle<mirror::Class> klass_list(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/util/List;")));
  Handle<mirror::Class> klass_cloneable(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Cloneable;")));
  Handle<mirror::Class> klass_array_list(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/util/ArrayList;")));
  Handle<mirror::Object> obj(hs.NewHandle(klass_obj->AllocObject(soa.Self())));
  Handle<mirror::String> string(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "ABCD")));
  Handle<mirror::Object> array_list(hs.NewHandle(klass_array_list->AllocObject(soa.Self())));

  EXPECT_FALSE(self->IsExceptionPending());

  // Expected true: Test object instance of java.lang.Object.
  Invoke3(reinterpret_cast<size_t>(obj.Get()),
          reinterpret_cast<size_t>(klass_obj.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected true: Test string instance of java.lang.String.
  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_str.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected true: Test string instance of java.lang.Object.
  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_obj.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected false: Test object instance of java.lang.String.
  Invoke3(reinterpret_cast<size_t>(obj.Get()),
          reinterpret_cast<size_t>(klass_str.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Expected true: Test ArrayList instance of java.util.List (interface check).
  Invoke3(reinterpret_cast<size_t>(array_list.Get()),
          reinterpret_cast<size_t>(klass_list.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected true: Test ArrayList instance of java.lang.Cloneable (interface check).
  Invoke3(reinterpret_cast<size_t>(array_list.Get()),
          reinterpret_cast<size_t>(klass_cloneable.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected false: Test string instance of java.util.ArrayList.
  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_array_list.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Expected false: Test string instance of java.lang.Cloneable.
  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_cloneable.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
911
// Exercises the three object-allocation entrypoints (WithChecks / Resolved / Initialized) on a
// resolved class, then forces an out-of-memory condition and checks the stub reports it by
// raising an exception and returning null.
TEST_F(StubTest, AllocObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Allocation through the "with checks" entrypoint.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectWithChecks),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // Allocation through the "resolved class" entrypoint.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // Allocation through the "initialized class" entrypoint.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      // Keep allocating object arrays, shrinking the requested length whenever an allocation
      // fails, until even small allocations (length <= 10) no longer fit.
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    // With the heap exhausted, the allocation stub must throw (OOME expected) and return null.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1028
// Exercises the resolved array-allocation entrypoint: a successful 10-element allocation, and a
// deliberately oversized (GB-element) request that must throw and return null.
TEST_F(StubTest, AllocObjectArray) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<1> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved32),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << mirror::Object::PrettyTypeOf(self->GetException());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    // Verify the result is a 10-element object array of the requested class.
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
                            GB,  // that should fail...
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved32),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1090
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001091
// Compares every pair of test strings through the string_compareto stub and checks that the
// sign of each result matches String::CompareTo (only the sign is contractual, not the value).
TEST_F(StubTest, StringCompareTo) {
  TEST_DISABLED_FOR_STRING_COMPRESSION();
  // There is no StringCompareTo runtime entrypoint for __arm__ or __aarch64__.
#if defined(__i386__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();

  const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);

  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // NOTE(review): the comment above mentions non-zero offsets, but no offsets are applied in the
  // code below — it looks stale; confirm against the history of this test.
  // TODO: Shared backing arrays.
  const char* c[] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",  // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };  // As is this one. We need a separate one to
                                                  // defeat object-equal optimizations.
  static constexpr size_t kStringCount = arraysize(c);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  // TODO: wide characters

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              art_quick_string_compareto, self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer
      // (reinterpreted from the size_t return slot via the union below).
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      // Only the sign (or zero-ness) must agree with String::CompareTo.
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}
1173
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001174
Mathieu Chartierc7853442015-03-27 14:35:38 -07001175static void GetSetBooleanStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001176 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001177 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001178#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1179 (defined(__x86_64__) && !defined(__APPLE__))
Fred Shih37f05ef2014-07-16 18:38:08 -07001180 constexpr size_t num_values = 5;
1181 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1182
1183 for (size_t i = 0; i < num_values; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001184 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001185 static_cast<size_t>(values[i]),
1186 0U,
1187 StubTest::GetEntrypoint(self, kQuickSet8Static),
1188 self,
1189 referrer);
1190
Mathieu Chartierc7853442015-03-27 14:35:38 -07001191 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001192 0U, 0U,
1193 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1194 self,
1195 referrer);
1196 // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
1197 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1198 }
1199#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001200 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001201 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1202 // Force-print to std::cout so it's also outside the logcat.
1203 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1204#endif
1205}
Mathieu Chartiere401d142015-04-22 13:56:20 -07001206static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Mathieu Chartierc7853442015-03-27 14:35:38 -07001207 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001208 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001209#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1210 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001211 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001212
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001213 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001214 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001215 static_cast<size_t>(values[i]),
1216 0U,
1217 StubTest::GetEntrypoint(self, kQuickSet8Static),
1218 self,
1219 referrer);
1220
Mathieu Chartierc7853442015-03-27 14:35:38 -07001221 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001222 0U, 0U,
1223 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1224 self,
1225 referrer);
1226 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1227 }
1228#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001229 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001230 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1231 // Force-print to std::cout so it's also outside the logcat.
1232 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1233#endif
1234}
1235
1236
// Round-trips byte patterns through the quick Set8Instance / GetBooleanInstance entrypoints on
// an instance boolean field, cross-checking against direct ArtField accessors.
static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
                                  ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Includes non-0/1 patterns to check raw 8-bit stores/loads.
  uint8_t values[] = { 0, true, 2, 128, 0xFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the quick 8-bit instance set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    // The direct field read must observe the value the stub wrote.
    uint8_t res = f->GetBoolean(obj->Get());
    EXPECT_EQ(values[i], res) << "Iteration " << i;

    // Write it back via the ArtField API (non-transactional template argument)...
    f->SetBoolean<false>(obj->Get(), res);

    // ...and the quick get stub must observe that value in turn.
    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint8_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Round-trips signed byte values through the quick Set8Instance / GetByteInstance entrypoints on
// an instance byte field, cross-checking against direct ArtField accessors.
static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
                               Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Patterns spanning the full signed 8-bit range.
  int8_t values[] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the quick 8-bit instance set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    // The direct field read must observe the value the stub wrote.
    int8_t res = f->GetByte(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Bump the value via the ArtField API (non-transactional); the stub read below must see
    // the incremented value.
    f->SetByte<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetByteInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int8_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1306
// Round-trips char test values through the quick static-field stubs: each value is stored with
// the kQuickSet16Static entrypoint and read back with kQuickGetCharStatic, expecting the stored
// value to survive unchanged. On architectures without stub support this only logs a skip notice.
static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
                             StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Representative and boundary values for an unsigned 16-bit char field.
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store values[i] into the static field through the 16-bit set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    // Read it back through the char get stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetCharStatic),
                                           self,
                                           referrer);

    // Narrow the size_t stub return to the field width before comparing.
    EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Round-trips short test values through the quick static-field stubs: each value is stored with
// the kQuickSet16Static entrypoint and read back with kQuickGetShortStatic (which, unlike the
// char variant above, is expected to preserve the sign). Skipped on unsupported architectures.
static void GetSetShortStatic(ArtField* f, Thread* self,
                              ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Signed 16-bit boundary and representative values.
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store values[i] into the static field through the 16-bit set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    // Read it back through the short get stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetShortStatic),
                                           self,
                                           referrer);

    // Narrow back to int16_t so the signed comparison is done at field width.
    EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1367
// Exercises the quick instance-field stubs for a char field on |*obj|: writes each test value
// through kQuickSet16Instance, verifies it with a direct ArtField read, perturbs the field
// directly (SetChar), and then checks the kQuickGetCharInstance stub observes the new value.
static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
                               Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Representative and boundary values for an unsigned 16-bit char field.
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store values[i] into the field of obj through the 16-bit instance set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // Confirm the stub's write with a direct field read, then bump the value directly so the
    // following stub read has a fresh value to observe.
    uint16_t res = f->GetChar(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    f->SetChar<false>(obj->Get(), ++res);

    // The instance get stub should now return the incremented value.
    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetCharInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Exercises the quick instance-field stubs for a short field on |*obj|: writes each test value
// through kQuickSet16Instance, verifies it with a direct ArtField read, perturbs the field
// directly (SetShort), and then checks the kQuickGetShortInstance stub observes the new value.
static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
                                Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Signed 16-bit boundary and representative values.
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store values[i] into the field of obj through the 16-bit instance set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // Confirm the stub's write directly, then bump the field so the stub read below has a
    // fresh value to observe.
    int16_t res = f->GetShort(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    f->SetShort<false>(obj->Get(), ++res);

    // The instance get stub should now return the incremented value, sign-preserved.
    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetShortInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1436
// Round-trips 32-bit test values through the quick static-field stubs (kQuickSet32Static /
// kQuickGet32Static), expecting each stored value to be read back unchanged. Skipped on
// architectures without stub support.
static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
                           StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Representative and boundary values for an unsigned 32-bit field.
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store values[i] into the static field through the 32-bit set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet32Static),
                              self,
                              referrer);

    // Read it back through the 32-bit get stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet32Static),
                                           self,
                                           referrer);

#if defined(__mips__) && defined(__LP64__)
    // On mips64 the size_t stub return must be narrowed to 32 bits before comparing
    // (presumably because the upper half carries an extension of bit 31 — see the cast).
    EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
#else
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
#endif
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1471
1472
// Exercises the quick instance-field stubs for a 32-bit field on |*obj|: writes each test value
// through kQuickSet32Instance, verifies it with a direct ArtField read, increments the field
// directly (SetInt), and then checks the kQuickGet32Instance stub observes the new value.
static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
                             Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Representative and boundary values for an unsigned 32-bit field.
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store values[i] into the field of obj through the 32-bit instance set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet32Instance),
                              self,
                              referrer);

    // Confirm the stub's write with a direct read (as signed int at field width).
    int32_t res = f->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    // Bump the field directly so the stub read below has a fresh value to observe.
    res++;
    f->SetInt<false>(obj->Get(), res);

    // The instance get stub should now return the incremented value.
    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet32Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1509
1510
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001511#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1512 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001513
1514static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001515 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001516 REQUIRES_SHARED(Locks::mutator_lock_) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001517 test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1518 reinterpret_cast<size_t>(val),
1519 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001520 StubTest::GetEntrypoint(self, kQuickSetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001521 self,
1522 referrer);
1523
1524 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1525 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001526 StubTest::GetEntrypoint(self, kQuickGetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001527 self,
1528 referrer);
1529
1530 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1531}
1532#endif
1533
// Exercises the object static-field stubs by round-tripping three references through
// set_and_check_static: null, a freshly allocated string, then null again (clearing the field).
static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
                            StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Null must round-trip.
  set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);

  // And clearing the field back to null must round-trip too.
  set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1553
1554
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001555#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1556 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001557static void set_and_check_instance(ArtField* f, mirror::Object* trg,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001558 mirror::Object* val, Thread* self, ArtMethod* referrer,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001559 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001560 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001561 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001562 reinterpret_cast<size_t>(trg),
1563 reinterpret_cast<size_t>(val),
Andreas Gampe29b38412014-08-13 00:15:43 -07001564 StubTest::GetEntrypoint(self, kQuickSetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001565 self,
1566 referrer);
1567
Mathieu Chartierc7853442015-03-27 14:35:38 -07001568 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001569 reinterpret_cast<size_t>(trg),
1570 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001571 StubTest::GetEntrypoint(self, kQuickGetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001572 self,
1573 referrer);
1574
1575 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1576
Mathieu Chartier3398c782016-09-30 10:27:43 -07001577 EXPECT_OBJ_PTR_EQ(val, f->GetObj(trg));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001578}
1579#endif
1580
// Exercises the object instance-field stubs by round-tripping three references through
// set_and_check_instance on |*obj|: null, a freshly allocated string, then null again.
static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
                              Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Null must round-trip.
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  // And clearing the field back to null must round-trip too.
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1600
1601
Calin Juravle872ab3f2015-10-02 07:27:51 +01001602// TODO: Complete these tests for 32b architectures
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001603
// Round-trips 64-bit test values through the quick static-field stubs (kQuickSet64Static /
// kQuickGet64Static). Enabled only on 64-bit targets (x86-64 non-Apple, mips64, arm64) —
// see the file-level TODO about completing these tests for 32-bit architectures.
static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
                           StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) \
    || defined(__aarch64__)
  // Representative and boundary values for an unsigned 64-bit field.
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // 64 bit FieldSet stores the set value in the second register.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              values[i],
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet64Static),
                              self,
                              referrer);

    // Read it back through the 64-bit get stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet64Static),
                                           self,
                                           referrer);

    // size_t is 64 bits wide on all enabled targets, so a direct comparison is exact.
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1635
1636
// Exercises the quick instance-field stubs for a 64-bit field on |*obj|: writes each test value
// through kQuickSet64Instance, verifies it with a direct ArtField read, increments the field
// directly (SetLong), and then checks the kQuickGet64Instance stub observes the new value.
// Enabled only on 64-bit targets, so casting values[i] to size_t below does not truncate.
static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
                             Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
    defined(__aarch64__)
  // Representative and boundary values for an unsigned 64-bit field.
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store values[i] into the field of obj through the 64-bit instance set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet64Instance),
                              self,
                              referrer);

    // Confirm the stub's write with a direct read (as signed long at field width).
    int64_t res = f->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    // Bump the field directly so the stub read below has a fresh value to observe.
    res++;
    f->SetLong<false>(obj->Get(), res);

    // The instance get stub should now return the incremented value.
    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet64Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1673
// Driver for the field stub tests: allocates an AllFields instance via JNI, then iterates over
// all static and instance fields of its class, dispatching every field whose primitive type
// matches |test_type| to the corresponding GetSet* helper above. |test| provides the stub
// invocation machinery; the first direct method of the class serves as the referrer.
static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // garbage is created during ClassLinker::Init

  // Create the test object through JNI before taking the ScopedObjectAccess below.
  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);

  ScopedObjectAccess soa(self);
  // NOTE(review): scope is sized for 3 handles but only two are created here — likely a
  // leftover from when the referrer method was also a handle; harmless, just oversized.
  StackHandleScope<3> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer
  ArtMethod* m = c->GetDirectMethod(0, kRuntimePointerSize);

  // Play with it...

  // Static fields.
  for (ArtField& f : c->GetSFields()) {
    Primitive::Type type = f.GetTypeAsPrimitiveType();
    // Only exercise fields of the requested primitive type.
    if (test_type != type) {
      continue;
    }
    switch (type) {
      case Primitive::Type::kPrimBoolean:
        GetSetBooleanStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimByte:
        GetSetByteStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimChar:
        GetSetCharStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimShort:
        GetSetShortStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimInt:
        GetSet32Static(&f, self, m, test);
        break;
      case Primitive::Type::kPrimLong:
        GetSet64Static(&f, self, m, test);
        break;
      case Primitive::Type::kPrimNot:
        // Don't try array.
        if (f.GetTypeDescriptor()[0] != '[') {
          GetSetObjStatic(&f, self, m, test);
        }
        break;
      default:
        break;  // Skip.
    }
  }

  // Instance fields.
  for (ArtField& f : c->GetIFields()) {
    Primitive::Type type = f.GetTypeAsPrimitiveType();
    // Only exercise fields of the requested primitive type.
    if (test_type != type) {
      continue;
    }
    switch (type) {
      case Primitive::Type::kPrimBoolean:
        GetSetBooleanInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimByte:
        GetSetByteInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimChar:
        GetSetCharInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimShort:
        GetSetShortInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimInt:
        GetSet32Instance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimLong:
        GetSet64Instance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimNot:
        // Don't try array.
        if (f.GetTypeDescriptor()[0] != '[') {
          GetSetObjInstance(&obj, &f, self, m, test);
        }
        break;
      default:
        break;  // Skip.
    }
  }

  // TODO: Deallocate things.
}
1766
Fred Shih37f05ef2014-07-16 18:38:08 -07001767TEST_F(StubTest, Fields8) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001768 Thread* self = Thread::Current();
1769
1770 self->TransitionFromSuspendedToRunnable();
1771 LoadDex("AllFields");
1772 bool started = runtime_->Start();
1773 CHECK(started);
1774
1775 TestFields(self, this, Primitive::Type::kPrimBoolean);
1776 TestFields(self, this, Primitive::Type::kPrimByte);
1777}
1778
1779TEST_F(StubTest, Fields16) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001780 Thread* self = Thread::Current();
1781
1782 self->TransitionFromSuspendedToRunnable();
1783 LoadDex("AllFields");
1784 bool started = runtime_->Start();
1785 CHECK(started);
1786
1787 TestFields(self, this, Primitive::Type::kPrimChar);
1788 TestFields(self, this, Primitive::Type::kPrimShort);
1789}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001790
1791TEST_F(StubTest, Fields32) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001792 Thread* self = Thread::Current();
1793
1794 self->TransitionFromSuspendedToRunnable();
1795 LoadDex("AllFields");
1796 bool started = runtime_->Start();
1797 CHECK(started);
1798
1799 TestFields(self, this, Primitive::Type::kPrimInt);
1800}
1801
1802TEST_F(StubTest, FieldsObj) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001803 Thread* self = Thread::Current();
1804
1805 self->TransitionFromSuspendedToRunnable();
1806 LoadDex("AllFields");
1807 bool started = runtime_->Start();
1808 CHECK(started);
1809
1810 TestFields(self, this, Primitive::Type::kPrimNot);
1811}
1812
1813TEST_F(StubTest, Fields64) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001814 Thread* self = Thread::Current();
1815
1816 self->TransitionFromSuspendedToRunnable();
1817 LoadDex("AllFields");
1818 bool started = runtime_->Start();
1819 CHECK(started);
1820
1821 TestFields(self, this, Primitive::Type::kPrimLong);
1822}
1823
// Disabled, b/27991555 .
// FIXME: Hacking the entry point to point to art_quick_to_interpreter_bridge is broken.
// The bridge calls through to GetCalleeSaveMethodCaller() which looks up the pre-header
// and gets a bogus OatQuickMethodHeader* pointing into our assembly code just before
// the bridge and uses that to check for inlined frames, crashing in the process.
//
// Exercises the IMT conflict trampoline (kQuickQuickImtConflictTrampoline) and the
// interface invocation trampoline (kQuickInvokeInterfaceTrampolineWithAccessCheck) by
// invoking List.contains() on a java.util.ArrayList, before and after adding the probe
// object, through a hand-built ImtConflictTable.
TEST_F(StubTest, DISABLED_IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(
      arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get representation.
  ArtMethod* contains_amethod = jni::DecodeArtMethod(contains_jmethod);

  // Patch up ArrayList.contains: give it an executable entry point if it has none,
  // so the trampolines below have something to dispatch to. (This hack is what the
  // FIXME above says is broken.)
  if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(
      list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  ArtMethod* inf_contains = jni::DecodeArtMethod(inf_contains_jmethod);

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances: the receiver list and the probe object.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains.

  // We construct the ImtConflictTable ourselves, as we cannot go into the runtime stub
  // that will create it: the runtime stub expects to be called by compiled code.
  // The table is placement-new'ed into linear-alloc memory, grown by one entry mapping
  // the interface method (inf_contains) to the concrete one (contains_amethod).
  LinearAlloc* linear_alloc = Runtime::Current()->GetLinearAlloc();
  ArtMethod* conflict_method = Runtime::Current()->CreateImtConflictMethod(linear_alloc);
  ImtConflictTable* empty_conflict_table =
      Runtime::Current()->GetClassLinker()->CreateImtConflictTable(/*count*/0u, linear_alloc);
  void* data = linear_alloc->Alloc(
      self,
      ImtConflictTable::ComputeSizeWithOneMoreEntry(empty_conflict_table, kRuntimePointerSize));
  ImtConflictTable* new_table = new (data) ImtConflictTable(
      empty_conflict_table, inf_contains, contains_amethod, kRuntimePointerSize);
  conflict_method->SetImtConflictTable(new_table, kRuntimePointerSize);

  // contains(obj) on the empty list: the interface method's dex method index is passed
  // as the hidden argument, contains_amethod as the referrer.
  size_t result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);  // Empty list must not contain obj.

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << mirror::Object::PrettyTypeOf(self->GetException());

  // Contains.

  // Same call again; after the add it must report true.
  result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline

  // Here the dex method index is the first argument rather than the hidden one.
  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                   kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // contains(array_list) — the list does not contain itself, so expect false.
  result = Invoke3WithReferrer(
      static_cast<size_t>(inf_contains->GetDexMethodIndex()),
      reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
      StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
      contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1967
Andreas Gampe6aac3552014-06-09 14:55:53 -07001968TEST_F(StubTest, StringIndexOf) {
Chris Larsencf283da2016-01-19 16:45:35 -08001969#if defined(__arm__) || defined(__aarch64__) || defined(__mips__)
Andreas Gampe6aac3552014-06-09 14:55:53 -07001970 Thread* self = Thread::Current();
1971 ScopedObjectAccess soa(self);
1972 // garbage is created during ClassLinker::Init
1973
1974 // Create some strings
1975 // Use array so we can index into it and use a matrix for expected results
1976 // Setup: The first half is standard. The second half uses a non-zero offset.
1977 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001978 const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
1979 static constexpr size_t kStringCount = arraysize(c_str);
1980 const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
1981 static constexpr size_t kCharCount = arraysize(c_char);
Andreas Gampe6aac3552014-06-09 14:55:53 -07001982
1983 StackHandleScope<kStringCount> hs(self);
1984 Handle<mirror::String> s[kStringCount];
1985
1986 for (size_t i = 0; i < kStringCount; ++i) {
1987 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
1988 }
1989
1990 // Matrix of expectations. First component is first parameter. Note we only check against the
1991 // sign, not the value. As we are testing random offsets, we need to compute this and need to
1992 // rely on String::CompareTo being correct.
1993 static constexpr size_t kMaxLen = 9;
1994 DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";
1995
1996 // Last dimension: start, offset by 1.
1997 int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
1998 for (size_t x = 0; x < kStringCount; ++x) {
1999 for (size_t y = 0; y < kCharCount; ++y) {
2000 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2001 expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
2002 }
2003 }
2004 }
2005
2006 // Play with it...
2007
2008 for (size_t x = 0; x < kStringCount; ++x) {
2009 for (size_t y = 0; y < kCharCount; ++y) {
2010 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2011 int32_t start = static_cast<int32_t>(z) - 1;
2012
2013 // Test string_compareto x y
2014 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
Andreas Gampe29b38412014-08-13 00:15:43 -07002015 StubTest::GetEntrypoint(self, kQuickIndexOf), self);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002016
2017 EXPECT_FALSE(self->IsExceptionPending());
2018
2019 // The result is a 32b signed integer
2020 union {
2021 size_t r;
2022 int32_t i;
2023 } conv;
2024 conv.r = result;
2025
2026 EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
2027 c_char[y] << " @ " << start;
2028 }
2029 }
2030 }
2031
2032 // TODO: Deallocate things.
2033
2034 // Tests done.
2035#else
2036 LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
2037 // Force-print to std::cout so it's also outside the logcat.
2038 std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe51f76352014-05-21 08:28:48 -07002039#endif
2040}
2041
Roland Levillain02b75802016-07-13 11:54:35 +01002042// TODO: Exercise the ReadBarrierMarkRegX entry points.
2043
Man Cao1aee9002015-07-14 22:31:42 -07002044TEST_F(StubTest, ReadBarrier) {
2045#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2046 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2047 Thread* self = Thread::Current();
2048
2049 const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);
2050
2051 // Create an object
2052 ScopedObjectAccess soa(self);
2053 // garbage is created during ClassLinker::Init
2054
2055 StackHandleScope<2> hs(soa.Self());
2056 Handle<mirror::Class> c(
2057 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
2058
2059 // Build an object instance
2060 Handle<mirror::Object> obj(hs.NewHandle(c->AllocObject(soa.Self())));
2061
2062 EXPECT_FALSE(self->IsExceptionPending());
2063
2064 size_t result = Invoke3(0U, reinterpret_cast<size_t>(obj.Get()),
2065 mirror::Object::ClassOffset().SizeValue(), readBarrierSlow, self);
2066
2067 EXPECT_FALSE(self->IsExceptionPending());
2068 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2069 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2070 EXPECT_EQ(klass, obj->GetClass());
2071
2072 // Tests done.
2073#else
2074 LOG(INFO) << "Skipping read_barrier_slow";
2075 // Force-print to std::cout so it's also outside the logcat.
2076 std::cout << "Skipping read_barrier_slow" << std::endl;
2077#endif
2078}
2079
Roland Levillain0d5a2812015-11-13 10:07:31 +00002080TEST_F(StubTest, ReadBarrierForRoot) {
2081#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2082 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2083 Thread* self = Thread::Current();
2084
2085 const uintptr_t readBarrierForRootSlow =
2086 StubTest::GetEntrypoint(self, kQuickReadBarrierForRootSlow);
2087
2088 // Create an object
2089 ScopedObjectAccess soa(self);
2090 // garbage is created during ClassLinker::Init
2091
2092 StackHandleScope<1> hs(soa.Self());
2093
2094 Handle<mirror::String> obj(
2095 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
2096
2097 EXPECT_FALSE(self->IsExceptionPending());
2098
2099 GcRoot<mirror::Class>& root = mirror::String::java_lang_String_;
2100 size_t result = Invoke3(reinterpret_cast<size_t>(&root), 0U, 0U, readBarrierForRootSlow, self);
2101
2102 EXPECT_FALSE(self->IsExceptionPending());
2103 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2104 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2105 EXPECT_EQ(klass, obj->GetClass());
2106
2107 // Tests done.
2108#else
2109 LOG(INFO) << "Skipping read_barrier_for_root_slow";
2110 // Force-print to std::cout so it's also outside the logcat.
2111 std::cout << "Skipping read_barrier_for_root_slow" << std::endl;
2112#endif
2113}
2114
Andreas Gampe525cde22014-04-22 15:44:50 -07002115} // namespace art