blob: 4236c287de3fe2c96e7841406a8b5a35a70bd299 [file] [log] [blame]
Andreas Gampe525cde22014-04-22 15:44:50 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Mathieu Chartierc7853442015-03-27 14:35:38 -070019#include "art_field-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method-inl.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010021#include "class_linker-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070022#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070023#include "entrypoints/quick/quick_entrypoints_enum.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070024#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070025#include "mirror/string-inl.h"
Ian Rogerse63db272014-07-15 15:36:11 -070026#include "scoped_thread_state_change.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070027
28namespace art {
29
30
// Fixture for testing ART's quick entrypoint stubs. Provides Invoke3* helpers that
// hand-roll a managed-code transition and call a stub with up to three arguments
// (plus an optional referrer method and "hidden" argument) using per-architecture
// inline assembly.
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      // Install a callee-save method for every CalleeSaveType that is still missing.
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  // Shrinks the heap and forces interpreter-only mode before the runtime starts.
  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  // Exposes the fixture's friend access to Thread internals to the generated test classes.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Calls the stub at `code` with three arguments and no referrer / hidden argument.
  // Returns the stub's (architecture-specific) integer result.
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  // Same as Invoke3 but additionally passes a referrer ArtMethod (hidden argument is 0).
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             ArtMethod* referrer) {
    return Invoke3WithReferrerAndHidden(arg0, arg1, arg2, code, self, referrer, 0);
  }

  // TODO: Set up a frame according to referrer's specs.
  // Core trampoline: spills registers, builds a minimal quick "frame" holding the referrer,
  // moves the arguments into the registers the quick ABI expects, calls the stub, and
  // restores state. On aarch64 it additionally verifies that the stub preserved the
  // callee-saved FP registers d8-d15 (result recorded in fp_result).
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;  // Set nonzero by the aarch64 path if d8-d15 were clobbered.
#if defined(__i386__)
    // TODO: Set the thread?
#define PUSH(reg) "push " # reg "\n\t .cfi_adjust_cfa_offset 4\n\t"
#define POP(reg) "pop " # reg "\n\t .cfi_adjust_cfa_offset -4\n\t"
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"  // This is a memory op, so do this early. If it is off of
                                      // esp, then we won't be able to access it after spilling.

        // Spill 6 registers.
        PUSH(%%ebx)
        PUSH(%%ecx)
        PUSH(%%edx)
        PUSH(%%esi)
        PUSH(%%edi)
        PUSH(%%ebp)

        // Store the inputs to the stack, but keep the referrer up top, less work.
        PUSH(%[referrer])  // Align stack.
        PUSH(%[referrer])  // Store referrer

        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[code])
        // Now read them back into the required registers.
        POP(%%edi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%eax)
        // Call is prepared now.

        "call *%%edi\n\t"  // Call the stub
        "addl $8, %%esp\n\t"  // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -8\n\t"

        // Restore 6 registers.
        POP(%%ebp)
        POP(%%edi)
        POP(%%esi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%ebx)

        : "=a" (result)
          // Use the result from eax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory", "xmm7");  // clobber.
#undef PUSH
#undef POP
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"  // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"  // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"              // Call the stub
        "add sp, sp, #12\n\t"     // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"    // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"   // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #80\n\t"
        ".cfi_adjust_cfa_offset 80\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"
        // To be extra defensive, store x20. We do this because some of the stubs might make a
        // transition into the runtime via the blr instruction below and *not* save x20.
        "str x20, [sp, #64]\n\t"
        // 8 byte buffer

        "sub sp, sp, #16\n\t"   // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"  // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x19, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"   // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"   // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "ldr x20, [sp, #64]\n\t"
        "add sp, sp, #80\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -80\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"            // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"            // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
          // Leave one register unclobbered, which is needed for compiling with
          // -fstack-protector-strong. According to AAPCS64 registers x9-x15 are caller-saved,
          // which means we should unclobber one of the callee-saved registers that are unused.
          // Here we use x20.
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -24\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"
        "sw %[hidden], 20($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"
        "lw $s1, 16($sp)\n\t"
        "lw $t0, 20($sp)\n\t"
        "addiu $sp, $sp, 24\n\t"

        "jalr $t9\n\t"             // Call the stub.
        "nop\n\t"                  // Branch delay slot.
        "addiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"   // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -48\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"
        "sd %[hidden], 40($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"
        "ld $s1, 32($sp)\n\t"
        "ld $t0, 40($sp)\n\t"
        "daddiu $sp, $sp, 48\n\t"

        "jalr $t9\n\t"              // Call the stub.
        "nop\n\t"                   // Branch delay slot.
        "daddiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"   // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        // Instead aliases t0-t3, register names $12-$15 has been used in the clobber list because
        // t0-t3 are ambiguous.
        : "at", "v0", "v1", "$12", "$13", "$14", "$15", "s0", "s1", "s2", "s3", "s4", "s5", "s6",
          "s7", "t8", "t9", "k0", "k1", "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
#define PUSH(reg) "pushq " # reg "\n\t .cfi_adjust_cfa_offset 8\n\t"
#define POP(reg) "popq " # reg "\n\t .cfi_adjust_cfa_offset -8\n\t"
    // Note: Uses the native convention. We do a callee-save regimen by manually spilling and
    // restoring almost all registers.
    // TODO: Set the thread?
    __asm__ __volatile__(
        // Spill almost everything (except rax, rsp). 14 registers.
        PUSH(%%rbx)
        PUSH(%%rcx)
        PUSH(%%rdx)
        PUSH(%%rsi)
        PUSH(%%rdi)
        PUSH(%%rbp)
        PUSH(%%r8)
        PUSH(%%r9)
        PUSH(%%r10)
        PUSH(%%r11)
        PUSH(%%r12)
        PUSH(%%r13)
        PUSH(%%r14)
        PUSH(%%r15)

        PUSH(%[referrer])  // Push referrer & 16B alignment padding
        PUSH(%[referrer])

        // Now juggle the input registers.
        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[hidden])
        PUSH(%[code])
        POP(%%r8)
        POP(%%rax)
        POP(%%rdx)
        POP(%%rsi)
        POP(%%rdi)

        "call *%%r8\n\t"      // Call the stub
        "addq $16, %%rsp\n\t" // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"

        POP(%%r15)
        POP(%%r14)
        POP(%%r13)
        POP(%%r12)
        POP(%%r11)
        POP(%%r10)
        POP(%%r9)
        POP(%%r8)
        POP(%%rbp)
        POP(%%rdi)
        POP(%%rsi)
        POP(%%rdx)
        POP(%%rcx)
        POP(%%rbx)

        : "=a" (result)
          // Use the result from rax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into some other
          // register. We can't use "b" (rbx), as ASAN uses this for the frame pointer.
        : "memory");  // We spill and restore (almost) all registers, so only mention memory here.
#undef PUSH
#undef POP
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // Record the FPR-preservation verdict (only set by the aarch64 path) and fail the
    // test if any callee-saved FP register was clobbered.
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Reads the address of the given quick entrypoint out of `self`'s thread-local
  // entrypoint table, using the pointer size of the current build.
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
#ifdef __LP64__
    offset = GetThreadOffset<8>(entrypoint).Int32Value();
#else
    offset = GetThreadOffset<4>(entrypoint).Int32Value();
#endif
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  // Result of the last FPR callee-save verification (0 == all of d8-d15 preserved).
  size_t fp_result;
};
542
543
Andreas Gampe525cde22014-04-22 15:44:50 -0700544TEST_F(StubTest, Memcpy) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200545#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
Andreas Gampe525cde22014-04-22 15:44:50 -0700546 Thread* self = Thread::Current();
547
548 uint32_t orig[20];
549 uint32_t trg[20];
550 for (size_t i = 0; i < 20; ++i) {
551 orig[i] = i;
552 trg[i] = 0;
553 }
554
555 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700556 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700557
558 EXPECT_EQ(orig[0], trg[0]);
559
560 for (size_t i = 1; i < 4; ++i) {
561 EXPECT_NE(orig[i], trg[i]);
562 }
563
564 for (size_t i = 4; i < 14; ++i) {
565 EXPECT_EQ(orig[i], trg[i]);
566 }
567
568 for (size_t i = 14; i < 20; ++i) {
569 EXPECT_NE(orig[i], trg[i]);
570 }
571
572 // TODO: Test overlapping?
573
574#else
575 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
576 // Force-print to std::cout so it's also outside the logcat.
577 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
578#endif
579}
580
// Exercises the art_quick_lock_object stub: first-time thin lock, recursive thin
// locking up to kThinLockLoops, and locking an object whose lock word already holds
// an identity hash code (which forces inflation to a fat lock).
TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  // A fresh object must start out unlocked.
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  // First lock through the stub: expect a thin lock.
  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  // Recursive locking by the same thread bumps the thin lock count each time.
  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  // Locking an object whose lock word holds a hash code must inflate to a fat lock.
  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
639
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700640
// Small deterministic pseudo-random generator (multiplicative-congruential step with
// an additive constant), so the lock stress test below is reproducible across runs.
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  // Advances the generator state and returns the new value.
  uint32_t next() {
    const uint32_t product = val_ * 48271;  // Unsigned arithmetic: wraps modulo 2^32.
    val_ = product % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;  // Current state; exposed for simplicity.
};
652
653
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700654// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
655static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200656#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
657 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700658 static constexpr size_t kThinLockLoops = 100;
659
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700660 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700661
662 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
663 const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700664 // Create an object
665 ScopedObjectAccess soa(self);
666 // garbage is created during ClassLinker::Init
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700667 static constexpr size_t kNumberOfLocks = 10; // Number of objects = lock
668 StackHandleScope<kNumberOfLocks + 1> hs(self);
669 Handle<mirror::String> obj(
670 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700671 LockWord lock = obj->GetLockWord(false);
672 LockWord::LockState old_state = lock.GetState();
673 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
674
Andreas Gampe29b38412014-08-13 00:15:43 -0700675 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700676 // This should be an illegal monitor state.
677 EXPECT_TRUE(self->IsExceptionPending());
678 self->ClearException();
679
680 LockWord lock_after = obj->GetLockWord(false);
681 LockWord::LockState new_state = lock_after.GetState();
682 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);
Andreas Gampe525cde22014-04-22 15:44:50 -0700683
Andreas Gampe29b38412014-08-13 00:15:43 -0700684 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700685
686 LockWord lock_after2 = obj->GetLockWord(false);
687 LockWord::LockState new_state2 = lock_after2.GetState();
688 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);
689
Andreas Gampe29b38412014-08-13 00:15:43 -0700690 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700691
692 LockWord lock_after3 = obj->GetLockWord(false);
693 LockWord::LockState new_state3 = lock_after3.GetState();
694 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);
695
696 // Stress test:
697 // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
698 // each step.
699
700 RandGen r(0x1234);
701
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700702 constexpr size_t kIterations = 10000; // Number of iterations
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700703 constexpr size_t kMoveToFat = 1000; // Chance of 1:kMoveFat to make a lock fat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700704
705 size_t counts[kNumberOfLocks];
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700706 bool fat[kNumberOfLocks]; // Whether a lock should be thin or fat.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700707 Handle<mirror::String> objects[kNumberOfLocks];
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700708
709 // Initialize = allocate.
710 for (size_t i = 0; i < kNumberOfLocks; ++i) {
711 counts[i] = 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700712 fat[i] = false;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700713 objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700714 }
715
716 for (size_t i = 0; i < kIterations; ++i) {
717 // Select which lock to update.
718 size_t index = r.next() % kNumberOfLocks;
719
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700720 // Make lock fat?
721 if (!fat[index] && (r.next() % kMoveToFat == 0)) {
722 fat[index] = true;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700723 objects[index]->IdentityHashCode();
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700724
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700725 LockWord lock_iter = objects[index]->GetLockWord(false);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700726 LockWord::LockState iter_state = lock_iter.GetState();
727 if (counts[index] == 0) {
728 EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
729 } else {
730 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
731 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700732 } else {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800733 bool take_lock; // Whether to lock or unlock in this step.
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700734 if (counts[index] == 0) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800735 take_lock = true;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700736 } else if (counts[index] == kThinLockLoops) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800737 take_lock = false;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700738 } else {
739 // Randomly.
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800740 take_lock = r.next() % 2 == 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700741 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700742
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800743 if (take_lock) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700744 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
745 self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700746 counts[index]++;
747 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700748 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700749 art_quick_unlock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700750 counts[index]--;
751 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700752
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700753 EXPECT_FALSE(self->IsExceptionPending());
754
755 // Check the new state.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700756 LockWord lock_iter = objects[index]->GetLockWord(true);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700757 LockWord::LockState iter_state = lock_iter.GetState();
758 if (fat[index]) {
759 // Abuse MonitorInfo.
760 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700761 MonitorInfo info(objects[index].Get());
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700762 EXPECT_EQ(counts[index], info.entry_count_) << index;
763 } else {
764 if (counts[index] > 0) {
765 EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
766 EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
767 } else {
768 EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
769 }
770 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700771 }
772 }
773
774 // Unlock the remaining count times and then check it's unlocked. Then deallocate.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700775 // Go reverse order to correctly handle Handles.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700776 for (size_t i = 0; i < kNumberOfLocks; ++i) {
777 size_t index = kNumberOfLocks - 1 - i;
778 size_t count = counts[index];
779 while (count > 0) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700780 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
781 self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700782 count--;
783 }
784
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700785 LockWord lock_after4 = objects[index]->GetLockWord(false);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700786 LockWord::LockState new_state4 = lock_after4.GetState();
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700787 EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
788 || LockWord::LockState::kFatLocked == new_state4);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700789 }
790
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700791 // Test done.
Andreas Gampe525cde22014-04-22 15:44:50 -0700792#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -0800793 UNUSED(test);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700794 LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe525cde22014-04-22 15:44:50 -0700795 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700796 std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe525cde22014-04-22 15:44:50 -0700797#endif
798}
799
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700800TEST_F(StubTest, UnlockObject) {
Andreas Gampe369810a2015-01-14 19:53:31 -0800801 // This will lead to monitor error messages in the log.
802 ScopedLogSeverity sls(LogSeverity::FATAL);
803
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700804 TestUnlockObject(this);
805}
Andreas Gampe525cde22014-04-22 15:44:50 -0700806
// Assembly entrypoint used by the CheckCast test below. It is only declared
// for the architectures on which the test actually runs (note: x86-64 is
// excluded on Apple hosts, matching the test's own #if guard).
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_check_cast(void);
#endif
811
// Exercises the art_quick_check_cast stub: assignable class pairs must leave
// no exception pending, a non-assignable pair must raise one.
TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);

  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // c = Object[] class, c2 = String[] class. String[] is assignable to
  // Object[], but not vice versa.
  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

  // Identical pair: trivially assignable, no exception expected.
  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  // Identical pair again (String[] / String[]): no exception expected.
  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  // Mixed pair in the assignable direction: no exception expected.
  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.

  // Mixed pair in the non-assignable direction: the stub must leave a
  // (class-cast) exception pending; exact type is not checked here.
  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
860
861
// Exercises the checked object-array store stub
// (art_quick_aput_obj_with_null_and_bound_check): valid stores must land in
// the array without raising, while bad indices or incompatible element types
// must leave an exception pending.
TEST_F(StubTest, APutObj) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  // Do not check non-checked ones, we'd need handlers and stuff...
  const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
      StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a String array of length 10.
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3] — storing null is always type-compatible.

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index >= length (index 10 on a length-10 array).

  Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[]) — a plain Object is not assignable to a
  // String[] element, so the store must raise.

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
993
// Exercises the three object-allocation stubs (AllocObject,
// AllocObjectResolved, AllocObjectInitialized) on the happy path, then fills
// the heap to force an OOM and checks the stub reports it as an exception.
TEST_F(StubTest, AllocObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // AllocObject resolves the type index through a referrer method, so use
    // an arbitrary method from c as referrer.
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),  // type_idx
                            // arbitrary
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0, sizeof(void*))),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    // With the heap exhausted, the stub must fail: null result + pending OOME.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1118
// Exercises the array-allocation stubs: a resolved Object[] allocation of
// length 10 must succeed; a GB-element request must fail with an exception
// and a null result.
TEST_F(StubTest, AllocObjectArray) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  if ((false)) {  // Intentionally disabled, see comment above.
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),  // type_idx
                            10U,
                            // arbitrary
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0, sizeof(void*))),
                            StubTest::GetEntrypoint(self, kQuickAllocArray),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
                            GB,  // that should fail...
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1204
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001205
// Exercises the art_quick_string_compareto stub against String::CompareTo for
// every ordered pair of test strings, checking only the sign of the result.
TEST_F(StubTest, StringCompareTo) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
    defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();

  const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);

  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // NOTE(review): an earlier version of this test used non-zero string
  // offsets for the second half; the visible code allocates plain strings.
  // TODO: Shared backing arrays.
  const char* c[] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",  // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };  // As is this one. We need a separate one to
                                                  // defeat object-equal optimizations.
  static constexpr size_t kStringCount = arraysize(c);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  // TODO: wide characters

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value, computed via the managed String::CompareTo (which we
  // rely on being correct).
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              art_quick_string_compareto, self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer returned in a register-width slot;
      // reinterpret the low bits via a union.
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      // Only the sign (or zero-ness) must agree with String::CompareTo.
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}
1285
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001286
// Round-trips several byte patterns (including non-canonical "booleans" such
// as 2, 128 and 0xFF) through the Set8Static stub on static field f, reading
// each back via GetBooleanStatic and comparing bit-for-bit.
// referrer: method used by the stubs to resolve the field index.
static void GetSetBooleanStatic(ArtField* f, Thread* self,
                                ArtMethod* referrer, StubTest* test)
    SHARED_REQUIRES(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 5;
  uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };

  for (size_t i = 0; i < num_values; ++i) {
    // Store values[i] into the static field through the stub...
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet8Static),
                              self,
                              referrer);

    // ...and read it back through the boolean getter stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
                                           self,
                                           referrer);
    // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
    EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
Mathieu Chartiere401d142015-04-22 13:56:20 -07001318static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Mathieu Chartierc7853442015-03-27 14:35:38 -07001319 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001320 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001321#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1322 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001323 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001324
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001325 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001326 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001327 static_cast<size_t>(values[i]),
1328 0U,
1329 StubTest::GetEntrypoint(self, kQuickSet8Static),
1330 self,
1331 referrer);
1332
Mathieu Chartierc7853442015-03-27 14:35:38 -07001333 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001334 0U, 0U,
1335 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1336 self,
1337 referrer);
1338 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1339 }
1340#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001341 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001342 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1343 // Force-print to std::cout so it's also outside the logcat.
1344 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1345#endif
1346}
1347
1348
Mathieu Chartierc7853442015-03-27 14:35:38 -07001349static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001350 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001351 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001352#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1353 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001354 uint8_t values[] = { 0, true, 2, 128, 0xFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001355
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001356 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001357 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001358 reinterpret_cast<size_t>(obj->Get()),
1359 static_cast<size_t>(values[i]),
1360 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1361 self,
1362 referrer);
1363
Mathieu Chartierc7853442015-03-27 14:35:38 -07001364 uint8_t res = f->GetBoolean(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001365 EXPECT_EQ(values[i], res) << "Iteration " << i;
1366
Mathieu Chartierc7853442015-03-27 14:35:38 -07001367 f->SetBoolean<false>(obj->Get(), res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001368
Mathieu Chartierc7853442015-03-27 14:35:38 -07001369 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001370 reinterpret_cast<size_t>(obj->Get()),
1371 0U,
1372 StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
1373 self,
1374 referrer);
1375 EXPECT_EQ(res, static_cast<uint8_t>(res2));
1376 }
1377#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001378 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001379 LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
1380 // Force-print to std::cout so it's also outside the logcat.
1381 std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1382#endif
1383}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001384static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001385 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001386 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001387#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1388 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001389 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001390
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001391 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001392 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001393 reinterpret_cast<size_t>(obj->Get()),
1394 static_cast<size_t>(values[i]),
1395 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1396 self,
1397 referrer);
1398
Mathieu Chartierc7853442015-03-27 14:35:38 -07001399 int8_t res = f->GetByte(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001400 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001401 f->SetByte<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001402
Mathieu Chartierc7853442015-03-27 14:35:38 -07001403 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001404 reinterpret_cast<size_t>(obj->Get()),
1405 0U,
1406 StubTest::GetEntrypoint(self, kQuickGetByteInstance),
1407 self,
1408 referrer);
1409 EXPECT_EQ(res, static_cast<int8_t>(res2));
1410 }
1411#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001412 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001413 LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
1414 // Force-print to std::cout so it's also outside the logcat.
1415 std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1416#endif
1417}
1418
Mathieu Chartiere401d142015-04-22 13:56:20 -07001419static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001420 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001421 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001422#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1423 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001424 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001425
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001426 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001427 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001428 static_cast<size_t>(values[i]),
1429 0U,
1430 StubTest::GetEntrypoint(self, kQuickSet16Static),
1431 self,
1432 referrer);
1433
Mathieu Chartierc7853442015-03-27 14:35:38 -07001434 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001435 0U, 0U,
1436 StubTest::GetEntrypoint(self, kQuickGetCharStatic),
1437 self,
1438 referrer);
1439
1440 EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
1441 }
1442#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001443 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001444 LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
1445 // Force-print to std::cout so it's also outside the logcat.
1446 std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1447#endif
1448}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001449static void GetSetShortStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001450 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001451 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001452#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1453 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001454 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001455
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001456 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001457 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001458 static_cast<size_t>(values[i]),
1459 0U,
1460 StubTest::GetEntrypoint(self, kQuickSet16Static),
1461 self,
1462 referrer);
1463
Mathieu Chartierc7853442015-03-27 14:35:38 -07001464 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001465 0U, 0U,
1466 StubTest::GetEntrypoint(self, kQuickGetShortStatic),
1467 self,
1468 referrer);
1469
1470 EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
1471 }
1472#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001473 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001474 LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
1475 // Force-print to std::cout so it's also outside the logcat.
1476 std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1477#endif
1478}
1479
Mathieu Chartierc7853442015-03-27 14:35:38 -07001480static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001481 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001482 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001483#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1484 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001485 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001486
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001487 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001488 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001489 reinterpret_cast<size_t>(obj->Get()),
1490 static_cast<size_t>(values[i]),
1491 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1492 self,
1493 referrer);
1494
Mathieu Chartierc7853442015-03-27 14:35:38 -07001495 uint16_t res = f->GetChar(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001496 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001497 f->SetChar<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001498
Mathieu Chartierc7853442015-03-27 14:35:38 -07001499 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001500 reinterpret_cast<size_t>(obj->Get()),
1501 0U,
1502 StubTest::GetEntrypoint(self, kQuickGetCharInstance),
1503 self,
1504 referrer);
1505 EXPECT_EQ(res, static_cast<uint16_t>(res2));
1506 }
1507#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001508 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001509 LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
1510 // Force-print to std::cout so it's also outside the logcat.
1511 std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1512#endif
1513}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001514static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001515 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001516 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001517#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1518 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001519 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001520
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001521 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001522 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001523 reinterpret_cast<size_t>(obj->Get()),
1524 static_cast<size_t>(values[i]),
1525 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1526 self,
1527 referrer);
1528
Mathieu Chartierc7853442015-03-27 14:35:38 -07001529 int16_t res = f->GetShort(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001530 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001531 f->SetShort<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001532
Mathieu Chartierc7853442015-03-27 14:35:38 -07001533 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001534 reinterpret_cast<size_t>(obj->Get()),
1535 0U,
1536 StubTest::GetEntrypoint(self, kQuickGetShortInstance),
1537 self,
1538 referrer);
1539 EXPECT_EQ(res, static_cast<int16_t>(res2));
1540 }
1541#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001542 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001543 LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
1544 // Force-print to std::cout so it's also outside the logcat.
1545 std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1546#endif
1547}
1548
Mathieu Chartiere401d142015-04-22 13:56:20 -07001549static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001550 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001551 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001552#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1553 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001554 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001555
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001556 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001557 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001558 static_cast<size_t>(values[i]),
1559 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001560 StubTest::GetEntrypoint(self, kQuickSet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001561 self,
1562 referrer);
1563
Mathieu Chartierc7853442015-03-27 14:35:38 -07001564 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001565 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001566 StubTest::GetEntrypoint(self, kQuickGet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001567 self,
1568 referrer);
1569
Goran Jakovljevic04568812015-04-23 15:27:23 +02001570#if defined(__mips__) && defined(__LP64__)
1571 EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
1572#else
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001573 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Goran Jakovljevic04568812015-04-23 15:27:23 +02001574#endif
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001575 }
1576#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001577 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001578 LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
1579 // Force-print to std::cout so it's also outside the logcat.
1580 std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
1581#endif
1582}
1583
1584
Mathieu Chartierc7853442015-03-27 14:35:38 -07001585static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001586 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001587 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001588#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1589 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001590 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001591
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001592 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001593 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001594 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001595 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001596 StubTest::GetEntrypoint(self, kQuickSet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001597 self,
1598 referrer);
1599
Mathieu Chartierc7853442015-03-27 14:35:38 -07001600 int32_t res = f->GetInt(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001601 EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;
1602
1603 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001604 f->SetInt<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001605
Mathieu Chartierc7853442015-03-27 14:35:38 -07001606 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001607 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001608 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001609 StubTest::GetEntrypoint(self, kQuickGet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001610 self,
1611 referrer);
1612 EXPECT_EQ(res, static_cast<int32_t>(res2));
1613 }
1614#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001615 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001616 LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
1617 // Force-print to std::cout so it's also outside the logcat.
1618 std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1619#endif
1620}
1621
1622
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001623#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1624 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001625
1626static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001627 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001628 SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001629 test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1630 reinterpret_cast<size_t>(val),
1631 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001632 StubTest::GetEntrypoint(self, kQuickSetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001633 self,
1634 referrer);
1635
1636 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1637 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001638 StubTest::GetEntrypoint(self, kQuickGetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001639 self,
1640 referrer);
1641
1642 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1643}
1644#endif
1645
Mathieu Chartiere401d142015-04-22 13:56:20 -07001646static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001647 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001648 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001649#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1650 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001651 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001652
1653 // Allocate a string object for simplicity.
1654 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartierc7853442015-03-27 14:35:38 -07001655 set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001656
Mathieu Chartierc7853442015-03-27 14:35:38 -07001657 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001658#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001659 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001660 LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
1661 // Force-print to std::cout so it's also outside the logcat.
1662 std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
1663#endif
1664}
1665
1666
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001667#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1668 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001669static void set_and_check_instance(ArtField* f, mirror::Object* trg,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001670 mirror::Object* val, Thread* self, ArtMethod* referrer,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001671 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001672 SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001673 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001674 reinterpret_cast<size_t>(trg),
1675 reinterpret_cast<size_t>(val),
Andreas Gampe29b38412014-08-13 00:15:43 -07001676 StubTest::GetEntrypoint(self, kQuickSetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001677 self,
1678 referrer);
1679
Mathieu Chartierc7853442015-03-27 14:35:38 -07001680 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001681 reinterpret_cast<size_t>(trg),
1682 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001683 StubTest::GetEntrypoint(self, kQuickGetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001684 self,
1685 referrer);
1686
1687 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1688
Mathieu Chartierc7853442015-03-27 14:35:38 -07001689 EXPECT_EQ(val, f->GetObj(trg));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001690}
1691#endif
1692
Mathieu Chartierc7853442015-03-27 14:35:38 -07001693static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001694 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001695 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001696#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1697 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001698 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001699
1700 // Allocate a string object for simplicity.
1701 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001702 set_and_check_instance(f, obj->Get(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001703
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001704 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001705#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001706 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001707 LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
1708 // Force-print to std::cout so it's also outside the logcat.
1709 std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
1710#endif
1711}
1712
1713
Calin Juravle872ab3f2015-10-02 07:27:51 +01001714// TODO: Complete these tests for 32b architectures
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001715
Mathieu Chartiere401d142015-04-22 13:56:20 -07001716static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001717 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001718 SHARED_REQUIRES(Locks::mutator_lock_) {
Calin Juravle6e399ac2015-10-02 23:56:06 +01001719#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) \
1720 || defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001721 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001722
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001723 for (size_t i = 0; i < arraysize(values); ++i) {
Calin Juravle6e399ac2015-10-02 23:56:06 +01001724 // 64 bit FieldSet stores the set value in the second register.
1725 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Calin Juravle24cc1b32015-10-06 11:46:58 +01001726 0U,
1727 values[i],
1728 StubTest::GetEntrypoint(self, kQuickSet64Static),
1729 self,
1730 referrer);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001731
Mathieu Chartierc7853442015-03-27 14:35:38 -07001732 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001733 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001734 StubTest::GetEntrypoint(self, kQuickGet64Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001735 self,
1736 referrer);
1737
1738 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1739 }
1740#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001741 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001742 LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
1743 // Force-print to std::cout so it's also outside the logcat.
1744 std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
1745#endif
1746}
1747
1748
Mathieu Chartierc7853442015-03-27 14:35:38 -07001749static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001750 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001751 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001752#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
1753 defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001754 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001755
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001756 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001757 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001758 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001759 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001760 StubTest::GetEntrypoint(self, kQuickSet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001761 self,
1762 referrer);
1763
Mathieu Chartierc7853442015-03-27 14:35:38 -07001764 int64_t res = f->GetLong(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001765 EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;
1766
1767 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001768 f->SetLong<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001769
Mathieu Chartierc7853442015-03-27 14:35:38 -07001770 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001771 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001772 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001773 StubTest::GetEntrypoint(self, kQuickGet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001774 self,
1775 referrer);
1776 EXPECT_EQ(res, static_cast<int64_t>(res2));
1777 }
1778#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001779 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001780 LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
1781 // Force-print to std::cout so it's also outside the logcat.
1782 std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1783#endif
1784}
1785
1786static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
1787 // garbage is created during ClassLinker::Init
1788
1789 JNIEnv* env = Thread::Current()->GetJniEnv();
1790 jclass jc = env->FindClass("AllFields");
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001791 CHECK(jc != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001792 jobject o = env->AllocObject(jc);
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001793 CHECK(o != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001794
1795 ScopedObjectAccess soa(self);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001796 StackHandleScope<3> hs(self);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001797 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
1798 Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001799 // Need a method as a referrer
Mathieu Chartiere401d142015-04-22 13:56:20 -07001800 ArtMethod* m = c->GetDirectMethod(0, sizeof(void*));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001801
1802 // Play with it...
1803
1804 // Static fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001805 for (ArtField& f : c->GetSFields()) {
1806 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001807 if (test_type != type) {
1808 continue;
1809 }
1810 switch (type) {
1811 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001812 GetSetBooleanStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001813 break;
1814 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001815 GetSetByteStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001816 break;
1817 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001818 GetSetCharStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001819 break;
1820 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001821 GetSetShortStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001822 break;
1823 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001824 GetSet32Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001825 break;
1826 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001827 GetSet64Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001828 break;
1829 case Primitive::Type::kPrimNot:
1830 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001831 if (f.GetTypeDescriptor()[0] != '[') {
1832 GetSetObjStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001833 }
1834 break;
1835 default:
1836 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001837 }
1838 }
1839
1840 // Instance fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001841 for (ArtField& f : c->GetIFields()) {
1842 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001843 if (test_type != type) {
1844 continue;
1845 }
1846 switch (type) {
1847 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001848 GetSetBooleanInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001849 break;
1850 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001851 GetSetByteInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001852 break;
1853 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001854 GetSetCharInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001855 break;
1856 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001857 GetSetShortInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001858 break;
1859 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001860 GetSet32Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001861 break;
1862 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001863 GetSet64Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001864 break;
1865 case Primitive::Type::kPrimNot:
1866 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001867 if (f.GetTypeDescriptor()[0] != '[') {
1868 GetSetObjInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001869 }
1870 break;
1871 default:
1872 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001873 }
1874 }
1875
1876 // TODO: Deallocate things.
1877}
1878
Fred Shih37f05ef2014-07-16 18:38:08 -07001879TEST_F(StubTest, Fields8) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001880 Thread* self = Thread::Current();
1881
1882 self->TransitionFromSuspendedToRunnable();
1883 LoadDex("AllFields");
1884 bool started = runtime_->Start();
1885 CHECK(started);
1886
1887 TestFields(self, this, Primitive::Type::kPrimBoolean);
1888 TestFields(self, this, Primitive::Type::kPrimByte);
1889}
1890
1891TEST_F(StubTest, Fields16) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001892 Thread* self = Thread::Current();
1893
1894 self->TransitionFromSuspendedToRunnable();
1895 LoadDex("AllFields");
1896 bool started = runtime_->Start();
1897 CHECK(started);
1898
1899 TestFields(self, this, Primitive::Type::kPrimChar);
1900 TestFields(self, this, Primitive::Type::kPrimShort);
1901}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001902
1903TEST_F(StubTest, Fields32) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001904 Thread* self = Thread::Current();
1905
1906 self->TransitionFromSuspendedToRunnable();
1907 LoadDex("AllFields");
1908 bool started = runtime_->Start();
1909 CHECK(started);
1910
1911 TestFields(self, this, Primitive::Type::kPrimInt);
1912}
1913
1914TEST_F(StubTest, FieldsObj) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001915 Thread* self = Thread::Current();
1916
1917 self->TransitionFromSuspendedToRunnable();
1918 LoadDex("AllFields");
1919 bool started = runtime_->Start();
1920 CHECK(started);
1921
1922 TestFields(self, this, Primitive::Type::kPrimNot);
1923}
1924
1925TEST_F(StubTest, Fields64) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001926 Thread* self = Thread::Current();
1927
1928 self->TransitionFromSuspendedToRunnable();
1929 LoadDex("AllFields");
1930 bool started = runtime_->Start();
1931 CHECK(started);
1932
1933 TestFields(self, this, Primitive::Type::kPrimLong);
1934}
1935
// Exercises the IMT-conflict trampoline and the interface-invocation
// trampoline, using List.contains / ArrayList.contains as the interface
// method and its concrete implementation.
TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(
      arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get representation (the runtime ArtMethod* behind the jmethodID).
  ArtMethod* contains_amethod = soa.DecodeMethod(contains_jmethod);

  // Patch up ArrayList.contains: if it has no quick code (never compiled),
  // point it at the quick-to-interpreter bridge so the trampolines below
  // have a valid entrypoint to dispatch to.
  if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(
      list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation of the interface method.
  ArtMethod* inf_contains = soa.DecodeMethod(inf_contains_jmethod);

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances: an empty ArrayList (the receiver) and a plain Object
  // (the element we query for / add).

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains. The hidden argument is the interface method's dex method
  // index; the list is still empty, so the call should return false.

  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self, contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object (through regular JNI, not the stub under test).

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());

  // Contains. Same call as before, but the object is now in the list, so
  // expect true.

  result = Invoke3WithReferrerAndHidden(
      0U, reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(obj.Get()),
      StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline), self, contains_amethod,
      static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline

  // Here the dex method index is passed as the first (method-index) argument
  // instead of the hidden one. The object was added above, so expect true.
  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                   kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // Search for the list itself, which was never added, so expect false.
  result = Invoke3WithReferrer(
      static_cast<size_t>(inf_contains->GetDexMethodIndex()),
      reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
      StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
      contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2055
Andreas Gampe6aac3552014-06-09 14:55:53 -07002056TEST_F(StubTest, StringIndexOf) {
Chris Larsencf283da2016-01-19 16:45:35 -08002057#if defined(__arm__) || defined(__aarch64__) || defined(__mips__)
Andreas Gampe6aac3552014-06-09 14:55:53 -07002058 Thread* self = Thread::Current();
2059 ScopedObjectAccess soa(self);
2060 // garbage is created during ClassLinker::Init
2061
2062 // Create some strings
2063 // Use array so we can index into it and use a matrix for expected results
2064 // Setup: The first half is standard. The second half uses a non-zero offset.
2065 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002066 const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
2067 static constexpr size_t kStringCount = arraysize(c_str);
2068 const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
2069 static constexpr size_t kCharCount = arraysize(c_char);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002070
2071 StackHandleScope<kStringCount> hs(self);
2072 Handle<mirror::String> s[kStringCount];
2073
2074 for (size_t i = 0; i < kStringCount; ++i) {
2075 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
2076 }
2077
2078 // Matrix of expectations. First component is first parameter. Note we only check against the
2079 // sign, not the value. As we are testing random offsets, we need to compute this and need to
2080 // rely on String::CompareTo being correct.
2081 static constexpr size_t kMaxLen = 9;
2082 DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";
2083
2084 // Last dimension: start, offset by 1.
2085 int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
2086 for (size_t x = 0; x < kStringCount; ++x) {
2087 for (size_t y = 0; y < kCharCount; ++y) {
2088 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2089 expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
2090 }
2091 }
2092 }
2093
2094 // Play with it...
2095
2096 for (size_t x = 0; x < kStringCount; ++x) {
2097 for (size_t y = 0; y < kCharCount; ++y) {
2098 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2099 int32_t start = static_cast<int32_t>(z) - 1;
2100
2101 // Test string_compareto x y
2102 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
Andreas Gampe29b38412014-08-13 00:15:43 -07002103 StubTest::GetEntrypoint(self, kQuickIndexOf), self);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002104
2105 EXPECT_FALSE(self->IsExceptionPending());
2106
2107 // The result is a 32b signed integer
2108 union {
2109 size_t r;
2110 int32_t i;
2111 } conv;
2112 conv.r = result;
2113
2114 EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
2115 c_char[y] << " @ " << start;
2116 }
2117 }
2118 }
2119
2120 // TODO: Deallocate things.
2121
2122 // Tests done.
2123#else
2124 LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
2125 // Force-print to std::cout so it's also outside the logcat.
2126 std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe51f76352014-05-21 08:28:48 -07002127#endif
2128}
2129
// Exercises the quick read-barrier slow-path stub by reading an object's
// class field through it and comparing against the regular read path.
TEST_F(StubTest, ReadBarrier) {
#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
    defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
  Thread* self = Thread::Current();

  const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Build an object instance
  Handle<mirror::Object> obj(hs.NewHandle(c->AllocObject(soa.Self())));

  EXPECT_FALSE(self->IsExceptionPending());

  // Invoke the slow path with (0, object, offset-of-class-field); it should
  // return the reference stored at that offset, i.e. the object's class.
  size_t result = Invoke3(0U, reinterpret_cast<size_t>(obj.Get()),
                          mirror::Object::ClassOffset().SizeValue(), readBarrierSlow, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
  mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
  // The stub must agree with the regular (non-stub) read path.
  EXPECT_EQ(klass, obj->GetClass());

  // Tests done.
#else
  LOG(INFO) << "Skipping read_barrier_slow";
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping read_barrier_slow" << std::endl;
#endif
}
2165
// Exercises the quick read-barrier-for-root slow-path stub by reading the
// String class GcRoot through it and comparing against the regular path.
TEST_F(StubTest, ReadBarrierForRoot) {
#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
    defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
  Thread* self = Thread::Current();

  const uintptr_t readBarrierForRootSlow =
      StubTest::GetEntrypoint(self, kQuickReadBarrierForRootSlow);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<1> hs(soa.Self());

  // Any String instance works; we only need its class for the comparison.
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  EXPECT_FALSE(self->IsExceptionPending());

  // Read the java.lang.String class root through the slow-path stub; the
  // root's address is passed as the first argument.
  GcRoot<mirror::Class>& root = mirror::String::java_lang_String_;
  size_t result = Invoke3(reinterpret_cast<size_t>(&root), 0U, 0U, readBarrierForRootSlow, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
  mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
  // The class read through the stub must match the allocated string's class.
  EXPECT_EQ(klass, obj->GetClass());

  // Tests done.
#else
  LOG(INFO) << "Skipping read_barrier_for_root_slow";
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping read_barrier_for_root_slow" << std::endl;
#endif
}
2200
Andreas Gampe525cde22014-04-22 15:44:50 -07002201} // namespace art