blob: 507dbf013083b1652c168f4275c4dc33ccd5c9d9 [file] [log] [blame]
Andreas Gampe525cde22014-04-22 15:44:50 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Mathieu Chartierc7853442015-03-27 14:35:38 -070019#include "art_field-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method-inl.h"
Andreas Gampe542451c2016-07-26 09:02:02 -070021#include "base/enums.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010022#include "class_linker-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070023#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070024#include "entrypoints/quick/quick_entrypoints_enum.h"
Nicolas Geoffray1004faa2016-03-23 14:28:30 +000025#include "linear_alloc.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070026#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070027#include "mirror/string-inl.h"
Ian Rogerse63db272014-07-15 15:36:11 -070028#include "scoped_thread_state_change.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070029
30namespace art {
31
32
// Test fixture that invokes ART quick-entrypoint stubs directly, via
// hand-written per-architecture inline-assembly trampolines (see
// Invoke3WithReferrerAndHidden below).
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      // Install a callee-save method for every CalleeSaveType that does not
      // already have one.
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  // Shrink the heap and force interpreter-only mode before the runtime starts.
  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class (which would not be
  // a friend of Thread and so could not reach tlsPtr_ directly).
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Invoke a stub with three arguments and no referrer.
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             ArtMethod* referrer) {
    return Invoke3WithReferrerAndHidden(arg0, arg1, arg2, code, self, referrer, 0);
  }

  // Core trampoline: calls the stub at `code` with arg0-arg2 in the managed
  // calling convention's argument registers, `referrer` on the stack where a
  // quick frame's method pointer would be, and `hidden` in the
  // architecture-specific hidden-argument register. Returns the stub's result
  // register. On aarch64 it additionally verifies that d8-d15 survive the call
  // (result stored in fpr_result and checked below).
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
#define PUSH(reg) "push " # reg "\n\t .cfi_adjust_cfa_offset 4\n\t"
#define POP(reg) "pop " # reg "\n\t .cfi_adjust_cfa_offset -4\n\t"
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"  // This is a memory op, so do this early. If it is off of
                                      // esp, then we won't be able to access it after spilling.

        // Spill 6 registers.
        PUSH(%%ebx)
        PUSH(%%ecx)
        PUSH(%%edx)
        PUSH(%%esi)
        PUSH(%%edi)
        PUSH(%%ebp)

        // Store the inputs to the stack, but keep the referrer up top, less work.
        PUSH(%[referrer])  // Align stack.
        PUSH(%[referrer])  // Store referrer

        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[code])
        // Now read them back into the required registers.
        POP(%%edi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%eax)
        // Call is prepared now.

        "call *%%edi\n\t"         // Call the stub
        "addl $8, %%esp\n\t"      // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -8\n\t"

        // Restore 6 registers.
        POP(%%ebp)
        POP(%%edi)
        POP(%%esi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%ebx)

        : "=a" (result)
          // Use the result from eax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory", "xmm7");  // clobber.
#undef PUSH
#undef POP
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"   // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"  // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"              // Call the stub
        "add sp, sp, #12\n\t"     // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"    // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"   // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #80\n\t"
        ".cfi_adjust_cfa_offset 80\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"
        // To be extra defensive, store x20. We do this because some of the stubs might make a
        // transition into the runtime via the blr instruction below and *not* save x20.
        "str x20, [sp, #64]\n\t"
        // 8 byte buffer

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x19, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "ldr x20, [sp, #64]\n\t"
        "add sp, sp, #80\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -80\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
          // Leave one register unclobbered, which is needed for compiling with
          // -fstack-protector-strong. According to AAPCS64 registers x9-x15 are caller-saved,
          // which means we should unclobber one of the callee-saved registers that are unused.
          // Here we use x20.
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -24\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"
        "sw %[hidden], 20($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"
        "lw $s1, 16($sp)\n\t"
        "lw $t0, 20($sp)\n\t"
        "addiu $sp, $sp, 24\n\t"

        "jalr $t9\n\t"             // Call the stub.
        "nop\n\t"                  // Branch delay slot.
        "addiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"   // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -48\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"
        "sd %[hidden], 40($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"
        "ld $s1, 32($sp)\n\t"
        "ld $t0, 40($sp)\n\t"
        "daddiu $sp, $sp, 48\n\t"

        "jalr $t9\n\t"              // Call the stub.
        "nop\n\t"                   // Branch delay slot.
        "daddiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"   // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
          // Instead of the aliases t0-t3, register names $12-$15 have been used in the clobber
          // list because t0-t3 are ambiguous.
        : "at", "v0", "v1", "$12", "$13", "$14", "$15", "s0", "s1", "s2", "s3", "s4", "s5", "s6",
          "s7", "t8", "t9", "k0", "k1", "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
#define PUSH(reg) "pushq " # reg "\n\t .cfi_adjust_cfa_offset 8\n\t"
#define POP(reg) "popq " # reg "\n\t .cfi_adjust_cfa_offset -8\n\t"
    // Note: Uses the native convention. We do a callee-save regimen by manually spilling and
    // restoring almost all registers.
    // TODO: Set the thread?
    __asm__ __volatile__(
        // Spill almost everything (except rax, rsp). 14 registers.
        PUSH(%%rbx)
        PUSH(%%rcx)
        PUSH(%%rdx)
        PUSH(%%rsi)
        PUSH(%%rdi)
        PUSH(%%rbp)
        PUSH(%%r8)
        PUSH(%%r9)
        PUSH(%%r10)
        PUSH(%%r11)
        PUSH(%%r12)
        PUSH(%%r13)
        PUSH(%%r14)
        PUSH(%%r15)

        PUSH(%[referrer])  // Push referrer & 16B alignment padding
        PUSH(%[referrer])

        // Now juggle the input registers.
        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[hidden])
        PUSH(%[code])
        POP(%%r8)
        POP(%%rax)
        POP(%%rdx)
        POP(%%rsi)
        POP(%%rdi)

        "call *%%r8\n\t"       // Call the stub
        "addq $16, %%rsp\n\t"  // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"

        POP(%%r15)
        POP(%%r14)
        POP(%%r13)
        POP(%%r12)
        POP(%%r11)
        POP(%%r10)
        POP(%%r9)
        POP(%%r8)
        POP(%%rbp)
        POP(%%rdi)
        POP(%%rsi)
        POP(%%rdx)
        POP(%%rcx)
        POP(%%rbx)

        : "=a" (result)
          // Use the result from rax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into some other
          // register. We can't use "b" (rbx), as ASAN uses this for the frame pointer.
        : "memory");  // We spill and restore (almost) all registers, so only mention memory here.
#undef PUSH
#undef POP
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // The aarch64 path sets fpr_result to 1 if any of d8-d15 was corrupted by
    // the stub; every other path leaves it 0.
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Read the raw entrypoint pointer for `entrypoint` directly out of the
  // Thread object's entrypoint table.
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
    offset = GetThreadOffset<kRuntimePointerSize>(entrypoint).Int32Value();
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  // Result of the FPR-preservation check from the last invocation (see above).
  size_t fp_result;
};
540
541
Andreas Gampe525cde22014-04-22 15:44:50 -0700542TEST_F(StubTest, Memcpy) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200543#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
Andreas Gampe525cde22014-04-22 15:44:50 -0700544 Thread* self = Thread::Current();
545
546 uint32_t orig[20];
547 uint32_t trg[20];
548 for (size_t i = 0; i < 20; ++i) {
549 orig[i] = i;
550 trg[i] = 0;
551 }
552
553 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700554 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700555
556 EXPECT_EQ(orig[0], trg[0]);
557
558 for (size_t i = 1; i < 4; ++i) {
559 EXPECT_NE(orig[i], trg[i]);
560 }
561
562 for (size_t i = 4; i < 14; ++i) {
563 EXPECT_EQ(orig[i], trg[i]);
564 }
565
566 for (size_t i = 14; i < 20; ++i) {
567 EXPECT_NE(orig[i], trg[i]);
568 }
569
570 // TODO: Test overlapping?
571
572#else
573 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
574 // Force-print to std::cout so it's also outside the logcat.
575 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
576#endif
577}
578
Andreas Gampe525cde22014-04-22 15:44:50 -0700579TEST_F(StubTest, LockObject) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200580#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
581 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700582 static constexpr size_t kThinLockLoops = 100;
583
Andreas Gampe525cde22014-04-22 15:44:50 -0700584 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700585
586 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
587
Andreas Gampe525cde22014-04-22 15:44:50 -0700588 // Create an object
589 ScopedObjectAccess soa(self);
590 // garbage is created during ClassLinker::Init
591
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700592 StackHandleScope<2> hs(soa.Self());
593 Handle<mirror::String> obj(
594 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700595 LockWord lock = obj->GetLockWord(false);
596 LockWord::LockState old_state = lock.GetState();
597 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
598
Andreas Gampe29b38412014-08-13 00:15:43 -0700599 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700600
601 LockWord lock_after = obj->GetLockWord(false);
602 LockWord::LockState new_state = lock_after.GetState();
603 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700604 EXPECT_EQ(lock_after.ThinLockCount(), 0U); // Thin lock starts count at zero
605
606 for (size_t i = 1; i < kThinLockLoops; ++i) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700607 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700608
609 // Check we're at lock count i
610
611 LockWord l_inc = obj->GetLockWord(false);
612 LockWord::LockState l_inc_state = l_inc.GetState();
613 EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
614 EXPECT_EQ(l_inc.ThinLockCount(), i);
615 }
616
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700617 // Force a fat lock by running identity hashcode to fill up lock word.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700618 Handle<mirror::String> obj2(hs.NewHandle(
619 mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700620
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700621 obj2->IdentityHashCode();
622
Andreas Gampe29b38412014-08-13 00:15:43 -0700623 Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700624
625 LockWord lock_after2 = obj2->GetLockWord(false);
626 LockWord::LockState new_state2 = lock_after2.GetState();
627 EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
628 EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));
629
630 // Test done.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700631#else
632 LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
633 // Force-print to std::cout so it's also outside the logcat.
634 std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
635#endif
636}
637
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700638
// Minimal deterministic pseudo-random generator used to drive the lock stress
// test below. Not a statistically sound PRNG; cheap reproducibility is all
// that matters here.
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  // Advance the state and return the new value.
  uint32_t next() {
    // The multiply wraps modulo 2^32 (unsigned arithmetic) before the
    // reduction; the "+ 13" keeps the sequence from getting stuck at zero.
    const uint32_t product = val_ * 48271u;
    val_ = product % 2147483647u + 13u;
    return val_;
  }

  uint32_t val_;  // Current state; public, matching the original layout.
};
650
651
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700652// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
653static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200654#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
655 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700656 static constexpr size_t kThinLockLoops = 100;
657
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700658 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700659
660 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
661 const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700662 // Create an object
663 ScopedObjectAccess soa(self);
664 // garbage is created during ClassLinker::Init
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700665 static constexpr size_t kNumberOfLocks = 10; // Number of objects = lock
666 StackHandleScope<kNumberOfLocks + 1> hs(self);
667 Handle<mirror::String> obj(
668 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700669 LockWord lock = obj->GetLockWord(false);
670 LockWord::LockState old_state = lock.GetState();
671 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
672
Andreas Gampe29b38412014-08-13 00:15:43 -0700673 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700674 // This should be an illegal monitor state.
675 EXPECT_TRUE(self->IsExceptionPending());
676 self->ClearException();
677
678 LockWord lock_after = obj->GetLockWord(false);
679 LockWord::LockState new_state = lock_after.GetState();
680 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);
Andreas Gampe525cde22014-04-22 15:44:50 -0700681
Andreas Gampe29b38412014-08-13 00:15:43 -0700682 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700683
684 LockWord lock_after2 = obj->GetLockWord(false);
685 LockWord::LockState new_state2 = lock_after2.GetState();
686 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);
687
Andreas Gampe29b38412014-08-13 00:15:43 -0700688 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700689
690 LockWord lock_after3 = obj->GetLockWord(false);
691 LockWord::LockState new_state3 = lock_after3.GetState();
692 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);
693
694 // Stress test:
695 // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
696 // each step.
697
698 RandGen r(0x1234);
699
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700700 constexpr size_t kIterations = 10000; // Number of iterations
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700701 constexpr size_t kMoveToFat = 1000; // Chance of 1:kMoveFat to make a lock fat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700702
703 size_t counts[kNumberOfLocks];
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700704 bool fat[kNumberOfLocks]; // Whether a lock should be thin or fat.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700705 Handle<mirror::String> objects[kNumberOfLocks];
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700706
707 // Initialize = allocate.
708 for (size_t i = 0; i < kNumberOfLocks; ++i) {
709 counts[i] = 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700710 fat[i] = false;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700711 objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700712 }
713
714 for (size_t i = 0; i < kIterations; ++i) {
715 // Select which lock to update.
716 size_t index = r.next() % kNumberOfLocks;
717
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700718 // Make lock fat?
719 if (!fat[index] && (r.next() % kMoveToFat == 0)) {
720 fat[index] = true;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700721 objects[index]->IdentityHashCode();
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700722
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700723 LockWord lock_iter = objects[index]->GetLockWord(false);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700724 LockWord::LockState iter_state = lock_iter.GetState();
725 if (counts[index] == 0) {
726 EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
727 } else {
728 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
729 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700730 } else {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800731 bool take_lock; // Whether to lock or unlock in this step.
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700732 if (counts[index] == 0) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800733 take_lock = true;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700734 } else if (counts[index] == kThinLockLoops) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800735 take_lock = false;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700736 } else {
737 // Randomly.
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800738 take_lock = r.next() % 2 == 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700739 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700740
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800741 if (take_lock) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700742 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
743 self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700744 counts[index]++;
745 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700746 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700747 art_quick_unlock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700748 counts[index]--;
749 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700750
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700751 EXPECT_FALSE(self->IsExceptionPending());
752
753 // Check the new state.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700754 LockWord lock_iter = objects[index]->GetLockWord(true);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700755 LockWord::LockState iter_state = lock_iter.GetState();
756 if (fat[index]) {
757 // Abuse MonitorInfo.
758 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700759 MonitorInfo info(objects[index].Get());
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700760 EXPECT_EQ(counts[index], info.entry_count_) << index;
761 } else {
762 if (counts[index] > 0) {
763 EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
764 EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
765 } else {
766 EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
767 }
768 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700769 }
770 }
771
772 // Unlock the remaining count times and then check it's unlocked. Then deallocate.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700773 // Go reverse order to correctly handle Handles.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700774 for (size_t i = 0; i < kNumberOfLocks; ++i) {
775 size_t index = kNumberOfLocks - 1 - i;
776 size_t count = counts[index];
777 while (count > 0) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700778 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
779 self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700780 count--;
781 }
782
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700783 LockWord lock_after4 = objects[index]->GetLockWord(false);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700784 LockWord::LockState new_state4 = lock_after4.GetState();
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700785 EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
786 || LockWord::LockState::kFatLocked == new_state4);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700787 }
788
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700789 // Test done.
Andreas Gampe525cde22014-04-22 15:44:50 -0700790#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -0800791 UNUSED(test);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700792 LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe525cde22014-04-22 15:44:50 -0700793 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700794 std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe525cde22014-04-22 15:44:50 -0700795#endif
796}
797
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700798TEST_F(StubTest, UnlockObject) {
Andreas Gampe369810a2015-01-14 19:53:31 -0800799 // This will lead to monitor error messages in the log.
800 ScopedLogSeverity sls(LogSeverity::FATAL);
801
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700802 TestUnlockObject(this);
803}
Andreas Gampe525cde22014-04-22 15:44:50 -0700804
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200805#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
806 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700807extern "C" void art_quick_check_cast(void);
808#endif
809
810TEST_F(StubTest, CheckCast) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200811#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
812 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700813 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700814
815 const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);
816
Andreas Gampe525cde22014-04-22 15:44:50 -0700817 // Find some classes.
818 ScopedObjectAccess soa(self);
819 // garbage is created during ClassLinker::Init
820
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700821 StackHandleScope<2> hs(soa.Self());
822 Handle<mirror::Class> c(
823 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
824 Handle<mirror::Class> c2(
825 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700826
827 EXPECT_FALSE(self->IsExceptionPending());
828
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700829 Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700830 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700831
832 EXPECT_FALSE(self->IsExceptionPending());
833
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700834 Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700835 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700836
837 EXPECT_FALSE(self->IsExceptionPending());
838
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700839 Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700840 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700841
842 EXPECT_FALSE(self->IsExceptionPending());
843
844 // TODO: Make the following work. But that would require correct managed frames.
845
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700846 Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700847 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700848
849 EXPECT_TRUE(self->IsExceptionPending());
850 self->ClearException();
851
852#else
853 LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
854 // Force-print to std::cout so it's also outside the logcat.
855 std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
856#endif
857}
858
859
Andreas Gampe525cde22014-04-22 15:44:50 -0700860TEST_F(StubTest, APutObj) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200861#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
862 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700863 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700864
865 // Do not check non-checked ones, we'd need handlers and stuff...
866 const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
867 StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);
868
Andreas Gampe525cde22014-04-22 15:44:50 -0700869 // Create an object
870 ScopedObjectAccess soa(self);
871 // garbage is created during ClassLinker::Init
872
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700873 StackHandleScope<5> hs(soa.Self());
874 Handle<mirror::Class> c(
875 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
876 Handle<mirror::Class> ca(
877 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700878
879 // Build a string array of size 1
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700880 Handle<mirror::ObjectArray<mirror::Object>> array(
881 hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));
Andreas Gampe525cde22014-04-22 15:44:50 -0700882
883 // Build a string -> should be assignable
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700884 Handle<mirror::String> str_obj(
885 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700886
887 // Build a generic object -> should fail assigning
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700888 Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));
Andreas Gampe525cde22014-04-22 15:44:50 -0700889
890 // Play with it...
891
892 // 1) Success cases
Andreas Gampef4e910b2014-04-29 16:55:52 -0700893 // 1.1) Assign str_obj to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700894
895 EXPECT_FALSE(self->IsExceptionPending());
896
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700897 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700898 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700899
900 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700901 EXPECT_EQ(str_obj.Get(), array->Get(0));
Andreas Gampe525cde22014-04-22 15:44:50 -0700902
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700903 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700904 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700905
906 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700907 EXPECT_EQ(str_obj.Get(), array->Get(1));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700908
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700909 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700910 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700911
912 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700913 EXPECT_EQ(str_obj.Get(), array->Get(2));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700914
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700915 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700916 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700917
918 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700919 EXPECT_EQ(str_obj.Get(), array->Get(3));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700920
921 // 1.2) Assign null to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700922
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700923 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700924 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700925
926 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampef4e910b2014-04-29 16:55:52 -0700927 EXPECT_EQ(nullptr, array->Get(0));
928
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700929 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700930 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700931
932 EXPECT_FALSE(self->IsExceptionPending());
933 EXPECT_EQ(nullptr, array->Get(1));
934
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700935 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700936 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700937
938 EXPECT_FALSE(self->IsExceptionPending());
939 EXPECT_EQ(nullptr, array->Get(2));
940
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700941 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700942 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700943
944 EXPECT_FALSE(self->IsExceptionPending());
945 EXPECT_EQ(nullptr, array->Get(3));
Andreas Gampe525cde22014-04-22 15:44:50 -0700946
947 // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.
948
949 // 2) Failure cases (str into str[])
950 // 2.1) Array = null
951 // TODO: Throwing NPE needs actual DEX code
952
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700953// Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe525cde22014-04-22 15:44:50 -0700954// reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
955//
956// EXPECT_TRUE(self->IsExceptionPending());
957// self->ClearException();
958
959 // 2.2) Index < 0
960
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700961 Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
962 reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700963 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700964
965 EXPECT_TRUE(self->IsExceptionPending());
966 self->ClearException();
967
968 // 2.3) Index > 0
969
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700970 Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700971 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700972
973 EXPECT_TRUE(self->IsExceptionPending());
974 self->ClearException();
975
976 // 3) Failure cases (obj into str[])
977
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700978 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700979 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700980
981 EXPECT_TRUE(self->IsExceptionPending());
982 self->ClearException();
983
984 // Tests done.
985#else
986 LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
987 // Force-print to std::cout so it's also outside the logcat.
988 std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
989#endif
990}
991
// Exercises the quick object-allocation entrypoints (AllocObject,
// AllocObjectResolved, AllocObjectInitialized), including the out-of-memory
// failure path. Skipped on ISAs without stub support.
TEST_F(StubTest, AllocObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // kQuickAllocObject takes a type index plus a referrer method.
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            // arbitrary
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0, kRuntimePointerSize)),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObject),
                            self);

    // A non-null, verifiable instance of exactly class c must come back.
    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // kQuickAllocObjectResolved takes the resolved class directly.
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // kQuickAllocObjectInitialized: same contract as the resolved variant.
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory: fill the heap, then verify the entrypoint raises OOME.
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    // Allocate progressively smaller arrays until only tiny gaps remain.
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        // Allocation failed at this size: shrink and retry.
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        // Keep the array alive via the handle scope so the GC cannot free it.
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    // With the heap exhausted, the stub must fail with a pending exception
    // and a null result.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1116
// Exercises the quick array-allocation entrypoints (AllocArrayResolved and,
// in disabled form, AllocArray), including an intentionally absurd size that
// must fail. Skipped on ISAs without stub support.
TEST_F(StubTest, AllocObjectArray) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  // Deliberately disabled (see comment above); kept for reference.
  if ((false)) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(
        static_cast<size_t>(c->GetDexTypeIndex()),  // type_idx
        10U,                                        // array length
        // arbitrary
        reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0, kRuntimePointerSize)),
        StubTest::GetEntrypoint(self, kQuickAllocArray),
        self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // Success path: allocate an Object[10] through the resolved-class stub.
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    // The result must be an object array of exactly class c with length 10.
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory: a GB-sized length must fail with a pending exception and
  // a null result.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
                            GB,  // that should fail...
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1203
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001204
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001205TEST_F(StubTest, StringCompareTo) {
jessicahandojo3aaa37b2016-07-29 14:46:37 -07001206 TEST_DISABLED_FOR_STRING_COMPRESSION();
Scott Wakelingc25cbf12016-04-18 09:00:11 +01001207 // There is no StringCompareTo runtime entrypoint for __arm__ or __aarch64__.
1208#if defined(__i386__) || defined(__mips__) || \
1209 (defined(__x86_64__) && !defined(__APPLE__))
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001210 // TODO: Check the "Unresolved" allocation stubs
1211
1212 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -07001213
1214 const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);
1215
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001216 ScopedObjectAccess soa(self);
1217 // garbage is created during ClassLinker::Init
1218
1219 // Create some strings
1220 // Use array so we can index into it and use a matrix for expected results
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001221 // Setup: The first half is standard. The second half uses a non-zero offset.
1222 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001223 const char* c[] = { "", "", "a", "aa", "ab",
Serban Constantinescu86797a72014-06-19 16:17:56 +01001224 "aacaacaacaacaacaac", // This one's under the default limit to go to __memcmp16.
1225 "aacaacaacaacaacaacaacaacaacaacaacaac", // This one's over.
1226 "aacaacaacaacaacaacaacaacaacaacaacaaca" }; // As is this one. We need a separate one to
1227 // defeat object-equal optimizations.
Jeff Hao848f70a2014-01-15 13:49:50 -08001228 static constexpr size_t kStringCount = arraysize(c);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001229
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001230 StackHandleScope<kStringCount> hs(self);
1231 Handle<mirror::String> s[kStringCount];
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001232
Jeff Hao848f70a2014-01-15 13:49:50 -08001233 for (size_t i = 0; i < kStringCount; ++i) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001234 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001235 }
1236
1237 // TODO: wide characters
1238
1239 // Matrix of expectations. First component is first parameter. Note we only check against the
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001240 // sign, not the value. As we are testing random offsets, we need to compute this and need to
1241 // rely on String::CompareTo being correct.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001242 int32_t expected[kStringCount][kStringCount];
1243 for (size_t x = 0; x < kStringCount; ++x) {
1244 for (size_t y = 0; y < kStringCount; ++y) {
1245 expected[x][y] = s[x]->CompareTo(s[y].Get());
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001246 }
1247 }
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001248
1249 // Play with it...
1250
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001251 for (size_t x = 0; x < kStringCount; ++x) {
1252 for (size_t y = 0; y < kStringCount; ++y) {
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001253 // Test string_compareto x y
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001254 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
1255 reinterpret_cast<size_t>(s[y].Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001256 art_quick_string_compareto, self);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001257
1258 EXPECT_FALSE(self->IsExceptionPending());
1259
1260 // The result is a 32b signed integer
1261 union {
1262 size_t r;
1263 int32_t i;
1264 } conv;
1265 conv.r = result;
1266 int32_t e = expected[x][y];
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001267 EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1268 conv.r;
1269 EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1270 conv.r;
1271 EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1272 conv.r;
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001273 }
1274 }
1275
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001276 // TODO: Deallocate things.
1277
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001278 // Tests done.
1279#else
1280 LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
1281 // Force-print to std::cout so it's also outside the logcat.
1282 std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
1283 std::endl;
1284#endif
1285}
1286
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001287
Mathieu Chartierc7853442015-03-27 14:35:38 -07001288static void GetSetBooleanStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001289 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001290 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001291#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1292 (defined(__x86_64__) && !defined(__APPLE__))
Fred Shih37f05ef2014-07-16 18:38:08 -07001293 constexpr size_t num_values = 5;
1294 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1295
1296 for (size_t i = 0; i < num_values; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001297 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001298 static_cast<size_t>(values[i]),
1299 0U,
1300 StubTest::GetEntrypoint(self, kQuickSet8Static),
1301 self,
1302 referrer);
1303
Mathieu Chartierc7853442015-03-27 14:35:38 -07001304 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001305 0U, 0U,
1306 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1307 self,
1308 referrer);
1309 // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
1310 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1311 }
1312#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001313 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001314 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1315 // Force-print to std::cout so it's also outside the logcat.
1316 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1317#endif
1318}
Mathieu Chartiere401d142015-04-22 13:56:20 -07001319static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Mathieu Chartierc7853442015-03-27 14:35:38 -07001320 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001321 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001322#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1323 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001324 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001325
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001326 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001327 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001328 static_cast<size_t>(values[i]),
1329 0U,
1330 StubTest::GetEntrypoint(self, kQuickSet8Static),
1331 self,
1332 referrer);
1333
Mathieu Chartierc7853442015-03-27 14:35:38 -07001334 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001335 0U, 0U,
1336 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1337 self,
1338 referrer);
1339 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1340 }
1341#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001342 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001343 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1344 // Force-print to std::cout so it's also outside the logcat.
1345 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1346#endif
1347}
1348
1349
// Exercises the instance boolean field |f| of |obj|: writes byte patterns via the quick
// Set8Instance stub, verifies the stored byte via ArtField::GetBoolean, re-stores it through
// ArtField::SetBoolean, then reads it back via the quick GetBooleanInstance stub.
static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
                                  ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Includes non-canonical "true" bytes (2, 128, 0xFF) to check raw byte storage.
  uint8_t values[] = { 0, true, 2, 128, 0xFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the 8-bit instance setter stub; args are (field_idx, object, value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    // Cross-check with the non-stub accessor.
    uint8_t res = f->GetBoolean(obj->Get());
    EXPECT_EQ(values[i], res) << "Iteration " << i;

    // Re-store via the non-stub setter, then read back via the getter stub.
    f->SetBoolean<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint8_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Exercises the instance byte field |f| of |obj|: writes via the quick Set8Instance stub,
// verifies via ArtField::GetByte, stores an incremented value via ArtField::SetByte, then
// confirms the incremented value is observed through the quick GetByteInstance stub.
static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
                               Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  int8_t values[] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the 8-bit instance setter stub; args are (field_idx, object, value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    // Cross-check with the non-stub accessor.
    int8_t res = f->GetByte(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Store a *different* value (res + 1) via the non-stub setter so the subsequent stub read
    // proves it sees the latest write, not the stub's earlier one.
    f->SetByte<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetByteInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int8_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1419
Mathieu Chartiere401d142015-04-22 13:56:20 -07001420static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001421 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001422 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001423#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1424 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001425 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001426
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001427 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001428 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001429 static_cast<size_t>(values[i]),
1430 0U,
1431 StubTest::GetEntrypoint(self, kQuickSet16Static),
1432 self,
1433 referrer);
1434
Mathieu Chartierc7853442015-03-27 14:35:38 -07001435 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001436 0U, 0U,
1437 StubTest::GetEntrypoint(self, kQuickGetCharStatic),
1438 self,
1439 referrer);
1440
1441 EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
1442 }
1443#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001444 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001445 LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
1446 // Force-print to std::cout so it's also outside the logcat.
1447 std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1448#endif
1449}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001450static void GetSetShortStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001451 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001452 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001453#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1454 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001455 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001456
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001457 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001458 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001459 static_cast<size_t>(values[i]),
1460 0U,
1461 StubTest::GetEntrypoint(self, kQuickSet16Static),
1462 self,
1463 referrer);
1464
Mathieu Chartierc7853442015-03-27 14:35:38 -07001465 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001466 0U, 0U,
1467 StubTest::GetEntrypoint(self, kQuickGetShortStatic),
1468 self,
1469 referrer);
1470
1471 EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
1472 }
1473#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001474 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001475 LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
1476 // Force-print to std::cout so it's also outside the logcat.
1477 std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1478#endif
1479}
1480
// Exercises the instance char field |f| of |obj|: writes via the quick Set16Instance stub,
// verifies via ArtField::GetChar, stores an incremented value via ArtField::SetChar, then
// confirms the incremented value is observed through the quick GetCharInstance stub.
static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
                               Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the 16-bit instance setter stub; args are (field_idx, object, value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // Cross-check with the non-stub accessor.
    uint16_t res = f->GetChar(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Store a *different* value (res + 1) via the non-stub setter so the subsequent stub read
    // proves it sees the latest write.
    f->SetChar<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetCharInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Exercises the instance short field |f| of |obj|: writes via the quick Set16Instance stub,
// verifies via ArtField::GetShort, stores an incremented value via ArtField::SetShort, then
// confirms the incremented value is observed through the quick GetShortInstance stub.
static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
                                Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the 16-bit instance setter stub; args are (field_idx, object, value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // Cross-check with the non-stub accessor.
    int16_t res = f->GetShort(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Store a *different* value (res + 1) via the non-stub setter so the subsequent stub read
    // proves it sees the latest write.
    f->SetShort<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetShortInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1549
// Round-trips a set of 32-bit values through the quick Set32Static / Get32Static stubs for the
// static int field |f| and checks each value is read back unchanged.
static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
                           StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the 32-bit static setter stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet32Static),
                              self,
                              referrer);

    // Read back through the 32-bit static getter stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet32Static),
                                           self,
                                           referrer);

#if defined(__mips__) && defined(__LP64__)
    // On mips64 only the low 32 bits of the returned size_t are compared — presumably because
    // the stub's 32-bit result is extended in the 64-bit return register; confirm against the
    // mips64 stub implementation.
    EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
#else
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
#endif
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1584
1585
// Exercises the instance int field |f| of |obj|: writes via the quick Set32Instance stub,
// verifies via ArtField::GetInt, stores an incremented value via ArtField::SetInt, then
// confirms the incremented value is observed through the quick Get32Instance stub.
static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
                             Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the 32-bit instance setter stub; args are (field_idx, object, value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet32Instance),
                              self,
                              referrer);

    // Cross-check with the non-stub accessor.
    int32_t res = f->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    // Store a *different* value (res + 1) via the non-stub setter so the subsequent stub read
    // proves it sees the latest write.
    res++;
    f->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet32Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1622
1623
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))

// Stores reference |val| into the static object field identified by dex field index |f_idx|
// via the quick SetObjStatic stub, reads it back via the quick GetObjStatic stub, and expects
// the identical reference (compared as a raw pointer value).
static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Write the reference through the object static setter stub.
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickSetObjStatic),
                            self,
                            referrer);

  // Read it back through the object static getter stub.
  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjStatic),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif
1646
// Exercises the static object field |f|: round-trips null, then a freshly allocated string,
// then null again, through the quick SetObjStatic / GetObjStatic stubs.
static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
                            StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  // NOTE(review): str is held as a raw mirror::String* across the stub invocations below;
  // presumably safe because no GC is triggered in between — confirm.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);

  set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1666
1667
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
// Stores reference |val| into the instance object field |f| of object |trg| via the quick
// SetObjInstance stub, reads it back via the quick GetObjInstance stub, and additionally
// cross-checks the stored reference through ArtField::GetObj.
static void set_and_check_instance(ArtField* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, ArtMethod* referrer,
                                   StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Write the reference through the object instance setter stub; args are
  // (field_idx, object, value).
  test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            StubTest::GetEntrypoint(self, kQuickSetObjInstance),
                            self,
                            referrer);

  // Read it back through the object instance getter stub.
  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjInstance),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  // Also verify via the non-stub accessor.
  EXPECT_EQ(val, f->GetObj(trg));
}
#endif
1693
// Exercises the instance object field |f| of |obj|: round-trips null, then a freshly allocated
// string, then null again, through the quick SetObjInstance / GetObjInstance stubs.
static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
                              Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  // NOTE(review): str is held as a raw mirror::String* across the stub invocations below;
  // presumably safe because no GC is triggered in between — confirm.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1713
1714
Calin Juravle872ab3f2015-10-02 07:27:51 +01001715// TODO: Complete these tests for 32b architectures
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001716
// Round-trips a set of 64-bit values through the quick Set64Static / Get64Static stubs for the
// static long field |f|. Only enabled on 64-bit targets (x86-64, mips64, arm64) where size_t
// can carry the full 64-bit value.
static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
                           StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) \
    || defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // 64 bit FieldSet stores the set value in the second register.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              0U,
                              values[i],
                              StubTest::GetEntrypoint(self, kQuickSet64Static),
                              self,
                              referrer);

    // Read back through the 64-bit static getter stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet64Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1748
1749
// Exercises the instance long field |f| of |obj|: writes via the quick Set64Instance stub,
// verifies via ArtField::GetLong, stores an incremented value via ArtField::SetLong, then
// confirms the incremented value is observed through the quick Get64Instance stub.
// Only enabled on 64-bit targets, so the size_t casts below do not truncate the 64-bit values.
static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
                             Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
    defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the 64-bit instance setter stub; args are (field_idx, object, value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet64Instance),
                              self,
                              referrer);

    // Cross-check with the non-stub accessor.
    int64_t res = f->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    // Store a *different* value (res + 1) via the non-stub setter so the subsequent stub read
    // proves it sees the latest write.
    res++;
    f->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet64Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1786
1787static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
1788 // garbage is created during ClassLinker::Init
1789
1790 JNIEnv* env = Thread::Current()->GetJniEnv();
1791 jclass jc = env->FindClass("AllFields");
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001792 CHECK(jc != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001793 jobject o = env->AllocObject(jc);
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001794 CHECK(o != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001795
1796 ScopedObjectAccess soa(self);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001797 StackHandleScope<3> hs(self);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001798 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
1799 Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001800 // Need a method as a referrer
Andreas Gampe542451c2016-07-26 09:02:02 -07001801 ArtMethod* m = c->GetDirectMethod(0, kRuntimePointerSize);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001802
1803 // Play with it...
1804
1805 // Static fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001806 for (ArtField& f : c->GetSFields()) {
1807 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001808 if (test_type != type) {
1809 continue;
1810 }
1811 switch (type) {
1812 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001813 GetSetBooleanStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001814 break;
1815 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001816 GetSetByteStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001817 break;
1818 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001819 GetSetCharStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001820 break;
1821 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001822 GetSetShortStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001823 break;
1824 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001825 GetSet32Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001826 break;
1827 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001828 GetSet64Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001829 break;
1830 case Primitive::Type::kPrimNot:
1831 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001832 if (f.GetTypeDescriptor()[0] != '[') {
1833 GetSetObjStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001834 }
1835 break;
1836 default:
1837 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001838 }
1839 }
1840
1841 // Instance fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001842 for (ArtField& f : c->GetIFields()) {
1843 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001844 if (test_type != type) {
1845 continue;
1846 }
1847 switch (type) {
1848 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001849 GetSetBooleanInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001850 break;
1851 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001852 GetSetByteInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001853 break;
1854 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001855 GetSetCharInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001856 break;
1857 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001858 GetSetShortInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001859 break;
1860 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001861 GetSet32Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001862 break;
1863 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001864 GetSet64Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001865 break;
1866 case Primitive::Type::kPrimNot:
1867 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001868 if (f.GetTypeDescriptor()[0] != '[') {
1869 GetSetObjInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001870 }
1871 break;
1872 default:
1873 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001874 }
1875 }
1876
1877 // TODO: Deallocate things.
1878}
1879
Fred Shih37f05ef2014-07-16 18:38:08 -07001880TEST_F(StubTest, Fields8) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001881 Thread* self = Thread::Current();
1882
1883 self->TransitionFromSuspendedToRunnable();
1884 LoadDex("AllFields");
1885 bool started = runtime_->Start();
1886 CHECK(started);
1887
1888 TestFields(self, this, Primitive::Type::kPrimBoolean);
1889 TestFields(self, this, Primitive::Type::kPrimByte);
1890}
1891
1892TEST_F(StubTest, Fields16) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001893 Thread* self = Thread::Current();
1894
1895 self->TransitionFromSuspendedToRunnable();
1896 LoadDex("AllFields");
1897 bool started = runtime_->Start();
1898 CHECK(started);
1899
1900 TestFields(self, this, Primitive::Type::kPrimChar);
1901 TestFields(self, this, Primitive::Type::kPrimShort);
1902}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001903
1904TEST_F(StubTest, Fields32) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001905 Thread* self = Thread::Current();
1906
1907 self->TransitionFromSuspendedToRunnable();
1908 LoadDex("AllFields");
1909 bool started = runtime_->Start();
1910 CHECK(started);
1911
1912 TestFields(self, this, Primitive::Type::kPrimInt);
1913}
1914
1915TEST_F(StubTest, FieldsObj) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001916 Thread* self = Thread::Current();
1917
1918 self->TransitionFromSuspendedToRunnable();
1919 LoadDex("AllFields");
1920 bool started = runtime_->Start();
1921 CHECK(started);
1922
1923 TestFields(self, this, Primitive::Type::kPrimNot);
1924}
1925
1926TEST_F(StubTest, Fields64) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001927 Thread* self = Thread::Current();
1928
1929 self->TransitionFromSuspendedToRunnable();
1930 LoadDex("AllFields");
1931 bool started = runtime_->Start();
1932 CHECK(started);
1933
1934 TestFields(self, this, Primitive::Type::kPrimLong);
1935}
1936
// Disabled, see b/27991555.
1938// FIXME: Hacking the entry point to point to art_quick_to_interpreter_bridge is broken.
1939// The bridge calls through to GetCalleeSaveMethodCaller() which looks up the pre-header
1940// and gets a bogus OatQuickMethodHeader* pointing into our assembly code just before
1941// the bridge and uses that to check for inlined frames, crashing in the process.
// Exercises interface dispatch through the stubs: first the IMT conflict
// trampoline with a hand-built ImtConflictTable, then the generic interface
// invocation trampoline. Uses ArrayList/List.contains() as the test method.
TEST_F(StubTest, DISABLED_IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(
      arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get representation.
  ArtMethod* contains_amethod = soa.DecodeMethod(contains_jmethod);

  // Patch up ArrayList.contains: if it has no compiled code yet, point its
  // entrypoint at the interpreter bridge so the stub has something to call.
  if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(
      list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  ArtMethod* inf_contains = soa.DecodeMethod(inf_contains_jmethod);

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains.

  // We construct the ImtConflictTable ourselves, as we cannot go into the runtime stub
  // that will create it: the runtime stub expects to be called by compiled code.
  LinearAlloc* linear_alloc = Runtime::Current()->GetLinearAlloc();
  ArtMethod* conflict_method = Runtime::Current()->CreateImtConflictMethod(linear_alloc);
  // Start from an empty table and placement-new a copy with one extra entry
  // mapping the interface method (List.contains) to the concrete
  // implementation (ArrayList.contains).
  ImtConflictTable* empty_conflict_table =
      Runtime::Current()->GetClassLinker()->CreateImtConflictTable(/*count*/0u, linear_alloc);
  void* data = linear_alloc->Alloc(
      self,
      ImtConflictTable::ComputeSizeWithOneMoreEntry(empty_conflict_table, kRuntimePointerSize));
  ImtConflictTable* new_table = new (data) ImtConflictTable(
      empty_conflict_table, inf_contains, contains_amethod, kRuntimePointerSize);
  conflict_method->SetImtConflictTable(new_table, kRuntimePointerSize);

  // Invoke through the conflict trampoline; the hidden argument carries the
  // interface method's dex method index. Empty list: expect false.
  size_t result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());

  // Contains. The list now holds the object: expect true.

  result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline

  // Same membership checks, this time dispatched through the generic
  // interface invocation trampoline (with access check).
  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                                       kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // The list does not contain itself: expect false.
  result = Invoke3WithReferrer(
      static_cast<size_t>(inf_contains->GetDexMethodIndex()),
      reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
      StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
      contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2080
// Exercises the quick indexOf entrypoint against String::FastIndexOf for a
// matrix of (string, character, start-index) combinations, including
// out-of-range start indices (-1 and past-the-end).
TEST_F(StubTest, StringIndexOf) {
#if defined(__arm__) || defined(__aarch64__) || defined(__mips__)
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
  static constexpr size_t kStringCount = arraysize(c_str);
  const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
  static constexpr size_t kCharCount = arraysize(c_char);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
  }

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  static constexpr size_t kMaxLen = 9;
  DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";

  // Last dimension: start, offset by 1.
  // expected[x][y][z] is FastIndexOf(s[x], c_char[y], start) with start == z - 1,
  // so z == 0 covers a negative start and z == kMaxLen + 2 a past-the-end start.
  int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
      }
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        int32_t start = static_cast<int32_t>(z) - 1;

        // Test string_compareto x y
        size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
                                StubTest::GetEntrypoint(self, kQuickIndexOf), self);

        EXPECT_FALSE(self->IsExceptionPending());

        // The result is a 32b signed integer
        // Type-pun the register-sized return value into an int32_t.
        union {
          size_t r;
          int32_t i;
        } conv;
        conv.r = result;

        EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
            c_char[y] << " @ " << start;
      }
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2154
Roland Levillain02b75802016-07-13 11:54:35 +01002155// TODO: Exercise the ReadBarrierMarkRegX entry points.
2156
Man Cao1aee9002015-07-14 22:31:42 -07002157TEST_F(StubTest, ReadBarrier) {
2158#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2159 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2160 Thread* self = Thread::Current();
2161
2162 const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);
2163
2164 // Create an object
2165 ScopedObjectAccess soa(self);
2166 // garbage is created during ClassLinker::Init
2167
2168 StackHandleScope<2> hs(soa.Self());
2169 Handle<mirror::Class> c(
2170 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
2171
2172 // Build an object instance
2173 Handle<mirror::Object> obj(hs.NewHandle(c->AllocObject(soa.Self())));
2174
2175 EXPECT_FALSE(self->IsExceptionPending());
2176
2177 size_t result = Invoke3(0U, reinterpret_cast<size_t>(obj.Get()),
2178 mirror::Object::ClassOffset().SizeValue(), readBarrierSlow, self);
2179
2180 EXPECT_FALSE(self->IsExceptionPending());
2181 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2182 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2183 EXPECT_EQ(klass, obj->GetClass());
2184
2185 // Tests done.
2186#else
2187 LOG(INFO) << "Skipping read_barrier_slow";
2188 // Force-print to std::cout so it's also outside the logcat.
2189 std::cout << "Skipping read_barrier_slow" << std::endl;
2190#endif
2191}
2192
Roland Levillain0d5a2812015-11-13 10:07:31 +00002193TEST_F(StubTest, ReadBarrierForRoot) {
2194#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2195 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2196 Thread* self = Thread::Current();
2197
2198 const uintptr_t readBarrierForRootSlow =
2199 StubTest::GetEntrypoint(self, kQuickReadBarrierForRootSlow);
2200
2201 // Create an object
2202 ScopedObjectAccess soa(self);
2203 // garbage is created during ClassLinker::Init
2204
2205 StackHandleScope<1> hs(soa.Self());
2206
2207 Handle<mirror::String> obj(
2208 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
2209
2210 EXPECT_FALSE(self->IsExceptionPending());
2211
2212 GcRoot<mirror::Class>& root = mirror::String::java_lang_String_;
2213 size_t result = Invoke3(reinterpret_cast<size_t>(&root), 0U, 0U, readBarrierForRootSlow, self);
2214
2215 EXPECT_FALSE(self->IsExceptionPending());
2216 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2217 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2218 EXPECT_EQ(klass, obj->GetClass());
2219
2220 // Tests done.
2221#else
2222 LOG(INFO) << "Skipping read_barrier_for_root_slow";
2223 // Force-print to std::cout so it's also outside the logcat.
2224 std::cout << "Skipping read_barrier_for_root_slow" << std::endl;
2225#endif
2226}
2227
Andreas Gampe525cde22014-04-22 15:44:50 -07002228} // namespace art