/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <cstdio>

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/enums.h"
#include "class_linker-inl.h"
#include "common_runtime_test.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "imt_conflict_table.h"
#include "jni_internal.h"
#include "linear_alloc.h"
#include "mirror/class-inl.h"
#include "mirror/string-inl.h"
#include "scoped_thread_state_change-inl.h"

namespace art {


class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods.
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap.
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             ArtMethod* referrer) {
    return Invoke3WithReferrerAndHidden(arg0, arg1, arg2, code, self, referrer, 0);
  }

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
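    // Each architecture block below follows the same pattern: spill the registers we promise not
    // to clobber, marshal referrer/args/code (and, where used, self and the hidden argument) into
    // the registers the stub expects, call the stub, restore the spills, and hand the stub's
    // return value back through 'result'.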
#if defined(__i386__)
    // TODO: Set the thread?
#define PUSH(reg) "push " # reg "\n\t .cfi_adjust_cfa_offset 4\n\t"
#define POP(reg) "pop " # reg "\n\t .cfi_adjust_cfa_offset -4\n\t"
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"  // This is a memory op, so do this early. If it is off of
                                      // esp, then we won't be able to access it after spilling.

        // Spill 6 registers.
        PUSH(%%ebx)
        PUSH(%%ecx)
        PUSH(%%edx)
        PUSH(%%esi)
        PUSH(%%edi)
        PUSH(%%ebp)

        // Store the inputs to the stack, but keep the referrer up top, less work.
        PUSH(%[referrer])  // Align stack.
        PUSH(%[referrer])  // Store referrer.

        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[code])
        // Now read them back into the required registers.
        POP(%%edi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%eax)
        // Call is prepared now.

        "call *%%edi\n\t"      // Call the stub
        "addl $8, %%esp\n\t"   // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -8\n\t"

        // Restore 6 registers.
        POP(%%ebp)
        POP(%%edi)
        POP(%%esi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%ebx)

        : "=a" (result)
          // Use the result from eax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory", "xmm7");  // clobber.
#undef PUSH
#undef POP
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #80\n\t"
        ".cfi_adjust_cfa_offset 80\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"
        // To be extra defensive, store x20. We do this because some of the stubs might make a
        // transition into the runtime via the blr instruction below and *not* save x20.
        "str x20, [sp, #64]\n\t"
        // 8 byte buffer

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x19, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "ldr x20, [sp, #64]\n\t"
        "add sp, sp, #80\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -80\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
          // Leave one register unclobbered, which is needed for compiling with
          // -fstack-protector-strong. According to AAPCS64 registers x9-x15 are caller-saved,
          // which means we should unclobber one of the callee-saved registers that are unused.
          // Here we use x20.
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -24\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"
        "sw %[hidden], 20($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"
        "lw $s1, 16($sp)\n\t"
        "lw $t7, 20($sp)\n\t"
        "addiu $sp, $sp, 24\n\t"

        "jalr $t9\n\t"             // Call the stub.
        "nop\n\t"
        "addiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"   // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -48\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"
        "sd %[hidden], 40($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"
        "ld $s1, 32($sp)\n\t"
        "ld $t0, 40($sp)\n\t"
        "daddiu $sp, $sp, 48\n\t"

        "jalr $t9\n\t"              // Call the stub.
        "nop\n\t"
        "daddiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"   // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        // Instead of the t0-t3 aliases, the register names $12-$15 are used in the clobber list
        // because t0-t3 are ambiguous on MIPS64.
        : "at", "v0", "v1", "$12", "$13", "$14", "$15", "s0", "s1", "s2", "s3", "s4", "s5", "s6",
          "s7", "t8", "t9", "k0", "k1", "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
#define PUSH(reg) "pushq " # reg "\n\t .cfi_adjust_cfa_offset 8\n\t"
#define POP(reg) "popq " # reg "\n\t .cfi_adjust_cfa_offset -8\n\t"
    // Note: Uses the native convention. We do a callee-save regimen by manually spilling and
    // restoring almost all registers.
    // TODO: Set the thread?
    __asm__ __volatile__(
        // Spill almost everything (except rax, rsp). 14 registers.
        PUSH(%%rbx)
        PUSH(%%rcx)
        PUSH(%%rdx)
        PUSH(%%rsi)
        PUSH(%%rdi)
        PUSH(%%rbp)
        PUSH(%%r8)
        PUSH(%%r9)
        PUSH(%%r10)
        PUSH(%%r11)
        PUSH(%%r12)
        PUSH(%%r13)
        PUSH(%%r14)
        PUSH(%%r15)

        PUSH(%[referrer])  // Push referrer & 16B alignment padding
        PUSH(%[referrer])

        // Now juggle the input registers.
        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[hidden])
        PUSH(%[code])
        POP(%%r8)
        POP(%%rax)
        POP(%%rdx)
        POP(%%rsi)
        POP(%%rdi)

        "call *%%r8\n\t"       // Call the stub
        "addq $16, %%rsp\n\t"  // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"

        POP(%%r15)
        POP(%%r14)
        POP(%%r13)
        POP(%%r12)
        POP(%%r11)
        POP(%%r10)
        POP(%%r9)
        POP(%%r8)
        POP(%%rbp)
        POP(%%rdi)
        POP(%%rsi)
        POP(%%rdx)
        POP(%%rcx)
        POP(%%rbx)

        : "=a" (result)
          // Use the result from rax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into some other
          // register. We can't use "b" (rbx), as ASAN uses this for the frame pointer.
        : "memory");  // We spill and restore (almost) all registers, so only mention memory here.
#undef PUSH
#undef POP
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

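  // Returns the code pointer for the given quick entrypoint by reading the corresponding slot
  // out of the Thread object at its per-entrypoint offset.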
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
    offset = GetThreadOffset<kRuntimePointerSize>(entrypoint).Int32Value();
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  size_t fp_result;
};


TEST_F(StubTest, Memcpy) {
#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
  Thread* self = Thread::Current();

  uint32_t orig[20];
  uint32_t trg[20];
  for (size_t i = 0; i < 20; ++i) {
    orig[i] = i;
    trg[i] = 0;
  }

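  // Copy 10 words from &orig[4] into &trg[4]; the checks below verify that the copied window
  // matches and that the words before and after it were left untouched.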
  Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
          10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);

  EXPECT_EQ(orig[0], trg[0]);

  for (size_t i = 1; i < 4; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  for (size_t i = 4; i < 14; ++i) {
    EXPECT_EQ(orig[i], trg[i]);
  }

  for (size_t i = 14; i < 20; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  // TODO: Test overlapping?

#else
  LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);

  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero.

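  // Re-lock the already thin-locked object repeatedly; each recursive acquisition should
  // increment the thin-lock count stored in the lock word.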
  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

    // Check we're at lock count i.

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


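// Simple deterministic pseudo-random number generator used to drive the lock/unlock stress test
// below; reproducibility matters more here than statistical quality.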
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  uint32_t next() {
    val_ = val_ * 48271 % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;
};


// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
  const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects = number of locks.
  StackHandleScope<kNumberOfLocks + 1> hs(self);
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.

  RandGen r(0x1234);

  constexpr size_t kIterations = 10000;  // Number of iterations.
  constexpr size_t kMoveToFat = 1000;    // Chance of 1:kMoveToFat to make a lock fat.

  size_t counts[kNumberOfLocks];
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool take_lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        take_lock = true;
      } else if (counts[index] == kThinLockLoops) {
        take_lock = false;
      } else {
        // Randomly.
        take_lock = r.next() % 2 == 0;
      }

      if (take_lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
                      self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      art_quick_unlock_object, self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
                    self);
      count--;
    }

    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  UNUSED(test);
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, UnlockObject) {
  // This will lead to monitor error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  TestUnlockObject(this);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_check_instance_of(void);
#endif

TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  const uintptr_t art_quick_check_instance_of =
      StubTest::GetEntrypoint(self, kQuickCheckInstanceOf);

  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  VariableSizedHandleScope hs(soa.Self());
  Handle<mirror::Class> klass_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> klass_str(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/String;")));
  Handle<mirror::Class> klass_list(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/util/List;")));
  Handle<mirror::Class> klass_cloneable(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Cloneable;")));
  Handle<mirror::Class> klass_array_list(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/util/ArrayList;")));
  Handle<mirror::Object> obj(hs.NewHandle(klass_obj->AllocObject(soa.Self())));
  Handle<mirror::String> string(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "ABCD")));
  Handle<mirror::Object> array_list(hs.NewHandle(klass_array_list->AllocObject(soa.Self())));

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(obj.Get()),
          reinterpret_cast<size_t>(klass_obj.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected true: Test string instance of java.lang.String.
  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_str.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected true: Test string instance of java.lang.Object.
  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_obj.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected false: Test object instance of java.lang.String.
  Invoke3(reinterpret_cast<size_t>(obj.Get()),
          reinterpret_cast<size_t>(klass_str.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

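  // The remaining checks exercise interface and mismatched-class cases: an ArrayList is both a
  // java.util.List and a java.lang.Cloneable, while a String is neither an ArrayList nor
  // Cloneable.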
  Invoke3(reinterpret_cast<size_t>(array_list.Get()),
          reinterpret_cast<size_t>(klass_list.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array_list.Get()),
          reinterpret_cast<size_t>(klass_cloneable.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_array_list.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_cloneable.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


TEST_F(StubTest, APutObj) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  // Do not check non-checked ones, we'd need handlers and stuff...
  const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
      StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);

  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a string array of size 10.
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable.
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning.
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3]

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index out of bounds (>= array length)

  Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[])

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, AllocObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Use an arbitrary method from c to use as referrer.
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex().index_),  // type_idx
                            // arbitrary
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0, kRuntimePointerSize)),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases).
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases).
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
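    // Exhaust the heap, first with large object arrays and then with single objects, so that the
    // allocation stub invoked at the end has no space left and must throw OutOfMemoryError.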
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use an arbitrarily large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K.
    size_t length = 128 * KB / 4;
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length.
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001169TEST_F(StubTest, AllocObjectArray) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001170#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1171 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001172 // TODO: Check the "Unresolved" allocation stubs
1173
Andreas Gampe369810a2015-01-14 19:53:31 -08001174 // This will lead to OOM error messages in the log.
1175 ScopedLogSeverity sls(LogSeverity::FATAL);
1176
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001177 Thread* self = Thread::Current();
1178 // Create an object
1179 ScopedObjectAccess soa(self);
1180 // garbage is created during ClassLinker::Init
1181
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001182 StackHandleScope<2> hs(self);
1183 Handle<mirror::Class> c(
1184 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001185
1186 // Needed to have a linked method.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001187 Handle<mirror::Class> c_obj(
1188 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001189
1190 // Play with it...
1191
1192 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001193
 1194 // This does not work because the type_idx is artificial and outside what the
 1195 // resolved types of c_obj allow...
1196
Ian Rogerscf7f1912014-10-22 22:06:39 -07001197 if ((false)) {
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001198 // Use an arbitrary method from c as the referrer
Andreas Gampe542451c2016-07-26 09:02:02 -07001199 size_t result = Invoke3(
Andreas Gampea5b09a62016-11-17 15:21:22 -08001200 static_cast<size_t>(c->GetDexTypeIndex().index_), // type_idx
Andreas Gampe542451c2016-07-26 09:02:02 -07001201 10U,
1202 // arbitrary
1203 reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0, kRuntimePointerSize)),
1204 StubTest::GetEntrypoint(self, kQuickAllocArray),
1205 self);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001206
1207 EXPECT_FALSE(self->IsExceptionPending());
1208 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1209 mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001210 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001211 VerifyObject(obj);
1212 EXPECT_EQ(obj->GetLength(), 10);
1213 }
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001214
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001215 {
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001216 // We can use null in the second argument as we do not need a method here (not used in
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001217 // resolved/initialized cases)
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001218 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
1219 reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -07001220 StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001221 self);
David Sehr709b0702016-10-13 09:12:37 -07001222 EXPECT_FALSE(self->IsExceptionPending()) << mirror::Object::PrettyTypeOf(self->GetException());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001223 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1224 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
1225 EXPECT_TRUE(obj->IsArrayInstance());
1226 EXPECT_TRUE(obj->IsObjectArray());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001227 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001228 VerifyObject(obj);
1229 mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
1230 EXPECT_EQ(array->GetLength(), 10);
1231 }
1232
1233 // Failure tests.
1234
1235 // Out-of-memory.
1236 {
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001237 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001238 GB, // that should fail...
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001239 reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -07001240 StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001241 self);
1242
1243 EXPECT_TRUE(self->IsExceptionPending());
1244 self->ClearException();
1245 EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
1246 }
1247
1248 // Tests done.
1249#else
1250 LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
1251 // Force-print to std::cout so it's also outside the logcat.
1252 std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
1253#endif
1254}
1255
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001256
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001257TEST_F(StubTest, StringCompareTo) {
jessicahandojo3aaa37b2016-07-29 14:46:37 -07001258 TEST_DISABLED_FOR_STRING_COMPRESSION();
Scott Wakelingc25cbf12016-04-18 09:00:11 +01001259 // There is no StringCompareTo runtime entrypoint for __arm__ or __aarch64__.
1260#if defined(__i386__) || defined(__mips__) || \
1261 (defined(__x86_64__) && !defined(__APPLE__))
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001262 // TODO: Check the "Unresolved" allocation stubs
1263
1264 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -07001265
1266 const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);
1267
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001268 ScopedObjectAccess soa(self);
1269 // garbage is created during ClassLinker::Init
1270
1271 // Create some strings
1272 // Use array so we can index into it and use a matrix for expected results
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001273 // Setup: Short strings plus ones under and over the length limit for the __memcmp16 path.
1274 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001275 const char* c[] = { "", "", "a", "aa", "ab",
Serban Constantinescu86797a72014-06-19 16:17:56 +01001276 "aacaacaacaacaacaac", // This one's under the default limit to go to __memcmp16.
1277 "aacaacaacaacaacaacaacaacaacaacaacaac", // This one's over.
1278 "aacaacaacaacaacaacaacaacaacaacaacaaca" }; // As is this one. We need a separate one to
1279 // defeat object-equal optimizations.
Jeff Hao848f70a2014-01-15 13:49:50 -08001280 static constexpr size_t kStringCount = arraysize(c);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001281
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001282 StackHandleScope<kStringCount> hs(self);
1283 Handle<mirror::String> s[kStringCount];
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001284
Jeff Hao848f70a2014-01-15 13:49:50 -08001285 for (size_t i = 0; i < kStringCount; ++i) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001286 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001287 }
1288
1289 // TODO: wide characters
1290
 1291 // Matrix of expectations. The first index is the first parameter. Note we only check the
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001292 // sign, not the exact value, so the matrix is computed here and we rely on
 1293 // String::CompareTo being correct.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001294 int32_t expected[kStringCount][kStringCount];
1295 for (size_t x = 0; x < kStringCount; ++x) {
1296 for (size_t y = 0; y < kStringCount; ++y) {
1297 expected[x][y] = s[x]->CompareTo(s[y].Get());
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001298 }
1299 }
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001300
1301 // Play with it...
1302
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001303 for (size_t x = 0; x < kStringCount; ++x) {
1304 for (size_t y = 0; y < kStringCount; ++y) {
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001305 // Test string_compareto x y
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001306 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
1307 reinterpret_cast<size_t>(s[y].Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001308 art_quick_string_compareto, self);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001309
1310 EXPECT_FALSE(self->IsExceptionPending());
1311
1312 // The result is a 32b signed integer
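      // The stub hands it back in a pointer-sized register, so reinterpret it through a union.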
1313 union {
1314 size_t r;
1315 int32_t i;
1316 } conv;
1317 conv.r = result;
1318 int32_t e = expected[x][y];
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001319 EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1320 conv.r;
1321 EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1322 conv.r;
1323 EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1324 conv.r;
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001325 }
1326 }
1327
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001328 // TODO: Deallocate things.
1329
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001330 // Tests done.
1331#else
1332 LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
1333 // Force-print to std::cout so it's also outside the logcat.
1334 std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
1335 std::endl;
1336#endif
1337}
1338
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001339
Mathieu Chartierc7853442015-03-27 14:35:38 -07001340static void GetSetBooleanStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001341 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001342 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001343#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1344 (defined(__x86_64__) && !defined(__APPLE__))
Fred Shih37f05ef2014-07-16 18:38:08 -07001345 constexpr size_t num_values = 5;
1346 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
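  // Values other than 0 and 1 are included on purpose: the 8-bit set/get stubs move raw bytes,
  // and the check below verifies the exact byte value round-trips.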
1347
1348 for (size_t i = 0; i < num_values; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001349 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001350 static_cast<size_t>(values[i]),
1351 0U,
1352 StubTest::GetEntrypoint(self, kQuickSet8Static),
1353 self,
1354 referrer);
1355
Mathieu Chartierc7853442015-03-27 14:35:38 -07001356 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001357 0U, 0U,
1358 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1359 self,
1360 referrer);
 1361 // Booleans are currently stored as uint8_t, so be zealous and assert that the exact byte value written is read back.
1362 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1363 }
1364#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001365 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001366 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1367 // Force-print to std::cout so it's also outside the logcat.
1368 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1369#endif
1370}
Mathieu Chartiere401d142015-04-22 13:56:20 -07001371static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Mathieu Chartierc7853442015-03-27 14:35:38 -07001372 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001373 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001374#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1375 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001376 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001377
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001378 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001379 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001380 static_cast<size_t>(values[i]),
1381 0U,
1382 StubTest::GetEntrypoint(self, kQuickSet8Static),
1383 self,
1384 referrer);
1385
Mathieu Chartierc7853442015-03-27 14:35:38 -07001386 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001387 0U, 0U,
1388 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1389 self,
1390 referrer);
1391 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1392 }
1393#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001394 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001395 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1396 // Force-print to std::cout so it's also outside the logcat.
1397 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1398#endif
1399}
1400
1401
Mathieu Chartierc7853442015-03-27 14:35:38 -07001402static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001403 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001404 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001405#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1406 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001407 uint8_t values[] = { 0, true, 2, 128, 0xFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001408
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001409 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001410 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001411 reinterpret_cast<size_t>(obj->Get()),
1412 static_cast<size_t>(values[i]),
1413 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1414 self,
1415 referrer);
1416
Mathieu Chartierc7853442015-03-27 14:35:38 -07001417 uint8_t res = f->GetBoolean(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001418 EXPECT_EQ(values[i], res) << "Iteration " << i;
1419
Mathieu Chartierc7853442015-03-27 14:35:38 -07001420 f->SetBoolean<false>(obj->Get(), res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001421
Mathieu Chartierc7853442015-03-27 14:35:38 -07001422 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001423 reinterpret_cast<size_t>(obj->Get()),
1424 0U,
1425 StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
1426 self,
1427 referrer);
1428 EXPECT_EQ(res, static_cast<uint8_t>(res2));
1429 }
1430#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001431 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001432 LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
1433 // Force-print to std::cout so it's also outside the logcat.
1434 std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1435#endif
1436}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001437static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001438 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001439 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001440#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1441 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001442 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001443
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001444 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001445 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001446 reinterpret_cast<size_t>(obj->Get()),
1447 static_cast<size_t>(values[i]),
1448 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1449 self,
1450 referrer);
1451
Mathieu Chartierc7853442015-03-27 14:35:38 -07001452 int8_t res = f->GetByte(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001453 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001454 f->SetByte<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001455
Mathieu Chartierc7853442015-03-27 14:35:38 -07001456 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001457 reinterpret_cast<size_t>(obj->Get()),
1458 0U,
1459 StubTest::GetEntrypoint(self, kQuickGetByteInstance),
1460 self,
1461 referrer);
1462 EXPECT_EQ(res, static_cast<int8_t>(res2));
1463 }
1464#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001465 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001466 LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
1467 // Force-print to std::cout so it's also outside the logcat.
1468 std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1469#endif
1470}
1471
Mathieu Chartiere401d142015-04-22 13:56:20 -07001472static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001473 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001474 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001475#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1476 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001477 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001478
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001479 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001480 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001481 static_cast<size_t>(values[i]),
1482 0U,
1483 StubTest::GetEntrypoint(self, kQuickSet16Static),
1484 self,
1485 referrer);
1486
Mathieu Chartierc7853442015-03-27 14:35:38 -07001487 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001488 0U, 0U,
1489 StubTest::GetEntrypoint(self, kQuickGetCharStatic),
1490 self,
1491 referrer);
1492
1493 EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
1494 }
1495#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001496 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001497 LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
1498 // Force-print to std::cout so it's also outside the logcat.
1499 std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1500#endif
1501}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001502static void GetSetShortStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001503 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001504 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001505#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1506 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001507 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001508
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001509 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001510 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001511 static_cast<size_t>(values[i]),
1512 0U,
1513 StubTest::GetEntrypoint(self, kQuickSet16Static),
1514 self,
1515 referrer);
1516
Mathieu Chartierc7853442015-03-27 14:35:38 -07001517 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001518 0U, 0U,
1519 StubTest::GetEntrypoint(self, kQuickGetShortStatic),
1520 self,
1521 referrer);
1522
1523 EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
1524 }
1525#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001526 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001527 LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
1528 // Force-print to std::cout so it's also outside the logcat.
1529 std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1530#endif
1531}
1532
Mathieu Chartierc7853442015-03-27 14:35:38 -07001533static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001534 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001535 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001536#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1537 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001538 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001539
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001540 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001541 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001542 reinterpret_cast<size_t>(obj->Get()),
1543 static_cast<size_t>(values[i]),
1544 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1545 self,
1546 referrer);
1547
Mathieu Chartierc7853442015-03-27 14:35:38 -07001548 uint16_t res = f->GetChar(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001549 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001550 f->SetChar<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001551
Mathieu Chartierc7853442015-03-27 14:35:38 -07001552 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001553 reinterpret_cast<size_t>(obj->Get()),
1554 0U,
1555 StubTest::GetEntrypoint(self, kQuickGetCharInstance),
1556 self,
1557 referrer);
1558 EXPECT_EQ(res, static_cast<uint16_t>(res2));
1559 }
1560#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001561 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001562 LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
1563 // Force-print to std::cout so it's also outside the logcat.
1564 std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1565#endif
1566}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001567static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001568 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001569 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001570#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1571 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001572 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001573
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001574 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001575 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001576 reinterpret_cast<size_t>(obj->Get()),
1577 static_cast<size_t>(values[i]),
1578 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1579 self,
1580 referrer);
1581
Mathieu Chartierc7853442015-03-27 14:35:38 -07001582 int16_t res = f->GetShort(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001583 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001584 f->SetShort<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001585
Mathieu Chartierc7853442015-03-27 14:35:38 -07001586 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001587 reinterpret_cast<size_t>(obj->Get()),
1588 0U,
1589 StubTest::GetEntrypoint(self, kQuickGetShortInstance),
1590 self,
1591 referrer);
1592 EXPECT_EQ(res, static_cast<int16_t>(res2));
1593 }
1594#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001595 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001596 LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
1597 // Force-print to std::cout so it's also outside the logcat.
1598 std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1599#endif
1600}
1601
Mathieu Chartiere401d142015-04-22 13:56:20 -07001602static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001603 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001604 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001605#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1606 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001607 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001608
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001609 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001610 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001611 static_cast<size_t>(values[i]),
1612 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001613 StubTest::GetEntrypoint(self, kQuickSet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001614 self,
1615 referrer);
1616
Mathieu Chartierc7853442015-03-27 14:35:38 -07001617 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001618 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001619 StubTest::GetEntrypoint(self, kQuickGet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001620 self,
1621 referrer);
1622
Goran Jakovljevic04568812015-04-23 15:27:23 +02001623#if defined(__mips__) && defined(__LP64__)
1624 EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
1625#else
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001626 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Goran Jakovljevic04568812015-04-23 15:27:23 +02001627#endif
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001628 }
1629#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001630 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001631 LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
1632 // Force-print to std::cout so it's also outside the logcat.
1633 std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
1634#endif
1635}
1636
1637
Mathieu Chartierc7853442015-03-27 14:35:38 -07001638static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001639 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001640 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001641#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1642 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001643 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001644
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001645 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001646 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001647 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001648 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001649 StubTest::GetEntrypoint(self, kQuickSet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001650 self,
1651 referrer);
1652
Mathieu Chartierc7853442015-03-27 14:35:38 -07001653 int32_t res = f->GetInt(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001654 EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;
1655
1656 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001657 f->SetInt<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001658
Mathieu Chartierc7853442015-03-27 14:35:38 -07001659 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001660 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001661 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001662 StubTest::GetEntrypoint(self, kQuickGet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001663 self,
1664 referrer);
1665 EXPECT_EQ(res, static_cast<int32_t>(res2));
1666 }
1667#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001668 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001669 LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
1670 // Force-print to std::cout so it's also outside the logcat.
1671 std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1672#endif
1673}
1674
1675
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001676#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1677 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001678
1679static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001680 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001681 REQUIRES_SHARED(Locks::mutator_lock_) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001682 test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1683 reinterpret_cast<size_t>(val),
1684 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001685 StubTest::GetEntrypoint(self, kQuickSetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001686 self,
1687 referrer);
1688
1689 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1690 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001691 StubTest::GetEntrypoint(self, kQuickGetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001692 self,
1693 referrer);
1694
1695 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1696}
1697#endif
1698
Mathieu Chartiere401d142015-04-22 13:56:20 -07001699static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001700 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001701 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001702#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1703 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001704 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001705
1706 // Allocate a string object for simplicity.
1707 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartierc7853442015-03-27 14:35:38 -07001708 set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001709
Mathieu Chartierc7853442015-03-27 14:35:38 -07001710 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001711#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001712 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001713 LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
1714 // Force-print to std::cout so it's also outside the logcat.
1715 std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
1716#endif
1717}
1718
1719
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001720#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1721 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001722static void set_and_check_instance(ArtField* f, mirror::Object* trg,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001723 mirror::Object* val, Thread* self, ArtMethod* referrer,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001724 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001725 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001726 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001727 reinterpret_cast<size_t>(trg),
1728 reinterpret_cast<size_t>(val),
Andreas Gampe29b38412014-08-13 00:15:43 -07001729 StubTest::GetEntrypoint(self, kQuickSetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001730 self,
1731 referrer);
1732
Mathieu Chartierc7853442015-03-27 14:35:38 -07001733 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001734 reinterpret_cast<size_t>(trg),
1735 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001736 StubTest::GetEntrypoint(self, kQuickGetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001737 self,
1738 referrer);
1739
1740 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1741
Mathieu Chartier3398c782016-09-30 10:27:43 -07001742 EXPECT_OBJ_PTR_EQ(val, f->GetObj(trg));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001743}
1744#endif
1745
Mathieu Chartierc7853442015-03-27 14:35:38 -07001746static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001747 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001748 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001749#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1750 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001751 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001752
1753 // Allocate a string object for simplicity.
1754 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001755 set_and_check_instance(f, obj->Get(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001756
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001757 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001758#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001759 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001760 LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
1761 // Force-print to std::cout so it's also outside the logcat.
1762 std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
1763#endif
1764}
1765
1766
Calin Juravle872ab3f2015-10-02 07:27:51 +01001767// TODO: Complete these tests for 32b architectures
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001768
Mathieu Chartiere401d142015-04-22 13:56:20 -07001769static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001770 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001771 REQUIRES_SHARED(Locks::mutator_lock_) {
Calin Juravle6e399ac2015-10-02 23:56:06 +01001772#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) \
1773 || defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001774 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001775
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001776 for (size_t i = 0; i < arraysize(values); ++i) {
Calin Juravle6e399ac2015-10-02 23:56:06 +01001777 // 64 bit FieldSet stores the set value in the second register.
1778 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Calin Juravle24cc1b32015-10-06 11:46:58 +01001779 0U,
1780 values[i],
1781 StubTest::GetEntrypoint(self, kQuickSet64Static),
1782 self,
1783 referrer);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001784
Mathieu Chartierc7853442015-03-27 14:35:38 -07001785 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001786 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001787 StubTest::GetEntrypoint(self, kQuickGet64Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001788 self,
1789 referrer);
1790
1791 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1792 }
1793#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001794 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001795 LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
1796 // Force-print to std::cout so it's also outside the logcat.
1797 std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
1798#endif
1799}
1800
1801
Mathieu Chartierc7853442015-03-27 14:35:38 -07001802static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001803 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001804 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001805#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
1806 defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001807 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001808
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001809 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001810 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001811 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001812 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001813 StubTest::GetEntrypoint(self, kQuickSet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001814 self,
1815 referrer);
1816
Mathieu Chartierc7853442015-03-27 14:35:38 -07001817 int64_t res = f->GetLong(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001818 EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;
1819
1820 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001821 f->SetLong<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001822
Mathieu Chartierc7853442015-03-27 14:35:38 -07001823 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001824 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001825 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001826 StubTest::GetEntrypoint(self, kQuickGet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001827 self,
1828 referrer);
1829 EXPECT_EQ(res, static_cast<int64_t>(res2));
1830 }
1831#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001832 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001833 LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
1834 // Force-print to std::cout so it's also outside the logcat.
1835 std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1836#endif
1837}
1838
1839static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
1840 // garbage is created during ClassLinker::Init
1841
1842 JNIEnv* env = Thread::Current()->GetJniEnv();
1843 jclass jc = env->FindClass("AllFields");
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001844 CHECK(jc != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001845 jobject o = env->AllocObject(jc);
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001846 CHECK(o != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001847
1848 ScopedObjectAccess soa(self);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001849 StackHandleScope<3> hs(self);
Mathieu Chartier0795f232016-09-27 18:43:30 -07001850 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object>(o)));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001851 Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001852 // Need a method as a referrer
Andreas Gampe542451c2016-07-26 09:02:02 -07001853 ArtMethod* m = c->GetDirectMethod(0, kRuntimePointerSize);
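  // Presumably any direct method works here; the stubs just need a referrer to resolve the
  // dex field indices.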
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001854
1855 // Play with it...
1856
1857 // Static fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001858 for (ArtField& f : c->GetSFields()) {
1859 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001860 if (test_type != type) {
1861 continue;
1862 }
1863 switch (type) {
1864 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001865 GetSetBooleanStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001866 break;
1867 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001868 GetSetByteStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001869 break;
1870 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001871 GetSetCharStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001872 break;
1873 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001874 GetSetShortStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001875 break;
1876 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001877 GetSet32Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001878 break;
1879 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001880 GetSet64Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001881 break;
1882 case Primitive::Type::kPrimNot:
1883 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001884 if (f.GetTypeDescriptor()[0] != '[') {
1885 GetSetObjStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001886 }
1887 break;
1888 default:
1889 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001890 }
1891 }
1892
1893 // Instance fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001894 for (ArtField& f : c->GetIFields()) {
1895 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001896 if (test_type != type) {
1897 continue;
1898 }
1899 switch (type) {
1900 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001901 GetSetBooleanInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001902 break;
1903 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001904 GetSetByteInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001905 break;
1906 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001907 GetSetCharInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001908 break;
1909 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001910 GetSetShortInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001911 break;
1912 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001913 GetSet32Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001914 break;
1915 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001916 GetSet64Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001917 break;
1918 case Primitive::Type::kPrimNot:
1919 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001920 if (f.GetTypeDescriptor()[0] != '[') {
1921 GetSetObjInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001922 }
1923 break;
1924 default:
1925 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001926 }
1927 }
1928
1929 // TODO: Deallocate things.
1930}
1931
Fred Shih37f05ef2014-07-16 18:38:08 -07001932TEST_F(StubTest, Fields8) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001933 Thread* self = Thread::Current();
1934
1935 self->TransitionFromSuspendedToRunnable();
1936 LoadDex("AllFields");
1937 bool started = runtime_->Start();
1938 CHECK(started);
1939
1940 TestFields(self, this, Primitive::Type::kPrimBoolean);
1941 TestFields(self, this, Primitive::Type::kPrimByte);
1942}
1943
1944TEST_F(StubTest, Fields16) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001945 Thread* self = Thread::Current();
1946
1947 self->TransitionFromSuspendedToRunnable();
1948 LoadDex("AllFields");
1949 bool started = runtime_->Start();
1950 CHECK(started);
1951
1952 TestFields(self, this, Primitive::Type::kPrimChar);
1953 TestFields(self, this, Primitive::Type::kPrimShort);
1954}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001955
1956TEST_F(StubTest, Fields32) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001957 Thread* self = Thread::Current();
1958
1959 self->TransitionFromSuspendedToRunnable();
1960 LoadDex("AllFields");
1961 bool started = runtime_->Start();
1962 CHECK(started);
1963
1964 TestFields(self, this, Primitive::Type::kPrimInt);
1965}
1966
1967TEST_F(StubTest, FieldsObj) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001968 Thread* self = Thread::Current();
1969
1970 self->TransitionFromSuspendedToRunnable();
1971 LoadDex("AllFields");
1972 bool started = runtime_->Start();
1973 CHECK(started);
1974
1975 TestFields(self, this, Primitive::Type::kPrimNot);
1976}
1977
1978TEST_F(StubTest, Fields64) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001979 Thread* self = Thread::Current();
1980
1981 self->TransitionFromSuspendedToRunnable();
1982 LoadDex("AllFields");
1983 bool started = runtime_->Start();
1984 CHECK(started);
1985
1986 TestFields(self, this, Primitive::Type::kPrimLong);
1987}
1988
Vladimir Marko9d07e3d2016-03-31 12:02:28 +01001989// Disabled, b/27991555.
1990// FIXME: Hacking the entry point to point to art_quick_to_interpreter_bridge is broken.
1991// The bridge calls through to GetCalleeSaveMethodCaller() which looks up the pre-header
1992// and gets a bogus OatQuickMethodHeader* pointing into our assembly code just before
1993// the bridge and uses that to check for inlined frames, crashing in the process.
1994TEST_F(StubTest, DISABLED_IMT) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001995#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1996 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe51f76352014-05-21 08:28:48 -07001997 Thread* self = Thread::Current();
1998
1999 ScopedObjectAccess soa(self);
2000 StackHandleScope<7> hs(self);
2001
2002 JNIEnv* env = Thread::Current()->GetJniEnv();
2003
2004 // ArrayList
2005
2006 // Load ArrayList and used methods (JNI).
2007 jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
2008 ASSERT_NE(nullptr, arraylist_jclass);
2009 jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
2010 ASSERT_NE(nullptr, arraylist_constructor);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002011 jmethodID contains_jmethod = env->GetMethodID(
2012 arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
Andreas Gampe51f76352014-05-21 08:28:48 -07002013 ASSERT_NE(nullptr, contains_jmethod);
2014 jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
2015 ASSERT_NE(nullptr, add_jmethod);
2016
Mathieu Chartiere401d142015-04-22 13:56:20 -07002017 // Get representation.
Andreas Gampe13b27842016-11-07 16:48:23 -08002018 ArtMethod* contains_amethod = jni::DecodeArtMethod(contains_jmethod);
Andreas Gampe51f76352014-05-21 08:28:48 -07002019
2020 // Patch up ArrayList.contains.
Mathieu Chartiere401d142015-04-22 13:56:20 -07002021 if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
2022 contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
Andreas Gampe29b38412014-08-13 00:15:43 -07002023 StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
Andreas Gampe51f76352014-05-21 08:28:48 -07002024 }
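  // Pointing the method at the interpreter bridge gives the dispatch below something to call
  // when there is no compiled code; this is the hack the FIXME above refers to.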
2025
2026 // List
2027
2028 // Load List and used methods (JNI).
2029 jclass list_jclass = env->FindClass("java/util/List");
2030 ASSERT_NE(nullptr, list_jclass);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002031 jmethodID inf_contains_jmethod = env->GetMethodID(
2032 list_jclass, "contains", "(Ljava/lang/Object;)Z");
Andreas Gampe51f76352014-05-21 08:28:48 -07002033 ASSERT_NE(nullptr, inf_contains_jmethod);
2034
2035 // Get mirror representation.
Andreas Gampe13b27842016-11-07 16:48:23 -08002036 ArtMethod* inf_contains = jni::DecodeArtMethod(inf_contains_jmethod);
Andreas Gampe51f76352014-05-21 08:28:48 -07002037
2038 // Object
2039
2040 jclass obj_jclass = env->FindClass("java/lang/Object");
2041 ASSERT_NE(nullptr, obj_jclass);
2042 jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
2043 ASSERT_NE(nullptr, obj_constructor);
2044
Andreas Gampe51f76352014-05-21 08:28:48 -07002045 // Create instances.
2046
2047 jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
2048 ASSERT_NE(nullptr, jarray_list);
Mathieu Chartier0795f232016-09-27 18:43:30 -07002049 Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object>(jarray_list)));
Andreas Gampe51f76352014-05-21 08:28:48 -07002050
2051 jobject jobj = env->NewObject(obj_jclass, obj_constructor);
2052 ASSERT_NE(nullptr, jobj);
Mathieu Chartier0795f232016-09-27 18:43:30 -07002053 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object>(jobj)));
Andreas Gampe51f76352014-05-21 08:28:48 -07002054
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002055 // Invocation tests.
2056
2057 // 1. imt_conflict
2058
2059 // Contains.
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00002060
2061 // We construct the ImtConflictTable ourselves, as we cannot go into the runtime stub
2062 // that will create it: the runtime stub expects to be called by compiled code.
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00002063 LinearAlloc* linear_alloc = Runtime::Current()->GetLinearAlloc();
2064 ArtMethod* conflict_method = Runtime::Current()->CreateImtConflictMethod(linear_alloc);
Mathieu Chartiere42888f2016-04-14 10:49:19 -07002065 ImtConflictTable* empty_conflict_table =
2066 Runtime::Current()->GetClassLinker()->CreateImtConflictTable(/*count*/0u, linear_alloc);
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00002067 void* data = linear_alloc->Alloc(
2068 self,
Andreas Gampe542451c2016-07-26 09:02:02 -07002069 ImtConflictTable::ComputeSizeWithOneMoreEntry(empty_conflict_table, kRuntimePointerSize));
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00002070 ImtConflictTable* new_table = new (data) ImtConflictTable(
Andreas Gampe542451c2016-07-26 09:02:02 -07002071 empty_conflict_table, inf_contains, contains_amethod, kRuntimePointerSize);
2072 conflict_method->SetImtConflictTable(new_table, kRuntimePointerSize);
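  // The conflict method's table now maps the interface method (List.contains) to the
  // ArrayList implementation, so the IMT conflict trampoline below should dispatch to it.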
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00002073
Andreas Gampe51f76352014-05-21 08:28:48 -07002074 size_t result =
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00002075 Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
2076 reinterpret_cast<size_t>(array_list.Get()),
Andreas Gampe51f76352014-05-21 08:28:48 -07002077 reinterpret_cast<size_t>(obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07002078 StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00002079 self,
2080 contains_amethod,
Mathieu Chartiere401d142015-04-22 13:56:20 -07002081 static_cast<size_t>(inf_contains->GetDexMethodIndex()));
Andreas Gampe51f76352014-05-21 08:28:48 -07002082
2083 ASSERT_FALSE(self->IsExceptionPending());
2084 EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
2085
2086 // Add object.
2087
2088 env->CallBooleanMethod(jarray_list, add_jmethod, jobj);
2089
David Sehr709b0702016-10-13 09:12:37 -07002090 ASSERT_FALSE(self->IsExceptionPending()) << mirror::Object::PrettyTypeOf(self->GetException());
Andreas Gampe51f76352014-05-21 08:28:48 -07002091
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002092 // Contains.
Andreas Gampe51f76352014-05-21 08:28:48 -07002093
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00002094 result =
2095 Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
2096 reinterpret_cast<size_t>(array_list.Get()),
2097 reinterpret_cast<size_t>(obj.Get()),
2098 StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
2099 self,
2100 contains_amethod,
2101 static_cast<size_t>(inf_contains->GetDexMethodIndex()));
Andreas Gampe51f76352014-05-21 08:28:48 -07002102
2103 ASSERT_FALSE(self->IsExceptionPending());
2104 EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00002105
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002106 // 2. regular interface trampoline
2107
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00002108 result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
2109 reinterpret_cast<size_t>(array_list.Get()),
2110 reinterpret_cast<size_t>(obj.Get()),
2111 StubTest::GetEntrypoint(self,
2112 kQuickInvokeInterfaceTrampolineWithAccessCheck),
2113 self, contains_amethod);
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002114
2115 ASSERT_FALSE(self->IsExceptionPending());
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00002116 EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002117
Mathieu Chartiere401d142015-04-22 13:56:20 -07002118 result = Invoke3WithReferrer(
2119 static_cast<size_t>(inf_contains->GetDexMethodIndex()),
2120 reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
2121 StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
2122 contains_amethod);
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002123
2124 ASSERT_FALSE(self->IsExceptionPending());
2125 EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
Andreas Gampe51f76352014-05-21 08:28:48 -07002126#else
Andreas Gampe6aac3552014-06-09 14:55:53 -07002127 LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe51f76352014-05-21 08:28:48 -07002128 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe6aac3552014-06-09 14:55:53 -07002129 std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
2130#endif
2131}
2132
Andreas Gampe6aac3552014-06-09 14:55:53 -07002133TEST_F(StubTest, StringIndexOf) {
Chris Larsencf283da2016-01-19 16:45:35 -08002134#if defined(__arm__) || defined(__aarch64__) || defined(__mips__)
Andreas Gampe6aac3552014-06-09 14:55:53 -07002135 Thread* self = Thread::Current();
2136 ScopedObjectAccess soa(self);
2137 // garbage is created during ClassLinker::Init
2138
2139 // Create some strings
2140 // Use array so we can index into it and use a matrix for expected results
 2141 // Setup: Strings of increasing length, so the loop below covers a range of start indices.
2142 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002143 const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
2144 static constexpr size_t kStringCount = arraysize(c_str);
  const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
  static constexpr size_t kCharCount = arraysize(c_char);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
  }

  // Matrix of expected results, indexed by string, character and start position. The expected
  // values are computed with mirror::String::FastIndexOf, so this test relies on that
  // implementation being correct and checks that the entrypoint agrees with it.
  static constexpr size_t kMaxLen = 9;
  DCHECK_LE(strlen(c_str[kStringCount - 1]), kMaxLen) << "Please fix the indexof test.";

  // Last dimension: start position, offset by 1.
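  // (z == 0 corresponds to start == -1, i.e. before the string; z == kMaxLen + 2 corresponds to a
  // start one past the end of the longest string.)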
  int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
      }
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        int32_t start = static_cast<int32_t>(z) - 1;

        // Test indexof for string x, character y, starting at 'start'.
        size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
                                StubTest::GetEntrypoint(self, kQuickIndexOf), self);

        EXPECT_FALSE(self->IsExceptionPending());

        // The result is a 32-bit signed integer returned in a pointer-sized register.
        union {
          size_t r;
          int32_t i;
        } conv;
        conv.r = result;

        EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
            c_char[y] << " @ " << start;
      }
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

// TODO: Exercise the ReadBarrierMarkRegX entry points.

TEST_F(StubTest, ReadBarrier) {
#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
      defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
  Thread* self = Thread::Current();

  const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Build an object instance
  Handle<mirror::Object> obj(hs.NewHandle(c->AllocObject(soa.Self())));

  EXPECT_FALSE(self->IsExceptionPending());

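  // Invoke the slow-path read barrier on obj's Class field: the stub is handed a null reference,
  // the holder object and the field offset, and should return the reference stored at that
  // offset, i.e. obj's Class.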
  size_t result = Invoke3(0U, reinterpret_cast<size_t>(obj.Get()),
                          mirror::Object::ClassOffset().SizeValue(), readBarrierSlow, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
  mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
  EXPECT_EQ(klass, obj->GetClass());

  // Tests done.
#else
  LOG(INFO) << "Skipping read_barrier_slow";
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping read_barrier_slow" << std::endl;
#endif
}

TEST_F(StubTest, ReadBarrierForRoot) {
#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
      defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
  Thread* self = Thread::Current();

  const uintptr_t readBarrierForRootSlow =
      StubTest::GetEntrypoint(self, kQuickReadBarrierForRootSlow);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<1> hs(soa.Self());

  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  EXPECT_FALSE(self->IsExceptionPending());

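  // The for-root slow path takes the address of a GcRoot and returns the object it references.
  // Using the String class root, the result should equal the class of the string allocated above.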
  GcRoot<mirror::Class>& root = mirror::String::java_lang_String_;
  size_t result = Invoke3(reinterpret_cast<size_t>(&root), 0U, 0U, readBarrierForRootSlow, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
  mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
  EXPECT_EQ(klass, obj->GetClass());

  // Tests done.
#else
  LOG(INFO) << "Skipping read_barrier_for_root_slow";
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping read_barrier_for_root_slow" << std::endl;
#endif
}

}  // namespace art