blob: 5e39f4281495df0f9be28b149ef2ad24481f226c [file] [log] [blame]
Andreas Gampe525cde22014-04-22 15:44:50 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Mathieu Chartierc7853442015-03-27 14:35:38 -070019#include "art_field-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method-inl.h"
Andreas Gampe542451c2016-07-26 09:02:02 -070021#include "base/enums.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010022#include "class_linker-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070023#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070024#include "entrypoints/quick/quick_entrypoints_enum.h"
Andreas Gampe75a7db62016-09-26 12:04:26 -070025#include "imt_conflict_table.h"
Nicolas Geoffray1004faa2016-03-23 14:28:30 +000026#include "linear_alloc.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070027#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070028#include "mirror/string-inl.h"
Mathieu Chartier0795f232016-09-27 18:43:30 -070029#include "scoped_thread_state_change-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070030
31namespace art {
32
33
// Test fixture for calling ART's quick entrypoint stubs directly.
//
// The fixture provides per-architecture assembly trampolines (the Invoke3*
// family) that spill the caller's registers, build a minimal 16-byte-aligned
// quick "frame" holding the referrer, load the stub's arguments into the
// registers the quick ABI expects, and branch to the stub's code pointer.
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      // Install a callee-save method for every CalleeSaveType that does not
      // already have one, so stubs that transition into the runtime work.
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  // Shrink the heap and force interpreter mode before the runtime starts.
  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    // Interpreter-only: the stubs under test are invoked manually below.
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  // Exposes the thread's pointer-sized TLS values to the generated test classes.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Invokes the stub at |code| with three word-sized arguments, no referrer
  // and no hidden argument. Returns the stub's integer result.
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  // Same as Invoke3, but stores |referrer| at the top of the quick frame.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             ArtMethod* referrer) {
    return Invoke3WithReferrerAndHidden(arg0, arg1, arg2, code, self, referrer, 0);
  }

  // TODO: Set up a frame according to referrer's specs.
  // Core trampoline: per-architecture inline assembly that calls the stub at
  // |code| with |arg0|..|arg2| in the quick argument registers, |referrer|
  // pushed on the stack, and |hidden| in the architecture's hidden-argument
  // register. On AArch64 it additionally verifies that the callee-saved FP
  // registers d8-d15 survive the call (result lands in fp_result).
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    // PUSH/POP keep the CFI unwind info consistent with the stack pointer.
#define PUSH(reg) "push " # reg "\n\t .cfi_adjust_cfa_offset 4\n\t"
#define POP(reg) "pop " # reg "\n\t .cfi_adjust_cfa_offset -4\n\t"
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"  // This is a memory op, so do this early. If it is off of
                                      // esp, then we won't be able to access it after spilling.

        // Spill 6 registers.
        PUSH(%%ebx)
        PUSH(%%ecx)
        PUSH(%%edx)
        PUSH(%%esi)
        PUSH(%%edi)
        PUSH(%%ebp)

        // Store the inputs to the stack, but keep the referrer up top, less work.
        PUSH(%[referrer])  // Align stack.
        PUSH(%[referrer])  // Store referrer

        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[code])
        // Now read them back into the required registers.
        POP(%%edi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%eax)
        // Call is prepared now.

        "call *%%edi\n\t"  // Call the stub
        "addl $8, %%esp\n\t"  // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -8\n\t"

        // Restore 6 registers.
        POP(%%ebp)
        POP(%%edi)
        POP(%%esi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%ebx)

        : "=a" (result)
          // Use the result from eax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory", "xmm7");  // clobber.
#undef PUSH
#undef POP
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"  // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"  // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        // r0-r2 = args, r3 = code, r9 = self, r12 = hidden argument.
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"  // Call the stub
        "add sp, sp, #12\n\t"  // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"  // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"  // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #80\n\t"
        ".cfi_adjust_cfa_offset 80\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"
        // To be extra defensive, store x20. We do this because some of the stubs might make a
        // transition into the runtime via the blr instruction below and *not* save x20.
        "str x20, [sp, #64]\n\t"
        // 8 byte buffer

        "sub sp, sp, #16\n\t"  // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"  // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        // (write a recognizable pattern so we can verify the callee-saved
        // FP registers are preserved across the call).
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x19, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"  // Call the stub
        "mov x8, x0\n\t"  // Store result
        "add sp, sp, #16\n\t"  // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"  // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"  // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "ldr x20, [sp, #64]\n\t"
        "add sp, sp, #80\n\t"  // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -80\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"  // Store the call result

        "b 3f\n\t"  // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"  // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
          // Leave one register unclobbered, which is needed for compiling with
          // -fstack-protector-strong. According to AAPCS64 registers x9-x15 are caller-saved,
          // which means we should unclobber one of the callee-saved registers that are unused.
          // Here we use x20.
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -24\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"
        "sw %[hidden], 20($sp)\n\t"

        // Load call params into the right registers.
        // a0-a2 = args, t9 = code (MIPS call convention), s1 = self, t0 = hidden.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"
        "lw $s1, 16($sp)\n\t"
        "lw $t0, 20($sp)\n\t"
        "addiu $sp, $sp, 24\n\t"

        "jalr $t9\n\t"  // Call the stub.
        "nop\n\t"
        "addiu $sp, $sp, 16\n\t"  // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"  // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -48\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"
        "sd %[hidden], 40($sp)\n\t"

        // Load call params into the right registers.
        // a0-a2 = args, t9 = code (MIPS call convention), s1 = self, t0 = hidden.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"
        "ld $s1, 32($sp)\n\t"
        "ld $t0, 40($sp)\n\t"
        "daddiu $sp, $sp, 48\n\t"

        "jalr $t9\n\t"  // Call the stub.
        "nop\n\t"
        "daddiu $sp, $sp, 16\n\t"  // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        // Instead aliases t0-t3, register names $12-$15 has been used in the clobber list because
        // t0-t3 are ambiguous.
        : "at", "v0", "v1", "$12", "$13", "$14", "$15", "s0", "s1", "s2", "s3", "s4", "s5", "s6",
          "s7", "t8", "t9", "k0", "k1", "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
#define PUSH(reg) "pushq " # reg "\n\t .cfi_adjust_cfa_offset 8\n\t"
#define POP(reg) "popq " # reg "\n\t .cfi_adjust_cfa_offset -8\n\t"
    // Note: Uses the native convention. We do a callee-save regimen by manually spilling and
    // restoring almost all registers.
    // TODO: Set the thread?
    __asm__ __volatile__(
        // Spill almost everything (except rax, rsp). 14 registers.
        PUSH(%%rbx)
        PUSH(%%rcx)
        PUSH(%%rdx)
        PUSH(%%rsi)
        PUSH(%%rdi)
        PUSH(%%rbp)
        PUSH(%%r8)
        PUSH(%%r9)
        PUSH(%%r10)
        PUSH(%%r11)
        PUSH(%%r12)
        PUSH(%%r13)
        PUSH(%%r14)
        PUSH(%%r15)

        PUSH(%[referrer])  // Push referrer & 16B alignment padding
        PUSH(%[referrer])

        // Now juggle the input registers.
        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[hidden])
        PUSH(%[code])
        // Pops run in reverse push order: code->r8, hidden->rax, arg2->rdx,
        // arg1->rsi, arg0->rdi.
        POP(%%r8)
        POP(%%rax)
        POP(%%rdx)
        POP(%%rsi)
        POP(%%rdi)

        "call *%%r8\n\t"  // Call the stub
        "addq $16, %%rsp\n\t"  // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"

        POP(%%r15)
        POP(%%r14)
        POP(%%r13)
        POP(%%r12)
        POP(%%r11)
        POP(%%r10)
        POP(%%r9)
        POP(%%r8)
        POP(%%rbp)
        POP(%%rdi)
        POP(%%rsi)
        POP(%%rdx)
        POP(%%rcx)
        POP(%%rbx)

        : "=a" (result)
          // Use the result from rax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into some other
          // register. We can't use "b" (rbx), as ASAN uses this for the frame pointer.
        : "memory");  // We spill and restore (almost) all registers, so only mention memory here.
#undef PUSH
#undef POP
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // Only the AArch64 trampoline sets fpr_result (1 if d8-d15 were trashed);
    // every other path leaves it at 0, so this check is a no-op there.
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Reads the code pointer for |entrypoint| out of |self|'s quick entrypoint
  // table, i.e. the address compiled code would call for this entrypoint.
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
    offset = GetThreadOffset<kRuntimePointerSize>(entrypoint).Int32Value();
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  // Result of the AArch64 callee-saved FPR (d8-d15) preservation check made
  // by Invoke3WithReferrerAndHidden; 0 means the registers were preserved.
  size_t fp_result;
};
541
542
Andreas Gampe525cde22014-04-22 15:44:50 -0700543TEST_F(StubTest, Memcpy) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200544#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
Andreas Gampe525cde22014-04-22 15:44:50 -0700545 Thread* self = Thread::Current();
546
547 uint32_t orig[20];
548 uint32_t trg[20];
549 for (size_t i = 0; i < 20; ++i) {
550 orig[i] = i;
551 trg[i] = 0;
552 }
553
554 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700555 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700556
557 EXPECT_EQ(orig[0], trg[0]);
558
559 for (size_t i = 1; i < 4; ++i) {
560 EXPECT_NE(orig[i], trg[i]);
561 }
562
563 for (size_t i = 4; i < 14; ++i) {
564 EXPECT_EQ(orig[i], trg[i]);
565 }
566
567 for (size_t i = 14; i < 20; ++i) {
568 EXPECT_NE(orig[i], trg[i]);
569 }
570
571 // TODO: Test overlapping?
572
573#else
574 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
575 // Force-print to std::cout so it's also outside the logcat.
576 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
577#endif
578}
579
// Exercises the art_quick_lock_object stub: first acquisition must inflate the
// lock word to a thin lock, repeated acquisition must bump the recursion
// count, and locking an object whose lock word already holds a hash code must
// produce a fat (monitor-backed) lock.
TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  // A freshly allocated object must start unlocked.
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  // First lock: expect inflation to a thin lock with count 0.
  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  // Recursive locking by the same thread increments the thin-lock count.
  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);

  // The hash code and the thin lock cannot share the lock word, so the lock
  // must have been inflated to a monitor.
  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
638
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700639
// Small deterministic pseudo-random generator used to drive the lock/unlock
// stress test. The update is a linear-congruential-style step; the multiply
// intentionally wraps around uint32_t before the modulo is applied.
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  // Advances the generator and returns the new state.
  uint32_t next() {
    const uint32_t product = val_ * 48271u;  // Wraps modulo 2^32.
    val_ = (product % 2147483647u) + 13u;
    return val_;
  }

  // Current state; public so the seed/state can be inspected directly.
  uint32_t val_;
};
651
652
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700653// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
654static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200655#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
656 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700657 static constexpr size_t kThinLockLoops = 100;
658
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700659 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700660
661 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
662 const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700663 // Create an object
664 ScopedObjectAccess soa(self);
665 // garbage is created during ClassLinker::Init
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700666 static constexpr size_t kNumberOfLocks = 10; // Number of objects = lock
667 StackHandleScope<kNumberOfLocks + 1> hs(self);
668 Handle<mirror::String> obj(
669 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700670 LockWord lock = obj->GetLockWord(false);
671 LockWord::LockState old_state = lock.GetState();
672 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
673
Andreas Gampe29b38412014-08-13 00:15:43 -0700674 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700675 // This should be an illegal monitor state.
676 EXPECT_TRUE(self->IsExceptionPending());
677 self->ClearException();
678
679 LockWord lock_after = obj->GetLockWord(false);
680 LockWord::LockState new_state = lock_after.GetState();
681 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);
Andreas Gampe525cde22014-04-22 15:44:50 -0700682
Andreas Gampe29b38412014-08-13 00:15:43 -0700683 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700684
685 LockWord lock_after2 = obj->GetLockWord(false);
686 LockWord::LockState new_state2 = lock_after2.GetState();
687 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);
688
Andreas Gampe29b38412014-08-13 00:15:43 -0700689 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700690
691 LockWord lock_after3 = obj->GetLockWord(false);
692 LockWord::LockState new_state3 = lock_after3.GetState();
693 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);
694
695 // Stress test:
696 // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
697 // each step.
698
699 RandGen r(0x1234);
700
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700701 constexpr size_t kIterations = 10000; // Number of iterations
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700702 constexpr size_t kMoveToFat = 1000; // Chance of 1:kMoveFat to make a lock fat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700703
704 size_t counts[kNumberOfLocks];
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700705 bool fat[kNumberOfLocks]; // Whether a lock should be thin or fat.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700706 Handle<mirror::String> objects[kNumberOfLocks];
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700707
708 // Initialize = allocate.
709 for (size_t i = 0; i < kNumberOfLocks; ++i) {
710 counts[i] = 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700711 fat[i] = false;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700712 objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700713 }
714
715 for (size_t i = 0; i < kIterations; ++i) {
716 // Select which lock to update.
717 size_t index = r.next() % kNumberOfLocks;
718
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700719 // Make lock fat?
720 if (!fat[index] && (r.next() % kMoveToFat == 0)) {
721 fat[index] = true;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700722 objects[index]->IdentityHashCode();
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700723
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700724 LockWord lock_iter = objects[index]->GetLockWord(false);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700725 LockWord::LockState iter_state = lock_iter.GetState();
726 if (counts[index] == 0) {
727 EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
728 } else {
729 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
730 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700731 } else {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800732 bool take_lock; // Whether to lock or unlock in this step.
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700733 if (counts[index] == 0) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800734 take_lock = true;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700735 } else if (counts[index] == kThinLockLoops) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800736 take_lock = false;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700737 } else {
738 // Randomly.
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800739 take_lock = r.next() % 2 == 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700740 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700741
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800742 if (take_lock) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700743 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
744 self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700745 counts[index]++;
746 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700747 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700748 art_quick_unlock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700749 counts[index]--;
750 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700751
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700752 EXPECT_FALSE(self->IsExceptionPending());
753
754 // Check the new state.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700755 LockWord lock_iter = objects[index]->GetLockWord(true);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700756 LockWord::LockState iter_state = lock_iter.GetState();
757 if (fat[index]) {
758 // Abuse MonitorInfo.
759 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700760 MonitorInfo info(objects[index].Get());
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700761 EXPECT_EQ(counts[index], info.entry_count_) << index;
762 } else {
763 if (counts[index] > 0) {
764 EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
765 EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
766 } else {
767 EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
768 }
769 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700770 }
771 }
772
773 // Unlock the remaining count times and then check it's unlocked. Then deallocate.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700774 // Go reverse order to correctly handle Handles.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700775 for (size_t i = 0; i < kNumberOfLocks; ++i) {
776 size_t index = kNumberOfLocks - 1 - i;
777 size_t count = counts[index];
778 while (count > 0) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700779 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
780 self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700781 count--;
782 }
783
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700784 LockWord lock_after4 = objects[index]->GetLockWord(false);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700785 LockWord::LockState new_state4 = lock_after4.GetState();
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700786 EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
787 || LockWord::LockState::kFatLocked == new_state4);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700788 }
789
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700790 // Test done.
Andreas Gampe525cde22014-04-22 15:44:50 -0700791#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -0800792 UNUSED(test);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700793 LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe525cde22014-04-22 15:44:50 -0700794 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700795 std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe525cde22014-04-22 15:44:50 -0700796#endif
797}
798
// Exercises the art_quick_lock_object / art_quick_unlock_object stubs via the
// TestUnlockObject helper defined above (randomized lock/unlock sequences).
TEST_F(StubTest, UnlockObject) {
  // This will lead to monitor error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  TestUnlockObject(this);
}
Andreas Gampe525cde22014-04-22 15:44:50 -0700805
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200806#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
807 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700808extern "C" void art_quick_check_cast(void);
809#endif
810
811TEST_F(StubTest, CheckCast) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200812#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
813 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700814 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700815
816 const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);
817
Andreas Gampe525cde22014-04-22 15:44:50 -0700818 // Find some classes.
819 ScopedObjectAccess soa(self);
820 // garbage is created during ClassLinker::Init
821
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700822 StackHandleScope<2> hs(soa.Self());
823 Handle<mirror::Class> c(
824 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
825 Handle<mirror::Class> c2(
826 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700827
828 EXPECT_FALSE(self->IsExceptionPending());
829
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700830 Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700831 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700832
833 EXPECT_FALSE(self->IsExceptionPending());
834
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700835 Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700836 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700837
838 EXPECT_FALSE(self->IsExceptionPending());
839
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700840 Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700841 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700842
843 EXPECT_FALSE(self->IsExceptionPending());
844
845 // TODO: Make the following work. But that would require correct managed frames.
846
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700847 Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700848 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700849
850 EXPECT_TRUE(self->IsExceptionPending());
851 self->ClearException();
852
853#else
854 LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
855 // Force-print to std::cout so it's also outside the logcat.
856 std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
857#endif
858}
859
860
// Exercises the checked aput-object stub: stores compatible references and null into an
// Object-array-typed String[], then verifies that bad indices and incompatible element
// types raise exceptions.
TEST_F(StubTest, APutObj) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  // Do not check non-checked ones, we'd need handlers and stuff...
  const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
      StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a string array of size 10.
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3]

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index >= array length (10 on a length-10 array).

  Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[])

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
992
// Exercises the quick object-allocation entrypoints (AllocObject, AllocObjectResolved,
// AllocObjectInitialized), then deliberately exhausts the heap to check the OOM path.
TEST_F(StubTest, AllocObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            // arbitrary
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0, kRuntimePointerSize)),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    // Fill the heap with progressively smaller arrays; shrink the request when an
    // allocation fails until even small arrays no longer fit.
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    // With the heap exhausted, the allocation stub must fail and leave an exception pending.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1117
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001118TEST_F(StubTest, AllocObjectArray) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001119#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1120 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001121 // TODO: Check the "Unresolved" allocation stubs
1122
Andreas Gampe369810a2015-01-14 19:53:31 -08001123 // This will lead to OOM error messages in the log.
1124 ScopedLogSeverity sls(LogSeverity::FATAL);
1125
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001126 Thread* self = Thread::Current();
1127 // Create an object
1128 ScopedObjectAccess soa(self);
1129 // garbage is created during ClassLinker::Init
1130
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001131 StackHandleScope<2> hs(self);
1132 Handle<mirror::Class> c(
1133 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001134
1135 // Needed to have a linked method.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001136 Handle<mirror::Class> c_obj(
1137 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001138
1139 // Play with it...
1140
1141 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001142
1143 // For some reason this does not work, as the type_idx is artificial and outside what the
1144 // resolved types of c_obj allow...
1145
Ian Rogerscf7f1912014-10-22 22:06:39 -07001146 if ((false)) {
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001147 // Use an arbitrary method from c to use as referrer
Andreas Gampe542451c2016-07-26 09:02:02 -07001148 size_t result = Invoke3(
1149 static_cast<size_t>(c->GetDexTypeIndex()), // type_idx
1150 10U,
1151 // arbitrary
1152 reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0, kRuntimePointerSize)),
1153 StubTest::GetEntrypoint(self, kQuickAllocArray),
1154 self);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001155
1156 EXPECT_FALSE(self->IsExceptionPending());
1157 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1158 mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001159 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001160 VerifyObject(obj);
1161 EXPECT_EQ(obj->GetLength(), 10);
1162 }
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001163
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001164 {
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001165 // We can use null in the second argument as we do not need a method here (not used in
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001166 // resolved/initialized cases)
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001167 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
1168 reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -07001169 StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001170 self);
Nicolas Geoffray14691c52015-03-05 10:40:17 +00001171 EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001172 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1173 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
1174 EXPECT_TRUE(obj->IsArrayInstance());
1175 EXPECT_TRUE(obj->IsObjectArray());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001176 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001177 VerifyObject(obj);
1178 mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
1179 EXPECT_EQ(array->GetLength(), 10);
1180 }
1181
1182 // Failure tests.
1183
1184 // Out-of-memory.
1185 {
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001186 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001187 GB, // that should fail...
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001188 reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -07001189 StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001190 self);
1191
1192 EXPECT_TRUE(self->IsExceptionPending());
1193 self->ClearException();
1194 EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
1195 }
1196
1197 // Tests done.
1198#else
1199 LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
1200 // Force-print to std::cout so it's also outside the logcat.
1201 std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
1202#endif
1203}
1204
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001205
// Exercises the quick string_compareto entrypoint against String::CompareTo for every
// ordered pair of test strings, checking only the sign of the result.
TEST_F(StubTest, StringCompareTo) {
  TEST_DISABLED_FOR_STRING_COMPRESSION();
  // There is no StringCompareTo runtime entrypoint for __arm__ or __aarch64__.
#if defined(__i386__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();

  const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);

  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c[] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",     // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };  // As is this one. We need a separate one to
                                                  // defeat object-equal optimizations.
  static constexpr size_t kStringCount = arraysize(c);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  // TODO: wide characters

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              art_quick_string_compareto, self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer
      // (the union reinterprets the stub's size_t return as int32_t).
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      // Only the sign (zero / negative / positive) has to match the expectation.
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}
1287
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001288
Mathieu Chartierc7853442015-03-27 14:35:38 -07001289static void GetSetBooleanStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001290 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001291 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001292#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1293 (defined(__x86_64__) && !defined(__APPLE__))
Fred Shih37f05ef2014-07-16 18:38:08 -07001294 constexpr size_t num_values = 5;
1295 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1296
1297 for (size_t i = 0; i < num_values; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001298 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001299 static_cast<size_t>(values[i]),
1300 0U,
1301 StubTest::GetEntrypoint(self, kQuickSet8Static),
1302 self,
1303 referrer);
1304
Mathieu Chartierc7853442015-03-27 14:35:38 -07001305 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001306 0U, 0U,
1307 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1308 self,
1309 referrer);
1310 // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
1311 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1312 }
1313#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001314 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001315 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1316 // Force-print to std::cout so it's also outside the logcat.
1317 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1318#endif
1319}
Mathieu Chartiere401d142015-04-22 13:56:20 -07001320static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Mathieu Chartierc7853442015-03-27 14:35:38 -07001321 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001322 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001323#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1324 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001325 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001326
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001327 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001328 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001329 static_cast<size_t>(values[i]),
1330 0U,
1331 StubTest::GetEntrypoint(self, kQuickSet8Static),
1332 self,
1333 referrer);
1334
Mathieu Chartierc7853442015-03-27 14:35:38 -07001335 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001336 0U, 0U,
1337 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1338 self,
1339 referrer);
1340 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1341 }
1342#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001343 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001344 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1345 // Force-print to std::cout so it's also outside the logcat.
1346 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1347#endif
1348}
1349
1350
// Round-trip test for the instance boolean field stubs: stores each value via the
// kQuickSet8Instance stub, verifies it with a direct ArtField::GetBoolean read, writes it
// back via ArtField::SetBoolean, then reads it through the kQuickGetBooleanInstance stub
// and compares against the direct read.
static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
                                  ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Values beyond the canonical 0/1 exercise full-byte boolean storage.
  uint8_t values[] = { 0, true, 2, 128, 0xFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the stub: (field_idx, object, value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    // Direct (non-stub) read must observe the stub's write.
    uint8_t res = f->GetBoolean(obj->Get());
    EXPECT_EQ(values[i], res) << "Iteration " << i;

    // Write the same value back through ArtField before the stub read.
    f->SetBoolean<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint8_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Round-trip test for the instance byte field stubs: stores each value via the
// kQuickSet8Instance stub, checks it with a direct ArtField::GetByte read, stores an
// incremented value directly, then reads through the kQuickGetByteInstance stub and
// compares against the incremented value.
static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
                               Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  int8_t values[] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the stub: (field_idx, object, value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    // Direct read must observe the stub's write.
    int8_t res = f->GetByte(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Bump the value with a direct write so the stub read below is checked against
    // a different value than the stub wrote.
    f->SetByte<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetByteInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int8_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1420
// Round-trip test for the static char field stubs: writes each 16-bit value through the
// kQuickSet16Static entrypoint stub and reads it back through kQuickGetCharStatic,
// expecting an exact (zero-extended) round trip.
static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
                             StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Covers both edges of the unsigned 16-bit range.
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the stub: (field_idx, value, unused).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetCharStatic),
                                           self,
                                           referrer);

    EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Round-trip test for the static short field stubs: writes each signed 16-bit value
// through the kQuickSet16Static entrypoint stub and reads it back through
// kQuickGetShortStatic, expecting a sign-correct round trip.
static void GetSetShortStatic(ArtField* f, Thread* self,
                              ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Covers both edges of the signed 16-bit range.
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the stub: (field_idx, value, unused).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetShortStatic),
                                           self,
                                           referrer);

    // Truncate the returned register value back to int16_t for the comparison.
    EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1481
// Round-trip test for the instance char field stubs: stores each value via the
// kQuickSet16Instance stub, checks it with a direct ArtField::GetChar read, stores an
// incremented value directly, then reads through the kQuickGetCharInstance stub and
// compares against the incremented value.
static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
                               Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the stub: (field_idx, object, value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // Direct read must observe the stub's write.
    uint16_t res = f->GetChar(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Bump the value with a direct write so the stub read below is checked against
    // a different value than the stub wrote (0xFFFF wraps to 0).
    f->SetChar<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetCharInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Round-trip test for the instance short field stubs: stores each value via the
// kQuickSet16Instance stub, checks it with a direct ArtField::GetShort read, stores an
// incremented value directly, then reads through the kQuickGetShortInstance stub and
// compares against the incremented value.
static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
                                Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the stub: (field_idx, object, value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // Direct read must observe the stub's write.
    int16_t res = f->GetShort(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Bump the value with a direct write so the stub read below is checked against
    // a different value than the stub wrote.
    f->SetShort<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetShortInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1550
// Round-trip test for the static 32-bit field stubs: writes each value through the
// kQuickSet32Static entrypoint stub and reads it back through kQuickGet32Static.
static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
                           StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the stub: (field_idx, value, unused).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet32Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet32Static),
                                           self,
                                           referrer);

// On MIPS64 the returned register value must be truncated to 32 bits before comparing
// (presumably due to 32-bit values being sign-extended in 64-bit registers — confirm
// against the MIPS64 ABI).
#if defined(__mips__) && defined(__LP64__)
    EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
#else
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
#endif
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1585
1586
// Round-trip test for the instance 32-bit field stubs: stores each value via the
// kQuickSet32Instance stub, checks it with a direct ArtField::GetInt read, stores an
// incremented value directly, then reads through the kQuickGet32Instance stub and
// compares against the incremented value.
static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
                             Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the stub: (field_idx, object, value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet32Instance),
                              self,
                              referrer);

    // Direct read must observe the stub's write.
    int32_t res = f->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    // Bump the value with a direct write so the stub read below is checked against
    // a different value than the stub wrote.
    res++;
    f->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet32Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1623
1624
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))

// Helper for GetSetObjStatic: stores 'val' into the static object field 'f_idx' via the
// kQuickSetObjStatic stub, reads it back via kQuickGetObjStatic, and expects the exact
// same reference to come back.
static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Store through the stub: (field_idx, object reference, unused).
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickSetObjStatic),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjStatic),
                                         self,
                                         referrer);

  // Reference identity must be preserved across the stub round trip.
  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif
1647
// Round-trip test for the static object field stubs: checks null, then a live string
// reference, then null again, via set_and_check_static.
static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
                            StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);

  // Reset back to null so the field does not keep the string alive.
  set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1667
1668
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
// Helper for GetSetObjInstance: stores 'val' into field 'f' of object 'trg' via the
// kQuickSetObjInstance stub, reads it back via kQuickGetObjInstance, and additionally
// cross-checks against a direct ArtField::GetObj read.
static void set_and_check_instance(ArtField* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, ArtMethod* referrer,
                                   StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Store through the stub: (field_idx, target object, value reference).
  test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            StubTest::GetEntrypoint(self, kQuickSetObjInstance),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjInstance),
                                         self,
                                         referrer);

  // Reference identity must be preserved across the stub round trip.
  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  // The direct (non-stub) read must agree as well.
  EXPECT_EQ(val, f->GetObj(trg));
}
#endif
1694
// Round-trip test for the instance object field stubs: checks null, then a live string
// reference, then null again, via set_and_check_instance.
static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
                              Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  // Reset back to null so the field does not keep the string alive.
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1714
1715
// TODO: Complete these tests for 32-bit architectures.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001717
Mathieu Chartiere401d142015-04-22 13:56:20 -07001718static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001719 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001720 REQUIRES_SHARED(Locks::mutator_lock_) {
Calin Juravle6e399ac2015-10-02 23:56:06 +01001721#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) \
1722 || defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001723 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001724
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001725 for (size_t i = 0; i < arraysize(values); ++i) {
Calin Juravle6e399ac2015-10-02 23:56:06 +01001726 // 64 bit FieldSet stores the set value in the second register.
1727 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Calin Juravle24cc1b32015-10-06 11:46:58 +01001728 0U,
1729 values[i],
1730 StubTest::GetEntrypoint(self, kQuickSet64Static),
1731 self,
1732 referrer);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001733
Mathieu Chartierc7853442015-03-27 14:35:38 -07001734 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001735 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001736 StubTest::GetEntrypoint(self, kQuickGet64Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001737 self,
1738 referrer);
1739
1740 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1741 }
1742#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001743 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001744 LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
1745 // Force-print to std::cout so it's also outside the logcat.
1746 std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
1747#endif
1748}
1749
1750
// Round-trip test for the instance 64-bit field stubs: stores each value via the
// kQuickSet64Instance stub, checks it with a direct ArtField::GetLong read, stores an
// incremented value directly, then reads through the kQuickGet64Instance stub and
// compares against the incremented value. Only enabled on 64-bit architectures, so the
// size_t casts below preserve the full 64-bit value.
static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
                             Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
    defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the stub: (field_idx, object, value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet64Instance),
                              self,
                              referrer);

    // Direct read must observe the stub's write.
    int64_t res = f->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    // Bump the value with a direct write so the stub read below is checked against
    // a different value than the stub wrote.
    res++;
    f->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet64Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1787
1788static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
1789 // garbage is created during ClassLinker::Init
1790
1791 JNIEnv* env = Thread::Current()->GetJniEnv();
1792 jclass jc = env->FindClass("AllFields");
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001793 CHECK(jc != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001794 jobject o = env->AllocObject(jc);
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001795 CHECK(o != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001796
1797 ScopedObjectAccess soa(self);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001798 StackHandleScope<3> hs(self);
Mathieu Chartier0795f232016-09-27 18:43:30 -07001799 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object>(o)));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001800 Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001801 // Need a method as a referrer
Andreas Gampe542451c2016-07-26 09:02:02 -07001802 ArtMethod* m = c->GetDirectMethod(0, kRuntimePointerSize);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001803
1804 // Play with it...
1805
1806 // Static fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001807 for (ArtField& f : c->GetSFields()) {
1808 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001809 if (test_type != type) {
1810 continue;
1811 }
1812 switch (type) {
1813 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001814 GetSetBooleanStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001815 break;
1816 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001817 GetSetByteStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001818 break;
1819 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001820 GetSetCharStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001821 break;
1822 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001823 GetSetShortStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001824 break;
1825 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001826 GetSet32Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001827 break;
1828 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001829 GetSet64Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001830 break;
1831 case Primitive::Type::kPrimNot:
1832 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001833 if (f.GetTypeDescriptor()[0] != '[') {
1834 GetSetObjStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001835 }
1836 break;
1837 default:
1838 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001839 }
1840 }
1841
1842 // Instance fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001843 for (ArtField& f : c->GetIFields()) {
1844 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001845 if (test_type != type) {
1846 continue;
1847 }
1848 switch (type) {
1849 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001850 GetSetBooleanInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001851 break;
1852 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001853 GetSetByteInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001854 break;
1855 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001856 GetSetCharInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001857 break;
1858 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001859 GetSetShortInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001860 break;
1861 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001862 GetSet32Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001863 break;
1864 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001865 GetSet64Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001866 break;
1867 case Primitive::Type::kPrimNot:
1868 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001869 if (f.GetTypeDescriptor()[0] != '[') {
1870 GetSetObjInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001871 }
1872 break;
1873 default:
1874 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001875 }
1876 }
1877
1878 // TODO: Deallocate things.
1879}
1880
Fred Shih37f05ef2014-07-16 18:38:08 -07001881TEST_F(StubTest, Fields8) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001882 Thread* self = Thread::Current();
1883
1884 self->TransitionFromSuspendedToRunnable();
1885 LoadDex("AllFields");
1886 bool started = runtime_->Start();
1887 CHECK(started);
1888
1889 TestFields(self, this, Primitive::Type::kPrimBoolean);
1890 TestFields(self, this, Primitive::Type::kPrimByte);
1891}
1892
1893TEST_F(StubTest, Fields16) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001894 Thread* self = Thread::Current();
1895
1896 self->TransitionFromSuspendedToRunnable();
1897 LoadDex("AllFields");
1898 bool started = runtime_->Start();
1899 CHECK(started);
1900
1901 TestFields(self, this, Primitive::Type::kPrimChar);
1902 TestFields(self, this, Primitive::Type::kPrimShort);
1903}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001904
1905TEST_F(StubTest, Fields32) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001906 Thread* self = Thread::Current();
1907
1908 self->TransitionFromSuspendedToRunnable();
1909 LoadDex("AllFields");
1910 bool started = runtime_->Start();
1911 CHECK(started);
1912
1913 TestFields(self, this, Primitive::Type::kPrimInt);
1914}
1915
1916TEST_F(StubTest, FieldsObj) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001917 Thread* self = Thread::Current();
1918
1919 self->TransitionFromSuspendedToRunnable();
1920 LoadDex("AllFields");
1921 bool started = runtime_->Start();
1922 CHECK(started);
1923
1924 TestFields(self, this, Primitive::Type::kPrimNot);
1925}
1926
1927TEST_F(StubTest, Fields64) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001928 Thread* self = Thread::Current();
1929
1930 self->TransitionFromSuspendedToRunnable();
1931 LoadDex("AllFields");
1932 bool started = runtime_->Start();
1933 CHECK(started);
1934
1935 TestFields(self, this, Primitive::Type::kPrimLong);
1936}
1937
// Disabled, b/27991555 .
// FIXME: Hacking the entry point to point to art_quick_to_interpreter_bridge is broken.
// The bridge calls through to GetCalleeSaveMethodCaller() which looks up the pre-header
// and gets a bogus OatQuickMethodHeader* pointing into our assembly code just before
// the bridge and uses that to check for inlined frames, crashing in the process.
//
// Exercises the IMT conflict trampoline (via a hand-built ImtConflictTable) and
// the regular interface-invoke trampoline, using ArrayList.contains/List.contains
// as the conflicting implementation/interface pair.
TEST_F(StubTest, DISABLED_IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(
      arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get representation.
  ArtMethod* contains_amethod = soa.DecodeMethod(contains_jmethod);

  // Patch up ArrayList.contains.
  // If the method has no compiled code yet, point it at the interpreter bridge
  // so the trampolines below have something to call (see FIXME above).
  if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(
      list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  ArtMethod* inf_contains = soa.DecodeMethod(inf_contains_jmethod);

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains.

  // We construct the ImtConflictTable ourselves, as we cannot go into the runtime stub
  // that will create it: the runtime stub expects to be called by compiled code.
  LinearAlloc* linear_alloc = Runtime::Current()->GetLinearAlloc();
  ArtMethod* conflict_method = Runtime::Current()->CreateImtConflictMethod(linear_alloc);
  ImtConflictTable* empty_conflict_table =
      Runtime::Current()->GetClassLinker()->CreateImtConflictTable(/*count*/0u, linear_alloc);
  void* data = linear_alloc->Alloc(
      self,
      ImtConflictTable::ComputeSizeWithOneMoreEntry(empty_conflict_table, kRuntimePointerSize));
  // Placement-new a one-entry table mapping List.contains -> ArrayList.contains.
  ImtConflictTable* new_table = new (data) ImtConflictTable(
      empty_conflict_table, inf_contains, contains_amethod, kRuntimePointerSize);
  conflict_method->SetImtConflictTable(new_table, kRuntimePointerSize);

  // The list is still empty, so contains(obj) must report false.
  size_t result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());

  // Contains.

  // After the add, the same dispatch must report true.
  result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline

  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                                       kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // The list does not contain itself, so this must report false.
  result = Invoke3WithReferrer(
      static_cast<size_t>(inf_contains->GetDexMethodIndex()),
      reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
      StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
      contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2081
// Exercises the kQuickIndexOf entrypoint: for every (string, char, start)
// triple, the stub's result must match mirror::String::FastIndexOf.
TEST_F(StubTest, StringIndexOf) {
#if defined(__arm__) || defined(__aarch64__) || defined(__mips__)
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
  static constexpr size_t kStringCount = arraysize(c_str);
  const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
  static constexpr size_t kCharCount = arraysize(c_char);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
  }

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  static constexpr size_t kMaxLen = 9;
  DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";

  // Last dimension: start, offset by 1.
  // Starts range over [-1, kMaxLen + 1] to cover both out-of-range ends.
  int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
      }
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        int32_t start = static_cast<int32_t>(z) - 1;

        // Test string_compareto x y
        size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
                                StubTest::GetEntrypoint(self, kQuickIndexOf), self);

        EXPECT_FALSE(self->IsExceptionPending());

        // The result is a 32b signed integer
        // (reinterpret the low bits of the returned word via a union).
        union {
          size_t r;
          int32_t i;
        } conv;
        conv.r = result;

        EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
            c_char[y] << " @ " << start;
      }
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2155
Roland Levillain02b75802016-07-13 11:54:35 +01002156// TODO: Exercise the ReadBarrierMarkRegX entry points.
2157
// Exercises the kQuickReadBarrierSlow entrypoint: reading the class field of a
// fresh object through the slow-path read barrier must yield the same class as
// a direct GetClass() call.
TEST_F(StubTest, ReadBarrier) {
#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
    defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
  Thread* self = Thread::Current();

  const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Build an object instance
  Handle<mirror::Object> obj(hs.NewHandle(c->AllocObject(soa.Self())));

  EXPECT_FALSE(self->IsExceptionPending());

  // Args: (ref=null, obj, offset-of-class-field) per the slow-path signature.
  size_t result = Invoke3(0U, reinterpret_cast<size_t>(obj.Get()),
                          mirror::Object::ClassOffset().SizeValue(), readBarrierSlow, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
  mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
  EXPECT_EQ(klass, obj->GetClass());

  // Tests done.
#else
  LOG(INFO) << "Skipping read_barrier_slow";
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping read_barrier_slow" << std::endl;
#endif
}
2193
// Exercises the kQuickReadBarrierForRootSlow entrypoint: reading the
// java.lang.String class GcRoot through the slow path must match the class of
// a freshly allocated String.
TEST_F(StubTest, ReadBarrierForRoot) {
#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
    defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
  Thread* self = Thread::Current();

  const uintptr_t readBarrierForRootSlow =
      StubTest::GetEntrypoint(self, kQuickReadBarrierForRootSlow);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<1> hs(soa.Self());

  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  EXPECT_FALSE(self->IsExceptionPending());

  // The root slow path takes a GcRoot<mirror::Class>*; the other two argument
  // slots are unused.
  GcRoot<mirror::Class>& root = mirror::String::java_lang_String_;
  size_t result = Invoke3(reinterpret_cast<size_t>(&root), 0U, 0U, readBarrierForRootSlow, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
  mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
  EXPECT_EQ(klass, obj->GetClass());

  // Tests done.
#else
  LOG(INFO) << "Skipping read_barrier_for_root_slow";
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping read_barrier_for_root_slow" << std::endl;
#endif
}
2228
Andreas Gampe525cde22014-04-22 15:44:50 -07002229} // namespace art