blob: f10799cc2832aaee1094c7032ebd70de42c4afd0 [file] [log] [blame]
Andreas Gampe525cde22014-04-22 15:44:50 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Mathieu Chartierc7853442015-03-27 14:35:38 -070019#include "art_field-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method-inl.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010021#include "class_linker-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070022#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070023#include "entrypoints/quick/quick_entrypoints_enum.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070024#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070025#include "mirror/string-inl.h"
Ian Rogerse63db272014-07-15 15:36:11 -070026#include "scoped_thread_state_change.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070027
28namespace art {
29
30
// Test fixture for exercising the quick-entrypoint assembly stubs.
//
// Each Invoke3* helper builds a minimal "managed" calling environment with
// hand-written inline assembly for the current architecture (x86, ARM,
// AArch64, MIPS32, MIPS64, x86-64), places up to three word-sized arguments
// in the registers the quick ABI expects, jumps to the stub at `code`, and
// returns the stub's word-sized result. On unsupported architectures the
// helpers log a warning and return 0, so tests must guard with #if as the
// Memcpy test below does.
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      // Ensure every callee-save method kind exists; stubs may walk the
      // stack (e.g. on exception delivery) and expect these to be present.
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  // Shrink the heap and force interpreter mode before the runtime starts.
  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  // (TEST_F subclasses the fixture, so a friend-accessed member like
  // tlsPtr_ must be reached through a method declared here.)
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Convenience wrapper: invoke a stub with three args and no referrer.
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // Invoke the stub at `code` with three word-sized arguments, pushing
  // `referrer` onto the stack where the quick ABI expects the calling
  // ArtMethod*. A ManagedStack fragment is pushed/popped around the call so
  // the runtime sees a valid transition. On AArch64 the asm additionally
  // writes a sentinel pattern into d8-d15 before the call and verifies it
  // afterwards (callee-saved FPRs must survive); the comparison outcome is
  // latched into fp_result and checked with EXPECT_EQ below.
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "subl $12, %%esp\n\t"         // Align stack.
        "pushl %[referrer]\n\t"       // Store referrer.
        "call *%%edi\n\t"             // Call the stub
        "addl $16, %%esp"             // Pop referrer
                                      // NOTE(review): this pops referrer plus the
                                      // 12B alignment padding in one step.
        : "=a" (result)
        // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
        // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    // but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"       // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"     // NOTE(review): "\n\n" instead of "\n\t" —
                                      // harmless to the assembler; confirm intent.
        "str r9, [sp, #-8]!\n\t"      // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"       // self goes into r9 (ART ARM thread register).
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                  // Call the stub
        "add sp, sp, #12\n\t"         // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"        // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"       // Save the result
        : [result] "=r" (result)
        // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"         // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"   // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "str %[self], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        // (a recognizable 0xebadfad0... sentinel incremented per register).
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldr x19, [sp, #32]\n\t"      // self goes into x19 (ART arm64 thread register).
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"


        "blr x3\n\t"                  // Call the stub
        "mov x8, x0\n\t"              // Store result
        "add sp, sp, #16\n\t"         // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"              // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"        // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"         // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"   // Store the FPR comparison result
        "mov %[result], x8\n\t"       // Store the call result

        "b 3f\n\t"                    // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                    // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
        // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -20\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"      // MIPS ABI: indirect calls go through $t9.
        "lw $s1, 16($sp)\n\t"      // self goes into $s1.
        "addiu $sp, $sp, 20\n\t"

        "jalr $t9\n\t"             // Call the stub.
        "nop\n\t"                  // Branch delay slot.
        "addiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"   // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -40\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"       // MIPS ABI: indirect calls go through $t9.
        "ld $s1, 32($sp)\n\t"       // self goes into $s1.
        "daddiu $sp, $sp, 40\n\t"

        "jalr $t9\n\t"              // Call the stub.
        "nop\n\t"                   // Branch delay slot.
        "daddiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"   // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "at", "v0", "v1", "t0", "t1", "t2", "t3", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
          "t8", "t9", "k0", "k1", "fp", "ra",
          "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", "f8", "f9", "f10", "f11", "f12", "f13",
          "f14", "f15", "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23", "f24", "f25", "f26",
          "f27", "f28", "f29", "f30", "f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
        // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "c"(referrer)
        // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    UNUSED(arg0, arg1, arg2, code, referrer);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // Publish the FPR-preservation flag (0 = callee-saved FPRs intact;
    // only AArch64 actually sets it) and fail the test if it tripped.
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Same as Invoke3WithReferrer, but additionally passes `hidden` in the
  // architecture's hidden-argument register (xmm7 on x86, r12 on ARM,
  // x17 on AArch64, $t0 on MIPS, rax on x86-64) — the channel interface
  // trampolines use for the target ArtMethod.
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"  // Hidden arg travels in xmm7.
        "subl $12, %%esp\n\t"         // Align stack.
        "pushl %[referrer]\n\t"       // Store referrer
        "call *%%edi\n\t"             // Call the stub
        "addl $16, %%esp"             // Pop referrer
        : "=a" (result)
        // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer), [hidden]"m"(hidden)
        // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    // but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"       // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"     // NOTE(review): "\n\n" instead of "\n\t" —
                                      // harmless to the assembler; confirm intent.
        "str r9, [sp, #-8]!\n\t"      // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"       // self goes into r9.
        "ldr r12, [sp, #20]\n\t"      // hidden arg goes into r12.
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                  // Call the stub
        "add sp, sp, #12\n\t"         // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"        // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"       // Save the result
        : [result] "=r" (result)
        // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"         // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"   // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        // (a recognizable 0xebadfad0... sentinel incremented per register).
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x19, x17, [sp, #32]\n\t"  // self -> x19, hidden arg -> x17.
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"                  // Call the stub
        "mov x8, x0\n\t"              // Store result
        "add sp, sp, #16\n\t"         // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"              // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"        // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"         // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"   // Store the FPR comparison result
        "mov %[result], x8\n\t"       // Store the call result

        "b 3f\n\t"                    // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                    // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
        // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -24\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"
        "sw %[hidden], 20($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"      // MIPS ABI: indirect calls go through $t9.
        "lw $s1, 16($sp)\n\t"      // self goes into $s1.
        "lw $t0, 20($sp)\n\t"      // hidden arg goes into $t0.
        "addiu $sp, $sp, 24\n\t"

        "jalr $t9\n\t"             // Call the stub.
        "nop\n\t"                  // Branch delay slot.
        "addiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"   // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -48\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"
        "sd %[hidden], 40($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"       // MIPS ABI: indirect calls go through $t9.
        "ld $s1, 32($sp)\n\t"       // self goes into $s1.
        "ld $t0, 40($sp)\n\t"       // hidden arg goes into $t0.
        "daddiu $sp, $sp, 48\n\t"

        "jalr $t9\n\t"              // Call the stub.
        "nop\n\t"                   // Branch delay slot.
        "daddiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"   // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "t0", "t1", "t2", "t3", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
          "t8", "t9", "k0", "k1", "fp", "ra",
          "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", "f8", "f9", "f10", "f11", "f12", "f13",
          "f14", "f15", "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23", "f24", "f25", "f26",
          "f27", "f28", "f29", "f30", "f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rbx\n\t"              // Call the stub (code is in rbx here;
                                       // rax carries the hidden arg).
        "addq $16, %%rsp\n\t"          // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
        // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "b"(code), [referrer] "c"(referrer), [hidden] "a"(hidden)
        // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // Publish the FPR-preservation flag (0 = callee-saved FPRs intact;
    // only AArch64 actually sets it) and fail the test if it tripped.
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Method with 32b arg0, 64b arg1.
  // On 64-bit targets the 64-bit value fits one register and is passed
  // through unchanged; on 32-bit targets it is split into low/high words
  // occupying the arg1/arg2 slots.
  size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
                              ArtMethod* referrer) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
    defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
#else
    // Need to split up arguments.
    uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
    uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);

    return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
#endif
  }

  // Read the code pointer for `entrypoint` out of the Thread object: computes
  // the entrypoint's offset within Thread (for the current pointer size) and
  // loads the function pointer stored there.
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
#ifdef __LP64__
    offset = GetThreadOffset<8>(entrypoint).Int32Value();
#else
    offset = GetThreadOffset<4>(entrypoint).Int32Value();
#endif
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  // Set by Invoke3WithReferrer/Invoke3WithReferrerAndHidden: non-zero iff the
  // AArch64 callee-saved FPR check (d8-d15) detected corruption.
  size_t fp_result;
};
806
807
Andreas Gampe525cde22014-04-22 15:44:50 -0700808TEST_F(StubTest, Memcpy) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200809#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
Andreas Gampe525cde22014-04-22 15:44:50 -0700810 Thread* self = Thread::Current();
811
812 uint32_t orig[20];
813 uint32_t trg[20];
814 for (size_t i = 0; i < 20; ++i) {
815 orig[i] = i;
816 trg[i] = 0;
817 }
818
819 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700820 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700821
822 EXPECT_EQ(orig[0], trg[0]);
823
824 for (size_t i = 1; i < 4; ++i) {
825 EXPECT_NE(orig[i], trg[i]);
826 }
827
828 for (size_t i = 4; i < 14; ++i) {
829 EXPECT_EQ(orig[i], trg[i]);
830 }
831
832 for (size_t i = 14; i < 20; ++i) {
833 EXPECT_NE(orig[i], trg[i]);
834 }
835
836 // TODO: Test overlapping?
837
838#else
839 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
840 // Force-print to std::cout so it's also outside the logcat.
841 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
842#endif
843}
844
Andreas Gampe525cde22014-04-22 15:44:50 -0700845TEST_F(StubTest, LockObject) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200846#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
847 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700848 static constexpr size_t kThinLockLoops = 100;
849
Andreas Gampe525cde22014-04-22 15:44:50 -0700850 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700851
852 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
853
Andreas Gampe525cde22014-04-22 15:44:50 -0700854 // Create an object
855 ScopedObjectAccess soa(self);
856 // garbage is created during ClassLinker::Init
857
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700858 StackHandleScope<2> hs(soa.Self());
859 Handle<mirror::String> obj(
860 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700861 LockWord lock = obj->GetLockWord(false);
862 LockWord::LockState old_state = lock.GetState();
863 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
864
Andreas Gampe29b38412014-08-13 00:15:43 -0700865 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700866
867 LockWord lock_after = obj->GetLockWord(false);
868 LockWord::LockState new_state = lock_after.GetState();
869 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700870 EXPECT_EQ(lock_after.ThinLockCount(), 0U); // Thin lock starts count at zero
871
872 for (size_t i = 1; i < kThinLockLoops; ++i) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700873 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700874
875 // Check we're at lock count i
876
877 LockWord l_inc = obj->GetLockWord(false);
878 LockWord::LockState l_inc_state = l_inc.GetState();
879 EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
880 EXPECT_EQ(l_inc.ThinLockCount(), i);
881 }
882
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700883 // Force a fat lock by running identity hashcode to fill up lock word.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700884 Handle<mirror::String> obj2(hs.NewHandle(
885 mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700886
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700887 obj2->IdentityHashCode();
888
Andreas Gampe29b38412014-08-13 00:15:43 -0700889 Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700890
891 LockWord lock_after2 = obj2->GetLockWord(false);
892 LockWord::LockState new_state2 = lock_after2.GetState();
893 EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
894 EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));
895
896 // Test done.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700897#else
898 LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
899 // Force-print to std::cout so it's also outside the logcat.
900 std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
901#endif
902}
903
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700904
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700905class RandGen {
906 public:
907 explicit RandGen(uint32_t seed) : val_(seed) {}
908
909 uint32_t next() {
910 val_ = val_ * 48271 % 2147483647 + 13;
911 return val_;
912 }
913
914 uint32_t val_;
915};
916
917
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700918// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
919static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200920#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
921 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700922 static constexpr size_t kThinLockLoops = 100;
923
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700924 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700925
926 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
927 const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700928 // Create an object
929 ScopedObjectAccess soa(self);
930 // garbage is created during ClassLinker::Init
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700931 static constexpr size_t kNumberOfLocks = 10; // Number of objects = lock
932 StackHandleScope<kNumberOfLocks + 1> hs(self);
933 Handle<mirror::String> obj(
934 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700935 LockWord lock = obj->GetLockWord(false);
936 LockWord::LockState old_state = lock.GetState();
937 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
938
Andreas Gampe29b38412014-08-13 00:15:43 -0700939 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700940 // This should be an illegal monitor state.
941 EXPECT_TRUE(self->IsExceptionPending());
942 self->ClearException();
943
944 LockWord lock_after = obj->GetLockWord(false);
945 LockWord::LockState new_state = lock_after.GetState();
946 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);
Andreas Gampe525cde22014-04-22 15:44:50 -0700947
Andreas Gampe29b38412014-08-13 00:15:43 -0700948 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700949
950 LockWord lock_after2 = obj->GetLockWord(false);
951 LockWord::LockState new_state2 = lock_after2.GetState();
952 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);
953
Andreas Gampe29b38412014-08-13 00:15:43 -0700954 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700955
956 LockWord lock_after3 = obj->GetLockWord(false);
957 LockWord::LockState new_state3 = lock_after3.GetState();
958 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);
959
960 // Stress test:
961 // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
962 // each step.
963
964 RandGen r(0x1234);
965
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700966 constexpr size_t kIterations = 10000; // Number of iterations
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700967 constexpr size_t kMoveToFat = 1000; // Chance of 1:kMoveFat to make a lock fat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700968
969 size_t counts[kNumberOfLocks];
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700970 bool fat[kNumberOfLocks]; // Whether a lock should be thin or fat.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700971 Handle<mirror::String> objects[kNumberOfLocks];
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700972
973 // Initialize = allocate.
974 for (size_t i = 0; i < kNumberOfLocks; ++i) {
975 counts[i] = 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700976 fat[i] = false;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700977 objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700978 }
979
980 for (size_t i = 0; i < kIterations; ++i) {
981 // Select which lock to update.
982 size_t index = r.next() % kNumberOfLocks;
983
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700984 // Make lock fat?
985 if (!fat[index] && (r.next() % kMoveToFat == 0)) {
986 fat[index] = true;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700987 objects[index]->IdentityHashCode();
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700988
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700989 LockWord lock_iter = objects[index]->GetLockWord(false);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700990 LockWord::LockState iter_state = lock_iter.GetState();
991 if (counts[index] == 0) {
992 EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
993 } else {
994 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
995 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700996 } else {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800997 bool take_lock; // Whether to lock or unlock in this step.
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700998 if (counts[index] == 0) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800999 take_lock = true;
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001000 } else if (counts[index] == kThinLockLoops) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001001 take_lock = false;
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001002 } else {
1003 // Randomly.
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001004 take_lock = r.next() % 2 == 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001005 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001006
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001007 if (take_lock) {
Andreas Gampe29b38412014-08-13 00:15:43 -07001008 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
1009 self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001010 counts[index]++;
1011 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001012 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001013 art_quick_unlock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001014 counts[index]--;
1015 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001016
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001017 EXPECT_FALSE(self->IsExceptionPending());
1018
1019 // Check the new state.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001020 LockWord lock_iter = objects[index]->GetLockWord(true);
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001021 LockWord::LockState iter_state = lock_iter.GetState();
1022 if (fat[index]) {
1023 // Abuse MonitorInfo.
1024 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001025 MonitorInfo info(objects[index].Get());
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001026 EXPECT_EQ(counts[index], info.entry_count_) << index;
1027 } else {
1028 if (counts[index] > 0) {
1029 EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
1030 EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
1031 } else {
1032 EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
1033 }
1034 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001035 }
1036 }
1037
1038 // Unlock the remaining count times and then check it's unlocked. Then deallocate.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001039 // Go reverse order to correctly handle Handles.
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001040 for (size_t i = 0; i < kNumberOfLocks; ++i) {
1041 size_t index = kNumberOfLocks - 1 - i;
1042 size_t count = counts[index];
1043 while (count > 0) {
Andreas Gampe29b38412014-08-13 00:15:43 -07001044 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
1045 self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001046 count--;
1047 }
1048
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001049 LockWord lock_after4 = objects[index]->GetLockWord(false);
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001050 LockWord::LockState new_state4 = lock_after4.GetState();
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001051 EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
1052 || LockWord::LockState::kFatLocked == new_state4);
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001053 }
1054
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001055 // Test done.
Andreas Gampe525cde22014-04-22 15:44:50 -07001056#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001057 UNUSED(test);
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001058 LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe525cde22014-04-22 15:44:50 -07001059 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001060 std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe525cde22014-04-22 15:44:50 -07001061#endif
1062}
1063
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001064TEST_F(StubTest, UnlockObject) {
Andreas Gampe369810a2015-01-14 19:53:31 -08001065 // This will lead to monitor error messages in the log.
1066 ScopedLogSeverity sls(LogSeverity::FATAL);
1067
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001068 TestUnlockObject(this);
1069}
Andreas Gampe525cde22014-04-22 15:44:50 -07001070
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001071#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1072 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -07001073extern "C" void art_quick_check_cast(void);
1074#endif
1075
1076TEST_F(StubTest, CheckCast) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001077#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1078 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -07001079 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -07001080
1081 const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);
1082
Andreas Gampe525cde22014-04-22 15:44:50 -07001083 // Find some classes.
1084 ScopedObjectAccess soa(self);
1085 // garbage is created during ClassLinker::Init
1086
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001087 StackHandleScope<2> hs(soa.Self());
1088 Handle<mirror::Class> c(
1089 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
1090 Handle<mirror::Class> c2(
1091 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -07001092
1093 EXPECT_FALSE(self->IsExceptionPending());
1094
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001095 Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001096 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -07001097
1098 EXPECT_FALSE(self->IsExceptionPending());
1099
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001100 Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001101 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -07001102
1103 EXPECT_FALSE(self->IsExceptionPending());
1104
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001105 Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001106 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -07001107
1108 EXPECT_FALSE(self->IsExceptionPending());
1109
1110 // TODO: Make the following work. But that would require correct managed frames.
1111
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001112 Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001113 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -07001114
1115 EXPECT_TRUE(self->IsExceptionPending());
1116 self->ClearException();
1117
1118#else
1119 LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
1120 // Force-print to std::cout so it's also outside the logcat.
1121 std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
1122#endif
1123}
1124
1125
Andreas Gampe525cde22014-04-22 15:44:50 -07001126TEST_F(StubTest, APutObj) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001127#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1128 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -07001129 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -07001130
1131 // Do not check non-checked ones, we'd need handlers and stuff...
1132 const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
1133 StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);
1134
Andreas Gampe525cde22014-04-22 15:44:50 -07001135 // Create an object
1136 ScopedObjectAccess soa(self);
1137 // garbage is created during ClassLinker::Init
1138
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001139 StackHandleScope<5> hs(soa.Self());
1140 Handle<mirror::Class> c(
1141 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
1142 Handle<mirror::Class> ca(
1143 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -07001144
1145 // Build a string array of size 1
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001146 Handle<mirror::ObjectArray<mirror::Object>> array(
1147 hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));
Andreas Gampe525cde22014-04-22 15:44:50 -07001148
1149 // Build a string -> should be assignable
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001150 Handle<mirror::String> str_obj(
1151 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -07001152
1153 // Build a generic object -> should fail assigning
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001154 Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));
Andreas Gampe525cde22014-04-22 15:44:50 -07001155
1156 // Play with it...
1157
1158 // 1) Success cases
Andreas Gampef4e910b2014-04-29 16:55:52 -07001159 // 1.1) Assign str_obj to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -07001160
1161 EXPECT_FALSE(self->IsExceptionPending());
1162
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001163 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07001164 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -07001165
1166 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001167 EXPECT_EQ(str_obj.Get(), array->Get(0));
Andreas Gampe525cde22014-04-22 15:44:50 -07001168
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001169 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07001170 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -07001171
1172 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001173 EXPECT_EQ(str_obj.Get(), array->Get(1));
Andreas Gampef4e910b2014-04-29 16:55:52 -07001174
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001175 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07001176 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -07001177
1178 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001179 EXPECT_EQ(str_obj.Get(), array->Get(2));
Andreas Gampef4e910b2014-04-29 16:55:52 -07001180
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001181 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07001182 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -07001183
1184 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001185 EXPECT_EQ(str_obj.Get(), array->Get(3));
Andreas Gampef4e910b2014-04-29 16:55:52 -07001186
1187 // 1.2) Assign null to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -07001188
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001189 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -07001190 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -07001191
1192 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampef4e910b2014-04-29 16:55:52 -07001193 EXPECT_EQ(nullptr, array->Get(0));
1194
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001195 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -07001196 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -07001197
1198 EXPECT_FALSE(self->IsExceptionPending());
1199 EXPECT_EQ(nullptr, array->Get(1));
1200
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001201 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -07001202 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -07001203
1204 EXPECT_FALSE(self->IsExceptionPending());
1205 EXPECT_EQ(nullptr, array->Get(2));
1206
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001207 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -07001208 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -07001209
1210 EXPECT_FALSE(self->IsExceptionPending());
1211 EXPECT_EQ(nullptr, array->Get(3));
Andreas Gampe525cde22014-04-22 15:44:50 -07001212
1213 // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.
1214
1215 // 2) Failure cases (str into str[])
1216 // 2.1) Array = null
1217 // TODO: Throwing NPE needs actual DEX code
1218
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001219// Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe525cde22014-04-22 15:44:50 -07001220// reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
1221//
1222// EXPECT_TRUE(self->IsExceptionPending());
1223// self->ClearException();
1224
1225 // 2.2) Index < 0
1226
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001227 Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
1228 reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07001229 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -07001230
1231 EXPECT_TRUE(self->IsExceptionPending());
1232 self->ClearException();
1233
1234 // 2.3) Index > 0
1235
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001236 Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07001237 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -07001238
1239 EXPECT_TRUE(self->IsExceptionPending());
1240 self->ClearException();
1241
1242 // 3) Failure cases (obj into str[])
1243
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001244 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07001245 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -07001246
1247 EXPECT_TRUE(self->IsExceptionPending());
1248 self->ClearException();
1249
1250 // Tests done.
1251#else
1252 LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
1253 // Force-print to std::cout so it's also outside the logcat.
1254 std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
1255#endif
1256}
1257
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001258TEST_F(StubTest, AllocObject) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001259#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1260 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe369810a2015-01-14 19:53:31 -08001261 // This will lead to OOM error messages in the log.
1262 ScopedLogSeverity sls(LogSeverity::FATAL);
1263
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001264 // TODO: Check the "Unresolved" allocation stubs
1265
1266 Thread* self = Thread::Current();
1267 // Create an object
1268 ScopedObjectAccess soa(self);
1269 // garbage is created during ClassLinker::Init
1270
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001271 StackHandleScope<2> hs(soa.Self());
1272 Handle<mirror::Class> c(
1273 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001274
1275 // Play with it...
1276
1277 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001278 {
1279 // Use an arbitrary method from c to use as referrer
1280 size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()), // type_idx
Mathieu Chartiere401d142015-04-22 13:56:20 -07001281 // arbitrary
1282 reinterpret_cast<size_t>(c->GetVirtualMethod(0, sizeof(void*))),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001283 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001284 StubTest::GetEntrypoint(self, kQuickAllocObject),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001285 self);
1286
1287 EXPECT_FALSE(self->IsExceptionPending());
1288 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1289 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001290 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001291 VerifyObject(obj);
1292 }
1293
1294 {
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001295 // We can use null in the second argument as we do not need a method here (not used in
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001296 // resolved/initialized cases)
Mathieu Chartiere401d142015-04-22 13:56:20 -07001297 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001298 StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001299 self);
1300
1301 EXPECT_FALSE(self->IsExceptionPending());
1302 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1303 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001304 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001305 VerifyObject(obj);
1306 }
1307
1308 {
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001309 // We can use null in the second argument as we do not need a method here (not used in
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001310 // resolved/initialized cases)
Mathieu Chartiere401d142015-04-22 13:56:20 -07001311 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001312 StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001313 self);
1314
1315 EXPECT_FALSE(self->IsExceptionPending());
1316 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1317 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001318 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001319 VerifyObject(obj);
1320 }
1321
1322 // Failure tests.
1323
1324 // Out-of-memory.
1325 {
1326 Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);
1327
1328 // Array helps to fill memory faster.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001329 Handle<mirror::Class> ca(
1330 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
1331
1332 // Use arbitrary large amount for now.
1333 static const size_t kMaxHandles = 1000000;
Ian Rogers700a4022014-05-19 16:49:03 -07001334 std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001335
1336 std::vector<Handle<mirror::Object>> handles;
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001337 // Start allocating with 128K
1338 size_t length = 128 * KB / 4;
1339 while (length > 10) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001340 Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
1341 mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
1342 if (self->IsExceptionPending() || h.Get() == nullptr) {
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001343 self->ClearException();
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001344
1345 // Try a smaller length
1346 length = length / 8;
1347 // Use at most half the reported free space.
1348 size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
1349 if (length * 8 > mem) {
1350 length = mem / 8;
1351 }
1352 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001353 handles.push_back(h);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001354 }
1355 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001356 LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001357
1358 // Allocate simple objects till it fails.
1359 while (!self->IsExceptionPending()) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001360 Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
1361 if (!self->IsExceptionPending() && h.Get() != nullptr) {
1362 handles.push_back(h);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001363 }
1364 }
1365 self->ClearException();
1366
Mathieu Chartiere401d142015-04-22 13:56:20 -07001367 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001368 StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001369 self);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001370 EXPECT_TRUE(self->IsExceptionPending());
1371 self->ClearException();
1372 EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001373 }
1374
1375 // Tests done.
1376#else
1377 LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
1378 // Force-print to std::cout so it's also outside the logcat.
1379 std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
1380#endif
1381}
1382
// Exercises the quick array-allocation entrypoints: a successful resolved allocation of a
// 10-element Object[] plus an out-of-memory failure path. The "unresolved" stub path (driven
// by a raw type_idx) is intentionally disabled below.
TEST_F(StubTest, AllocObjectArray) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  // This will lead to OOM  error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Handles for the array class under test and for a plain class that provides a linked method.
  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  // Dead code kept for reference; the doubled parentheses silence constant-condition warnings.
  if ((false)) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            10U,
                            // arbitrary
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0, sizeof(void*))),
                            StubTest::GetEntrypoint(self, kQuickAllocArray),
                            self);

    // On success: no pending exception, a non-null array of the requested class and length.
    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    // The result must be a 10-element object array of the requested class.
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    // A GB-sized length cannot be satisfied: the stub must leave an exception pending and
    // return null.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
                            GB,  // that should fail...
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1468
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001469
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001470TEST_F(StubTest, StringCompareTo) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001471#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001472 // TODO: Check the "Unresolved" allocation stubs
1473
1474 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -07001475
1476 const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);
1477
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001478 ScopedObjectAccess soa(self);
1479 // garbage is created during ClassLinker::Init
1480
1481 // Create some strings
1482 // Use array so we can index into it and use a matrix for expected results
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001483 // Setup: The first half is standard. The second half uses a non-zero offset.
1484 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001485 const char* c[] = { "", "", "a", "aa", "ab",
Serban Constantinescu86797a72014-06-19 16:17:56 +01001486 "aacaacaacaacaacaac", // This one's under the default limit to go to __memcmp16.
1487 "aacaacaacaacaacaacaacaacaacaacaacaac", // This one's over.
1488 "aacaacaacaacaacaacaacaacaacaacaacaaca" }; // As is this one. We need a separate one to
1489 // defeat object-equal optimizations.
Jeff Hao848f70a2014-01-15 13:49:50 -08001490 static constexpr size_t kStringCount = arraysize(c);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001491
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001492 StackHandleScope<kStringCount> hs(self);
1493 Handle<mirror::String> s[kStringCount];
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001494
Jeff Hao848f70a2014-01-15 13:49:50 -08001495 for (size_t i = 0; i < kStringCount; ++i) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001496 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001497 }
1498
1499 // TODO: wide characters
1500
1501 // Matrix of expectations. First component is first parameter. Note we only check against the
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001502 // sign, not the value. As we are testing random offsets, we need to compute this and need to
1503 // rely on String::CompareTo being correct.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001504 int32_t expected[kStringCount][kStringCount];
1505 for (size_t x = 0; x < kStringCount; ++x) {
1506 for (size_t y = 0; y < kStringCount; ++y) {
1507 expected[x][y] = s[x]->CompareTo(s[y].Get());
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001508 }
1509 }
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001510
1511 // Play with it...
1512
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001513 for (size_t x = 0; x < kStringCount; ++x) {
1514 for (size_t y = 0; y < kStringCount; ++y) {
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001515 // Test string_compareto x y
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001516 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
1517 reinterpret_cast<size_t>(s[y].Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001518 art_quick_string_compareto, self);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001519
1520 EXPECT_FALSE(self->IsExceptionPending());
1521
1522 // The result is a 32b signed integer
1523 union {
1524 size_t r;
1525 int32_t i;
1526 } conv;
1527 conv.r = result;
1528 int32_t e = expected[x][y];
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001529 EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1530 conv.r;
1531 EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1532 conv.r;
1533 EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1534 conv.r;
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001535 }
1536 }
1537
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001538 // TODO: Deallocate things.
1539
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001540 // Tests done.
1541#else
1542 LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
1543 // Force-print to std::cout so it's also outside the logcat.
1544 std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
1545 std::endl;
1546#endif
1547}
1548
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001549
Mathieu Chartierc7853442015-03-27 14:35:38 -07001550static void GetSetBooleanStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001551 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001552 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001553#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1554 (defined(__x86_64__) && !defined(__APPLE__))
Fred Shih37f05ef2014-07-16 18:38:08 -07001555 constexpr size_t num_values = 5;
1556 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1557
1558 for (size_t i = 0; i < num_values; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001559 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001560 static_cast<size_t>(values[i]),
1561 0U,
1562 StubTest::GetEntrypoint(self, kQuickSet8Static),
1563 self,
1564 referrer);
1565
Mathieu Chartierc7853442015-03-27 14:35:38 -07001566 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001567 0U, 0U,
1568 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1569 self,
1570 referrer);
1571 // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
1572 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1573 }
1574#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001575 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001576 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1577 // Force-print to std::cout so it's also outside the logcat.
1578 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1579#endif
1580}
Mathieu Chartiere401d142015-04-22 13:56:20 -07001581static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Mathieu Chartierc7853442015-03-27 14:35:38 -07001582 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001583 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001584#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1585 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001586 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001587
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001588 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001589 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001590 static_cast<size_t>(values[i]),
1591 0U,
1592 StubTest::GetEntrypoint(self, kQuickSet8Static),
1593 self,
1594 referrer);
1595
Mathieu Chartierc7853442015-03-27 14:35:38 -07001596 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001597 0U, 0U,
1598 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1599 self,
1600 referrer);
1601 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1602 }
1603#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001604 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001605 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1606 // Force-print to std::cout so it's also outside the logcat.
1607 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1608#endif
1609}
1610
1611
Mathieu Chartierc7853442015-03-27 14:35:38 -07001612static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001613 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001614 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001615#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1616 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001617 uint8_t values[] = { 0, true, 2, 128, 0xFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001618
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001619 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001620 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001621 reinterpret_cast<size_t>(obj->Get()),
1622 static_cast<size_t>(values[i]),
1623 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1624 self,
1625 referrer);
1626
Mathieu Chartierc7853442015-03-27 14:35:38 -07001627 uint8_t res = f->GetBoolean(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001628 EXPECT_EQ(values[i], res) << "Iteration " << i;
1629
Mathieu Chartierc7853442015-03-27 14:35:38 -07001630 f->SetBoolean<false>(obj->Get(), res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001631
Mathieu Chartierc7853442015-03-27 14:35:38 -07001632 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001633 reinterpret_cast<size_t>(obj->Get()),
1634 0U,
1635 StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
1636 self,
1637 referrer);
1638 EXPECT_EQ(res, static_cast<uint8_t>(res2));
1639 }
1640#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001641 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001642 LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
1643 // Force-print to std::cout so it's also outside the logcat.
1644 std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1645#endif
1646}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001647static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001648 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001649 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001650#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1651 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001652 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001653
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001654 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001655 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001656 reinterpret_cast<size_t>(obj->Get()),
1657 static_cast<size_t>(values[i]),
1658 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1659 self,
1660 referrer);
1661
Mathieu Chartierc7853442015-03-27 14:35:38 -07001662 int8_t res = f->GetByte(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001663 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001664 f->SetByte<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001665
Mathieu Chartierc7853442015-03-27 14:35:38 -07001666 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001667 reinterpret_cast<size_t>(obj->Get()),
1668 0U,
1669 StubTest::GetEntrypoint(self, kQuickGetByteInstance),
1670 self,
1671 referrer);
1672 EXPECT_EQ(res, static_cast<int8_t>(res2));
1673 }
1674#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001675 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001676 LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
1677 // Force-print to std::cout so it's also outside the logcat.
1678 std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1679#endif
1680}
1681
Mathieu Chartiere401d142015-04-22 13:56:20 -07001682static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001683 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001684 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001685#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1686 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001687 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001688
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001689 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001690 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001691 static_cast<size_t>(values[i]),
1692 0U,
1693 StubTest::GetEntrypoint(self, kQuickSet16Static),
1694 self,
1695 referrer);
1696
Mathieu Chartierc7853442015-03-27 14:35:38 -07001697 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001698 0U, 0U,
1699 StubTest::GetEntrypoint(self, kQuickGetCharStatic),
1700 self,
1701 referrer);
1702
1703 EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
1704 }
1705#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001706 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001707 LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
1708 // Force-print to std::cout so it's also outside the logcat.
1709 std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1710#endif
1711}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001712static void GetSetShortStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001713 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001714 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001715#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1716 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001717 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001718
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001719 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001720 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001721 static_cast<size_t>(values[i]),
1722 0U,
1723 StubTest::GetEntrypoint(self, kQuickSet16Static),
1724 self,
1725 referrer);
1726
Mathieu Chartierc7853442015-03-27 14:35:38 -07001727 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001728 0U, 0U,
1729 StubTest::GetEntrypoint(self, kQuickGetShortStatic),
1730 self,
1731 referrer);
1732
1733 EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
1734 }
1735#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001736 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001737 LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
1738 // Force-print to std::cout so it's also outside the logcat.
1739 std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1740#endif
1741}
1742
Mathieu Chartierc7853442015-03-27 14:35:38 -07001743static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001744 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001745 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001746#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1747 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001748 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001749
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001750 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001751 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001752 reinterpret_cast<size_t>(obj->Get()),
1753 static_cast<size_t>(values[i]),
1754 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1755 self,
1756 referrer);
1757
Mathieu Chartierc7853442015-03-27 14:35:38 -07001758 uint16_t res = f->GetChar(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001759 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001760 f->SetChar<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001761
Mathieu Chartierc7853442015-03-27 14:35:38 -07001762 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001763 reinterpret_cast<size_t>(obj->Get()),
1764 0U,
1765 StubTest::GetEntrypoint(self, kQuickGetCharInstance),
1766 self,
1767 referrer);
1768 EXPECT_EQ(res, static_cast<uint16_t>(res2));
1769 }
1770#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001771 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001772 LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
1773 // Force-print to std::cout so it's also outside the logcat.
1774 std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1775#endif
1776}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001777static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001778 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001779 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001780#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1781 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001782 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001783
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001784 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001785 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001786 reinterpret_cast<size_t>(obj->Get()),
1787 static_cast<size_t>(values[i]),
1788 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1789 self,
1790 referrer);
1791
Mathieu Chartierc7853442015-03-27 14:35:38 -07001792 int16_t res = f->GetShort(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001793 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001794 f->SetShort<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001795
Mathieu Chartierc7853442015-03-27 14:35:38 -07001796 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001797 reinterpret_cast<size_t>(obj->Get()),
1798 0U,
1799 StubTest::GetEntrypoint(self, kQuickGetShortInstance),
1800 self,
1801 referrer);
1802 EXPECT_EQ(res, static_cast<int16_t>(res2));
1803 }
1804#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001805 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001806 LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
1807 // Force-print to std::cout so it's also outside the logcat.
1808 std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1809#endif
1810}
1811
Mathieu Chartiere401d142015-04-22 13:56:20 -07001812static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001813 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001814 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001815#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1816 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001817 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001818
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001819 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001820 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001821 static_cast<size_t>(values[i]),
1822 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001823 StubTest::GetEntrypoint(self, kQuickSet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001824 self,
1825 referrer);
1826
Mathieu Chartierc7853442015-03-27 14:35:38 -07001827 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001828 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001829 StubTest::GetEntrypoint(self, kQuickGet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001830 self,
1831 referrer);
1832
Goran Jakovljevic04568812015-04-23 15:27:23 +02001833#if defined(__mips__) && defined(__LP64__)
1834 EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
1835#else
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001836 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Goran Jakovljevic04568812015-04-23 15:27:23 +02001837#endif
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001838 }
1839#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001840 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001841 LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
1842 // Force-print to std::cout so it's also outside the logcat.
1843 std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
1844#endif
1845}
1846
1847
Mathieu Chartierc7853442015-03-27 14:35:38 -07001848static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001849 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001850 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001851#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1852 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001853 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001854
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001855 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001856 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001857 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001858 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001859 StubTest::GetEntrypoint(self, kQuickSet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001860 self,
1861 referrer);
1862
Mathieu Chartierc7853442015-03-27 14:35:38 -07001863 int32_t res = f->GetInt(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001864 EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;
1865
1866 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001867 f->SetInt<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001868
Mathieu Chartierc7853442015-03-27 14:35:38 -07001869 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001870 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001871 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001872 StubTest::GetEntrypoint(self, kQuickGet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001873 self,
1874 referrer);
1875 EXPECT_EQ(res, static_cast<int32_t>(res2));
1876 }
1877#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001878 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001879 LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
1880 // Force-print to std::cout so it's also outside the logcat.
1881 std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1882#endif
1883}
1884
1885
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001886#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1887 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001888
1889static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001890 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001891 SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001892 test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1893 reinterpret_cast<size_t>(val),
1894 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001895 StubTest::GetEntrypoint(self, kQuickSetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001896 self,
1897 referrer);
1898
1899 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1900 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001901 StubTest::GetEntrypoint(self, kQuickGetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001902 self,
1903 referrer);
1904
1905 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1906}
1907#endif
1908
Mathieu Chartiere401d142015-04-22 13:56:20 -07001909static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001910 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001911 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001912#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1913 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001914 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001915
1916 // Allocate a string object for simplicity.
1917 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartierc7853442015-03-27 14:35:38 -07001918 set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001919
Mathieu Chartierc7853442015-03-27 14:35:38 -07001920 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001921#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001922 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001923 LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
1924 // Force-print to std::cout so it's also outside the logcat.
1925 std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
1926#endif
1927}
1928
1929
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001930#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1931 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001932static void set_and_check_instance(ArtField* f, mirror::Object* trg,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001933 mirror::Object* val, Thread* self, ArtMethod* referrer,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001934 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001935 SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001936 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001937 reinterpret_cast<size_t>(trg),
1938 reinterpret_cast<size_t>(val),
Andreas Gampe29b38412014-08-13 00:15:43 -07001939 StubTest::GetEntrypoint(self, kQuickSetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001940 self,
1941 referrer);
1942
Mathieu Chartierc7853442015-03-27 14:35:38 -07001943 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001944 reinterpret_cast<size_t>(trg),
1945 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001946 StubTest::GetEntrypoint(self, kQuickGetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001947 self,
1948 referrer);
1949
1950 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1951
Mathieu Chartierc7853442015-03-27 14:35:38 -07001952 EXPECT_EQ(val, f->GetObj(trg));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001953}
1954#endif
1955
Mathieu Chartierc7853442015-03-27 14:35:38 -07001956static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001957 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001958 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001959#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1960 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001961 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001962
1963 // Allocate a string object for simplicity.
1964 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001965 set_and_check_instance(f, obj->Get(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001966
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001967 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001968#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001969 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001970 LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
1971 // Force-print to std::cout so it's also outside the logcat.
1972 std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
1973#endif
1974}
1975
1976
1977// TODO: Complete these tests for 32b architectures.
1978
Mathieu Chartiere401d142015-04-22 13:56:20 -07001979static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001980 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001981 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001982#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
1983 defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001984 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001985
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001986 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001987 test->Invoke3UWithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001988 values[i],
Andreas Gampe29b38412014-08-13 00:15:43 -07001989 StubTest::GetEntrypoint(self, kQuickSet64Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001990 self,
1991 referrer);
1992
Mathieu Chartierc7853442015-03-27 14:35:38 -07001993 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001994 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001995 StubTest::GetEntrypoint(self, kQuickGet64Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001996 self,
1997 referrer);
1998
1999 EXPECT_EQ(res, values[i]) << "Iteration " << i;
2000 }
2001#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002002 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002003 LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
2004 // Force-print to std::cout so it's also outside the logcat.
2005 std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
2006#endif
2007}
2008
2009
Mathieu Chartierc7853442015-03-27 14:35:38 -07002010static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07002011 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07002012 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02002013#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
2014 defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002015 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002016
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002017 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07002018 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07002019 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002020 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07002021 StubTest::GetEntrypoint(self, kQuickSet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002022 self,
2023 referrer);
2024
Mathieu Chartierc7853442015-03-27 14:35:38 -07002025 int64_t res = f->GetLong(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002026 EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;
2027
2028 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07002029 f->SetLong<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002030
Mathieu Chartierc7853442015-03-27 14:35:38 -07002031 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07002032 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002033 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07002034 StubTest::GetEntrypoint(self, kQuickGet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002035 self,
2036 referrer);
2037 EXPECT_EQ(res, static_cast<int64_t>(res2));
2038 }
2039#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002040 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002041 LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
2042 // Force-print to std::cout so it's also outside the logcat.
2043 std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
2044#endif
2045}
2046
2047static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
2048 // garbage is created during ClassLinker::Init
2049
2050 JNIEnv* env = Thread::Current()->GetJniEnv();
2051 jclass jc = env->FindClass("AllFields");
Mathieu Chartier2cebb242015-04-21 16:50:40 -07002052 CHECK(jc != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002053 jobject o = env->AllocObject(jc);
Mathieu Chartier2cebb242015-04-21 16:50:40 -07002054 CHECK(o != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002055
2056 ScopedObjectAccess soa(self);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002057 StackHandleScope<3> hs(self);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07002058 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
2059 Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002060 // Need a method as a referrer
Mathieu Chartiere401d142015-04-22 13:56:20 -07002061 ArtMethod* m = c->GetDirectMethod(0, sizeof(void*));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002062
2063 // Play with it...
2064
2065 // Static fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002066 for (ArtField& f : c->GetSFields()) {
2067 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07002068 if (test_type != type) {
2069 continue;
2070 }
2071 switch (type) {
2072 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002073 GetSetBooleanStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002074 break;
2075 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002076 GetSetByteStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002077 break;
2078 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002079 GetSetCharStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002080 break;
2081 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002082 GetSetShortStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002083 break;
2084 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002085 GetSet32Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002086 break;
2087 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002088 GetSet64Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002089 break;
2090 case Primitive::Type::kPrimNot:
2091 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002092 if (f.GetTypeDescriptor()[0] != '[') {
2093 GetSetObjStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002094 }
2095 break;
2096 default:
2097 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002098 }
2099 }
2100
2101 // Instance fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002102 for (ArtField& f : c->GetIFields()) {
2103 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07002104 if (test_type != type) {
2105 continue;
2106 }
2107 switch (type) {
2108 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002109 GetSetBooleanInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002110 break;
2111 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002112 GetSetByteInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002113 break;
2114 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002115 GetSetCharInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002116 break;
2117 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002118 GetSetShortInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002119 break;
2120 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002121 GetSet32Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002122 break;
2123 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002124 GetSet64Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002125 break;
2126 case Primitive::Type::kPrimNot:
2127 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07002128 if (f.GetTypeDescriptor()[0] != '[') {
2129 GetSetObjInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07002130 }
2131 break;
2132 default:
2133 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002134 }
2135 }
2136
2137 // TODO: Deallocate things.
2138}
2139
Fred Shih37f05ef2014-07-16 18:38:08 -07002140TEST_F(StubTest, Fields8) {
Fred Shih37f05ef2014-07-16 18:38:08 -07002141 Thread* self = Thread::Current();
2142
2143 self->TransitionFromSuspendedToRunnable();
2144 LoadDex("AllFields");
2145 bool started = runtime_->Start();
2146 CHECK(started);
2147
2148 TestFields(self, this, Primitive::Type::kPrimBoolean);
2149 TestFields(self, this, Primitive::Type::kPrimByte);
2150}
2151
2152TEST_F(StubTest, Fields16) {
Fred Shih37f05ef2014-07-16 18:38:08 -07002153 Thread* self = Thread::Current();
2154
2155 self->TransitionFromSuspendedToRunnable();
2156 LoadDex("AllFields");
2157 bool started = runtime_->Start();
2158 CHECK(started);
2159
2160 TestFields(self, this, Primitive::Type::kPrimChar);
2161 TestFields(self, this, Primitive::Type::kPrimShort);
2162}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002163
2164TEST_F(StubTest, Fields32) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002165 Thread* self = Thread::Current();
2166
2167 self->TransitionFromSuspendedToRunnable();
2168 LoadDex("AllFields");
2169 bool started = runtime_->Start();
2170 CHECK(started);
2171
2172 TestFields(self, this, Primitive::Type::kPrimInt);
2173}
2174
2175TEST_F(StubTest, FieldsObj) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002176 Thread* self = Thread::Current();
2177
2178 self->TransitionFromSuspendedToRunnable();
2179 LoadDex("AllFields");
2180 bool started = runtime_->Start();
2181 CHECK(started);
2182
2183 TestFields(self, this, Primitive::Type::kPrimNot);
2184}
2185
2186TEST_F(StubTest, Fields64) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002187 Thread* self = Thread::Current();
2188
2189 self->TransitionFromSuspendedToRunnable();
2190 LoadDex("AllFields");
2191 bool started = runtime_->Start();
2192 CHECK(started);
2193
2194 TestFields(self, this, Primitive::Type::kPrimLong);
2195}
2196
// Tests interface-method dispatch through the stubs: first the IMT-conflict
// trampoline (with the interface method index passed as the hidden argument),
// then the regular interface trampoline with access check. Uses
// java.util.List.contains() resolved against a java.util.ArrayList receiver.
TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  // NOTE(review): sized 7 but only two handles are created below — appears
  // over-sized; harmless but could be shrunk.
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(
      arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get representation.
  ArtMethod* contains_amethod = soa.DecodeMethod(contains_jmethod);

  // Patch up ArrayList.contains: if it has no compiled code, point its quick
  // entrypoint at the quick-to-interpreter bridge so the stub can invoke it.
  if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(
      list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  ArtMethod* inf_contains = soa.DecodeMethod(inf_contains_jmethod);

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains. The interface method's dex index rides in the hidden argument;
  // the empty list must report the object as absent.

  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self, contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());

  // Contains. After adding, the same dispatch must now find the object.

  result = Invoke3WithReferrerAndHidden(
      0U, reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(obj.Get()),
      StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline), self, contains_amethod,
      static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline

  // Positive case: the object added above must be found.
  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                                       kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // Negative case: the list does not contain itself.
  result = Invoke3WithReferrer(
      static_cast<size_t>(inf_contains->GetDexMethodIndex()),
      reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
      StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
      contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2316
// Tests the kQuickIndexOf stub (String.indexOf fast path): for every
// (string, char, start) combination, the stub's result is compared against an
// expectation matrix precomputed with the interpreter-side String::FastIndexOf.
TEST_F(StubTest, StringIndexOf) {
#if defined(__arm__) || defined(__aarch64__)
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
  static constexpr size_t kStringCount = arraysize(c_str);
  const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
  static constexpr size_t kCharCount = arraysize(c_char);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
  }

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  static constexpr size_t kMaxLen = 9;
  DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";

  // Last dimension: start, offset by 1 (so index 0 corresponds to start == -1).
  int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
      }
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        int32_t start = static_cast<int32_t>(z) - 1;

        // Test string_compareto x y
        size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
                                StubTest::GetEntrypoint(self, kQuickIndexOf), self);

        EXPECT_FALSE(self->IsExceptionPending());

        // The result is a 32b signed integer
        // (reinterpret the low 32 bits of the stub's word-sized return value).
        union {
          size_t r;
          int32_t i;
        } conv;
        conv.r = result;

        EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
            c_char[y] << " @ " << start;
      }
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2390
// Tests the kQuickReadBarrierSlow stub: loading an object's klass reference
// (at Object::ClassOffset) through the slow-path read barrier must return the
// same class as a direct GetClass() call. Only built when the runtime uses
// read barriers.
TEST_F(StubTest, ReadBarrier) {
#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
    defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
  Thread* self = Thread::Current();

  const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Build an object instance
  Handle<mirror::Object> obj(hs.NewHandle(c->AllocObject(soa.Self())));

  EXPECT_FALSE(self->IsExceptionPending());

  // Arguments: (ref unused, object, offset of the field being read).
  size_t result = Invoke3(0U, reinterpret_cast<size_t>(obj.Get()),
                          mirror::Object::ClassOffset().SizeValue(), readBarrierSlow, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
  mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
  EXPECT_EQ(klass, obj->GetClass());

  // Tests done.
#else
  LOG(INFO) << "Skipping read_barrier_slow";
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping read_barrier_slow" << std::endl;
#endif
}
2426
Andreas Gampe525cde22014-04-22 15:44:50 -07002427} // namespace art