/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "common_runtime_test.h"
#include "mirror/art_field-inl.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/string-inl.h"

#include <cstdio>

namespace art {


class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods.
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(type), type);
        }
      }
    }
  }

  void SetUpRuntimeOptions(Runtime::Options *options) OVERRIDE {
    // Use a smaller heap.
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }
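  // (TEST_F defines a new subclass of StubTest for each test body; the private tlsPtr_ is
  //  reachable from StubTest itself but not from those generated subclasses, so the tests below
  //  fetch raw quick entrypoint addresses through this accessor.)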

 public:
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }
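
  // Usage sketch: the tests below call a stub through this wrapper with up to three word-sized
  // arguments plus the current thread, casting objects and code pointers as needed, e.g.
  //   Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
  //           reinterpret_cast<uintptr_t>(&art_quick_lock_object), Thread::Current());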

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             mirror::ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer.
        "call *%%edi\n\t"           // Call the stub.
        "addl $16, %%esp"           // Pop referrer and alignment padding.
        : "=a" (result)
        // Use the result from eax.
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
        // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx.
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    // but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B.
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\t"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned.
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"
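        // (r9 serves as ART's ARM thread register: it is saved above, borrowed as a scratch
        //  register for the referrer, restored here, and reloaded with Thread* just before the
        //  call below.)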

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub.
        "add sp, sp, #12\n\t"       // Pop referrer, padding, and the saved r9.
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state.
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result.
        : [result] "=r" (result)
        // Use the result from r0.
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"       // Reserve stack space, 16B aligned.
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t" // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "str %[self], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15.
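        // (d8-d15 are callee-saved under the AAPCS64 ABI, so a correct stub must preserve them.
        //  We seed them with a recognizable bit pattern here and verify it after the call.)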
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldr x18, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"                // Call the stub.
        "mov x8, x0\n\t"            // Store result.
        "add sp, sp, #16\n\t"       // Drop the quick "frame".
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"            // Use x9 as flag, in clobber list.

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"      // Restore stuff not named clobbered.
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"       // Free stack space, now sp as on entry.
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t" // Store the FPR comparison result.
        "mov %[result], x8\n\t"     // Store the call result.

        "b 3f\n\t"                  // Goto end.

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                  // Goto finish-up.

        // End.
        "3:\n\t"
        : [result] "=r" (result)
        // Use the result from x0 (saved in x8 across the FPR check).
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
    // Note: Uses the native convention.
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"     // Push referrer.
        "pushq (%%rsp)\n\t"         // & 16B alignment padding.
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"           // Call the stub.
        "addq $16, %%rsp\n\t"       // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
        // Use the result from rax.
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer)
        // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax.
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

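  // The variant below additionally threads a "hidden" argument through to the stub, outside the
  // normal arg0-arg2 slots: per the asm, it lands in xmm0 on x86/x86-64 and in r12/x12 on
  // ARM/ARM64. Some trampolines expect such an extra out-of-band argument; the mechanics
  // otherwise mirror Invoke3WithReferrer.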
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, mirror::ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movd %[hidden], %%xmm0\n\t"
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer.
        "call *%%edi\n\t"           // Call the stub.
        "addl $16, %%esp"           // Pop referrer and alignment padding.
        : "=a" (result)
        // Use the result from eax.
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"m"(referrer), [hidden]"r"(hidden)
        // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx.
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    // but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B.
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\t"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned.
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub.
        "add sp, sp, #12\n\t"       // Pop referrer, padding, and the saved r9.
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state.
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result.
        : [result] "=r" (result)
        // Use the result from r0.
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"       // Reserve stack space, 16B aligned.
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t" // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15.
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x18, x12, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"                // Call the stub.
        "mov x8, x0\n\t"            // Store result.
        "add sp, sp, #16\n\t"       // Drop the quick "frame".
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"            // Use x9 as flag, in clobber list.

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"      // Restore stuff not named clobbered.
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"       // Free stack space, now sp as on entry.
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t" // Store the FPR comparison result.
        "mov %[result], x8\n\t"     // Store the call result.

        "b 3f\n\t"                  // Goto end.

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                  // Goto finish-up.

        // End.
        "3:\n\t"
        : [result] "=r" (result)
        // Use the result from x0 (saved in x8 across the FPR check).
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
    // Note: Uses the native convention.
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movq %[hidden], %%r9\n\t"  // No need to save r9, listed as clobbered.
        "movd %%r9, %%xmm0\n\t"
        "pushq %[referrer]\n\t"     // Push referrer.
        "pushq (%%rsp)\n\t"         // & 16B alignment padding.
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"           // Call the stub.
        "addq $16, %%rsp\n\t"       // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
        // Use the result from rax.
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer), [hidden] "m"(hidden)
        // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax.
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Method with 32b arg0, 64b arg1.
  size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
                              mirror::ArtMethod* referrer) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
#else
    // Need to split up arguments.
    uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
    uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);
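    // (On 32-bit targets a 64-bit value occupies two word-sized argument slots: the low word
    //  goes in arg1 and the high word in arg2, matching a little-endian register-pair layout.)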

    return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
#endif
  }

  // Method with 32b arg0, 32b arg1, 64b arg2.
  size_t Invoke3UUWithReferrer(uint32_t arg0, uint32_t arg1, uint64_t arg2, uintptr_t code,
                               Thread* self, mirror::ArtMethod* referrer) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, referrer);
#else
    // TODO: Needs 4-param invoke.
    return 0;
#endif
  }

 protected:
  size_t fp_result;
};


#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_memcpy(void);
#endif
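
// The stubs are implemented in per-architecture assembly. They are declared here with a dummy
// void(void) signature only so their addresses can be taken and handed to Invoke3 as raw code
// pointers; the effective calling convention is whatever each stub itself expects.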

TEST_F(StubTest, Memcpy) {
#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  uint32_t orig[20];
  uint32_t trg[20];
  for (size_t i = 0; i < 20; ++i) {
    orig[i] = i;
    trg[i] = 0;
  }

  Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
          10 * sizeof(uint32_t), reinterpret_cast<uintptr_t>(&art_quick_memcpy), self);

  EXPECT_EQ(orig[0], trg[0]);

  for (size_t i = 1; i < 4; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  for (size_t i = 4; i < 14; ++i) {
    EXPECT_EQ(orig[i], trg[i]);
  }

  for (size_t i = 14; i < 20; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  // TODO: Test overlapping?

#else
  LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_lock_object(void);
#endif

TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;
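  // (100 recursive acquisitions is assumed to stay well within the thin lock's recursion-count
  //  field, so the lock should never inflate during the loop below.)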

  Thread* self = Thread::Current();
  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero.

  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
            reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

    // Check we're at lock count i.
    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U,
          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  uint32_t next() {
    val_ = val_ * 48271 % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;
};
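
// (RandGen is a small Lehmer-style linear congruential generator: 48271 and 2147483647, i.e.
//  2^31 - 1, are the classic MINSTD multiplier and modulus, here with an extra "+ 13" tweak and
//  wrap-around uint32_t arithmetic. It only needs to be deterministic and cheap, not
//  statistically strong; the stress test below seeds it with a fixed value so that failures are
//  reproducible.)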

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_lock_object(void);
extern "C" void art_quick_unlock_object(void);
#endif

// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();
  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects/locks.
  StackHandleScope<kNumberOfLocks + 1> hs(self);
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.

  RandGen r(0x1234);

  constexpr size_t kIterations = 10000;  // Number of iterations.
  constexpr size_t kMoveToFat = 1000;    // Chance of 1:kMoveToFat to make a lock fat.
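  // (With these values, roughly ten inflations are expected over the whole run: 10000 iterations
  //  times a 1/1000 inflation chance, spread across the ten locks, so each lock goes fat about
  //  once on average.)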

  size_t counts[kNumberOfLocks];
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        lock = true;
      } else if (counts[index] == kThinLockLoops) {
        lock = false;
      } else {
        // Randomly.
        lock = r.next() % 2 == 0;
      }

      if (lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go in reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                    reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
      count--;
    }

    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, UnlockObject) {
  TestUnlockObject(this);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_check_cast(void);
#endif

TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();
  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.
  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_aput_obj_with_null_and_bound_check(void);
// Do not check non-checked ones, we'd need handlers and stuff...
#endif

TEST_F(StubTest, APutObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();
  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a string array of size 10.
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable.
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning.
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3]

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index >= array length

  Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[])

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, AllocObject) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Use an arbitrary method from c to use as referrer.
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0)),  // arbitrary
                            0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases).
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases).
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K.
    size_t length = 128 * KB / 4;
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length.
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1124
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001125TEST_F(StubTest, AllocObjectArray) {
1126 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1127
Ian Rogersc3ccc102014-06-25 11:52:14 -07001128#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001129 // TODO: Check the "Unresolved" allocation stubs
1130
1131 Thread* self = Thread::Current();
1132 // Create an object
1133 ScopedObjectAccess soa(self);
1134 // garbage is created during ClassLinker::Init
1135
  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  if (false) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()), // type_idx
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)), // arbitrary
                            10U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArray),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 10U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
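    // A GB-sized element count cannot be satisfied by the test heap, so the resolved stub is
    // expected to throw OutOfMemoryError and return null.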
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr),
                            GB, // that should fail...
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_string_compareto(void);
#endif

TEST_F(StubTest, StringCompareTo) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))

  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  static constexpr size_t kBaseStringCount = 7;
  const char* c[kBaseStringCount] = { "", "", "a", "aa", "ab", "aac", "aac" };

  static constexpr size_t kStringCount = 2 * kBaseStringCount;

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kBaseStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  RandGen r(0x1234);

  for (size_t i = kBaseStringCount; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i - kBaseStringCount]));
    int32_t length = s[i]->GetLength();
    if (length > 1) {
      // Set a random offset and length.
      int32_t new_offset = 1 + (r.next() % (length - 1));
      int32_t rest = length - new_offset - 1;
      int32_t new_length = 1 + (rest > 0 ? r.next() % rest : 0);

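      // Poke the String's count and offset fields directly, turning the string into a window
      // into its backing char array; this bypasses the String API on purpose so the stub's
      // handling of non-zero offsets is exercised.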
      s[i]->SetField32<false>(mirror::String::CountOffset(), new_length);
      s[i]->SetField32<false>(mirror::String::OffsetOffset(), new_offset);
    }
  }

  // TODO: wide characters

  // Matrix of expectations. The first index is the first parameter. Note that we only check the
  // sign of the result, not its value: because of the random offsets the expected values have to
  // be computed at runtime, so we rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              reinterpret_cast<uintptr_t>(&art_quick_string_compareto), self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer returned in a full machine word; pick out the low
      // 32 bits through a union rather than a cast (the ISAs exercised here are little-endian).
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_set32_static(void);
extern "C" void art_quick_get32_static(void);
#endif

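// Writes each test value through the set32_static stub, reads it back through the get32_static
// stub, and expects a lossless round-trip.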
static void GetSet32Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              reinterpret_cast<uintptr_t>(&art_quick_set32_static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           reinterpret_cast<uintptr_t>(&art_quick_get32_static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_set32_instance(void);
extern "C" void art_quick_get32_instance(void);
#endif

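// Instance variant of the 32-bit test: besides the stub round-trip, the value is cross-checked
// (and perturbed) via ArtField::GetInt/SetInt on the runtime side.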
static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              reinterpret_cast<uintptr_t>(&art_quick_set32_instance),
                              self,
                              referrer);

    int32_t res = f->Get()->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            reinterpret_cast<uintptr_t>(&art_quick_get32_instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_set_obj_static(void);
extern "C" void art_quick_get_obj_static(void);

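// Stores 'val' into the static object field via the set_obj stub, reads it back via the get_obj
// stub, and expects reference identity.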
static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            reinterpret_cast<uintptr_t>(&art_quick_set_obj_static),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         reinterpret_cast<uintptr_t>(&art_quick_get_obj_static),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif

static void GetSetObjStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                            mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static((*f)->GetDexFieldIndex(), str, self, referrer, test);

  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_set_obj_instance(void);
extern "C" void art_quick_get_obj_instance(void);

static void set_and_check_instance(Handle<mirror::ArtField>* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
                                   StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            reinterpret_cast<uintptr_t>(&art_quick_set_obj_instance),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         reinterpret_cast<uintptr_t>(&art_quick_get_obj_instance),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  EXPECT_EQ(val, f->Get()->GetObj(trg));
}
#endif

static void GetSetObjInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                              Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


// TODO: Complete these tests for 32b architectures.

#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
extern "C" void art_quick_set64_static(void);
extern "C" void art_quick_get64_static(void);
#endif

static void GetSet64Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

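  // Note: the value is passed as a single 64-bit argument via Invoke3UWithReferrer; these stubs
  // are only exercised on 64-bit ISAs here (see the TODO about 32b architectures above).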
  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3UWithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                               values[i],
                               reinterpret_cast<uintptr_t>(&art_quick_set64_static),
                               self,
                               referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           reinterpret_cast<uintptr_t>(&art_quick_get64_static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
extern "C" void art_quick_set64_instance(void);
extern "C" void art_quick_get64_instance(void);
#endif

static void GetSet64Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              reinterpret_cast<uintptr_t>(&art_quick_set64_instance),
                              self,
                              referrer);

    int64_t res = f->Get()->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            reinterpret_cast<uintptr_t>(&art_quick_get64_instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // garbage is created during ClassLinker::Init

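  // Load and instantiate the AllFields test class via JNI so that its fields are resolved and an
  // instance is available for the instance-field tests below.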
  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);

  ScopedObjectAccess soa(self);
  StackHandleScope<5> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer
  Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));

  // Play with it...

  // Static fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetSFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      Primitive::Type type = f->GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
            GetSetObjStatic(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break; // Skip.
      }
    }
  }

  // Instance fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetIFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      Primitive::Type type = f->GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
            GetSetObjInstance(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break; // Skip.
      }
    }
  }

  // TODO: Deallocate things.
}


TEST_F(StubTest, Fields32) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

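  // The runtime must be started (and this thread made runnable) before TestFields can use JNI to
  // allocate objects; the same setup is repeated in the other field tests below.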
  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimInt);
}

TEST_F(StubTest, FieldsObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimNot);
}

TEST_F(StubTest, Fields64) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimLong);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_imt_conflict_trampoline(void);
#endif

TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> contains_amethod(hs.NewHandle(soa.DecodeMethod(contains_jmethod)));

  // Patch up ArrayList.contains.
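  // If the method has no compiled code, point its entrypoint at the quick-to-interpreter bridge
  // so that dispatch through the stub can still complete.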
  if (contains_amethod.Get()->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod.Get()->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        GetTlsPtr(self)->quick_entrypoints.pQuickToInterpreterBridge));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> inf_contains(hs.NewHandle(soa.DecodeMethod(inf_contains_jmethod)));

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Sanity check: check that there is a conflict for List.contains in ArrayList.

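  // The IMT has ClassLinker::kImtSize slots, indexed by the interface method's dex method index
  // modulo the table size. A slot shared by several interface methods holds the conflict
  // trampoline rather than a concrete method; that trampoline is what this test exercises.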
  mirror::Class* arraylist_class = soa.Decode<mirror::Class*>(arraylist_jclass);
  mirror::ArtMethod* m = arraylist_class->GetImTable()->Get(
      inf_contains->GetDexMethodIndex() % ClassLinker::kImtSize);

  if (!m->IsImtConflictMethod()) {
    LOG(WARNING) << "Test is meaningless, no IMT conflict in setup: " <<
        PrettyMethod(m, true);
    LOG(WARNING) << "Please update StubTest.IMT.";
    return;
  }

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invoke.

  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   reinterpret_cast<uintptr_t>(&art_quick_imt_conflict_trampoline),
                                   self, contains_amethod.Get(),
                                   static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));

  // Invoke again.

  result = Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                        reinterpret_cast<size_t>(obj.Get()),
                                        reinterpret_cast<uintptr_t>(&art_quick_imt_conflict_trampoline),
                                        self, contains_amethod.Get(),
                                        static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

#if defined(__arm__) || defined(__aarch64__)
extern "C" void art_quick_indexof(void);
#endif

TEST_F(StubTest, StringIndexOf) {
#if defined(__arm__) || defined(__aarch64__)
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // TODO: Shared backing arrays.
  static constexpr size_t kStringCount = 7;
  const char* c_str[kStringCount] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
  static constexpr size_t kCharCount = 5;
  const char c_char[kCharCount] = { 'a', 'b', 'c', 'd', 'e' };

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
  }

  // Matrix of expectations, indexed by string, character, and start position. Unlike the
  // compareto test above there are no random offsets here, so we check the exact result
  // against String::FastIndexOf, which we rely on being correct.
  static constexpr size_t kMaxLen = 9;
  DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";

  // Last dimension: start position, offset by 1 so that start == -1 (before the string) and
  // start == kMaxLen + 1 (past the end) are also covered.
  int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
      }
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        int32_t start = static_cast<int32_t>(z) - 1;

        // Test indexof: s[x], looking for c_char[y], from position 'start'.
        size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
                                reinterpret_cast<uintptr_t>(&art_quick_indexof), self);

        EXPECT_FALSE(self->IsExceptionPending());

        // The result is a 32b signed integer
        union {
          size_t r;
          int32_t i;
        } conv;
        conv.r = result;

        EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
            c_char[y] << " @ " << start;
      }
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

} // namespace art