/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "common_runtime_test.h"
#include "mirror/art_field-inl.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/string-inl.h"

#include <cstdio>

namespace art {


class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(type), type);
        }
      }
    }
  }

  void SetUpRuntimeOptions(Runtime::Options *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }
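  // Note: the "-Xint" option above forces interpret-only mode; presumably this keeps compiled
  // managed code out of the picture so the hand-written assembly stubs are what these tests
  // actually exercise. The original change does not document the rationale.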

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             mirror::ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $4, %%esp"            // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop referrer and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "str %[self], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldr x18, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"


        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"   // Store the FPR comparison result
        "mov %[result], x8\n\t"       // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from x0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop referrer and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

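  // Note on the arm64 block above: d8-d15 are exactly the FP registers AAPCS64 requires a callee
  // to preserve, so the harness deliberately garbles them before the call and compares them
  // afterwards; a stub that fails to save/restore any of them trips the fpr_result check.
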
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, mirror::ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movd %[hidden], %%xmm0\n\t"
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $4, %%esp"            // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"m"(referrer), [hidden]"r"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop referrer and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x18, x12, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"   // Store the FPR comparison result
        "mov %[result], x8\n\t"       // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from x0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movq %[hidden], %%r9\n\t"     // No need to save r9, listed as clobbered
        "movd %%r9, %%xmm0\n\t"
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop referrer and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer), [hidden] "m"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

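  // Note on the "hidden" argument: it is deliberately delivered outside the normal managed
  // argument registers (xmm0 on x86/x86-64, r12 on arm, x12 on arm64). Presumably this matches
  // stubs that expect an extra method pointer in a side channel, such as an imt-conflict
  // trampoline; the exact consumer is an assumption here, the harness only guarantees delivery.
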
  // Method with 32b arg0, 64b arg1
  size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
                              mirror::ArtMethod* referrer) {
#if defined(__x86_64__) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
#else
    // Need to split up arguments.
    uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
    uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);

    return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
#endif
  }

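  // Worked example of the split above (values arbitrary): on a 32-bit target,
  // arg1 = 0x1122334455667788 is passed as lower = 0x55667788 and upper = 0x11223344,
  // i.e. the stub sees the low word in the second argument register and the high word in the
  // third.
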
  // Method with 32b arg0, 32b arg1, 64b arg2
  size_t Invoke3UUWithReferrer(uint32_t arg0, uint32_t arg1, uint64_t arg2, uintptr_t code,
                               Thread* self, mirror::ArtMethod* referrer) {
#if defined(__x86_64__) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, referrer);
#else
    // TODO: Needs 4-param invoke.
    return 0;
#endif
  }
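
  // Note: on 32-bit targets the 64b arg2 would need a fourth argument slot, which this harness
  // cannot deliver yet (see the TODO above), so callers get 0 back and must gate their
  // expectations on the architecture.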

 protected:
  size_t fp_result;
};


#if defined(__i386__) || defined(__x86_64__)
extern "C" void art_quick_memcpy(void);
#endif

TEST_F(StubTest, Memcpy) {
#if defined(__i386__) || defined(__x86_64__)
  Thread* self = Thread::Current();

  uint32_t orig[20];
  uint32_t trg[20];
  for (size_t i = 0; i < 20; ++i) {
    orig[i] = i;
    trg[i] = 0;
  }

  Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
          10 * sizeof(uint32_t), reinterpret_cast<uintptr_t>(&art_quick_memcpy), self);

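  // Only words 4..13 are copied. trg[] was zero-filled and orig[i] == i, so the EXPECT_NE checks
  // below prove the guard regions (1..3 and 14..19) were left untouched; index 0 is equal by
  // construction, since both arrays start with 0.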
  EXPECT_EQ(orig[0], trg[0]);

  for (size_t i = 1; i < 4; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  for (size_t i = 4; i < 14; ++i) {
    EXPECT_EQ(orig[i], trg[i]);
  }

  for (size_t i = 14; i < 20; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  // TODO: Test overlapping?

#else
  LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_lock_object(void);
#endif

TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
            reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

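  // Each re-lock by the same thread bumps the recursion count stored in the thin lock word, as
  // the ThinLockCount() checks above show; kThinLockLoops = 100 is presumably chosen to stay
  // below the point where the count field would saturate and force inflation to a fat lock.
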
  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U,
          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  uint32_t next() {
    val_ = val_ * 48271 % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;
};

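// The generator above looks like the MINSTD Lehmer sequence (multiplier 48271, modulus 2^31 - 1)
// with an extra "+ 13". Statistical quality is irrelevant here; it only needs to be a cheap,
// deterministic source of values so the stress test below is reproducible across runs.
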
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_lock_object(void);
extern "C" void art_quick_unlock_object(void);
#endif

// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects = number of locks.
  StackHandleScope<kNumberOfLocks + 1> hs(self);
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.

  RandGen r(0x1234);

  constexpr size_t kIterations = 10000;  // Number of iterations
  constexpr size_t kMoveToFat = 1000;    // Chance of 1:kMoveToFat to make a lock fat.

  size_t counts[kNumberOfLocks];
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        lock = true;
      } else if (counts[index] == kThinLockLoops) {
        lock = false;
      } else {
        // Randomly.
        lock = r.next() % 2 == 0;
      }

      if (lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                    reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
      count--;
    }

    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, UnlockObject) {
  TestUnlockObject(this);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_check_cast(void);
#endif

TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  Thread* self = Thread::Current();
  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

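  // Note the asymmetry being exercised: with c = Object[] and c2 = String[], the
  // (Object[], String[]) order above succeeds while the (String[], Object[]) order below must
  // throw, matching the one-way assignability of covariant array types.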
  // TODO: Make the following work. But that would require correct managed frames.

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_aput_obj_with_null_and_bound_check(void);
// Do not check non-checked ones, we'd need handlers and stuff...
#endif

TEST_F(StubTest, APutObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a string array of size 10
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3]

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index >= length

  Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[])

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, AllocObject) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0)),  // arbitrary
                            0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, AllocObjectArray) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  if (false) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)),  // arbitrary
                            10U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArray),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 10U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr),
                            GB,  // that should fail...
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001204
Andreas Gampe266340d2014-05-02 07:55:24 -07001205#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001206extern "C" void art_quick_string_compareto(void);
1207#endif
1208
1209TEST_F(StubTest, StringCompareTo) {
1210 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1211
Andreas Gampe266340d2014-05-02 07:55:24 -07001212#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001213 // TODO: Check the "Unresolved" allocation stubs
1214
1215 Thread* self = Thread::Current();
1216 ScopedObjectAccess soa(self);
1217 // garbage is created during ClassLinker::Init
1218
1219 // Create some strings
1220 // Use array so we can index into it and use a matrix for expected results
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001221 // Setup: The first half is standard. The second half uses a non-zero offset.
1222 // TODO: Shared backing arrays.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001223 static constexpr size_t kBaseStringCount = 7;
1224 const char* c[kBaseStringCount] = { "", "", "a", "aa", "ab", "aac", "aac" , };
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001225
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001226 static constexpr size_t kStringCount = 2 * kBaseStringCount;
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001227
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001228 StackHandleScope<kStringCount> hs(self);
1229 Handle<mirror::String> s[kStringCount];
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001230
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001231 for (size_t i = 0; i < kBaseStringCount; ++i) {
1232 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001233 }
1234
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001235 RandGen r(0x1234);
1236
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001237 for (size_t i = kBaseStringCount; i < kStringCount; ++i) {
1238 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i - kBaseStringCount]));
1239 int32_t length = s[i]->GetLength();
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001240 if (length > 1) {
1241 // Set a random offset and length.
1242 int32_t new_offset = 1 + (r.next() % (length - 1));
1243 int32_t rest = length - new_offset - 1;
1244 int32_t new_length = 1 + (rest > 0 ? r.next() % rest : 0);
1245
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001246 s[i]->SetField32<false>(mirror::String::CountOffset(), new_length);
1247 s[i]->SetField32<false>(mirror::String::OffsetOffset(), new_offset);
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001248 }
1249 }
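
  // The stores above poke the String's count and offset fields directly, simulating substrings
  // that share a backing char array; this exercises the stub's handling of non-zero offsets
  // rather than only the canonical offset-zero layout.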

  // TODO: Test wide characters.

  // Matrix of expectations. The first component is the first parameter. Note we only check
  // against the sign, not the value. As we are testing random offsets, we need to compute this
  // and rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y.
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              reinterpret_cast<uintptr_t>(&art_quick_string_compareto), self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32-bit signed integer; reinterpret the low bits of the returned word.
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      // The second half of s[] reuses the contents of c[], so index c with x % kBaseStringCount
      // (indexing with the raw x would read past the end of c).
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x % kBaseStringCount] << " y="
          << c[y % kBaseStringCount] << " res=" << conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x % kBaseStringCount] << " y="
          << c[y % kBaseStringCount] << " res=" << conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x % kBaseStringCount] << " y="
          << c[y % kBaseStringCount] << " res=" << conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}
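
// A minimal sketch (hypothetical, not part of the original tests) making the sign-only contract
// above explicit: String.compareTo specifies only the sign of its result, so conformance checks
// compare signs rather than magnitudes. With a helper like this, the three EXPECT_TRUE checks
// above could collapse into a single EXPECT_EQ(Sign(e), Sign(conv.i)).
static inline int32_t Sign(int32_t v) {
  return (v > 0) - (v < 0);  // Branch-free: yields -1, 0, or 1.
}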


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set32_static(void);
extern "C" void art_quick_get32_static(void);
#endif

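// Round-trips a set of 32-bit values through the static-field stubs: art_quick_set32_static
// stores via the stub, art_quick_get32_static loads via the stub, and the two must agree. As the
// calls below show, the stubs take the field's dex index in the first argument and the value in
// the second, with the referrer method supplying the context to resolve the field.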
static void GetSet32Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              reinterpret_cast<uintptr_t>(&art_quick_set32_static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           reinterpret_cast<uintptr_t>(&art_quick_get32_static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set32_instance(void);
extern "C" void art_quick_get32_instance(void);
#endif

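// Instance-field variant: the object goes in the second argument and the value in the third. In
// addition to the stub/stub round-trip, the value is cross-checked and perturbed through the
// managed-side accessors ArtField::GetInt/SetInt, so the stubs and the field API must agree on
// the field's location.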
static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              reinterpret_cast<uintptr_t>(&art_quick_set32_instance),
                              self,
                              referrer);

    int32_t res = f->Get()->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            reinterpret_cast<uintptr_t>(&art_quick_get32_instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set_obj_static(void);
extern "C" void art_quick_get_obj_static(void);

static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            reinterpret_cast<uintptr_t>(&art_quick_set_obj_static),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         reinterpret_cast<uintptr_t>(&art_quick_get_obj_static),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif

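// Reference-field variant for statics. The null -> string -> null sequence below exercises both
// null and non-null reference stores; presumably the non-null store is the interesting case for
// the stub's write-barrier path.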
static void GetSetObjStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                            mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static((*f)->GetDexFieldIndex(), str, self, referrer, test);

  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set_obj_instance(void);
extern "C" void art_quick_get_obj_instance(void);

static void set_and_check_instance(Handle<mirror::ArtField>* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
                                   StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            reinterpret_cast<uintptr_t>(&art_quick_set_obj_instance),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         reinterpret_cast<uintptr_t>(&art_quick_get_obj_instance),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  EXPECT_EQ(val, f->Get()->GetObj(trg));
}
#endif

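// Reference-field variant for instance fields; set_and_check_instance additionally verifies the
// stored reference through the managed-side accessor ArtField::GetObj.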
static void GetSetObjInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                              Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


// TODO: Complete these tests for 32-bit architectures.

#if defined(__x86_64__) || defined(__aarch64__)
extern "C" void art_quick_set64_static(void);
extern "C" void art_quick_get64_static(void);
#endif

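// The 64-bit tests are guarded to x86-64/AArch64, presumably because a 64-bit value does not fit
// in a single register-sized argument on 32-bit targets; the setter accordingly goes through
// Invoke3UWithReferrer, which (as used here) carries a full 64-bit value.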
static void GetSet64Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__x86_64__) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3UWithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                               values[i],
                               reinterpret_cast<uintptr_t>(&art_quick_set64_static),
                               self,
                               referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           reinterpret_cast<uintptr_t>(&art_quick_get64_static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__x86_64__) || defined(__aarch64__)
extern "C" void art_quick_set64_instance(void);
extern "C" void art_quick_get64_instance(void);
#endif

static void GetSet64Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__x86_64__) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              reinterpret_cast<uintptr_t>(&art_quick_set64_instance),
                              self,
                              referrer);

    int64_t res = f->Get()->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            reinterpret_cast<uintptr_t>(&art_quick_get64_instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

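// Driver for the field tests: loads the AllFields test class, then walks its static and instance
// field lists, dispatching on each field's primitive type so one pass covers the 32-bit, 64-bit,
// and reference get/set stubs. One of the class's own methods serves as the referrer so the
// stubs can resolve field indices against the correct dex file.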
static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // Garbage is created during ClassLinker::Init.

  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);

  ScopedObjectAccess soa(self);
  StackHandleScope<5> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer.
  Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));

  // Play with it...

  // Static fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetSFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      Primitive::Type type = f->GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Skip arrays.
          if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
            GetSetObjStatic(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // Instance fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetIFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      Primitive::Type type = f->GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Skip arrays.
          if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
            GetSetObjInstance(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // TODO: Deallocate things.
}


TEST_F(StubTest, Fields32) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimInt);
}

TEST_F(StubTest, FieldsObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimNot);
}

TEST_F(StubTest, Fields64) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimLong);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_imt_conflict_trampoline(void);
#endif

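// Exercises the IMT (interface method table) conflict trampoline. ART dispatches interface calls
// through a fixed-size IMT, so two interface methods can hash to the same slot; that slot then
// holds a conflict method, and the trampoline disambiguates using a hidden argument (the
// interface method's dex method index) passed alongside the normal arguments.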
TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and the used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get the mirror representation.
  Handle<mirror::ArtMethod> contains_amethod(hs.NewHandle(soa.DecodeMethod(contains_jmethod)));

  // Patch up ArrayList.contains.
  if (contains_amethod.Get()->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod.Get()->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        GetTlsPtr(self)->quick_entrypoints.pQuickToInterpreterBridge));
  }

  // List

  // Load List and the used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get the mirror representation.
  Handle<mirror::ArtMethod> inf_contains(hs.NewHandle(soa.DecodeMethod(inf_contains_jmethod)));

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Sanity check: verify that there is a conflict for List.contains in ArrayList.

  mirror::Class* arraylist_class = soa.Decode<mirror::Class*>(arraylist_jclass);
  mirror::ArtMethod* m = arraylist_class->GetImTable()->Get(
      inf_contains->GetDexMethodIndex() % ClassLinker::kImtSize);

  if (!m->IsImtConflictMethod()) {
    LOG(WARNING) << "Test is meaningless, no IMT conflict in setup: " <<
        PrettyMethod(m, true);
    LOG(WARNING) << "Please update StubTest.IMT.";
    return;
  }

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invoke.

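  // The trampoline is entered as if List.contains were invoked on the ArrayList: the receiver
  // and argument occupy the normal argument slots, ArrayList.contains acts as the referrer, and
  // the hidden argument carries the interface method's dex method index for conflict resolution.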
  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   reinterpret_cast<uintptr_t>(&art_quick_imt_conflict_trampoline),
                                   self, contains_amethod.Get(),
                                   static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add the object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));

  // Invoke again.

  result = Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                        reinterpret_cast<size_t>(obj.Get()),
                                        reinterpret_cast<uintptr_t>(&art_quick_imt_conflict_trampoline),
                                        self, contains_amethod.Get(),
                                        static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

}  // namespace art