blob: aa18c977048a61893eff4cdbe84366c34b969815 [file] [log] [blame]
Andreas Gampe525cde22014-04-22 15:44:50 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "common_runtime_test.h"
Andreas Gampe6e4e59c2014-05-05 20:11:02 -070018#include "mirror/art_field-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070019#include "mirror/string-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070020
21#include <cstdio>
22
23namespace art {
24
25
// Test fixture for exercising ART's hand-written quick-entrypoint assembly
// stubs directly. The Invoke3* helpers build a minimal managed-code
// transition on the current thread, place up to three word-sized arguments
// into the architecture's argument registers, and jump to the stub's entry
// point, returning whatever the stub leaves in the return register.
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods for every CalleeSaveType that the runtime
      // does not already have; stubs may raise exceptions, which requires them.
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(type), type);
        }
      }
    }
  }

  // Shrink any caller-specified -Xmx option down to 4M; these tests allocate
  // very little and a small heap keeps them fast.
  void SetUpRuntimeOptions(Runtime::Options *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
  }

  // Helper function needed since TEST_F makes a new class.
  // Exposes the thread's tlsPtr_ block to test bodies (StubTest is presumably
  // a friend of Thread — confirm in thread.h).
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Invoke a stub with three word-sized arguments and no referrer method.
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  // Core trampoline: pushes a managed-stack fragment, stores `referrer` where
  // the stub expects the calling ArtMethod*, loads arg0..arg2 and (on ARM /
  // AArch64) the thread register, then calls `code`. On AArch64 it also
  // poisons d8-d15 before the call and verifies them afterwards, recording
  // the outcome in fp_result (0 = callee-saved FP registers preserved).
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             mirror::ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;  // Only the AArch64 path below ever sets this to non-zero.
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $4, %%esp"            // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : );  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        // NOTE(review): the "\n\n" below is presumably a typo for "\n\t";
        // it is harmless to the assembler, so left as-is.
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"      // Reload the saved r9 value from above.

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"     // r9 carries Thread* self into the stub.
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : );  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill space for d8 - d15
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp d8, d9, [sp]\n\t"
        "stp d10, d11, [sp, #16]\n\t"
        "stp d12, d13, [sp, #32]\n\t"
        "stp d14, d15, [sp, #48]\n\t"

        "sub sp, sp, #48\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 48\n\t"
        "stp %[referrer], x1, [sp]\n\t"// referrer, x1
        "stp x2, x3, [sp, #16]\n\t"    // Save x2, x3
        "stp x18, x30, [sp, #32]\n\t"  // Save x18(xSELF), xLR

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #8]\n\t"
        "str %[arg2], [sp, #16]\n\t"
        "str %[code], [sp, #24]\n\t"
        "str %[self], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        // with the recognizable pattern 0xebadfad0ebadfad0 (+0..+7), so we can
        // verify after the call that the stub preserved the callee-saved FP regs.
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params
        "ldr x0, [sp]\n\t"
        "ldr x1, [sp, #8]\n\t"
        "ldr x2, [sp, #16]\n\t"
        "ldr x3, [sp, #24]\n\t"
        "ldr x18, [sp, #32]\n\t"       // x18 carries Thread* self (xSELF).
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"


        "blr x3\n\t"              // Call the stub

        // Test d8 - d15. We can use x1 and x2. Rebuild the expected pattern and
        // compare register by register; any mismatch jumps to the failure label 1.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov %[fpr_result], #0\n\t"    // All FP registers intact.

        // Finish up.
        "2:\n\t"
        "ldp x1, x2, [sp, #8]\n\t"     // Restore x1, x2
        "ldp x3, x18, [sp, #24]\n\t"   // Restore x3, xSELF
        "ldr x30, [sp, #40]\n\t"       // Restore xLR
        "add sp, sp, #48\n\t"          // Free stack space
        ".cfi_adjust_cfa_offset -48\n\t"
        "mov %[result], x0\n\t"        // Save the result

        "ldp d8, d9, [sp]\n\t"         // Restore d8 - d15
        "ldp d10, d11, [sp, #16]\n\t"
        "ldp d12, d13, [sp, #32]\n\t"
        "ldp d14, d15, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset -64\n\t"

        "b 3f\n\t"                     // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov %[fpr_result], #1\n\t"
        "b 2b\n\t"                     // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result), [fpr_result] "=r" (fpr_result)
          // Use the result from x0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "x4", "x5", "x6", "x7", "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17");  // clobber.
#elif defined(__x86_64__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // Surface the AArch64 FP-register check; on other architectures this is
    // always the initial 0, so the expectation is trivially satisfied.
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Method with 32b arg0, 64b arg1
  // On 32-bit targets the 64-bit argument is split across arg1 (low word) and
  // arg2 (high word); 64-bit targets pass it through unchanged.
  size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
                              mirror::ArtMethod* referrer) {
#if defined(__x86_64__) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
#else
    // Need to split up arguments.
    uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
    uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);

    return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
#endif
  }

  // Method with 32b arg0, 32b arg1, 64b arg2
  // Only implemented for 64-bit targets; 32-bit would need a 4-register invoke
  // (returns 0 there — callers must tolerate the unimplemented case).
  size_t Invoke3UUWithReferrer(uint32_t arg0, uint32_t arg1, uint64_t arg2, uintptr_t code,
                               Thread* self, mirror::ArtMethod* referrer) {
#if defined(__x86_64__) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, referrer);
#else
    // TODO: Needs 4-param invoke.
    return 0;
#endif
  }

 protected:
  // Result of the AArch64 callee-saved FP register check performed by the last
  // Invoke3WithReferrer call: 0 = preserved, 1 = corrupted.
  size_t fp_result;
};
316
317
318#if defined(__i386__) || defined(__x86_64__)
319extern "C" void art_quick_memcpy(void);
320#endif
321
322TEST_F(StubTest, Memcpy) {
323#if defined(__i386__) || defined(__x86_64__)
324 Thread* self = Thread::Current();
325
326 uint32_t orig[20];
327 uint32_t trg[20];
328 for (size_t i = 0; i < 20; ++i) {
329 orig[i] = i;
330 trg[i] = 0;
331 }
332
333 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
334 10 * sizeof(uint32_t), reinterpret_cast<uintptr_t>(&art_quick_memcpy), self);
335
336 EXPECT_EQ(orig[0], trg[0]);
337
338 for (size_t i = 1; i < 4; ++i) {
339 EXPECT_NE(orig[i], trg[i]);
340 }
341
342 for (size_t i = 4; i < 14; ++i) {
343 EXPECT_EQ(orig[i], trg[i]);
344 }
345
346 for (size_t i = 14; i < 20; ++i) {
347 EXPECT_NE(orig[i], trg[i]);
348 }
349
350 // TODO: Test overlapping?
351
352#else
353 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
354 // Force-print to std::cout so it's also outside the logcat.
355 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
356#endif
357}
358
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_lock_object(void);
#endif

// Exercises the art_quick_lock_object stub: a first lock must thin-lock the
// object, repeated locks must bump the thin-lock count, and locking an object
// whose lock word already holds an identity hash must inflate to a fat lock.
TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  static constexpr size_t kThinLockLoops = 100;  // Re-lock this many times in total.

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());  // Two handles: obj and obj2 below.
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);  // Fresh object is unlocked.

  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  // Each re-lock of an already thin-locked object must increment the count.
  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
            reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();  // Lock word now stores the hash; locking must inflate.

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U,
          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
420
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700421
// Minimal deterministic pseudo-random generator for the lock stress test.
// Uses a Lehmer-style multiplier (48271) with modulus 2^31-1 plus a +13
// offset; the 32-bit multiply intentionally wraps (unsigned arithmetic).
// Not statistically strong — it only needs to be repeatable.
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  // Advance the state and return the new value.
  uint32_t next() {
    const uint32_t product = val_ * 48271;  // Wraps mod 2^32 for large states.
    val_ = product % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;  // Current state; public so tests could seed/inspect it.
};
433
434
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_lock_object(void);
extern "C" void art_quick_unlock_object(void);
#endif

// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
// Exercises art_quick_unlock_object: unlocking an unlocked object must raise
// IllegalMonitorState, a lock/unlock pair must return the object to unlocked,
// and a randomized stress phase locks/unlocks ten objects while verifying the
// lock word (thin count or fat-monitor entry count) after every step.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  static constexpr size_t kThinLockLoops = 100;  // Max thin-lock nesting used below.

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects = lock
  StackHandleScope<kNumberOfLocks + 1> hs(self);  // +1 for `obj` below.
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  // Unlock without ever locking first.
  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);  // Failed unlock left it unlocked.

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  // A matching unlock must return the object to the unlocked state.
  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.

  RandGen r(0x1234);  // Fixed seed: run is deterministic and reproducible.

  constexpr size_t kIterations = 10000;  // Number of iterations
  constexpr size_t kMoveToFat = 1000;    // Chance of 1:kMoveFat to make a lock fat.

  size_t counts[kNumberOfLocks];             // Current lock nesting depth per object.
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      // Requesting the identity hash occupies the lock word, forcing inflation.
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        // Not currently locked: the word just holds the hash code.
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        // Locked while hashed: must have inflated to a fat monitor.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        lock = true;   // Can't unlock below zero.
      } else if (counts[index] == kThinLockLoops) {
        lock = false;  // Cap nesting depth at kThinLockLoops.
      } else {
        // Randomly.
        lock = r.next() % 2 == 0;
      }

      if (lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          // Thin-lock count is zero-based, so depth n shows as count n-1.
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                    reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
      count--;
    }

    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    // Fully unlocked fat locks may keep their monitor, so accept either state.
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
583
// Entry point only: the body lives in the free function TestUnlockObject above
// (presumably so it can carry the NO_THREAD_SAFETY_ANALYSIS annotation — TODO confirm).
TEST_F(StubTest, UnlockObject) {
  TestUnlockObject(this);
}
Andreas Gampe525cde22014-04-22 15:44:50 -0700587
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_check_cast(void);
#endif

// Exercises the art_quick_check_cast stub with two array classes:
// same-class checks and String[] against Object[] must pass silently, while
// Object[] against String[] must leave an exception pending.
TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  Thread* self = Thread::Current();
  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

  // Object[] checked against Object[]: trivially compatible.
  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  // String[] checked against String[]: trivially compatible.
  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  // Object[] / String[] in this argument order: expected to succeed.
  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.

  // Reversed order: the incompatible check must raise an exception.
  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
636
637
Andreas Gampef4e910b2014-04-29 16:55:52 -0700638#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
Andreas Gampe525cde22014-04-22 15:44:50 -0700639extern "C" void art_quick_aput_obj_with_null_and_bound_check(void);
640// Do not check non-checked ones, we'd need handlers and stuff...
641#endif
642
643TEST_F(StubTest, APutObj) {
Hiroshi Yamauchid6881ae2014-04-28 17:21:48 -0700644 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
645
Andreas Gampef4e910b2014-04-29 16:55:52 -0700646#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
Andreas Gampe525cde22014-04-22 15:44:50 -0700647 Thread* self = Thread::Current();
648 // Create an object
649 ScopedObjectAccess soa(self);
650 // garbage is created during ClassLinker::Init
651
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700652 StackHandleScope<5> hs(soa.Self());
653 Handle<mirror::Class> c(
654 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
655 Handle<mirror::Class> ca(
656 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700657
658 // Build a string array of size 1
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700659 Handle<mirror::ObjectArray<mirror::Object>> array(
660 hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));
Andreas Gampe525cde22014-04-22 15:44:50 -0700661
662 // Build a string -> should be assignable
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700663 Handle<mirror::String> str_obj(
664 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700665
666 // Build a generic object -> should fail assigning
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700667 Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));
Andreas Gampe525cde22014-04-22 15:44:50 -0700668
669 // Play with it...
670
671 // 1) Success cases
Andreas Gampef4e910b2014-04-29 16:55:52 -0700672 // 1.1) Assign str_obj to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700673
674 EXPECT_FALSE(self->IsExceptionPending());
675
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700676 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe525cde22014-04-22 15:44:50 -0700677 reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
678
679 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700680 EXPECT_EQ(str_obj.Get(), array->Get(0));
Andreas Gampe525cde22014-04-22 15:44:50 -0700681
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700682 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampef4e910b2014-04-29 16:55:52 -0700683 reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
684
685 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700686 EXPECT_EQ(str_obj.Get(), array->Get(1));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700687
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700688 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampef4e910b2014-04-29 16:55:52 -0700689 reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
690
691 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700692 EXPECT_EQ(str_obj.Get(), array->Get(2));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700693
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700694 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampef4e910b2014-04-29 16:55:52 -0700695 reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
696
697 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700698 EXPECT_EQ(str_obj.Get(), array->Get(3));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700699
700 // 1.2) Assign null to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700701
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700702 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe525cde22014-04-22 15:44:50 -0700703 reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
704
705 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampef4e910b2014-04-29 16:55:52 -0700706 EXPECT_EQ(nullptr, array->Get(0));
707
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700708 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
Andreas Gampef4e910b2014-04-29 16:55:52 -0700709 reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
710
711 EXPECT_FALSE(self->IsExceptionPending());
712 EXPECT_EQ(nullptr, array->Get(1));
713
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700714 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
Andreas Gampef4e910b2014-04-29 16:55:52 -0700715 reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
716
717 EXPECT_FALSE(self->IsExceptionPending());
718 EXPECT_EQ(nullptr, array->Get(2));
719
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700720 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
Andreas Gampef4e910b2014-04-29 16:55:52 -0700721 reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
722
723 EXPECT_FALSE(self->IsExceptionPending());
724 EXPECT_EQ(nullptr, array->Get(3));
Andreas Gampe525cde22014-04-22 15:44:50 -0700725
726 // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.
727
728 // 2) Failure cases (str into str[])
729 // 2.1) Array = null
730 // TODO: Throwing NPE needs actual DEX code
731
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700732// Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe525cde22014-04-22 15:44:50 -0700733// reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
734//
735// EXPECT_TRUE(self->IsExceptionPending());
736// self->ClearException();
737
738 // 2.2) Index < 0
739
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700740 Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
741 reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe525cde22014-04-22 15:44:50 -0700742 reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
743
744 EXPECT_TRUE(self->IsExceptionPending());
745 self->ClearException();
746
747 // 2.3) Index > 0
748
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700749 Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe525cde22014-04-22 15:44:50 -0700750 reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
751
752 EXPECT_TRUE(self->IsExceptionPending());
753 self->ClearException();
754
755 // 3) Failure cases (obj into str[])
756
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700757 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
Andreas Gampe525cde22014-04-22 15:44:50 -0700758 reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
759
760 EXPECT_TRUE(self->IsExceptionPending());
761 self->ClearException();
762
763 // Tests done.
764#else
765 LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
766 // Force-print to std::cout so it's also outside the logcat.
767 std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
768#endif
769}
770
// Exercises the quick allocation entrypoints (pAllocObject, pAllocObjectResolved,
// pAllocObjectInitialized) for java.lang.Object, including an out-of-memory failure path.
TEST_F(StubTest, AllocObject) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Two handles are used: c here, and ca inside the OOM block below.
  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // pAllocObject takes a type index plus a referrer method to resolve it against.
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0)),  // arbitrary
                            0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    // The new instance must be of the requested class and pass heap verification.
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // pAllocObjectResolved takes the resolved class directly.
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // pAllocObjectInitialized assumes the class is already initialized.
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    // Cap the heap so the fill loop below terminates in reasonable time.
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    UniquePtr<StackHandleScope<kMaxHandles> > hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      // Allocate ever-smaller arrays until even small allocations fail, keeping every
      // success alive via a handle so the space stays occupied.
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        // Allocation failed: drop the pending OOME and retry with a smaller request.
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    // The heap is now full: the stub must fail cleanly with a pending exception and a
    // null result.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
892
// Exercises the quick array allocation entrypoints (pAllocArray, pAllocArrayResolved)
// for Object[], including an over-large request that must throw.
TEST_F(StubTest, AllocObjectArray) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  if (false) {
    // pAllocArray variant: type index + referrer method + element count.
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)),  // arbitrary
                            10U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArray),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // pAllocArrayResolved variant: resolved class + element count.
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 10U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    // The result must be an object array of the requested class and length.
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    // A GB-sized element count cannot be satisfied; the stub must throw and return null.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr),
                            GB,  // that should fail...
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
973
Alexei Zavjalov315ccab2014-05-01 23:24:05 +0700974
Andreas Gampe266340d2014-05-02 07:55:24 -0700975#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
Alexei Zavjalov315ccab2014-05-01 23:24:05 +0700976extern "C" void art_quick_string_compareto(void);
977#endif
978
979TEST_F(StubTest, StringCompareTo) {
980 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
981
Andreas Gampe266340d2014-05-02 07:55:24 -0700982#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
Alexei Zavjalov315ccab2014-05-01 23:24:05 +0700983 // TODO: Check the "Unresolved" allocation stubs
984
985 Thread* self = Thread::Current();
986 ScopedObjectAccess soa(self);
987 // garbage is created during ClassLinker::Init
988
989 // Create some strings
990 // Use array so we can index into it and use a matrix for expected results
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700991 // Setup: The first half is standard. The second half uses a non-zero offset.
992 // TODO: Shared backing arrays.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700993 static constexpr size_t kBaseStringCount = 7;
994 const char* c[kBaseStringCount] = { "", "", "a", "aa", "ab", "aac", "aac" , };
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700995
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700996 static constexpr size_t kStringCount = 2 * kBaseStringCount;
Alexei Zavjalov315ccab2014-05-01 23:24:05 +0700997
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700998 StackHandleScope<kStringCount> hs(self);
999 Handle<mirror::String> s[kStringCount];
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001000
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001001 for (size_t i = 0; i < kBaseStringCount; ++i) {
1002 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001003 }
1004
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001005 RandGen r(0x1234);
1006
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001007 for (size_t i = kBaseStringCount; i < kStringCount; ++i) {
1008 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i - kBaseStringCount]));
1009 int32_t length = s[i]->GetLength();
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001010 if (length > 1) {
1011 // Set a random offset and length.
1012 int32_t new_offset = 1 + (r.next() % (length - 1));
1013 int32_t rest = length - new_offset - 1;
1014 int32_t new_length = 1 + (rest > 0 ? r.next() % rest : 0);
1015
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001016 s[i]->SetField32<false>(mirror::String::CountOffset(), new_length);
1017 s[i]->SetField32<false>(mirror::String::OffsetOffset(), new_offset);
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001018 }
1019 }
1020
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001021 // TODO: wide characters
1022
1023 // Matrix of expectations. First component is first parameter. Note we only check against the
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001024 // sign, not the value. As we are testing random offsets, we need to compute this and need to
1025 // rely on String::CompareTo being correct.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001026 int32_t expected[kStringCount][kStringCount];
1027 for (size_t x = 0; x < kStringCount; ++x) {
1028 for (size_t y = 0; y < kStringCount; ++y) {
1029 expected[x][y] = s[x]->CompareTo(s[y].Get());
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001030 }
1031 }
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001032
1033 // Play with it...
1034
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001035 for (size_t x = 0; x < kStringCount; ++x) {
1036 for (size_t y = 0; y < kStringCount; ++y) {
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001037 // Test string_compareto x y
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001038 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
1039 reinterpret_cast<size_t>(s[y].Get()), 0U,
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001040 reinterpret_cast<uintptr_t>(&art_quick_string_compareto), self);
1041
1042 EXPECT_FALSE(self->IsExceptionPending());
1043
1044 // The result is a 32b signed integer
1045 union {
1046 size_t r;
1047 int32_t i;
1048 } conv;
1049 conv.r = result;
1050 int32_t e = expected[x][y];
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001051 EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1052 conv.r;
1053 EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1054 conv.r;
1055 EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1056 conv.r;
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001057 }
1058 }
1059
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001060 // TODO: Deallocate things.
1061
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001062 // Tests done.
1063#else
1064 LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
1065 // Force-print to std::cout so it's also outside the logcat.
1066 std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
1067 std::endl;
1068#endif
1069}
1070
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001071
1072#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
1073extern "C" void art_quick_set32_static(void);
1074extern "C" void art_quick_get32_static(void);
1075#endif
1076
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001077static void GetSet32Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001078 mirror::ArtMethod* referrer, StubTest* test)
1079 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1080#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
1081 constexpr size_t num_values = 7;
1082 uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
1083
1084 for (size_t i = 0; i < num_values; ++i) {
1085 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1086 static_cast<size_t>(values[i]),
1087 0U,
1088 reinterpret_cast<uintptr_t>(&art_quick_set32_static),
1089 self,
1090 referrer);
1091
1092 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1093 0U, 0U,
1094 reinterpret_cast<uintptr_t>(&art_quick_get32_static),
1095 self,
1096 referrer);
1097
1098 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1099 }
1100#else
1101 LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
1102 // Force-print to std::cout so it's also outside the logcat.
1103 std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
1104#endif
1105}
1106
1107
1108#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
1109extern "C" void art_quick_set32_instance(void);
1110extern "C" void art_quick_get32_instance(void);
1111#endif
1112
// Round-trips 32-bit values through the instance-field set/get quick stubs, cross-checking
// each step against the runtime's own ArtField accessors (GetInt/SetInt).
static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    // Write values[i] into the field of *obj via art_quick_set32_instance.
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              reinterpret_cast<uintptr_t>(&art_quick_set32_instance),
                              self,
                              referrer);

    // The stub's write must be visible through the regular field accessor.
    int32_t res = f->Get()->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    // Now mutate the field directly and check the getter stub observes the new value.
    res++;
    f->Get()->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            reinterpret_cast<uintptr_t>(&art_quick_get32_instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1148
1149
1150#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
1151extern "C" void art_quick_set_obj_static(void);
1152extern "C" void art_quick_get_obj_static(void);
1153
1154static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
1155 mirror::ArtMethod* referrer, StubTest* test)
1156 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1157 test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1158 reinterpret_cast<size_t>(val),
1159 0U,
1160 reinterpret_cast<uintptr_t>(&art_quick_set_obj_static),
1161 self,
1162 referrer);
1163
1164 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1165 0U, 0U,
1166 reinterpret_cast<uintptr_t>(&art_quick_get_obj_static),
1167 self,
1168 referrer);
1169
1170 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1171}
1172#endif
1173
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001174static void GetSetObjStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001175 mirror::ArtMethod* referrer, StubTest* test)
1176 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1177#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
1178 set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
1179
1180 // Allocate a string object for simplicity.
1181 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
1182 set_and_check_static((*f)->GetDexFieldIndex(), str, self, referrer, test);
1183
1184 set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
1185#else
1186 LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
1187 // Force-print to std::cout so it's also outside the logcat.
1188 std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
1189#endif
1190}
1191
1192
1193#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
1194extern "C" void art_quick_set_obj_instance(void);
1195extern "C" void art_quick_get_obj_instance(void);
1196
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001197static void set_and_check_instance(Handle<mirror::ArtField>* f, mirror::Object* trg,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001198 mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
1199 StubTest* test)
1200 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1201 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1202 reinterpret_cast<size_t>(trg),
1203 reinterpret_cast<size_t>(val),
1204 reinterpret_cast<uintptr_t>(&art_quick_set_obj_instance),
1205 self,
1206 referrer);
1207
1208 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1209 reinterpret_cast<size_t>(trg),
1210 0U,
1211 reinterpret_cast<uintptr_t>(&art_quick_get_obj_instance),
1212 self,
1213 referrer);
1214
1215 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1216
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001217 EXPECT_EQ(val, f->Get()->GetObj(trg));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001218}
1219#endif
1220
// Exercises the object instance-field stubs with the sequence null -> string -> null.
static void GetSetObjInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                              Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  // Note: obj->Get() is re-read for each call on purpose; the handle stays valid across the
  // allocation below even if the underlying object is moved.
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1238
1239
1240// TODO: Complete these tests for 32b architectures.
1241
1242#if defined(__x86_64__) || defined(__aarch64__)
1243extern "C" void art_quick_set64_static(void);
1244extern "C" void art_quick_get64_static(void);
1245#endif
1246
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001247static void GetSet64Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001248 mirror::ArtMethod* referrer, StubTest* test)
1249 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1250#if defined(__x86_64__) || defined(__aarch64__)
1251 constexpr size_t num_values = 8;
1252 uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
1253
1254 for (size_t i = 0; i < num_values; ++i) {
1255 test->Invoke3UWithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1256 values[i],
1257 reinterpret_cast<uintptr_t>(&art_quick_set64_static),
1258 self,
1259 referrer);
1260
1261 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1262 0U, 0U,
1263 reinterpret_cast<uintptr_t>(&art_quick_get64_static),
1264 self,
1265 referrer);
1266
1267 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1268 }
1269#else
1270 LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
1271 // Force-print to std::cout so it's also outside the logcat.
1272 std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
1273#endif
1274}
1275
1276
1277#if defined(__x86_64__) || defined(__aarch64__)
1278extern "C" void art_quick_set64_instance(void);
1279extern "C" void art_quick_get64_instance(void);
1280#endif
1281
// Round-trips 64-bit values through the instance-field set/get quick stubs, cross-checking
// each step against the runtime's own ArtField accessors (GetLong/SetLong). 64-bit only.
static void GetSet64Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__x86_64__) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    // Write values[i] into the field of *obj via art_quick_set64_instance.
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              reinterpret_cast<uintptr_t>(&art_quick_set64_instance),
                              self,
                              referrer);

    // The stub's write must be visible through the regular field accessor.
    int64_t res = f->Get()->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    // Now mutate the field directly and check the getter stub observes the new value.
    res++;
    f->Get()->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            reinterpret_cast<uintptr_t>(&art_quick_get64_instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1317
// Exercises the runtime's field get/set entrypoint stubs for every field of
// the "AllFields" test class whose primitive type matches |test_type|.
// Both static and instance fields are walked; reference-typed fields whose
// descriptor is an array are skipped. |test| supplies the stub invocation
// machinery; |self| must be runnable (callers transition before calling).
static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // garbage is created during ClassLinker::Init

  // Allocate the test object through JNI first; managed handles are only
  // created after taking ScopedObjectAccess below.
  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != NULL);
  jobject o = env->AllocObject(jc);
  CHECK(o != NULL);

  ScopedObjectAccess soa(self);
  // Five handles: object, class, referrer method, plus one field-array handle
  // for each of the static and instance sections below.
  StackHandleScope<5> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer
  Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));

  // Play with it...

  // Static fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetSFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      // Per-iteration scope for the current field; intentionally shadows the
      // outer |hs| so the outer scope's handle budget is not consumed.
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      FieldHelper fh(f.Get());
      Primitive::Type type = fh.GetTypeAsPrimitiveType();
      // Only exercise fields of the type under test; others fall through.
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && fh.GetTypeDescriptor()[0] != '[') {
            GetSetObjStatic(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // Instance fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetIFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      // Same per-iteration shadowing pattern as the static-field loop above.
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      FieldHelper fh(f.Get());
      Primitive::Type type = fh.GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && fh.GetTypeDescriptor()[0] != '[') {
            GetSetObjInstance(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // TODO: Deallocate things.
}
1410
1411
1412TEST_F(StubTest, Fields32) {
1413 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1414
1415 Thread* self = Thread::Current();
1416
1417 self->TransitionFromSuspendedToRunnable();
1418 LoadDex("AllFields");
1419 bool started = runtime_->Start();
1420 CHECK(started);
1421
1422 TestFields(self, this, Primitive::Type::kPrimInt);
1423}
1424
1425TEST_F(StubTest, FieldsObj) {
1426 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1427
1428 Thread* self = Thread::Current();
1429
1430 self->TransitionFromSuspendedToRunnable();
1431 LoadDex("AllFields");
1432 bool started = runtime_->Start();
1433 CHECK(started);
1434
1435 TestFields(self, this, Primitive::Type::kPrimNot);
1436}
1437
1438TEST_F(StubTest, Fields64) {
1439 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1440
1441 Thread* self = Thread::Current();
1442
1443 self->TransitionFromSuspendedToRunnable();
1444 LoadDex("AllFields");
1445 bool started = runtime_->Start();
1446 CHECK(started);
1447
1448 TestFields(self, this, Primitive::Type::kPrimLong);
1449}
1450
Andreas Gampe525cde22014-04-22 15:44:50 -07001451} // namespace art