/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "callee_save_frame.h"
#include "common_throws.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "dex_instruction-inl.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc/accounting/card_table-inl.h"
#include "interpreter/interpreter.h"
#include "method_reference.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"

namespace art {

// Visits the arguments as saved to the stack by a Runtime::kRefsAndArgs callee save frame.
class QuickArgumentVisitor {
  // Number of bytes for each out register in the caller method's frame.
  static constexpr size_t kBytesStackArgLocation = 4;
  // Frame size in bytes of a callee-save frame for RefsAndArgs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize =
      GetCalleeSaveFrameSize(kRuntimeISA, Runtime::kRefsAndArgs);
#if defined(__arm__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | ...        |    4x6 bytes callee saves
  // | R3         |
  // | R2         |
  // | R1         |
  // | S15        |
  // | :          |
  // | S0         |
  // |            |    4x2 bytes padding
  // | Method*    |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = kArm32QuickCodeUseSoftFloat;
  static constexpr bool kAlignPairRegister = !kArm32QuickCodeUseSoftFloat;
  static constexpr bool kQuickSoftFloatAbi = kArm32QuickCodeUseSoftFloat;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = !kArm32QuickCodeUseSoftFloat;
  static constexpr size_t kNumQuickGprArgs = 3;
  static constexpr size_t kNumQuickFprArgs = kArm32QuickCodeUseSoftFloat ? 0 : 16;
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset =
      arm::ArmCalleeSaveFpr1Offset(Runtime::kRefsAndArgs);  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset =
      arm::ArmCalleeSaveGpr1Offset(Runtime::kRefsAndArgs);  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset =
      arm::ArmCalleeSaveLrOffset(Runtime::kRefsAndArgs);  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__aarch64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | X29        |
  // | :          |
  // | X19        |
  // | X7         |
  // | :          |
  // | X1         |
  // | D7         |
  // | :          |
  // | D0         |
  // |            |    padding
  // | Method*    |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset =
      arm64::Arm64CalleeSaveFpr1Offset(Runtime::kRefsAndArgs);  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset =
      arm64::Arm64CalleeSaveGpr1Offset(Runtime::kRefsAndArgs);  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset =
      arm64::Arm64CalleeSaveLrOffset(Runtime::kRefsAndArgs);  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__) && !defined(__LP64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // | A0/Method* |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = true;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 16;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 60;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__) && defined(__LP64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | F7         |    f_arg7
  // | F6         |    f_arg6
  // | F5         |    f_arg5
  // | F4         |    f_arg4
  // | F3         |    f_arg3
  // | F2         |    f_arg2
  // | F1         |    f_arg1
  // | F0         |    f_arg0
  // | A7         |    arg7
  // | A6         |    arg6
  // | A5         |    arg5
  // | A4         |    arg4
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // |            |    padding
  // | A0/Method* |  <- sp
  // NOTE: for Mips64, when A0 is skipped, F0 is also skipped.
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  // GPR and FPR register assignments for Mips64 are interleaved: each argument consumes both a
  // GPR and an FPR slot, which VisitArguments() models by advancing the two indexes in lockstep
  // (see kGprFprLockstep below).
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 7;  // 7 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = true;

  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 24;  // Offset of first FPR arg (F1).
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80;  // Offset of first GPR arg (A1).
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 200;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__i386__)
  // The callee save frame is pointed to by SP.
  // | argN        |  |
  // | ...         |  |
  // | arg4        |  |
  // | arg3 spill  |  |  Caller's frame
  // | arg2 spill  |  |
  // | arg1 spill  |  |
  // | Method*     | ---
  // | Return      |
  // | EBP,ESI,EDI |    callee saves
  // | EBX         |    arg3
  // | EDX         |    arg2
  // | ECX         |    arg1
  // | XMM3        |    float arg 4
  // | XMM2        |    float arg 3
  // | XMM1        |    float arg 2
  // | XMM0        |    float arg 1
  // | EAX/Method* |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 4;  // 4 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 4;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4 + 4*8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 28 + 4*8;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__x86_64__)
  // The callee save frame is pointed to by SP.
  // | argN            |  |
  // | ...             |  |
  // | reg. arg spills |  |  Caller's frame
  // | Method*         | ---
  // | Return          |
  // | R15             |    callee save
  // | R14             |    callee save
  // | R13             |    callee save
  // | R12             |    callee save
  // | R9              |    arg5
  // | R8              |    arg4
  // | RSI/R6          |    arg1
  // | RBP/R5          |    callee save
  // | RBX/R3          |    callee save
  // | RDX/R2          |    arg2
  // | RCX/R1          |    arg3
  // | XMM7            |    float arg 8
  // | XMM6            |    float arg 7
  // | XMM5            |    float arg 6
  // | XMM4            |    float arg 5
  // | XMM3            |    float arg 4
  // | XMM2            |    float arg 3
  // | XMM1            |    float arg 2
  // | XMM0            |    float arg 1
  // | Padding         |
  // | RDI/Method*     |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr size_t kNumQuickGprArgs = 5;  // 5 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80 + 4*8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 168 + 4*8;  // Offset of return address.
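  // Note: the GPR argument registers are not spilled in argument order. Going up from
  // Gpr1Offset the spill slots hold RCX, RDX, RBX, RBP, RSI, R8, R9 (see the layout above),
  // while the argument order is RSI, RDX, RCX, R8, R9, hence the explicit mapping below.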
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    switch (gpr_index) {
      case 0: return (4 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 1: return (1 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 2: return (0 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 3: return (5 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 4: return (6 * GetBytesPerGprSpillLocation(kRuntimeISA));
      default:
        LOG(FATAL) << "Unexpected GPR index: " << gpr_index;
        return 0;
    }
  }
#else
#error "Unsupported architecture"
#endif

 public:
  // Special handling for proxy methods. Proxy methods are instance methods, so the
  // 'this' object is the 1st argument. They also have the same frame layout as the
  // kRefsAndArgs runtime method. Since 'this' is a reference, it is located in the
  // 1st GPR.
  static mirror::Object* GetProxyThisObject(StackReference<mirror::ArtMethod>* sp)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    CHECK(sp->AsMirrorPtr()->IsProxyMethod());
    CHECK_EQ(kQuickCalleeSaveFrame_RefAndArgs_FrameSize, sp->AsMirrorPtr()->GetFrameSizeInBytes());
    CHECK_GT(kNumQuickGprArgs, 0u);
    constexpr uint32_t kThisGprIndex = 0u;  // 'this' is in the 1st GPR.
    size_t this_arg_offset = kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset +
        GprIndexToGprOffset(kThisGprIndex);
    uint8_t* this_arg_address = reinterpret_cast<uint8_t*>(sp) + this_arg_offset;
    return reinterpret_cast<StackReference<mirror::Object>*>(this_arg_address)->AsMirrorPtr();
  }

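  // Returns the caller's ArtMethod by stepping over this callee-save frame: the caller's
  // StackReference<ArtMethod> sits immediately above it, at the caller's stack pointer.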
  static mirror::ArtMethod* GetCallingMethod(StackReference<mirror::ArtMethod>* sp)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(sp->AsMirrorPtr()->IsCalleeSaveMethod());
    uint8_t* previous_sp =
        reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize;
    return reinterpret_cast<StackReference<mirror::ArtMethod>*>(previous_sp)->AsMirrorPtr();
  }

  // For the given quick ref and args quick frame, return the caller's PC.
  static uintptr_t GetCallingPc(StackReference<mirror::ArtMethod>* sp)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(sp->AsMirrorPtr()->IsCalleeSaveMethod());
    uint8_t* lr = reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_LrOffset;
    return *reinterpret_cast<uintptr_t*>(lr);
  }

  QuickArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static, const char* shorty,
                       uint32_t shorty_len) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) :
      is_static_(is_static), shorty_(shorty), shorty_len_(shorty_len),
      gpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset),
      fpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset),
      stack_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize
                  + sizeof(StackReference<mirror::ArtMethod>)),  // Skip StackReference<ArtMethod>.
      gpr_index_(0), fpr_index_(0), fpr_double_index_(0), stack_index_(0),
      cur_type_(Primitive::kPrimVoid), is_split_long_or_double_(false) {
    static_assert(kQuickSoftFloatAbi == (kNumQuickFprArgs == 0),
                  "Number of Quick FPR arguments unexpected");
    static_assert(!(kQuickSoftFloatAbi && kQuickDoubleRegAlignedFloatBackFilled),
                  "Double alignment unexpected");
    // For register alignment, we want the double counter (fpr_double_index_) to stay even, so we
    // require an even number of FPR argument registers.
    static_assert(!kQuickDoubleRegAlignedFloatBackFilled || kNumQuickFprArgs % 2 == 0,
                  "Number of Quick FPR arguments not even");
  }

  virtual ~QuickArgumentVisitor() {}

  virtual void Visit() = 0;

  Primitive::Type GetParamPrimitiveType() const {
    return cur_type_;
  }

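  // Returns the address of the current argument's value: an FPR spill slot for floating-point
  // arguments under a hard-float ABI, a GPR spill slot for other register arguments, or the
  // caller's out area on the stack once the registers are exhausted.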
  uint8_t* GetParamAddress() const {
    if (!kQuickSoftFloatAbi) {
      Primitive::Type type = GetParamPrimitiveType();
      if (UNLIKELY((type == Primitive::kPrimDouble) || (type == Primitive::kPrimFloat))) {
        if (type == Primitive::kPrimDouble && kQuickDoubleRegAlignedFloatBackFilled) {
          if (fpr_double_index_ + 2 < kNumQuickFprArgs + 1) {
            return fpr_args_ + (fpr_double_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
          }
        } else if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
          return fpr_args_ + (fpr_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
        }
        return stack_args_ + (stack_index_ * kBytesStackArgLocation);
      }
    }
    if (gpr_index_ < kNumQuickGprArgs) {
      return gpr_args_ + GprIndexToGprOffset(gpr_index_);
    }
    return stack_args_ + (stack_index_ * kBytesStackArgLocation);
  }

  bool IsSplitLongOrDouble() const {
    if ((GetBytesPerGprSpillLocation(kRuntimeISA) == 4) || (GetBytesPerFprSpillLocation(kRuntimeISA) == 4)) {
      return is_split_long_or_double_;
    } else {
      return false;  // An optimization for when GPR and FPRs are 64bit.
    }
  }

  bool IsParamAReference() const {
    return GetParamPrimitiveType() == Primitive::kPrimNot;
  }

  bool IsParamALongOrDouble() const {
    Primitive::Type type = GetParamPrimitiveType();
    return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
  }

  uint64_t ReadSplitLongParam() const {
    // The split long is always available through the stack.
    return *reinterpret_cast<uint64_t*>(stack_args_
        + stack_index_ * kBytesStackArgLocation);
  }

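  // When kGprFprLockstep is set (Mips64 above), GPR and FPR argument slots are assigned in
  // lockstep, so consuming a register of one kind also consumes the corresponding slot of the
  // other kind.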
  void IncGprIndex() {
    gpr_index_++;
    if (kGprFprLockstep) {
      fpr_index_++;
    }
  }

  void IncFprIndex() {
    fpr_index_++;
    if (kGprFprLockstep) {
      gpr_index_++;
    }
  }

  void VisitArguments() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // (a) 'stack_args_' should point to the method's first argument.
    // (b) Whatever the argument type, 'stack_index_' is advanced for every argument visited.
    gpr_index_ = 0;
    fpr_index_ = 0;
    if (kQuickDoubleRegAlignedFloatBackFilled) {
      fpr_double_index_ = 0;
    }
    stack_index_ = 0;
    if (!is_static_) {  // Handle the 'this' argument.
      cur_type_ = Primitive::kPrimNot;
      is_split_long_or_double_ = false;
      Visit();
      stack_index_++;
      if (kNumQuickGprArgs > 0) {
        IncGprIndex();
      }
    }
    for (uint32_t shorty_index = 1; shorty_index < shorty_len_; ++shorty_index) {
      cur_type_ = Primitive::GetType(shorty_[shorty_index]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          is_split_long_or_double_ = false;
          Visit();
          stack_index_++;
          if (gpr_index_ < kNumQuickGprArgs) {
            IncGprIndex();
          }
          break;
        case Primitive::kPrimFloat:
          is_split_long_or_double_ = false;
          Visit();
          stack_index_++;
          if (kQuickSoftFloatAbi) {
            if (gpr_index_ < kNumQuickGprArgs) {
              IncGprIndex();
            }
          } else {
            if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
              IncFprIndex();
              if (kQuickDoubleRegAlignedFloatBackFilled) {
                // Double should not overlap with float.
                // For example, if fpr_index_ = 3, fpr_double_index_ should be at least 4.
                fpr_double_index_ = std::max(fpr_double_index_, RoundUp(fpr_index_, 2));
                // Float should not overlap with double.
                if (fpr_index_ % 2 == 0) {
                  fpr_index_ = std::max(fpr_double_index_, fpr_index_);
                }
              }
            }
          }
          break;
        case Primitive::kPrimDouble:
        case Primitive::kPrimLong:
          if (kQuickSoftFloatAbi || (cur_type_ == Primitive::kPrimLong)) {
            if (cur_type_ == Primitive::kPrimLong && kAlignPairRegister && gpr_index_ == 0) {
              // Currently, this is only for ARM, where the first available parameter register
              // is R1. So we skip it, and use R2 instead.
              IncGprIndex();
            }
            is_split_long_or_double_ = (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) &&
                ((gpr_index_ + 1) == kNumQuickGprArgs);
            if (!kSplitPairAcrossRegisterAndStack && is_split_long_or_double_) {
              // We don't want to split this. Pass over this register.
              gpr_index_++;
              is_split_long_or_double_ = false;
            }
            Visit();
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
            if (gpr_index_ < kNumQuickGprArgs) {
              IncGprIndex();
              if (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) {
                if (gpr_index_ < kNumQuickGprArgs) {
                  IncGprIndex();
                }
              }
            }
          } else {
            is_split_long_or_double_ = (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) &&
                ((fpr_index_ + 1) == kNumQuickFprArgs) && !kQuickDoubleRegAlignedFloatBackFilled;
            Visit();
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
            if (kQuickDoubleRegAlignedFloatBackFilled) {
              if (fpr_double_index_ + 2 < kNumQuickFprArgs + 1) {
                fpr_double_index_ += 2;
                // Float should not overlap with double.
                if (fpr_index_ % 2 == 0) {
                  fpr_index_ = std::max(fpr_double_index_, fpr_index_);
                }
              }
            } else if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
              IncFprIndex();
              if (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) {
                if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
                  IncFprIndex();
                }
              }
            }
          }
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty_;
      }
    }
  }

 protected:
  const bool is_static_;
  const char* const shorty_;
  const uint32_t shorty_len_;

 private:
  uint8_t* const gpr_args_;    // Address of GPR arguments in callee save frame.
  uint8_t* const fpr_args_;    // Address of FPR arguments in callee save frame.
  uint8_t* const stack_args_;  // Address of stack arguments in caller's frame.
  uint32_t gpr_index_;         // Index into spilled GPRs.
  // Index into spilled FPRs.
  // When kQuickDoubleRegAlignedFloatBackFilled is set, it may index a hole while
  // fpr_double_index_ holds a higher register number.
  uint32_t fpr_index_;
  // Index into spilled FPRs for aligned doubles.
  // Only used when kQuickDoubleRegAlignedFloatBackFilled. It is the next available double
  // register, indexed in terms of singles, and may be behind fpr_index_.
  uint32_t fpr_double_index_;
  uint32_t stack_index_;       // Index into arguments on the stack.
  // The current type of argument during VisitArguments.
  Primitive::Type cur_type_;
  // Does a 64bit parameter straddle the register and stack arguments?
  bool is_split_long_or_double_;
};

// Returns the 'this' object of a proxy method. This function is only used by StackVisitor. It
// makes it possible to use the QuickArgumentVisitor constants without moving all the code into
// its own module.
extern "C" mirror::Object* artQuickGetProxyThisObject(StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return QuickArgumentVisitor::GetProxyThisObject(sp);
}

// Visits arguments on the stack placing them into the shadow frame.
class BuildQuickShadowFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickShadowFrameVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
                               const char* shorty, uint32_t shorty_len, ShadowFrame* sf,
                               size_t first_arg_reg) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), sf_(sf), cur_reg_(first_arg_reg) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

 private:
  ShadowFrame* const sf_;
  uint32_t cur_reg_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickShadowFrameVisitor);
};

void BuildQuickShadowFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        sf_->SetVRegLong(cur_reg_, ReadSplitLongParam());
      } else {
        sf_->SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
      }
      ++cur_reg_;
      break;
    case Primitive::kPrimNot: {
        StackReference<mirror::Object>* stack_ref =
            reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
        sf_->SetVRegReference(cur_reg_, stack_ref->AsMirrorPtr());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      sf_->SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  ++cur_reg_;
}

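// Entry point of the quick-to-interpreter bridge: builds a shadow frame from the quick
// argument registers and stack, runs the method in the interpreter, and returns the result
// packed in a uint64_t.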
extern "C" uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
                                                StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Ensure we don't get thread suspension until the object arguments are safely in the shadow
  // frame.
  ScopedQuickEntrypointChecks sqec(self);

  if (method->IsAbstract()) {
    ThrowAbstractMethodError(method);
    return 0;
  } else {
    DCHECK(!method->IsNative()) << PrettyMethod(method);
    const char* old_cause = self->StartAssertNoThreadSuspension(
        "Building interpreter shadow frame");
    const DexFile::CodeItem* code_item = method->GetCodeItem();
    DCHECK(code_item != nullptr) << PrettyMethod(method);
    uint16_t num_regs = code_item->registers_size_;
    void* memory = alloca(ShadowFrame::ComputeSize(num_regs));
    // No last shadow coming from quick.
    ShadowFrame* shadow_frame(ShadowFrame::Create(num_regs, nullptr, method, 0, memory));
    size_t first_arg_reg = code_item->registers_size_ - code_item->ins_size_;
    uint32_t shorty_len = 0;
    const char* shorty = method->GetShorty(&shorty_len);
    BuildQuickShadowFrameVisitor shadow_frame_builder(sp, method->IsStatic(), shorty, shorty_len,
                                                      shadow_frame, first_arg_reg);
    shadow_frame_builder.VisitArguments();
    const bool needs_initialization =
        method->IsStatic() && !method->GetDeclaringClass()->IsInitialized();
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);
    self->PushShadowFrame(shadow_frame);
    self->EndAssertNoThreadSuspension(old_cause);

    if (needs_initialization) {
      // Ensure static method's class is initialized.
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(shadow_frame->GetMethod()->GetDeclaringClass()));
      if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
        DCHECK(Thread::Current()->IsExceptionPending()) << PrettyMethod(shadow_frame->GetMethod());
        self->PopManagedStackFragment(fragment);
        return 0;
      }
    }
    JValue result = interpreter::EnterInterpreterFromEntryPoint(self, code_item, shadow_frame);
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // Request a stack deoptimization if needed.
    mirror::ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
    if (UNLIKELY(Dbg::IsForcedInterpreterNeededForUpcall(self, caller))) {
      self->SetException(Thread::GetDeoptimizationException());
      self->SetDeoptimizationReturnValue(result);
    }

    // No need to restore the args since the method has already been run by the interpreter.
    return result.GetJ();
  }
}

// Visits arguments on the stack, placing them into the args vector; Object* arguments are
// converted to jobjects.
class BuildQuickArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
                            const char* shorty, uint32_t shorty_len,
                            ScopedObjectAccessUnchecked* soa, std::vector<jvalue>* args) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa), args_(args) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  std::vector<jvalue>* const args_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*>> references_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickArgumentVisitor);
};

void BuildQuickArgumentVisitor::Visit() {
  jvalue val;
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      val.l = soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
      references_.push_back(std::make_pair(val.l, stack_ref));
      break;
    }
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        val.j = ReadSplitLongParam();
      } else {
        val.j = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      val.i = *reinterpret_cast<jint*>(GetParamAddress());
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  args_->push_back(val);
}

void BuildQuickArgumentVisitor::FixupReferences() {
  // Fixup any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
// which is responsible for recording callee save registers. We explicitly place the incoming
// reference arguments into jobjects (so they survive GC). We then invoke the invocation handler, a
// field within the proxy object, which boxes the primitive arguments and deals with error cases.
extern "C" uint64_t artQuickProxyInvokeHandler(mirror::ArtMethod* proxy_method,
                                               mirror::Object* receiver,
                                               Thread* self, StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  DCHECK(proxy_method->IsProxyMethod()) << PrettyMethod(proxy_method);
  DCHECK(receiver->GetClass()->IsProxyClass()) << PrettyMethod(proxy_method);
  // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
  const char* old_cause =
      self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
  // Register the top of the managed stack, making stack crawlable.
  DCHECK_EQ(sp->AsMirrorPtr(), proxy_method) << PrettyMethod(proxy_method);
  DCHECK_EQ(proxy_method->GetFrameSizeInBytes(),
            Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes())
      << PrettyMethod(proxy_method);
  self->VerifyStack();
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  // Create local ref. copies of proxy method and the receiver.
  jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);

  // Place the arguments into the args vector and remove the receiver.
  mirror::ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy();
  CHECK(!non_proxy_method->IsStatic()) << PrettyMethod(proxy_method) << " "
                                       << PrettyMethod(non_proxy_method);
  std::vector<jvalue> args;
  uint32_t shorty_len = 0;
  const char* shorty = proxy_method->GetShorty(&shorty_len);
  BuildQuickArgumentVisitor local_ref_visitor(sp, false, shorty, shorty_len, &soa, &args);

  local_ref_visitor.VisitArguments();
  DCHECK_GT(args.size(), 0U) << PrettyMethod(proxy_method);
  args.erase(args.begin());

  // Convert proxy method into expected interface method.
  mirror::ArtMethod* interface_method = proxy_method->FindOverriddenMethod();
  DCHECK(interface_method != nullptr) << PrettyMethod(proxy_method);
  DCHECK(!interface_method->IsProxyMethod()) << PrettyMethod(interface_method);
  jobject interface_method_jobj = soa.AddLocalReference<jobject>(interface_method);

  // All naked Object*s should now be in jobjects, so it's safe to go into the main invoke code
  // that performs allocations.
  self->EndAssertNoThreadSuspension(old_cause);
  JValue result = InvokeProxyInvocationHandler(soa, shorty, rcvr_jobj, interface_method_jobj, args);
  // Restore references which might have moved.
  local_ref_visitor.FixupReferences();
  return result.GetJ();
}

// Read object references held in arguments from quick frames and place them in JNI local
// references so they don't get garbage collected.
class RememberForGcArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  RememberForGcArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
                               const char* shorty, uint32_t shorty_len,
                               ScopedObjectAccessUnchecked* soa) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*>> references_;

  DISALLOW_COPY_AND_ASSIGN(RememberForGcArgumentVisitor);
};

Ian Rogers | 9758f79 | 2014-03-13 09:02:55 -0700 | [diff] [blame] | 795 | void RememberForGcArgumentVisitor::Visit() { |
| 796 | if (IsParamAReference()) { |
| 797 | StackReference<mirror::Object>* stack_ref = |
| 798 | reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress()); |
| 799 | jobject reference = |
| 800 | soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr()); |
| 801 | references_.push_back(std::make_pair(reference, stack_ref)); |
| 802 | } |
| 803 | } |
| 804 | |
| 805 | void RememberForGcArgumentVisitor::FixupReferences() { |
| 806 | // Fix up any references which may have changed.
| 807 | for (const auto& pair : references_) { |
| 808 | pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first)); |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 809 | soa_->Env()->DeleteLocalRef(pair.first); |
Ian Rogers | 9758f79 | 2014-03-13 09:02:55 -0700 | [diff] [blame] | 810 | } |
| 811 | } |
| 812 | |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 813 | // Lazily resolve a method for quick. Called by stub code. |
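// Overall flow: (1) stash the argument object references in JNI local refs via
// RememberForGcArgumentVisitor, (2) if the called method is not yet known, decode the invoke
// instruction at the caller's dex pc to recover the invoke type and method index, (3) resolve the
// method through the ClassLinker, (4) for virtual/interface calls refine the target using the
// receiver's class, (5) ensure the declaring class is initialized and pick the code to run
// (interpreter bridge, instrumentation entry point, or compiled code), and (6) fix up the saved
// references and patch the resolved method into the callee-save frame.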
Brian Carlstrom | ea46f95 | 2013-07-30 01:26:50 -0700 | [diff] [blame] | 814 | extern "C" const void* artQuickResolutionTrampoline(mirror::ArtMethod* called, |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 815 | mirror::Object* receiver, |
Andreas Gampe | cf4035a | 2014-05-28 22:43:01 -0700 | [diff] [blame] | 816 | Thread* self, |
| 817 | StackReference<mirror::ArtMethod>* sp) |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 818 | SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 819 | ScopedQuickEntrypointChecks sqec(self); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 820 | // Start new JNI local reference state |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 821 | JNIEnvExt* env = self->GetJniEnv(); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 822 | ScopedObjectAccessUnchecked soa(env); |
| 823 | ScopedJniEnvLocalRefState env_state(env); |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 824 | const char* old_cause = self->StartAssertNoThreadSuspension("Quick method resolution set up"); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 825 | |
| 826 | // Compute details about the called method (avoid GCs) |
| 827 | ClassLinker* linker = Runtime::Current()->GetClassLinker(); |
Brian Carlstrom | ea46f95 | 2013-07-30 01:26:50 -0700 | [diff] [blame] | 828 | mirror::ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 829 | InvokeType invoke_type; |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 830 | MethodReference called_method(nullptr, 0); |
| 831 | const bool called_method_known_on_entry = !called->IsRuntimeMethod(); |
| 832 | if (!called_method_known_on_entry) { |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 833 | uint32_t dex_pc = caller->ToDexPc(QuickArgumentVisitor::GetCallingPc(sp)); |
| 834 | const DexFile::CodeItem* code; |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 835 | called_method.dex_file = caller->GetDexFile(); |
Mathieu Chartier | bfd9a43 | 2014-05-21 17:43:44 -0700 | [diff] [blame] | 836 | code = caller->GetCodeItem(); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 837 | CHECK_LT(dex_pc, code->insns_size_in_code_units_); |
| 838 | const Instruction* instr = Instruction::At(&code->insns_[dex_pc]); |
| 839 | Instruction::Code instr_code = instr->Opcode(); |
| 840 | bool is_range; |
| 841 | switch (instr_code) { |
| 842 | case Instruction::INVOKE_DIRECT: |
| 843 | invoke_type = kDirect; |
| 844 | is_range = false; |
| 845 | break; |
| 846 | case Instruction::INVOKE_DIRECT_RANGE: |
| 847 | invoke_type = kDirect; |
| 848 | is_range = true; |
| 849 | break; |
| 850 | case Instruction::INVOKE_STATIC: |
| 851 | invoke_type = kStatic; |
| 852 | is_range = false; |
| 853 | break; |
| 854 | case Instruction::INVOKE_STATIC_RANGE: |
| 855 | invoke_type = kStatic; |
| 856 | is_range = true; |
| 857 | break; |
| 858 | case Instruction::INVOKE_SUPER: |
| 859 | invoke_type = kSuper; |
| 860 | is_range = false; |
| 861 | break; |
| 862 | case Instruction::INVOKE_SUPER_RANGE: |
| 863 | invoke_type = kSuper; |
| 864 | is_range = true; |
| 865 | break; |
| 866 | case Instruction::INVOKE_VIRTUAL: |
| 867 | invoke_type = kVirtual; |
| 868 | is_range = false; |
| 869 | break; |
| 870 | case Instruction::INVOKE_VIRTUAL_RANGE: |
| 871 | invoke_type = kVirtual; |
| 872 | is_range = true; |
| 873 | break; |
| 874 | case Instruction::INVOKE_INTERFACE: |
| 875 | invoke_type = kInterface; |
| 876 | is_range = false; |
| 877 | break; |
| 878 | case Instruction::INVOKE_INTERFACE_RANGE: |
| 879 | invoke_type = kInterface; |
| 880 | is_range = true; |
| 881 | break; |
| 882 | default: |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 883 | LOG(FATAL) << "Unexpected call into trampoline: " << instr->DumpString(nullptr); |
| 884 | UNREACHABLE(); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 885 | } |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 886 | called_method.dex_method_index = (is_range) ? instr->VRegB_3rc() : instr->VRegB_35c(); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 887 | } else { |
| 888 | invoke_type = kStatic; |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 889 | called_method.dex_file = called->GetDexFile(); |
| 890 | called_method.dex_method_index = called->GetDexMethodIndex(); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 891 | } |
| 892 | uint32_t shorty_len; |
| 893 | const char* shorty = |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 894 | called_method.dex_file->GetMethodShorty( |
| 895 | called_method.dex_file->GetMethodId(called_method.dex_method_index), &shorty_len); |
Mathieu Chartier | 590fee9 | 2013-09-13 13:46:47 -0700 | [diff] [blame] | 896 | RememberForGcArgumentVisitor visitor(sp, invoke_type == kStatic, shorty, shorty_len, &soa); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 897 | visitor.VisitArguments(); |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 898 | self->EndAssertNoThreadSuspension(old_cause); |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 899 | const bool virtual_or_interface = invoke_type == kVirtual || invoke_type == kInterface; |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 900 | // Resolve the method, filling in the dex cache.
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 901 | if (!called_method_known_on_entry) { |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 902 | StackHandleScope<1> hs(self); |
Mathieu Chartier | 0cd8135 | 2014-05-22 16:48:55 -0700 | [diff] [blame] | 903 | mirror::Object* dummy = nullptr; |
| 904 | HandleWrapper<mirror::Object> h_receiver( |
| 905 | hs.NewHandleWrapper(virtual_or_interface ? &receiver : &dummy)); |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 906 | DCHECK_EQ(caller->GetDexFile(), called_method.dex_file); |
| 907 | called = linker->ResolveMethod(self, called_method.dex_method_index, &caller, invoke_type); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 908 | } |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 909 | const void* code = nullptr; |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 910 | if (LIKELY(!self->IsExceptionPending())) { |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 911 | // Incompatible class change should have been handled in resolve method. |
Brian Carlstrom | 2ec6520 | 2014-03-03 15:16:37 -0800 | [diff] [blame] | 912 | CHECK(!called->CheckIncompatibleClassChange(invoke_type)) |
| 913 | << PrettyMethod(called) << " " << invoke_type; |
Mathieu Chartier | 55871bf | 2014-02-27 10:24:50 -0800 | [diff] [blame] | 914 | if (virtual_or_interface) { |
| 915 | // Refine called method based on receiver. |
| 916 | CHECK(receiver != nullptr) << invoke_type; |
Mingyao Yang | f486778 | 2014-05-05 11:55:02 -0700 | [diff] [blame] | 917 | |
| 918 | mirror::ArtMethod* orig_called = called; |
Mathieu Chartier | 55871bf | 2014-02-27 10:24:50 -0800 | [diff] [blame] | 919 | if (invoke_type == kVirtual) { |
| 920 | called = receiver->GetClass()->FindVirtualMethodForVirtual(called); |
| 921 | } else { |
| 922 | called = receiver->GetClass()->FindVirtualMethodForInterface(called); |
| 923 | } |
Mingyao Yang | f486778 | 2014-05-05 11:55:02 -0700 | [diff] [blame] | 924 | |
| 925 | CHECK(called != nullptr) << PrettyMethod(orig_called) << " " |
| 926 | << PrettyTypeOf(receiver) << " " |
| 927 | << invoke_type << " " << orig_called->GetVtableIndex(); |
| 928 | |
Ian Rogers | 83883d7 | 2013-10-21 21:07:24 -0700 | [diff] [blame] | 929 | // We came here because of sharpening. Ensure the dex cache is up-to-date on the method index
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 930 | // of the sharpened method, avoiding dirtying the dex cache if possible.
Ian Rogers | 00f1527 | 2014-12-02 16:55:46 -0800 | [diff] [blame] | 931 | // Note: called_method.dex_method_index references the dex method before the
| 932 | // FindVirtualMethodFor... refinement above. That is fine for FindDexMethodIndexInOtherDexFile,
| 933 | // which only cares about the name and signature.
| 934 | uint32_t update_dex_cache_method_index = called->GetDexMethodIndex(); |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 935 | if (!called->HasSameDexCacheResolvedMethods(caller)) { |
Ian Rogers | 83883d7 | 2013-10-21 21:07:24 -0700 | [diff] [blame] | 936 | // Calling from one dex file to another, so we need to compute the method index appropriate to
Vladimir Marko | bbcc0c0 | 2014-02-03 14:08:42 +0000 | [diff] [blame] | 937 | // the caller's dex file. Since we get here only if the originally called method was a runtime
| 938 | // method, we've got the correct dex_file and a dex_method_idx from above.
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 939 | DCHECK(!called_method_known_on_entry); |
| 940 | DCHECK_EQ(caller->GetDexFile(), called_method.dex_file); |
| 941 | const DexFile* caller_dex_file = called_method.dex_file; |
| 942 | uint32_t caller_method_name_and_sig_index = called_method.dex_method_index; |
| 943 | update_dex_cache_method_index = |
| 944 | called->FindDexMethodIndexInOtherDexFile(*caller_dex_file, |
| 945 | caller_method_name_and_sig_index); |
| 946 | } |
| 947 | if ((update_dex_cache_method_index != DexFile::kDexNoIndex) && |
| 948 | (caller->GetDexCacheResolvedMethod(update_dex_cache_method_index) != called)) { |
| 949 | caller->SetDexCacheResolvedMethod(update_dex_cache_method_index, called); |
Ian Rogers | 83883d7 | 2013-10-21 21:07:24 -0700 | [diff] [blame] | 950 | } |
Mathieu Chartier | e4a91bb | 2015-01-28 13:11:44 -0800 | [diff] [blame] | 951 | } else if (invoke_type == kStatic) { |
| 952 | const auto called_dex_method_idx = called->GetDexMethodIndex(); |
| 953 | // For static invokes, we may dispatch to the static method in the superclass but resolve |
| 954 | // using the subclass. To prevent getting slow paths on each invoke, we force set the |
| 955 | // resolved method for the super class dex method index if we are in the same dex file. |
| 956 | // b/19175856 |
| 957 | if (called->GetDexFile() == called_method.dex_file && |
| 958 | called_method.dex_method_index != called_dex_method_idx) { |
| 959 | called->GetDexCache()->SetResolvedMethod(called_dex_method_idx, called); |
| 960 | } |
Ian Rogers | 83883d7 | 2013-10-21 21:07:24 -0700 | [diff] [blame] | 961 | } |
Daniel Mihalyi | eb07669 | 2014-08-22 17:33:31 +0200 | [diff] [blame] | 962 | |
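// Code selection below: a method forced to the interpreter (e.g. single-stepping or breakpoints)
// gets the quick-to-interpreter bridge; a deoptimized caller is routed through the instrumentation
// entry point; an initialized class uses the method's compiled entry point; a class that is still
// initializing sends static calls to the oat code (the trampoline stays in place until
// initialization finishes) and other calls to the compiled entry point; an erroneous class leaves
// code == nullptr with the exception pending.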
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 963 | // Ensure that the called method's class is initialized. |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 964 | StackHandleScope<1> hs(soa.Self()); |
| 965 | Handle<mirror::Class> called_class(hs.NewHandle(called->GetDeclaringClass())); |
Ian Rogers | 7b078e8 | 2014-09-10 14:44:24 -0700 | [diff] [blame] | 966 | linker->EnsureInitialized(soa.Self(), called_class, true, true); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 967 | if (LIKELY(called_class->IsInitialized())) { |
Daniel Mihalyi | eb07669 | 2014-08-22 17:33:31 +0200 | [diff] [blame] | 968 | if (UNLIKELY(Dbg::IsForcedInterpreterNeededForResolution(self, called))) { |
| 969 | // If we are single-stepping or the called method is deoptimized (by a |
| 970 | // breakpoint, for example), then we have to execute the called method |
| 971 | // with the interpreter. |
| 972 | code = GetQuickToInterpreterBridge(); |
| 973 | } else if (UNLIKELY(Dbg::IsForcedInstrumentationNeededForResolution(self, caller))) { |
| 974 | // If the caller is deoptimized (by a breakpoint, for example), we have to |
| 975 | // continue its execution with interpreter when returning from the called |
| 976 | // method. Because we do not want to execute the called method with the |
| 977 | // interpreter, we wrap its execution into the instrumentation stubs. |
| 978 | // When the called method returns, it will execute the instrumentation |
| 979 | // exit hook that will determine the need of the interpreter with a call |
| 980 | // to Dbg::IsForcedInterpreterNeededForUpcall and deoptimize the stack if |
| 981 | // it is needed. |
| 982 | code = GetQuickInstrumentationEntryPoint(); |
| 983 | } else { |
| 984 | code = called->GetEntryPointFromQuickCompiledCode(); |
| 985 | } |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 986 | } else if (called_class->IsInitializing()) { |
Daniel Mihalyi | eb07669 | 2014-08-22 17:33:31 +0200 | [diff] [blame] | 987 | if (UNLIKELY(Dbg::IsForcedInterpreterNeededForResolution(self, called))) { |
| 988 | // If we are single-stepping or the called method is deoptimized (by a |
| 989 | // breakpoint, for example), then we have to execute the called method |
| 990 | // with the interpreter. |
| 991 | code = GetQuickToInterpreterBridge(); |
| 992 | } else if (invoke_type == kStatic) { |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 993 | // Class is still initializing, go to oat and grab code (trampoline must be left in place |
| 994 | // until class is initialized to stop races between threads). |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 995 | code = linker->GetQuickOatCodeFor(called); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 996 | } else { |
| 997 | // No trampoline for non-static methods. |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 998 | code = called->GetEntryPointFromQuickCompiledCode(); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 999 | } |
| 1000 | } else { |
| 1001 | DCHECK(called_class->IsErroneous()); |
| 1002 | } |
| 1003 | } |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 1004 | CHECK_EQ(code == nullptr, self->IsExceptionPending()); |
Mathieu Chartier | 07d447b | 2013-09-26 11:57:43 -0700 | [diff] [blame] | 1005 | // Fix up any locally saved objects that may have moved during a GC.
| 1006 | visitor.FixupReferences(); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1007 | // Place called method in callee-save frame to be placed as first argument to quick method. |
Andreas Gampe | cf4035a | 2014-05-28 22:43:01 -0700 | [diff] [blame] | 1008 | sp->Assign(called); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1009 | return code; |
| 1010 | } |
| 1011 | |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1012 | /* |
| 1013 | * This class uses a couple of observations to unite the different calling conventions through |
| 1014 | * a few constants. |
| 1015 | * |
| 1016 | * 1) Number of registers used for passing is normally even, so counting down has no penalty for |
| 1017 | * possible alignment. |
| 1018 | * 2) Known 64b architectures store 8B units on the stack, both for integral and floating point |
| 1019 | *    types, so using uintptr_t is OK. This also means that we can use kRegistersNeededX to
| 1020 | *    denote when we have to split things.
| 1021 | * 3) The only soft-float ABI, ARM, is 32b, so no widening needs to be taken into account for
| 1022 | *    floats and we can use the Int handling directly.
| 1023 | * 4) Only 64b architectures widen, and their stack is aligned 8B anyway, so no padding code is
| 1024 | *    necessary when widening. Also, widening of Ints will take place implicitly, and the
| 1025 | * extension should be compatible with Aarch64, which mandates copying the available bits |
| 1026 | * into LSB and leaving the rest unspecified. |
| 1027 | * 5) Aligning longs and doubles is necessary on arm only, and it's the same in registers and on |
| 1028 | * the stack. |
| 1029 | * 6) Only little-endian architectures are supported.
| 1030 | * |
| 1031 | * |
| 1032 | * Actual work is supposed to be done in a delegate of the template type. The interface is as |
| 1033 | * follows: |
| 1034 | * |
| 1035 | * void PushGpr(uintptr_t): Add a value for the next GPR |
| 1036 | * |
| 1037 | * void PushFpr4(float): Add a value for the next FPR of size 32b. It is only called if we need
| 1038 | *                       padding, that is, when the architecture is 32b and aligns 64b values.
| 1039 | * |
| 1040 | * void PushFpr8(uint64_t): Push a double. We _will_ call this on 32b, it's the callee's job to |
| 1041 | * split this if necessary. The current state will have aligned, if |
| 1042 | * necessary. |
| 1043 | * |
| 1044 | * void PushStack(uintptr_t): Push a value to the stack. |
| 1045 | * |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1046 | * uintptr_t PushHandle(mirror::Object* ref): Add a reference to the HandleScope. This _will_ be
Andreas Gampe | 36fea8d | 2014-03-10 13:37:40 -0700 | [diff] [blame] | 1047 | *                                          called with nullptr, as this might be important for null initialization.
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1048 | * Must return the jobject, that is, the reference to the |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1049 | * entry in the HandleScope (nullptr if necessary). |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1050 | * |
| 1051 | */ |
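// Purely as an illustration (not part of the runtime), a minimal delegate satisfying the interface
// above could look like the hypothetical sketch below; the class and member names are made up:
//
//   class RecordingDelegate {
//    public:
//     void PushGpr(uintptr_t val) { gprs_.push_back(val); }
//     void PushFpr4(float val) { fprs_.push_back(bit_cast<uint32_t, float>(val)); }
//     void PushFpr8(uint64_t val) { fprs64_.push_back(val); }
//     void PushStack(uintptr_t val) { stack_.push_back(val); }
//     uintptr_t PushHandle(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
//       refs_.push_back(ref);  // Record the reference; a real delegate would hand out a handle.
//       return reinterpret_cast<uintptr_t>(nullptr);
//     }
//    private:
//     std::vector<uintptr_t> gprs_, stack_;
//     std::vector<uint32_t> fprs_;
//     std::vector<uint64_t> fprs64_;
//     std::vector<mirror::Object*> refs_;
//   };
//
// It would then be driven as:
//   RecordingDelegate delegate;
//   BuildNativeCallFrameStateMachine<RecordingDelegate> sm(&delegate);
//   sm.AdvanceInt(1); sm.AdvanceDouble(bit_cast<uint64_t, double>(2.0));
// ComputeNativeCallFrameSize and FillNativeCall below are the real in-tree delegates.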
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1052 | template<class T> class BuildNativeCallFrameStateMachine { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1053 | public: |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1054 | #if defined(__arm__) |
| 1055 | // TODO: These are all dummy values! |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1056 | static constexpr bool kNativeSoftFloatAbi = true; |
| 1057 | static constexpr size_t kNumNativeGprArgs = 4; // 4 arguments passed in GPRs, r0-r3 |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1058 | static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs. |
| 1059 | |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1060 | static constexpr size_t kRegistersNeededForLong = 2; |
| 1061 | static constexpr size_t kRegistersNeededForDouble = 2; |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1062 | static constexpr bool kMultiRegistersAligned = true; |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1063 | static constexpr bool kMultiFPRegistersWidened = false; |
| 1064 | static constexpr bool kMultiGPRegistersWidened = false; |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1065 | static constexpr bool kAlignLongOnStack = true; |
| 1066 | static constexpr bool kAlignDoubleOnStack = true; |
Stuart Monteith | b95a534 | 2014-03-12 13:32:32 +0000 | [diff] [blame] | 1067 | #elif defined(__aarch64__) |
| 1068 | static constexpr bool kNativeSoftFloatAbi = false; // This is a hard float ABI. |
| 1069 | static constexpr size_t kNumNativeGprArgs = 8; // 8 arguments passed in GPRs.
| 1070 | static constexpr size_t kNumNativeFprArgs = 8; // 8 arguments passed in FPRs. |
| 1071 | |
| 1072 | static constexpr size_t kRegistersNeededForLong = 1; |
| 1073 | static constexpr size_t kRegistersNeededForDouble = 1; |
| 1074 | static constexpr bool kMultiRegistersAligned = false; |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1075 | static constexpr bool kMultiFPRegistersWidened = false; |
| 1076 | static constexpr bool kMultiGPRegistersWidened = false; |
Stuart Monteith | b95a534 | 2014-03-12 13:32:32 +0000 | [diff] [blame] | 1077 | static constexpr bool kAlignLongOnStack = false; |
| 1078 | static constexpr bool kAlignDoubleOnStack = false; |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1079 | #elif defined(__mips__) && !defined(__LP64__) |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1080 | static constexpr bool kNativeSoftFloatAbi = true; // This is a soft float ABI.
Douglas Leung | 735b855 | 2014-10-31 12:21:40 -0700 | [diff] [blame] | 1081 | static constexpr size_t kNumNativeGprArgs = 4; // 4 arguments passed in GPRs. |
| 1082 | static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs. |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1083 | |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1084 | static constexpr size_t kRegistersNeededForLong = 2; |
| 1085 | static constexpr size_t kRegistersNeededForDouble = 2; |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1086 | static constexpr bool kMultiRegistersAligned = true; |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1087 | static constexpr bool kMultiFPRegistersWidened = true; |
| 1088 | static constexpr bool kMultiGPRegistersWidened = false; |
Douglas Leung | 735b855 | 2014-10-31 12:21:40 -0700 | [diff] [blame] | 1089 | static constexpr bool kAlignLongOnStack = true; |
| 1090 | static constexpr bool kAlignDoubleOnStack = true; |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1091 | #elif defined(__mips__) && defined(__LP64__) |
| 1092 | // Let the code prepare GPRs only and we will load the FPRs with the same data.
| 1093 | static constexpr bool kNativeSoftFloatAbi = true; |
| 1094 | static constexpr size_t kNumNativeGprArgs = 8; |
| 1095 | static constexpr size_t kNumNativeFprArgs = 0; |
| 1096 | |
| 1097 | static constexpr size_t kRegistersNeededForLong = 1; |
| 1098 | static constexpr size_t kRegistersNeededForDouble = 1; |
| 1099 | static constexpr bool kMultiRegistersAligned = false; |
| 1100 | static constexpr bool kMultiFPRegistersWidened = false; |
| 1101 | static constexpr bool kMultiGPRegistersWidened = true; |
| 1102 | static constexpr bool kAlignLongOnStack = false; |
| 1103 | static constexpr bool kAlignDoubleOnStack = false; |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1104 | #elif defined(__i386__) |
| 1105 | // TODO: Check these! |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1106 | static constexpr bool kNativeSoftFloatAbi = false; // Not using int registers for fp |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1107 | static constexpr size_t kNumNativeGprArgs = 0; // 0 arguments passed in GPRs.
| 1108 | static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs.
| 1109 | |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1110 | static constexpr size_t kRegistersNeededForLong = 2; |
| 1111 | static constexpr size_t kRegistersNeededForDouble = 2; |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1112 | static constexpr bool kMultiRegistersAligned = false; // x86 is not using regs anyway.
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1113 | static constexpr bool kMultiFPRegistersWidened = false; |
| 1114 | static constexpr bool kMultiGPRegistersWidened = false; |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1115 | static constexpr bool kAlignLongOnStack = false; |
| 1116 | static constexpr bool kAlignDoubleOnStack = false; |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1117 | #elif defined(__x86_64__) |
| 1118 | static constexpr bool kNativeSoftFloatAbi = false; // This is a hard float ABI. |
| 1119 | static constexpr size_t kNumNativeGprArgs = 6; // 6 arguments passed in GPRs. |
| 1120 | static constexpr size_t kNumNativeFprArgs = 8; // 8 arguments passed in FPRs. |
| 1121 | |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1122 | static constexpr size_t kRegistersNeededForLong = 1; |
| 1123 | static constexpr size_t kRegistersNeededForDouble = 1; |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1124 | static constexpr bool kMultiRegistersAligned = false; |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1125 | static constexpr bool kMultiFPRegistersWidened = false; |
| 1126 | static constexpr bool kMultiGPRegistersWidened = false; |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1127 | static constexpr bool kAlignLongOnStack = false; |
| 1128 | static constexpr bool kAlignDoubleOnStack = false; |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1129 | #else |
| 1130 | #error "Unsupported architecture" |
| 1131 | #endif |
| 1132 | |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1133 | public: |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1134 | explicit BuildNativeCallFrameStateMachine(T* delegate) |
| 1135 | : gpr_index_(kNumNativeGprArgs), |
| 1136 | fpr_index_(kNumNativeFprArgs), |
| 1137 | stack_entries_(0), |
| 1138 | delegate_(delegate) { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1139 | // For register alignment, we want to assume that counters (gpr_index_, fpr_index_) are even iff |
| 1140 | // the next register is even; counting down is just to make the compiler happy... |
Andreas Gampe | 575e78c | 2014-11-03 23:41:03 -0800 | [diff] [blame] | 1141 | static_assert(kNumNativeGprArgs % 2 == 0U, "Number of native GPR arguments not even"); |
| 1142 | static_assert(kNumNativeFprArgs % 2 == 0U, "Number of native FPR arguments not even"); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1143 | } |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1144 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1145 | virtual ~BuildNativeCallFrameStateMachine() {} |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1146 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1147 | bool HavePointerGpr() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1148 | return gpr_index_ > 0; |
| 1149 | } |
| 1150 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1151 | void AdvancePointer(const void* val) { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1152 | if (HavePointerGpr()) { |
| 1153 | gpr_index_--; |
| 1154 | PushGpr(reinterpret_cast<uintptr_t>(val)); |
| 1155 | } else { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1156 | stack_entries_++; // TODO: have a field for pointer length as multiple of 32b |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1157 | PushStack(reinterpret_cast<uintptr_t>(val)); |
| 1158 | gpr_index_ = 0; |
| 1159 | } |
| 1160 | } |
| 1161 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1162 | bool HaveHandleScopeGpr() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1163 | return gpr_index_ > 0; |
| 1164 | } |
| 1165 | |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1166 | void AdvanceHandleScope(mirror::Object* ptr) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
| 1167 | uintptr_t handle = PushHandle(ptr); |
| 1168 | if (HaveHandleScopeGpr()) { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1169 | gpr_index_--; |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1170 | PushGpr(handle); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1171 | } else { |
| 1172 | stack_entries_++; |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1173 | PushStack(handle); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1174 | gpr_index_ = 0; |
| 1175 | } |
| 1176 | } |
| 1177 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1178 | bool HaveIntGpr() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1179 | return gpr_index_ > 0; |
| 1180 | } |
| 1181 | |
| 1182 | void AdvanceInt(uint32_t val) { |
| 1183 | if (HaveIntGpr()) { |
| 1184 | gpr_index_--; |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1185 | if (kMultiGPRegistersWidened) { |
| 1186 | DCHECK_EQ(sizeof(uintptr_t), sizeof(int64_t)); |
Roland Levillain | da4d79b | 2015-03-24 14:36:11 +0000 | [diff] [blame] | 1187 | PushGpr(static_cast<int64_t>(bit_cast<int32_t, uint32_t>(val))); |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1188 | } else { |
| 1189 | PushGpr(val); |
| 1190 | } |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1191 | } else { |
| 1192 | stack_entries_++; |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1193 | if (kMultiGPRegistersWidened) { |
| 1194 | DCHECK_EQ(sizeof(uintptr_t), sizeof(int64_t)); |
Roland Levillain | da4d79b | 2015-03-24 14:36:11 +0000 | [diff] [blame] | 1195 | PushStack(static_cast<int64_t>(bit_cast<int32_t, uint32_t>(val))); |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1196 | } else { |
| 1197 | PushStack(val); |
| 1198 | } |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1199 | gpr_index_ = 0; |
| 1200 | } |
| 1201 | } |
| 1202 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1203 | bool HaveLongGpr() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1204 | return gpr_index_ >= kRegistersNeededForLong + (LongGprNeedsPadding() ? 1 : 0); |
| 1205 | } |
| 1206 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1207 | bool LongGprNeedsPadding() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1208 | return kRegistersNeededForLong > 1 && // only pad when using multiple registers |
| 1209 | kAlignLongOnStack && // and when it needs alignment |
| 1210 | (gpr_index_ & 1) == 1; // counter is odd, see constructor |
| 1211 | } |
| 1212 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1213 | bool LongStackNeedsPadding() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1214 | return kRegistersNeededForLong > 1 && // only pad when using multiple registers |
| 1215 | kAlignLongOnStack && // and when it needs 8B alignment |
| 1216 | (stack_entries_ & 1) == 1; // counter is odd |
| 1217 | } |
| 1218 | |
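  // For example, with kRegistersNeededForLong == 2 (32b ABIs) a long is split into two machine
  // words, low half first (consistent with the little-endian-only assumption above); on 64b ABIs
  // it occupies a single GPR or stack slot.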
| 1219 | void AdvanceLong(uint64_t val) { |
| 1220 | if (HaveLongGpr()) { |
| 1221 | if (LongGprNeedsPadding()) { |
| 1222 | PushGpr(0); |
| 1223 | gpr_index_--; |
| 1224 | } |
| 1225 | if (kRegistersNeededForLong == 1) { |
| 1226 | PushGpr(static_cast<uintptr_t>(val)); |
| 1227 | } else { |
| 1228 | PushGpr(static_cast<uintptr_t>(val & 0xFFFFFFFF)); |
| 1229 | PushGpr(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF)); |
| 1230 | } |
| 1231 | gpr_index_ -= kRegistersNeededForLong; |
| 1232 | } else { |
| 1233 | if (LongStackNeedsPadding()) { |
| 1234 | PushStack(0); |
| 1235 | stack_entries_++; |
| 1236 | } |
| 1237 | if (kRegistersNeededForLong == 1) { |
| 1238 | PushStack(static_cast<uintptr_t>(val)); |
| 1239 | stack_entries_++; |
| 1240 | } else { |
| 1241 | PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF)); |
| 1242 | PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF)); |
| 1243 | stack_entries_ += 2; |
| 1244 | } |
| 1245 | gpr_index_ = 0; |
| 1246 | } |
| 1247 | } |
| 1248 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1249 | bool HaveFloatFpr() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1250 | return fpr_index_ > 0; |
| 1251 | } |
| 1252 | |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1253 | void AdvanceFloat(float val) { |
| 1254 | if (kNativeSoftFloatAbi) { |
Roland Levillain | da4d79b | 2015-03-24 14:36:11 +0000 | [diff] [blame] | 1255 | AdvanceInt(bit_cast<uint32_t, float>(val)); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1256 | } else { |
| 1257 | if (HaveFloatFpr()) { |
| 1258 | fpr_index_--; |
| 1259 | if (kRegistersNeededForDouble == 1) { |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1260 | if (kMultiFPRegistersWidened) { |
Roland Levillain | da4d79b | 2015-03-24 14:36:11 +0000 | [diff] [blame] | 1261 | PushFpr8(bit_cast<uint64_t, double>(val)); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1262 | } else { |
| 1263 | // No widening, just use the bits. |
Roland Levillain | da4d79b | 2015-03-24 14:36:11 +0000 | [diff] [blame] | 1264 | PushFpr8(static_cast<uint64_t>(bit_cast<uint32_t, float>(val))); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1265 | } |
| 1266 | } else { |
| 1267 | PushFpr4(val); |
| 1268 | } |
| 1269 | } else { |
| 1270 | stack_entries_++; |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1271 | if (kRegistersNeededForDouble == 1 && kMultiFPRegistersWidened) { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1272 | // Need to widen before storing: Note the "double" in the template instantiation. |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1273 | // Note: We need to jump through those hoops to make the compiler happy. |
| 1274 | DCHECK_EQ(sizeof(uintptr_t), sizeof(uint64_t)); |
Roland Levillain | da4d79b | 2015-03-24 14:36:11 +0000 | [diff] [blame] | 1275 | PushStack(static_cast<uintptr_t>(bit_cast<uint64_t, double>(val))); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1276 | } else { |
Roland Levillain | da4d79b | 2015-03-24 14:36:11 +0000 | [diff] [blame] | 1277 | PushStack(static_cast<uintptr_t>(bit_cast<uint32_t, float>(val))); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1278 | } |
| 1279 | fpr_index_ = 0; |
| 1280 | } |
| 1281 | } |
| 1282 | } |
| 1283 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1284 | bool HaveDoubleFpr() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1285 | return fpr_index_ >= kRegistersNeededForDouble + (DoubleFprNeedsPadding() ? 1 : 0); |
| 1286 | } |
| 1287 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1288 | bool DoubleFprNeedsPadding() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1289 | return kRegistersNeededForDouble > 1 && // only pad when using multiple registers |
| 1290 | kAlignDoubleOnStack && // and when it needs alignment |
| 1291 | (fpr_index_ & 1) == 1; // counter is odd, see constructor |
| 1292 | } |
| 1293 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1294 | bool DoubleStackNeedsPadding() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1295 | return kRegistersNeededForDouble > 1 && // only pad when using multiple registers |
| 1296 | kAlignDoubleOnStack && // and when it needs 8B alignment |
| 1297 | (stack_entries_ & 1) == 1; // counter is odd |
| 1298 | } |
| 1299 | |
| 1300 | void AdvanceDouble(uint64_t val) { |
| 1301 | if (kNativeSoftFloatAbi) { |
| 1302 | AdvanceLong(val); |
| 1303 | } else { |
| 1304 | if (HaveDoubleFpr()) { |
| 1305 | if (DoubleFprNeedsPadding()) { |
| 1306 | PushFpr4(0); |
| 1307 | fpr_index_--; |
| 1308 | } |
| 1309 | PushFpr8(val); |
| 1310 | fpr_index_ -= kRegistersNeededForDouble; |
| 1311 | } else { |
| 1312 | if (DoubleStackNeedsPadding()) { |
| 1313 | PushStack(0); |
| 1314 | stack_entries_++; |
| 1315 | } |
| 1316 | if (kRegistersNeededForDouble == 1) { |
| 1317 | PushStack(static_cast<uintptr_t>(val)); |
| 1318 | stack_entries_++; |
| 1319 | } else { |
| 1320 | PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF)); |
| 1321 | PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF)); |
| 1322 | stack_entries_ += 2; |
| 1323 | } |
| 1324 | fpr_index_ = 0; |
| 1325 | } |
| 1326 | } |
| 1327 | } |
| 1328 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1329 | uint32_t GetStackEntries() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1330 | return stack_entries_; |
| 1331 | } |
| 1332 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1333 | uint32_t GetNumberOfUsedGprs() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1334 | return kNumNativeGprArgs - gpr_index_; |
| 1335 | } |
| 1336 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1337 | uint32_t GetNumberOfUsedFprs() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1338 | return kNumNativeFprArgs - fpr_index_; |
| 1339 | } |
| 1340 | |
| 1341 | private: |
| 1342 | void PushGpr(uintptr_t val) { |
| 1343 | delegate_->PushGpr(val); |
| 1344 | } |
| 1345 | void PushFpr4(float val) { |
| 1346 | delegate_->PushFpr4(val); |
| 1347 | } |
| 1348 | void PushFpr8(uint64_t val) { |
| 1349 | delegate_->PushFpr8(val); |
| 1350 | } |
| 1351 | void PushStack(uintptr_t val) { |
| 1352 | delegate_->PushStack(val); |
| 1353 | } |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1354 | uintptr_t PushHandle(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
| 1355 | return delegate_->PushHandle(ref); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1356 | } |
| 1357 | |
| 1358 | uint32_t gpr_index_; // Number of free GPRs |
| 1359 | uint32_t fpr_index_; // Number of free FPRs |
| 1360 | uint32_t stack_entries_; // Stack entries are in multiples of 32b, as floats are usually not |
| 1361 | // extended |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1362 | T* const delegate_; // What Push implementation gets called |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1363 | }; |
| 1364 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1365 | // Computes the sizes of register stacks and call stack area. Handling of references can be extended |
| 1366 | // in subclasses. |
| 1367 | // |
| 1368 | // To handle native pointers, use "L" in the shorty for an object reference, which simulates |
| 1369 | // them with handles. |
| 1370 | class ComputeNativeCallFrameSize { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1371 | public: |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1372 | ComputeNativeCallFrameSize() : num_stack_entries_(0) {} |
| 1373 | |
| 1374 | virtual ~ComputeNativeCallFrameSize() {} |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1375 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1376 | uint32_t GetStackSize() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1377 | return num_stack_entries_ * sizeof(uintptr_t); |
| 1378 | } |
| 1379 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1380 | uint8_t* LayoutCallStack(uint8_t* sp8) const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1381 | sp8 -= GetStackSize(); |
Andreas Gampe | 779f8c9 | 2014-06-09 18:29:38 -0700 | [diff] [blame] | 1382 | // Align by kStackAlignment. |
| 1383 | sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment)); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1384 | return sp8; |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1385 | } |
| 1386 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1387 | uint8_t* LayoutCallRegisterStacks(uint8_t* sp8, uintptr_t** start_gpr, uint32_t** start_fpr) |
| 1388 | const { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1389 | // Assumption is OK right now, as we have soft-float arm |
| 1390 | size_t fregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeFprArgs; |
| 1391 | sp8 -= fregs * sizeof(uintptr_t); |
| 1392 | *start_fpr = reinterpret_cast<uint32_t*>(sp8); |
| 1393 | size_t iregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeGprArgs; |
| 1394 | sp8 -= iregs * sizeof(uintptr_t); |
| 1395 | *start_gpr = reinterpret_cast<uintptr_t*>(sp8); |
| 1396 | return sp8; |
| 1397 | } |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1398 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1399 | uint8_t* LayoutNativeCall(uint8_t* sp8, uintptr_t** start_stack, uintptr_t** start_gpr, |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1400 | uint32_t** start_fpr) const { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1401 | // Native call stack. |
| 1402 | sp8 = LayoutCallStack(sp8); |
| 1403 | *start_stack = reinterpret_cast<uintptr_t*>(sp8); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1404 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1405 | // Put fprs and gprs below. |
| 1406 | sp8 = LayoutCallRegisterStacks(sp8, start_gpr, start_fpr); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1407 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1408 | // Return the new bottom. |
| 1409 | return sp8; |
| 1410 | } |
| 1411 | |
| 1412 | virtual void WalkHeader(BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 1413 | SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
| 1414 | UNUSED(sm); |
| 1415 | } |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1416 | |
| 1417 | void Walk(const char* shorty, uint32_t shorty_len) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
| 1418 | BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize> sm(this); |
| 1419 | |
| 1420 | WalkHeader(&sm); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1421 | |
| 1422 | for (uint32_t i = 1; i < shorty_len; ++i) { |
| 1423 | Primitive::Type cur_type_ = Primitive::GetType(shorty[i]); |
| 1424 | switch (cur_type_) { |
| 1425 | case Primitive::kPrimNot: |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1426 | // TODO: fix abuse of mirror types. |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1427 | sm.AdvanceHandleScope( |
| 1428 | reinterpret_cast<mirror::Object*>(0x12345678)); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1429 | break; |
| 1430 | |
| 1431 | case Primitive::kPrimBoolean: |
| 1432 | case Primitive::kPrimByte: |
| 1433 | case Primitive::kPrimChar: |
| 1434 | case Primitive::kPrimShort: |
| 1435 | case Primitive::kPrimInt: |
| 1436 | sm.AdvanceInt(0); |
| 1437 | break; |
| 1438 | case Primitive::kPrimFloat: |
| 1439 | sm.AdvanceFloat(0); |
| 1440 | break; |
| 1441 | case Primitive::kPrimDouble: |
| 1442 | sm.AdvanceDouble(0); |
| 1443 | break; |
| 1444 | case Primitive::kPrimLong: |
| 1445 | sm.AdvanceLong(0); |
| 1446 | break; |
| 1447 | default: |
| 1448 | LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty; |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 1449 | UNREACHABLE(); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1450 | } |
| 1451 | } |
| 1452 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1453 | num_stack_entries_ = sm.GetStackEntries(); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1454 | } |
| 1455 | |
| 1456 | void PushGpr(uintptr_t /* val */) { |
| 1457 | // not optimizing registers, yet |
| 1458 | } |
| 1459 | |
| 1460 | void PushFpr4(float /* val */) { |
| 1461 | // not optimizing registers, yet |
| 1462 | } |
| 1463 | |
| 1464 | void PushFpr8(uint64_t /* val */) { |
| 1465 | // not optimizing registers, yet |
| 1466 | } |
| 1467 | |
| 1468 | void PushStack(uintptr_t /* val */) { |
| 1469 | // counting is already done in the superclass |
| 1470 | } |
| 1471 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1472 | virtual uintptr_t PushHandle(mirror::Object* /* ptr */) { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1473 | return reinterpret_cast<uintptr_t>(nullptr); |
| 1474 | } |
| 1475 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1476 | protected: |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1477 | uint32_t num_stack_entries_; |
| 1478 | }; |
| 1479 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1480 | class ComputeGenericJniFrameSize FINAL : public ComputeNativeCallFrameSize { |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1481 | public: |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1482 | ComputeGenericJniFrameSize() : num_handle_scope_references_(0) {} |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1483 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1484 | // Lays out the callee-save frame. Assumes that the incorrect frame corresponding to RefsAndArgs |
| 1485 | // is at *m = sp. Will update to point to the bottom of the save frame. |
| 1486 | // |
| 1487 | // Note: assumes Walk() has been run before, so that num_handle_scope_references_ is known.
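  // A rough sketch of the resulting layout (stack grows downwards, so lower lines are at lower
  // addresses; not to scale):
  //
  //   | RefsAndArgs callee-save frame (method slot freed) |  <- original *m == sp
  //   | HandleScope (num_handle_scope_references_ slots)  |
  //   | StackReference<mirror::ArtMethod> (relocated)     |  <- *m after this call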
Ian Rogers | 59c0706 | 2014-10-10 13:03:39 -0700 | [diff] [blame] | 1488 | void LayoutCalleeSaveFrame(Thread* self, StackReference<mirror::ArtMethod>** m, void* sp, |
| 1489 | HandleScope** handle_scope) |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1490 | SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
| 1491 | mirror::ArtMethod* method = (*m)->AsMirrorPtr(); |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1492 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1493 | uint8_t* sp8 = reinterpret_cast<uint8_t*>(sp); |
| 1494 | |
| 1495 | // First, fix up the layout of the callee-save frame. |
| 1496 | // We have to squeeze in the HandleScope, and relocate the method pointer. |
| 1497 | |
| 1498 | // "Free" the slot for the method. |
Ian Rogers | 1373595 | 2014-10-08 12:43:28 -0700 | [diff] [blame] | 1499 | sp8 += sizeof(void*); // In the callee-save frame we use a full pointer. |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1500 | |
| 1501 | // Under the callee saves, put the handle scope and the new method stack reference.
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1502 | size_t handle_scope_size = HandleScope::SizeOf(num_handle_scope_references_); |
| 1503 | size_t scope_and_method = handle_scope_size + sizeof(StackReference<mirror::ArtMethod>); |
| 1504 | |
| 1505 | sp8 -= scope_and_method; |
| 1506 | // Align by kStackAlignment. |
| 1507 | sp8 = reinterpret_cast<uint8_t*>(RoundDown( |
| 1508 | reinterpret_cast<uintptr_t>(sp8), kStackAlignment)); |
| 1509 | |
| 1510 | uint8_t* sp8_table = sp8 + sizeof(StackReference<mirror::ArtMethod>); |
Ian Rogers | 59c0706 | 2014-10-10 13:03:39 -0700 | [diff] [blame] | 1511 | *handle_scope = HandleScope::Create(sp8_table, self->GetTopHandleScope(), |
| 1512 | num_handle_scope_references_); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1513 | |
| 1514 | // Add a slot for the method pointer, and fill it. Fix the pointer-pointer given to us. |
| 1515 | uint8_t* method_pointer = sp8; |
| 1516 | StackReference<mirror::ArtMethod>* new_method_ref = |
| 1517 | reinterpret_cast<StackReference<mirror::ArtMethod>*>(method_pointer); |
| 1518 | new_method_ref->Assign(method); |
| 1519 | *m = new_method_ref; |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1520 | } |
| 1521 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1522 | // Adds space for the cookie. Note: may leave stack unaligned. |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1523 | void LayoutCookie(uint8_t** sp) const { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1524 | // Reference cookie and padding |
| 1525 | *sp -= 8; |
Mathieu Chartier | 0cd8135 | 2014-05-22 16:48:55 -0700 | [diff] [blame] | 1526 | } |
| 1527 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1528 | // Re-layout the callee-save frame (insert a handle-scope). Then add space for the cookie. |
| 1529 | // Returns the new bottom. Note: this may be unaligned. |
Ian Rogers | 59c0706 | 2014-10-10 13:03:39 -0700 | [diff] [blame] | 1530 | uint8_t* LayoutJNISaveFrame(Thread* self, StackReference<mirror::ArtMethod>** m, void* sp, |
| 1531 | HandleScope** handle_scope) |
Mathieu Chartier | 0cd8135 | 2014-05-22 16:48:55 -0700 | [diff] [blame] | 1532 | SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1533 | // First, fix up the layout of the callee-save frame. |
| 1534 | // We have to squeeze in the HandleScope, and relocate the method pointer. |
Ian Rogers | 59c0706 | 2014-10-10 13:03:39 -0700 | [diff] [blame] | 1535 | LayoutCalleeSaveFrame(self, m, sp, handle_scope); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1536 | |
| 1537 | // The bottom of the callee-save frame is now where the method is, *m. |
| 1538 | uint8_t* sp8 = reinterpret_cast<uint8_t*>(*m); |
| 1539 | |
| 1540 | // Add space for cookie. |
| 1541 | LayoutCookie(&sp8); |
| 1542 | |
| 1543 | return sp8; |
| 1544 | } |
| 1545 | |
| 1546 | // WARNING: After this, *sp won't be pointing to the method anymore! |
Ian Rogers | 59c0706 | 2014-10-10 13:03:39 -0700 | [diff] [blame] | 1547 | uint8_t* ComputeLayout(Thread* self, StackReference<mirror::ArtMethod>** m, |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 1548 | const char* shorty, uint32_t shorty_len, HandleScope** handle_scope, |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1549 | uintptr_t** start_stack, uintptr_t** start_gpr, uint32_t** start_fpr) |
| 1550 | SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
| 1551 | Walk(shorty, shorty_len); |
| 1552 | |
| 1553 | // JNI part. |
Ian Rogers | 59c0706 | 2014-10-10 13:03:39 -0700 | [diff] [blame] | 1554 | uint8_t* sp8 = LayoutJNISaveFrame(self, m, reinterpret_cast<void*>(*m), handle_scope); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1555 | |
| 1556 | sp8 = LayoutNativeCall(sp8, start_stack, start_gpr, start_fpr); |
| 1557 | |
| 1558 | // Return the new bottom. |
| 1559 | return sp8; |
| 1560 | } |
| 1561 | |
| 1562 | uintptr_t PushHandle(mirror::Object* /* ptr */) OVERRIDE; |
| 1563 | |
| 1564 | // Add JNIEnv* and jobj/jclass before the shorty-derived elements. |
| 1565 | void WalkHeader(BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) OVERRIDE |
| 1566 | SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); |
| 1567 | |
| 1568 | private: |
| 1569 | uint32_t num_handle_scope_references_; |
| 1570 | }; |
| 1571 | |
| 1572 | uintptr_t ComputeGenericJniFrameSize::PushHandle(mirror::Object* /* ptr */) { |
| 1573 | num_handle_scope_references_++; |
| 1574 | return reinterpret_cast<uintptr_t>(nullptr); |
| 1575 | } |
| 1576 | |
| 1577 | void ComputeGenericJniFrameSize::WalkHeader( |
| 1578 | BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) { |
| 1579 | // JNIEnv |
| 1580 | sm->AdvancePointer(nullptr); |
| 1581 | |
| 1582 | // Class object or this as first argument |
| 1583 | sm->AdvanceHandleScope(reinterpret_cast<mirror::Object*>(0x12345678)); |
| 1584 | } |
| 1585 | |
| 1586 | // Class to push values to three separate regions. Used to fill the native call part. Adheres to
| 1587 | // the template requirements of BuildNativeCallFrameStateMachine.
| 1588 | class FillNativeCall { |
| 1589 | public: |
| 1590 | FillNativeCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) : |
| 1591 | cur_gpr_reg_(gpr_regs), cur_fpr_reg_(fpr_regs), cur_stack_arg_(stack_args) {} |
| 1592 | |
| 1593 | virtual ~FillNativeCall() {} |
| 1594 | |
| 1595 | void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) { |
| 1596 | cur_gpr_reg_ = gpr_regs; |
| 1597 | cur_fpr_reg_ = fpr_regs; |
| 1598 | cur_stack_arg_ = stack_args; |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1599 | } |
| 1600 | |
| 1601 | void PushGpr(uintptr_t val) { |
| 1602 | *cur_gpr_reg_ = val; |
| 1603 | cur_gpr_reg_++; |
| 1604 | } |
| 1605 | |
| 1606 | void PushFpr4(float val) { |
| 1607 | *cur_fpr_reg_ = val; |
| 1608 | cur_fpr_reg_++; |
| 1609 | } |
| 1610 | |
| 1611 | void PushFpr8(uint64_t val) { |
| 1612 | uint64_t* tmp = reinterpret_cast<uint64_t*>(cur_fpr_reg_); |
| 1613 | *tmp = val; |
| 1614 | cur_fpr_reg_ += 2; |
| 1615 | } |
| 1616 | |
| 1617 | void PushStack(uintptr_t val) { |
| 1618 | *cur_stack_arg_ = val; |
| 1619 | cur_stack_arg_++; |
| 1620 | } |
| 1621 | |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 1622 | virtual uintptr_t PushHandle(mirror::Object*) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1623 | LOG(FATAL) << "(Non-JNI) Native call does not use handles."; |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 1624 | UNREACHABLE(); |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1625 | } |
| 1626 | |
| 1627 | private: |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1628 | uintptr_t* cur_gpr_reg_; |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1629 | uint32_t* cur_fpr_reg_; |
| 1630 | uintptr_t* cur_stack_arg_; |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1631 | }; |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1632 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1633 | // Visits arguments on the stack, placing them into a region lower down the stack for the benefit
| 1634 | // of transitioning into native code. |
| 1635 | class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor { |
| 1636 | public: |
Ian Rogers | 59c0706 | 2014-10-10 13:03:39 -0700 | [diff] [blame] | 1637 | BuildGenericJniFrameVisitor(Thread* self, bool is_static, const char* shorty, uint32_t shorty_len, |
| 1638 | StackReference<mirror::ArtMethod>** sp) |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1639 | : QuickArgumentVisitor(*sp, is_static, shorty, shorty_len), |
| 1640 | jni_call_(nullptr, nullptr, nullptr, nullptr), sm_(&jni_call_) { |
| 1641 | ComputeGenericJniFrameSize fsc; |
| 1642 | uintptr_t* start_gpr_reg; |
| 1643 | uint32_t* start_fpr_reg; |
| 1644 | uintptr_t* start_stack_arg; |
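| | // Compute where the handle scope, the native stack arguments and the GPR/FPR staging arrays |
| | // will live in the reserved area below the managed frame. |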
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 1645 | bottom_of_used_area_ = fsc.ComputeLayout(self, sp, shorty, shorty_len, |
Ian Rogers | 59c0706 | 2014-10-10 13:03:39 -0700 | [diff] [blame] | 1646 | &handle_scope_, |
| 1647 | &start_stack_arg, |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1648 | &start_gpr_reg, &start_fpr_reg); |
| 1649 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1650 | jni_call_.Reset(start_gpr_reg, start_fpr_reg, start_stack_arg, handle_scope_); |
| 1651 | |
| 1652 | // The JNI environment is always the first argument. |
| 1653 | sm_.AdvancePointer(self->GetJniEnv()); |
| 1654 | |
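| | // For a static method the declaring class stands in for the jclass argument; for an instance |
| | // method the 'this' reference is handled by Visit() as a kPrimNot argument. |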
| 1655 | if (is_static) { |
| 1656 | sm_.AdvanceHandleScope((*sp)->AsMirrorPtr()->GetDeclaringClass()); |
| 1657 | } |
| 1658 | } |
| 1659 | |
| 1660 | void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE; |
| 1661 | |
| 1662 | void FinalizeHandleScope(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); |
| 1663 | |
| 1664 | StackReference<mirror::Object>* GetFirstHandleScopeEntry() |
| 1665 | SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
| 1666 | return handle_scope_->GetHandle(0).GetReference(); |
| 1667 | } |
| 1668 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1669 | jobject GetFirstHandleScopeJObject() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1670 | return handle_scope_->GetHandle(0).ToJObject(); |
| 1671 | } |
| 1672 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1673 | void* GetBottomOfUsedArea() const { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1674 | return bottom_of_used_area_; |
| 1675 | } |
| 1676 | |
| 1677 | private: |
| 1678 | // A class to fill a JNI call. Adds reference/handle-scope management to FillNativeCall. |
| 1679 | class FillJniCall FINAL : public FillNativeCall { |
| 1680 | public: |
| 1681 | FillJniCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args, |
| 1682 | HandleScope* handle_scope) : FillNativeCall(gpr_regs, fpr_regs, stack_args), |
| 1683 | handle_scope_(handle_scope), cur_entry_(0) {} |
| 1684 | |
| 1685 | uintptr_t PushHandle(mirror::Object* ref) OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); |
| 1686 | |
| 1687 | void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args, HandleScope* scope) { |
| 1688 | FillNativeCall::Reset(gpr_regs, fpr_regs, stack_args); |
| 1689 | handle_scope_ = scope; |
| 1690 | cur_entry_ = 0U; |
| 1691 | } |
| 1692 | |
| 1693 | void ResetRemainingScopeSlots() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
| 1694 | // Initialize padding entries. |
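| | // Unused slots must be nulled so that a later stack walk or GC never sees uninitialized references. |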
| 1695 | size_t expected_slots = handle_scope_->NumberOfReferences(); |
| 1696 | while (cur_entry_ < expected_slots) { |
Andreas Gampe | 5a4b8a2 | 2014-09-11 08:30:08 -0700 | [diff] [blame] | 1697 | handle_scope_->GetMutableHandle(cur_entry_++).Assign(nullptr); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1698 | } |
| 1699 | DCHECK_NE(cur_entry_, 0U); |
| 1700 | } |
| 1701 | |
| 1702 | private: |
| 1703 | HandleScope* handle_scope_; |
| 1704 | size_t cur_entry_; |
| 1705 | }; |
| 1706 | |
| 1707 | HandleScope* handle_scope_; |
| 1708 | FillJniCall jni_call_; |
| 1709 | void* bottom_of_used_area_; |
| 1710 | |
| 1711 | BuildNativeCallFrameStateMachine<FillJniCall> sm_; |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1712 | |
| 1713 | DISALLOW_COPY_AND_ASSIGN(BuildGenericJniFrameVisitor); |
| 1714 | }; |
| 1715 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1716 | uintptr_t BuildGenericJniFrameVisitor::FillJniCall::PushHandle(mirror::Object* ref) { |
| 1717 | uintptr_t tmp; |
Andreas Gampe | 5a4b8a2 | 2014-09-11 08:30:08 -0700 | [diff] [blame] | 1718 | MutableHandle<mirror::Object> h = handle_scope_->GetMutableHandle(cur_entry_); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1719 | h.Assign(ref); |
| 1720 | tmp = reinterpret_cast<uintptr_t>(h.ToJObject()); |
| 1721 | cur_entry_++; |
| 1722 | return tmp; |
| 1723 | } |
| 1724 | |
Ian Rogers | 9758f79 | 2014-03-13 09:02:55 -0700 | [diff] [blame] | 1725 | void BuildGenericJniFrameVisitor::Visit() { |
| 1726 | Primitive::Type type = GetParamPrimitiveType(); |
| 1727 | switch (type) { |
| 1728 | case Primitive::kPrimLong: { |
| 1729 | jlong long_arg; |
| 1730 | if (IsSplitLongOrDouble()) { |
| 1731 | long_arg = ReadSplitLongParam(); |
| 1732 | } else { |
| 1733 | long_arg = *reinterpret_cast<jlong*>(GetParamAddress()); |
| 1734 | } |
| 1735 | sm_.AdvanceLong(long_arg); |
| 1736 | break; |
| 1737 | } |
| 1738 | case Primitive::kPrimDouble: { |
| 1739 | uint64_t double_arg; |
| 1740 | if (IsSplitLongOrDouble()) { |
| 1741 | // Read the raw bits so that we don't cast to a double. |
| 1742 | double_arg = ReadSplitLongParam(); |
| 1743 | } else { |
| 1744 | double_arg = *reinterpret_cast<uint64_t*>(GetParamAddress()); |
| 1745 | } |
| 1746 | sm_.AdvanceDouble(double_arg); |
| 1747 | break; |
| 1748 | } |
| 1749 | case Primitive::kPrimNot: { |
| 1750 | StackReference<mirror::Object>* stack_ref = |
| 1751 | reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress()); |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1752 | sm_.AdvanceHandleScope(stack_ref->AsMirrorPtr()); |
Ian Rogers | 9758f79 | 2014-03-13 09:02:55 -0700 | [diff] [blame] | 1753 | break; |
| 1754 | } |
| 1755 | case Primitive::kPrimFloat: |
| 1756 | sm_.AdvanceFloat(*reinterpret_cast<float*>(GetParamAddress())); |
| 1757 | break; |
| 1758 | case Primitive::kPrimBoolean: // Fall-through. |
| 1759 | case Primitive::kPrimByte: // Fall-through. |
| 1760 | case Primitive::kPrimChar: // Fall-through. |
| 1761 | case Primitive::kPrimShort: // Fall-through. |
| 1762 | case Primitive::kPrimInt: // Fall-through. |
| 1763 | sm_.AdvanceInt(*reinterpret_cast<jint*>(GetParamAddress())); |
| 1764 | break; |
| 1765 | case Primitive::kPrimVoid: |
| 1766 | LOG(FATAL) << "UNREACHABLE"; |
Ian Rogers | 2c4257b | 2014-10-24 14:20:06 -0700 | [diff] [blame] | 1767 | UNREACHABLE(); |
Ian Rogers | 9758f79 | 2014-03-13 09:02:55 -0700 | [diff] [blame] | 1768 | } |
| 1769 | } |
| 1770 | |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1771 | void BuildGenericJniFrameVisitor::FinalizeHandleScope(Thread* self) { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1772 | // Clear out the rest of the scope. |
| 1773 | jni_call_.ResetRemainingScopeSlots(); |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1774 | // Install HandleScope. |
| 1775 | self->PushHandleScope(handle_scope_); |
Ian Rogers | 9758f79 | 2014-03-13 09:02:55 -0700 | [diff] [blame] | 1776 | } |
| 1777 | |
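| | // Note: the ARM variants take no Thread* argument; the other architectures pass it in explicitly. |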
Ian Rogers | 04c31d2 | 2014-07-07 21:44:06 -0700 | [diff] [blame] | 1778 | #if defined(__arm__) || defined(__aarch64__) |
Andreas Gampe | 9054683 | 2014-03-12 18:07:19 -0700 | [diff] [blame] | 1779 | extern "C" void* artFindNativeMethod(); |
Ian Rogers | 04c31d2 | 2014-07-07 21:44:06 -0700 | [diff] [blame] | 1780 | #else |
| 1781 | extern "C" void* artFindNativeMethod(Thread* self); |
| 1782 | #endif |
Andreas Gampe | 9054683 | 2014-03-12 18:07:19 -0700 | [diff] [blame] | 1783 | |
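| | // Helpers that end the JNI transition for the generic trampoline. The *Ref variant decodes the |
| | // returned local reference; both variants unlock the synchronization object when one is given. |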
Andreas Gampe | ad61517 | 2014-04-04 16:20:13 -0700 | [diff] [blame] | 1784 | uint64_t artQuickGenericJniEndJNIRef(Thread* self, uint32_t cookie, jobject l, jobject lock) { |
| 1785 | if (lock != nullptr) { |
| 1786 | return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceSynchronized(l, cookie, lock, self)); |
| 1787 | } else { |
| 1788 | return reinterpret_cast<uint64_t>(JniMethodEndWithReference(l, cookie, self)); |
| 1789 | } |
| 1790 | } |
| 1791 | |
| 1792 | void artQuickGenericJniEndJNINonRef(Thread* self, uint32_t cookie, jobject lock) { |
| 1793 | if (lock != nullptr) { |
| 1794 | JniMethodEndSynchronized(cookie, lock, self); |
| 1795 | } else { |
| 1796 | JniMethodEnd(cookie, self); |
| 1797 | } |
| 1798 | } |
| 1799 | |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1800 | /* |
| 1801 | * Initializes an alloca region assumed to be directly below sp for a native call: |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1802 | * Create a HandleScope and call stack, and fill a mini stack with values to be pushed to registers. |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1803 | * The final element on the stack is a pointer to the native code. |
| 1804 | * |
Andreas Gampe | 36fea8d | 2014-03-10 13:37:40 -0700 | [diff] [blame] | 1805 | * On entry, the stack has a standard callee-save frame above sp, and an alloca below it. |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1806 | * We need to fix this, as the handle scope needs to go into the callee-save frame. |
Andreas Gampe | 36fea8d | 2014-03-10 13:37:40 -0700 | [diff] [blame] | 1807 | * |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1808 | * The return of this function denotes: |
| 1809 | * 1) On success, a two-word value holding the bottom of the used alloca area and the native code to call. |
| 1810 | * 2) On error (pending exception), the two-word failure value. |
| 1811 | */ |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1812 | extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self, |
| 1813 | StackReference<mirror::ArtMethod>* sp) |
Andreas Gampe | 2da8823 | 2014-02-27 12:26:20 -0800 | [diff] [blame] | 1814 | SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
Andreas Gampe | cf4035a | 2014-05-28 22:43:01 -0700 | [diff] [blame] | 1815 | mirror::ArtMethod* called = sp->AsMirrorPtr(); |
Andreas Gampe | 36fea8d | 2014-03-10 13:37:40 -0700 | [diff] [blame] | 1816 | DCHECK(called->IsNative()) << PrettyMethod(called, true); |
Mathieu Chartier | bfd9a43 | 2014-05-21 17:43:44 -0700 | [diff] [blame] | 1817 | uint32_t shorty_len = 0; |
| 1818 | const char* shorty = called->GetShorty(&shorty_len); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1819 | |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1820 | // Run the visitor and update sp. |
Ian Rogers | 59c0706 | 2014-10-10 13:03:39 -0700 | [diff] [blame] | 1821 | BuildGenericJniFrameVisitor visitor(self, called->IsStatic(), shorty, shorty_len, &sp); |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1822 | visitor.VisitArguments(); |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1823 | visitor.FinalizeHandleScope(self); |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1824 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1825 | // Fix up managed-stack things in Thread. |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1826 | self->SetTopOfStack(sp); |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1827 | |
Ian Rogers | e0dcd46 | 2014-03-08 15:21:04 -0800 | [diff] [blame] | 1828 | self->VerifyStack(); |
| 1829 | |
Andreas Gampe | 9054683 | 2014-03-12 18:07:19 -0700 | [diff] [blame] | 1830 | // Start JNI, save the cookie. |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1831 | uint32_t cookie; |
| 1832 | if (called->IsSynchronized()) { |
Mathieu Chartier | 0cd8135 | 2014-05-22 16:48:55 -0700 | [diff] [blame] | 1833 | cookie = JniMethodStartSynchronized(visitor.GetFirstHandleScopeJObject(), self); |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1834 | if (self->IsExceptionPending()) { |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1835 | self->PopHandleScope(); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1836 | // The two-word failure value denotes an error. |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1837 | return GetTwoWordFailureValue(); |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1838 | } |
| 1839 | } else { |
| 1840 | cookie = JniMethodStart(self); |
| 1841 | } |
Andreas Gampe | 36fea8d | 2014-03-10 13:37:40 -0700 | [diff] [blame] | 1842 | uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp); |
Ian Rogers | e0dcd46 | 2014-03-08 15:21:04 -0800 | [diff] [blame] | 1843 | *(sp32 - 1) = cookie; |
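| | // The cookie is stored in the 32-bit slot directly below the method pointer; the end trampoline |
| | // reads it back from the same slot. |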
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1844 | |
Andreas Gampe | 9054683 | 2014-03-12 18:07:19 -0700 | [diff] [blame] | 1845 | // Retrieve the stored native code. |
Mathieu Chartier | 2d72101 | 2014-11-10 11:08:06 -0800 | [diff] [blame] | 1846 | void* nativeCode = called->GetEntryPointFromJni(); |
Andreas Gampe | 9054683 | 2014-03-12 18:07:19 -0700 | [diff] [blame] | 1847 | |
Andreas Gampe | 9a6a99a | 2014-03-14 07:52:20 -0700 | [diff] [blame] | 1848 | // There are two cases for the content of nativeCode: |
| 1849 | // 1) Pointer to the native function. |
| 1850 | // 2) Pointer to the trampoline for native code binding. |
| 1851 | // In the second case, we need to execute the binding and continue with the actual native function |
| 1852 | // pointer. |
Andreas Gampe | 9054683 | 2014-03-12 18:07:19 -0700 | [diff] [blame] | 1853 | DCHECK(nativeCode != nullptr); |
| 1854 | if (nativeCode == GetJniDlsymLookupStub()) { |
Ian Rogers | 04c31d2 | 2014-07-07 21:44:06 -0700 | [diff] [blame] | 1855 | #if defined(__arm__) || defined(__aarch64__) |
Andreas Gampe | 9054683 | 2014-03-12 18:07:19 -0700 | [diff] [blame] | 1856 | nativeCode = artFindNativeMethod(); |
Ian Rogers | 04c31d2 | 2014-07-07 21:44:06 -0700 | [diff] [blame] | 1857 | #else |
| 1858 | nativeCode = artFindNativeMethod(self); |
| 1859 | #endif |
Andreas Gampe | 9054683 | 2014-03-12 18:07:19 -0700 | [diff] [blame] | 1860 | |
| 1861 | if (nativeCode == nullptr) { |
| 1862 | DCHECK(self->IsExceptionPending()); // There should be an exception pending now. |
Andreas Gampe | ad61517 | 2014-04-04 16:20:13 -0700 | [diff] [blame] | 1863 | |
| 1864 | // End JNI, as the assembly will move to deliver the exception. |
Mathieu Chartier | 0cd8135 | 2014-05-22 16:48:55 -0700 | [diff] [blame] | 1865 | jobject lock = called->IsSynchronized() ? visitor.GetFirstHandleScopeJObject() : nullptr; |
Mathieu Chartier | bfd9a43 | 2014-05-21 17:43:44 -0700 | [diff] [blame] | 1866 | if (shorty[0] == 'L') { |
Andreas Gampe | ad61517 | 2014-04-04 16:20:13 -0700 | [diff] [blame] | 1867 | artQuickGenericJniEndJNIRef(self, cookie, nullptr, lock); |
| 1868 | } else { |
| 1869 | artQuickGenericJniEndJNINonRef(self, cookie, lock); |
| 1870 | } |
| 1871 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1872 | return GetTwoWordFailureValue(); |
Andreas Gampe | 9054683 | 2014-03-12 18:07:19 -0700 | [diff] [blame] | 1873 | } |
| 1874 | // Note that the native code pointer will be automatically set by artFindNativeMethod(). |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1875 | } |
| 1876 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1877 | // Return native code addr(lo) and bottom of alloca address(hi). |
| 1878 | return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(visitor.GetBottomOfUsedArea()), |
| 1879 | reinterpret_cast<uintptr_t>(nativeCode)); |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1880 | } |
| 1881 | |
| 1882 | /* |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1883 | * Called after the native JNI code returns. Responsible for cleanup (handle scope, saved state) and |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1884 | * unlocking. |
| 1885 | */ |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1886 | extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self, jvalue result, uint64_t result_f) |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1887 | SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1888 | StackReference<mirror::ArtMethod>* sp = self->GetManagedStack()->GetTopQuickFrame(); |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1889 | uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp); |
Andreas Gampe | cf4035a | 2014-05-28 22:43:01 -0700 | [diff] [blame] | 1890 | mirror::ArtMethod* called = sp->AsMirrorPtr(); |
Ian Rogers | e0dcd46 | 2014-03-08 15:21:04 -0800 | [diff] [blame] | 1891 | uint32_t cookie = *(sp32 - 1); |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1892 | |
Andreas Gampe | ad61517 | 2014-04-04 16:20:13 -0700 | [diff] [blame] | 1893 | jobject lock = nullptr; |
| 1894 | if (called->IsSynchronized()) { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1895 | HandleScope* table = reinterpret_cast<HandleScope*>(reinterpret_cast<uint8_t*>(sp) |
| 1896 | + sizeof(StackReference<mirror::ArtMethod>)); |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1897 | lock = table->GetHandle(0).ToJObject(); |
Andreas Gampe | ad61517 | 2014-04-04 16:20:13 -0700 | [diff] [blame] | 1898 | } |
| 1899 | |
Mathieu Chartier | bfd9a43 | 2014-05-21 17:43:44 -0700 | [diff] [blame] | 1900 | char return_shorty_char = called->GetShorty()[0]; |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1901 | |
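| | // A reference result must be decoded from its local reference while ending the JNI transition, |
| | // hence the dedicated path below. |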
| 1902 | if (return_shorty_char == 'L') { |
Andreas Gampe | ad61517 | 2014-04-04 16:20:13 -0700 | [diff] [blame] | 1903 | return artQuickGenericJniEndJNIRef(self, cookie, result.l, lock); |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1904 | } else { |
Andreas Gampe | ad61517 | 2014-04-04 16:20:13 -0700 | [diff] [blame] | 1905 | artQuickGenericJniEndJNINonRef(self, cookie, lock); |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1906 | |
| 1907 | switch (return_shorty_char) { |
Nicolas Geoffray | 54accbc | 2014-08-13 03:40:45 +0100 | [diff] [blame] | 1908 | case 'F': { |
| 1909 | if (kRuntimeISA == kX86) { |
| 1910 | // Convert the result back to a float. |
Roland Levillain | da4d79b | 2015-03-24 14:36:11 +0000 | [diff] [blame] | 1911 | double d = bit_cast<double, uint64_t>(result_f); |
| 1912 | return bit_cast<uint32_t, float>(static_cast<float>(d)); |
Nicolas Geoffray | 54accbc | 2014-08-13 03:40:45 +0100 | [diff] [blame] | 1913 | } else { |
| 1914 | return result_f; |
| 1915 | } |
| 1916 | } |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1917 | case 'D': |
| 1918 | return result_f; |
| 1919 | case 'Z': |
| 1920 | return result.z; |
| 1921 | case 'B': |
| 1922 | return result.b; |
| 1923 | case 'C': |
| 1924 | return result.c; |
| 1925 | case 'S': |
| 1926 | return result.s; |
| 1927 | case 'I': |
| 1928 | return result.i; |
| 1929 | case 'J': |
| 1930 | return result.j; |
| 1931 | case 'V': |
| 1932 | return 0; |
| 1933 | default: |
| 1934 | LOG(FATAL) << "Unexpected return shorty character " << return_shorty_char; |
| 1935 | return 0; |
| 1936 | } |
| 1937 | } |
Andreas Gampe | 2da8823 | 2014-02-27 12:26:20 -0800 | [diff] [blame] | 1938 | } |
| 1939 | |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1940 | // We use TwoWordReturn to optimize scalar returns. We use the hi value for code, and the lo value |
| 1941 | // for the method pointer. |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 1942 | // |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1943 | // It is valid to use this, as at the usage points here (returns from C functions) we are assumed |
| 1944 | // to hold the mutator lock (see the SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) annotations). |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 1945 | |
| 1946 | template<InvokeType type, bool access_check> |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1947 | static TwoWordReturn artInvokeCommon(uint32_t method_idx, mirror::Object* this_object, |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 1948 | mirror::ArtMethod* caller_method, |
Andreas Gampe | cf4035a | 2014-05-28 22:43:01 -0700 | [diff] [blame] | 1949 | Thread* self, StackReference<mirror::ArtMethod>* sp); |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 1950 | |
| 1951 | template<InvokeType type, bool access_check> |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1952 | static TwoWordReturn artInvokeCommon(uint32_t method_idx, mirror::Object* this_object, |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 1953 | mirror::ArtMethod* caller_method, |
Andreas Gampe | cf4035a | 2014-05-28 22:43:01 -0700 | [diff] [blame] | 1954 | Thread* self, StackReference<mirror::ArtMethod>* sp) { |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1955 | ScopedQuickEntrypointChecks sqec(self); |
| 1956 | DCHECK_EQ(sp->AsMirrorPtr(), Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)); |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 1957 | mirror::ArtMethod* method = FindMethodFast(method_idx, this_object, caller_method, access_check, |
| 1958 | type); |
| 1959 | if (UNLIKELY(method == nullptr)) { |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 1960 | const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile(); |
| 1961 | uint32_t shorty_len; |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1962 | const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(method_idx), &shorty_len); |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 1963 | { |
| 1964 | // Remember the args in case a GC happens in FindMethodFromCode. |
| 1965 | ScopedObjectAccessUnchecked soa(self->GetJniEnv()); |
| 1966 | RememberForGcArgumentVisitor visitor(sp, type == kStatic, shorty, shorty_len, &soa); |
| 1967 | visitor.VisitArguments(); |
Mathieu Chartier | 0cd8135 | 2014-05-22 16:48:55 -0700 | [diff] [blame] | 1968 | method = FindMethodFromCode<type, access_check>(method_idx, &this_object, &caller_method, |
| 1969 | self); |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 1970 | visitor.FixupReferences(); |
| 1971 | } |
| 1972 | |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 1973 | if (UNLIKELY(method == nullptr)) { |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 1974 | CHECK(self->IsExceptionPending()); |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1975 | return GetTwoWordFailureValue(); // Failure. |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 1976 | } |
| 1977 | } |
| 1978 | DCHECK(!self->IsExceptionPending()); |
| 1979 | const void* code = method->GetEntryPointFromQuickCompiledCode(); |
| 1980 | |
| 1981 | // When we return, the caller will branch to this address, so it had better not be 0! |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 1982 | DCHECK(code != nullptr) << "Code was null in method: " << PrettyMethod(method) |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1983 | << " location: " |
| 1984 | << method->GetDexFile()->GetLocation(); |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 1985 | |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1986 | return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code), |
| 1987 | reinterpret_cast<uintptr_t>(method)); |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 1988 | } |
| 1989 | |
Nicolas Geoffray | 8689a0a | 2014-04-04 09:26:24 +0100 | [diff] [blame] | 1990 | // Explicit artInvokeCommon template function declarations to please analysis tool. |
| 1991 | #define EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(type, access_check) \ |
| 1992 | template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) \ |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1993 | TwoWordReturn artInvokeCommon<type, access_check>(uint32_t method_idx, \ |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 1994 | mirror::Object* this_object, \ |
| 1995 | mirror::ArtMethod* caller_method, \ |
Andreas Gampe | cf4035a | 2014-05-28 22:43:01 -0700 | [diff] [blame] | 1996 | Thread* self, \ |
| 1997 | StackReference<mirror::ArtMethod>* sp) \ |
Nicolas Geoffray | 8689a0a | 2014-04-04 09:26:24 +0100 | [diff] [blame] | 1998 | |
| 1999 | EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, false); |
| 2000 | EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, true); |
| 2001 | EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, false); |
| 2002 | EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, true); |
| 2003 | EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, false); |
| 2004 | EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, true); |
| 2005 | EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, false); |
| 2006 | EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, true); |
| 2007 | EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, false); |
| 2008 | EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, true); |
| 2009 | #undef EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL |
| 2010 | |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 2011 | // See comments in runtime_support_asm.S |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2012 | extern "C" TwoWordReturn artInvokeInterfaceTrampolineWithAccessCheck( |
| 2013 | uint32_t method_idx, mirror::Object* this_object, |
| 2014 | mirror::ArtMethod* caller_method, Thread* self, |
| 2015 | StackReference<mirror::ArtMethod>* sp) |
| 2016 | SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
| 2017 | return artInvokeCommon<kInterface, true>(method_idx, this_object, |
| 2018 | caller_method, self, sp); |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 2019 | } |
| 2020 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2021 | extern "C" TwoWordReturn artInvokeDirectTrampolineWithAccessCheck( |
| 2022 | uint32_t method_idx, mirror::Object* this_object, |
| 2023 | mirror::ArtMethod* caller_method, Thread* self, |
| 2024 | StackReference<mirror::ArtMethod>* sp) |
| 2025 | SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
| 2026 | return artInvokeCommon<kDirect, true>(method_idx, this_object, caller_method, |
| 2027 | self, sp); |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 2028 | } |
| 2029 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2030 | extern "C" TwoWordReturn artInvokeStaticTrampolineWithAccessCheck( |
| 2031 | uint32_t method_idx, mirror::Object* this_object, |
| 2032 | mirror::ArtMethod* caller_method, Thread* self, |
| 2033 | StackReference<mirror::ArtMethod>* sp) |
| 2034 | SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
| 2035 | return artInvokeCommon<kStatic, true>(method_idx, this_object, caller_method, |
| 2036 | self, sp); |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 2037 | } |
| 2038 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2039 | extern "C" TwoWordReturn artInvokeSuperTrampolineWithAccessCheck( |
| 2040 | uint32_t method_idx, mirror::Object* this_object, |
| 2041 | mirror::ArtMethod* caller_method, Thread* self, |
| 2042 | StackReference<mirror::ArtMethod>* sp) |
| 2043 | SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
| 2044 | return artInvokeCommon<kSuper, true>(method_idx, this_object, caller_method, |
| 2045 | self, sp); |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 2046 | } |
| 2047 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2048 | extern "C" TwoWordReturn artInvokeVirtualTrampolineWithAccessCheck( |
| 2049 | uint32_t method_idx, mirror::Object* this_object, |
| 2050 | mirror::ArtMethod* caller_method, Thread* self, |
| 2051 | StackReference<mirror::ArtMethod>* sp) |
| 2052 | SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
| 2053 | return artInvokeCommon<kVirtual, true>(method_idx, this_object, caller_method, |
| 2054 | self, sp); |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 2055 | } |
| 2056 | |
| 2057 | // Determine the target of interface dispatch. The 'this' object is known to be non-null. |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 2058 | extern "C" TwoWordReturn artInvokeInterfaceTrampoline(mirror::ArtMethod* interface_method, |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 2059 | mirror::Object* this_object, |
| 2060 | mirror::ArtMethod* caller_method, |
Andreas Gampe | cf4035a | 2014-05-28 22:43:01 -0700 | [diff] [blame] | 2061 | Thread* self, |
| 2062 | StackReference<mirror::ArtMethod>* sp) |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 2063 | SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 2064 | ScopedQuickEntrypointChecks sqec(self); |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 2065 | mirror::ArtMethod* method; |
| 2066 | if (LIKELY(interface_method->GetDexMethodIndex() != DexFile::kDexNoIndex)) { |
| 2067 | method = this_object->GetClass()->FindVirtualMethodForInterface(interface_method); |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 2068 | if (UNLIKELY(method == nullptr)) { |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 2069 | ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(interface_method, this_object, |
| 2070 | caller_method); |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 2071 | return GetTwoWordFailureValue(); // Failure. |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 2072 | } |
| 2073 | } else { |
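| | // The interface method is unresolved here (it is the resolution method), so recover the real |
| | // dex method index by decoding the invoke-interface instruction at the caller's dex pc. |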
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 2074 | DCHECK(interface_method == Runtime::Current()->GetResolutionMethod()); |
Alexei Zavjalov | 41c507a | 2014-05-15 16:02:46 +0700 | [diff] [blame] | 2075 | |
| 2076 | // Find the caller PC. |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 2077 | constexpr size_t pc_offset = GetCalleeSaveReturnPcOffset(kRuntimeISA, Runtime::kRefsAndArgs); |
Ian Rogers | 1373595 | 2014-10-08 12:43:28 -0700 | [diff] [blame] | 2078 | uintptr_t caller_pc = *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(sp) + pc_offset); |
Alexei Zavjalov | 41c507a | 2014-05-15 16:02:46 +0700 | [diff] [blame] | 2079 | |
| 2080 | // Map the caller PC to a dex PC. |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 2081 | uint32_t dex_pc = caller_method->ToDexPc(caller_pc); |
Mathieu Chartier | bfd9a43 | 2014-05-21 17:43:44 -0700 | [diff] [blame] | 2082 | const DexFile::CodeItem* code = caller_method->GetCodeItem(); |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 2083 | CHECK_LT(dex_pc, code->insns_size_in_code_units_); |
| 2084 | const Instruction* instr = Instruction::At(&code->insns_[dex_pc]); |
| 2085 | Instruction::Code instr_code = instr->Opcode(); |
| 2086 | CHECK(instr_code == Instruction::INVOKE_INTERFACE || |
| 2087 | instr_code == Instruction::INVOKE_INTERFACE_RANGE) |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 2088 | << "Unexpected call into interface trampoline: " << instr->DumpString(nullptr); |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 2089 | uint32_t dex_method_idx; |
| 2090 | if (instr_code == Instruction::INVOKE_INTERFACE) { |
| 2091 | dex_method_idx = instr->VRegB_35c(); |
| 2092 | } else { |
| 2093 | DCHECK_EQ(instr_code, Instruction::INVOKE_INTERFACE_RANGE); |
| 2094 | dex_method_idx = instr->VRegB_3rc(); |
| 2095 | } |
| 2096 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2097 | const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache() |
| 2098 | ->GetDexFile(); |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 2099 | uint32_t shorty_len; |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2100 | const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(dex_method_idx), |
| 2101 | &shorty_len); |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 2102 | { |
| 2103 | // Remember the args in case a GC happens in FindMethodFromCode. |
| 2104 | ScopedObjectAccessUnchecked soa(self->GetJniEnv()); |
| 2105 | RememberForGcArgumentVisitor visitor(sp, false, shorty, shorty_len, &soa); |
| 2106 | visitor.VisitArguments(); |
Mathieu Chartier | 0cd8135 | 2014-05-22 16:48:55 -0700 | [diff] [blame] | 2107 | method = FindMethodFromCode<kInterface, false>(dex_method_idx, &this_object, &caller_method, |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 2108 | self); |
| 2109 | visitor.FixupReferences(); |
| 2110 | } |
| 2111 | |
| 2112 | if (UNLIKELY(method == nullptr)) { |
| 2113 | CHECK(self->IsExceptionPending()); |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 2114 | return GetTwoWordFailureValue(); // Failure. |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 2115 | } |
| 2116 | } |
| 2117 | const void* code = method->GetEntryPointFromQuickCompiledCode(); |
| 2118 | |
| 2119 | // When we return, the caller will branch to this address, so it had better not be 0! |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 2120 | DCHECK(code != nullptr) << "Code was null in method: " << PrettyMethod(method) |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2121 | << " location: " << method->GetDexFile()->GetLocation(); |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 2122 | |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 2123 | return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code), |
| 2124 | reinterpret_cast<uintptr_t>(method)); |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 2125 | } |
| 2126 | |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 2127 | } // namespace art |