/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "art_method-inl.h"
#include "base/callee_save_type.h"
#include "base/enums.h"
#include "callee_save_frame.h"
#include "common_throws.h"
#include "class_root.h"
#include "debug_print.h"
#include "debugger.h"
#include "dex/dex_file-inl.h"
#include "dex/dex_file_types.h"
#include "dex/dex_instruction-inl.h"
#include "dex/method_reference.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/quick/callee_save_frame.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc/accounting/card_table-inl.h"
#include "imt_conflict_table.h"
#include "imtable-inl.h"
#include "index_bss_mapping.h"
#include "instrumentation.h"
#include "interpreter/interpreter.h"
#include "interpreter/interpreter_common.h"
#include "interpreter/shadow_frame-inl.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "linear_alloc.h"
#include "method_handles.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/method.h"
#include "mirror/method_handle_impl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/var_handle.h"
#include "oat.h"
#include "oat_file.h"
#include "oat_quick_method_header.h"
#include "quick_exception_handler.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "stack.h"
#include "thread-inl.h"
#include "var_handles.h"
#include "well_known_classes.h"

namespace art {

// Visits the arguments as saved to the stack by a CalleeSaveType::kRefAndArgs callee save frame.
class QuickArgumentVisitor {
  // Number of bytes for each out register in the caller method's frame.
  static constexpr size_t kBytesStackArgLocation = 4;
  // Frame size in bytes of a callee-save frame for RefsAndArgs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize =
      RuntimeCalleeSaveFrame::GetFrameSize(CalleeSaveType::kSaveRefsAndArgs);
  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset =
      RuntimeCalleeSaveFrame::GetGpr1Offset(CalleeSaveType::kSaveRefsAndArgs);
  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset =
      RuntimeCalleeSaveFrame::GetFpr1Offset(CalleeSaveType::kSaveRefsAndArgs);
  // Offset of return address.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_ReturnPcOffset =
      RuntimeCalleeSaveFrame::GetReturnPcOffset(CalleeSaveType::kSaveRefsAndArgs);
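  // Illustrative sketch (not used by the runtime itself): these constants let a
  // kSaveRefsAndArgs frame be decoded from its frame pointer alone, e.g.
  //   uint8_t* frame = reinterpret_cast<uint8_t*>(sp);
  //   uint8_t* gprs = frame + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset;
  //   uint8_t* fprs = frame + kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset;
  // which is exactly how the constructor below computes gpr_args_ and fpr_args_.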
#if defined(__arm__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | ...        |    4x6 bytes callee saves
  // | R3         |
  // | R2         |
  // | R1         |
  // | S15        |
  // | :          |
  // | S0         |
  // |            |    4x2 bytes padding
  // | Method*    |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = true;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = true;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 3;
  static constexpr size_t kNumQuickFprArgs = 16;
  static constexpr bool kGprFprLockstep = false;
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__aarch64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | X29        |
  // | :          |
  // | X20        |
  // | X7         |
  // | :          |
  // | X1         |
  // | D7         |
  // | :          |
  // | D0         |
  // |            |    padding
  // | Method*    |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__) && !defined(__LP64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | T1         |    arg5
  // | T0         |    arg4
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // | F19        |
  // | F18        |    f_arg5
  // | F17        |
  // | F16        |    f_arg4
  // | F15        |
  // | F14        |    f_arg3
  // | F13        |
  // | F12        |    f_arg2
  // | F11        |
  // | F10        |    f_arg1
  // | F9         |
  // | F8         |    f_arg0
  // |            |    padding
  // | A0/Method* |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = true;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = true;
  static constexpr size_t kNumQuickGprArgs = 5;   // 5 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 12;  // 6 arguments passed in FPRs. Floats can be
                                                  // passed only in even numbered registers and each
                                                  // double occupies two registers.
  static constexpr bool kGprFprLockstep = false;
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__) && defined(__LP64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | A7         |    arg7
  // | A6         |    arg6
  // | A5         |    arg5
  // | A4         |    arg4
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // | F19        |    f_arg7
  // | F18        |    f_arg6
  // | F17        |    f_arg5
  // | F16        |    f_arg4
  // | F15        |    f_arg3
  // | F14        |    f_arg2
  // | F13        |    f_arg1
  // | F12        |    f_arg0
  // |            |    padding
  // | A0/Method* |  <- sp
  // NOTE: for Mips64, when A0 is skipped, F12 is also skipped.
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 7;  // 7 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = true;

  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__i386__)
  // The callee save frame is pointed to by SP.
  // | argN        |  |
  // | ...         |  |
  // | arg4        |  |
  // | arg3 spill  |  |  Caller's frame
  // | arg2 spill  |  |
  // | arg1 spill  |  |
  // | Method*     | ---
  // | Return      |
  // | EBP,ESI,EDI |    callee saves
  // | EBX         |    arg3
  // | EDX         |    arg2
  // | ECX         |    arg1
  // | XMM3        |    float arg 4
  // | XMM2        |    float arg 3
  // | XMM1        |    float arg 2
  // | XMM0        |    float arg 1
  // | EAX/Method* |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 4;  // 4 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__x86_64__)
  // The callee save frame is pointed to by SP.
  // | argN            |  |
  // | ...             |  |
  // | reg. arg spills |  |  Caller's frame
  // | Method*         | ---
  // | Return          |
  // | R15             |    callee save
  // | R14             |    callee save
  // | R13             |    callee save
  // | R12             |    callee save
  // | R9              |    arg5
  // | R8              |    arg4
  // | RSI/R6          |    arg1
  // | RBP/R5          |    callee save
  // | RBX/R3          |    callee save
  // | RDX/R2          |    arg2
  // | RCX/R1          |    arg3
  // | XMM7            |    float arg 8
  // | XMM6            |    float arg 7
  // | XMM5            |    float arg 6
  // | XMM4            |    float arg 5
  // | XMM3            |    float arg 4
  // | XMM2            |    float arg 3
  // | XMM1            |    float arg 2
  // | XMM0            |    float arg 1
  // | Padding         |
  // | RDI/Method*     |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 5;  // 5 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    switch (gpr_index) {
      case 0: return (4 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 1: return (1 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 2: return (0 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 3: return (5 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 4: return (6 * GetBytesPerGprSpillLocation(kRuntimeISA));
      default:
        LOG(FATAL) << "Unexpected GPR index: " << gpr_index;
        UNREACHABLE();
    }
  }
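  // The non-contiguous mapping above mirrors the spill order in the diagram:
  // counting GPR slots up from the Method*, RCX is slot 0, RDX slot 1, RBX and
  // RBP (callee saves) slots 2 and 3, RSI slot 4, and R8/R9 slots 5 and 6,
  // while the managed ABI assigns arguments in RSI, RDX, RCX, R8, R9 order.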
#else
#error "Unsupported architecture"
#endif

 public:
  // Special handling for proxy methods. Proxy methods are instance methods so the
  // 'this' object is the 1st argument. They also have the same frame layout as the
  // kRefAndArgs runtime method. Since 'this' is a reference, it is located in the
  // 1st GPR.
  static StackReference<mirror::Object>* GetProxyThisObjectReference(ArtMethod** sp)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    CHECK((*sp)->IsProxyMethod());
    CHECK_GT(kNumQuickGprArgs, 0u);
    constexpr uint32_t kThisGprIndex = 0u;  // 'this' is in the 1st GPR.
    size_t this_arg_offset = kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset +
        GprIndexToGprOffset(kThisGprIndex);
    uint8_t* this_arg_address = reinterpret_cast<uint8_t*>(sp) + this_arg_offset;
    return reinterpret_cast<StackReference<mirror::Object>*>(this_arg_address);
  }

  static ArtMethod* GetCallingMethod(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    return GetCalleeSaveMethodCaller(sp, CalleeSaveType::kSaveRefsAndArgs);
  }

  static ArtMethod* GetOuterMethod(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    uint8_t* previous_sp =
        reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize;
    return *reinterpret_cast<ArtMethod**>(previous_sp);
  }
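  // Note the distinction: GetCallingMethod() returns the logical caller (it
  // resolves through any inlining in the caller's compiled code), while
  // GetOuterMethod() simply reads the ArtMethod* slot of the physically
  // preceding quick frame.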

  static uint32_t GetCallingDexPc(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    constexpr size_t callee_frame_size =
        RuntimeCalleeSaveFrame::GetFrameSize(CalleeSaveType::kSaveRefsAndArgs);
    ArtMethod** caller_sp = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(sp) + callee_frame_size);
    uintptr_t outer_pc = QuickArgumentVisitor::GetCallingPc(sp);
    const OatQuickMethodHeader* current_code = (*caller_sp)->GetOatQuickMethodHeader(outer_pc);
    uintptr_t outer_pc_offset = current_code->NativeQuickPcOffset(outer_pc);

    if (current_code->IsOptimized()) {
      CodeInfo code_info = CodeInfo::DecodeInlineInfoOnly(current_code);
      StackMap stack_map = code_info.GetStackMapForNativePcOffset(outer_pc_offset);
      DCHECK(stack_map.IsValid());
      BitTableRange<InlineInfo> inline_infos = code_info.GetInlineInfosOf(stack_map);
      if (!inline_infos.empty()) {
        return inline_infos.back().GetDexPc();
      } else {
        return stack_map.GetDexPc();
      }
    } else {
      return current_code->ToDexPc(caller_sp, outer_pc);
    }
  }
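  // When the call site was inlined by the optimizing compiler, the dex pc
  // returned above is that of the innermost inlined frame (the back of the
  // inline_infos range) rather than the outer method's dex pc.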

  // For the given refs-and-args callee-save frame, return the caller's PC.
  static uintptr_t GetCallingPc(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    uint8_t* return_address_spill =
        reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_ReturnPcOffset;
    return *reinterpret_cast<uintptr_t*>(return_address_spill);
  }

  QuickArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty,
                       uint32_t shorty_len) REQUIRES_SHARED(Locks::mutator_lock_) :
      is_static_(is_static), shorty_(shorty), shorty_len_(shorty_len),
      gpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset),
      fpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset),
      stack_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize
                  + sizeof(ArtMethod*)),  // Skip ArtMethod*.
      gpr_index_(0), fpr_index_(0), fpr_double_index_(0), stack_index_(0),
      cur_type_(Primitive::kPrimVoid), is_split_long_or_double_(false) {
    static_assert(kQuickSoftFloatAbi == (kNumQuickFprArgs == 0),
                  "Number of Quick FPR arguments unexpected");
    static_assert(!(kQuickSoftFloatAbi && kQuickDoubleRegAlignedFloatBackFilled),
                  "Double alignment unexpected");
    // For register alignment, we want to assume that counters (fpr_double_index_) are even if the
    // next register is even.
    static_assert(!kQuickDoubleRegAlignedFloatBackFilled || kNumQuickFprArgs % 2 == 0,
                  "Number of Quick FPR arguments not even");
    DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), kRuntimePointerSize);
  }

  virtual ~QuickArgumentVisitor() {}

  virtual void Visit() = 0;

  Primitive::Type GetParamPrimitiveType() const {
    return cur_type_;
  }

  uint8_t* GetParamAddress() const {
    if (!kQuickSoftFloatAbi) {
      Primitive::Type type = GetParamPrimitiveType();
      if (UNLIKELY((type == Primitive::kPrimDouble) || (type == Primitive::kPrimFloat))) {
        if (type == Primitive::kPrimDouble && kQuickDoubleRegAlignedFloatBackFilled) {
          if (fpr_double_index_ + 2 < kNumQuickFprArgs + 1) {
            return fpr_args_ + (fpr_double_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
          }
        } else if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
          return fpr_args_ + (fpr_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
        }
        return stack_args_ + (stack_index_ * kBytesStackArgLocation);
      }
    }
    if (gpr_index_ < kNumQuickGprArgs) {
      return gpr_args_ + GprIndexToGprOffset(gpr_index_);
    }
    return stack_args_ + (stack_index_ * kBytesStackArgLocation);
  }

  bool IsSplitLongOrDouble() const {
    if ((GetBytesPerGprSpillLocation(kRuntimeISA) == 4) ||
        (GetBytesPerFprSpillLocation(kRuntimeISA) == 4)) {
      return is_split_long_or_double_;
    } else {
      return false;  // An optimization for when GPRs and FPRs are 64-bit.
    }
  }

  bool IsParamAReference() const {
    return GetParamPrimitiveType() == Primitive::kPrimNot;
  }

  bool IsParamALongOrDouble() const {
    Primitive::Type type = GetParamPrimitiveType();
    return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
  }

  uint64_t ReadSplitLongParam() const {
    // The split long is always available through the stack.
    return *reinterpret_cast<uint64_t*>(stack_args_
        + stack_index_ * kBytesStackArgLocation);
  }
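  // A minimal sketch of how callers consume a possibly-split value (compare
  // BuildQuickShadowFrameVisitor::Visit() below):
  //   uint64_t value = IsSplitLongOrDouble()
  //       ? ReadSplitLongParam()  // both halves via the stack
  //       : *reinterpret_cast<uint64_t*>(GetParamAddress());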

  void IncGprIndex() {
    gpr_index_++;
    if (kGprFprLockstep) {
      fpr_index_++;
    }
  }

  void IncFprIndex() {
    fpr_index_++;
    if (kGprFprLockstep) {
      gpr_index_++;
    }
  }
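  // With kGprFprLockstep (MIPS64), GPR and FPR argument registers advance in
  // lockstep: consuming a register of one kind also consumes the corresponding
  // register of the other kind, matching the A0/F12 pairing noted above.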

  void VisitArguments() REQUIRES_SHARED(Locks::mutator_lock_) {
    // (a) 'stack_args_' should point to the method's first argument.
    // (b) whatever the argument type is, 'stack_index_' should be advanced
    //     with every visit.
    gpr_index_ = 0;
    fpr_index_ = 0;
    if (kQuickDoubleRegAlignedFloatBackFilled) {
      fpr_double_index_ = 0;
    }
    stack_index_ = 0;
    if (!is_static_) {  // Handle this.
      cur_type_ = Primitive::kPrimNot;
      is_split_long_or_double_ = false;
      Visit();
      stack_index_++;
      if (kNumQuickGprArgs > 0) {
        IncGprIndex();
      }
    }
    for (uint32_t shorty_index = 1; shorty_index < shorty_len_; ++shorty_index) {
      cur_type_ = Primitive::GetType(shorty_[shorty_index]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          is_split_long_or_double_ = false;
          Visit();
          stack_index_++;
          if (gpr_index_ < kNumQuickGprArgs) {
            IncGprIndex();
          }
          break;
        case Primitive::kPrimFloat:
          is_split_long_or_double_ = false;
          Visit();
          stack_index_++;
          if (kQuickSoftFloatAbi) {
            if (gpr_index_ < kNumQuickGprArgs) {
              IncGprIndex();
            }
          } else {
            if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
              IncFprIndex();
              if (kQuickDoubleRegAlignedFloatBackFilled) {
                // Double should not overlap with float.
                // For example, if fpr_index_ = 3, fpr_double_index_ should be at least 4.
                fpr_double_index_ = std::max(fpr_double_index_, RoundUp(fpr_index_, 2));
                // Float should not overlap with double.
                if (fpr_index_ % 2 == 0) {
                  fpr_index_ = std::max(fpr_double_index_, fpr_index_);
                }
              } else if (kQuickSkipOddFpRegisters) {
                IncFprIndex();
              }
            }
          }
          break;
        case Primitive::kPrimDouble:
        case Primitive::kPrimLong:
          if (kQuickSoftFloatAbi || (cur_type_ == Primitive::kPrimLong)) {
            if (cur_type_ == Primitive::kPrimLong &&
#if defined(__mips__) && !defined(__LP64__)
                (gpr_index_ == 0 || gpr_index_ == 2) &&
#else
                gpr_index_ == 0 &&
#endif
                kAlignPairRegister) {
              // Currently, this is only for ARM and MIPS, where we align long parameters with
              // even-numbered registers by skipping R1 (on ARM) or A1(A3) (on MIPS) and using
              // R2 (on ARM) or A2(T0) (on MIPS) instead.
              IncGprIndex();
            }
            is_split_long_or_double_ = (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) &&
                ((gpr_index_ + 1) == kNumQuickGprArgs);
            if (!kSplitPairAcrossRegisterAndStack && is_split_long_or_double_) {
              // We don't want to split this. Pass over this register.
              gpr_index_++;
              is_split_long_or_double_ = false;
            }
            Visit();
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
            if (gpr_index_ < kNumQuickGprArgs) {
              IncGprIndex();
              if (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) {
                if (gpr_index_ < kNumQuickGprArgs) {
                  IncGprIndex();
                }
              }
            }
          } else {
            is_split_long_or_double_ = (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) &&
                ((fpr_index_ + 1) == kNumQuickFprArgs) && !kQuickDoubleRegAlignedFloatBackFilled;
            Visit();
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
            if (kQuickDoubleRegAlignedFloatBackFilled) {
              if (fpr_double_index_ + 2 < kNumQuickFprArgs + 1) {
                fpr_double_index_ += 2;
                // Float should not overlap with double.
                if (fpr_index_ % 2 == 0) {
                  fpr_index_ = std::max(fpr_double_index_, fpr_index_);
                }
              }
            } else if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
              IncFprIndex();
              if (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) {
                if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
                  IncFprIndex();
                }
              }
            }
          }
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty_;
      }
    }
  }
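  // Worked example (illustrative, arm64): for a non-static method with shorty
  // "VJF", VisitArguments() visits 'this' at the 1st GPR (X1), the long at the
  // 2nd GPR (X2; nothing is split since GPRs are 64-bit), and the float at the
  // 1st FPR (D0), while stack_index_ advances for every argument regardless.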

 protected:
  const bool is_static_;
  const char* const shorty_;
  const uint32_t shorty_len_;

 private:
  uint8_t* const gpr_args_;    // Address of GPR arguments in callee save frame.
  uint8_t* const fpr_args_;    // Address of FPR arguments in callee save frame.
  uint8_t* const stack_args_;  // Address of stack arguments in caller's frame.
  uint32_t gpr_index_;         // Index into spilled GPRs.
  // Index into spilled FPRs.
  // In case kQuickDoubleRegAlignedFloatBackFilled, it may index a hole while fpr_double_index_
  // holds a higher register number.
  uint32_t fpr_index_;
  // Index into spilled FPRs for aligned double.
  // Only used when kQuickDoubleRegAlignedFloatBackFilled. Next available double register indexed
  // in terms of singles; may be behind fpr_index_.
  uint32_t fpr_double_index_;
  uint32_t stack_index_;       // Index into arguments on the stack.
  // The current type of argument during VisitArguments.
  Primitive::Type cur_type_;
  // Does a 64bit parameter straddle the register and stack arguments?
  bool is_split_long_or_double_;
};
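// A minimal sketch of the visitor pattern this class supports (CountRefsVisitor
// is hypothetical, not part of the runtime):
//   class CountRefsVisitor final : public QuickArgumentVisitor {
//    public:
//     using QuickArgumentVisitor::QuickArgumentVisitor;
//     void Visit() REQUIRES_SHARED(Locks::mutator_lock_) override {
//       if (IsParamAReference()) { ++refs_; }
//     }
//     size_t refs_ = 0;
//   };
// Real subclasses, such as BuildQuickShadowFrameVisitor below, follow the same
// shape: construct with (sp, is_static, shorty, shorty_len), then call
// VisitArguments().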

// Returns the 'this' object of a proxy method. This function is only used by StackVisitor. It
// allows the use of the QuickArgumentVisitor constants without moving all the code into its own
// module.
extern "C" mirror::Object* artQuickGetProxyThisObject(ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return QuickArgumentVisitor::GetProxyThisObjectReference(sp)->AsMirrorPtr();
}

// Visits arguments on the stack placing them into the shadow frame.
class BuildQuickShadowFrameVisitor final : public QuickArgumentVisitor {
 public:
  BuildQuickShadowFrameVisitor(ArtMethod** sp, bool is_static, const char* shorty,
                               uint32_t shorty_len, ShadowFrame* sf, size_t first_arg_reg) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), sf_(sf), cur_reg_(first_arg_reg) {}

  void Visit() REQUIRES_SHARED(Locks::mutator_lock_) override;

 private:
  ShadowFrame* const sf_;
  uint32_t cur_reg_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickShadowFrameVisitor);
};

void BuildQuickShadowFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        sf_->SetVRegLong(cur_reg_, ReadSplitLongParam());
      } else {
        sf_->SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
      }
      ++cur_reg_;
      break;
    case Primitive::kPrimNot: {
        StackReference<mirror::Object>* stack_ref =
            reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
        sf_->SetVRegReference(cur_reg_, stack_ref->AsMirrorPtr());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      sf_->SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  ++cur_reg_;
}
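// Note that long and double values occupy two virtual registers: the
// kPrimLong/kPrimDouble case increments cur_reg_ once itself, and the shared
// increment at the end of Visit() accounts for the second slot.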

// Don't inline. See b/65159206.
NO_INLINE
static void HandleDeoptimization(JValue* result,
                                 ArtMethod* method,
                                 ShadowFrame* deopt_frame,
                                 ManagedStack* fragment)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Coming from partial-fragment deopt.
  Thread* self = Thread::Current();
  if (kIsDebugBuild) {
    // Sanity-check: are the methods as expected? We check that the last shadow frame (the bottom
    // of the call-stack) corresponds to the called method.
    ShadowFrame* linked = deopt_frame;
    while (linked->GetLink() != nullptr) {
      linked = linked->GetLink();
    }
    CHECK_EQ(method, linked->GetMethod()) << method->PrettyMethod() << " "
        << ArtMethod::PrettyMethod(linked->GetMethod());
  }

  if (VLOG_IS_ON(deopt)) {
    // Print out the stack to verify that it was a partial-fragment deopt.
    LOG(INFO) << "Continue-ing from deopt. Stack is:";
    QuickExceptionHandler::DumpFramesWithType(self, true);
  }

  ObjPtr<mirror::Throwable> pending_exception;
  bool from_code = false;
  DeoptimizationMethodType method_type;
  self->PopDeoptimizationContext(/* out */ result,
                                 /* out */ &pending_exception,
                                 /* out */ &from_code,
                                 /* out */ &method_type);

  // Push a transition back into managed code onto the linked list in thread.
  self->PushManagedStackFragment(fragment);

  // Ensure that the stack is still in order.
  if (kIsDebugBuild) {
    class DummyStackVisitor : public StackVisitor {
     public:
      explicit DummyStackVisitor(Thread* self_in) REQUIRES_SHARED(Locks::mutator_lock_)
          : StackVisitor(self_in, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames) {}

      bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
        // Nothing to do here. In a debug build, SanityCheckFrame will do the work in the walking
        // logic. Just always say we want to continue.
        return true;
      }
    };
    DummyStackVisitor dsv(self);
    dsv.WalkStack();
  }

  // Restore the exception that was pending before deoptimization then interpret the
  // deoptimized frames.
  if (pending_exception != nullptr) {
    self->SetException(pending_exception);
  }
  interpreter::EnterInterpreterFromDeoptimize(self,
                                              deopt_frame,
                                              result,
                                              from_code,
                                              DeoptimizationMethodType::kDefault);
}
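// Note: HandleDeoptimization is deliberately NO_INLINE (b/65159206), which
// keeps its locals out of artQuickToInterpreterBridge's stack frame on the
// common, non-deoptimizing path.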

extern "C" uint64_t artQuickToInterpreterBridge(ArtMethod* method, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Ensure we don't get thread suspension until the object arguments are safely in the shadow
  // frame.
  ScopedQuickEntrypointChecks sqec(self);

  if (UNLIKELY(!method->IsInvokable())) {
    method->ThrowInvocationTimeError();
    return 0;
  }

  JValue tmp_value;
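  // Pop a previously stashed deoptimization shadow frame, if any. The 'false'
  // argument allows the entry to be absent, so deopt_frame is null unless we
  // are resuming a partial-fragment deoptimization.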
| 745 | ShadowFrame* deopt_frame = self->PopStackedShadowFrame( |
Mingyao Yang | f711f2c | 2016-05-23 12:29:39 -0700 | [diff] [blame] | 746 | StackedShadowFrameType::kDeoptimizationShadowFrame, false); |
Andreas Gampe | 639bdd1 | 2015-06-03 11:22:45 -0700 | [diff] [blame] | 747 | ManagedStack fragment; |
| 748 | |
David Sehr | 709b070 | 2016-10-13 09:12:37 -0700 | [diff] [blame] | 749 | DCHECK(!method->IsNative()) << method->PrettyMethod(); |
Andreas Gampe | 639bdd1 | 2015-06-03 11:22:45 -0700 | [diff] [blame] | 750 | uint32_t shorty_len = 0; |
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 751 | ArtMethod* non_proxy_method = method->GetInterfaceMethodIfProxy(kRuntimePointerSize); |
Mathieu Chartier | 808c7a5 | 2017-12-15 11:19:33 -0800 | [diff] [blame] | 752 | DCHECK(non_proxy_method->GetCodeItem() != nullptr) << method->PrettyMethod(); |
David Sehr | 0225f8e | 2018-01-31 08:52:24 +0000 | [diff] [blame] | 753 | CodeItemDataAccessor accessor(non_proxy_method->DexInstructionData()); |
Andreas Gampe | 639bdd1 | 2015-06-03 11:22:45 -0700 | [diff] [blame] | 754 | const char* shorty = non_proxy_method->GetShorty(&shorty_len); |
| 755 | |
| 756 | JValue result; |
Alex Light | 0aa7a5a | 2018-10-10 15:58:14 +0000 | [diff] [blame] | 757 | bool force_frame_pop = false; |
Andreas Gampe | 639bdd1 | 2015-06-03 11:22:45 -0700 | [diff] [blame] | 758 | |
Mingyao Yang | 417528d | 2017-09-13 12:10:40 -0700 | [diff] [blame] | 759 | if (UNLIKELY(deopt_frame != nullptr)) { |
| 760 | HandleDeoptimization(&result, method, deopt_frame, &fragment); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 761 | } else { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 762 | const char* old_cause = self->StartAssertNoThreadSuspension( |
| 763 | "Building interpreter shadow frame"); |
Mathieu Chartier | 808c7a5 | 2017-12-15 11:19:33 -0800 | [diff] [blame] | 764 | uint16_t num_regs = accessor.RegistersSize(); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 765 | // No last shadow coming from quick. |
Andreas Gampe | b302592 | 2015-09-01 14:45:00 -0700 | [diff] [blame] | 766 | ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr = |
Andreas Gampe | 98ea9d9 | 2018-10-19 14:06:15 -0700 | [diff] [blame] | 767 | CREATE_SHADOW_FRAME(num_regs, /* link= */ nullptr, method, /* dex_pc= */ 0); |
Andreas Gampe | b302592 | 2015-09-01 14:45:00 -0700 | [diff] [blame] | 768 | ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get(); |
Mathieu Chartier | 808c7a5 | 2017-12-15 11:19:33 -0800 | [diff] [blame] | 769 | size_t first_arg_reg = accessor.RegistersSize() - accessor.InsSize(); |
Mathieu Chartier | bfd9a43 | 2014-05-21 17:43:44 -0700 | [diff] [blame] | 770 | BuildQuickShadowFrameVisitor shadow_frame_builder(sp, method->IsStatic(), shorty, shorty_len, |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 771 | shadow_frame, first_arg_reg); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 772 | shadow_frame_builder.VisitArguments(); |
| 773 | // Push a transition back into managed code onto the linked list in thread. |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 774 | self->PushManagedStackFragment(&fragment); |
| 775 | self->PushShadowFrame(shadow_frame); |
| 776 | self->EndAssertNoThreadSuspension(old_cause); |
| 777 | |
Vladimir Marko | 5115a4d | 2019-10-17 14:56:47 +0100 | [diff] [blame] | 778 | if (NeedsClinitCheckBeforeCall(method)) { |
Vladimir Marko | bf12191 | 2019-06-04 13:49:05 +0100 | [diff] [blame] | 779 | ObjPtr<mirror::Class> declaring_class = method->GetDeclaringClass(); |
Vladimir Marko | 8e11065 | 2019-07-30 10:14:41 +0100 | [diff] [blame] | 780 | if (UNLIKELY(!declaring_class->IsVisiblyInitialized())) { |
Vladimir Marko | bf12191 | 2019-06-04 13:49:05 +0100 | [diff] [blame] | 781 | // Ensure static method's class is initialized. |
| 782 | StackHandleScope<1> hs(self); |
Vladimir Marko | 8e11065 | 2019-07-30 10:14:41 +0100 | [diff] [blame] | 783 | Handle<mirror::Class> h_class(hs.NewHandle(declaring_class)); |
Vladimir Marko | bf12191 | 2019-06-04 13:49:05 +0100 | [diff] [blame] | 784 | if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) { |
Vladimir Marko | 8e11065 | 2019-07-30 10:14:41 +0100 | [diff] [blame] | 785 | DCHECK(Thread::Current()->IsExceptionPending()) << method->PrettyMethod(); |
Vladimir Marko | bf12191 | 2019-06-04 13:49:05 +0100 | [diff] [blame] | 786 | self->PopManagedStackFragment(fragment); |
| 787 | return 0; |
| 788 | } |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 789 | } |
| 790 | } |
Daniel Mihalyi | eb07669 | 2014-08-22 17:33:31 +0200 | [diff] [blame] | 791 | |
Mathieu Chartier | 808c7a5 | 2017-12-15 11:19:33 -0800 | [diff] [blame] | 792 | result = interpreter::EnterInterpreterFromEntryPoint(self, accessor, shadow_frame); |
Alex Light | 0aa7a5a | 2018-10-10 15:58:14 +0000 | [diff] [blame] | 793 | force_frame_pop = shadow_frame->GetForcePopFrame(); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 794 | } |
Andreas Gampe | 639bdd1 | 2015-06-03 11:22:45 -0700 | [diff] [blame] | 795 | |
| 796 | // Pop transition. |
| 797 | self->PopManagedStackFragment(fragment); |
| 798 | |
| 799 | // Request a stack deoptimization if needed |
| 800 | ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp); |
Mingyao Yang | f711f2c | 2016-05-23 12:29:39 -0700 | [diff] [blame] | 801 | uintptr_t caller_pc = QuickArgumentVisitor::GetCallingPc(sp); |
Mingyao Yang | a3549d2 | 2016-06-02 17:01:02 -0700 | [diff] [blame] | 802 | // If caller_pc is the instrumentation exit stub, the stub will check to see if deoptimization
Alex Light | 3dacdd6 | 2019-03-12 15:45:47 +0000 | [diff] [blame] | 803 | // should be done and it knows the real return pc. NB: if the caller is null we don't need to do
| 804 | // anything. This can happen during shutdown or early startup.
| 805 | if (UNLIKELY( |
| 806 | caller != nullptr && |
| 807 | caller_pc != reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) && |
| 808 | (self->IsForceInterpreter() || Dbg::IsForcedInterpreterNeededForUpcall(self, caller)))) { |
Nicolas Geoffray | 433b79a | 2017-01-30 20:54:45 +0000 | [diff] [blame] | 809 | if (!Runtime::Current()->IsAsyncDeoptimizeable(caller_pc)) { |
| 810 | LOG(WARNING) << "Got a deoptimization request on un-deoptimizable method " |
| 811 | << caller->PrettyMethod(); |
| 812 | } else { |
Alex Light | 0aa7a5a | 2018-10-10 15:58:14 +0000 | [diff] [blame] | 813 | VLOG(deopt) << "Forcing deoptimization on return from method " << method->PrettyMethod() |
| 814 | << " to " << caller->PrettyMethod() |
| 815 | << (force_frame_pop ? " for frame-pop" : ""); |
| 816 | DCHECK(!force_frame_pop || result.GetJ() == 0) << "Force frame pop should have no result."; |
| 817 | if (force_frame_pop && self->GetException() != nullptr) { |
| 818 | LOG(WARNING) << "Suppressing exception for instruction-retry: " |
| 819 | << self->GetException()->Dump(); |
| 820 | } |
Nicolas Geoffray | 433b79a | 2017-01-30 20:54:45 +0000 | [diff] [blame] | 821 | // Push the deoptimization context so that we can restore the return value and the
| 822 | // exception before executing the deoptimized frames.
| 823 | self->PushDeoptimizationContext( |
Mingyao Yang | 2ee1790 | 2017-08-30 11:37:08 -0700 | [diff] [blame] | 824 | result, |
| 825 | shorty[0] == 'L' || shorty[0] == '[', /* class or array */ |
Alex Light | 0aa7a5a | 2018-10-10 15:58:14 +0000 | [diff] [blame] | 826 | force_frame_pop ? nullptr : self->GetException(), |
Andreas Gampe | 98ea9d9 | 2018-10-19 14:06:15 -0700 | [diff] [blame] | 827 | /* from_code= */ false, |
Mingyao Yang | 2ee1790 | 2017-08-30 11:37:08 -0700 | [diff] [blame] | 828 | DeoptimizationMethodType::kDefault); |
Andreas Gampe | 639bdd1 | 2015-06-03 11:22:45 -0700 | [diff] [blame] | 829 | |
Nicolas Geoffray | 433b79a | 2017-01-30 20:54:45 +0000 | [diff] [blame] | 830 | // Set special exception to cause deoptimization. |
| 831 | self->SetException(Thread::GetDeoptimizationException()); |
| 832 | } |
Andreas Gampe | 639bdd1 | 2015-06-03 11:22:45 -0700 | [diff] [blame] | 833 | } |
| 834 | |
| 835 | // No need to restore the args since the method has already been run by the interpreter. |
| 836 | return result.GetJ(); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 837 | } |
| 838 | |
| 839 | // Visits arguments on the stack, placing them into the args vector; Object* arguments are
| 840 | // converted to jobjects.
Roland Levillain | bbc6e7e | 2018-08-24 16:58:47 +0100 | [diff] [blame] | 841 | class BuildQuickArgumentVisitor final : public QuickArgumentVisitor { |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 842 | public: |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 843 | BuildQuickArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty, uint32_t shorty_len, |
Andreas Gampe | cf4035a | 2014-05-28 22:43:01 -0700 | [diff] [blame] | 844 | ScopedObjectAccessUnchecked* soa, std::vector<jvalue>* args) : |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 845 | QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa), args_(args) {} |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 846 | |
Roland Levillain | bbc6e7e | 2018-08-24 16:58:47 +0100 | [diff] [blame] | 847 | void Visit() REQUIRES_SHARED(Locks::mutator_lock_) override; |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 848 | |
| 849 | private: |
Ian Rogers | 9758f79 | 2014-03-13 09:02:55 -0700 | [diff] [blame] | 850 | ScopedObjectAccessUnchecked* const soa_; |
| 851 | std::vector<jvalue>* const args_; |
Ian Rogers | 9758f79 | 2014-03-13 09:02:55 -0700 | [diff] [blame] | 852 | |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 853 | DISALLOW_COPY_AND_ASSIGN(BuildQuickArgumentVisitor); |
| 854 | }; |
| 855 | |
Ian Rogers | 9758f79 | 2014-03-13 09:02:55 -0700 | [diff] [blame] | 856 | void BuildQuickArgumentVisitor::Visit() { |
| 857 | jvalue val; |
| 858 | Primitive::Type type = GetParamPrimitiveType(); |
| 859 | switch (type) { |
| 860 | case Primitive::kPrimNot: { |
| 861 | StackReference<mirror::Object>* stack_ref = |
| 862 | reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress()); |
| 863 | val.l = soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr()); |
Ian Rogers | 9758f79 | 2014-03-13 09:02:55 -0700 | [diff] [blame] | 864 | break; |
| 865 | } |
| 866 | case Primitive::kPrimLong: // Fall-through. |
| 867 | case Primitive::kPrimDouble: |
| 868 | if (IsSplitLongOrDouble()) { |
| 869 | val.j = ReadSplitLongParam(); |
| 870 | } else { |
| 871 | val.j = *reinterpret_cast<jlong*>(GetParamAddress()); |
| 872 | } |
| 873 | break; |
| 874 | case Primitive::kPrimBoolean: // Fall-through. |
| 875 | case Primitive::kPrimByte: // Fall-through. |
| 876 | case Primitive::kPrimChar: // Fall-through. |
| 877 | case Primitive::kPrimShort: // Fall-through. |
| 878 | case Primitive::kPrimInt: // Fall-through. |
| 879 | case Primitive::kPrimFloat: |
| 880 | val.i = *reinterpret_cast<jint*>(GetParamAddress()); |
| 881 | break; |
| 882 | case Primitive::kPrimVoid: |
| 883 | LOG(FATAL) << "UNREACHABLE"; |
Ian Rogers | 2c4257b | 2014-10-24 14:20:06 -0700 | [diff] [blame] | 884 | UNREACHABLE(); |
Ian Rogers | 9758f79 | 2014-03-13 09:02:55 -0700 | [diff] [blame] | 885 | } |
| 886 | args_->push_back(val); |
| 887 | } |
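
// A standalone sketch (illustrative only, not part of this file's logic) of why the kPrimFloat
// case above can be captured through 'val.i': jvalue is a union, so storing the raw 32-bit
// pattern via the 'i' member preserves exactly the bits that a consumer later reads back as a
// float. The helper below is hypothetical and assumes <cstring> is available for the bit copy.
static inline float UnpackFloatFromJValue(jvalue val) {
  static_assert(sizeof(float) == sizeof(jint), "float and jint must have the same size");
  float f;
  std::memcpy(&f, &val.i, sizeof(f));  // Well-defined bit copy of the pattern stored via val.i.
  return f;
}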
| 888 | |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 889 | // Handler for invocation on proxy methods. On entry, a frame will exist for the proxy object
| 890 | // method, which is responsible for recording callee save registers. We explicitly place the
| 891 | // incoming reference arguments into jobjects (so they survive GC). We then invoke the invocation
| 892 | // handler, a field within the proxy object, which boxes the primitive arguments and deals with error cases.
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 893 | extern "C" uint64_t artQuickProxyInvokeHandler( |
| 894 | ArtMethod* proxy_method, mirror::Object* receiver, Thread* self, ArtMethod** sp) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 895 | REQUIRES_SHARED(Locks::mutator_lock_) { |
David Sehr | 709b070 | 2016-10-13 09:12:37 -0700 | [diff] [blame] | 896 | DCHECK(proxy_method->IsProxyMethod()) << proxy_method->PrettyMethod(); |
| 897 | DCHECK(receiver->GetClass()->IsProxyClass()) << proxy_method->PrettyMethod(); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 898 | // Ensure we don't get thread suspension until the object arguments are safely in jobjects. |
| 899 | const char* old_cause = |
| 900 | self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments"); |
| 901 | // Register the top of the managed stack, making the stack crawlable.
David Sehr | 709b070 | 2016-10-13 09:12:37 -0700 | [diff] [blame] | 902 | DCHECK_EQ((*sp), proxy_method) << proxy_method->PrettyMethod(); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 903 | self->VerifyStack(); |
| 904 | // Start new JNI local reference state. |
| 905 | JNIEnvExt* env = self->GetJniEnv(); |
| 906 | ScopedObjectAccessUnchecked soa(env); |
| 907 | ScopedJniEnvLocalRefState env_state(env); |
| 908 | // Create a local ref. copy of the receiver.
| 909 | jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver); |
| 910 | |
| 911 | // Place the arguments into the args vector and remove the receiver.
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 912 | ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy(kRuntimePointerSize); |
David Sehr | 709b070 | 2016-10-13 09:12:37 -0700 | [diff] [blame] | 913 | CHECK(!non_proxy_method->IsStatic()) << proxy_method->PrettyMethod() << " " |
| 914 | << non_proxy_method->PrettyMethod(); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 915 | std::vector<jvalue> args; |
Mathieu Chartier | bfd9a43 | 2014-05-21 17:43:44 -0700 | [diff] [blame] | 916 | uint32_t shorty_len = 0; |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 917 | const char* shorty = non_proxy_method->GetShorty(&shorty_len); |
Roland Levillain | ad0777d | 2018-02-12 20:00:18 +0000 | [diff] [blame] | 918 | BuildQuickArgumentVisitor local_ref_visitor( |
Andreas Gampe | 98ea9d9 | 2018-10-19 14:06:15 -0700 | [diff] [blame] | 919 | sp, /* is_static= */ false, shorty, shorty_len, &soa, &args); |
Brian Carlstrom | d3633d5 | 2013-08-20 21:06:26 -0700 | [diff] [blame] | 920 | |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 921 | local_ref_visitor.VisitArguments(); |
David Sehr | 709b070 | 2016-10-13 09:12:37 -0700 | [diff] [blame] | 922 | DCHECK_GT(args.size(), 0U) << proxy_method->PrettyMethod(); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 923 | args.erase(args.begin()); |
| 924 | |
| 925 | // Convert proxy method into expected interface method. |
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 926 | ArtMethod* interface_method = proxy_method->FindOverriddenMethod(kRuntimePointerSize); |
David Sehr | 709b070 | 2016-10-13 09:12:37 -0700 | [diff] [blame] | 927 | DCHECK(interface_method != nullptr) << proxy_method->PrettyMethod(); |
| 928 | DCHECK(!interface_method->IsProxyMethod()) << interface_method->PrettyMethod(); |
Mathieu Chartier | fc58af4 | 2015-04-16 18:00:39 -0700 | [diff] [blame] | 929 | self->EndAssertNoThreadSuspension(old_cause); |
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 930 | DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), kRuntimePointerSize); |
Andreas Gampe | e01e364 | 2016-07-25 13:06:04 -0700 | [diff] [blame] | 931 | DCHECK(!Runtime::Current()->IsActiveTransaction()); |
Andreas Gampe | ee29a07 | 2017-11-02 15:28:09 -0700 | [diff] [blame] | 932 | ObjPtr<mirror::Method> interface_reflect_method = |
| 933 | mirror::Method::CreateFromArtMethod<kRuntimePointerSize, false>(soa.Self(), interface_method); |
| 934 | if (interface_reflect_method == nullptr) { |
| 935 | soa.Self()->AssertPendingOOMException(); |
| 936 | return 0; |
| 937 | } |
| 938 | jobject interface_method_jobj = soa.AddLocalReference<jobject>(interface_reflect_method); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 939 | |
| 940 | // All naked Object*s should now be in jobjects, so it's safe to go into the main invoke code
Alex Light | c916736 | 2018-06-11 16:46:43 -0700 | [diff] [blame] | 941 | // that may perform allocations or raise instrumentation events.
| 942 | instrumentation::Instrumentation* instr = Runtime::Current()->GetInstrumentation(); |
| 943 | if (instr->HasMethodEntryListeners()) { |
| 944 | instr->MethodEnterEvent(soa.Self(), |
Vladimir Marko | 19711d4 | 2019-04-12 14:05:34 +0100 | [diff] [blame] | 945 | soa.Decode<mirror::Object>(rcvr_jobj), |
Alex Light | c916736 | 2018-06-11 16:46:43 -0700 | [diff] [blame] | 946 | proxy_method, |
| 947 | 0); |
| 948 | if (soa.Self()->IsExceptionPending()) { |
| 949 | instr->MethodUnwindEvent(self, |
Vladimir Marko | 19711d4 | 2019-04-12 14:05:34 +0100 | [diff] [blame] | 950 | soa.Decode<mirror::Object>(rcvr_jobj), |
Alex Light | c916736 | 2018-06-11 16:46:43 -0700 | [diff] [blame] | 951 | proxy_method, |
| 952 | 0); |
| 953 | return 0; |
| 954 | } |
| 955 | } |
Mathieu Chartier | bfd9a43 | 2014-05-21 17:43:44 -0700 | [diff] [blame] | 956 | JValue result = InvokeProxyInvocationHandler(soa, shorty, rcvr_jobj, interface_method_jobj, args); |
Alex Light | c916736 | 2018-06-11 16:46:43 -0700 | [diff] [blame] | 957 | if (soa.Self()->IsExceptionPending()) { |
| 958 | if (instr->HasMethodUnwindListeners()) { |
| 959 | instr->MethodUnwindEvent(self, |
Vladimir Marko | 19711d4 | 2019-04-12 14:05:34 +0100 | [diff] [blame] | 960 | soa.Decode<mirror::Object>(rcvr_jobj), |
Alex Light | c916736 | 2018-06-11 16:46:43 -0700 | [diff] [blame] | 961 | proxy_method, |
| 962 | 0); |
| 963 | } |
| 964 | } else if (instr->HasMethodExitListeners()) { |
| 965 | instr->MethodExitEvent(self, |
Vladimir Marko | 19711d4 | 2019-04-12 14:05:34 +0100 | [diff] [blame] | 966 | soa.Decode<mirror::Object>(rcvr_jobj), |
Alex Light | c916736 | 2018-06-11 16:46:43 -0700 | [diff] [blame] | 967 | proxy_method, |
| 968 | 0, |
Alex Light | b7c640d | 2019-03-20 15:52:13 -0700 | [diff] [blame] | 969 | {}, |
Alex Light | c916736 | 2018-06-11 16:46:43 -0700 | [diff] [blame] | 970 | result); |
| 971 | } |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 972 | return result.GetJ(); |
| 973 | } |
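
// A condensed sketch of the event-pairing rule the proxy handler above follows (the helper name
// is hypothetical; the real calls are inlined above): every MethodEnterEvent must be balanced by
// exactly one of MethodExitEvent (normal completion) or MethodUnwindEvent (pending exception).
static inline void ExampleReportMethodEnd(instrumentation::Instrumentation* instr,
                                          Thread* self,
                                          ObjPtr<mirror::Object> receiver,
                                          ArtMethod* method,
                                          JValue& result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (self->IsExceptionPending()) {
    if (instr->HasMethodUnwindListeners()) {
      instr->MethodUnwindEvent(self, receiver, method, /* dex_pc= */ 0);
    }
  } else if (instr->HasMethodExitListeners()) {
    instr->MethodExitEvent(self, receiver, method, /* dex_pc= */ 0, {}, result);
  }
}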
| 974 | |
Roland Levillain | ad0777d | 2018-02-12 20:00:18 +0000 | [diff] [blame] | 975 | // Visitor returning a reference argument at a given position in a Quick stack frame. |
| 976 | // NOTE: Only used for testing purposes. |
Roland Levillain | bbc6e7e | 2018-08-24 16:58:47 +0100 | [diff] [blame] | 977 | class GetQuickReferenceArgumentAtVisitor final : public QuickArgumentVisitor { |
Roland Levillain | ad0777d | 2018-02-12 20:00:18 +0000 | [diff] [blame] | 978 | public: |
| 979 | GetQuickReferenceArgumentAtVisitor(ArtMethod** sp, |
| 980 | const char* shorty, |
| 981 | uint32_t shorty_len, |
| 982 | size_t arg_pos) |
Andreas Gampe | 98ea9d9 | 2018-10-19 14:06:15 -0700 | [diff] [blame] | 983 | : QuickArgumentVisitor(sp, /* is_static= */ false, shorty, shorty_len), |
Roland Levillain | ad0777d | 2018-02-12 20:00:18 +0000 | [diff] [blame] | 984 | cur_pos_(0u), |
| 985 | arg_pos_(arg_pos), |
| 986 | ref_arg_(nullptr) { |
| 987 | CHECK_LT(arg_pos, shorty_len) << "Argument position greater than the number of arguments";
| 988 | } |
| 989 | |
Roland Levillain | bbc6e7e | 2018-08-24 16:58:47 +0100 | [diff] [blame] | 990 | void Visit() REQUIRES_SHARED(Locks::mutator_lock_) override { |
Roland Levillain | ad0777d | 2018-02-12 20:00:18 +0000 | [diff] [blame] | 991 | if (cur_pos_ == arg_pos_) { |
| 992 | Primitive::Type type = GetParamPrimitiveType(); |
| 993 | CHECK_EQ(type, Primitive::kPrimNot) << "Argument at searched position is not a reference"; |
| 994 | ref_arg_ = reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress()); |
| 995 | } |
| 996 | ++cur_pos_; |
| 997 | } |
| 998 | |
| 999 | StackReference<mirror::Object>* GetReferenceArgument() { |
| 1000 | return ref_arg_; |
| 1001 | } |
| 1002 | |
| 1003 | private: |
| 1004 | // The position of the currently visited argument. |
| 1005 | size_t cur_pos_; |
| 1006 | // The position of the searched argument. |
| 1007 | const size_t arg_pos_; |
| 1008 | // The reference argument, if found. |
| 1009 | StackReference<mirror::Object>* ref_arg_; |
| 1010 | |
| 1011 | DISALLOW_COPY_AND_ASSIGN(GetQuickReferenceArgumentAtVisitor); |
| 1012 | }; |
| 1013 | |
| 1014 | // Returns the reference argument at position `arg_pos` in the Quick stack frame at address `sp`.
| 1015 | // NOTE: Only used for testing purposes. |
| 1016 | extern "C" StackReference<mirror::Object>* artQuickGetProxyReferenceArgumentAt(size_t arg_pos, |
| 1017 | ArtMethod** sp) |
| 1018 | REQUIRES_SHARED(Locks::mutator_lock_) { |
| 1019 | ArtMethod* proxy_method = *sp; |
| 1020 | ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy(kRuntimePointerSize); |
| 1021 | CHECK(!non_proxy_method->IsStatic()) |
| 1022 | << proxy_method->PrettyMethod() << " " << non_proxy_method->PrettyMethod(); |
| 1023 | uint32_t shorty_len = 0; |
| 1024 | const char* shorty = non_proxy_method->GetShorty(&shorty_len); |
| 1025 | GetQuickReferenceArgumentAtVisitor ref_arg_visitor(sp, shorty, shorty_len, arg_pos); |
| 1026 | ref_arg_visitor.VisitArguments(); |
| 1027 | StackReference<mirror::Object>* ref_arg = ref_arg_visitor.GetReferenceArgument(); |
| 1028 | return ref_arg; |
| 1029 | } |
| 1030 | |
| 1031 | // Visitor returning all the reference arguments in a Quick stack frame. |
Roland Levillain | bbc6e7e | 2018-08-24 16:58:47 +0100 | [diff] [blame] | 1032 | class GetQuickReferenceArgumentsVisitor final : public QuickArgumentVisitor { |
Roland Levillain | ad0777d | 2018-02-12 20:00:18 +0000 | [diff] [blame] | 1033 | public: |
| 1034 | GetQuickReferenceArgumentsVisitor(ArtMethod** sp, |
| 1035 | bool is_static, |
| 1036 | const char* shorty, |
| 1037 | uint32_t shorty_len) |
| 1038 | : QuickArgumentVisitor(sp, is_static, shorty, shorty_len) {} |
| 1039 | |
Roland Levillain | bbc6e7e | 2018-08-24 16:58:47 +0100 | [diff] [blame] | 1040 | void Visit() REQUIRES_SHARED(Locks::mutator_lock_) override { |
Roland Levillain | ad0777d | 2018-02-12 20:00:18 +0000 | [diff] [blame] | 1041 | Primitive::Type type = GetParamPrimitiveType(); |
| 1042 | if (type == Primitive::kPrimNot) { |
| 1043 | StackReference<mirror::Object>* ref_arg = |
| 1044 | reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress()); |
| 1045 | ref_args_.push_back(ref_arg); |
| 1046 | } |
| 1047 | } |
| 1048 | |
| 1049 | std::vector<StackReference<mirror::Object>*> GetReferenceArguments() { |
| 1050 | return ref_args_; |
| 1051 | } |
| 1052 | |
| 1053 | private: |
| 1054 | // The reference arguments. |
| 1055 | std::vector<StackReference<mirror::Object>*> ref_args_; |
| 1056 | |
| 1057 | DISALLOW_COPY_AND_ASSIGN(GetQuickReferenceArgumentsVisitor); |
| 1058 | }; |
| 1059 | |
| 1060 | // Returns all reference arguments in the Quick stack frame at address `sp`.
| 1061 | std::vector<StackReference<mirror::Object>*> GetProxyReferenceArguments(ArtMethod** sp) |
| 1062 | REQUIRES_SHARED(Locks::mutator_lock_) { |
| 1063 | ArtMethod* proxy_method = *sp; |
| 1064 | ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy(kRuntimePointerSize); |
| 1065 | CHECK(!non_proxy_method->IsStatic()) |
| 1066 | << proxy_method->PrettyMethod() << " " << non_proxy_method->PrettyMethod(); |
| 1067 | uint32_t shorty_len = 0; |
| 1068 | const char* shorty = non_proxy_method->GetShorty(&shorty_len); |
Andreas Gampe | 98ea9d9 | 2018-10-19 14:06:15 -0700 | [diff] [blame] | 1069 | GetQuickReferenceArgumentsVisitor ref_args_visitor(sp, /*is_static=*/ false, shorty, shorty_len); |
Roland Levillain | ad0777d | 2018-02-12 20:00:18 +0000 | [diff] [blame] | 1070 | ref_args_visitor.VisitArguments(); |
| 1071 | std::vector<StackReference<mirror::Object>*> ref_args = ref_args_visitor.GetReferenceArguments(); |
| 1072 | return ref_args; |
| 1073 | } |
| 1074 | |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1075 | // Read object references held in arguments from quick frames and place them in JNI local
| 1076 | // references, so they don't get garbage collected.
Roland Levillain | bbc6e7e | 2018-08-24 16:58:47 +0100 | [diff] [blame] | 1077 | class RememberForGcArgumentVisitor final : public QuickArgumentVisitor { |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1078 | public: |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 1079 | RememberForGcArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty, |
| 1080 | uint32_t shorty_len, ScopedObjectAccessUnchecked* soa) : |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1081 | QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa) {} |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1082 | |
Roland Levillain | bbc6e7e | 2018-08-24 16:58:47 +0100 | [diff] [blame] | 1083 | void Visit() REQUIRES_SHARED(Locks::mutator_lock_) override; |
Mathieu Chartier | 07d447b | 2013-09-26 11:57:43 -0700 | [diff] [blame] | 1084 | |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 1085 | void FixupReferences() REQUIRES_SHARED(Locks::mutator_lock_); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1086 | |
| 1087 | private: |
Ian Rogers | 9758f79 | 2014-03-13 09:02:55 -0700 | [diff] [blame] | 1088 | ScopedObjectAccessUnchecked* const soa_; |
Mathieu Chartier | 5275bcb | 2014-02-20 17:16:42 -0800 | [diff] [blame] | 1089 | // References which we must update when exiting in case the GC moved the objects. |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1090 | std::vector<std::pair<jobject, StackReference<mirror::Object>*> > references_; |
| 1091 | |
Mathieu Chartier | 590fee9 | 2013-09-13 13:46:47 -0700 | [diff] [blame] | 1092 | DISALLOW_COPY_AND_ASSIGN(RememberForGcArgumentVisitor); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1093 | }; |
| 1094 | |
Ian Rogers | 9758f79 | 2014-03-13 09:02:55 -0700 | [diff] [blame] | 1095 | void RememberForGcArgumentVisitor::Visit() { |
| 1096 | if (IsParamAReference()) { |
| 1097 | StackReference<mirror::Object>* stack_ref = |
| 1098 | reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress()); |
| 1099 | jobject reference = |
| 1100 | soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr()); |
| 1101 | references_.push_back(std::make_pair(reference, stack_ref)); |
| 1102 | } |
| 1103 | } |
| 1104 | |
| 1105 | void RememberForGcArgumentVisitor::FixupReferences() { |
| 1106 | // Fixup any references which may have changed. |
| 1107 | for (const auto& pair : references_) { |
Mathieu Chartier | 1a5337f | 2016-10-13 13:48:23 -0700 | [diff] [blame] | 1108 | pair.second->Assign(soa_->Decode<mirror::Object>(pair.first)); |
Mathieu Chartier | 5f3ded4 | 2014-04-03 15:25:30 -0700 | [diff] [blame] | 1109 | soa_->Env()->DeleteLocalRef(pair.first); |
Ian Rogers | 9758f79 | 2014-03-13 09:02:55 -0700 | [diff] [blame] | 1110 | } |
| 1111 | } |
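
// A minimal usage sketch of the protocol above, modeled on the trampolines later in this file
// (the helper itself is hypothetical): pin the reference arguments before anything that may
// suspend the thread, then write back possibly-moved objects and drop the local references.
static inline void ExampleVisitAcrossSuspension(ArtMethod** sp,
                                                bool is_static,
                                                const char* shorty,
                                                uint32_t shorty_len,
                                                ScopedObjectAccessUnchecked* soa)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  RememberForGcArgumentVisitor visitor(sp, is_static, shorty, shorty_len, soa);
  visitor.VisitArguments();   // Reference arguments become JNI local refs and are remembered.
  // ... code that may trigger a GC, e.g. method resolution or class initialization ...
  visitor.FixupReferences();  // Re-read (possibly moved) objects and delete the local refs.
}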
| 1112 | |
Alex Light | b7edcda | 2017-04-27 13:20:31 -0700 | [diff] [blame] | 1113 | extern "C" const void* artInstrumentationMethodEntryFromCode(ArtMethod* method, |
| 1114 | mirror::Object* this_object, |
| 1115 | Thread* self, |
| 1116 | ArtMethod** sp) |
| 1117 | REQUIRES_SHARED(Locks::mutator_lock_) { |
| 1118 | const void* result; |
| 1119 | // Instrumentation changes the stack. Thus, when exiting, the stack cannot be verified, so skip |
| 1120 | // that part. |
| 1121 | ScopedQuickEntrypointChecks sqec(self, kIsDebugBuild, false); |
| 1122 | instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation(); |
Alex Light | 6cae5ea | 2018-06-07 17:07:02 -0700 | [diff] [blame] | 1123 | DCHECK(!method->IsProxyMethod()) |
| 1124 | << "Proxy method " << method->PrettyMethod() |
| 1125 | << " (declaring class: " << method->GetDeclaringClass()->PrettyClass() << ")" |
| 1126 | << " should not hit instrumentation entrypoint."; |
Alex Light | b7edcda | 2017-04-27 13:20:31 -0700 | [diff] [blame] | 1127 | if (instrumentation->IsDeoptimized(method)) { |
| 1128 | result = GetQuickToInterpreterBridge(); |
| 1129 | } else { |
Alex Light | 2d441b1 | 2018-06-08 15:33:21 -0700 | [diff] [blame] | 1130 | // This will get the entry point from the oat file or the JIT, falling back to the
| 1131 | // appropriate bridge method if code can be found in neither.
| 1132 | result = instrumentation->GetCodeForInvoke(method); |
| 1133 | jit::Jit* jit = Runtime::Current()->GetJit(); |
| 1134 | DCHECK_NE(result, GetQuickInstrumentationEntryPoint()) << method->PrettyMethod(); |
| 1135 | DCHECK(jit == nullptr || |
| 1136 | // Native methods come through here in Interpreter entrypoints. We might not have |
| 1137 | // disabled jit-gc but that is fine since we won't return jit-code for native methods. |
| 1138 | method->IsNative() || |
| 1139 | !jit->GetCodeCache()->GetGarbageCollectCode()); |
| 1140 | DCHECK(!method->IsNative() || |
| 1141 | jit == nullptr || |
| 1142 | !jit->GetCodeCache()->ContainsPc(result)) |
| 1143 | << method->PrettyMethod() << " code will jump to possibly cleaned up jit code!"; |
Alex Light | b7edcda | 2017-04-27 13:20:31 -0700 | [diff] [blame] | 1144 | } |
| 1145 | |
| 1146 | bool interpreter_entry = (result == GetQuickToInterpreterBridge()); |
| 1147 | bool is_static = method->IsStatic(); |
| 1148 | uint32_t shorty_len; |
| 1149 | const char* shorty = |
| 1150 | method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty(&shorty_len); |
| 1151 | |
| 1152 | ScopedObjectAccessUnchecked soa(self); |
| 1153 | RememberForGcArgumentVisitor visitor(sp, is_static, shorty, shorty_len, &soa); |
| 1154 | visitor.VisitArguments(); |
| 1155 | |
| 1156 | instrumentation->PushInstrumentationStackFrame(self, |
| 1157 | is_static ? nullptr : this_object, |
| 1158 | method, |
| 1159 | QuickArgumentVisitor::GetCallingPc(sp), |
| 1160 | interpreter_entry); |
| 1161 | |
| 1162 | visitor.FixupReferences(); |
| 1163 | if (UNLIKELY(self->IsExceptionPending())) { |
| 1164 | return nullptr; |
| 1165 | } |
| 1166 | CHECK(result != nullptr) << method->PrettyMethod(); |
| 1167 | return result; |
| 1168 | } |
| 1169 | |
| 1170 | extern "C" TwoWordReturn artInstrumentationMethodExitFromCode(Thread* self, |
| 1171 | ArtMethod** sp, |
| 1172 | uint64_t* gpr_result, |
| 1173 | uint64_t* fpr_result) |
| 1174 | REQUIRES_SHARED(Locks::mutator_lock_) { |
| 1175 | DCHECK_EQ(reinterpret_cast<uintptr_t>(self), reinterpret_cast<uintptr_t>(Thread::Current())); |
| 1176 | CHECK(gpr_result != nullptr); |
| 1177 | CHECK(fpr_result != nullptr); |
| 1178 | // Instrumentation exit stub must not be entered with a pending exception. |
| 1179 | CHECK(!self->IsExceptionPending()) << "Enter instrumentation exit stub with pending exception " |
| 1180 | << self->GetException()->Dump(); |
| 1181 | // Compute address of return PC and sanity check that it currently holds 0. |
Vladimir Marko | d3083dd | 2018-05-17 08:43:47 +0100 | [diff] [blame] | 1182 | constexpr size_t return_pc_offset = |
| 1183 | RuntimeCalleeSaveFrame::GetReturnPcOffset(CalleeSaveType::kSaveEverything); |
Alex Light | b7edcda | 2017-04-27 13:20:31 -0700 | [diff] [blame] | 1184 | uintptr_t* return_pc = reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(sp) + |
| 1185 | return_pc_offset); |
| 1186 | CHECK_EQ(*return_pc, 0U); |
| 1187 | |
| 1188 | // Pop the frame filling in the return pc. When deoptimization should not be performed, the
| 1189 | // low half of the return value is 0 and the high half holds the return address. When
| 1190 | // deoptimization should be performed, the two halves instead carry the original return pc
| 1191 | // and the address of the deoptimization entry point.
| 1192 | instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation(); |
| 1193 | TwoWordReturn return_or_deoptimize_pc = instrumentation->PopInstrumentationStackFrame( |
| 1194 | self, return_pc, gpr_result, fpr_result); |
Vladimir Marko | fac2178 | 2018-03-13 17:01:09 +0000 | [diff] [blame] | 1195 | if (self->IsExceptionPending() || self->ObserveAsyncException()) { |
Alex Light | b7edcda | 2017-04-27 13:20:31 -0700 | [diff] [blame] | 1196 | return GetTwoWordFailureValue(); |
| 1197 | } |
| 1198 | return return_or_deoptimize_pc; |
| 1199 | } |
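
// A hedged, standalone sketch of the two-word return convention used above. It mirrors the
// spirit of GetTwoWordSuccessValue()/GetTwoWordFailureValue() (whose exact definitions live in
// a shared header), so treat it as an illustration rather than the runtime's implementation.
// On a 32-bit target both words travel back to the assembly stub packed into one uint64_t,
// which the stub receives in a register pair and splits into its low and high halves.
static inline uint64_t ExamplePackTwoWords32(uint32_t hi, uint32_t lo) {
  return (static_cast<uint64_t>(hi) << 32) | lo;  // A failure value is simply 0 in both halves.
}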
| 1200 | |
Vladimir Marko | 5b4b9a0 | 2018-03-16 09:42:09 +0000 | [diff] [blame] | 1201 | static std::string DumpInstruction(ArtMethod* method, uint32_t dex_pc) |
| 1202 | REQUIRES_SHARED(Locks::mutator_lock_) { |
| 1203 | if (dex_pc == static_cast<uint32_t>(-1)) { |
| 1204 | CHECK(method == jni::DecodeArtMethod(WellKnownClasses::java_lang_String_charAt)); |
| 1205 | return "<native>"; |
| 1206 | } else { |
| 1207 | CodeItemInstructionAccessor accessor = method->DexInstructions(); |
| 1208 | CHECK_LT(dex_pc, accessor.InsnsSizeInCodeUnits()); |
| 1209 | return accessor.InstructionAt(dex_pc).DumpString(method->GetDexFile()); |
| 1210 | } |
| 1211 | } |
| 1212 | |
Vladimir Marko | 606adb3 | 2018-04-05 14:49:24 +0100 | [diff] [blame] | 1213 | static void DumpB74410240ClassData(ObjPtr<mirror::Class> klass) |
| 1214 | REQUIRES_SHARED(Locks::mutator_lock_) { |
| 1215 | std::string storage; |
| 1216 | const char* descriptor = klass->GetDescriptor(&storage); |
| 1217 | LOG(FATAL_WITHOUT_ABORT) << " " << DescribeLoaders(klass->GetClassLoader(), descriptor); |
| 1218 | const OatDexFile* oat_dex_file = klass->GetDexFile().GetOatDexFile(); |
| 1219 | if (oat_dex_file != nullptr) { |
| 1220 | const OatFile* oat_file = oat_dex_file->GetOatFile(); |
| 1221 | const char* dex2oat_cmdline = |
| 1222 | oat_file->GetOatHeader().GetStoreValueByKey(OatHeader::kDex2OatCmdLineKey); |
| 1223 | LOG(FATAL_WITHOUT_ABORT) << " OatFile: " << oat_file->GetLocation() |
| 1224 | << "; " << (dex2oat_cmdline != nullptr ? dex2oat_cmdline : "<not recorded>"); |
| 1225 | } |
| 1226 | } |
| 1227 | |
Vladimir Marko | 5b4b9a0 | 2018-03-16 09:42:09 +0000 | [diff] [blame] | 1228 | static void DumpB74410240DebugData(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) { |
| 1229 | // Mimic the search for the caller and dump some data while doing so.
Vladimir Marko | 606adb3 | 2018-04-05 14:49:24 +0100 | [diff] [blame] | 1230 | LOG(FATAL_WITHOUT_ABORT) << "Dumping debugging data, please attach a bugreport to b/74410240."; |
Vladimir Marko | 5b4b9a0 | 2018-03-16 09:42:09 +0000 | [diff] [blame] | 1231 | |
| 1232 | constexpr CalleeSaveType type = CalleeSaveType::kSaveRefsAndArgs; |
| 1233 | CHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(type)); |
| 1234 | |
Vladimir Marko | d3083dd | 2018-05-17 08:43:47 +0100 | [diff] [blame] | 1235 | constexpr size_t callee_frame_size = RuntimeCalleeSaveFrame::GetFrameSize(type); |
Vladimir Marko | 5b4b9a0 | 2018-03-16 09:42:09 +0000 | [diff] [blame] | 1236 | auto** caller_sp = reinterpret_cast<ArtMethod**>( |
| 1237 | reinterpret_cast<uintptr_t>(sp) + callee_frame_size); |
Vladimir Marko | d3083dd | 2018-05-17 08:43:47 +0100 | [diff] [blame] | 1238 | constexpr size_t callee_return_pc_offset = RuntimeCalleeSaveFrame::GetReturnPcOffset(type); |
Vladimir Marko | 5b4b9a0 | 2018-03-16 09:42:09 +0000 | [diff] [blame] | 1239 | uintptr_t caller_pc = *reinterpret_cast<uintptr_t*>( |
| 1240 | (reinterpret_cast<uint8_t*>(sp) + callee_return_pc_offset)); |
| 1241 | ArtMethod* outer_method = *caller_sp; |
| 1242 | |
| 1243 | if (UNLIKELY(caller_pc == reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()))) { |
| 1244 | LOG(FATAL_WITHOUT_ABORT) << "Method: " << outer_method->PrettyMethod() |
| 1245 | << " native pc: " << caller_pc << " Instrumented!"; |
| 1246 | return; |
| 1247 | } |
| 1248 | |
| 1249 | const OatQuickMethodHeader* current_code = outer_method->GetOatQuickMethodHeader(caller_pc); |
| 1250 | CHECK(current_code != nullptr); |
| 1251 | CHECK(current_code->IsOptimized()); |
| 1252 | uintptr_t native_pc_offset = current_code->NativeQuickPcOffset(caller_pc); |
David Srbecky | 052f8ca | 2018-04-26 15:42:54 +0100 | [diff] [blame] | 1253 | CodeInfo code_info(current_code); |
David Srbecky | 052f8ca | 2018-04-26 15:42:54 +0100 | [diff] [blame] | 1254 | StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset); |
Vladimir Marko | 5b4b9a0 | 2018-03-16 09:42:09 +0000 | [diff] [blame] | 1255 | CHECK(stack_map.IsValid()); |
David Srbecky | 052f8ca | 2018-04-26 15:42:54 +0100 | [diff] [blame] | 1256 | uint32_t dex_pc = stack_map.GetDexPc(); |
Vladimir Marko | 5b4b9a0 | 2018-03-16 09:42:09 +0000 | [diff] [blame] | 1257 | |
| 1258 | // Log the outer method and its associated dex file and class table pointer which can be used |
| 1259 | // to find out if the inlined methods were defined by other dex file(s) or class loader(s). |
| 1260 | ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); |
| 1261 | LOG(FATAL_WITHOUT_ABORT) << "Outer: " << outer_method->PrettyMethod() |
| 1262 | << " native pc: " << caller_pc |
| 1263 | << " dex pc: " << dex_pc |
| 1264 | << " dex file: " << outer_method->GetDexFile()->GetLocation() |
| 1265 | << " class table: " << class_linker->ClassTableForClassLoader(outer_method->GetClassLoader()); |
Vladimir Marko | 606adb3 | 2018-04-05 14:49:24 +0100 | [diff] [blame] | 1266 | DumpB74410240ClassData(outer_method->GetDeclaringClass()); |
Vladimir Marko | 5b4b9a0 | 2018-03-16 09:42:09 +0000 | [diff] [blame] | 1267 | LOG(FATAL_WITHOUT_ABORT) << " instruction: " << DumpInstruction(outer_method, dex_pc); |
| 1268 | |
| 1269 | ArtMethod* caller = outer_method; |
David Srbecky | 93bd361 | 2018-07-02 19:30:18 +0100 | [diff] [blame] | 1270 | BitTableRange<InlineInfo> inline_infos = code_info.GetInlineInfosOf(stack_map); |
| 1271 | for (InlineInfo inline_info : inline_infos) { |
| 1272 | const char* tag = ""; |
| 1273 | dex_pc = inline_info.GetDexPc(); |
| 1274 | if (inline_info.EncodesArtMethod()) { |
| 1275 | tag = "encoded "; |
| 1276 | caller = inline_info.GetArtMethod(); |
| 1277 | } else { |
David Srbecky | 8cd5454 | 2018-07-15 23:58:44 +0100 | [diff] [blame] | 1278 | uint32_t method_index = code_info.GetMethodIndexOf(inline_info); |
David Srbecky | 93bd361 | 2018-07-02 19:30:18 +0100 | [diff] [blame] | 1279 | if (dex_pc == static_cast<uint32_t>(-1)) { |
| 1280 | tag = "special "; |
| 1281 | CHECK(inline_info.Equals(inline_infos.back())); |
| 1282 | caller = jni::DecodeArtMethod(WellKnownClasses::java_lang_String_charAt); |
| 1283 | CHECK_EQ(caller->GetDexMethodIndex(), method_index); |
Vladimir Marko | 5b4b9a0 | 2018-03-16 09:42:09 +0000 | [diff] [blame] | 1284 | } else { |
David Srbecky | 93bd361 | 2018-07-02 19:30:18 +0100 | [diff] [blame] | 1285 | ObjPtr<mirror::DexCache> dex_cache = caller->GetDexCache(); |
| 1286 | ObjPtr<mirror::ClassLoader> class_loader = caller->GetClassLoader(); |
| 1287 | caller = class_linker->LookupResolvedMethod(method_index, dex_cache, class_loader); |
| 1288 | CHECK(caller != nullptr); |
Vladimir Marko | 5b4b9a0 | 2018-03-16 09:42:09 +0000 | [diff] [blame] | 1289 | } |
Vladimir Marko | 5b4b9a0 | 2018-03-16 09:42:09 +0000 | [diff] [blame] | 1290 | } |
David Srbecky | 93bd361 | 2018-07-02 19:30:18 +0100 | [diff] [blame] | 1291 | LOG(FATAL_WITHOUT_ABORT) << "InlineInfo #" << inline_info.Row() |
| 1292 | << ": " << tag << caller->PrettyMethod() |
| 1293 | << " dex pc: " << dex_pc |
| 1294 | << " dex file: " << caller->GetDexFile()->GetLocation() |
| 1295 | << " class table: " |
| 1296 | << class_linker->ClassTableForClassLoader(caller->GetClassLoader()); |
| 1297 | DumpB74410240ClassData(caller->GetDeclaringClass()); |
| 1298 | LOG(FATAL_WITHOUT_ABORT) << " instruction: " << DumpInstruction(caller, dex_pc); |
Vladimir Marko | 5b4b9a0 | 2018-03-16 09:42:09 +0000 | [diff] [blame] | 1299 | } |
| 1300 | } |
| 1301 | |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1302 | // Lazily resolve a method for quick. Called by stub code. |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 1303 | extern "C" const void* artQuickResolutionTrampoline( |
| 1304 | ArtMethod* called, mirror::Object* receiver, Thread* self, ArtMethod** sp) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 1305 | REQUIRES_SHARED(Locks::mutator_lock_) { |
Andreas Gampe | 3b45ef2 | 2015-05-26 21:34:09 -0700 | [diff] [blame] | 1306 | // The resolution trampoline stashes the resolved method into the callee-save frame to transport |
| 1307 | // it. Thus, when exiting, the stack cannot be verified (as the resolved method most likely |
| 1308 | // does not have the same stack layout as the callee-save method). |
| 1309 | ScopedQuickEntrypointChecks sqec(self, kIsDebugBuild, false); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1310 | // Start new JNI local reference state |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1311 | JNIEnvExt* env = self->GetJniEnv(); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1312 | ScopedObjectAccessUnchecked soa(env); |
| 1313 | ScopedJniEnvLocalRefState env_state(env); |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1314 | const char* old_cause = self->StartAssertNoThreadSuspension("Quick method resolution set up"); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1315 | |
| 1316 | // Compute details about the called method (avoid GCs) |
| 1317 | ClassLinker* linker = Runtime::Current()->GetClassLinker(); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1318 | InvokeType invoke_type; |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 1319 | MethodReference called_method(nullptr, 0); |
| 1320 | const bool called_method_known_on_entry = !called->IsRuntimeMethod(); |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 1321 | ArtMethod* caller = nullptr; |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 1322 | if (!called_method_known_on_entry) { |
Nicolas Geoffray | 7ea6a17 | 2015-05-19 18:58:54 +0100 | [diff] [blame] | 1323 | caller = QuickArgumentVisitor::GetCallingMethod(sp); |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 1324 | called_method.dex_file = caller->GetDexFile(); |
Mathieu Chartier | d776ff0 | 2017-01-17 09:32:18 -0800 | [diff] [blame] | 1325 | |
David Srbecky | 2c76257 | 2018-06-27 10:09:11 +0100 | [diff] [blame] | 1326 | { |
Mathieu Chartier | d776ff0 | 2017-01-17 09:32:18 -0800 | [diff] [blame] | 1327 | uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp); |
David Sehr | 0225f8e | 2018-01-31 08:52:24 +0000 | [diff] [blame] | 1328 | CodeItemInstructionAccessor accessor(caller->DexInstructions()); |
Mathieu Chartier | 808c7a5 | 2017-12-15 11:19:33 -0800 | [diff] [blame] | 1329 | CHECK_LT(dex_pc, accessor.InsnsSizeInCodeUnits()); |
| 1330 | const Instruction& instr = accessor.InstructionAt(dex_pc); |
Vladimir Marko | d7559b7 | 2017-09-28 13:50:37 +0100 | [diff] [blame] | 1331 | Instruction::Code instr_code = instr.Opcode(); |
Mathieu Chartier | d776ff0 | 2017-01-17 09:32:18 -0800 | [diff] [blame] | 1332 | bool is_range; |
| 1333 | switch (instr_code) { |
| 1334 | case Instruction::INVOKE_DIRECT: |
| 1335 | invoke_type = kDirect; |
| 1336 | is_range = false; |
| 1337 | break; |
| 1338 | case Instruction::INVOKE_DIRECT_RANGE: |
| 1339 | invoke_type = kDirect; |
| 1340 | is_range = true; |
| 1341 | break; |
| 1342 | case Instruction::INVOKE_STATIC: |
| 1343 | invoke_type = kStatic; |
| 1344 | is_range = false; |
| 1345 | break; |
| 1346 | case Instruction::INVOKE_STATIC_RANGE: |
| 1347 | invoke_type = kStatic; |
| 1348 | is_range = true; |
| 1349 | break; |
| 1350 | case Instruction::INVOKE_SUPER: |
| 1351 | invoke_type = kSuper; |
| 1352 | is_range = false; |
| 1353 | break; |
| 1354 | case Instruction::INVOKE_SUPER_RANGE: |
| 1355 | invoke_type = kSuper; |
| 1356 | is_range = true; |
| 1357 | break; |
| 1358 | case Instruction::INVOKE_VIRTUAL: |
| 1359 | invoke_type = kVirtual; |
| 1360 | is_range = false; |
| 1361 | break; |
| 1362 | case Instruction::INVOKE_VIRTUAL_RANGE: |
| 1363 | invoke_type = kVirtual; |
| 1364 | is_range = true; |
| 1365 | break; |
| 1366 | case Instruction::INVOKE_INTERFACE: |
| 1367 | invoke_type = kInterface; |
| 1368 | is_range = false; |
| 1369 | break; |
| 1370 | case Instruction::INVOKE_INTERFACE_RANGE: |
| 1371 | invoke_type = kInterface; |
| 1372 | is_range = true; |
| 1373 | break; |
| 1374 | default: |
Vladimir Marko | 5b4b9a0 | 2018-03-16 09:42:09 +0000 | [diff] [blame] | 1375 | DumpB74410240DebugData(sp); |
Vladimir Marko | d7559b7 | 2017-09-28 13:50:37 +0100 | [diff] [blame] | 1376 | LOG(FATAL) << "Unexpected call into trampoline: " << instr.DumpString(nullptr); |
Mathieu Chartier | d776ff0 | 2017-01-17 09:32:18 -0800 | [diff] [blame] | 1377 | UNREACHABLE(); |
| 1378 | } |
Vladimir Marko | d7559b7 | 2017-09-28 13:50:37 +0100 | [diff] [blame] | 1379 | called_method.index = (is_range) ? instr.VRegB_3rc() : instr.VRegB_35c(); |
David Srbecky | 2c76257 | 2018-06-27 10:09:11 +0100 | [diff] [blame] | 1380 | VLOG(dex) << "Accessed dex file for invoke " << invoke_type << " " |
| 1381 | << called_method.index; |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1382 | } |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1383 | } else { |
| 1384 | invoke_type = kStatic; |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 1385 | called_method.dex_file = called->GetDexFile(); |
Mathieu Chartier | fc8b422 | 2017-09-17 13:44:24 -0700 | [diff] [blame] | 1386 | called_method.index = called->GetDexMethodIndex(); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1387 | } |
| 1388 | uint32_t shorty_len; |
| 1389 | const char* shorty = |
Mathieu Chartier | fc8b422 | 2017-09-17 13:44:24 -0700 | [diff] [blame] | 1390 | called_method.dex_file->GetMethodShorty(called_method.GetMethodId(), &shorty_len); |
Mathieu Chartier | 590fee9 | 2013-09-13 13:46:47 -0700 | [diff] [blame] | 1391 | RememberForGcArgumentVisitor visitor(sp, invoke_type == kStatic, shorty, shorty_len, &soa); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1392 | visitor.VisitArguments(); |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1393 | self->EndAssertNoThreadSuspension(old_cause); |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 1394 | const bool virtual_or_interface = invoke_type == kVirtual || invoke_type == kInterface; |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1395 | // Resolve method filling in dex cache. |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 1396 | if (!called_method_known_on_entry) { |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1397 | StackHandleScope<1> hs(self); |
Mathieu Chartier | 0cd8135 | 2014-05-22 16:48:55 -0700 | [diff] [blame] | 1398 | mirror::Object* dummy = nullptr; |
| 1399 | HandleWrapper<mirror::Object> h_receiver( |
| 1400 | hs.NewHandleWrapper(virtual_or_interface ? &receiver : &dummy)); |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 1401 | DCHECK_EQ(caller->GetDexFile(), called_method.dex_file); |
Vladimir Marko | ba11882 | 2017-06-12 15:41:56 +0100 | [diff] [blame] | 1402 | called = linker->ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>( |
Mathieu Chartier | fc8b422 | 2017-09-17 13:44:24 -0700 | [diff] [blame] | 1403 | self, called_method.index, caller, invoke_type); |
Vladimir Marko | 0eb882b | 2017-05-15 13:39:18 +0100 | [diff] [blame] | 1404 | |
| 1405 | // Update .bss entry in oat file if any. |
| 1406 | if (called != nullptr && called_method.dex_file->GetOatDexFile() != nullptr) { |
Vladimir Marko | f3c52b4 | 2017-11-17 17:32:12 +0000 | [diff] [blame] | 1407 | size_t bss_offset = IndexBssMappingLookup::GetBssOffset( |
| 1408 | called_method.dex_file->GetOatDexFile()->GetMethodBssMapping(), |
| 1409 | called_method.index, |
| 1410 | called_method.dex_file->NumMethodIds(), |
| 1411 | static_cast<size_t>(kRuntimePointerSize)); |
| 1412 | if (bss_offset != IndexBssMappingLookup::npos) { |
| 1413 | DCHECK_ALIGNED(bss_offset, static_cast<size_t>(kRuntimePointerSize)); |
| 1414 | const OatFile* oat_file = called_method.dex_file->GetOatDexFile()->GetOatFile(); |
| 1415 | ArtMethod** method_entry = reinterpret_cast<ArtMethod**>(const_cast<uint8_t*>( |
| 1416 | oat_file->BssBegin() + bss_offset)); |
| 1417 | DCHECK_GE(method_entry, oat_file->GetBssMethods().data()); |
| 1418 | DCHECK_LT(method_entry, |
| 1419 | oat_file->GetBssMethods().data() + oat_file->GetBssMethods().size()); |
Vladimir Marko | d5fd5c3 | 2019-07-02 14:46:32 +0100 | [diff] [blame] | 1420 | std::atomic<ArtMethod*>* atomic_entry = |
| 1421 | reinterpret_cast<std::atomic<ArtMethod*>*>(method_entry); |
| 1422 | static_assert(sizeof(*method_entry) == sizeof(*atomic_entry), "Size check."); |
| 1423 | atomic_entry->store(called, std::memory_order_release); |
Vladimir Marko | 0eb882b | 2017-05-15 13:39:18 +0100 | [diff] [blame] | 1424 | } |
| 1425 | } |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1426 | } |
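// The release store above pairs with an acquire load (or an address-dependent load) on the
// reader side: a thread that observes a non-null .bss slot must also observe the fully
// resolved ArtMethod it points to. A hedged sketch of the matching reader (illustrative only,
// not this file's code):
//   std::atomic<ArtMethod*>* entry = ...;  // The same .bss slot.
//   ArtMethod* resolved = entry->load(std::memory_order_acquire);
//   if (resolved != nullptr) { /* Safe to use without re-resolving. */ }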
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 1427 | const void* code = nullptr; |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1428 | if (LIKELY(!self->IsExceptionPending())) { |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1429 | // Incompatible class change should have been handled in resolve method. |
Brian Carlstrom | 2ec6520 | 2014-03-03 15:16:37 -0800 | [diff] [blame] | 1430 | CHECK(!called->CheckIncompatibleClassChange(invoke_type)) |
David Sehr | 709b070 | 2016-10-13 09:12:37 -0700 | [diff] [blame] | 1431 | << called->PrettyMethod() << " " << invoke_type; |
Nicolas Geoffray | e523423 | 2015-12-02 09:06:11 +0000 | [diff] [blame] | 1432 | if (virtual_or_interface || invoke_type == kSuper) { |
| 1433 | // Refine called method based on receiver for kVirtual/kInterface, and |
| 1434 | // caller for kSuper. |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 1435 | ArtMethod* orig_called = called; |
Mathieu Chartier | 55871bf | 2014-02-27 10:24:50 -0800 | [diff] [blame] | 1436 | if (invoke_type == kVirtual) { |
Nicolas Geoffray | e523423 | 2015-12-02 09:06:11 +0000 | [diff] [blame] | 1437 | CHECK(receiver != nullptr) << invoke_type; |
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 1438 | called = receiver->GetClass()->FindVirtualMethodForVirtual(called, kRuntimePointerSize); |
Nicolas Geoffray | e523423 | 2015-12-02 09:06:11 +0000 | [diff] [blame] | 1439 | } else if (invoke_type == kInterface) { |
| 1440 | CHECK(receiver != nullptr) << invoke_type; |
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 1441 | called = receiver->GetClass()->FindVirtualMethodForInterface(called, kRuntimePointerSize); |
Nicolas Geoffray | e523423 | 2015-12-02 09:06:11 +0000 | [diff] [blame] | 1442 | } else { |
| 1443 | DCHECK_EQ(invoke_type, kSuper); |
| 1444 | CHECK(caller != nullptr) << invoke_type; |
Vladimir Marko | ba11882 | 2017-06-12 15:41:56 +0100 | [diff] [blame] | 1445 | ObjPtr<mirror::Class> ref_class = linker->LookupResolvedType( |
Vladimir Marko | 666ee3d | 2017-12-11 18:37:36 +0000 | [diff] [blame] | 1446 | caller->GetDexFile()->GetMethodId(called_method.index).class_idx_, caller); |
Alex Light | fedd91d | 2016-01-07 14:49:16 -0800 | [diff] [blame] | 1447 | if (ref_class->IsInterface()) { |
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 1448 | called = ref_class->FindVirtualMethodForInterfaceSuper(called, kRuntimePointerSize); |
Alex Light | fedd91d | 2016-01-07 14:49:16 -0800 | [diff] [blame] | 1449 | } else { |
| 1450 | called = caller->GetDeclaringClass()->GetSuperClass()->GetVTableEntry( |
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 1451 | called->GetMethodIndex(), kRuntimePointerSize); |
Alex Light | fedd91d | 2016-01-07 14:49:16 -0800 | [diff] [blame] | 1452 | } |
Mathieu Chartier | 55871bf | 2014-02-27 10:24:50 -0800 | [diff] [blame] | 1453 | } |
Mingyao Yang | f486778 | 2014-05-05 11:55:02 -0700 | [diff] [blame] | 1454 | |
David Sehr | 709b070 | 2016-10-13 09:12:37 -0700 | [diff] [blame] | 1455 | CHECK(called != nullptr) << orig_called->PrettyMethod() << " " |
| 1456 | << mirror::Object::PrettyTypeOf(receiver) << " " |
Mingyao Yang | f486778 | 2014-05-05 11:55:02 -0700 | [diff] [blame] | 1457 | << invoke_type << " " << orig_called->GetVtableIndex(); |
Ian Rogers | 83883d7 | 2013-10-21 21:07:24 -0700 | [diff] [blame] | 1458 | } |
Daniel Mihalyi | eb07669 | 2014-08-22 17:33:31 +0200 | [diff] [blame] | 1459 | |
Vladimir Marko | 5115a4d | 2019-10-17 14:56:47 +0100 | [diff] [blame] | 1460 | ObjPtr<mirror::Class> called_class = called->GetDeclaringClass(); |
| 1461 | if (NeedsClinitCheckBeforeCall(called) && !called_class->IsVisiblyInitialized()) { |
| 1462 | // Ensure that the called method's class is initialized. |
| 1463 | StackHandleScope<1> hs(soa.Self()); |
| 1464 | HandleWrapperObjPtr<mirror::Class> h_called_class(hs.NewHandleWrapper(&called_class)); |
| 1465 | linker->EnsureInitialized(soa.Self(), h_called_class, true, true); |
| 1466 | } |
Alex Light | 3dacdd6 | 2019-03-12 15:45:47 +0000 | [diff] [blame] | 1467 | bool force_interpreter = self->IsForceInterpreter() && !called->IsNative(); |
Nicolas Geoffray | 623d4f1 | 2019-09-30 13:45:51 +0100 | [diff] [blame] | 1468 | if (called_class->IsInitialized() || called_class->IsInitializing()) { |
Alex Light | fc58809 | 2020-01-23 15:39:08 -0800 | [diff] [blame^] | 1469 | if (UNLIKELY(force_interpreter)) { |
Daniel Mihalyi | eb07669 | 2014-08-22 17:33:31 +0200 | [diff] [blame] | 1470 | // If we are single-stepping or the called method is deoptimized (by a |
| 1471 | // breakpoint, for example), then we have to execute the called method |
| 1472 | // with the interpreter. |
| 1473 | code = GetQuickToInterpreterBridge(); |
Daniel Mihalyi | eb07669 | 2014-08-22 17:33:31 +0200 | [diff] [blame] | 1474 | } else { |
| 1475 | code = called->GetEntryPointFromQuickCompiledCode(); |
Nicolas Geoffray | 623d4f1 | 2019-09-30 13:45:51 +0100 | [diff] [blame] | 1476 | if (linker->IsQuickResolutionStub(code)) { |
| 1477 | DCHECK_EQ(invoke_type, kStatic); |
| 1478 | // Go to JIT or oat and grab code. |
| 1479 | code = linker->GetQuickOatCodeFor(called); |
| 1480 | if (called_class->IsInitialized()) { |
| 1481 | // Only update the entrypoint once the class is initialized. Other |
| 1482 | // threads still need to go through the resolution stub. |
| 1483 | Runtime::Current()->GetInstrumentation()->UpdateMethodsCode(called, code); |
| 1484 | } |
| 1485 | } |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1486 | } |
| 1487 | } else { |
| 1488 | DCHECK(called_class->IsErroneous()); |
| 1489 | } |
| 1490 | } |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 1491 | CHECK_EQ(code == nullptr, self->IsExceptionPending()); |
Mathieu Chartier | 07d447b | 2013-09-26 11:57:43 -0700 | [diff] [blame] | 1492 | // Fix up any locally saved objects that may have moved during a GC.
| 1493 | visitor.FixupReferences(); |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1494 | // Place the called method in the callee-save frame to be passed as the first argument to the quick method.
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 1495 | *sp = called; |
| 1496 | |
Ian Rogers | 848871b | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 1497 | return code; |
| 1498 | } |
| 1499 | |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1500 | /* |
| 1501 | * This class uses a couple of observations to unite the different calling conventions through |
| 1502 | * a few constants. |
| 1503 | * |
| 1504 | * 1) Number of registers used for passing is normally even, so counting down has no penalty for |
| 1505 | * possible alignment. |
| 1506 | * 2) Known 64b architectures store 8B units on the stack, both for integral and floating point |
| 1507 | * types, so using uintptr_t is OK. This also means that we can use kRegistersNeededX to denote
| 1508 | * when we have to split things.
| 1509 | * 3) The only soft-float, Arm, is 32b, so no widening needs to be taken into account for floats |
| 1510 | * and we can use Int handling directly. |
| 1511 | * 4) Only 64b architectures widen, and their stack is aligned 8B anyway, so no padding code is
| 1512 | * necessary when widening. Also, widening of Ints will take place implicitly, and the |
| 1513 | * extension should be compatible with Aarch64, which mandates copying the available bits |
| 1514 | * into LSB and leaving the rest unspecified. |
| 1515 | * 5) Aligning longs and doubles is necessary on arm only, and it's the same in registers and on |
| 1516 | * the stack. |
| 1517 | * 6) There is only little endian. |
| 1518 | * |
| 1519 | * |
| 1520 | * Actual work is supposed to be done in a delegate of the template type. The interface is as |
| 1521 | * follows: |
| 1522 | * |
| 1523 | * void PushGpr(uintptr_t): Add a value for the next GPR |
| 1524 | * |
| 1525 | * void PushFpr4(float): Add a value for the next FPR of size 32b. Is only called if we need |
| 1526 | * padding, that is, when the architecture is 32b and aligns 64b values.
| 1527 | * |
| 1528 | * void PushFpr8(uint64_t): Push a double. We _will_ call this on 32b; it's the callee's job to
| 1529 | * split this if necessary. The state machine will already have aligned, if
| 1530 | * necessary. |
| 1531 | * |
| 1532 | * void PushStack(uintptr_t): Push a value to the stack. |
| 1533 | * |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1534 | * uintptr_t PushHandleScope(mirror::Object* ref): Add a reference to the HandleScope. This _will_ be called with nullptr,
Andreas Gampe | 36fea8d | 2014-03-10 13:37:40 -0700 | [diff] [blame] | 1535 | * as correct handling of null might be important.
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1536 | * Must return the jobject, that is, the reference to the |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1537 | * entry in the HandleScope (nullptr if necessary). |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1538 | * |
| 1539 | */ |
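
// A minimal, hypothetical delegate satisfying the interface sketched above (for illustration
// only; the real delegates appear later in this file). It merely counts how many values would
// land in GPRs, in FPRs, and on the stack, and echoes references so that null round-trips.
class CountingNativeCallFrameDelegate {
 public:
  void PushGpr(uintptr_t /* val */) { gprs_++; }
  void PushFpr4(float /* val */) { fprs_++; }
  void PushFpr8(uint64_t /* val */) { fprs_++; }
  void PushStack(uintptr_t /* val */) { stack_slots_++; }
  uintptr_t PushHandleScope(mirror::Object* ref) REQUIRES_SHARED(Locks::mutator_lock_) {
    // A real delegate returns the address of the HandleScope entry holding 'ref' (or nullptr);
    // this sketch just passes the reference through.
    return reinterpret_cast<uintptr_t>(ref);
  }

 private:
  size_t gprs_ = 0;
  size_t fprs_ = 0;
  size_t stack_slots_ = 0;
};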
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1540 | template<class T> class BuildNativeCallFrameStateMachine { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1541 | public: |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1542 | #if defined(__arm__) |
| 1543 | // TODO: These are all dummy values! |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1544 | static constexpr bool kNativeSoftFloatAbi = true; |
| 1545 | static constexpr size_t kNumNativeGprArgs = 4; // 4 arguments passed in GPRs, r0-r3 |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1546 | static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs. |
| 1547 | |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1548 | static constexpr size_t kRegistersNeededForLong = 2; |
| 1549 | static constexpr size_t kRegistersNeededForDouble = 2; |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1550 | static constexpr bool kMultiRegistersAligned = true; |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1551 | static constexpr bool kMultiFPRegistersWidened = false; |
| 1552 | static constexpr bool kMultiGPRegistersWidened = false; |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1553 | static constexpr bool kAlignLongOnStack = true; |
| 1554 | static constexpr bool kAlignDoubleOnStack = true; |
Stuart Monteith | b95a534 | 2014-03-12 13:32:32 +0000 | [diff] [blame] | 1555 | #elif defined(__aarch64__) |
| 1556 | static constexpr bool kNativeSoftFloatAbi = false; // This is a hard float ABI. |
| 1557 | static constexpr size_t kNumNativeGprArgs = 8; // 8 arguments passed in GPRs (x0-x7).
| 1558 | static constexpr size_t kNumNativeFprArgs = 8; // 8 arguments passed in FPRs. |
| 1559 | |
| 1560 | static constexpr size_t kRegistersNeededForLong = 1; |
| 1561 | static constexpr size_t kRegistersNeededForDouble = 1; |
| 1562 | static constexpr bool kMultiRegistersAligned = false; |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1563 | static constexpr bool kMultiFPRegistersWidened = false; |
| 1564 | static constexpr bool kMultiGPRegistersWidened = false; |
Stuart Monteith | b95a534 | 2014-03-12 13:32:32 +0000 | [diff] [blame] | 1565 | static constexpr bool kAlignLongOnStack = false; |
| 1566 | static constexpr bool kAlignDoubleOnStack = false; |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1567 | #elif defined(__mips__) && !defined(__LP64__) |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1568 |   static constexpr bool kNativeSoftFloatAbi = true;  // Arguments are passed as for a soft float ABI. |
Douglas Leung | 735b855 | 2014-10-31 12:21:40 -0700 | [diff] [blame] | 1569 | static constexpr size_t kNumNativeGprArgs = 4; // 4 arguments passed in GPRs. |
| 1570 | static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs. |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1571 | |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1572 | static constexpr size_t kRegistersNeededForLong = 2; |
| 1573 | static constexpr size_t kRegistersNeededForDouble = 2; |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1574 | static constexpr bool kMultiRegistersAligned = true; |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1575 | static constexpr bool kMultiFPRegistersWidened = true; |
| 1576 | static constexpr bool kMultiGPRegistersWidened = false; |
Douglas Leung | 735b855 | 2014-10-31 12:21:40 -0700 | [diff] [blame] | 1577 | static constexpr bool kAlignLongOnStack = true; |
| 1578 | static constexpr bool kAlignDoubleOnStack = true; |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1579 | #elif defined(__mips__) && defined(__LP64__) |
| 1580 |   // Let the code prepare GPRs only; the assembly stub will load the FPRs with the same data. |
| 1581 | static constexpr bool kNativeSoftFloatAbi = true; |
| 1582 | static constexpr size_t kNumNativeGprArgs = 8; |
| 1583 | static constexpr size_t kNumNativeFprArgs = 0; |
| 1584 | |
| 1585 | static constexpr size_t kRegistersNeededForLong = 1; |
| 1586 | static constexpr size_t kRegistersNeededForDouble = 1; |
| 1587 | static constexpr bool kMultiRegistersAligned = false; |
| 1588 | static constexpr bool kMultiFPRegistersWidened = false; |
| 1589 | static constexpr bool kMultiGPRegistersWidened = true; |
| 1590 | static constexpr bool kAlignLongOnStack = false; |
| 1591 | static constexpr bool kAlignDoubleOnStack = false; |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1592 | #elif defined(__i386__) |
| 1593 | // TODO: Check these! |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1594 | static constexpr bool kNativeSoftFloatAbi = false; // Not using int registers for fp |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1595 |   static constexpr size_t kNumNativeGprArgs = 0;  // 0 arguments passed in GPRs; all go on the stack. |
| 1596 |   static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs. |
| 1597 | |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1598 | static constexpr size_t kRegistersNeededForLong = 2; |
| 1599 | static constexpr size_t kRegistersNeededForDouble = 2; |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1600 |   static constexpr bool kMultiRegistersAligned = false;  // x86 passes no arguments in registers anyway. |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1601 | static constexpr bool kMultiFPRegistersWidened = false; |
| 1602 | static constexpr bool kMultiGPRegistersWidened = false; |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1603 | static constexpr bool kAlignLongOnStack = false; |
| 1604 | static constexpr bool kAlignDoubleOnStack = false; |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1605 | #elif defined(__x86_64__) |
| 1606 | static constexpr bool kNativeSoftFloatAbi = false; // This is a hard float ABI. |
| 1607 | static constexpr size_t kNumNativeGprArgs = 6; // 6 arguments passed in GPRs. |
| 1608 | static constexpr size_t kNumNativeFprArgs = 8; // 8 arguments passed in FPRs. |
| 1609 | |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1610 | static constexpr size_t kRegistersNeededForLong = 1; |
| 1611 | static constexpr size_t kRegistersNeededForDouble = 1; |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1612 | static constexpr bool kMultiRegistersAligned = false; |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1613 | static constexpr bool kMultiFPRegistersWidened = false; |
| 1614 | static constexpr bool kMultiGPRegistersWidened = false; |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1615 | static constexpr bool kAlignLongOnStack = false; |
| 1616 | static constexpr bool kAlignDoubleOnStack = false; |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1617 | #else |
| 1618 | #error "Unsupported architecture" |
| 1619 | #endif |
| 1620 | |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1621 | public: |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1622 | explicit BuildNativeCallFrameStateMachine(T* delegate) |
| 1623 | : gpr_index_(kNumNativeGprArgs), |
| 1624 | fpr_index_(kNumNativeFprArgs), |
| 1625 | stack_entries_(0), |
| 1626 | delegate_(delegate) { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1627 |     // For register alignment we rely on the counters (gpr_index_, fpr_index_) being even iff the |
| 1628 |     // next register is even; since they count down from an even total (asserted below), this holds. |
Andreas Gampe | 575e78c | 2014-11-03 23:41:03 -0800 | [diff] [blame] | 1629 | static_assert(kNumNativeGprArgs % 2 == 0U, "Number of native GPR arguments not even"); |
| 1630 | static_assert(kNumNativeFprArgs % 2 == 0U, "Number of native FPR arguments not even"); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1631 | } |
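| | // Worked example of the parity invariant (an illustrative trace; RecordingDelegate is the |
| | // hypothetical delegate sketched above), using the arm configuration where kNumNativeGprArgs |
| | // is 4 and longs need an aligned register pair: |
| | // |
| | //   BuildNativeCallFrameStateMachine<RecordingDelegate> sm(&delegate); |
| | //   // gpr_index_ == 4 (even), next register r0 (even). |
| | //   sm.AdvanceInt(1);    // Uses r0; gpr_index_ == 3 (odd), next register r1 (odd). |
| | //   sm.AdvanceLong(2);   // LongGprNeedsPadding(): burns r1 as padding, long lands in r2/r3. |
| | //   sm.AdvanceInt(3);    // gpr_index_ == 0 now, so this spills to the stack. |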
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1632 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1633 | virtual ~BuildNativeCallFrameStateMachine() {} |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1634 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1635 | bool HavePointerGpr() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1636 | return gpr_index_ > 0; |
| 1637 | } |
| 1638 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1639 | void AdvancePointer(const void* val) { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1640 | if (HavePointerGpr()) { |
| 1641 | gpr_index_--; |
| 1642 | PushGpr(reinterpret_cast<uintptr_t>(val)); |
| 1643 | } else { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1644 | stack_entries_++; // TODO: have a field for pointer length as multiple of 32b |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1645 | PushStack(reinterpret_cast<uintptr_t>(val)); |
| 1646 | gpr_index_ = 0; |
| 1647 | } |
| 1648 | } |
| 1649 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1650 | bool HaveHandleScopeGpr() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1651 | return gpr_index_ > 0; |
| 1652 | } |
| 1653 | |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 1654 | void AdvanceHandleScope(mirror::Object* ptr) REQUIRES_SHARED(Locks::mutator_lock_) { |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1655 | uintptr_t handle = PushHandle(ptr); |
| 1656 | if (HaveHandleScopeGpr()) { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1657 | gpr_index_--; |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1658 | PushGpr(handle); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1659 | } else { |
| 1660 | stack_entries_++; |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1661 | PushStack(handle); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1662 | gpr_index_ = 0; |
| 1663 | } |
| 1664 | } |
| 1665 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1666 | bool HaveIntGpr() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1667 | return gpr_index_ > 0; |
| 1668 | } |
| 1669 | |
| 1670 | void AdvanceInt(uint32_t val) { |
| 1671 | if (HaveIntGpr()) { |
| 1672 | gpr_index_--; |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1673 | if (kMultiGPRegistersWidened) { |
| 1674 | DCHECK_EQ(sizeof(uintptr_t), sizeof(int64_t)); |
Roland Levillain | da4d79b | 2015-03-24 14:36:11 +0000 | [diff] [blame] | 1675 | PushGpr(static_cast<int64_t>(bit_cast<int32_t, uint32_t>(val))); |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1676 | } else { |
| 1677 | PushGpr(val); |
| 1678 | } |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1679 | } else { |
| 1680 | stack_entries_++; |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1681 | if (kMultiGPRegistersWidened) { |
| 1682 | DCHECK_EQ(sizeof(uintptr_t), sizeof(int64_t)); |
Roland Levillain | da4d79b | 2015-03-24 14:36:11 +0000 | [diff] [blame] | 1683 | PushStack(static_cast<int64_t>(bit_cast<int32_t, uint32_t>(val))); |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1684 | } else { |
| 1685 | PushStack(val); |
| 1686 | } |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1687 | gpr_index_ = 0; |
| 1688 | } |
| 1689 | } |
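| | // Illustrative example of the widening above: with kMultiGPRegistersWidened (mips64), a jint |
| | // is sign-extended into the full 64-bit register: |
| | // |
| | //   uint32_t val = 0xFFFFFFFFu;  // jint -1. |
| | //   uint64_t widened = static_cast<int64_t>(bit_cast<int32_t, uint32_t>(val)); |
| | //   // widened == 0xFFFFFFFFFFFFFFFF; without widening (e.g. x86-64) the value is pushed |
| | //   // as-is and the upper half of the register is simply zero from the uintptr_t conversion. |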
| 1690 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1691 | bool HaveLongGpr() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1692 | return gpr_index_ >= kRegistersNeededForLong + (LongGprNeedsPadding() ? 1 : 0); |
| 1693 | } |
| 1694 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1695 | bool LongGprNeedsPadding() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1696 | return kRegistersNeededForLong > 1 && // only pad when using multiple registers |
| 1697 | kAlignLongOnStack && // and when it needs alignment |
| 1698 | (gpr_index_ & 1) == 1; // counter is odd, see constructor |
| 1699 | } |
| 1700 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1701 | bool LongStackNeedsPadding() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1702 | return kRegistersNeededForLong > 1 && // only pad when using multiple registers |
| 1703 | kAlignLongOnStack && // and when it needs 8B alignment |
| 1704 | (stack_entries_ & 1) == 1; // counter is odd |
| 1705 | } |
| 1706 | |
| 1707 | void AdvanceLong(uint64_t val) { |
| 1708 | if (HaveLongGpr()) { |
| 1709 | if (LongGprNeedsPadding()) { |
| 1710 | PushGpr(0); |
| 1711 | gpr_index_--; |
| 1712 | } |
| 1713 | if (kRegistersNeededForLong == 1) { |
| 1714 | PushGpr(static_cast<uintptr_t>(val)); |
| 1715 | } else { |
| 1716 | PushGpr(static_cast<uintptr_t>(val & 0xFFFFFFFF)); |
| 1717 | PushGpr(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF)); |
| 1718 | } |
| 1719 | gpr_index_ -= kRegistersNeededForLong; |
| 1720 | } else { |
| 1721 | if (LongStackNeedsPadding()) { |
| 1722 | PushStack(0); |
| 1723 | stack_entries_++; |
| 1724 | } |
| 1725 | if (kRegistersNeededForLong == 1) { |
| 1726 | PushStack(static_cast<uintptr_t>(val)); |
| 1727 | stack_entries_++; |
| 1728 | } else { |
| 1729 | PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF)); |
| 1730 | PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF)); |
| 1731 | stack_entries_ += 2; |
| 1732 | } |
| 1733 | gpr_index_ = 0; |
| 1734 | } |
| 1735 | } |
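| | // Illustrative trace of the 32-bit split path above: once all GPRs are used, the long |
| | // 0x11223344AABBCCDD becomes two stack words, low half first (there is only little endian, |
| | // see the notes at the top): |
| | // |
| | //   PushStack(0xAABBCCDD);  // val & 0xFFFFFFFF |
| | //   PushStack(0x11223344);  // (val >> 32) & 0xFFFFFFFF |
| | //   // Preceded by one PushStack(0) padding word if stack_entries_ was odd (arm). |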
| 1736 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1737 | bool HaveFloatFpr() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1738 | return fpr_index_ > 0; |
| 1739 | } |
| 1740 | |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1741 | void AdvanceFloat(float val) { |
| 1742 | if (kNativeSoftFloatAbi) { |
Roland Levillain | da4d79b | 2015-03-24 14:36:11 +0000 | [diff] [blame] | 1743 | AdvanceInt(bit_cast<uint32_t, float>(val)); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1744 | } else { |
| 1745 | if (HaveFloatFpr()) { |
| 1746 | fpr_index_--; |
| 1747 | if (kRegistersNeededForDouble == 1) { |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1748 | if (kMultiFPRegistersWidened) { |
Roland Levillain | da4d79b | 2015-03-24 14:36:11 +0000 | [diff] [blame] | 1749 | PushFpr8(bit_cast<uint64_t, double>(val)); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1750 | } else { |
| 1751 | // No widening, just use the bits. |
Roland Levillain | da4d79b | 2015-03-24 14:36:11 +0000 | [diff] [blame] | 1752 | PushFpr8(static_cast<uint64_t>(bit_cast<uint32_t, float>(val))); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1753 | } |
| 1754 | } else { |
| 1755 | PushFpr4(val); |
| 1756 | } |
| 1757 | } else { |
| 1758 | stack_entries_++; |
Andreas Gampe | 1a5c406 | 2015-01-15 12:10:47 -0800 | [diff] [blame] | 1759 | if (kRegistersNeededForDouble == 1 && kMultiFPRegistersWidened) { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1760 | // Need to widen before storing: Note the "double" in the template instantiation. |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1761 | // Note: We need to jump through those hoops to make the compiler happy. |
| 1762 | DCHECK_EQ(sizeof(uintptr_t), sizeof(uint64_t)); |
Roland Levillain | da4d79b | 2015-03-24 14:36:11 +0000 | [diff] [blame] | 1763 | PushStack(static_cast<uintptr_t>(bit_cast<uint64_t, double>(val))); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1764 | } else { |
Roland Levillain | da4d79b | 2015-03-24 14:36:11 +0000 | [diff] [blame] | 1765 | PushStack(static_cast<uintptr_t>(bit_cast<uint32_t, float>(val))); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1766 | } |
| 1767 | fpr_index_ = 0; |
| 1768 | } |
| 1769 | } |
| 1770 | } |
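| | // Illustrative summary of the three paths above for a single float argument 1.0f |
| | // (bit pattern 0x3F800000): |
| | // |
| | //   // Soft-float ABI: reuse the integer path. |
| | //   AdvanceInt(bit_cast<uint32_t, float>(1.0f));                        // 0x3F800000. |
| | //   // Hard-float, 64-bit FPRs, kMultiFPRegistersWidened: store as a double. |
| | //   PushFpr8(bit_cast<uint64_t, double>(1.0));                          // 0x3FF0000000000000. |
| | //   // Hard-float, 64-bit FPRs, no widening (aarch64, x86-64): keep the 32-bit pattern. |
| | //   PushFpr8(static_cast<uint64_t>(bit_cast<uint32_t, float>(1.0f)));   // 0x000000003F800000. |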
| 1771 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1772 | bool HaveDoubleFpr() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1773 | return fpr_index_ >= kRegistersNeededForDouble + (DoubleFprNeedsPadding() ? 1 : 0); |
| 1774 | } |
| 1775 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1776 | bool DoubleFprNeedsPadding() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1777 | return kRegistersNeededForDouble > 1 && // only pad when using multiple registers |
| 1778 | kAlignDoubleOnStack && // and when it needs alignment |
| 1779 | (fpr_index_ & 1) == 1; // counter is odd, see constructor |
| 1780 | } |
| 1781 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1782 | bool DoubleStackNeedsPadding() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1783 | return kRegistersNeededForDouble > 1 && // only pad when using multiple registers |
| 1784 | kAlignDoubleOnStack && // and when it needs 8B alignment |
| 1785 | (stack_entries_ & 1) == 1; // counter is odd |
| 1786 | } |
| 1787 | |
| 1788 | void AdvanceDouble(uint64_t val) { |
| 1789 | if (kNativeSoftFloatAbi) { |
| 1790 | AdvanceLong(val); |
| 1791 | } else { |
| 1792 | if (HaveDoubleFpr()) { |
| 1793 | if (DoubleFprNeedsPadding()) { |
| 1794 | PushFpr4(0); |
| 1795 | fpr_index_--; |
| 1796 | } |
| 1797 | PushFpr8(val); |
| 1798 | fpr_index_ -= kRegistersNeededForDouble; |
| 1799 | } else { |
| 1800 | if (DoubleStackNeedsPadding()) { |
| 1801 | PushStack(0); |
| 1802 | stack_entries_++; |
| 1803 | } |
| 1804 | if (kRegistersNeededForDouble == 1) { |
| 1805 | PushStack(static_cast<uintptr_t>(val)); |
| 1806 | stack_entries_++; |
| 1807 | } else { |
| 1808 | PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF)); |
| 1809 | PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF)); |
| 1810 | stack_entries_ += 2; |
| 1811 | } |
| 1812 | fpr_index_ = 0; |
| 1813 | } |
| 1814 | } |
| 1815 | } |
| 1816 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1817 | uint32_t GetStackEntries() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1818 | return stack_entries_; |
| 1819 | } |
| 1820 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1821 | uint32_t GetNumberOfUsedGprs() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1822 | return kNumNativeGprArgs - gpr_index_; |
| 1823 | } |
| 1824 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1825 | uint32_t GetNumberOfUsedFprs() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1826 | return kNumNativeFprArgs - fpr_index_; |
| 1827 | } |
| 1828 | |
| 1829 | private: |
| 1830 | void PushGpr(uintptr_t val) { |
| 1831 | delegate_->PushGpr(val); |
| 1832 | } |
| 1833 | void PushFpr4(float val) { |
| 1834 | delegate_->PushFpr4(val); |
| 1835 | } |
| 1836 | void PushFpr8(uint64_t val) { |
| 1837 | delegate_->PushFpr8(val); |
| 1838 | } |
| 1839 | void PushStack(uintptr_t val) { |
| 1840 | delegate_->PushStack(val); |
| 1841 | } |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 1842 | uintptr_t PushHandle(mirror::Object* ref) REQUIRES_SHARED(Locks::mutator_lock_) { |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1843 | return delegate_->PushHandle(ref); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1844 | } |
| 1845 | |
| 1846 | uint32_t gpr_index_; // Number of free GPRs |
| 1847 | uint32_t fpr_index_; // Number of free FPRs |
| 1848 | uint32_t stack_entries_; // Stack entries are in multiples of 32b, as floats are usually not |
| 1849 | // extended |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1850 |   T* const delegate_;      // Which Push implementation gets called. |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1851 | }; |
| 1852 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1853 | // Computes the sizes of register stacks and call stack area. Handling of references can be extended |
| 1854 | // in subclasses. |
| 1855 | // |
| 1856 | // To handle native pointers, use "L" in the shorty for an object reference, which simulates |
| 1857 | // them with handles. |
| 1858 | class ComputeNativeCallFrameSize { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1859 | public: |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1860 | ComputeNativeCallFrameSize() : num_stack_entries_(0) {} |
| 1861 | |
| 1862 | virtual ~ComputeNativeCallFrameSize() {} |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1863 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1864 | uint32_t GetStackSize() const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1865 | return num_stack_entries_ * sizeof(uintptr_t); |
| 1866 | } |
| 1867 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1868 | uint8_t* LayoutCallStack(uint8_t* sp8) const { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1869 | sp8 -= GetStackSize(); |
Andreas Gampe | 779f8c9 | 2014-06-09 18:29:38 -0700 | [diff] [blame] | 1870 | // Align by kStackAlignment. |
| 1871 | sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment)); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1872 | return sp8; |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1873 | } |
| 1874 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1875 | uint8_t* LayoutCallRegisterStacks(uint8_t* sp8, uintptr_t** start_gpr, uint32_t** start_fpr) |
| 1876 | const { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1877 |     // Assumption (pointer-sized slots per FPR arg) is OK right now, as soft-float arm has no FPR args. |
| 1878 | size_t fregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeFprArgs; |
| 1879 | sp8 -= fregs * sizeof(uintptr_t); |
| 1880 | *start_fpr = reinterpret_cast<uint32_t*>(sp8); |
| 1881 | size_t iregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeGprArgs; |
| 1882 | sp8 -= iregs * sizeof(uintptr_t); |
| 1883 | *start_gpr = reinterpret_cast<uintptr_t*>(sp8); |
| 1884 | return sp8; |
| 1885 | } |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1886 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1887 | uint8_t* LayoutNativeCall(uint8_t* sp8, uintptr_t** start_stack, uintptr_t** start_gpr, |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1888 | uint32_t** start_fpr) const { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1889 | // Native call stack. |
| 1890 | sp8 = LayoutCallStack(sp8); |
| 1891 | *start_stack = reinterpret_cast<uintptr_t*>(sp8); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1892 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1893 | // Put fprs and gprs below. |
| 1894 | sp8 = LayoutCallRegisterStacks(sp8, start_gpr, start_fpr); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1895 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1896 | // Return the new bottom. |
| 1897 | return sp8; |
| 1898 | } |
| 1899 | |
Roland Levillain | 4b8f1ec | 2015-08-26 18:34:03 +0100 | [diff] [blame] | 1900 | virtual void WalkHeader( |
| 1901 | BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm ATTRIBUTE_UNUSED) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 1902 | REQUIRES_SHARED(Locks::mutator_lock_) { |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 1903 | } |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1904 | |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 1905 | void Walk(const char* shorty, uint32_t shorty_len) REQUIRES_SHARED(Locks::mutator_lock_) { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1906 | BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize> sm(this); |
| 1907 | |
| 1908 | WalkHeader(&sm); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1909 | |
| 1910 | for (uint32_t i = 1; i < shorty_len; ++i) { |
| 1911 | Primitive::Type cur_type_ = Primitive::GetType(shorty[i]); |
| 1912 | switch (cur_type_) { |
| 1913 | case Primitive::kPrimNot: |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1914 | // TODO: fix abuse of mirror types. |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1915 | sm.AdvanceHandleScope( |
| 1916 | reinterpret_cast<mirror::Object*>(0x12345678)); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1917 | break; |
| 1918 | |
| 1919 | case Primitive::kPrimBoolean: |
| 1920 | case Primitive::kPrimByte: |
| 1921 | case Primitive::kPrimChar: |
| 1922 | case Primitive::kPrimShort: |
| 1923 | case Primitive::kPrimInt: |
| 1924 | sm.AdvanceInt(0); |
| 1925 | break; |
| 1926 | case Primitive::kPrimFloat: |
| 1927 | sm.AdvanceFloat(0); |
| 1928 | break; |
| 1929 | case Primitive::kPrimDouble: |
| 1930 | sm.AdvanceDouble(0); |
| 1931 | break; |
| 1932 | case Primitive::kPrimLong: |
| 1933 | sm.AdvanceLong(0); |
| 1934 | break; |
| 1935 | default: |
| 1936 | LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty; |
Ian Rogers | e0a02da | 2014-12-02 14:10:53 -0800 | [diff] [blame] | 1937 | UNREACHABLE(); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1938 | } |
| 1939 | } |
| 1940 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 1941 | num_stack_entries_ = sm.GetStackEntries(); |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1942 | } |
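| | // Illustrative example: for a method long f(int, Object, double), the shorty is "JILD" (the |
| | // return type comes first, which is why the loop starts at i = 1), so Walk("JILD", 4) does: |
| | // |
| | //   sm.AdvanceInt(0);                  // i == 1, 'I'. |
| | //   sm.AdvanceHandleScope(<dummy>);    // i == 2, 'L'. |
| | //   sm.AdvanceDouble(0);               // i == 3, 'D'. |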
| 1943 | |
| 1944 | void PushGpr(uintptr_t /* val */) { |
| 1945 | // not optimizing registers, yet |
| 1946 | } |
| 1947 | |
| 1948 | void PushFpr4(float /* val */) { |
| 1949 | // not optimizing registers, yet |
| 1950 | } |
| 1951 | |
| 1952 | void PushFpr8(uint64_t /* val */) { |
| 1953 | // not optimizing registers, yet |
| 1954 | } |
| 1955 | |
| 1956 | void PushStack(uintptr_t /* val */) { |
| 1957 | // counting is already done in the superclass |
| 1958 | } |
| 1959 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1960 | virtual uintptr_t PushHandle(mirror::Object* /* ptr */) { |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1961 | return reinterpret_cast<uintptr_t>(nullptr); |
| 1962 | } |
| 1963 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1964 | protected: |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1965 | uint32_t num_stack_entries_; |
| 1966 | }; |
| 1967 | |
Roland Levillain | bbc6e7e | 2018-08-24 16:58:47 +0100 | [diff] [blame] | 1968 | class ComputeGenericJniFrameSize final : public ComputeNativeCallFrameSize { |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1969 | public: |
Igor Murashkin | 06a04e0 | 2016-09-13 15:57:37 -0700 | [diff] [blame] | 1970 | explicit ComputeGenericJniFrameSize(bool critical_native) |
| 1971 | : num_handle_scope_references_(0), critical_native_(critical_native) {} |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1972 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1973 |   // Lays out the callee-save frame. Assumes that the not-yet-fixed-up frame corresponding to |
| 1974 |   // RefsAndArgs is at *m = sp. Will update *m to point to the bottom of the save frame. |
| 1975 |   // |
| 1976 |   // Note: assumes Walk() has been run before, as it computes num_handle_scope_references_. |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 1977 | void LayoutCalleeSaveFrame(Thread* self, ArtMethod*** m, void* sp, HandleScope** handle_scope) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 1978 | REQUIRES_SHARED(Locks::mutator_lock_) { |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 1979 | ArtMethod* method = **m; |
| 1980 | |
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 1981 | DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), kRuntimePointerSize); |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1982 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1983 | uint8_t* sp8 = reinterpret_cast<uint8_t*>(sp); |
| 1984 | |
| 1985 | // First, fix up the layout of the callee-save frame. |
| 1986 | // We have to squeeze in the HandleScope, and relocate the method pointer. |
| 1987 | |
| 1988 | // "Free" the slot for the method. |
Ian Rogers | 1373595 | 2014-10-08 12:43:28 -0700 | [diff] [blame] | 1989 | sp8 += sizeof(void*); // In the callee-save frame we use a full pointer. |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1990 | |
| 1991 | // Under the callee saves put handle scope and new method stack reference. |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1992 | size_t handle_scope_size = HandleScope::SizeOf(num_handle_scope_references_); |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 1993 | size_t scope_and_method = handle_scope_size + sizeof(ArtMethod*); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1994 | |
| 1995 | sp8 -= scope_and_method; |
| 1996 | // Align by kStackAlignment. |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 1997 | sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment)); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1998 | |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 1999 | uint8_t* sp8_table = sp8 + sizeof(ArtMethod*); |
Ian Rogers | 59c0706 | 2014-10-10 13:03:39 -0700 | [diff] [blame] | 2000 | *handle_scope = HandleScope::Create(sp8_table, self->GetTopHandleScope(), |
| 2001 | num_handle_scope_references_); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2002 | |
| 2003 | // Add a slot for the method pointer, and fill it. Fix the pointer-pointer given to us. |
| 2004 | uint8_t* method_pointer = sp8; |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 2005 | auto** new_method_ref = reinterpret_cast<ArtMethod**>(method_pointer); |
| 2006 | *new_method_ref = method; |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2007 | *m = new_method_ref; |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 2008 | } |
| 2009 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2010 | // Adds space for the cookie. Note: may leave stack unaligned. |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 2011 | void LayoutCookie(uint8_t** sp) const { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2012 | // Reference cookie and padding |
| 2013 | *sp -= 8; |
Mathieu Chartier | 0cd8135 | 2014-05-22 16:48:55 -0700 | [diff] [blame] | 2014 | } |
| 2015 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2016 | // Re-layout the callee-save frame (insert a handle-scope). Then add space for the cookie. |
| 2017 | // Returns the new bottom. Note: this may be unaligned. |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 2018 | uint8_t* LayoutJNISaveFrame(Thread* self, ArtMethod*** m, void* sp, HandleScope** handle_scope) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 2019 | REQUIRES_SHARED(Locks::mutator_lock_) { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2020 | // First, fix up the layout of the callee-save frame. |
| 2021 | // We have to squeeze in the HandleScope, and relocate the method pointer. |
Ian Rogers | 59c0706 | 2014-10-10 13:03:39 -0700 | [diff] [blame] | 2022 | LayoutCalleeSaveFrame(self, m, sp, handle_scope); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2023 | |
| 2024 | // The bottom of the callee-save frame is now where the method is, *m. |
| 2025 | uint8_t* sp8 = reinterpret_cast<uint8_t*>(*m); |
| 2026 | |
| 2027 | // Add space for cookie. |
| 2028 | LayoutCookie(&sp8); |
| 2029 | |
| 2030 | return sp8; |
| 2031 | } |
| 2032 | |
| 2033 | // WARNING: After this, *sp won't be pointing to the method anymore! |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 2034 | uint8_t* ComputeLayout(Thread* self, ArtMethod*** m, const char* shorty, uint32_t shorty_len, |
| 2035 | HandleScope** handle_scope, uintptr_t** start_stack, uintptr_t** start_gpr, |
| 2036 | uint32_t** start_fpr) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 2037 | REQUIRES_SHARED(Locks::mutator_lock_) { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2038 | Walk(shorty, shorty_len); |
| 2039 | |
| 2040 | // JNI part. |
Ian Rogers | 59c0706 | 2014-10-10 13:03:39 -0700 | [diff] [blame] | 2041 | uint8_t* sp8 = LayoutJNISaveFrame(self, m, reinterpret_cast<void*>(*m), handle_scope); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2042 | |
| 2043 | sp8 = LayoutNativeCall(sp8, start_stack, start_gpr, start_fpr); |
| 2044 | |
| 2045 | // Return the new bottom. |
| 2046 | return sp8; |
| 2047 | } |
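| | // The resulting layout, growing towards lower addresses (a sketch; exact padding depends on |
| | // kStackAlignment and the architecture): |
| | // |
| | //   [ callee-save frame (RefsAndArgs) ]  <- original sp |
| | //   [ HandleScope                     ] |
| | //   [ relocated ArtMethod* reference  ]  <- *m after the call |
| | //   [ JNI cookie + padding (8 bytes)  ] |
| | //   [ native stack arguments          ]  <- start_stack (kStackAlignment-aligned) |
| | //   [ FPR staging area                ]  <- start_fpr |
| | //   [ GPR staging area                ]  <- start_gpr, the returned bottom |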
| 2048 | |
Roland Levillain | bbc6e7e | 2018-08-24 16:58:47 +0100 | [diff] [blame] | 2049 | uintptr_t PushHandle(mirror::Object* /* ptr */) override; |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2050 | |
| 2051 | // Add JNIEnv* and jobj/jclass before the shorty-derived elements. |
Roland Levillain | bbc6e7e | 2018-08-24 16:58:47 +0100 | [diff] [blame] | 2052 | void WalkHeader(BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) override |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 2053 | REQUIRES_SHARED(Locks::mutator_lock_); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2054 | |
| 2055 | private: |
| 2056 | uint32_t num_handle_scope_references_; |
Igor Murashkin | 06a04e0 | 2016-09-13 15:57:37 -0700 | [diff] [blame] | 2057 | const bool critical_native_; |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2058 | }; |
| 2059 | |
| 2060 | uintptr_t ComputeGenericJniFrameSize::PushHandle(mirror::Object* /* ptr */) { |
| 2061 | num_handle_scope_references_++; |
| 2062 | return reinterpret_cast<uintptr_t>(nullptr); |
| 2063 | } |
| 2064 | |
| 2065 | void ComputeGenericJniFrameSize::WalkHeader( |
| 2066 | BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) { |
Igor Murashkin | 06a04e0 | 2016-09-13 15:57:37 -0700 | [diff] [blame] | 2067 | // First 2 parameters are always excluded for @CriticalNative. |
| 2068 | if (UNLIKELY(critical_native_)) { |
| 2069 | return; |
| 2070 | } |
| 2071 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2072 | // JNIEnv |
| 2073 | sm->AdvancePointer(nullptr); |
| 2074 | |
| 2075 | // Class object or this as first argument |
| 2076 | sm->AdvanceHandleScope(reinterpret_cast<mirror::Object*>(0x12345678)); |
| 2077 | } |
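| | // For example (illustrative; env, klass and v are placeholders), a static native method |
| | // int f(int) with shorty "II" yields the following native argument sequences: |
| | // |
| | //   // Normal and @FastNative: (JNIEnv*, jclass, jint). |
| | //   sm->AdvancePointer(env); sm->AdvanceHandleScope(klass); sm->AdvanceInt(v); |
| | //   // @CriticalNative: (jint) only, no JNIEnv* and no jclass. |
| | //   sm->AdvanceInt(v); |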
| 2078 | |
| 2079 | // Class to push values to three separate regions. Used to fill the native call part. Adheres to |
| 2080 | // the template requirements of BuildNativeCallFrameStateMachine. |
| 2081 | class FillNativeCall { |
| 2082 | public: |
| 2083 | FillNativeCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) : |
| 2084 | cur_gpr_reg_(gpr_regs), cur_fpr_reg_(fpr_regs), cur_stack_arg_(stack_args) {} |
| 2085 | |
| 2086 | virtual ~FillNativeCall() {} |
| 2087 | |
| 2088 | void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) { |
| 2089 | cur_gpr_reg_ = gpr_regs; |
| 2090 | cur_fpr_reg_ = fpr_regs; |
| 2091 | cur_stack_arg_ = stack_args; |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 2092 | } |
| 2093 | |
| 2094 | void PushGpr(uintptr_t val) { |
| 2095 | *cur_gpr_reg_ = val; |
| 2096 | cur_gpr_reg_++; |
| 2097 | } |
| 2098 | |
| 2099 | void PushFpr4(float val) { |
| 2100 | *cur_fpr_reg_ = val; |
| 2101 | cur_fpr_reg_++; |
| 2102 | } |
| 2103 | |
| 2104 | void PushFpr8(uint64_t val) { |
| 2105 | uint64_t* tmp = reinterpret_cast<uint64_t*>(cur_fpr_reg_); |
| 2106 | *tmp = val; |
| 2107 | cur_fpr_reg_ += 2; |
| 2108 | } |
| 2109 | |
| 2110 | void PushStack(uintptr_t val) { |
| 2111 | *cur_stack_arg_ = val; |
| 2112 | cur_stack_arg_++; |
| 2113 | } |
| 2114 | |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 2115 | virtual uintptr_t PushHandle(mirror::Object*) REQUIRES_SHARED(Locks::mutator_lock_) { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2116 | LOG(FATAL) << "(Non-JNI) Native call does not use handles."; |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 2117 | UNREACHABLE(); |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 2118 | } |
| 2119 | |
| 2120 | private: |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 2121 | uintptr_t* cur_gpr_reg_; |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 2122 | uint32_t* cur_fpr_reg_; |
| 2123 | uintptr_t* cur_stack_arg_; |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2124 | }; |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 2125 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2126 | // Visits arguments on the stack placing them into a region lower down the stack for the benefit |
| 2127 | // of transitioning into native code. |
Roland Levillain | bbc6e7e | 2018-08-24 16:58:47 +0100 | [diff] [blame] | 2128 | class BuildGenericJniFrameVisitor final : public QuickArgumentVisitor { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2129 | public: |
Igor Murashkin | 06a04e0 | 2016-09-13 15:57:37 -0700 | [diff] [blame] | 2130 | BuildGenericJniFrameVisitor(Thread* self, |
| 2131 | bool is_static, |
| 2132 | bool critical_native, |
| 2133 | const char* shorty, |
| 2134 | uint32_t shorty_len, |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 2135 | ArtMethod*** sp) |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2136 | : QuickArgumentVisitor(*sp, is_static, shorty, shorty_len), |
Igor Murashkin | 06a04e0 | 2016-09-13 15:57:37 -0700 | [diff] [blame] | 2137 | jni_call_(nullptr, nullptr, nullptr, nullptr, critical_native), |
| 2138 | sm_(&jni_call_) { |
| 2139 | ComputeGenericJniFrameSize fsc(critical_native); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2140 | uintptr_t* start_gpr_reg; |
| 2141 | uint32_t* start_fpr_reg; |
| 2142 | uintptr_t* start_stack_arg; |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 2143 | bottom_of_used_area_ = fsc.ComputeLayout(self, sp, shorty, shorty_len, |
Ian Rogers | 59c0706 | 2014-10-10 13:03:39 -0700 | [diff] [blame] | 2144 | &handle_scope_, |
| 2145 | &start_stack_arg, |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2146 | &start_gpr_reg, &start_fpr_reg); |
| 2147 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2148 | jni_call_.Reset(start_gpr_reg, start_fpr_reg, start_stack_arg, handle_scope_); |
| 2149 | |
Igor Murashkin | 06a04e0 | 2016-09-13 15:57:37 -0700 | [diff] [blame] | 2150 | // First 2 parameters are always excluded for CriticalNative methods. |
| 2151 | if (LIKELY(!critical_native)) { |
| 2152 |       // The JNIEnv* is always the first argument. |
| 2153 | sm_.AdvancePointer(self->GetJniEnv()); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2154 | |
Igor Murashkin | 06a04e0 | 2016-09-13 15:57:37 -0700 | [diff] [blame] | 2155 | if (is_static) { |
Vladimir Marko | d93e374 | 2018-07-18 10:58:13 +0100 | [diff] [blame] | 2156 | sm_.AdvanceHandleScope((**sp)->GetDeclaringClass().Ptr()); |
Igor Murashkin | 06a04e0 | 2016-09-13 15:57:37 -0700 | [diff] [blame] | 2157 | } // else "this" reference is already handled by QuickArgumentVisitor. |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2158 | } |
| 2159 | } |
| 2160 | |
Roland Levillain | bbc6e7e | 2018-08-24 16:58:47 +0100 | [diff] [blame] | 2161 | void Visit() REQUIRES_SHARED(Locks::mutator_lock_) override; |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2162 | |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 2163 | void FinalizeHandleScope(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2164 | |
Vladimir Marko | f39745e | 2016-01-26 12:16:55 +0000 | [diff] [blame] | 2165 | StackReference<mirror::Object>* GetFirstHandleScopeEntry() { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2166 | return handle_scope_->GetHandle(0).GetReference(); |
| 2167 | } |
| 2168 | |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 2169 | jobject GetFirstHandleScopeJObject() const REQUIRES_SHARED(Locks::mutator_lock_) { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2170 | return handle_scope_->GetHandle(0).ToJObject(); |
| 2171 | } |
| 2172 | |
Ian Rogers | 1428dce | 2014-10-21 15:02:15 -0700 | [diff] [blame] | 2173 | void* GetBottomOfUsedArea() const { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2174 | return bottom_of_used_area_; |
| 2175 | } |
| 2176 | |
| 2177 | private: |
| 2178 | // A class to fill a JNI call. Adds reference/handle-scope management to FillNativeCall. |
Roland Levillain | bbc6e7e | 2018-08-24 16:58:47 +0100 | [diff] [blame] | 2179 | class FillJniCall final : public FillNativeCall { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2180 | public: |
| 2181 | FillJniCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args, |
Igor Murashkin | 06a04e0 | 2016-09-13 15:57:37 -0700 | [diff] [blame] | 2182 | HandleScope* handle_scope, bool critical_native) |
| 2183 | : FillNativeCall(gpr_regs, fpr_regs, stack_args), |
| 2184 | handle_scope_(handle_scope), |
| 2185 | cur_entry_(0), |
| 2186 | critical_native_(critical_native) {} |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2187 | |
Roland Levillain | bbc6e7e | 2018-08-24 16:58:47 +0100 | [diff] [blame] | 2188 | uintptr_t PushHandle(mirror::Object* ref) override REQUIRES_SHARED(Locks::mutator_lock_); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2189 | |
| 2190 | void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args, HandleScope* scope) { |
| 2191 | FillNativeCall::Reset(gpr_regs, fpr_regs, stack_args); |
| 2192 | handle_scope_ = scope; |
| 2193 | cur_entry_ = 0U; |
| 2194 | } |
| 2195 | |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 2196 | void ResetRemainingScopeSlots() REQUIRES_SHARED(Locks::mutator_lock_) { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2197 | // Initialize padding entries. |
| 2198 | size_t expected_slots = handle_scope_->NumberOfReferences(); |
| 2199 | while (cur_entry_ < expected_slots) { |
Andreas Gampe | 5a4b8a2 | 2014-09-11 08:30:08 -0700 | [diff] [blame] | 2200 | handle_scope_->GetMutableHandle(cur_entry_++).Assign(nullptr); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2201 | } |
Igor Murashkin | 06a04e0 | 2016-09-13 15:57:37 -0700 | [diff] [blame] | 2202 | |
| 2203 | if (!critical_native_) { |
| 2204 | // Non-critical natives have at least the self class (jclass) or this (jobject). |
| 2205 | DCHECK_NE(cur_entry_, 0U); |
| 2206 | } |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2207 | } |
| 2208 | |
Mathieu Chartier | 1432a5b | 2016-10-04 15:41:42 -0700 | [diff] [blame] | 2209 | bool CriticalNative() const { |
| 2210 | return critical_native_; |
| 2211 | } |
| 2212 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2213 | private: |
| 2214 | HandleScope* handle_scope_; |
| 2215 | size_t cur_entry_; |
Igor Murashkin | 06a04e0 | 2016-09-13 15:57:37 -0700 | [diff] [blame] | 2216 | const bool critical_native_; |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2217 | }; |
| 2218 | |
| 2219 | HandleScope* handle_scope_; |
| 2220 | FillJniCall jni_call_; |
| 2221 | void* bottom_of_used_area_; |
| 2222 | |
| 2223 | BuildNativeCallFrameStateMachine<FillJniCall> sm_; |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 2224 | |
| 2225 | DISALLOW_COPY_AND_ASSIGN(BuildGenericJniFrameVisitor); |
| 2226 | }; |
| 2227 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2228 | uintptr_t BuildGenericJniFrameVisitor::FillJniCall::PushHandle(mirror::Object* ref) { |
| 2229 | uintptr_t tmp; |
Andreas Gampe | 5a4b8a2 | 2014-09-11 08:30:08 -0700 | [diff] [blame] | 2230 | MutableHandle<mirror::Object> h = handle_scope_->GetMutableHandle(cur_entry_); |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2231 | h.Assign(ref); |
| 2232 | tmp = reinterpret_cast<uintptr_t>(h.ToJObject()); |
| 2233 | cur_entry_++; |
| 2234 | return tmp; |
| 2235 | } |
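| | // Thus the native code never receives the raw Object*, but a pointer to the HandleScope slot |
| | // holding it (i.e. a jobject). Illustrative use, with jni_call as a FillJniCall instance: |
| | // |
| | //   mirror::Object* obj = ...;                    // Raw reference from the managed frame. |
| | //   uintptr_t arg = jni_call.PushHandle(obj);     // arg points into the HandleScope. |
| | //   // A moving GC can update the slot in place; the native callee only sees the handle. |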
| 2236 | |
Ian Rogers | 9758f79 | 2014-03-13 09:02:55 -0700 | [diff] [blame] | 2237 | void BuildGenericJniFrameVisitor::Visit() { |
| 2238 | Primitive::Type type = GetParamPrimitiveType(); |
| 2239 | switch (type) { |
| 2240 | case Primitive::kPrimLong: { |
| 2241 | jlong long_arg; |
| 2242 | if (IsSplitLongOrDouble()) { |
| 2243 | long_arg = ReadSplitLongParam(); |
| 2244 | } else { |
| 2245 | long_arg = *reinterpret_cast<jlong*>(GetParamAddress()); |
| 2246 | } |
| 2247 | sm_.AdvanceLong(long_arg); |
| 2248 | break; |
| 2249 | } |
| 2250 | case Primitive::kPrimDouble: { |
| 2251 | uint64_t double_arg; |
| 2252 | if (IsSplitLongOrDouble()) { |
| 2253 |         // Read into a uint64_t so that we don't cast to a double. |
| 2254 | double_arg = ReadSplitLongParam(); |
| 2255 | } else { |
| 2256 | double_arg = *reinterpret_cast<uint64_t*>(GetParamAddress()); |
| 2257 | } |
| 2258 | sm_.AdvanceDouble(double_arg); |
| 2259 | break; |
| 2260 | } |
| 2261 | case Primitive::kPrimNot: { |
| 2262 | StackReference<mirror::Object>* stack_ref = |
| 2263 | reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress()); |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 2264 | sm_.AdvanceHandleScope(stack_ref->AsMirrorPtr()); |
Ian Rogers | 9758f79 | 2014-03-13 09:02:55 -0700 | [diff] [blame] | 2265 | break; |
| 2266 | } |
| 2267 | case Primitive::kPrimFloat: |
| 2268 | sm_.AdvanceFloat(*reinterpret_cast<float*>(GetParamAddress())); |
| 2269 | break; |
| 2270 | case Primitive::kPrimBoolean: // Fall-through. |
| 2271 | case Primitive::kPrimByte: // Fall-through. |
| 2272 | case Primitive::kPrimChar: // Fall-through. |
| 2273 | case Primitive::kPrimShort: // Fall-through. |
| 2274 | case Primitive::kPrimInt: // Fall-through. |
| 2275 | sm_.AdvanceInt(*reinterpret_cast<jint*>(GetParamAddress())); |
| 2276 | break; |
| 2277 | case Primitive::kPrimVoid: |
| 2278 | LOG(FATAL) << "UNREACHABLE"; |
Ian Rogers | 2c4257b | 2014-10-24 14:20:06 -0700 | [diff] [blame] | 2279 | UNREACHABLE(); |
Ian Rogers | 9758f79 | 2014-03-13 09:02:55 -0700 | [diff] [blame] | 2280 | } |
| 2281 | } |
| 2282 | |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 2283 | void BuildGenericJniFrameVisitor::FinalizeHandleScope(Thread* self) { |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2284 | // Clear out rest of the scope. |
| 2285 | jni_call_.ResetRemainingScopeSlots(); |
Mathieu Chartier | 1432a5b | 2016-10-04 15:41:42 -0700 | [diff] [blame] | 2286 | if (!jni_call_.CriticalNative()) { |
| 2287 | // Install HandleScope. |
| 2288 | self->PushHandleScope(handle_scope_); |
| 2289 | } |
Ian Rogers | 9758f79 | 2014-03-13 09:02:55 -0700 | [diff] [blame] | 2290 | } |
| 2291 | |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 2292 | /* |
| 2293 | * Initializes an alloca region assumed to be directly below sp for a native call: |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 2294 | * Create a HandleScope and call stack and fill a mini stack with values to be pushed to registers. |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 2295 | * The final element on the stack is a pointer to the native code. |
| 2296 | * |
Andreas Gampe | 36fea8d | 2014-03-10 13:37:40 -0700 | [diff] [blame] | 2297 | * On entry, the stack has a standard callee-save frame above sp, and an alloca below it. |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 2298 | * We need to fix this, as the handle scope needs to go into the callee-save frame. |
Andreas Gampe | 36fea8d | 2014-03-10 13:37:40 -0700 | [diff] [blame] | 2299 | * |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 2300 |  * The return of this function is a two-word value denoting: |
| 2301 |  * 1) On success, the bottom of the used alloca area (so the caller knows how many bytes of the |
| 2302 |  * alloca can be released) and the native code to call. |
| |  * 2) On error (pending exception), the two-word failure value. |
| 2303 | */ |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 2304 | extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self, ArtMethod** sp) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 2305 | REQUIRES_SHARED(Locks::mutator_lock_) { |
Vladimir Marko | b0a6aee | 2017-10-27 10:34:04 +0100 | [diff] [blame] | 2306 | // Note: We cannot walk the stack properly until fixed up below. |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 2307 | ArtMethod* called = *sp; |
David Sehr | 709b070 | 2016-10-13 09:12:37 -0700 | [diff] [blame] | 2308 | DCHECK(called->IsNative()) << called->PrettyMethod(true); |
Vladimir Marko | 2196c65 | 2017-11-30 16:16:07 +0000 | [diff] [blame] | 2309 | Runtime* runtime = Runtime::Current(); |
Mathieu Chartier | bfd9a43 | 2014-05-21 17:43:44 -0700 | [diff] [blame] | 2310 | uint32_t shorty_len = 0; |
| 2311 | const char* shorty = called->GetShorty(&shorty_len); |
Vladimir Marko | b0a6aee | 2017-10-27 10:34:04 +0100 | [diff] [blame] | 2312 | bool critical_native = called->IsCriticalNative(); |
| 2313 | bool fast_native = called->IsFastNative(); |
Igor Murashkin | 06a04e0 | 2016-09-13 15:57:37 -0700 | [diff] [blame] | 2314 | bool normal_native = !critical_native && !fast_native; |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 2315 | |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 2316 | // Run the visitor and update sp. |
Igor Murashkin | 06a04e0 | 2016-09-13 15:57:37 -0700 | [diff] [blame] | 2317 | BuildGenericJniFrameVisitor visitor(self, |
| 2318 | called->IsStatic(), |
| 2319 | critical_native, |
| 2320 | shorty, |
| 2321 | shorty_len, |
| 2322 | &sp); |
Mathieu Chartier | be08cf5 | 2016-09-13 13:41:24 -0700 | [diff] [blame] | 2323 | { |
| 2324 | ScopedAssertNoThreadSuspension sants(__FUNCTION__); |
| 2325 | visitor.VisitArguments(); |
| 2326 | // FinalizeHandleScope pushes the handle scope on the thread. |
| 2327 | visitor.FinalizeHandleScope(self); |
| 2328 | } |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 2329 | |
Vladimir Marko | b0a6aee | 2017-10-27 10:34:04 +0100 | [diff] [blame] | 2330 | // Fix up managed-stack things in Thread. After this we can walk the stack. |
Vladimir Marko | 2196c65 | 2017-11-30 16:16:07 +0000 | [diff] [blame] | 2331 | self->SetTopOfStackTagged(sp); |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 2332 | |
Ian Rogers | e0dcd46 | 2014-03-08 15:21:04 -0800 | [diff] [blame] | 2333 | self->VerifyStack(); |
| 2334 | |
Vladimir Marko | f8655b3 | 2018-03-21 17:53:56 +0000 | [diff] [blame] | 2335 | // We can now walk the stack if needed by JIT GC from MethodEntered() for JIT-on-first-use. |
| 2336 | jit::Jit* jit = runtime->GetJit(); |
| 2337 | if (jit != nullptr) { |
| 2338 | jit->MethodEntered(self, called); |
| 2339 | } |
| 2340 | |
Nicolas Geoffray | 5a0b672 | 2019-09-24 15:09:40 +0100 | [diff] [blame] | 2341 | // We can set the entrypoint of a native method to generic JNI even when the |
| 2342 | // class hasn't been initialized, so we need to do the initialization check |
| 2343 | // before invoking the native code. |
Vladimir Marko | 5115a4d | 2019-10-17 14:56:47 +0100 | [diff] [blame] | 2344 | if (NeedsClinitCheckBeforeCall(called)) { |
| 2345 | ObjPtr<mirror::Class> declaring_class = called->GetDeclaringClass(); |
| 2346 | if (UNLIKELY(!declaring_class->IsVisiblyInitialized())) { |
| 2347 | // Ensure static method's class is initialized. |
| 2348 | StackHandleScope<1> hs(self); |
| 2349 | Handle<mirror::Class> h_class(hs.NewHandle(declaring_class)); |
Vladimir Marko | 7dac864 | 2019-11-06 17:09:30 +0000 | [diff] [blame] | 2350 | if (!runtime->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) { |
Vladimir Marko | 5115a4d | 2019-10-17 14:56:47 +0100 | [diff] [blame] | 2351 | DCHECK(Thread::Current()->IsExceptionPending()) << called->PrettyMethod(); |
| 2352 | self->PopHandleScope(); |
| 2353 |         // The failure value denotes a pending exception. |
| 2354 | return GetTwoWordFailureValue(); |
| 2355 | } |
Nicolas Geoffray | 5a0b672 | 2019-09-24 15:09:40 +0100 | [diff] [blame] | 2356 | } |
| 2357 | } |
| 2358 | |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 2359 | uint32_t cookie; |
Igor Murashkin | 06a04e0 | 2016-09-13 15:57:37 -0700 | [diff] [blame] | 2360 | uint32_t* sp32; |
| 2361 | // Skip calling JniMethodStart for @CriticalNative. |
| 2362 | if (LIKELY(!critical_native)) { |
| 2363 | // Start JNI, save the cookie. |
| 2364 | if (called->IsSynchronized()) { |
| 2365 |       DCHECK(normal_native) << " @FastNative with synchronized is not supported"; |
| 2366 | cookie = JniMethodStartSynchronized(visitor.GetFirstHandleScopeJObject(), self); |
| 2367 | if (self->IsExceptionPending()) { |
| 2368 | self->PopHandleScope(); |
| 2369 |         // The failure value denotes a pending exception. |
| 2370 | return GetTwoWordFailureValue(); |
| 2371 | } |
| 2372 | } else { |
| 2373 | if (fast_native) { |
| 2374 | cookie = JniMethodFastStart(self); |
| 2375 | } else { |
| 2376 | DCHECK(normal_native); |
| 2377 | cookie = JniMethodStart(self); |
| 2378 | } |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 2379 | } |
Igor Murashkin | 06a04e0 | 2016-09-13 15:57:37 -0700 | [diff] [blame] | 2380 | sp32 = reinterpret_cast<uint32_t*>(sp); |
| 2381 | *(sp32 - 1) = cookie; |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 2382 | } |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 2383 | |
Andreas Gampe | 9054683 | 2014-03-12 18:07:19 -0700 | [diff] [blame] | 2384 | // Retrieve the stored native code. |
Vladimir Marko | 08d0984 | 2019-12-02 12:38:49 +0000 | [diff] [blame] | 2385 | // Note that it may point to the lookup stub or trampoline. |
| 2386 | // FIXME: This is broken for @CriticalNative as the art_jni_dlsym_lookup_stub |
| 2387 | // does not handle that case. Calls from compiled stubs are also broken. |
Alex Light | d78ddec | 2017-04-18 15:20:38 -0700 | [diff] [blame] | 2388 | void const* nativeCode = called->GetEntryPointFromJni(); |
Andreas Gampe | 9054683 | 2014-03-12 18:07:19 -0700 | [diff] [blame] | 2389 | |
Alexey Frunze | 1b8464d | 2016-11-12 17:22:05 -0800 | [diff] [blame] | 2390 | #if defined(__mips__) && !defined(__LP64__) |
| 2391 | // On MIPS32 if the first two arguments are floating-point, we need to know their types |
| 2392 | // so that art_quick_generic_jni_trampoline can correctly extract them from the stack |
| 2393 | // and load into floating-point registers. |
| 2394 | // Possible arrangements of first two floating-point arguments on the stack (32-bit FPU |
| 2395 | // view): |
| 2396 | // (1) |
| 2397 | // | DOUBLE | DOUBLE | other args, if any |
| 2398 | // | F12 | F13 | F14 | F15 | |
| 2399 | // | SP+0 | SP+4 | SP+8 | SP+12 | SP+16 |
| 2400 | // (2) |
| 2401 | // | DOUBLE | FLOAT | (PAD) | other args, if any |
| 2402 | // | F12 | F13 | F14 | | |
| 2403 | // | SP+0 | SP+4 | SP+8 | SP+12 | SP+16 |
| 2404 | // (3) |
| 2405 | // | FLOAT | (PAD) | DOUBLE | other args, if any |
| 2406 | // | F12 | | F14 | F15 | |
| 2407 | // | SP+0 | SP+4 | SP+8 | SP+12 | SP+16 |
| 2408 | // (4) |
| 2409 | // | FLOAT | FLOAT | other args, if any |
| 2410 | // | F12 | F14 | |
| 2411 | // | SP+0 | SP+4 | SP+8 |
| 2412 | // As you can see, only the last case (4) is special. In all others we can just |
| 2413 | // load F12/F13 and F14/F15 in the same manner. |
| 2414 | // Set bit 0 of the native code address to 1 in this case (valid code addresses |
| 2415 | // are always a multiple of 4 on MIPS32, so we have 2 spare bits available). |
| 2416 | if (nativeCode != nullptr && |
| 2417 | shorty != nullptr && |
| 2418 | shorty_len >= 3 && |
| 2419 | shorty[1] == 'F' && |
| 2420 | shorty[2] == 'F') { |
| 2421 | nativeCode = reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(nativeCode) | 1); |
| 2422 | } |
| 2423 | #endif |
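| | // Conceptually (a sketch; the actual decoding is done in assembly by |
| | // art_quick_generic_jni_trampoline), the stub undoes this tagging as: |
| | // |
| | //   uintptr_t code = reinterpret_cast<uintptr_t>(nativeCode); |
| | //   bool two_floats = (code & 1u) != 0;   // Case (4): two FLOATs, load F12 and F14 singly. |
| | //   code &= ~static_cast<uintptr_t>(1u);  // Recover the real entrypoint before calling. |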

  VLOG(third_party_jni) << "GenericJNI: "
                        << called->PrettyMethod()
                        << " -> "
                        << std::hex << reinterpret_cast<uintptr_t>(nativeCode);

  // Return the native code address (lo) and the bottom of the alloca'd argument area (hi).
  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(visitor.GetBottomOfUsedArea()),
                                reinterpret_cast<uintptr_t>(nativeCode));
}

// Defined in quick_jni_entrypoints.cc.
extern uint64_t GenericJniMethodEnd(Thread* self, uint32_t saved_local_ref_cookie,
                                    jvalue result, uint64_t result_f, ArtMethod* called,
                                    HandleScope* handle_scope);
/*
 * Called after the native JNI code returns. Responsible for cleanup (handle scope, saved state)
 * and unlocking.
 */
extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self,
                                                    jvalue result,
                                                    uint64_t result_f) {
  // We're here just back from a native call and do not hold the shared mutator lock yet; it is
  // only reacquired by GoToRunnable(), called later in GenericJniMethodEnd(). Accessing objects
  // or doing anything else that requires the mutator lock before then would cause problems, as
  // the GC may hold the exclusive mutator lock and may be moving objects.
  ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrame();
  DCHECK(self->GetManagedStack()->GetTopQuickFrameTag());
  uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
  ArtMethod* called = *sp;
  uint32_t cookie = *(sp32 - 1);
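  // Descriptive note: the layout here mirrors what the start trampoline set up. On a 64-bit
  // target (assumed for the offsets):
  //   sp32 - 1 -> saved JNI local reference cookie (32-bit slot below the frame)
  //   sp       -> ArtMethod* called
  //   sp + 8   -> HandleScope holding the handlized reference arguments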
  HandleScope* table = reinterpret_cast<HandleScope*>(reinterpret_cast<uint8_t*>(sp) + sizeof(*sp));
  return GenericJniMethodEnd(self, cookie, result, result_f, called, table);
}

// We use TwoWordReturn to optimize scalar returns. We use the hi value for code, and the lo value
// for the method pointer.
//
// It is valid to use this, as at the usage points here (returns from C functions) we are assumed
// to hold the mutator lock (see REQUIRES_SHARED(Locks::mutator_lock_) annotations).
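//
// Illustrative use (editorial sketch, matching the returns below): the assembly stub installs
// the lo word in the managed-ABI method register and branches to the hi word:
//   return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),    // hi: branch target
//                                 reinterpret_cast<uintptr_t>(method)); // lo: ArtMethod*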

template <InvokeType type, bool access_check>
static TwoWordReturn artInvokeCommon(uint32_t method_idx,
                                     ObjPtr<mirror::Object> this_object,
                                     Thread* self,
                                     ArtMethod** sp) {
  ScopedQuickEntrypointChecks sqec(self);
  DCHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));
  ArtMethod* caller_method = QuickArgumentVisitor::GetCallingMethod(sp);
  ArtMethod* method = FindMethodFast<type, access_check>(method_idx, this_object, caller_method);
  if (UNLIKELY(method == nullptr)) {
    const DexFile* dex_file = caller_method->GetDexFile();
    uint32_t shorty_len;
    const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(method_idx), &shorty_len);
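    // Descriptive note: the scoped block below keeps the caller's reference arguments valid
    // across a possible collection. The visitor handlizes them before the slow-path lookup and
    // FixupReferences() writes any moved pointers back into the quick frame afterwards.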
    {
      // Remember the args in case a GC happens in FindMethodFromCode.
      ScopedObjectAccessUnchecked soa(self->GetJniEnv());
      RememberForGcArgumentVisitor visitor(sp, type == kStatic, shorty, shorty_len, &soa);
      visitor.VisitArguments();
      method = FindMethodFromCode<type, access_check>(method_idx,
                                                      &this_object,
                                                      caller_method,
                                                      self);
      visitor.FixupReferences();
    }

    if (UNLIKELY(method == nullptr)) {
      CHECK(self->IsExceptionPending());
      return GetTwoWordFailureValue();  // Failure.
    }
  }
  DCHECK(!self->IsExceptionPending());
  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was null in method: " << method->PrettyMethod()
                          << " location: "
                          << method->GetDexFile()->GetLocation();

  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
                                reinterpret_cast<uintptr_t>(method));
}

// Explicit artInvokeCommon template function declarations to please analysis tool.
#define EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(type, access_check)                          \
  template REQUIRES_SHARED(Locks::mutator_lock_)                                          \
  TwoWordReturn artInvokeCommon<type, access_check>(                                      \
      uint32_t method_idx, ObjPtr<mirror::Object> this_object, Thread* self, ArtMethod** sp)

EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, true);
#undef EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL

// See comments in runtime_support_asm.S
extern "C" TwoWordReturn artInvokeInterfaceTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return artInvokeCommon<kInterface, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeDirectTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return artInvokeCommon<kDirect, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeStaticTrampolineWithAccessCheck(
    uint32_t method_idx,
    mirror::Object* this_object ATTRIBUTE_UNUSED,
    Thread* self,
    ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
  // For statics, this_object is not required and may be random garbage; don't pass it down, so
  // that it cannot trip the ObjPtr alignment check.
  return artInvokeCommon<kStatic, true>(method_idx, nullptr, self, sp);
}

extern "C" TwoWordReturn artInvokeSuperTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return artInvokeCommon<kSuper, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeVirtualTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return artInvokeCommon<kVirtual, true>(method_idx, this_object, self, sp);
}

// Helper function for art_quick_imt_conflict_trampoline to look up the interface method.
extern "C" ArtMethod* artLookupResolvedMethod(uint32_t method_index, ArtMethod* referrer)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  DCHECK(!referrer->IsProxyMethod());
  ArtMethod* result = Runtime::Current()->GetClassLinker()->LookupResolvedMethod(
      method_index, referrer->GetDexCache(), referrer->GetClassLoader());
  DCHECK(result == nullptr ||
         result->GetDeclaringClass()->IsInterface() ||
         result->GetDeclaringClass() ==
             WellKnownClasses::ToClass(WellKnownClasses::java_lang_Object))
      << result->PrettyMethod();
  return result;
}

// Determine target of interface dispatch. The interface method and this object are known non-null.
// The interface method is the method returned by the dex cache in the conflict trampoline.
extern "C" TwoWordReturn artInvokeInterfaceTrampoline(ArtMethod* interface_method,
                                                      mirror::Object* raw_this_object,
                                                      Thread* self,
                                                      ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedQuickEntrypointChecks sqec(self);
  StackHandleScope<2> hs(self);
  Handle<mirror::Object> this_object = hs.NewHandle(raw_this_object);
  Handle<mirror::Class> cls = hs.NewHandle(this_object->GetClass());

  ArtMethod* caller_method = QuickArgumentVisitor::GetCallingMethod(sp);
  ArtMethod* method = nullptr;
  ImTable* imt = cls->GetImt(kRuntimePointerSize);

  if (UNLIKELY(interface_method == nullptr)) {
    // The interface method is unresolved, so resolve it in the dex file of the caller.
    // Fetch the dex_method_idx of the target interface method from the caller.
    uint32_t dex_method_idx;
    uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
    const Instruction& instr = caller_method->DexInstructions().InstructionAt(dex_pc);
    Instruction::Code instr_code = instr.Opcode();
    DCHECK(instr_code == Instruction::INVOKE_INTERFACE ||
           instr_code == Instruction::INVOKE_INTERFACE_RANGE)
        << "Unexpected call into interface trampoline: " << instr.DumpString(nullptr);
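    // Descriptive note: in the 35c encoding, e.g.
    //   invoke-interface {v0}, Ljava/lang/Runnable;.run:()V
    // the method index lives in VRegB_35c(); the /range (3rc) form carries it in VRegB_3rc().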
    if (instr_code == Instruction::INVOKE_INTERFACE) {
      dex_method_idx = instr.VRegB_35c();
    } else {
      DCHECK_EQ(instr_code, Instruction::INVOKE_INTERFACE_RANGE);
      dex_method_idx = instr.VRegB_3rc();
    }

    const DexFile& dex_file = *caller_method->GetDexFile();
    uint32_t shorty_len;
    const char* shorty = dex_file.GetMethodShorty(dex_file.GetMethodId(dex_method_idx),
                                                  &shorty_len);
    {
      // Remember the args in case a GC happens in ClassLinker::ResolveMethod().
      ScopedObjectAccessUnchecked soa(self->GetJniEnv());
      RememberForGcArgumentVisitor visitor(sp, false, shorty, shorty_len, &soa);
      visitor.VisitArguments();
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      interface_method = class_linker->ResolveMethod<ClassLinker::ResolveMode::kNoChecks>(
          self, dex_method_idx, caller_method, kInterface);
      visitor.FixupReferences();
    }

    if (UNLIKELY(interface_method == nullptr)) {
      CHECK(self->IsExceptionPending());
      return GetTwoWordFailureValue();  // Failure.
    }
  }

  DCHECK(!interface_method->IsRuntimeMethod());
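  // Descriptive note: the IMT has a fixed number of slots, so unrelated interface methods can
  // hash to the same index. Such a slot holds a runtime "conflict" method whose ImtConflictTable
  // maps each interface method to its implementation, conceptually
  //   { (A.foo -> X.foo), (B.bar -> X.bar) }.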
  // Check whether we have a match in the ImtConflictTable.
  uint32_t imt_index = interface_method->GetImtIndex();
  ArtMethod* conflict_method = imt->Get(imt_index, kRuntimePointerSize);
  if (LIKELY(conflict_method->IsRuntimeMethod())) {
    ImtConflictTable* current_table = conflict_method->GetImtConflictTable(kRuntimePointerSize);
    DCHECK(current_table != nullptr);
    method = current_table->Lookup(interface_method, kRuntimePointerSize);
  } else {
    // It seems we aren't really a conflict method!
    if (kIsDebugBuild) {
      ArtMethod* m = cls->FindVirtualMethodForInterface(interface_method, kRuntimePointerSize);
      CHECK_EQ(conflict_method, m)
          << interface_method->PrettyMethod() << " / " << conflict_method->PrettyMethod()
          << " / " << ArtMethod::PrettyMethod(m) << " / " << cls->PrettyClass();
    }
    method = conflict_method;
  }
  if (method != nullptr) {
    return GetTwoWordSuccessValue(
        reinterpret_cast<uintptr_t>(method->GetEntryPointFromQuickCompiledCode()),
        reinterpret_cast<uintptr_t>(method));
  }

  // No match, use the IfTable.
  method = cls->FindVirtualMethodForInterface(interface_method, kRuntimePointerSize);
  if (UNLIKELY(method == nullptr)) {
    ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(
        interface_method, this_object.Get(), caller_method);
    return GetTwoWordFailureValue();  // Failure.
  }

  // We arrive here if we have found an implementation, and it is not in the ImtConflictTable.
  // We create a new table with the new pair { interface_method, method }.
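  // Illustratively, if the old table held { (A.foo -> X.foo) }, the new one holds
  //   { (A.foo -> X.foo), (interface_method -> method) }.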
  DCHECK(conflict_method->IsRuntimeMethod());
  ArtMethod* new_conflict_method = Runtime::Current()->GetClassLinker()->AddMethodToConflictTable(
      cls.Get(),
      conflict_method,
      interface_method,
      method,
      /*force_new_conflict_method=*/false);
  if (new_conflict_method != conflict_method) {
    // Update the IMT if we create a new conflict method. No fence needed here, as the
    // data is consistent.
    imt->Set(imt_index,
             new_conflict_method,
             kRuntimePointerSize);
  }

  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was null in method: " << method->PrettyMethod()
                          << " location: " << method->GetDexFile()->GetLocation();

  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
                                reinterpret_cast<uintptr_t>(method));
}

// Returns uint64_t representing raw bits from JValue.
extern "C" uint64_t artInvokePolymorphic(mirror::Object* raw_receiver, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedQuickEntrypointChecks sqec(self);
  DCHECK(raw_receiver != nullptr);
  DCHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));

  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  const char* old_cause = self->StartAssertNoThreadSuspension("Making stack arguments safe.");

  // From the instruction, get the |callsite_shorty| and expose arguments on the stack to the GC.
  ArtMethod* caller_method = QuickArgumentVisitor::GetCallingMethod(sp);
  uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
  const Instruction& inst = caller_method->DexInstructions().InstructionAt(dex_pc);
  DCHECK(inst.Opcode() == Instruction::INVOKE_POLYMORPHIC ||
         inst.Opcode() == Instruction::INVOKE_POLYMORPHIC_RANGE);
  const dex::ProtoIndex proto_idx(inst.VRegH());
  const char* shorty = caller_method->GetDexFile()->GetShorty(proto_idx);
  const size_t shorty_length = strlen(shorty);
  static const bool kMethodIsStatic = false;  // invoke() and invokeExact() are not static.
  RememberForGcArgumentVisitor gc_visitor(sp, kMethodIsStatic, shorty, shorty_length, &soa);
  gc_visitor.VisitArguments();

  // Wrap raw_receiver in a Handle for safety.
  StackHandleScope<3> hs(self);
  Handle<mirror::Object> receiver_handle(hs.NewHandle(raw_receiver));
  raw_receiver = nullptr;
  self->EndAssertNoThreadSuspension(old_cause);

  // Resolve method.
  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  ArtMethod* resolved_method = linker->ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
      self, inst.VRegB(), caller_method, kVirtual);

  Handle<mirror::MethodType> method_type(
      hs.NewHandle(linker->ResolveMethodType(self, proto_idx, caller_method)));
  if (UNLIKELY(method_type.IsNull())) {
    // This implies we couldn't resolve one or more types in this method handle.
    CHECK(self->IsExceptionPending());
    return 0UL;
  }

  DCHECK_EQ(ArtMethod::NumArgRegisters(shorty) + 1u, static_cast<uint32_t>(inst.VRegA()));
  DCHECK_EQ(resolved_method->IsStatic(), kMethodIsStatic);

  // Fix references before constructing the shadow frame.
  gc_visitor.FixupReferences();

  // Construct shadow frame placing arguments consecutively from |first_arg|.
  const bool is_range = (inst.Opcode() == Instruction::INVOKE_POLYMORPHIC_RANGE);
  const size_t num_vregs = is_range ? inst.VRegA_4rcc() : inst.VRegA_45cc();
  const size_t first_arg = 0;
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_vregs, /* link= */ nullptr, resolved_method, dex_pc);
  ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get();
  ScopedStackedShadowFramePusher
      frame_pusher(self, shadow_frame, StackedShadowFrameType::kShadowFrameUnderConstruction);
  BuildQuickShadowFrameVisitor shadow_frame_builder(sp,
                                                    kMethodIsStatic,
                                                    shorty,
                                                    shorty_length,
                                                    shadow_frame,
                                                    first_arg);
  shadow_frame_builder.VisitArguments();

  // Push a transition back into managed code onto the linked list in thread.
  ManagedStack fragment;
  self->PushManagedStackFragment(&fragment);

  // The shadow frame holds the argument registers in consecutive order, so pass them on as a
  // range, skipping vreg 0, which holds the MethodHandle/VarHandle receiver itself.
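  // Illustrative example: for "invoke-polymorphic {v0, v1, v2}, invokeExact, (II)I", v0 holds
  // the MethodHandle receiver, so the operands forwarded to the handle are v1..v2.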
  RangeInstructionOperands operands(first_arg + 1, num_vregs - 1);
  Intrinsics intrinsic = static_cast<Intrinsics>(resolved_method->GetIntrinsic());
  JValue result;
  bool success = false;
  if (resolved_method->GetDeclaringClass() == GetClassRoot<mirror::MethodHandle>(linker)) {
    Handle<mirror::MethodHandle> method_handle(hs.NewHandle(
        ObjPtr<mirror::MethodHandle>::DownCast(receiver_handle.Get())));
    if (intrinsic == Intrinsics::kMethodHandleInvokeExact) {
      success = MethodHandleInvokeExact(self,
                                        *shadow_frame,
                                        method_handle,
                                        method_type,
                                        &operands,
                                        &result);
    } else {
      DCHECK_EQ(static_cast<uint32_t>(intrinsic),
                static_cast<uint32_t>(Intrinsics::kMethodHandleInvoke));
      success = MethodHandleInvoke(self,
                                   *shadow_frame,
                                   method_handle,
                                   method_type,
                                   &operands,
                                   &result);
    }
  } else {
    DCHECK_EQ(GetClassRoot<mirror::VarHandle>(linker), resolved_method->GetDeclaringClass());
    Handle<mirror::VarHandle> var_handle(hs.NewHandle(
        ObjPtr<mirror::VarHandle>::DownCast(receiver_handle.Get())));
    mirror::VarHandle::AccessMode access_mode =
        mirror::VarHandle::GetAccessModeByIntrinsic(intrinsic);
    success = VarHandleInvokeAccessor(self,
                                      *shadow_frame,
                                      var_handle,
                                      method_type,
                                      access_mode,
                                      &operands,
                                      &result);
  }

  DCHECK(success || self->IsExceptionPending());

  // Pop transition record.
  self->PopManagedStackFragment(fragment);

  return result.GetJ();
}

// Returns uint64_t representing raw bits from JValue.
extern "C" uint64_t artInvokeCustom(uint32_t call_site_idx, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedQuickEntrypointChecks sqec(self);
  DCHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));

  // invoke-custom is effectively a static call (no receiver).
  static constexpr bool kMethodIsStatic = true;

  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);

  const char* old_cause = self->StartAssertNoThreadSuspension("Making stack arguments safe.");

  // From the instruction, get the |callsite_shorty| and expose arguments on the stack to the GC.
  ArtMethod* caller_method = QuickArgumentVisitor::GetCallingMethod(sp);
  uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
  const DexFile* dex_file = caller_method->GetDexFile();
  const dex::ProtoIndex proto_idx(dex_file->GetProtoIndexForCallSite(call_site_idx));
  const char* shorty = dex_file->GetShorty(proto_idx);
  const uint32_t shorty_len = strlen(shorty);

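  // Descriptive note: ArtMethod::NumArgRegisters() counts vregs from the shorty's argument
  // characters only (the leading return-type character is skipped); e.g. a call site shorty of
  // "VJI" (void return, long + int arguments) yields 3 vregs: 2 for the long, 1 for the int.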
  // Construct the shadow frame placing arguments consecutively from |first_arg|.
  const size_t first_arg = 0;
  const size_t num_vregs = ArtMethod::NumArgRegisters(shorty);
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_vregs, /* link= */ nullptr, caller_method, dex_pc);
  ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get();
  ScopedStackedShadowFramePusher
      frame_pusher(self, shadow_frame, StackedShadowFrameType::kShadowFrameUnderConstruction);
  BuildQuickShadowFrameVisitor shadow_frame_builder(sp,
                                                    kMethodIsStatic,
                                                    shorty,
                                                    shorty_len,
                                                    shadow_frame,
                                                    first_arg);
  shadow_frame_builder.VisitArguments();

  // Push a transition back into managed code onto the linked list in thread.
  ManagedStack fragment;
  self->PushManagedStackFragment(&fragment);
  self->EndAssertNoThreadSuspension(old_cause);

  // Perform the invoke-custom operation.
  RangeInstructionOperands operands(first_arg, num_vregs);
  JValue result;
  bool success =
      interpreter::DoInvokeCustom(self, *shadow_frame, call_site_idx, &operands, &result);
  DCHECK(success || self->IsExceptionPending());

  // Pop transition record.
  self->PopManagedStackFragment(fragment);

  return result.GetJ();
}

}  // namespace art