blob: 1b00b54acbe575633492c404573bc0f0650ab025 [file] [log] [blame]
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
Brian Carlstromfc0e3212013-07-17 14:40:12 -070017#ifndef ART_RUNTIME_STACK_H_
18#define ART_RUNTIME_STACK_H_
Elliott Hughes68e76522011-10-05 13:22:16 -070019
Mythri Alle72be14e2021-11-01 11:48:06 +000020#include <stdint.h>
Mythri Alle5097f832021-11-02 14:52:30 +000021
22#include <optional>
Ian Rogers40e3bac2012-11-20 00:09:14 -080023#include <string>
Elliott Hughes68e76522011-10-05 13:22:16 -070024
Andreas Gampe7fbc4a52018-11-28 08:26:47 -080025#include "base/locks.h"
Andreas Gampe03ec9302015-08-27 17:41:47 -070026#include "base/macros.h"
Mythri Alle5097f832021-11-02 14:52:30 +000027#include "deoptimization_kind.h"
Vladimir Marko439d1262019-04-12 14:45:07 +010028#include "obj_ptr.h"
Nicolas Geoffray524e7ea2015-10-16 17:13:34 +010029#include "quick/quick_method_frame_info.h"
David Srbecky93bd3612018-07-02 19:30:18 +010030#include "stack_map.h"
Ian Rogerse63db272014-07-15 15:36:11 -070031
Elliott Hughes68e76522011-10-05 13:22:16 -070032namespace art {
33
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080034namespace mirror {
Igor Murashkin2ffb7032017-11-08 13:35:21 -080035class Object;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080036} // namespace mirror
37
Mathieu Chartiere401d142015-04-22 13:56:20 -070038class ArtMethod;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080039class Context;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -070040class HandleScope;
Nicolas Geoffray524e7ea2015-10-16 17:13:34 +010041class OatQuickMethodHeader;
Nicolas Geoffray57f61612015-05-15 13:20:41 +010042class ShadowFrame;
Elliott Hughes68e76522011-10-05 13:22:16 -070043class Thread;
Vladimir Marko3a21e382016-09-02 12:38:38 +010044union JValue;
Elliott Hughes68e76522011-10-05 13:22:16 -070045
// The kind of vreg being accessed in calls to Set/GetVReg.
enum VRegKind {
  kReferenceVReg,
  kIntVReg,
  kFloatVReg,
  kLongLoVReg,    // Low half of a wide (64-bit) long pair; see GetVRegPair/SetVRegPair.
  kLongHiVReg,    // High half of a wide (64-bit) long pair.
  kDoubleLoVReg,  // Low half of a wide (64-bit) double pair.
  kDoubleHiVReg,  // High half of a wide (64-bit) double pair.
  kConstant,      // NOTE(review): presumably a compile-time constant value rather than a
                  // register/stack slot — confirm against the stack map decoding code.
  kImpreciseConstant,
  kUndefined,
};
// Stream insertion for VRegKind (e.g. for LOG/CHECK messages); defined in the .cc file.
std::ostream& operator<<(std::ostream& os, VRegKind rhs);
Ian Rogers2bcb4a42012-11-08 10:39:18 -080060
// Size in bytes of the should_deoptimize flag on stack.
// We just need 4 bytes for our purpose regardless of the architecture. Frame size
// calculation will automatically do alignment for the final frame size.
// Note: only a single byte is actually read/written through GetShouldDeoptimizeFlagAddr()
// below (uint8_t); the 4-byte reservation is for frame-layout convenience.
static constexpr size_t kShouldDeoptimizeFlagSize = 4;
65
/*
 * Our current stack layout.
 * The Dalvik registers come first, followed by the
 * Method*, followed by other special temporaries if any, followed by
 * regular compiler temporaries. As of now we only have the Method*
 * as a special compiler temporary.
 * A compiler temporary can be thought of as a virtual register that
 * does not exist in the dex but holds intermediate values to help
 * optimizations and code generation. A special compiler temporary is
 * one whose location in frame is well known while non-special ones
 * do not have a requirement on location in frame as long as code
 * generator itself knows how to access them.
 *
 * TODO: Update this documentation?
 *
 *     +-------------------------------+
 *     | IN[ins-1]                     |  {Note: resides in caller's frame}
 *     |       .                       |
 *     | IN[0]                         |
 *     | caller's ArtMethod            |  ... ArtMethod*
 *     +===============================+  {Note: start of callee's frame}
 *     | core callee-save spill        |  {variable sized}
 *     +-------------------------------+
 *     | fp callee-save spill          |
 *     +-------------------------------+
 *     | filler word                   |  {For compatibility, if V[locals-1] used as wide}
 *     +-------------------------------+
 *     | V[locals-1]                   |
 *     | V[locals-2]                   |
 *     |       .                       |
 *     |       .                       |  ... (reg == 2)
 *     | V[1]                          |  ... (reg == 1)
 *     | V[0]                          |  ... (reg == 0) <---- "locals_start"
 *     +-------------------------------+
 *     | stack alignment padding       |  {0 to (kStackAlignWords-1) of padding}
 *     +-------------------------------+
 *     | Compiler temp region          |  ... (reg >= max_num_special_temps)
 *     |       .                       |
 *     |       .                       |
 *     | V[max_num_special_temps + 1]  |
 *     | V[max_num_special_temps + 0]  |
 *     +-------------------------------+
 *     | OUT[outs-1]                   |
 *     | OUT[outs-2]                   |
 *     |       .                       |
 *     | OUT[0]                        |
 *     | ArtMethod*                    |  ... (reg == num_total_code_regs == special_temp_value) <<== sp, 16-byte aligned
 *     +===============================+
 */
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -0800115
// Abstract visitor over a thread's managed stack. WalkStack() iterates the
// frames — both quick (compiled) frames and interpreter shadow frames — and
// invokes the subclass-provided VisitFrame() once per frame, stopping early
// when VisitFrame() returns false. Accessors on this class expose the frame
// currently being visited (method, dex pc, vregs, return pc, ...).
class StackVisitor {
 public:
  // This enum defines a flag to control whether inlined frames are included
  // when walking the stack.
  enum class StackWalkKind {
    kIncludeInlinedFrames,
    kSkipInlinedFrames,
  };

 protected:
  // Protected: this class is only useful as a base for concrete visitors.
  // `context` may carry saved register values for reading vregs from quick
  // frames; `check_suspended` presumably controls a suspension DCHECK on the
  // walked thread — TODO(review): confirm in stack.cc.
  StackVisitor(Thread* thread,
               Context* context,
               StackWalkKind walk_kind,
               bool check_suspended = true);

  // Reads physical register `reg` (core or FP, per `kind`) of the current
  // frame into `*val`; returns false if the register is not accessible.
  bool GetRegisterIfAccessible(uint32_t reg, DexRegisterLocation::Kind kind, uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

 public:
  virtual ~StackVisitor() {}
  StackVisitor(const StackVisitor&) = default;
  StackVisitor(StackVisitor&&) = default;

  // Return 'true' if we should continue to visit more frames, 'false' to stop.
  virtual bool VisitFrame() REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Whether transitions between managed and native code count towards the
  // frame depth bookkeeping during the walk.
  enum class CountTransitions {
    kYes,
    kNo,
  };

  template <CountTransitions kCount = CountTransitions::kYes>
  void WalkStack(bool include_transitions = false) REQUIRES_SHARED(Locks::mutator_lock_);

  // Convenience helper function to walk the stack with a lambda as a visitor.
  // Wraps `fn` (callable as bool(StackVisitor*)) in a local StackVisitor
  // subclass and runs a full walk with it.
  template <CountTransitions kCountTransitions = CountTransitions::kYes,
            typename T>
  ALWAYS_INLINE static void WalkStack(const T& fn,
                                      Thread* thread,
                                      Context* context,
                                      StackWalkKind walk_kind,
                                      bool check_suspended = true,
                                      bool include_transitions = false)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    class LambdaStackVisitor : public StackVisitor {
     public:
      LambdaStackVisitor(const T& fn,
                         Thread* thread,
                         Context* context,
                         StackWalkKind walk_kind,
                         bool check_suspended = true)
          : StackVisitor(thread, context, walk_kind, check_suspended), fn_(fn) {}

      bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
        return fn_(this);
      }

     private:
      T fn_;  // Copy of the lambda; invoked once per visited frame.
    };
    LambdaStackVisitor visitor(fn, thread, context, walk_kind, check_suspended);
    // `template` keyword required: WalkStack is a member template called on a
    // dependent-free object but disambiguated for the template argument list.
    visitor.template WalkStack<kCountTransitions>(include_transitions);
  }

  // The thread whose stack is being walked.
  Thread* GetThread() const {
    return thread_;
  }

  // Method of the frame currently being visited (inlined-frame aware).
  ArtMethod* GetMethod() const REQUIRES_SHARED(Locks::mutator_lock_);

  // Sets this stack frame's method pointer. This requires a full lock of the MutatorLock. This
  // doesn't work with inlined methods.
  void SetMethod(ArtMethod* method) REQUIRES(Locks::mutator_lock_);

  // Method of the outermost (non-inlined) frame: reads the ArtMethod* slot
  // that the current quick frame pointer points at.
  ArtMethod* GetOuterMethod() const {
    return *GetCurrentQuickFrame();
  }

  // True when the current frame is an interpreter shadow frame rather than a
  // compiled quick frame.
  bool IsShadowFrame() const {
    return cur_shadow_frame_ != nullptr;
  }

  // Dex pc of the current frame; behavior on failure is controlled by
  // `abort_on_failure`.
  uint32_t GetDexPc(bool abort_on_failure = true) const REQUIRES_SHARED(Locks::mutator_lock_);

  // `this` reference of the current frame's method (null for static methods).
  ObjPtr<mirror::Object> GetThisObject() const REQUIRES_SHARED(Locks::mutator_lock_);

  size_t GetNativePcOffset() const REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the height of the stack in the managed stack frames, including transitions.
  size_t GetFrameHeight() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetNumFrames() - cur_depth_ - 1;
  }

  // Returns a frame ID for JDWP use, starting from 1.
  size_t GetFrameId() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFrameHeight() + 1;
  }

  // Total number of frames, computed lazily on first use (0 means
  // "not yet computed").
  size_t GetNumFrames() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (num_frames_ == 0) {
      num_frames_ = ComputeNumFrames(thread_, walk_kind_);
    }
    return num_frames_;
  }

  size_t GetFrameDepth() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return cur_depth_;
  }

  // Get the method and dex pc immediately after the one that's currently being visited.
  bool GetNextMethodAndDexPc(ArtMethod** next_method, uint32_t* next_dex_pc)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Reads virtual register `vreg` of method `m` in the current frame into
  // `*val`. `location`, when provided, bypasses the stack-map lookup for the
  // register's location. Returns false if the value is not available.
  bool GetVReg(ArtMethod* m,
               uint16_t vreg,
               VRegKind kind,
               uint32_t* val,
               std::optional<DexRegisterLocation> location =
                   std::optional<DexRegisterLocation>()) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Wide (64-bit) variant of GetVReg: reads the lo/hi vreg pair into `*val`.
  bool GetVRegPair(ArtMethod* m, uint16_t vreg, VRegKind kind_lo, VRegKind kind_hi,
                   uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Values will be set in debugger shadow frames. Debugger will make sure deoptimization
  // is triggered to make the values effective.
  bool SetVReg(ArtMethod* m, uint16_t vreg, uint32_t new_value, VRegKind kind)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Values will be set in debugger shadow frames. Debugger will make sure deoptimization
  // is triggered to make the values effective.
  bool SetVRegReference(ArtMethod* m, uint16_t vreg, ObjPtr<mirror::Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Values will be set in debugger shadow frames. Debugger will make sure deoptimization
  // is triggered to make the values effective.
  bool SetVRegPair(ArtMethod* m,
                   uint16_t vreg,
                   uint64_t new_value,
                   VRegKind kind_lo,
                   VRegKind kind_hi)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Address of general-purpose register `reg` in the saved register context.
  uintptr_t* GetGPRAddress(uint32_t reg) const;

  uintptr_t GetReturnPc() const REQUIRES_SHARED(Locks::mutator_lock_);
  uintptr_t GetReturnPcAddr() const REQUIRES_SHARED(Locks::mutator_lock_);

  void SetReturnPc(uintptr_t new_ret_pc) REQUIRES_SHARED(Locks::mutator_lock_);

  // True while visiting a frame that was inlined into the current quick frame.
  bool IsInInlinedFrame() const {
    return !current_inline_frames_.empty();
  }

  // Innermost inline info for the frame currently being visited; only valid
  // when IsInInlinedFrame() is true.
  InlineInfo GetCurrentInlinedFrame() const {
    return current_inline_frames_.back();
  }

  uintptr_t GetCurrentQuickFramePc() const {
    return cur_quick_frame_pc_;
  }

  ArtMethod** GetCurrentQuickFrame() const {
    return cur_quick_frame_;
  }

  ShadowFrame* GetCurrentShadowFrame() const {
    return cur_shadow_frame_;
  }

  // Human-readable description of the current frame, for logging/debugging.
  std::string DescribeLocation() const REQUIRES_SHARED(Locks::mutator_lock_);

  static size_t ComputeNumFrames(Thread* thread, StackWalkKind walk_kind)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Logs a description of `thread`'s whole stack, for debugging.
  static void DescribeStack(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_);

  const OatQuickMethodHeader* GetCurrentOatQuickMethodHeader() const {
    return cur_oat_quick_method_header_;
  }

  QuickMethodFrameInfo GetCurrentQuickFrameInfo() const REQUIRES_SHARED(Locks::mutator_lock_);

  // ORs `value` into the current frame's should_deoptimize flag byte
  // (see kShouldDeoptimizeFlagSize above); existing flag bits are preserved.
  void SetShouldDeoptimizeFlag(DeoptimizeFlagValue value) REQUIRES_SHARED(Locks::mutator_lock_) {
    uint8_t* should_deoptimize_addr = GetShouldDeoptimizeFlagAddr();
    *should_deoptimize_addr = *should_deoptimize_addr | static_cast<uint8_t>(value);
  };

  uint8_t GetShouldDeoptimizeFlag() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return *GetShouldDeoptimizeFlagAddr();
  }

 private:
  // Private constructor known in the case that num_frames_ has already been computed.
  StackVisitor(Thread* thread,
               Context* context,
               StackWalkKind walk_kind,
               size_t num_frames,
               bool check_suspended = true)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsAccessibleRegister(uint32_t reg, bool is_float) const {
    return is_float ? IsAccessibleFPR(reg) : IsAccessibleGPR(reg);
  }
  uintptr_t GetRegister(uint32_t reg, bool is_float) const {
    DCHECK(IsAccessibleRegister(reg, is_float));
    return is_float ? GetFPR(reg) : GetGPR(reg);
  }

  // Core-register access via the saved Context; architecture-specific.
  bool IsAccessibleGPR(uint32_t reg) const;
  uintptr_t GetGPR(uint32_t reg) const;

  // Floating-point-register access via the saved Context.
  bool IsAccessibleFPR(uint32_t reg) const;
  uintptr_t GetFPR(uint32_t reg) const;

  bool GetVRegFromDebuggerShadowFrame(uint16_t vreg, VRegKind kind, uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool GetVRegFromOptimizedCode(ArtMethod* m,
                                uint16_t vreg,
                                VRegKind kind,
                                uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool GetVRegPairFromDebuggerShadowFrame(uint16_t vreg,
                                          VRegKind kind_lo,
                                          VRegKind kind_hi,
                                          uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool GetVRegPairFromOptimizedCode(ArtMethod* m,
                                    uint16_t vreg,
                                    VRegKind kind_lo,
                                    VRegKind kind_hi,
                                    uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool GetVRegFromOptimizedCode(DexRegisterLocation location, uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the (debugger) shadow frame in which a vreg write should land;
  // `wide` indicates a 64-bit (two-slot) value.
  ShadowFrame* PrepareSetVReg(ArtMethod* m, uint16_t vreg, bool wide)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void ValidateFrame() const REQUIRES_SHARED(Locks::mutator_lock_);

  // Accessors for the decode caches below (lazy, hence const + mutable fields).
  ALWAYS_INLINE CodeInfo* GetCurrentInlineInfo() const;
  ALWAYS_INLINE StackMap* GetCurrentStackMap() const;

  Thread* const thread_;
  const StackWalkKind walk_kind_;
  ShadowFrame* cur_shadow_frame_;       // Non-null while visiting an interpreter frame.
  ArtMethod** cur_quick_frame_;         // Points at the ArtMethod* slot of the quick frame.
  uintptr_t cur_quick_frame_pc_;
  const OatQuickMethodHeader* cur_oat_quick_method_header_;
  // Lazily computed, number of frames in the stack.
  size_t num_frames_;
  // Depth of the frame we're currently at.
  size_t cur_depth_;
  // Current inlined frames of the method we are currently at.
  // We keep popping frames from the end as we visit the frames.
  BitTableRange<InlineInfo> current_inline_frames_;

  // Cache the most recently decoded inline info data.
  // The 'current_inline_frames_' refers to this data, so we need to keep it alive anyway.
  // Marked mutable since the cache fields are updated from const getters.
  mutable std::pair<const OatQuickMethodHeader*, CodeInfo> cur_inline_info_;
  mutable std::pair<uintptr_t, StackMap> cur_stack_map_;

  // Address of the current frame's should_deoptimize flag byte.
  uint8_t* GetShouldDeoptimizeFlagAddr() const REQUIRES_SHARED(Locks::mutator_lock_);

 protected:
  Context* const context_;
  const bool check_suspended_;
};
388
Elliott Hughes68e76522011-10-05 13:22:16 -0700389} // namespace art
390
Brian Carlstromfc0e3212013-07-17 14:40:12 -0700391#endif // ART_RUNTIME_STACK_H_