/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_STACK_H_
#define ART_RUNTIME_STACK_H_

#include <optional>
#include <stdint.h>
#include <string>

#include "base/locks.h"
#include "base/macros.h"
#include "obj_ptr.h"
#include "quick/quick_method_frame_info.h"
#include "stack_map.h"

namespace art {

namespace mirror {
class Object;
}  // namespace mirror

class ArtMethod;
class Context;
class HandleScope;
class OatQuickMethodHeader;
class ShadowFrame;
class Thread;
union JValue;
// The kind of vreg being accessed in calls to Set/GetVReg.
enum VRegKind {
  kReferenceVReg,
  kIntVReg,
  kFloatVReg,
  kLongLoVReg,
  kLongHiVReg,
  kDoubleLoVReg,
  kDoubleHiVReg,
  kConstant,
  kImpreciseConstant,
  kUndefined,
};
std::ostream& operator<<(std::ostream& os, VRegKind rhs);

// Size in bytes of the should_deoptimize flag on stack.
// Only 4 bytes are needed regardless of the architecture; the frame size
// calculation automatically aligns the final frame size.
static constexpr size_t kShouldDeoptimizeFlagSize = 4;

/*
 * Our current stack layout.
 * The Dalvik registers come first, followed by the Method*, followed by other
 * special temporaries if any, followed by regular compiler temporaries. As of
 * now the Method* is the only special compiler temporary.
 * A compiler temporary can be thought of as a virtual register that does not
 * exist in the dex but holds intermediate values to help optimizations and
 * code generation. A special compiler temporary is one whose location in the
 * frame is well known, while non-special ones have no fixed location in the
 * frame as long as the code generator itself knows how to access them.
 *
 * TODO: Update this documentation?
 *
 * +-------------------------------+
 * | IN[ins-1]                     |  {Note: resides in caller's frame}
 * |       .                       |
 * | IN[0]                         |
 * | caller's ArtMethod            |  ... ArtMethod*
 * +===============================+  {Note: start of callee's frame}
 * | core callee-save spill        |  {variable sized}
 * +-------------------------------+
 * | fp callee-save spill          |
 * +-------------------------------+
 * | filler word                   |  {For compatibility, if V[locals-1] used as wide}
 * +-------------------------------+
 * | V[locals-1]                   |
 * | V[locals-2]                   |
 * |      .                        |
 * |      .                        |  ... (reg == 2)
 * | V[1]                          |  ... (reg == 1)
 * | V[0]                          |  ... (reg == 0) <---- "locals_start"
 * +-------------------------------+
 * | stack alignment padding       |  {0 to (kStackAlignWords-1) of padding}
 * +-------------------------------+
 * | Compiler temp region          |  ... (reg >= max_num_special_temps)
 * |      .                        |
 * |      .                        |
 * | V[max_num_special_temps + 1]  |
 * | V[max_num_special_temps + 0]  |
 * +-------------------------------+
 * | OUT[outs-1]                   |
 * | OUT[outs-2]                   |
 * |       .                       |
 * | OUT[0]                        |
 * | ArtMethod*                    |  ... (reg == num_total_code_regs == special_temp_value) <<== sp, 16-byte aligned
 * +===============================+
 */

class StackVisitor {
 public:
  // This enum defines a flag to control whether inlined frames are included
  // when walking the stack.
  enum class StackWalkKind {
    kIncludeInlinedFrames,
    kSkipInlinedFrames,
  };

 protected:
  StackVisitor(Thread* thread,
               Context* context,
               StackWalkKind walk_kind,
               bool check_suspended = true);

  bool GetRegisterIfAccessible(uint32_t reg, DexRegisterLocation::Kind kind, uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

 public:
  virtual ~StackVisitor() {}
  StackVisitor(const StackVisitor&) = default;
  StackVisitor(StackVisitor&&) = default;

  // Return 'true' if we should continue to visit more frames, 'false' to stop.
  virtual bool VisitFrame() REQUIRES_SHARED(Locks::mutator_lock_) = 0;
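
  // A minimal sketch of a concrete visitor (the FrameCounter name is
  // hypothetical, not part of ART): subclass StackVisitor, override
  // VisitFrame(), then call WalkStack().
  //
  //   class FrameCounter : public StackVisitor {
  //    public:
  //     explicit FrameCounter(Thread* thread)
  //         : StackVisitor(thread, /* context= */ nullptr,
  //                        StackWalkKind::kIncludeInlinedFrames) {}
  //     bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
  //       ++count_;
  //       return true;  // Keep walking towards the oldest frame.
  //     }
  //     size_t count_ = 0;
  //   };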

  enum class CountTransitions {
    kYes,
    kNo,
  };

  template <CountTransitions kCount = CountTransitions::kYes>
  void WalkStack(bool include_transitions = false) REQUIRES_SHARED(Locks::mutator_lock_);

  // Convenience helper function to walk the stack with a lambda as a visitor.
  template <CountTransitions kCountTransitions = CountTransitions::kYes,
            typename T>
  ALWAYS_INLINE static void WalkStack(const T& fn,
                                      Thread* thread,
                                      Context* context,
                                      StackWalkKind walk_kind,
                                      bool check_suspended = true,
                                      bool include_transitions = false)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    class LambdaStackVisitor : public StackVisitor {
     public:
      LambdaStackVisitor(const T& fn,
                         Thread* thread,
                         Context* context,
                         StackWalkKind walk_kind,
                         bool check_suspended = true)
          : StackVisitor(thread, context, walk_kind, check_suspended), fn_(fn) {}

      bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
        return fn_(this);
      }

     private:
      T fn_;
    };
    LambdaStackVisitor visitor(fn, thread, context, walk_kind, check_suspended);
    visitor.template WalkStack<kCountTransitions>(include_transitions);
  }
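
  // For one-off walks, the lambda form above avoids writing a subclass. A
  // minimal sketch (the logging body is illustrative only):
  //
  //   StackVisitor::WalkStack(
  //       [](const StackVisitor* visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
  //         LOG(INFO) << visitor->DescribeLocation();
  //         return true;  // Visit the next frame.
  //       },
  //       Thread::Current(),
  //       /* context= */ nullptr,
  //       StackVisitor::StackWalkKind::kSkipInlinedFrames);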

  Thread* GetThread() const {
    return thread_;
  }

  ArtMethod* GetMethod() const REQUIRES_SHARED(Locks::mutator_lock_);

  // Sets this stack frame's method pointer. This requires a full lock of the
  // MutatorLock. This doesn't work with inlined methods.
  void SetMethod(ArtMethod* method) REQUIRES(Locks::mutator_lock_);

  ArtMethod* GetOuterMethod() const {
    return *GetCurrentQuickFrame();
  }

  bool IsShadowFrame() const {
    return cur_shadow_frame_ != nullptr;
  }

  uint32_t GetDexPc(bool abort_on_failure = true) const REQUIRES_SHARED(Locks::mutator_lock_);

  ObjPtr<mirror::Object> GetThisObject() const REQUIRES_SHARED(Locks::mutator_lock_);

  size_t GetNativePcOffset() const REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the height of the stack in the managed stack frames, including transitions.
  size_t GetFrameHeight() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetNumFrames() - cur_depth_ - 1;
  }

  // Returns a frame ID for JDWP use, starting from 1.
  size_t GetFrameId() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFrameHeight() + 1;
  }
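
  // Worked example (illustrative): with GetNumFrames() == 5, the newest frame
  // (cur_depth_ == 0) has height 5 - 0 - 1 == 4 and frame ID 5; the oldest
  // frame (cur_depth_ == 4) has height 0 and frame ID 1.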

  size_t GetNumFrames() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (num_frames_ == 0) {
      num_frames_ = ComputeNumFrames(thread_, walk_kind_);
    }
    return num_frames_;
  }

  size_t GetFrameDepth() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return cur_depth_;
  }

  // Get the method and dex pc immediately after the one that's currently being visited.
  bool GetNextMethodAndDexPc(ArtMethod** next_method, uint32_t* next_dex_pc)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool GetVReg(ArtMethod* m,
               uint16_t vreg,
               VRegKind kind,
               uint32_t* val,
               std::optional<DexRegisterLocation> location =
                   std::optional<DexRegisterLocation>()) const
      REQUIRES_SHARED(Locks::mutator_lock_);
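
  // A minimal sketch of reading an integer vreg from inside VisitFrame()
  // (names and the choice of v0 are illustrative; error handling is elided):
  //
  //   uint32_t value;
  //   if (GetVReg(GetMethod(), /* vreg= */ 0, kIntVReg, &value)) {
  //     // 'value' now holds the current contents of v0.
  //   }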

  bool GetVRegPair(ArtMethod* m, uint16_t vreg, VRegKind kind_lo, VRegKind kind_hi,
                   uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Values will be set in debugger shadow frames. The debugger will make sure
  // deoptimization is triggered to make the values effective.
  bool SetVReg(ArtMethod* m, uint16_t vreg, uint32_t new_value, VRegKind kind)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Values will be set in debugger shadow frames. The debugger will make sure
  // deoptimization is triggered to make the values effective.
  bool SetVRegReference(ArtMethod* m, uint16_t vreg, ObjPtr<mirror::Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Values will be set in debugger shadow frames. The debugger will make sure
  // deoptimization is triggered to make the values effective.
  bool SetVRegPair(ArtMethod* m,
                   uint16_t vreg,
                   uint64_t new_value,
                   VRegKind kind_lo,
                   VRegKind kind_hi)
      REQUIRES_SHARED(Locks::mutator_lock_);
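
  // A hedged sketch of overwriting a vreg (names and the choice of v1 are
  // illustrative): the write lands in a debugger shadow frame and only takes
  // effect once the frame is deoptimized, as described above.
  //
  //   bool ok = SetVReg(GetMethod(), /* vreg= */ 1, /* new_value= */ 0, kIntVReg);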

  uintptr_t* GetGPRAddress(uint32_t reg) const;

  uintptr_t GetReturnPc() const REQUIRES_SHARED(Locks::mutator_lock_);
  uintptr_t GetReturnPcAddr() const REQUIRES_SHARED(Locks::mutator_lock_);

  void SetReturnPc(uintptr_t new_ret_pc) REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsInInlinedFrame() const {
    return !current_inline_frames_.empty();
  }

  InlineInfo GetCurrentInlinedFrame() const {
    return current_inline_frames_.back();
  }

  uintptr_t GetCurrentQuickFramePc() const {
    return cur_quick_frame_pc_;
  }

  ArtMethod** GetCurrentQuickFrame() const {
    return cur_quick_frame_;
  }

  ShadowFrame* GetCurrentShadowFrame() const {
    return cur_shadow_frame_;
  }

  std::string DescribeLocation() const REQUIRES_SHARED(Locks::mutator_lock_);

  static size_t ComputeNumFrames(Thread* thread, StackWalkKind walk_kind)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static void DescribeStack(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_);

  const OatQuickMethodHeader* GetCurrentOatQuickMethodHeader() const {
    return cur_oat_quick_method_header_;
  }

  QuickMethodFrameInfo GetCurrentQuickFrameInfo() const REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  // Private constructor, used when num_frames_ has already been computed.
  StackVisitor(Thread* thread,
               Context* context,
               StackWalkKind walk_kind,
               size_t num_frames,
               bool check_suspended = true)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsAccessibleRegister(uint32_t reg, bool is_float) const {
    return is_float ? IsAccessibleFPR(reg) : IsAccessibleGPR(reg);
  }
  uintptr_t GetRegister(uint32_t reg, bool is_float) const {
    DCHECK(IsAccessibleRegister(reg, is_float));
    return is_float ? GetFPR(reg) : GetGPR(reg);
  }

  bool IsAccessibleGPR(uint32_t reg) const;
  uintptr_t GetGPR(uint32_t reg) const;

  bool IsAccessibleFPR(uint32_t reg) const;
  uintptr_t GetFPR(uint32_t reg) const;

  bool GetVRegFromDebuggerShadowFrame(uint16_t vreg, VRegKind kind, uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool GetVRegFromOptimizedCode(ArtMethod* m,
                                uint16_t vreg,
                                VRegKind kind,
                                uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool GetVRegPairFromDebuggerShadowFrame(uint16_t vreg,
                                          VRegKind kind_lo,
                                          VRegKind kind_hi,
                                          uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool GetVRegPairFromOptimizedCode(ArtMethod* m,
                                    uint16_t vreg,
                                    VRegKind kind_lo,
                                    VRegKind kind_hi,
                                    uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool GetVRegFromOptimizedCode(DexRegisterLocation location, uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  ShadowFrame* PrepareSetVReg(ArtMethod* m, uint16_t vreg, bool wide)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void ValidateFrame() const REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE CodeInfo* GetCurrentInlineInfo() const;
  ALWAYS_INLINE StackMap* GetCurrentStackMap() const;

  Thread* const thread_;
  const StackWalkKind walk_kind_;
  ShadowFrame* cur_shadow_frame_;
  ArtMethod** cur_quick_frame_;
  uintptr_t cur_quick_frame_pc_;
  const OatQuickMethodHeader* cur_oat_quick_method_header_;
  // Lazily computed, number of frames in the stack.
  size_t num_frames_;
  // Depth of the frame we're currently at.
  size_t cur_depth_;
  // Current inlined frames of the method we are currently at.
  // We keep popping frames from the end as we visit the frames.
  BitTableRange<InlineInfo> current_inline_frames_;

  // Cache the most recently decoded inline info data.
  // The 'current_inline_frames_' refers to this data, so we need to keep it alive anyway.
  // Marked mutable since the cache fields are updated from const getters.
  mutable std::pair<const OatQuickMethodHeader*, CodeInfo> cur_inline_info_;
  mutable std::pair<uintptr_t, StackMap> cur_stack_map_;

 protected:
  Context* const context_;
  const bool check_suspended_;
};

}  // namespace art

#endif  // ART_RUNTIME_STACK_H_