Revert^4 "ART: Add StackVisitor accepting a lambda"

This reverts commit ec43a01e0ac948c59d5b1f9c3812f2901b48942a.

Reason for revert: Relanding the original change now that the
previously missing code has been added.
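
For reference, a minimal usage sketch of the relanded helper declared in
runtime/stack.h (the counting lambda and the java_frames name are
illustrative, mirroring the GetFrameCountClosure conversion below; assumes
the caller holds the mutator lock):

  size_t java_frames = 0u;
  StackVisitor::WalkStack(
      [&](const StackVisitor* visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtMethod* m = visitor->GetMethod();
        if (m != nullptr && !m->IsRuntimeMethod()) {
          ++java_frames;  // Count only real Java frames.
        }
        return true;  // Keep walking; returning false stops the walk.
      },
      Thread::Current(),
      /* context= */ nullptr,
      StackVisitor::StackWalkKind::kIncludeInlinedFrames);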

Bug: 115837065
Test: m test-art-host
Change-Id: Id30ccdf9aa9a6e56a914254793e399f2712c882d
diff --git a/openjdkjvmti/ti_stack.cc b/openjdkjvmti/ti_stack.cc
index 5de4a81..4a3eac8 100644
--- a/openjdkjvmti/ti_stack.cc
+++ b/openjdkjvmti/ti_stack.cc
@@ -673,34 +673,24 @@
   return ERR(NONE);
 }
 
-// Walks up the stack counting Java frames. This is not StackVisitor::ComputeNumFrames, as
-// runtime methods and transitions must not be counted.
-struct GetFrameCountVisitor : public art::StackVisitor {
-  explicit GetFrameCountVisitor(art::Thread* thread)
-      : art::StackVisitor(thread, nullptr, art::StackVisitor::StackWalkKind::kIncludeInlinedFrames),
-        count(0) {}
-
-  bool VisitFrame() override REQUIRES_SHARED(art::Locks::mutator_lock_) {
-    art::ArtMethod* m = GetMethod();
-    const bool do_count = !(m == nullptr || m->IsRuntimeMethod());
-    if (do_count) {
-      count++;
-    }
-    return true;
-  }
-
-  size_t count;
-};
-
 struct GetFrameCountClosure : public art::Closure {
  public:
   GetFrameCountClosure() : count(0) {}
 
   void Run(art::Thread* self) override REQUIRES_SHARED(art::Locks::mutator_lock_) {
-    GetFrameCountVisitor visitor(self);
-    visitor.WalkStack(false);
-
-    count = visitor.count;
+    // This is not StackVisitor::ComputeNumFrames, as runtime methods and transitions must not be
+    // counted.
+    art::StackVisitor::WalkStack(
+        [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(art::Locks::mutator_lock_) {
+          art::ArtMethod* m = stack_visitor->GetMethod();
+          if (m != nullptr && !m->IsRuntimeMethod()) {
+            count++;
+          }
+          return true;
+        },
+        self,
+        /* context= */ nullptr,
+        art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
   }
 
   size_t count;
@@ -743,46 +733,30 @@
   return ERR(NONE);
 }
 
-// Walks up the stack 'n' callers, when used with Thread::WalkStack.
-struct GetLocationVisitor : public art::StackVisitor {
-  GetLocationVisitor(art::Thread* thread, size_t n_in)
-      : art::StackVisitor(thread, nullptr, art::StackVisitor::StackWalkKind::kIncludeInlinedFrames),
-        n(n_in),
-        count(0),
-        caller(nullptr),
-        caller_dex_pc(0) {}
-
-  bool VisitFrame() override REQUIRES_SHARED(art::Locks::mutator_lock_) {
-    art::ArtMethod* m = GetMethod();
-    const bool do_count = !(m == nullptr || m->IsRuntimeMethod());
-    if (do_count) {
-      DCHECK(caller == nullptr);
-      if (count == n) {
-        caller = m;
-        caller_dex_pc = GetDexPc(false);
-        return false;
-      }
-      count++;
-    }
-    return true;
-  }
-
-  const size_t n;
-  size_t count;
-  art::ArtMethod* caller;
-  uint32_t caller_dex_pc;
-};
-
 struct GetLocationClosure : public art::Closure {
  public:
   explicit GetLocationClosure(size_t n_in) : n(n_in), method(nullptr), dex_pc(0) {}
 
   void Run(art::Thread* self) override REQUIRES_SHARED(art::Locks::mutator_lock_) {
-    GetLocationVisitor visitor(self, n);
-    visitor.WalkStack(false);
-
-    method = visitor.caller;
-    dex_pc = visitor.caller_dex_pc;
+    // Walks up the stack 'n' callers.
+    size_t count = 0u;
+    art::StackVisitor::WalkStack(
+        [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(art::Locks::mutator_lock_) {
+          art::ArtMethod* m = stack_visitor->GetMethod();
+          if (m != nullptr && !m->IsRuntimeMethod()) {
+            DCHECK(method == nullptr);
+            if (count == n) {
+              method = m;
+              dex_pc = stack_visitor->GetDexPc(/*abort_on_failure=*/false);
+              return false;
+            }
+            count++;
+          }
+          return true;
+        },
+        self,
+        /* context= */ nullptr,
+        art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
   }
 
   const size_t n;
diff --git a/runtime/debugger.cc b/runtime/debugger.cc
index 3ad7fc9..4af97f0 100644
--- a/runtime/debugger.cc
+++ b/runtime/debugger.cc
@@ -2362,25 +2362,18 @@
 }
 
 static int GetStackDepth(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_) {
-  struct CountStackDepthVisitor : public StackVisitor {
-    explicit CountStackDepthVisitor(Thread* thread_in)
-        : StackVisitor(thread_in, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
-          depth(0) {}
-
-    // TODO: Enable annotalysis. We know lock is held in constructor, but abstraction confuses
-    // annotalysis.
-    bool VisitFrame() override NO_THREAD_SAFETY_ANALYSIS {
-      if (!GetMethod()->IsRuntimeMethod()) {
-        ++depth;
-      }
-      return true;
-    }
-    size_t depth;
-  };
-
-  CountStackDepthVisitor visitor(thread);
-  visitor.WalkStack();
-  return visitor.depth;
+  size_t depth = 0u;
+  StackVisitor::WalkStack(
+      [&depth](const StackVisitor* visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        if (!visitor->GetMethod()->IsRuntimeMethod()) {
+          ++depth;
+        }
+        return true;
+      },
+      thread,
+      /* context= */ nullptr,
+      StackVisitor::StackWalkKind::kIncludeInlinedFrames);
+  return depth;
 }
 
 JDWP::JdwpError Dbg::GetThreadFrameCount(JDWP::ObjectId thread_id, size_t* result) {
@@ -2398,47 +2391,10 @@
   return JDWP::ERR_NONE;
 }
 
-JDWP::JdwpError Dbg::GetThreadFrames(JDWP::ObjectId thread_id, size_t start_frame,
-                                     size_t frame_count, JDWP::ExpandBuf* buf) {
-  class GetFrameVisitor : public StackVisitor {
-   public:
-    GetFrameVisitor(Thread* thread, size_t start_frame_in, size_t frame_count_in,
-                    JDWP::ExpandBuf* buf_in)
-        REQUIRES_SHARED(Locks::mutator_lock_)
-        : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
-          depth_(0),
-          start_frame_(start_frame_in),
-          frame_count_(frame_count_in),
-          buf_(buf_in) {
-      expandBufAdd4BE(buf_, frame_count_);
-    }
-
-    bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
-      if (GetMethod()->IsRuntimeMethod()) {
-        return true;  // The debugger can't do anything useful with a frame that has no Method*.
-      }
-      if (depth_ >= start_frame_ + frame_count_) {
-        return false;
-      }
-      if (depth_ >= start_frame_) {
-        JDWP::FrameId frame_id(GetFrameId());
-        JDWP::JdwpLocation location;
-        SetJdwpLocation(&location, GetMethod(), GetDexPc());
-        VLOG(jdwp) << StringPrintf("    Frame %3zd: id=%3" PRIu64 " ", depth_, frame_id) << location;
-        expandBufAdd8BE(buf_, frame_id);
-        expandBufAddLocation(buf_, location);
-      }
-      ++depth_;
-      return true;
-    }
-
-   private:
-    size_t depth_;
-    const size_t start_frame_;
-    const size_t frame_count_;
-    JDWP::ExpandBuf* buf_;
-  };
-
+JDWP::JdwpError Dbg::GetThreadFrames(JDWP::ObjectId thread_id,
+                                     const size_t start_frame,
+                                     const size_t frame_count,
+                                     JDWP::ExpandBuf* buf) {
   ScopedObjectAccessUnchecked soa(Thread::Current());
   JDWP::JdwpError error;
   Thread* thread = DecodeThread(soa, thread_id, &error);
@@ -2448,8 +2404,34 @@
   if (!IsSuspendedForDebugger(soa, thread)) {
     return JDWP::ERR_THREAD_NOT_SUSPENDED;
   }
-  GetFrameVisitor visitor(thread, start_frame, frame_count, buf);
-  visitor.WalkStack();
+
+  expandBufAdd4BE(buf, frame_count);
+
+  size_t depth = 0u;
+  StackVisitor::WalkStack(
+      [&](StackVisitor* visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        if (visitor->GetMethod()->IsRuntimeMethod()) {
+          return true;  // The debugger can't do anything useful with a frame that has no Method*.
+        }
+        if (depth >= start_frame + frame_count) {
+          return false;
+        }
+        if (depth >= start_frame) {
+          JDWP::FrameId frame_id(visitor->GetFrameId());
+          JDWP::JdwpLocation location;
+          SetJdwpLocation(&location, visitor->GetMethod(), visitor->GetDexPc());
+          VLOG(jdwp)
+              << StringPrintf("    Frame %3zd: id=%3" PRIu64 " ", depth, frame_id) << location;
+          expandBufAdd8BE(buf, frame_id);
+          expandBufAddLocation(buf, location);
+        }
+        ++depth;
+        return true;
+      },
+      thread,
+      /* context= */ nullptr,
+      StackVisitor::StackWalkKind::kIncludeInlinedFrames);
+
   return JDWP::ERR_NONE;
 }
 
@@ -2530,28 +2512,6 @@
   Runtime::Current()->GetThreadList()->SuspendSelfForDebugger();
 }
 
-struct GetThisVisitor : public StackVisitor {
-  GetThisVisitor(Thread* thread, Context* context, JDWP::FrameId frame_id_in)
-      REQUIRES_SHARED(Locks::mutator_lock_)
-      : StackVisitor(thread, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
-        this_object(nullptr),
-        frame_id(frame_id_in) {}
-
-  // TODO: Enable annotalysis. We know lock is held in constructor, but abstraction confuses
-  // annotalysis.
-  bool VisitFrame() override NO_THREAD_SAFETY_ANALYSIS {
-    if (frame_id != GetFrameId()) {
-      return true;  // continue
-    } else {
-      this_object = GetThisObject();
-      return false;
-    }
-  }
-
-  mirror::Object* this_object;
-  JDWP::FrameId frame_id;
-};
-
 JDWP::JdwpError Dbg::GetThisObject(JDWP::ObjectId thread_id, JDWP::FrameId frame_id,
                                    JDWP::ObjectId* result) {
   ScopedObjectAccessUnchecked soa(Thread::Current());
@@ -2564,48 +2524,50 @@
     return JDWP::ERR_THREAD_NOT_SUSPENDED;
   }
   std::unique_ptr<Context> context(Context::Create());
-  GetThisVisitor visitor(thread, context.get(), frame_id);
-  visitor.WalkStack();
-  *result = gRegistry->Add(visitor.this_object);
+  mirror::Object* this_object = nullptr;
+  StackVisitor::WalkStack(
+      [&](art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        if (frame_id != stack_visitor->GetFrameId()) {
+          return true;  // continue
+        } else {
+          this_object = stack_visitor->GetThisObject();
+          return false;
+        }
+      },
+      thread,
+      context.get(),
+      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
+  *result = gRegistry->Add(this_object);
   return JDWP::ERR_NONE;
 }
 
-// Walks the stack until we find the frame with the given FrameId.
-class FindFrameVisitor final : public StackVisitor {
- public:
-  FindFrameVisitor(Thread* thread, Context* context, JDWP::FrameId frame_id)
-      REQUIRES_SHARED(Locks::mutator_lock_)
-      : StackVisitor(thread, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
-        frame_id_(frame_id),
-        error_(JDWP::ERR_INVALID_FRAMEID) {}
-
-  // TODO: Enable annotalysis. We know lock is held in constructor, but abstraction confuses
-  // annotalysis.
-  bool VisitFrame() override NO_THREAD_SAFETY_ANALYSIS {
-    if (GetFrameId() != frame_id_) {
-      return true;  // Not our frame, carry on.
-    }
-    ArtMethod* m = GetMethod();
-    if (m->IsNative()) {
-      // We can't read/write local value from/into native method.
-      error_ = JDWP::ERR_OPAQUE_FRAME;
-    } else {
-      // We found our frame.
-      error_ = JDWP::ERR_NONE;
-    }
-    return false;
-  }
-
-  JDWP::JdwpError GetError() const {
-    return error_;
-  }
-
- private:
-  const JDWP::FrameId frame_id_;
-  JDWP::JdwpError error_;
-
-  DISALLOW_COPY_AND_ASSIGN(FindFrameVisitor);
-};
+template <typename FrameHandler>
+static JDWP::JdwpError FindAndHandleNonNativeFrame(Thread* thread,
+                                                   JDWP::FrameId frame_id,
+                                                   const FrameHandler& handler)
+    REQUIRES_SHARED(Locks::mutator_lock_) {
+  JDWP::JdwpError result = JDWP::ERR_INVALID_FRAMEID;
+  std::unique_ptr<Context> context(Context::Create());
+  StackVisitor::WalkStack(
+      [&](art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        if (stack_visitor->GetFrameId() != frame_id) {
+          return true;  // Not our frame, carry on.
+        }
+        ArtMethod* m = stack_visitor->GetMethod();
+        if (m->IsNative()) {
+          // We can't read/write local value from/into native method.
+          result = JDWP::ERR_OPAQUE_FRAME;
+        } else {
+          // We found our frame.
+          result = handler(stack_visitor);
+        }
+        return false;
+      },
+      thread,
+      context.get(),
+      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
+  return result;
+}
 
 JDWP::JdwpError Dbg::GetLocalValues(JDWP::Request* request, JDWP::ExpandBuf* pReply) {
   JDWP::ObjectId thread_id = request->ReadThreadId();
@@ -2620,31 +2582,29 @@
   if (!IsSuspendedForDebugger(soa, thread)) {
     return JDWP::ERR_THREAD_NOT_SUSPENDED;
   }
-  // Find the frame with the given frame_id.
-  std::unique_ptr<Context> context(Context::Create());
-  FindFrameVisitor visitor(thread, context.get(), frame_id);
-  visitor.WalkStack();
-  if (visitor.GetError() != JDWP::ERR_NONE) {
-    return visitor.GetError();
-  }
 
-  // Read the values from visitor's context.
-  int32_t slot_count = request->ReadSigned32("slot count");
-  expandBufAdd4BE(pReply, slot_count);     /* "int values" */
-  for (int32_t i = 0; i < slot_count; ++i) {
-    uint32_t slot = request->ReadUnsigned32("slot");
-    JDWP::JdwpTag reqSigByte = request->ReadTag();
+  return FindAndHandleNonNativeFrame(
+      thread,
+      frame_id,
+      [&](art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        // Read the values from visitor's context.
+        int32_t slot_count = request->ReadSigned32("slot count");
+        expandBufAdd4BE(pReply, slot_count);     /* "int values" */
+        for (int32_t i = 0; i < slot_count; ++i) {
+          uint32_t slot = request->ReadUnsigned32("slot");
+          JDWP::JdwpTag reqSigByte = request->ReadTag();
 
-    VLOG(jdwp) << "    --> slot " << slot << " " << reqSigByte;
+          VLOG(jdwp) << "    --> slot " << slot << " " << reqSigByte;
 
-    size_t width = Dbg::GetTagWidth(reqSigByte);
-    uint8_t* ptr = expandBufAddSpace(pReply, width + 1);
-    error = Dbg::GetLocalValue(visitor, soa, slot, reqSigByte, ptr, width);
-    if (error != JDWP::ERR_NONE) {
-      return error;
-    }
-  }
-  return JDWP::ERR_NONE;
+          size_t width = Dbg::GetTagWidth(reqSigByte);
+          uint8_t* ptr = expandBufAddSpace(pReply, width + 1);
+          error = Dbg::GetLocalValue(*stack_visitor, soa, slot, reqSigByte, ptr, width);
+          if (error != JDWP::ERR_NONE) {
+            return error;
+          }
+        }
+        return JDWP::ERR_NONE;
+      });
 }
 
 constexpr JDWP::JdwpError kStackFrameLocalAccessError = JDWP::ERR_ABSENT_INFORMATION;
@@ -2791,29 +2751,27 @@
   if (!IsSuspendedForDebugger(soa, thread)) {
     return JDWP::ERR_THREAD_NOT_SUSPENDED;
   }
-  // Find the frame with the given frame_id.
-  std::unique_ptr<Context> context(Context::Create());
-  FindFrameVisitor visitor(thread, context.get(), frame_id);
-  visitor.WalkStack();
-  if (visitor.GetError() != JDWP::ERR_NONE) {
-    return visitor.GetError();
-  }
 
-  // Writes the values into visitor's context.
-  int32_t slot_count = request->ReadSigned32("slot count");
-  for (int32_t i = 0; i < slot_count; ++i) {
-    uint32_t slot = request->ReadUnsigned32("slot");
-    JDWP::JdwpTag sigByte = request->ReadTag();
-    size_t width = Dbg::GetTagWidth(sigByte);
-    uint64_t value = request->ReadValue(width);
+  return FindAndHandleNonNativeFrame(
+      thread,
+      frame_id,
+      [&](art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        // Writes the values into visitor's context.
+        int32_t slot_count = request->ReadSigned32("slot count");
+        for (int32_t i = 0; i < slot_count; ++i) {
+          uint32_t slot = request->ReadUnsigned32("slot");
+          JDWP::JdwpTag sigByte = request->ReadTag();
+          size_t width = Dbg::GetTagWidth(sigByte);
+          uint64_t value = request->ReadValue(width);
 
-    VLOG(jdwp) << "    --> slot " << slot << " " << sigByte << " " << value;
-    error = Dbg::SetLocalValue(thread, visitor, slot, sigByte, value, width);
-    if (error != JDWP::ERR_NONE) {
-      return error;
-    }
-  }
-  return JDWP::ERR_NONE;
+          VLOG(jdwp) << "    --> slot " << slot << " " << sigByte << " " << value;
+          error = Dbg::SetLocalValue(thread, *stack_visitor, slot, sigByte, value, width);
+          if (error != JDWP::ERR_NONE) {
+            return error;
+          }
+        }
+        return JDWP::ERR_NONE;
+      });
 }
 
 template<typename T>
@@ -2985,107 +2943,71 @@
   gJdwpState->PostFieldEvent(&location, f, this_object, field_value, true);
 }
 
-/**
- * Finds the location where this exception will be caught. We search until we reach the top
- * frame, in which case this exception is considered uncaught.
- */
-class CatchLocationFinder : public StackVisitor {
- public:
-  CatchLocationFinder(Thread* self, const Handle<mirror::Throwable>& exception, Context* context)
-      REQUIRES_SHARED(Locks::mutator_lock_)
-    : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
-      exception_(exception),
-      handle_scope_(self),
-      this_at_throw_(handle_scope_.NewHandle<mirror::Object>(nullptr)),
-      catch_method_(nullptr),
-      throw_method_(nullptr),
-      catch_dex_pc_(dex::kDexNoIndex),
-      throw_dex_pc_(dex::kDexNoIndex) {
-  }
-
-  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
-    ArtMethod* method = GetMethod();
-    DCHECK(method != nullptr);
-    if (method->IsRuntimeMethod()) {
-      // Ignore callee save method.
-      DCHECK(method->IsCalleeSaveMethod());
-      return true;
-    }
-
-    uint32_t dex_pc = GetDexPc();
-    if (throw_method_ == nullptr) {
-      // First Java method found. It is either the method that threw the exception,
-      // or the Java native method that is reporting an exception thrown by
-      // native code.
-      this_at_throw_.Assign(GetThisObject());
-      throw_method_ = method;
-      throw_dex_pc_ = dex_pc;
-    }
-
-    if (dex_pc != dex::kDexNoIndex) {
-      StackHandleScope<1> hs(GetThread());
-      uint32_t found_dex_pc;
-      Handle<mirror::Class> exception_class(hs.NewHandle(exception_->GetClass()));
-      bool unused_clear_exception;
-      found_dex_pc = method->FindCatchBlock(exception_class, dex_pc, &unused_clear_exception);
-      if (found_dex_pc != dex::kDexNoIndex) {
-        catch_method_ = method;
-        catch_dex_pc_ = found_dex_pc;
-        return false;  // End stack walk.
-      }
-    }
-    return true;  // Continue stack walk.
-  }
-
-  ArtMethod* GetCatchMethod() REQUIRES_SHARED(Locks::mutator_lock_) {
-    return catch_method_;
-  }
-
-  ArtMethod* GetThrowMethod() REQUIRES_SHARED(Locks::mutator_lock_) {
-    return throw_method_;
-  }
-
-  mirror::Object* GetThisAtThrow() REQUIRES_SHARED(Locks::mutator_lock_) {
-    return this_at_throw_.Get();
-  }
-
-  uint32_t GetCatchDexPc() const {
-    return catch_dex_pc_;
-  }
-
-  uint32_t GetThrowDexPc() const {
-    return throw_dex_pc_;
-  }
-
- private:
-  const Handle<mirror::Throwable>& exception_;
-  StackHandleScope<1> handle_scope_;
-  MutableHandle<mirror::Object> this_at_throw_;
-  ArtMethod* catch_method_;
-  ArtMethod* throw_method_;
-  uint32_t catch_dex_pc_;
-  uint32_t throw_dex_pc_;
-
-  DISALLOW_COPY_AND_ASSIGN(CatchLocationFinder);
-};
-
 void Dbg::PostException(mirror::Throwable* exception_object) {
   if (!IsDebuggerActive()) {
     return;
   }
   Thread* const self = Thread::Current();
-  StackHandleScope<1> handle_scope(self);
+  StackHandleScope<2> handle_scope(self);
   Handle<mirror::Throwable> h_exception(handle_scope.NewHandle(exception_object));
+  MutableHandle<mirror::Object> this_at_throw = handle_scope.NewHandle<mirror::Object>(nullptr);
   std::unique_ptr<Context> context(Context::Create());
-  CatchLocationFinder clf(self, h_exception, context.get());
-  clf.WalkStack(/* include_transitions= */ false);
-  JDWP::EventLocation exception_throw_location;
-  SetEventLocation(&exception_throw_location, clf.GetThrowMethod(), clf.GetThrowDexPc());
-  JDWP::EventLocation exception_catch_location;
-  SetEventLocation(&exception_catch_location, clf.GetCatchMethod(), clf.GetCatchDexPc());
 
-  gJdwpState->PostException(&exception_throw_location, h_exception.Get(), &exception_catch_location,
-                            clf.GetThisAtThrow());
+  ArtMethod* catch_method = nullptr;
+  ArtMethod* throw_method = nullptr;
+  uint32_t catch_dex_pc = dex::kDexNoIndex;
+  uint32_t throw_dex_pc = dex::kDexNoIndex;
+  StackVisitor::WalkStack(
+      /**
+       * Finds the location where this exception will be caught. We search until we reach the top
+       * frame, in which case this exception is considered uncaught.
+       */
+      [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        ArtMethod* method = stack_visitor->GetMethod();
+        DCHECK(method != nullptr);
+        if (method->IsRuntimeMethod()) {
+          // Ignore callee save method.
+          DCHECK(method->IsCalleeSaveMethod());
+          return true;
+        }
+
+        uint32_t dex_pc = stack_visitor->GetDexPc();
+        if (throw_method == nullptr) {
+          // First Java method found. It is either the method that threw the exception,
+          // or the Java native method that is reporting an exception thrown by
+          // native code.
+          this_at_throw.Assign(stack_visitor->GetThisObject());
+          throw_method = method;
+          throw_dex_pc = dex_pc;
+        }
+
+        if (dex_pc != dex::kDexNoIndex) {
+          StackHandleScope<1> hs(stack_visitor->GetThread());
+          uint32_t found_dex_pc;
+          Handle<mirror::Class> exception_class(hs.NewHandle(h_exception->GetClass()));
+          bool unused_clear_exception;
+          found_dex_pc = method->FindCatchBlock(exception_class, dex_pc, &unused_clear_exception);
+          if (found_dex_pc != dex::kDexNoIndex) {
+            catch_method = method;
+            catch_dex_pc = found_dex_pc;
+            return false;  // End stack walk.
+          }
+        }
+        return true;  // Continue stack walk.
+      },
+      self,
+      context.get(),
+      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
+
+  JDWP::EventLocation exception_throw_location;
+  SetEventLocation(&exception_throw_location, throw_method, throw_dex_pc);
+  JDWP::EventLocation exception_catch_location;
+  SetEventLocation(&exception_catch_location, catch_method, catch_dex_pc);
+
+  gJdwpState->PostException(&exception_throw_location,
+                            h_exception.Get(),
+                            &exception_catch_location,
+                            this_at_throw.Get());
 }
 
 void Dbg::PostClassPrepare(mirror::Class* c) {
@@ -3649,56 +3571,6 @@
   return instrumentation->IsDeoptimized(m);
 }
 
-class NeedsDeoptimizationVisitor : public StackVisitor {
- public:
-  explicit NeedsDeoptimizationVisitor(Thread* self)
-      REQUIRES_SHARED(Locks::mutator_lock_)
-    : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
-      needs_deoptimization_(false) {}
-
-  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
-    // The visitor is meant to be used when handling exception from compiled code only.
-    CHECK(!IsShadowFrame()) << "We only expect to visit compiled frame: "
-                            << ArtMethod::PrettyMethod(GetMethod());
-    ArtMethod* method = GetMethod();
-    if (method == nullptr) {
-      // We reach an upcall and don't need to deoptimize this part of the stack (ManagedFragment)
-      // so we can stop the visit.
-      DCHECK(!needs_deoptimization_);
-      return false;
-    }
-    if (Runtime::Current()->GetInstrumentation()->InterpretOnly()) {
-      // We found a compiled frame in the stack but instrumentation is set to interpret
-      // everything: we need to deoptimize.
-      needs_deoptimization_ = true;
-      return false;
-    }
-    if (Runtime::Current()->GetInstrumentation()->IsDeoptimized(method)) {
-      // We found a deoptimized method in the stack.
-      needs_deoptimization_ = true;
-      return false;
-    }
-    ShadowFrame* frame = GetThread()->FindDebuggerShadowFrame(GetFrameId());
-    if (frame != nullptr) {
-      // The debugger allocated a ShadowFrame to update a variable in the stack: we need to
-      // deoptimize the stack to execute (and deallocate) this frame.
-      needs_deoptimization_ = true;
-      return false;
-    }
-    return true;
-  }
-
-  bool NeedsDeoptimization() const {
-    return needs_deoptimization_;
-  }
-
- private:
-  // Do we need to deoptimize the stack?
-  bool needs_deoptimization_;
-
-  DISALLOW_COPY_AND_ASSIGN(NeedsDeoptimizationVisitor);
-};
-
 // Do we need to deoptimize the stack to handle an exception?
 bool Dbg::IsForcedInterpreterNeededForExceptionImpl(Thread* thread) {
   const SingleStepControl* const ssc = thread->GetSingleStepControl();
@@ -3708,9 +3580,45 @@
   }
   // Deoptimization is required if at least one method in the stack needs it. However we
   // skip frames that will be unwound (thus not executed).
-  NeedsDeoptimizationVisitor visitor(thread);
-  visitor.WalkStack(true);  // includes upcall.
-  return visitor.NeedsDeoptimization();
+  bool needs_deoptimization = false;
+  StackVisitor::WalkStack(
+      [&](art::StackVisitor* visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        // The visitor is meant to be used when handling exception from compiled code only.
+        CHECK(!visitor->IsShadowFrame()) << "We only expect to visit compiled frame: "
+                                         << ArtMethod::PrettyMethod(visitor->GetMethod());
+        ArtMethod* method = visitor->GetMethod();
+        if (method == nullptr) {
+          // We reach an upcall and don't need to deoptimize this part of the stack (ManagedFragment)
+          // so we can stop the visit.
+          DCHECK(!needs_deoptimization);
+          return false;
+        }
+        if (Runtime::Current()->GetInstrumentation()->InterpretOnly()) {
+          // We found a compiled frame in the stack but instrumentation is set to interpret
+          // everything: we need to deoptimize.
+          needs_deoptimization = true;
+          return false;
+        }
+        if (Runtime::Current()->GetInstrumentation()->IsDeoptimized(method)) {
+          // We found a deoptimized method in the stack.
+          needs_deoptimization = true;
+          return false;
+        }
+        ShadowFrame* frame = visitor->GetThread()->FindDebuggerShadowFrame(visitor->GetFrameId());
+        if (frame != nullptr) {
+          // The debugger allocated a ShadowFrame to update a variable in the stack: we need to
+          // deoptimize the stack to execute (and deallocate) this frame.
+          needs_deoptimization = true;
+          return false;
+        }
+        return true;
+      },
+      thread,
+      /* context= */ nullptr,
+      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames,
+      /* check_suspended */ true,
+      /* include_transitions */ true);
+  return needs_deoptimization;
 }
 
 // Scoped utility class to suspend a thread so that we may do tasks such as walk its stack. Doesn't
diff --git a/runtime/gc/allocation_record.cc b/runtime/gc/allocation_record.cc
index e11fa5c..80e3394 100644
--- a/runtime/gc/allocation_record.cc
+++ b/runtime/gc/allocation_record.cc
@@ -184,34 +184,6 @@
   new_record_condition_.Broadcast(Thread::Current());
 }
 
-class AllocRecordStackVisitor : public StackVisitor {
- public:
-  AllocRecordStackVisitor(Thread* thread, size_t max_depth, AllocRecordStackTrace* trace_out)
-      REQUIRES_SHARED(Locks::mutator_lock_)
-      : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
-        max_depth_(max_depth),
-        trace_(trace_out) {}
-
-  // TODO: Enable annotalysis. We know lock is held in constructor, but abstraction confuses
-  // annotalysis.
-  bool VisitFrame() override NO_THREAD_SAFETY_ANALYSIS {
-    if (trace_->GetDepth() >= max_depth_) {
-      return false;
-    }
-    ArtMethod* m = GetMethod();
-    // m may be null if we have inlined methods of unresolved classes. b/27858645
-    if (m != nullptr && !m->IsRuntimeMethod()) {
-      m = m->GetInterfaceMethodIfProxy(kRuntimePointerSize);
-      trace_->AddStackElement(AllocRecordStackTraceElement(m, GetDexPc()));
-    }
-    return true;
-  }
-
- private:
-  const size_t max_depth_;
-  AllocRecordStackTrace* const trace_;
-};
-
 void AllocRecordObjectMap::SetAllocTrackingEnabled(bool enable) {
   Thread* self = Thread::Current();
   Heap* heap = Runtime::Current()->GetHeap();
@@ -268,11 +240,26 @@
   // Get stack trace outside of lock in case there are allocations during the stack walk.
   // b/27858645.
   AllocRecordStackTrace trace;
-  AllocRecordStackVisitor visitor(self, max_stack_depth_, /*out*/ &trace);
   {
     StackHandleScope<1> hs(self);
     auto obj_wrapper = hs.NewHandleWrapper(obj);
-    visitor.WalkStack();
+
+    StackVisitor::WalkStack(
+        [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+          if (trace.GetDepth() >= max_stack_depth_) {
+            return false;
+          }
+          ArtMethod* m = stack_visitor->GetMethod();
+          // m may be null if we have inlined methods of unresolved classes. b/27858645
+          if (m != nullptr && !m->IsRuntimeMethod()) {
+            m = m->GetInterfaceMethodIfProxy(kRuntimePointerSize);
+            trace.AddStackElement(AllocRecordStackTraceElement(m, stack_visitor->GetDexPc()));
+          }
+          return true;
+        },
+        self,
+        /* context= */ nullptr,
+        art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
   }
 
   MutexLock mu(self, *Locks::alloc_tracker_lock_);
diff --git a/runtime/instrumentation.cc b/runtime/instrumentation.cc
index cbcaaef..12f1522 100644
--- a/runtime/instrumentation.cc
+++ b/runtime/instrumentation.cc
@@ -18,6 +18,8 @@
 
 #include <sstream>
 
+#include <android-base/logging.h>
+
 #include "arch/context.h"
 #include "art_field-inl.h"
 #include "art_method-inl.h"
@@ -1355,65 +1357,66 @@
 }
 
 // Try to get the shorty of a runtime method if it's an invocation stub.
-struct RuntimeMethodShortyVisitor : public StackVisitor {
-  explicit RuntimeMethodShortyVisitor(Thread* thread)
-      : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
-        shorty('V') {}
-
-  static uint16_t GetMethodIndexOfInvoke(ArtMethod* caller,
-                                         const Instruction& inst,
-                                         uint32_t dex_pc)
-        REQUIRES_SHARED(Locks::mutator_lock_) {
-    switch (inst.Opcode()) {
-      case Instruction::INVOKE_VIRTUAL_RANGE_QUICK:
-      case Instruction::INVOKE_VIRTUAL_QUICK: {
-        uint16_t method_idx = caller->GetIndexFromQuickening(dex_pc);
-        CHECK_NE(method_idx, DexFile::kDexNoIndex16);
-        return method_idx;
-      }
-      default: {
-        return inst.VRegB();
-      }
-    }
-  }
-
-  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
-    ArtMethod* m = GetMethod();
-    if (m == nullptr || m->IsRuntimeMethod()) {
-      return true;
-    }
-    // The first Java method.
-    if (m->IsNative()) {
-      // Use JNI method's shorty for the jni stub.
-      shorty = m->GetShorty()[0];
-    } else if (m->IsProxyMethod()) {
-      // Proxy method just invokes its proxied method via
-      // art_quick_proxy_invoke_handler.
-      shorty = m->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty()[0];
-    } else {
-      const Instruction& instr = m->DexInstructions().InstructionAt(GetDexPc());
-      if (instr.IsInvoke()) {
-        uint16_t method_index = GetMethodIndexOfInvoke(m, instr, GetDexPc());
-        const DexFile* dex_file = m->GetDexFile();
-        if (interpreter::IsStringInit(dex_file, method_index)) {
-          // Invoking string init constructor is turned into invoking
-          // StringFactory.newStringFromChars() which returns a string.
-          shorty = 'L';
-        } else {
-          shorty = dex_file->GetMethodShorty(method_index)[0];
+static char GetRuntimeMethodShorty(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_) {
+  char shorty = 'V';
+  StackVisitor::WalkStack(
+      [&shorty](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        ArtMethod* m = stack_visitor->GetMethod();
+        if (m == nullptr || m->IsRuntimeMethod()) {
+          return true;
         }
-      } else {
-        // It could be that a non-invoke opcode invokes a stub, which in turn
-        // invokes Java code. In such cases, we should never expect a return
-        // value from the stub.
-      }
-    }
-    // Stop stack walking since we've seen a Java frame.
-    return false;
-  }
+        // The first Java method.
+        if (m->IsNative()) {
+          // Use JNI method's shorty for the jni stub.
+          shorty = m->GetShorty()[0];
+        } else if (m->IsProxyMethod()) {
+          // Proxy method just invokes its proxied method via
+          // art_quick_proxy_invoke_handler.
+          shorty = m->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty()[0];
+        } else {
+          const Instruction& instr = m->DexInstructions().InstructionAt(stack_visitor->GetDexPc());
+          if (instr.IsInvoke()) {
+            auto get_method_index_fn = [](ArtMethod* caller,
+                                          const Instruction& inst,
+                                          uint32_t dex_pc)
+                REQUIRES_SHARED(Locks::mutator_lock_) {
+              switch (inst.Opcode()) {
+                case Instruction::INVOKE_VIRTUAL_RANGE_QUICK:
+                case Instruction::INVOKE_VIRTUAL_QUICK: {
+                  uint16_t method_idx = caller->GetIndexFromQuickening(dex_pc);
+                  CHECK_NE(method_idx, DexFile::kDexNoIndex16);
+                  return method_idx;
+                }
+                default: {
+                  return static_cast<uint16_t>(inst.VRegB());
+                }
+              }
+            };
 
-  char shorty;
-};
+            uint16_t method_index = get_method_index_fn(m, instr, stack_visitor->GetDexPc());
+            const DexFile* dex_file = m->GetDexFile();
+            if (interpreter::IsStringInit(dex_file, method_index)) {
+              // Invoking string init constructor is turned into invoking
+              // StringFactory.newStringFromChars() which returns a string.
+              shorty = 'L';
+            } else {
+              shorty = dex_file->GetMethodShorty(method_index)[0];
+            }
+
+          } else {
+            // It could be that a non-invoke opcode invokes a stub, which in turn
+            // invokes Java code. In such cases, we should never expect a return
+            // value from the stub.
+          }
+        }
+        // Stop stack walking since we've seen a Java frame.
+        return false;
+      },
+      thread,
+      /* context= */ nullptr,
+      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
+  return shorty;
+}
 
 TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self,
                                                             uintptr_t* return_pc,
@@ -1447,9 +1450,7 @@
       // for clinit, we need to pass return results to the caller.
       // We need the correct shorty to decide whether we need to pass the return
       // result for deoptimization below.
-      RuntimeMethodShortyVisitor visitor(self);
-      visitor.WalkStack();
-      return_shorty = visitor.shorty;
+      return_shorty = GetRuntimeMethodShorty(self);
     } else {
       // Some runtime methods such as allocations, unresolved field getters, etc.
       // have return value. We don't need to set return_value since MethodExitEvent()
diff --git a/runtime/jit/jit_code_cache.cc b/runtime/jit/jit_code_cache.cc
index 1701ca8..7729838 100644
--- a/runtime/jit/jit_code_cache.cc
+++ b/runtime/jit/jit_code_cache.cc
@@ -18,7 +18,8 @@
 
 #include <sstream>
 
-#include "android-base/unique_fd.h"
+#include <android-base/logging.h>
+#include <android-base/unique_fd.h>
 
 #include "arch/context.h"
 #include "art_method-inl.h"
@@ -1272,31 +1273,6 @@
   }
 }
 
-class MarkCodeVisitor final : public StackVisitor {
- public:
-  MarkCodeVisitor(Thread* thread_in, JitCodeCache* code_cache_in, CodeCacheBitmap* bitmap)
-      : StackVisitor(thread_in, nullptr, StackVisitor::StackWalkKind::kSkipInlinedFrames),
-        code_cache_(code_cache_in),
-        bitmap_(bitmap) {}
-
-  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
-    const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
-    if (method_header == nullptr) {
-      return true;
-    }
-    const void* code = method_header->GetCode();
-    if (code_cache_->ContainsPc(code)) {
-      // Use the atomic set version, as multiple threads are executing this code.
-      bitmap_->AtomicTestAndSet(FromCodeToAllocation(code));
-    }
-    return true;
-  }
-
- private:
-  JitCodeCache* const code_cache_;
-  CodeCacheBitmap* const bitmap_;
-};
-
 class MarkCodeClosure final : public Closure {
  public:
   MarkCodeClosure(JitCodeCache* code_cache, CodeCacheBitmap* bitmap, Barrier* barrier)
@@ -1305,8 +1281,24 @@
   void Run(Thread* thread) override REQUIRES_SHARED(Locks::mutator_lock_) {
     ScopedTrace trace(__PRETTY_FUNCTION__);
     DCHECK(thread == Thread::Current() || thread->IsSuspended());
-    MarkCodeVisitor visitor(thread, code_cache_, bitmap_);
-    visitor.WalkStack();
+    StackVisitor::WalkStack(
+        [&](const art::StackVisitor* stack_visitor) {
+          const OatQuickMethodHeader* method_header =
+              stack_visitor->GetCurrentOatQuickMethodHeader();
+          if (method_header == nullptr) {
+            return true;
+          }
+          const void* code = method_header->GetCode();
+          if (code_cache_->ContainsPc(code)) {
+            // Use the atomic set version, as multiple threads are executing this code.
+            bitmap_->AtomicTestAndSet(FromCodeToAllocation(code));
+          }
+          return true;
+        },
+        thread,
+        /* context= */ nullptr,
+        art::StackVisitor::StackWalkKind::kSkipInlinedFrames);
+
     if (kIsDebugBuild) {
       // The stack walking code queries the side instrumentation stack if it
       // sees an instrumentation exit pc, so the JIT code of methods in that stack
diff --git a/runtime/monitor.cc b/runtime/monitor.cc
index df2a8e2..6479283 100644
--- a/runtime/monitor.cc
+++ b/runtime/monitor.cc
@@ -277,43 +277,6 @@
   obj_ = GcRoot<mirror::Object>(object);
 }
 
-// Note: Adapted from CurrentMethodVisitor in thread.cc. We must not resolve here.
-
-struct NthCallerWithDexPcVisitor final : public StackVisitor {
-  explicit NthCallerWithDexPcVisitor(Thread* thread, size_t frame)
-      REQUIRES_SHARED(Locks::mutator_lock_)
-      : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
-        method_(nullptr),
-        dex_pc_(0),
-        current_frame_number_(0),
-        wanted_frame_number_(frame) {}
-  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
-    ArtMethod* m = GetMethod();
-    if (m == nullptr || m->IsRuntimeMethod()) {
-      // Runtime method, upcall, or resolution issue. Skip.
-      return true;
-    }
-
-    // Is this the requested frame?
-    if (current_frame_number_ == wanted_frame_number_) {
-      method_ = m;
-      dex_pc_ = GetDexPc(/* abort_on_failure=*/ false);
-      return false;
-    }
-
-    // Look for more.
-    current_frame_number_++;
-    return true;
-  }
-
-  ArtMethod* method_;
-  uint32_t dex_pc_;
-
- private:
-  size_t current_frame_number_;
-  const size_t wanted_frame_number_;
-};
-
 // This function is inlined and just helps to not have the VLOG and ATRACE check at all the
 // potential tracing points.
 void Monitor::AtraceMonitorLock(Thread* self, mirror::Object* obj, bool is_wait) {
@@ -326,13 +289,41 @@
   // Wait() requires a deeper call stack to be useful. Otherwise you'll see "Waiting at
   // Object.java". Assume that we'll wait a nontrivial amount, so it's OK to do a longer
   // stack walk than if !is_wait.
-  NthCallerWithDexPcVisitor visitor(self, is_wait ? 1U : 0U);
-  visitor.WalkStack(false);
+  const size_t wanted_frame_number = is_wait ? 1U : 0U;
+
+  ArtMethod* method = nullptr;
+  uint32_t dex_pc = 0u;
+
+  size_t current_frame_number = 0u;
+  StackVisitor::WalkStack(
+      // Note: Adapted from CurrentMethodVisitor in thread.cc. We must not resolve here.
+      [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        ArtMethod* m = stack_visitor->GetMethod();
+        if (m == nullptr || m->IsRuntimeMethod()) {
+          // Runtime method, upcall, or resolution issue. Skip.
+          return true;
+        }
+
+        // Is this the requested frame?
+        if (current_frame_number == wanted_frame_number) {
+          method = m;
+          dex_pc = stack_visitor->GetDexPc(/* abort_on_failure= */ false);
+          return false;
+        }
+
+        // Look for more.
+        current_frame_number++;
+        return true;
+      },
+      self,
+      /* context= */ nullptr,
+      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
+
   const char* prefix = is_wait ? "Waiting on " : "Locking ";
 
   const char* filename;
   int32_t line_number;
-  TranslateLocation(visitor.method_, visitor.dex_pc_, &filename, &line_number);
+  TranslateLocation(method, dex_pc, &filename, &line_number);
 
   // It would be nice to have a stable "ID" for the object here. However, the only stable thing
   // would be the identity hashcode. But we cannot use IdentityHashcode here: For one, there are
diff --git a/runtime/quick_exception_handler.cc b/runtime/quick_exception_handler.cc
index afdfefa..d4e3d54 100644
--- a/runtime/quick_exception_handler.cc
+++ b/runtime/quick_exception_handler.cc
@@ -154,46 +154,36 @@
   DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
 };
 
-// Counts instrumentation stack frame prior to catch handler or upcall.
-class InstrumentationStackVisitor : public StackVisitor {
- public:
-  InstrumentationStackVisitor(Thread* self, size_t frame_depth)
-      REQUIRES_SHARED(Locks::mutator_lock_)
-      : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
-        frame_depth_(frame_depth),
-        instrumentation_frames_to_pop_(0) {
-    CHECK_NE(frame_depth_, kInvalidFrameDepth);
-  }
-
-  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
-    size_t current_frame_depth = GetFrameDepth();
-    if (current_frame_depth < frame_depth_) {
-      CHECK(GetMethod() != nullptr);
-      if (UNLIKELY(reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) == GetReturnPc())) {
-        if (!IsInInlinedFrame()) {
-          // We do not count inlined frames, because we do not instrument them. The reason we
-          // include them in the stack walking is the check against `frame_depth_`, which is
-          // given to us by a visitor that visits inlined frames.
-          ++instrumentation_frames_to_pop_;
+static size_t GetInstrumentationFramesToPop(Thread* self, size_t frame_depth)
+    REQUIRES_SHARED(Locks::mutator_lock_) {
+  CHECK_NE(frame_depth, kInvalidFrameDepth);
+  size_t instrumentation_frames_to_pop = 0;
+  StackVisitor::WalkStack(
+      [&](art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        size_t current_frame_depth = stack_visitor->GetFrameDepth();
+        if (current_frame_depth < frame_depth) {
+          CHECK(stack_visitor->GetMethod() != nullptr);
+          if (UNLIKELY(reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) ==
+                  stack_visitor->GetReturnPc())) {
+            if (!stack_visitor->IsInInlinedFrame()) {
+              // We do not count inlined frames, because we do not instrument them. The reason we
+              // include them in the stack walking is the check against `frame_depth`, which is
+              // given to us by a visitor that visits inlined frames.
+              ++instrumentation_frames_to_pop;
+            }
+          }
+          return true;
         }
-      }
-      return true;
-    } else {
-      // We reached the frame of the catch handler or the upcall.
-      return false;
-    }
-  }
-
-  size_t GetInstrumentationFramesToPop() const {
-    return instrumentation_frames_to_pop_;
-  }
-
- private:
-  const size_t frame_depth_;
-  size_t instrumentation_frames_to_pop_;
-
-  DISALLOW_COPY_AND_ASSIGN(InstrumentationStackVisitor);
-};
+        // We reached the frame of the catch handler or the upcall.
+        return false;
+      },
+      self,
+      /* context= */ nullptr,
+      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames,
+      /* check_suspended */ true,
+      /* include_transitions */ true);
+  return instrumentation_frames_to_pop;
+}
 
 // Finds the appropriate exception catch after calling all method exit instrumentation functions.
 // Note that this might change the exception being thrown.
@@ -229,9 +219,8 @@
 
     // Figure out how many of those frames have instrumentation we need to remove (Should be the
     // exact same as number of new_pop_count if there aren't inlined frames).
-    InstrumentationStackVisitor instrumentation_visitor(self_, handler_frame_depth_);
-    instrumentation_visitor.WalkStack(true);
-    size_t instrumentation_frames_to_pop = instrumentation_visitor.GetInstrumentationFramesToPop();
+    size_t instrumentation_frames_to_pop =
+        GetInstrumentationFramesToPop(self_, handler_frame_depth_);
 
     if (kDebugExceptionDelivery) {
       if (*handler_quick_frame_ == nullptr) {
@@ -647,10 +636,8 @@
   DCHECK(is_deoptimization_) << "Non-deoptimization handlers should use FindCatch";
   uintptr_t return_pc = 0;
   if (method_tracing_active_) {
-    InstrumentationStackVisitor visitor(self_, handler_frame_depth_);
-    visitor.WalkStack(true);
-
-    size_t instrumentation_frames_to_pop = visitor.GetInstrumentationFramesToPop();
+    size_t instrumentation_frames_to_pop =
+        GetInstrumentationFramesToPop(self_, handler_frame_depth_);
     instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
     return_pc = instrumentation->PopFramesForDeoptimization(self_, instrumentation_frames_to_pop);
   }
@@ -671,53 +658,41 @@
   UNREACHABLE();
 }
 
-// Prints out methods with their type of frame.
-class DumpFramesWithTypeStackVisitor final : public StackVisitor {
- public:
-  explicit DumpFramesWithTypeStackVisitor(Thread* self, bool show_details = false)
-      REQUIRES_SHARED(Locks::mutator_lock_)
-      : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
-        show_details_(show_details) {}
-
-  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
-    ArtMethod* method = GetMethod();
-    if (show_details_) {
-      LOG(INFO) << "|> pc   = " << std::hex << GetCurrentQuickFramePc();
-      LOG(INFO) << "|> addr = " << std::hex << reinterpret_cast<uintptr_t>(GetCurrentQuickFrame());
-      if (GetCurrentQuickFrame() != nullptr && method != nullptr) {
-        LOG(INFO) << "|> ret  = " << std::hex << GetReturnPc();
-      }
-    }
-    if (method == nullptr) {
-      // Transition, do go on, we want to unwind over bridges, all the way.
-      if (show_details_) {
-        LOG(INFO) << "N  <transition>";
-      }
-      return true;
-    } else if (method->IsRuntimeMethod()) {
-      if (show_details_) {
-        LOG(INFO) << "R  " << method->PrettyMethod(true);
-      }
-      return true;
-    } else {
-      bool is_shadow = GetCurrentShadowFrame() != nullptr;
-      LOG(INFO) << (is_shadow ? "S" : "Q")
-                << ((!is_shadow && IsInInlinedFrame()) ? "i" : " ")
-                << " "
-                << method->PrettyMethod(true);
-      return true;  // Go on.
-    }
-  }
-
- private:
-  bool show_details_;
-
-  DISALLOW_COPY_AND_ASSIGN(DumpFramesWithTypeStackVisitor);
-};
-
 void QuickExceptionHandler::DumpFramesWithType(Thread* self, bool details) {
-  DumpFramesWithTypeStackVisitor visitor(self, details);
-  visitor.WalkStack(true);
+  StackVisitor::WalkStack(
+      [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        ArtMethod* method = stack_visitor->GetMethod();
+        if (details) {
+          LOG(INFO) << "|> pc   = " << std::hex << stack_visitor->GetCurrentQuickFramePc();
+          LOG(INFO) << "|> addr = " << std::hex
+              << reinterpret_cast<uintptr_t>(stack_visitor->GetCurrentQuickFrame());
+          if (stack_visitor->GetCurrentQuickFrame() != nullptr && method != nullptr) {
+            LOG(INFO) << "|> ret  = " << std::hex << stack_visitor->GetReturnPc();
+          }
+        }
+        if (method == nullptr) {
+          // Transition, do go on, we want to unwind over bridges, all the way.
+          if (details) {
+            LOG(INFO) << "N  <transition>";
+          }
+          return true;
+        } else if (method->IsRuntimeMethod()) {
+          if (details) {
+            LOG(INFO) << "R  " << method->PrettyMethod(true);
+          }
+          return true;
+        } else {
+          bool is_shadow = stack_visitor->GetCurrentShadowFrame() != nullptr;
+          LOG(INFO) << (is_shadow ? "S" : "Q")
+                    << ((!is_shadow && stack_visitor->IsInInlinedFrame()) ? "i" : " ")
+                    << " "
+                    << method->PrettyMethod(true);
+          return true;  // Go on.
+        }
+      },
+      self,
+      /* context= */ nullptr,
+      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
 }
 
 }  // namespace art
diff --git a/runtime/stack.h b/runtime/stack.h
index 02578d2..9d30115 100644
--- a/runtime/stack.h
+++ b/runtime/stack.h
@@ -143,6 +143,36 @@
   template <CountTransitions kCount = CountTransitions::kYes>
   void WalkStack(bool include_transitions = false) REQUIRES_SHARED(Locks::mutator_lock_);
 
+  // Convenience helper function to walk the stack with a lambda as a visitor.
+  template <CountTransitions kCountTransitions = CountTransitions::kYes,
+            typename T>
+  ALWAYS_INLINE static void WalkStack(const T& fn,
+                                      Thread* thread,
+                                      Context* context,
+                                      StackWalkKind walk_kind,
+                                      bool check_suspended = true,
+                                      bool include_transitions = false)
+      REQUIRES_SHARED(Locks::mutator_lock_) {
+    class LambdaStackVisitor : public StackVisitor {
+     public:
+      LambdaStackVisitor(const T& fn,
+                         Thread* thread,
+                         Context* context,
+                         StackWalkKind walk_kind,
+                         bool check_suspended = true)
+          : StackVisitor(thread, context, walk_kind, check_suspended), fn_(fn) {}
+
+      bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
+        return fn_(this);
+      }
+
+     private:
+      T fn_;
+    };
+    LambdaStackVisitor visitor(fn, thread, context, walk_kind, check_suspended);
+    visitor.template WalkStack<kCountTransitions>(include_transitions);
+  }
+
   Thread* GetThread() const {
     return thread_;
   }
diff --git a/runtime/thread.cc b/runtime/thread.cc
index e9fed76..33cd9bb 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -3607,42 +3607,34 @@
   return result;
 }
 
-ArtMethod* Thread::GetCurrentMethod(uint32_t* dex_pc,
+ArtMethod* Thread::GetCurrentMethod(uint32_t* dex_pc_out,
                                     bool check_suspended,
                                     bool abort_on_error) const {
   // Note: this visitor may return with a method set, but dex_pc_ being DexFile:kDexNoIndex. This is
   //       so we don't abort in a special situation (thinlocked monitor) when dumping the Java
   //       stack.
-  struct CurrentMethodVisitor final : public StackVisitor {
-    CurrentMethodVisitor(Thread* thread, bool check_suspended, bool abort_on_error)
-        REQUIRES_SHARED(Locks::mutator_lock_)
-        : StackVisitor(thread,
-                       /* context= */nullptr,
-            StackVisitor::StackWalkKind::kIncludeInlinedFrames,
-            check_suspended),
-            method_(nullptr),
-            dex_pc_(0),
-            abort_on_error_(abort_on_error) {}
-    bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
-      ArtMethod* m = GetMethod();
-      if (m->IsRuntimeMethod()) {
-        // Continue if this is a runtime method.
-        return true;
-      }
-      method_ = m;
-      dex_pc_ = GetDexPc(abort_on_error_);
-      return false;
-    }
-    ArtMethod* method_;
-    uint32_t dex_pc_;
-    const bool abort_on_error_;
-  };
-  CurrentMethodVisitor visitor(const_cast<Thread*>(this), check_suspended, abort_on_error);
-  visitor.WalkStack(false);
-  if (dex_pc != nullptr) {
-    *dex_pc = visitor.dex_pc_;
+  ArtMethod* method = nullptr;
+  uint32_t dex_pc = dex::kDexNoIndex;
+  StackVisitor::WalkStack(
+      [&](const StackVisitor* visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        ArtMethod* m = visitor->GetMethod();
+        if (m->IsRuntimeMethod()) {
+          // Continue if this is a runtime method.
+          return true;
+        }
+        method = m;
+        dex_pc = visitor->GetDexPc(abort_on_error);
+        return false;
+      },
+      const_cast<Thread*>(this),
+      /* context= */ nullptr,
+      StackVisitor::StackWalkKind::kIncludeInlinedFrames,
+      check_suspended);
+
+  if (dex_pc_out != nullptr) {
+    *dex_pc_out = dex_pc;
   }
-  return visitor.method_;
+  return method;
 }
 
 bool Thread::HoldsLock(ObjPtr<mirror::Object> object) const {
diff --git a/runtime/trace.cc b/runtime/trace.cc
index ad58c2e..f6c36cf 100644
--- a/runtime/trace.cc
+++ b/runtime/trace.cc
@@ -58,32 +58,6 @@
 static constexpr uint8_t kOpNewThread = 2U;
 static constexpr uint8_t kOpTraceSummary = 3U;
 
-class BuildStackTraceVisitor : public StackVisitor {
- public:
-  explicit BuildStackTraceVisitor(Thread* thread)
-      : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
-        method_trace_(Trace::AllocStackTrace()) {}
-
-  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
-    ArtMethod* m = GetMethod();
-    // Ignore runtime frames (in particular callee save).
-    if (!m->IsRuntimeMethod()) {
-      method_trace_->push_back(m);
-    }
-    return true;
-  }
-
-  // Returns a stack trace where the topmost frame corresponds with the first element of the vector.
-  std::vector<ArtMethod*>* GetStackTrace() const {
-    return method_trace_;
-  }
-
- private:
-  std::vector<ArtMethod*>* const method_trace_;
-
-  DISALLOW_COPY_AND_ASSIGN(BuildStackTraceVisitor);
-};
-
 static const char     kTraceTokenChar             = '*';
 static const uint16_t kTraceHeaderLength          = 32;
 static const uint32_t kTraceMagicValue            = 0x574f4c53;
@@ -228,9 +202,19 @@
 }
 
 static void GetSample(Thread* thread, void* arg) REQUIRES_SHARED(Locks::mutator_lock_) {
-  BuildStackTraceVisitor build_trace_visitor(thread);
-  build_trace_visitor.WalkStack();
-  std::vector<ArtMethod*>* stack_trace = build_trace_visitor.GetStackTrace();
+  std::vector<ArtMethod*>* const stack_trace = Trace::AllocStackTrace();
+  StackVisitor::WalkStack(
+      [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        ArtMethod* m = stack_visitor->GetMethod();
+        // Ignore runtime frames (in particular callee save).
+        if (!m->IsRuntimeMethod()) {
+          stack_trace->push_back(m);
+        }
+        return true;
+      },
+      thread,
+      /* context= */ nullptr,
+      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
   Trace* the_trace = reinterpret_cast<Trace*>(arg);
   the_trace->CompareAndUpdateStackTrace(thread, stack_trace);
 }
diff --git a/test/461-get-reference-vreg/get_reference_vreg_jni.cc b/test/461-get-reference-vreg/get_reference_vreg_jni.cc
index ddc86df..817a647 100644
--- a/test/461-get-reference-vreg/get_reference_vreg_jni.cc
+++ b/test/461-get-reference-vreg/get_reference_vreg_jni.cc
@@ -25,62 +25,50 @@
 
 namespace {
 
-class TestVisitor : public StackVisitor {
- public:
-  TestVisitor(Thread* thread, Context* context, mirror::Object* this_value)
-      REQUIRES_SHARED(Locks::mutator_lock_)
-      : StackVisitor(thread, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
-        this_value_(this_value),
-        found_method_index_(0) {}
-
-  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
-    ArtMethod* m = GetMethod();
-    std::string m_name(m->GetName());
-
-    if (m_name.compare("$noinline$testThisWithInstanceCall") == 0) {
-      found_method_index_ = 1;
-      uint32_t value = 0;
-      CHECK(GetVReg(m, 1, kReferenceVReg, &value));
-      CHECK_EQ(reinterpret_cast<mirror::Object*>(value), this_value_);
-      CHECK_EQ(GetThisObject(), this_value_);
-    } else if (m_name.compare("$noinline$testThisWithStaticCall") == 0) {
-      found_method_index_ = 2;
-      uint32_t value = 0;
-      CHECK(GetVReg(m, 1, kReferenceVReg, &value));
-    } else if (m_name.compare("$noinline$testParameter") == 0) {
-      found_method_index_ = 3;
-      uint32_t value = 0;
-      CHECK(GetVReg(m, 1, kReferenceVReg, &value));
-    } else if (m_name.compare("$noinline$testObjectInScope") == 0) {
-      found_method_index_ = 4;
-      uint32_t value = 0;
-      CHECK(GetVReg(m, 0, kReferenceVReg, &value));
-    }
-
-    return true;
-  }
-
-  mirror::Object* this_value_;
-
-  // Value returned to Java to ensure the methods testSimpleVReg and testPairVReg
-  // have been found and tested.
-  jint found_method_index_;
-};
-
-extern "C" JNIEXPORT jint JNICALL Java_Main_doNativeCallRef(JNIEnv*, jobject value) {
+jint FindMethodIndex(jobject this_value_jobj) {
   ScopedObjectAccess soa(Thread::Current());
   std::unique_ptr<Context> context(Context::Create());
-  TestVisitor visitor(soa.Self(), context.get(), soa.Decode<mirror::Object>(value).Ptr());
-  visitor.WalkStack();
-  return visitor.found_method_index_;
+  ObjPtr<mirror::Object> this_value = soa.Decode<mirror::Object>(this_value_jobj);
+  jint found_method_index = 0;
+  StackVisitor::WalkStack(
+      [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        ArtMethod* m = stack_visitor->GetMethod();
+        std::string m_name(m->GetName());
+
+        if (m_name.compare("$noinline$testThisWithInstanceCall") == 0) {
+          found_method_index = 1;
+          uint32_t value = 0;
+          CHECK(stack_visitor->GetVReg(m, 1, kReferenceVReg, &value));
+          CHECK_EQ(reinterpret_cast<mirror::Object*>(value), this_value);
+          CHECK_EQ(stack_visitor->GetThisObject(), this_value);
+        } else if (m_name.compare("$noinline$testThisWithStaticCall") == 0) {
+          found_method_index = 2;
+          uint32_t value = 0;
+          CHECK(stack_visitor->GetVReg(m, 1, kReferenceVReg, &value));
+        } else if (m_name.compare("$noinline$testParameter") == 0) {
+          found_method_index = 3;
+          uint32_t value = 0;
+          CHECK(stack_visitor->GetVReg(m, 1, kReferenceVReg, &value));
+        } else if (m_name.compare("$noinline$testObjectInScope") == 0) {
+          found_method_index = 4;
+          uint32_t value = 0;
+          CHECK(stack_visitor->GetVReg(m, 0, kReferenceVReg, &value));
+        }
+
+        return true;
+      },
+      soa.Self(),
+      context.get(),
+      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
+  return found_method_index;
+}
+
+extern "C" JNIEXPORT jint JNICALL Java_Main_doNativeCallRef(JNIEnv*, jobject value) {
+  return FindMethodIndex(value);
 }
 
 extern "C" JNIEXPORT jint JNICALL Java_Main_doStaticNativeCallRef(JNIEnv*, jclass) {
-  ScopedObjectAccess soa(Thread::Current());
-  std::unique_ptr<Context> context(Context::Create());
-  TestVisitor visitor(soa.Self(), context.get(), nullptr);
-  visitor.WalkStack();
-  return visitor.found_method_index_;
+  return FindMethodIndex(nullptr);
 }
 
 }  // namespace
diff --git a/test/543-env-long-ref/env_long_ref.cc b/test/543-env-long-ref/env_long_ref.cc
index 165f5bf..1885f8d 100644
--- a/test/543-env-long-ref/env_long_ref.cc
+++ b/test/543-env-long-ref/env_long_ref.cc
@@ -23,44 +23,28 @@
 
 namespace art {
 
-namespace {
-
-class TestVisitor : public StackVisitor {
- public:
-  TestVisitor(const ScopedObjectAccess& soa, Context* context, jobject expected_value)
-      REQUIRES_SHARED(Locks::mutator_lock_)
-      : StackVisitor(soa.Self(), context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
-        expected_value_(expected_value),
-        found_(false),
-        soa_(soa) {}
-
-  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
-    ArtMethod* m = GetMethod();
-    std::string m_name(m->GetName());
-
-    if (m_name == "testCase") {
-      found_ = true;
-      uint32_t value = 0;
-      CHECK(GetVReg(m, 1, kReferenceVReg, &value));
-      CHECK_EQ(reinterpret_cast<mirror::Object*>(value),
-               soa_.Decode<mirror::Object>(expected_value_).Ptr());
-    }
-    return true;
-  }
-
-  jobject expected_value_;
-  bool found_;
-  const ScopedObjectAccess& soa_;
-};
-
-}  // namespace
-
 extern "C" JNIEXPORT void JNICALL Java_Main_lookForMyRegisters(JNIEnv*, jclass, jobject value) {
   ScopedObjectAccess soa(Thread::Current());
   std::unique_ptr<Context> context(Context::Create());
-  TestVisitor visitor(soa, context.get(), value);
-  visitor.WalkStack();
-  CHECK(visitor.found_);
+  bool found = false;
+  StackVisitor::WalkStack(
+      [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        ArtMethod* m = stack_visitor->GetMethod();
+        std::string m_name(m->GetName());
+
+        if (m_name == "testCase") {
+          found = true;
+          uint32_t stack_value = 0;
+          CHECK(stack_visitor->GetVReg(m, 1, kReferenceVReg, &stack_value));
+          CHECK_EQ(reinterpret_cast<mirror::Object*>(stack_value),
+                   soa.Decode<mirror::Object>(value).Ptr());
+        }
+        return true;
+      },
+      soa.Self(),
+      context.get(),
+      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
+  CHECK(found);
 }
 
 }  // namespace art
diff --git a/test/570-checker-osr/osr.cc b/test/570-checker-osr/osr.cc
index 7b88842..b2b3634 100644
--- a/test/570-checker-osr/osr.cc
+++ b/test/570-checker-osr/osr.cc
@@ -23,39 +23,33 @@
 #include "scoped_thread_state_change-inl.h"
 #include "stack.h"
 #include "stack_map.h"
+#include "thread-current-inl.h"
 
 namespace art {
 
-class OsrVisitor : public StackVisitor {
- public:
-  explicit OsrVisitor(Thread* thread, const char* method_name)
-      REQUIRES_SHARED(Locks::mutator_lock_)
-      : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
-        method_name_(method_name),
-        in_osr_method_(false),
-        in_interpreter_(false) {}
+namespace {
 
-  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
-    ArtMethod* m = GetMethod();
-    std::string m_name(m->GetName());
+template <typename Handler>
+void ProcessMethodWithName(JNIEnv* env, jstring method_name, const Handler& handler) {
+  ScopedUtfChars chars(env, method_name);
+  CHECK(chars.c_str() != nullptr);
+  ScopedObjectAccess soa(Thread::Current());
+  StackVisitor::WalkStack(
+      [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        std::string m_name(stack_visitor->GetMethod()->GetName());
 
-    if (m_name.compare(method_name_) == 0) {
-      const OatQuickMethodHeader* header =
-          Runtime::Current()->GetJit()->GetCodeCache()->LookupOsrMethodHeader(m);
-      if (header != nullptr && header == GetCurrentOatQuickMethodHeader()) {
-        in_osr_method_ = true;
-      } else if (IsShadowFrame()) {
-        in_interpreter_ = true;
-      }
-      return false;
-    }
-    return true;
-  }
+        if (m_name.compare(chars.c_str()) == 0) {
+          handler(stack_visitor);
+          return false;
+        }
+        return true;
+      },
+      soa.Self(),
+      /* context= */ nullptr,
+      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
+}
 
-  const char* const method_name_;
-  bool in_osr_method_;
-  bool in_interpreter_;
-};
+}  // namespace
 
 extern "C" JNIEXPORT jboolean JNICALL Java_Main_isInOsrCode(JNIEnv* env,
                                                             jclass,
@@ -65,12 +59,19 @@
     // Just return true for non-jit configurations to stop the infinite loop.
     return JNI_TRUE;
   }
-  ScopedUtfChars chars(env, method_name);
-  CHECK(chars.c_str() != nullptr);
-  ScopedObjectAccess soa(Thread::Current());
-  OsrVisitor visitor(soa.Self(), chars.c_str());
-  visitor.WalkStack();
-  return visitor.in_osr_method_;
+  bool in_osr_code = false;
+  ProcessMethodWithName(
+      env,
+      method_name,
+      [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        ArtMethod* m = stack_visitor->GetMethod();
+        const OatQuickMethodHeader* header =
+            Runtime::Current()->GetJit()->GetCodeCache()->LookupOsrMethodHeader(m);
+        if (header != nullptr && header == stack_visitor->GetCurrentOatQuickMethodHeader()) {
+          in_osr_code = true;
+        }
+      });
+  return in_osr_code;
 }
 
 extern "C" JNIEXPORT jboolean JNICALL Java_Main_isInInterpreter(JNIEnv* env,
@@ -80,86 +81,56 @@
     // The return value is irrelevant if we're not using JIT.
     return false;
   }
-  ScopedUtfChars chars(env, method_name);
-  CHECK(chars.c_str() != nullptr);
-  ScopedObjectAccess soa(Thread::Current());
-  OsrVisitor visitor(soa.Self(), chars.c_str());
-  visitor.WalkStack();
-  return visitor.in_interpreter_;
+  bool in_interpreter = false;
+  ProcessMethodWithName(
+      env,
+      method_name,
+      [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        ArtMethod* m = stack_visitor->GetMethod();
+        const OatQuickMethodHeader* header =
+            Runtime::Current()->GetJit()->GetCodeCache()->LookupOsrMethodHeader(m);
+        if ((header == nullptr || header != stack_visitor->GetCurrentOatQuickMethodHeader()) &&
+            stack_visitor->IsShadowFrame()) {
+          in_interpreter = true;
+        }
+      });
+  return in_interpreter;
 }
 
-class ProfilingInfoVisitor : public StackVisitor {
- public:
-  explicit ProfilingInfoVisitor(Thread* thread, const char* method_name)
-      REQUIRES_SHARED(Locks::mutator_lock_)
-      : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
-        method_name_(method_name) {}
-
-  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
-    ArtMethod* m = GetMethod();
-    std::string m_name(m->GetName());
-
-    if (m_name.compare(method_name_) == 0) {
-      ProfilingInfo::Create(Thread::Current(), m, /* retry_allocation */ true);
-      return false;
-    }
-    return true;
-  }
-
-  const char* const method_name_;
-};
-
 extern "C" JNIEXPORT void JNICALL Java_Main_ensureHasProfilingInfo(JNIEnv* env,
                                                                    jclass,
                                                                    jstring method_name) {
   if (!Runtime::Current()->UseJitCompilation()) {
     return;
   }
-  ScopedUtfChars chars(env, method_name);
-  CHECK(chars.c_str() != nullptr);
-  ScopedObjectAccess soa(Thread::Current());
-  ProfilingInfoVisitor visitor(soa.Self(), chars.c_str());
-  visitor.WalkStack();
+  ProcessMethodWithName(
+      env,
+      method_name,
+      [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        ArtMethod* m = stack_visitor->GetMethod();
+        ProfilingInfo::Create(Thread::Current(), m, /* retry_allocation */ true);
+      });
 }
 
-class OsrCheckVisitor : public StackVisitor {
- public:
-  OsrCheckVisitor(Thread* thread, const char* method_name)
-      REQUIRES_SHARED(Locks::mutator_lock_)
-      : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
-        method_name_(method_name) {}
-
-  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
-    ArtMethod* m = GetMethod();
-    std::string m_name(m->GetName());
-
-    jit::Jit* jit = Runtime::Current()->GetJit();
-    if (m_name.compare(method_name_) == 0) {
-      while (jit->GetCodeCache()->LookupOsrMethodHeader(m) == nullptr) {
-        // Sleep to yield to the compiler thread.
-        usleep(1000);
-        // Will either ensure it's compiled or do the compilation itself.
-        jit->CompileMethod(m, Thread::Current(), /* osr */ true);
-      }
-      return false;
-    }
-    return true;
-  }
-
-  const char* const method_name_;
-};
-
 extern "C" JNIEXPORT void JNICALL Java_Main_ensureHasOsrCode(JNIEnv* env,
                                                              jclass,
                                                              jstring method_name) {
   if (!Runtime::Current()->UseJitCompilation()) {
     return;
   }
-  ScopedUtfChars chars(env, method_name);
-  CHECK(chars.c_str() != nullptr);
-  ScopedObjectAccess soa(Thread::Current());
-  OsrCheckVisitor visitor(soa.Self(), chars.c_str());
-  visitor.WalkStack();
+  ProcessMethodWithName(
+      env,
+      method_name,
+      [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        ArtMethod* m = stack_visitor->GetMethod();
+        jit::Jit* jit = Runtime::Current()->GetJit();
+        while (jit->GetCodeCache()->LookupOsrMethodHeader(m) == nullptr) {
+          // Sleep to yield to the compiler thread.
+          usleep(1000);
+          // Will either ensure it's compiled or do the compilation itself.
+          jit->CompileMethod(m, Thread::Current(), /* osr */ true);
+        }
+      });
 }
 
 }  // namespace art
diff --git a/test/common/stack_inspect.cc b/test/common/stack_inspect.cc
index 581aa74..393e773 100644
--- a/test/common/stack_inspect.cc
+++ b/test/common/stack_inspect.cc
@@ -66,42 +66,30 @@
 
 // public static native boolean isInterpretedFunction(String smali);
 
-// TODO Remove 'allow_runtime_frames' option once we have deoptimization through runtime frames.
-struct MethodIsInterpretedVisitor : public StackVisitor {
- public:
-  MethodIsInterpretedVisitor(Thread* thread, ArtMethod* goal, bool require_deoptable)
-      : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
-        goal_(goal),
-        method_is_interpreted_(true),
-        method_found_(false),
-        prev_was_runtime_(true),
-        require_deoptable_(require_deoptable) {}
-
-  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
-    if (goal_ == GetMethod()) {
-      method_is_interpreted_ = (require_deoptable_ && prev_was_runtime_) || IsShadowFrame();
-      method_found_ = true;
-      return false;
-    }
-    prev_was_runtime_ = GetMethod()->IsRuntimeMethod();
-    return true;
-  }
-
-  bool IsInterpreted() {
-    return method_is_interpreted_;
-  }
-
-  bool IsFound() {
-    return method_found_;
-  }
-
- private:
-  const ArtMethod* goal_;
-  bool method_is_interpreted_;
-  bool method_found_;
-  bool prev_was_runtime_;
-  bool require_deoptable_;
-};
+static bool IsMethodInterpreted(Thread* self,
+                                const ArtMethod* goal,
+                                const bool require_deoptable,
+                                /* out */ bool* method_is_interpreted)
+    REQUIRES_SHARED(Locks::mutator_lock_) {
+  *method_is_interpreted = true;
+  bool method_found = false;
+  bool prev_was_runtime = true;
+  StackVisitor::WalkStack(
+      [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        if (goal == stack_visitor->GetMethod()) {
+          *method_is_interpreted =
+              (require_deoptable && prev_was_runtime) || stack_visitor->IsShadowFrame();
+          method_found = true;
+          return false;
+        }
+        prev_was_runtime = stack_visitor->GetMethod()->IsRuntimeMethod();
+        return true;
+      },
+      self,
+      /* context= */ nullptr,
+      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
+  return method_found;
+}
 
 // TODO Remove 'require_deoptimizable' option once we have deoptimization through runtime frames.
 extern "C" JNIEXPORT jboolean JNICALL Java_Main_isInterpretedFunction(
@@ -119,23 +107,18 @@
     env->ThrowNew(env->FindClass("java/lang/Error"), "Unable to interpret method argument!");
     return JNI_FALSE;
   }
-  bool result;
-  bool found;
   {
     ScopedObjectAccess soa(env);
     ArtMethod* goal = jni::DecodeArtMethod(id);
-    MethodIsInterpretedVisitor v(soa.Self(), goal, require_deoptimizable);
-    v.WalkStack();
+    bool is_interpreted;
+    if (!IsMethodInterpreted(soa.Self(), goal, require_deoptimizable, &is_interpreted)) {
+      env->ThrowNew(env->FindClass("java/lang/Error"), "Unable to find given method in stack!");
+      return JNI_FALSE;
+    }
     bool enters_interpreter = Runtime::Current()->GetClassLinker()->IsQuickToInterpreterBridge(
         goal->GetEntryPointFromQuickCompiledCode());
-    result = (v.IsInterpreted() || enters_interpreter);
-    found = v.IsFound();
+    return (is_interpreted || enters_interpreter);
   }
-  if (!found) {
-    env->ThrowNew(env->FindClass("java/lang/Error"), "Unable to find given method in stack!");
-    return JNI_FALSE;
-  }
-  return result;
 }
 
 // public static native void assertIsInterpreted();
@@ -196,24 +179,24 @@
   }
 }
 
-struct GetCallingFrameVisitor : public StackVisitor {
-  GetCallingFrameVisitor(Thread* thread, Context* context)
-      REQUIRES_SHARED(Locks::mutator_lock_)
-      : StackVisitor(thread, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames) {}
-
-  bool VisitFrame() override NO_THREAD_SAFETY_ANALYSIS {
-    // Discard stubs and Main.getThisOfCaller.
-    return GetMethod() == nullptr || GetMethod()->IsNative();
-  }
-};
-
 extern "C" JNIEXPORT jobject JNICALL Java_Main_getThisOfCaller(
     JNIEnv* env, jclass cls ATTRIBUTE_UNUSED) {
   ScopedObjectAccess soa(env);
   std::unique_ptr<art::Context> context(art::Context::Create());
-  GetCallingFrameVisitor visitor(soa.Self(), context.get());
-  visitor.WalkStack();
-  return soa.AddLocalReference<jobject>(visitor.GetThisObject());
+  jobject result = nullptr;
+  StackVisitor::WalkStack(
+      [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+        // Discard stubs and Main.getThisOfCaller.
+        if (stack_visitor->GetMethod() == nullptr || stack_visitor->GetMethod()->IsNative()) {
+          return true;
+        }
+        result = soa.AddLocalReference<jobject>(stack_visitor->GetThisObject());
+        return false;
+      },
+      soa.Self(),
+      context.get(),
+      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
+  return result;
 }
 
 }  // namespace art
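
Note: the hunks above all follow the same shape, so here is a minimal
standalone sketch of the pattern for reference. The Frame struct and
WalkFrames helper below are hypothetical stand-ins, not ART's
StackVisitor API; the point is only that the caller passes a lambda
that captures enclosing locals by reference and returns true to keep
walking or false to stop, instead of subclassing a visitor and storing
results in member fields.

// Standalone sketch (hypothetical Frame/WalkFrames, not ART code) of the
// callback-per-frame pattern used by the rewritten call sites above.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

struct Frame {
  std::string method_name;
  bool is_runtime;
};

// Calls `visit` for each frame, topmost frame first, and stops early
// when the callback returns false, mirroring the lambda overload of
// StackVisitor::WalkStack.
void WalkFrames(const std::vector<Frame>& stack,
                const std::function<bool(const Frame&)>& visit) {
  for (const Frame& frame : stack) {
    if (!visit(frame)) {
      break;
    }
  }
}

int main() {
  std::vector<Frame> stack = {
      {"art_jni_trampoline", /* is_runtime= */ true},
      {"testCase", /* is_runtime= */ false},
      {"main", /* is_runtime= */ false},
  };

  // Count non-runtime frames by capturing a local, instead of keeping a
  // count field on a visitor subclass.
  size_t count = 0;
  WalkFrames(stack, [&](const Frame& frame) {
    if (!frame.is_runtime) {
      ++count;
    }
    return true;  // Keep walking.
  });

  // Find a frame by name and stop the walk as soon as it is seen.
  std::string found;
  WalkFrames(stack, [&](const Frame& frame) {
    if (frame.method_name == "testCase") {
      found = frame.method_name;
      return false;  // Stop walking.
    }
    return true;
  });

  std::cout << "java frames: " << count << ", found: " << found << "\n";
  return 0;
}

The real API additionally takes the Thread*, Context*, StackWalkKind and
(in thread.cc) check_suspended arguments seen in the hunks above.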