More cleanup after removing instrumentation stubs

Some comments and code are no longer needed now that instrumentation
stubs have been removed. This change deletes them:
1. Remove a stale comment about the instrumentation exit stub that no
   longer applies.
2. Stop skipping method exit hooks for GenericJni in non-debuggable
   runtimes; the early return in artMethodExitHook only existed to
   avoid running the hooks a second time via the instrumentation exit
   stub.
3. Remove the ShouldDeoptimizeCaller overload that takes an
   NthCallerVisitor; it was only used by instrumentation stubs. The
   surviving overloads read the caller and its return pc directly off
   the quick frame (see the sketch after this list).

Bug: 206029744
Test: art/test.py
Change-Id: I4538a9357d9e24b847db638658a4f80e3687551d
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index 56011c6..f0c5953 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -704,9 +704,6 @@
 
   // Check if caller needs to be deoptimized for instrumentation reasons.
   instrumentation::Instrumentation* instr = Runtime::Current()->GetInstrumentation();
-  // If caller_pc is the instrumentation exit stub, the stub will check to see if deoptimization
-  // should be done and it knows the real return pc. NB If the upcall is null we don't need to do
-  // anything. This can happen during shutdown or early startup.
   if (UNLIKELY(instr->ShouldDeoptimizeCaller(self, sp))) {
     ArtMethod* caller = QuickArgumentVisitor::GetOuterMethod(sp);
     uintptr_t caller_pc = QuickArgumentVisitor::GetCallingPc(sp);
@@ -2529,14 +2526,6 @@
                                   uint64_t* fpr_result,
                                   uint32_t frame_size)
   REQUIRES_SHARED(Locks::mutator_lock_) {
-  // For GenericJniTrampolines we call artMethodExitHook even for non debuggable runtimes though we
-  // still install instrumentation stubs. So just return early here so we don't call method exit
-  // twice. In all other cases (JITed JNI stubs / JITed code) we only call this for debuggable
-  // runtimes.
-  if (!Runtime::Current()->IsJavaDebuggable()) {
-    return;
-  }
-
   DCHECK_EQ(reinterpret_cast<uintptr_t>(self), reinterpret_cast<uintptr_t>(Thread::Current()));
   // Instrumentation exit stub must not be entered with a pending exception.
   CHECK(!self->IsExceptionPending())
diff --git a/runtime/instrumentation.cc b/runtime/instrumentation.cc
index 760d0ab..45143e4 100644
--- a/runtime/instrumentation.cc
+++ b/runtime/instrumentation.cc
@@ -1581,23 +1581,7 @@
   ArtMethod* caller = *(reinterpret_cast<ArtMethod**>(caller_sp));
   uintptr_t caller_pc_addr = reinterpret_cast<uintptr_t>(sp) + (frame_size - sizeof(void*));
   uintptr_t caller_pc = *reinterpret_cast<uintptr_t*>(caller_pc_addr);
-  return ShouldDeoptimizeCaller(self, caller, caller_pc, caller_sp);
-}
 
-
-bool Instrumentation::ShouldDeoptimizeCaller(Thread* self, const NthCallerVisitor& visitor) {
-  uintptr_t caller_sp = reinterpret_cast<uintptr_t>(visitor.GetCurrentQuickFrame());
-  // When the caller isn't executing quick code there is no need to deoptimize.
-  if (visitor.GetCurrentOatQuickMethodHeader() == nullptr) {
-    return false;
-  }
-  return ShouldDeoptimizeCaller(self, visitor.GetOuterMethod(), visitor.caller_pc, caller_sp);
-}
-
-bool Instrumentation::ShouldDeoptimizeCaller(Thread* self,
-                                             ArtMethod* caller,
-                                             uintptr_t caller_pc,
-                                             uintptr_t caller_sp) {
   if (caller == nullptr ||
       caller->IsNative() ||
       caller->IsRuntimeMethod()) {
diff --git a/runtime/instrumentation.h b/runtime/instrumentation.h
index 439a314..364acaf 100644
--- a/runtime/instrumentation.h
+++ b/runtime/instrumentation.h
@@ -523,23 +523,12 @@
                           DeoptimizationMethodType type,
                           JValue result,
                           bool is_ref) REQUIRES_SHARED(Locks::mutator_lock_);
-  // TODO(mythria): Update uses of ShouldDeoptimizeCaller that takes a visitor by a method that
-  // doesn't need to walk the stack. This is used on method exits to check if the caller needs a
-  // deoptimization.
-  bool ShouldDeoptimizeCaller(Thread* self, const NthCallerVisitor& visitor)
-      REQUIRES_SHARED(Locks::mutator_lock_);
   // This returns if the caller of runtime method requires a deoptimization. This checks both if the
   // method requires a deopt or if this particular frame needs a deopt because of a class
   // redefinition.
   bool ShouldDeoptimizeCaller(Thread* self, ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_);
   bool ShouldDeoptimizeCaller(Thread* self, ArtMethod** sp, size_t frame_size)
       REQUIRES_SHARED(Locks::mutator_lock_);
-  // This is a helper function used by the two variants of ShouldDeoptimizeCaller.
-  // Remove this once ShouldDeoptimizeCaller is updated not to use NthCallerVisitor.
-  bool ShouldDeoptimizeCaller(Thread* self,
-                              ArtMethod* caller,
-                              uintptr_t caller_pc,
-                              uintptr_t caller_sp) REQUIRES_SHARED(Locks::mutator_lock_);
   // This returns if the specified method requires a deoptimization. This doesn't account if a stack
   // frame involving this method requires a deoptimization.
   bool NeedsSlowInterpreterForMethod(Thread* self, ArtMethod* method)