Revert "Suspend the runtime when switching interpreters."

This reverts commit 01e1d3217c9b6ad5b3f118a539bea6778ad6096c.

Reason for revert: JDWP JIT and Interpreter tests failing on hosts.

Change-Id: I95bbda936c573983ff0c6041267bffaa73315cb7
diff --git a/runtime/interpreter/interpreter.cc b/runtime/interpreter/interpreter.cc
index d68bf95..d004d64 100644
--- a/runtime/interpreter/interpreter.cc
+++ b/runtime/interpreter/interpreter.cc
@@ -253,6 +253,13 @@
   DCHECK(!shadow_frame.GetMethod()->IsAbstract());
   DCHECK(!shadow_frame.GetMethod()->IsNative());
 
+  // Check that we are using the right interpreter.
+  if (kIsDebugBuild && self->UseMterp() != CanUseMterp()) {
+    // The flag might be currently being updated on all threads. Retry with lock.
+    MutexLock tll_mu(self, *Locks::thread_list_lock_);
+    DCHECK_EQ(self->UseMterp(), CanUseMterp());
+  }
+
   if (LIKELY(!from_deoptimize)) {  // Entering the method, but not via deoptimization.
     if (kIsDebugBuild) {
       CHECK_EQ(shadow_frame.GetDexPC(), 0u);
diff --git a/runtime/interpreter/interpreter_common.h b/runtime/interpreter/interpreter_common.h
index 62f5d91..a633a63 100644
--- a/runtime/interpreter/interpreter_common.h
+++ b/runtime/interpreter/interpreter_common.h
@@ -140,10 +140,8 @@
                                    uint16_t inst_data,
                                    JValue* result)
     REQUIRES_SHARED(Locks::mutator_lock_) {
-  DCHECK_EQ(self->UseMterp(), CanUseMterp());
   // Make sure to check for async exceptions before anything else.
-  if (is_mterp) {
-    DCHECK(self->UseMterp());
+  if (is_mterp && self->UseMterp()) {
     DCHECK(!self->ObserveAsyncException());
   } else if (UNLIKELY(self->ObserveAsyncException())) {
     return false;
@@ -221,7 +219,7 @@
   // If the bit is not set, we explicitly recheck all the conditions.
   // If any of the conditions get falsified, it is important to clear the bit.
   bool use_fast_path = false;
-  if (is_mterp) {
+  if (is_mterp && self->UseMterp()) {
     use_fast_path = called_method->UseFastInterpreterToInterpreterInvoke();
     if (!use_fast_path) {
       use_fast_path = UseFastInterpreterToInterpreterInvoke(called_method);
diff --git a/runtime/interpreter/mterp/mterp.cc b/runtime/interpreter/mterp/mterp.cc
index fd1430a..912c444 100644
--- a/runtime/interpreter/mterp/mterp.cc
+++ b/runtime/interpreter/mterp/mterp.cc
@@ -546,7 +546,12 @@
 
 extern "C" void MterpCheckBefore(Thread* self, ShadowFrame* shadow_frame, uint16_t* dex_pc_ptr)
     REQUIRES_SHARED(Locks::mutator_lock_) {
-  DCHECK(self->UseMterp());
+  // Check that we are using the right interpreter.
+  if (kIsDebugBuild && self->UseMterp() != CanUseMterp()) {
+    // The flag might be currently being updated on all threads. Retry with lock.
+    MutexLock tll_mu(self, *Locks::thread_list_lock_);
+    DCHECK_EQ(self->UseMterp(), CanUseMterp());
+  }
   DCHECK(!Runtime::Current()->IsActiveTransaction());
   const Instruction* inst = Instruction::At(dex_pc_ptr);
   uint16_t inst_data = inst->Fetch16(0);
diff --git a/runtime/runtime-inl.h b/runtime/runtime-inl.h
index c7731f4..2ffaf98 100644
--- a/runtime/runtime-inl.h
+++ b/runtime/runtime-inl.h
@@ -28,7 +28,6 @@
 #include "gc_root-inl.h"
 #include "interpreter/mterp/mterp.h"
 #include "obj_ptr-inl.h"
-#include "scoped_thread_state_change-inl.h"
 #include "thread_list.h"
 
 namespace art {
@@ -91,23 +90,12 @@
 }
 
 template<typename Action>
-void Runtime::DoAndMaybeSwitchInterpreter(Action lambda) {
-  Thread* self = Thread::Current();
-  if (Runtime::Current()->IsShuttingDown(self) || Locks::mutator_lock_->IsExclusiveHeld(self)) {
-    MutexLock tll_mu(self, *Locks::thread_list_lock_);
-    lambda();
-    Runtime::Current()->GetThreadList()->ForEach([](Thread* thread, void*) {
-        thread->tls32_.use_mterp.store(interpreter::CanUseMterp());
-    }, nullptr);
-  } else {
-    ScopedThreadStateChange tsc(self, kSuspended);
-    ScopedSuspendAll ssa(__FUNCTION__);
-    MutexLock tll_mu(self, *Locks::thread_list_lock_);
-    lambda();
-    Runtime::Current()->GetThreadList()->ForEach([](Thread* thread, void*) {
-        thread->tls32_.use_mterp.store(interpreter::CanUseMterp());
-    }, nullptr);
-  }
+void Runtime::DoAndMaybeSwitchInterpreter(Action lamda) {
+  MutexLock tll_mu(Thread::Current(), *Locks::thread_list_lock_);
+  lamda();
+  Runtime::Current()->GetThreadList()->ForEach([](Thread* thread, void*) {
+      thread->tls32_.use_mterp.store(interpreter::CanUseMterp());
+  }, nullptr);
 }
 
 }  // namespace art
diff --git a/runtime/runtime.h b/runtime/runtime.h
index bd85cf6..b76a658 100644
--- a/runtime/runtime.h
+++ b/runtime/runtime.h
@@ -669,7 +669,7 @@
   // It ensures that two calls do not interfere with each other and
   // it makes it possible to DCHECK that thread local flag is correct.
   template<typename Action>
-  static void DoAndMaybeSwitchInterpreter(Action lambda);
+  static void DoAndMaybeSwitchInterpreter(Action lamda);
 
   // Returns the build fingerprint, if set. Otherwise an empty string is returned.
   std::string GetFingerprint() {
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 26bfa44..6c41ae4 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -4156,11 +4156,7 @@
 
 void Thread::SetAsyncException(ObjPtr<mirror::Throwable> new_exception) {
   CHECK(new_exception != nullptr);
-  {
-    StackHandleScope<1> hs(Thread::Current());
-    auto h_exception = hs.NewHandleWrapper(&new_exception);
-    Runtime::Current()->SetAsyncExceptionsThrown();
-  }
+  Runtime::Current()->SetAsyncExceptionsThrown();
   if (kIsDebugBuild) {
     // Make sure we are in a checkpoint.
     MutexLock mu(Thread::Current(), *Locks::thread_suspend_count_lock_);