Address comments from aog/1180224.

Move the handling of objects in the interpreter cache to weak objects.

The per-thread interpreter cache stores raw pointers to classes and
strings. These entries were previously visited as strong GC roots; they
are now swept as weak references via ThreadList::SweepInterpreterCaches,
so class entries are cleared (with the weak class sentinel) once their
class loader is unloaded, and entries for moved objects are updated.

Bug: 119800099
Test: test.py
Change-Id: Ie7b2b2e285607a7c1460fd4f0b4ea690f9a16594
diff --git a/runtime/jit/jit_code_cache.cc b/runtime/jit/jit_code_cache.cc
index 519655d..04e8d39 100644
--- a/runtime/jit/jit_code_cache.cc
+++ b/runtime/jit/jit_code_cache.cc
@@ -410,39 +410,6 @@
   return data - ComputeRootTableSize(roots);
 }
 
-// Use a sentinel for marking entries in the JIT table that have been cleared.
-// This helps diagnosing in case the compiled code tries to wrongly access such
-// entries.
-static mirror::Class* const weak_sentinel =
-    reinterpret_cast<mirror::Class*>(Context::kBadGprBase + 0xff);
-
-// Helper for the GC to process a weak class in a JIT root table.
-static inline void ProcessWeakClass(GcRoot<mirror::Class>* root_ptr,
-                                    IsMarkedVisitor* visitor,
-                                    mirror::Class* update)
-    REQUIRES_SHARED(Locks::mutator_lock_) {
-  // This does not need a read barrier because this is called by GC.
-  mirror::Class* cls = root_ptr->Read<kWithoutReadBarrier>();
-  if (cls != nullptr && cls != weak_sentinel) {
-    DCHECK((cls->IsClass<kDefaultVerifyFlags>()));
-    // Look at the classloader of the class to know if it has been unloaded.
-    // This does not need a read barrier because this is called by GC.
-    ObjPtr<mirror::Object> class_loader =
-        cls->GetClassLoader<kDefaultVerifyFlags, kWithoutReadBarrier>();
-    if (class_loader == nullptr || visitor->IsMarked(class_loader.Ptr()) != nullptr) {
-      // The class loader is live, update the entry if the class has moved.
-      mirror::Class* new_cls = down_cast<mirror::Class*>(visitor->IsMarked(cls));
-      // Note that new_object can be null for CMS and newly allocated objects.
-      if (new_cls != nullptr && new_cls != cls) {
-        *root_ptr = GcRoot<mirror::Class>(new_cls);
-      }
-    } else {
-      // The class loader is not live, clear the entry.
-      *root_ptr = GcRoot<mirror::Class>(update);
-    }
-  }
-}
-
 void JitCodeCache::SweepRootTables(IsMarkedVisitor* visitor) {
   MutexLock mu(Thread::Current(), *Locks::jit_lock_);
   for (const auto& entry : method_code_map_) {
@@ -455,7 +422,7 @@
     for (uint32_t i = 0; i < number_of_roots; ++i) {
       // This does not need a read barrier because this is called by GC.
       mirror::Object* object = roots[i].Read<kWithoutReadBarrier>();
-      if (object == nullptr || object == weak_sentinel) {
+      if (object == nullptr || object == Runtime::GetWeakClassSentinel()) {
         // entry got deleted in a previous sweep.
       } else if (object->IsString<kDefaultVerifyFlags>()) {
         mirror::Object* new_object = visitor->IsMarked(object);
@@ -470,8 +437,11 @@
           roots[i] = GcRoot<mirror::Object>(new_object);
         }
       } else {
-        ProcessWeakClass(
-            reinterpret_cast<GcRoot<mirror::Class>*>(&roots[i]), visitor, weak_sentinel);
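+        // Non-string roots are weak classes; process them with the shared helper.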
+        Runtime::ProcessWeakClass(
+            reinterpret_cast<GcRoot<mirror::Class>*>(&roots[i]),
+            visitor,
+            Runtime::GetWeakClassSentinel());
       }
     }
   }
@@ -480,7 +449,8 @@
     for (size_t i = 0; i < info->number_of_inline_caches_; ++i) {
       InlineCache* cache = &info->cache_[i];
       for (size_t j = 0; j < InlineCache::kIndividualCacheSize; ++j) {
-        ProcessWeakClass(&cache->classes_[j], visitor, nullptr);
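+        // Inline cache entries are cleared to null rather than the sentinel.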
+        Runtime::ProcessWeakClass(&cache->classes_[j], visitor, nullptr);
       }
     }
   }
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index 3c6e437..fad0de6 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -702,6 +702,9 @@
     // from mutators. See b/32167580.
     GetJit()->GetCodeCache()->SweepRootTables(visitor);
   }
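+  // The per-thread interpreter caches hold weak references to classes and
+  // strings, so they must be swept as well.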
+  thread_list_->SweepInterpreterCaches(visitor);
 
   // All other generic system-weak holders.
   for (gc::AbstractSystemWeakHolder* holder : system_weak_holders_) {
@@ -2978,4 +2979,29 @@
   return !IsAotCompiler() && !(IsSystemServer() && jit_options_->GetSaveProfilingInfo());
 }
 
+void Runtime::ProcessWeakClass(GcRoot<mirror::Class>* root_ptr,
+                               IsMarkedVisitor* visitor,
+                               mirror::Class* update) {
+  // This does not need a read barrier because this is called by GC.
+  mirror::Class* cls = root_ptr->Read<kWithoutReadBarrier>();
+  if (cls != nullptr && cls != GetWeakClassSentinel()) {
+    DCHECK((cls->IsClass<kDefaultVerifyFlags>()));
+    // Look at the classloader of the class to know if it has been unloaded.
+    // This does not need a read barrier because this is called by GC.
+    ObjPtr<mirror::Object> class_loader =
+        cls->GetClassLoader<kDefaultVerifyFlags, kWithoutReadBarrier>();
+    if (class_loader == nullptr || visitor->IsMarked(class_loader.Ptr()) != nullptr) {
+      // The class loader is live, update the entry if the class has moved.
+      mirror::Class* new_cls = down_cast<mirror::Class*>(visitor->IsMarked(cls));
+      // Note that new_cls can be null for CMS and newly allocated objects.
+      if (new_cls != nullptr && new_cls != cls) {
+        *root_ptr = GcRoot<mirror::Class>(new_cls);
+      }
+    } else {
+      // The class loader is not live, clear the entry.
+      *root_ptr = GcRoot<mirror::Class>(update);
+    }
+  }
+}
+
 }  // namespace art
diff --git a/runtime/runtime.h b/runtime/runtime.h
index cfa67a8..b61225c 100644
--- a/runtime/runtime.h
+++ b/runtime/runtime.h
@@ -775,6 +775,18 @@
   // TODO: Remove this when this is no longer needed (b/116087961).
   GcRoot<mirror::Object> GetSentinel() REQUIRES_SHARED(Locks::mutator_lock_);
 
+  // Use a sentinel for marking entries in a table that have been cleared.
+  // This helps diagnose cases where code wrongly accesses such entries.
+  static mirror::Class* GetWeakClassSentinel() {
+    return reinterpret_cast<mirror::Class*>(0xebadbeef);
+  }
+
+  // Helper for the GC to process a weak class in a table.
+  static void ProcessWeakClass(GcRoot<mirror::Class>* root_ptr,
+                               IsMarkedVisitor* visitor,
+                               mirror::Class* update)
+      REQUIRES_SHARED(Locks::mutator_lock_);
+
   // Create a normal LinearAlloc or low 4gb version if we are 64 bit AOT compiler.
   LinearAlloc* CreateLinearAlloc();
 
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 59a1705..f4e2b65 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -4037,18 +4037,45 @@
   for (instrumentation::InstrumentationStackFrame& frame : *GetInstrumentationStack()) {
     visitor->VisitRootIfNonNull(&frame.this_object_, RootInfo(kRootVMInternal, thread_id));
   }
+}
+
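+// Sweep the entries of the interpreter cache: cached classes are treated as
+// weak (cleared once their class loader is unloaded), and cached strings are
+// updated in place when they move.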
+void Thread::SweepInterpreterCache(IsMarkedVisitor* visitor) {
   for (InterpreterCache::Entry& entry : GetInterpreterCache()->GetArray()) {
     const Instruction* inst = reinterpret_cast<const Instruction*>(entry.first);
-    if (inst != nullptr &&
-        (inst->Opcode() == Instruction::NEW_INSTANCE ||
-         inst->Opcode() == Instruction::CHECK_CAST ||
-         inst->Opcode() == Instruction::INSTANCE_OF ||
-         inst->Opcode() == Instruction::NEW_ARRAY ||
-         inst->Opcode() == Instruction::CONST_CLASS ||
-         inst->Opcode() == Instruction::CONST_STRING ||
-         inst->Opcode() == Instruction::CONST_STRING_JUMBO)) {
-      visitor->VisitRootIfNonNull(reinterpret_cast<mirror::Object**>(&entry.second),
-                                  RootInfo(kRootThreadObject, thread_id));
+    if (inst != nullptr) {
+      if (inst->Opcode() == Instruction::NEW_INSTANCE ||
+          inst->Opcode() == Instruction::CHECK_CAST ||
+          inst->Opcode() == Instruction::INSTANCE_OF ||
+          inst->Opcode() == Instruction::NEW_ARRAY ||
+          inst->Opcode() == Instruction::CONST_CLASS) {
+        mirror::Class* cls = reinterpret_cast<mirror::Class*>(entry.second);
+        if (cls == nullptr || cls == Runtime::GetWeakClassSentinel()) {
+          // Entry got deleted in a previous sweep.
+          continue;
+        }
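+        // Defer to the shared weak-class processing: it clears the entry to
+        // the sentinel if the class loader is no longer alive, and updates it
+        // if the class has moved.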
+        Runtime::ProcessWeakClass(
+            reinterpret_cast<GcRoot<mirror::Class>*>(&entry.second),
+            visitor,
+            Runtime::GetWeakClassSentinel());
+      } else if (inst->Opcode() == Instruction::CONST_STRING ||
+                 inst->Opcode() == Instruction::CONST_STRING_JUMBO) {
+        mirror::Object* object = reinterpret_cast<mirror::Object*>(entry.second);
+        mirror::Object* new_object = visitor->IsMarked(object);
+        // We know the string is marked because it is a strongly-interned string
+        // that is always alive (b/117621117 tracks making these strings weak).
+        // The IsMarked implementation of the CMS collector returns
+        // null for newly allocated objects, but we know those haven't moved. Therefore,
+        // only update the entry if we get a different non-null string.
+        if (new_object != nullptr && new_object != object) {
+          entry.second = reinterpret_cast<size_t>(new_object);
+        }
+      }
     }
   }
 }
diff --git a/runtime/thread.h b/runtime/thread.h
index d6faa95..d3361ee 100644
--- a/runtime/thread.h
+++ b/runtime/thread.h
@@ -89,6 +89,7 @@
 class DeoptimizationContextRecord;
 class DexFile;
 class FrameIdToShadowFrame;
+class IsMarkedVisitor;
 class JavaVMExt;
 class JNIEnvExt;
 class Monitor;
@@ -726,7 +727,7 @@
   }
 
   static constexpr uint32_t QuickEntryPointOffsetWithSize(size_t quick_entrypoint_offset,
-                                                PointerSize pointer_size) {
+                                                          PointerSize pointer_size) {
     if (pointer_size == PointerSize::k32) {
       return QuickEntryPointOffset<PointerSize::k32>(quick_entrypoint_offset).
           Uint32Value();
@@ -1470,6 +1471,9 @@
   template <bool kPrecise>
   void VisitRoots(RootVisitor* visitor) REQUIRES_SHARED(Locks::mutator_lock_);
 
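+  // Sweep weak references held in the interpreter cache (called during GC).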
+  void SweepInterpreterCache(IsMarkedVisitor* visitor) REQUIRES_SHARED(Locks::mutator_lock_);
+
   static bool IsAotCompiler();
 
   void ReleaseLongJumpContextInternal();
diff --git a/runtime/thread_list.cc b/runtime/thread_list.cc
index 98218e7..d7b12de 100644
--- a/runtime/thread_list.cc
+++ b/runtime/thread_list.cc
@@ -1545,6 +1545,15 @@
   }
 }
 
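+// Ask every registered thread to sweep the weak references held in its
+// interpreter cache.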
+void ThreadList::SweepInterpreterCaches(IsMarkedVisitor* visitor) const {
+  MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
+  for (const auto& thread : list_) {
+    thread->SweepInterpreterCache(visitor);
+  }
+}
+
 void ThreadList::VisitReflectiveTargets(ReflectiveValueVisitor *visitor) const {
   MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
   for (const auto& thread : list_) {
diff --git a/runtime/thread_list.h b/runtime/thread_list.h
index dad896e..4ea0995 100644
--- a/runtime/thread_list.h
+++ b/runtime/thread_list.h
@@ -37,6 +37,7 @@
 class GcPauseListener;
 }  // namespace gc
 class Closure;
+class IsMarkedVisitor;
 class RootVisitor;
 class Thread;
 class TimingLogger;
@@ -188,6 +189,11 @@
     return empty_checkpoint_barrier_.get();
   }
 
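+  // Sweep weak references in the interpreter caches of all threads.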
+  void SweepInterpreterCaches(IsMarkedVisitor* visitor) const
+      REQUIRES(!Locks::thread_list_lock_)
+      REQUIRES_SHARED(Locks::mutator_lock_);
+
  private:
   uint32_t AllocThreadId(Thread* self);
   void ReleaseThreadId(Thread* self, uint32_t id) REQUIRES(!Locks::allocated_thread_ids_lock_);