Fix class unloading with the CC collector.

Avoid unnecessarily decoding dex cache and class loader weak roots:
with the concurrent copying (CC) collector, decoding a weak root
triggers a read barrier that marks the referent alive, which keeps
dex caches and class loaders from ever being unloaded.

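The core of the change is to cache plain pointers (the DexFile* and
the class loader's allocator) next to each weak root, so that lookups
can filter on a raw pointer compare and only decode the single entry
that actually matches. A minimal standalone sketch of that pattern,
using illustrative stand-in types rather than ART's real API:

  #include <list>

  // Stand-ins for the runtime types (mirror::DexCache, art::DexFile, jweak).
  struct DexFile {};
  struct DexCache {};
  using jweak = void*;

  // Placeholder for the weak-root decode; with CC this is where the read
  // barrier would fire and keep the referent alive.
  static DexCache* DecodeWeakRoot(jweak root) {
    return static_cast<DexCache*>(root);
  }

  // Side data cached next to each weak root so lookups can filter with a raw
  // pointer compare and only decode the one entry that actually matches.
  struct DexCacheData {
    jweak weak_root;
    const DexFile* dex_file;
  };

  static DexCache* FindDexCache(const std::list<DexCacheData>& caches,
                                const DexFile& dex_file) {
    for (const DexCacheData& data : caches) {
      if (data.dex_file == &dex_file) {         // no decode, no read barrier
        return DecodeWeakRoot(data.weak_root);  // decode only on a match
      }
    }
    return nullptr;
  }

  int main() {
    DexFile foo;
    DexCache foo_cache;
    std::list<DexCacheData> caches{{&foo_cache, &foo}};
    return FindDexCache(caches, foo) == &foo_cache ? 0 : 1;
  }

The same idea applies to SharedLibrary, which now caches the class
loader's allocator pointer for equality checks instead of decoding its
weak global, and jweak clearing is tested against the non-movable
sentinel without a read barrier (Thread::IsJWeakCleared).
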
Re-enable 141-class-unload with the CC collector.

Bug: 12687968
Bug: 24468364
Change-Id: Ib4c19f25000873cab0e06047040442d135285745
diff --git a/compiler/image_writer.cc b/compiler/image_writer.cc
index 3f18d9a..3777015 100644
--- a/compiler/image_writer.cc
+++ b/compiler/image_writer.cc
@@ -334,9 +334,9 @@
   Thread* const self = Thread::Current();
   ReaderMutexLock mu(self, *class_linker->DexLock());
   uint32_t size = 0u;
-  for (jobject weak_root : class_linker->GetDexCaches()) {
+  for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
     mirror::DexCache* dex_cache =
-        down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
+        down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
     if (dex_cache == nullptr || IsInBootImage(dex_cache)) {
       continue;
     }
@@ -683,8 +683,8 @@
   ScopedAssertNoThreadSuspension sa(self, __FUNCTION__);
   ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);  // For ClassInClassTable
   ReaderMutexLock mu2(self, *class_linker->DexLock());
-  for (jobject weak_root : class_linker->GetDexCaches()) {
-    mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
+  for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
+    mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
     if (dex_cache == nullptr) {
       continue;
     }
@@ -806,8 +806,9 @@
   {
     ReaderMutexLock mu(self, *class_linker->DexLock());
     // Count number of dex caches not in the boot image.
-    for (jobject weak_root : class_linker->GetDexCaches()) {
-      mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
+    for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
+      mirror::DexCache* dex_cache =
+          down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
       dex_cache_count += IsInBootImage(dex_cache) ? 0u : 1u;
     }
   }
@@ -818,15 +819,17 @@
     ReaderMutexLock mu(self, *class_linker->DexLock());
     size_t non_image_dex_caches = 0;
     // Re-count number of non image dex caches.
-    for (jobject weak_root : class_linker->GetDexCaches()) {
-      mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
+    for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
+      mirror::DexCache* dex_cache =
+          down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
       non_image_dex_caches += IsInBootImage(dex_cache) ? 0u : 1u;
     }
     CHECK_EQ(dex_cache_count, non_image_dex_caches)
         << "The number of non-image dex caches changed.";
     size_t i = 0;
-    for (jobject weak_root : class_linker->GetDexCaches()) {
-      mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
+    for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
+      mirror::DexCache* dex_cache =
+          down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
       if (!IsInBootImage(dex_cache)) {
         dex_caches->Set<false>(i, dex_cache);
         ++i;
diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc
index 5a060af..cd83de6 100644
--- a/oatdump/oatdump.cc
+++ b/oatdump/oatdump.cc
@@ -1618,9 +1618,9 @@
       dex_caches_.clear();
       {
         ReaderMutexLock mu(self, *class_linker->DexLock());
-        for (jobject weak_root : class_linker->GetDexCaches()) {
+        for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
           mirror::DexCache* dex_cache =
-              down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
+              down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
           if (dex_cache != nullptr) {
             dex_caches_.insert(dex_cache);
           }
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index f649972..dde1001 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -368,10 +368,12 @@
   mirror::Class::SetStatus(java_lang_Object, mirror::Class::kStatusLoaded, self);
 
   java_lang_Object->SetObjectSize(sizeof(mirror::Object));
-  runtime->SetSentinel(heap->AllocObject<true>(self,
-                                               java_lang_Object.Get(),
-                                               java_lang_Object->GetObjectSize(),
-                                               VoidFunctor()));
+  // Allocate in non-movable space so that it's possible to check whether a JNI weak global ref
+  // has been cleared without the read barrier unintentionally marking the sentinel alive.
+  runtime->SetSentinel(heap->AllocNonMovableObject<true>(self,
+                                                         java_lang_Object.Get(),
+                                                         java_lang_Object->GetObjectSize(),
+                                                         VoidFunctor()));
 
   // Object[] next to hold class roots.
   Handle<mirror::Class> object_array_class(hs.NewHandle(
@@ -886,10 +888,12 @@
 
   mirror::Class* java_lang_Object = GetClassRoot(kJavaLangObject);
   java_lang_Object->SetObjectSize(sizeof(mirror::Object));
-  Runtime::Current()->SetSentinel(heap->AllocObject<true>(self,
-                                                          java_lang_Object,
-                                                          java_lang_Object->GetObjectSize(),
-                                                          VoidFunctor()));
+  // Allocate in non-movable space so that it's possible to check whether a JNI weak global ref
+  // has been cleared without the read barrier unintentionally marking the sentinel alive.
+  runtime->SetSentinel(heap->AllocNonMovableObject<true>(self,
+                                                         java_lang_Object,
+                                                         java_lang_Object->GetObjectSize(),
+                                                         VoidFunctor()));
 
   CHECK_EQ(oat_file->GetOatHeader().GetDexFileCount(),
            static_cast<uint32_t>(dex_caches->GetLength()));
@@ -2324,17 +2328,22 @@
   // Clean up pass to remove null dex caches.
   // Null dex caches can occur due to class unloading and we are lazily removing null entries.
   JavaVMExt* const vm = self->GetJniEnv()->vm;
-  for (auto it = dex_caches_.begin(); it != dex_caches_.end();) {
-    mirror::Object* dex_cache_root = self->DecodeJObject(*it);
-    if (dex_cache_root == nullptr) {
-      vm->DeleteWeakGlobalRef(self, *it);
+  for (auto it = dex_caches_.begin(); it != dex_caches_.end(); ) {
+    DexCacheData data = *it;
+    if (self->IsJWeakCleared(data.weak_root)) {
+      vm->DeleteWeakGlobalRef(self, data.weak_root);
       it = dex_caches_.erase(it);
     } else {
       ++it;
     }
   }
-  dex_caches_.push_back(vm->AddWeakGlobalRef(self, dex_cache.Get()));
+  jweak dex_cache_jweak = vm->AddWeakGlobalRef(self, dex_cache.Get());
   dex_cache->SetDexFile(&dex_file);
+  DexCacheData data;
+  data.weak_root = dex_cache_jweak;
+  data.dex_file = dex_cache->GetDexFile();
+  data.resolved_types = dex_cache->GetResolvedTypes();
+  dex_caches_.push_back(data);
 }
 
 mirror::DexCache* ClassLinker::RegisterDexFile(const DexFile& dex_file, LinearAlloc* linear_alloc) {
@@ -2381,10 +2390,16 @@
                                                   const DexFile& dex_file,
                                                   bool allow_failure) {
   // Search assuming unique-ness of dex file.
-  for (jweak weak_root : dex_caches_) {
-    mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
-    if (dex_cache != nullptr && dex_cache->GetDexFile() == &dex_file) {
-      return dex_cache;
+  for (const DexCacheData& data : dex_caches_) {
+    // Avoid decoding (and triggering read barriers on) unrelated dex caches.
+    if (data.dex_file == &dex_file) {
+      mirror::DexCache* dex_cache =
+          down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
+      if (dex_cache != nullptr) {
+        return dex_cache;
+      } else {
+        break;
+      }
     }
   }
   if (allow_failure) {
@@ -2392,8 +2407,8 @@
   }
   std::string location(dex_file.GetLocation());
   // Failure, dump diagnostic and abort.
-  for (jobject weak_root : dex_caches_) {
-    mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
+  for (const DexCacheData& data : dex_caches_) {
+    mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
     if (dex_cache != nullptr) {
       LOG(ERROR) << "Registered dex file " << dex_cache->GetDexFile()->GetLocation();
     }
@@ -2405,10 +2420,13 @@
 void ClassLinker::FixupDexCaches(ArtMethod* resolution_method) {
   Thread* const self = Thread::Current();
   ReaderMutexLock mu(self, dex_lock_);
-  for (jobject weak_root : dex_caches_) {
-    mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
-    if (dex_cache != nullptr) {
-      dex_cache->Fixup(resolution_method, image_pointer_size_);
+  for (const DexCacheData& data : dex_caches_) {
+    if (!self->IsJWeakCleared(data.weak_root)) {
+      mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(
+          self->DecodeJObject(data.weak_root));
+      if (dex_cache != nullptr) {
+        dex_cache->Fixup(resolution_method, image_pointer_size_);
+      }
     }
   }
 }
@@ -3346,15 +3364,18 @@
     Thread* const self = Thread::Current();
     ReaderMutexLock mu(self, dex_lock_);
     // Locate the dex cache of the original interface/Object
-    for (jobject weak_root : dex_caches_) {
-      mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
-      if (dex_cache != nullptr &&
-          proxy_method->HasSameDexCacheResolvedTypes(dex_cache->GetResolvedTypes(),
+    for (const DexCacheData& data : dex_caches_) {
+      if (!self->IsJWeakCleared(data.weak_root) &&
+          proxy_method->HasSameDexCacheResolvedTypes(data.resolved_types,
                                                      image_pointer_size_)) {
-        ArtMethod* resolved_method = dex_cache->GetResolvedMethod(
-            proxy_method->GetDexMethodIndex(), image_pointer_size_);
-        CHECK(resolved_method != nullptr);
-        return resolved_method;
+        mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(
+            self->DecodeJObject(data.weak_root));
+        if (dex_cache != nullptr) {
+          ArtMethod* resolved_method = dex_cache->GetResolvedMethod(
+              proxy_method->GetDexMethodIndex(), image_pointer_size_);
+          CHECK(resolved_method != nullptr);
+          return resolved_method;
+        }
       }
     }
   }
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index 21f9e7b..a72b586 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -551,6 +551,17 @@
       REQUIRES(!Locks::classlinker_classes_lock_)
       SHARED_REQUIRES(Locks::mutator_lock_);
 
+  struct DexCacheData {
+    // Weak root to the DexCache. Note: Do not decode this unnecessarily or else class unloading may
+    // not work properly.
+    jweak weak_root;
+    // The following two fields are caches of the DexCache's fields, kept here to avoid jweak
+    // decodes that trigger read barriers (which would unnecessarily mark the dex cache alive
+    // and interfere with class unloading).
+    const DexFile* dex_file;
+    GcRoot<mirror::Class>* resolved_types;
+  };
+
  private:
   struct ClassLoaderData {
     jweak weak_root;  // Weak root to enable class unloading.
@@ -902,7 +913,8 @@
   size_t GetDexCacheCount() SHARED_REQUIRES(Locks::mutator_lock_, dex_lock_) {
     return dex_caches_.size();
   }
-  const std::list<jweak>& GetDexCaches() SHARED_REQUIRES(Locks::mutator_lock_, dex_lock_) {
+  const std::list<DexCacheData>& GetDexCachesData()
+      SHARED_REQUIRES(Locks::mutator_lock_, dex_lock_) {
     return dex_caches_;
   }
 
@@ -965,9 +977,9 @@
   std::vector<std::unique_ptr<const DexFile>> opened_dex_files_;
 
   mutable ReaderWriterMutex dex_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
-  // JNI weak globals to allow dex caches to get unloaded. We lazily delete weak globals when we
-  // register new dex files.
-  std::list<jweak> dex_caches_ GUARDED_BY(dex_lock_);
+  // JNI weak globals and side data to allow dex caches to get unloaded. We lazily delete weak
+  // globals when we register new dex files.
+  std::list<DexCacheData> dex_caches_ GUARDED_BY(dex_lock_);
 
   // This contains the class loaders which have class tables. It is populated by
   // InsertClassTableForClassLoader.
diff --git a/runtime/java_vm_ext.cc b/runtime/java_vm_ext.cc
index b5e28e9..7cc05f7 100644
--- a/runtime/java_vm_ext.cc
+++ b/runtime/java_vm_ext.cc
@@ -56,15 +56,17 @@
 class SharedLibrary {
  public:
   SharedLibrary(JNIEnv* env, Thread* self, const std::string& path, void* handle,
-                jobject class_loader)
+                jobject class_loader, void* class_loader_allocator)
       : path_(path),
         handle_(handle),
         needs_native_bridge_(false),
         class_loader_(env->NewWeakGlobalRef(class_loader)),
+        class_loader_allocator_(class_loader_allocator),
         jni_on_load_lock_("JNI_OnLoad lock"),
         jni_on_load_cond_("JNI_OnLoad condition variable", jni_on_load_lock_),
         jni_on_load_thread_id_(self->GetThreadId()),
         jni_on_load_result_(kPending) {
+    CHECK(class_loader_allocator_ != nullptr);
   }
 
   ~SharedLibrary() {
@@ -78,6 +80,10 @@
     return class_loader_;
   }
 
+  const void* GetClassLoaderAllocator() const {
+    return class_loader_allocator_;
+  }
+
   const std::string& GetPath() const {
     return path_;
   }
@@ -169,6 +175,9 @@
   // The ClassLoader this library is associated with, a weak global JNI reference that is
   // created/deleted with the scope of the library.
   const jweak class_loader_;
+  // Used for class loader equality checks so we can avoid decoding the weak root, which would
+  // trigger read barriers that interfere with class unloading.
+  const void* class_loader_allocator_;
 
   // Guards remaining items.
   Mutex jni_on_load_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
@@ -224,11 +233,15 @@
       SHARED_REQUIRES(Locks::mutator_lock_) {
     std::string jni_short_name(JniShortName(m));
     std::string jni_long_name(JniLongName(m));
-    const mirror::ClassLoader* declaring_class_loader = m->GetDeclaringClass()->GetClassLoader();
+    mirror::ClassLoader* const declaring_class_loader = m->GetDeclaringClass()->GetClassLoader();
     ScopedObjectAccessUnchecked soa(Thread::Current());
+    void* const declaring_class_loader_allocator =
+        Runtime::Current()->GetClassLinker()->GetAllocatorForClassLoader(declaring_class_loader);
+    CHECK(declaring_class_loader_allocator != nullptr);
     for (const auto& lib : libraries_) {
       SharedLibrary* const library = lib.second;
-      if (soa.Decode<mirror::ClassLoader*>(library->GetClassLoader()) != declaring_class_loader) {
+      // Use the allocator address for class loader equality to avoid unnecessary weak root decodes.
+      if (library->GetClassLoaderAllocator() != declaring_class_loader_allocator) {
         // We only search libraries loaded by the appropriate ClassLoader.
         continue;
       }
@@ -269,7 +282,7 @@
         // If class_loader is a null jobject then it is the boot class loader. We should not unload
         // the native libraries of the boot class loader.
         if (class_loader != nullptr &&
-            soa.Decode<mirror::ClassLoader*>(class_loader) == nullptr) {
+            soa.Self()->IsJWeakCleared(class_loader)) {
           void* const sym = library->FindSymbol("JNI_OnUnload", nullptr);
           if (sym == nullptr) {
             VLOG(jni) << "[No JNI_OnUnload found in \"" << library->GetPath() << "\"]";
@@ -667,6 +680,19 @@
   return weak_globals_.SynchronizedGet(ref);
 }
 
+bool JavaVMExt::IsWeakGlobalCleared(Thread* self, IndirectRef ref) {
+  DCHECK_EQ(GetIndirectRefKind(ref), kWeakGlobal);
+  MutexLock mu(self, weak_globals_lock_);
+  while (UNLIKELY(!MayAccessWeakGlobals(self))) {
+    weak_globals_add_condition_.WaitHoldingLocks(self);
+  }
+  // When just checking whether a weak ref has been cleared, avoid triggering the read barrier in
+  // the decode (DecodeWeakGlobal) so that we won't accidentally mark the object alive. Since the
+  // cleared sentinel is a non-moving object, we can compare the ref to it without the read
+  // barrier and decide if it's cleared.
+  return Runtime::Current()->IsClearedJniWeakGlobal(weak_globals_.Get<kWithoutReadBarrier>(ref));
+}
+
 void JavaVMExt::UpdateWeakGlobal(Thread* self, IndirectRef ref, mirror::Object* result) {
   MutexLock mu(self, weak_globals_lock_);
   weak_globals_.Update(ref, result);
@@ -703,8 +729,19 @@
     MutexLock mu(self, *Locks::jni_libraries_lock_);
     library = libraries_->Get(path);
   }
+  void* class_loader_allocator = nullptr;
+  {
+    ScopedObjectAccess soa(env);
+    // As the incoming class loader is reachable/alive during the call of this function,
+    // it's okay to decode it without worrying about unexpectedly marking it alive.
+    mirror::ClassLoader* loader = soa.Decode<mirror::ClassLoader*>(class_loader);
+    class_loader_allocator =
+        Runtime::Current()->GetClassLinker()->GetAllocatorForClassLoader(loader);
+    CHECK(class_loader_allocator != nullptr);
+  }
   if (library != nullptr) {
-    if (env->IsSameObject(library->GetClassLoader(), class_loader) == JNI_FALSE) {
+    // Use the allocator pointer for class loader equality to avoid unnecessary weak root decodes.
+    if (library->GetClassLoaderAllocator() != class_loader_allocator) {
       // The library will be associated with class_loader. The JNI
       // spec says we can't load the same library into more than one
       // class loader.
@@ -765,7 +802,7 @@
   {
     // Create SharedLibrary ahead of taking the libraries lock to maintain lock ordering.
     std::unique_ptr<SharedLibrary> new_library(
-        new SharedLibrary(env, self, path, handle, class_loader));
+        new SharedLibrary(env, self, path, handle, class_loader, class_loader_allocator));
     MutexLock mu(self, *Locks::jni_libraries_lock_);
     library = libraries_->Get(path);
     if (library == nullptr) {  // We won race to get libraries_lock.
diff --git a/runtime/java_vm_ext.h b/runtime/java_vm_ext.h
index c1fbdc0..618f6fa 100644
--- a/runtime/java_vm_ext.h
+++ b/runtime/java_vm_ext.h
@@ -149,6 +149,11 @@
       SHARED_REQUIRES(Locks::mutator_lock_)
       REQUIRES(!weak_globals_lock_);
 
+  // Checks if the weak global ref has been cleared by the GC, without decoding (no read barrier).
+  bool IsWeakGlobalCleared(Thread* self, IndirectRef ref)
+      SHARED_REQUIRES(Locks::mutator_lock_)
+      REQUIRES(!weak_globals_lock_);
+
   Mutex& WeakGlobalsLock() RETURN_CAPABILITY(weak_globals_lock_) {
     return weak_globals_lock_;
   }
diff --git a/runtime/native/dalvik_system_DexFile.cc b/runtime/native/dalvik_system_DexFile.cc
index e85434ee..4cd3c3d 100644
--- a/runtime/native/dalvik_system_DexFile.cc
+++ b/runtime/native/dalvik_system_DexFile.cc
@@ -164,7 +164,6 @@
   if (env->ExceptionCheck()) {
     return 0;
   }
-
   Runtime* const runtime = Runtime::Current();
   ClassLinker* linker = runtime->GetClassLinker();
   std::vector<std::unique_ptr<const DexFile>> dex_files;
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index 17f34c0..92a56a9 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -820,6 +820,7 @@
 void Runtime::SetSentinel(mirror::Object* sentinel) {
   CHECK(sentinel_.Read() == nullptr);
   CHECK(sentinel != nullptr);
+  CHECK(!heap_->IsMovableObject(sentinel));
   sentinel_ = GcRoot<mirror::Object>(sentinel);
 }
 
diff --git a/runtime/thread.cc b/runtime/thread.cc
index b0cf418..30eb254 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -1886,6 +1886,14 @@
   return result;
 }
 
+bool Thread::IsJWeakCleared(jweak obj) const {
+  CHECK(obj != nullptr);
+  IndirectRef ref = reinterpret_cast<IndirectRef>(obj);
+  IndirectRefKind kind = GetIndirectRefKind(ref);
+  CHECK_EQ(kind, kWeakGlobal);
+  return tlsPtr_.jni_env->vm->IsWeakGlobalCleared(const_cast<Thread*>(this), ref);
+}
+
 // Implements java.lang.Thread.interrupted.
 bool Thread::Interrupted() {
   MutexLock mu(Thread::Current(), *wait_mutex_);
diff --git a/runtime/thread.h b/runtime/thread.h
index 138c143..4624f27 100644
--- a/runtime/thread.h
+++ b/runtime/thread.h
@@ -445,6 +445,8 @@
 
   // Convert a jobject into a Object*
   mirror::Object* DecodeJObject(jobject obj) const SHARED_REQUIRES(Locks::mutator_lock_);
+  // Checks if the weak global ref has been cleared by the GC without decoding it.
+  bool IsJWeakCleared(jweak obj) const SHARED_REQUIRES(Locks::mutator_lock_);
 
   mirror::Object* GetMonitorEnterObject() const SHARED_REQUIRES(Locks::mutator_lock_) {
     return tlsPtr_.monitor_enter_object;
diff --git a/test/Android.run-test.mk b/test/Android.run-test.mk
index 9d13da7..60b6591 100644
--- a/test/Android.run-test.mk
+++ b/test/Android.run-test.mk
@@ -498,10 +498,8 @@
 
 # Tests that should fail in the read barrier configuration.
 # 137: Read barrier forces interpreter. Cannot run this with the interpreter.
-# 141: Class unloading test is flaky with CC since CC seems to occasionally keep class loaders live.
 TEST_ART_BROKEN_READ_BARRIER_RUN_TESTS := \
-  137-cfi \
-  141-class-unload
+  137-cfi
 
 ifeq ($(ART_USE_READ_BARRIER),true)
   ART_TEST_KNOWN_BROKEN += $(call all-run-test-names,$(TARGET_TYPES),$(RUN_TYPES),$(PREBUILD_TYPES), \