Have ZeroLiveBytesForLargeObject process all the regions of a large object.

When executing `RegionSpace::ZeroLiveBytesForLargeObject(obj)`, in
addition to zeroing the live bytes count of the large region starting
at address `obj`, also zero the live bytes count of the associated
large tail regions.

This addresses an issue with Sticky-Bit (Generational) CC collection,
where an assertion in `RegionSpace::ClearFromSpace` could fail when
processing a large unevac from-space region whose allocated bytes were
all live.
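
Purely as illustration (not part of this change), the traversal boils
down to striding from the object's start address to the end of its
region-aligned allocation in kRegionSize steps and zeroing each
region's live bytes counter. A minimal standalone sketch, using a
hypothetical simplified Region type and region size rather than ART's
real classes:

  #include <cstddef>
  #include <vector>

  constexpr std::size_t kRegionSize = 256 * 1024;  // Hypothetical value.

  struct Region {
    std::size_t live_bytes = 0;
    void ZeroLiveBytes() { live_bytes = 0; }
  };

  // Zero the live bytes of every region backing a large object that
  // starts at region index `first` and occupies `obj_size` bytes.
  void ZeroLiveBytesForLargeObject(std::vector<Region>& regions,
                                   std::size_t first,
                                   std::size_t obj_size) {
    // Allocation size rounded up to a whole number of regions,
    // mirroring RoundUp(obj_size, kRegionSize) in the real code.
    const std::size_t alloc_size =
        (obj_size + kRegionSize - 1) / kRegionSize * kRegionSize;
    for (std::size_t off = 0; off < alloc_size; off += kRegionSize) {
      regions[first + off / kRegionSize].ZeroLiveBytes();
    }
  }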

Test: ART run-tests & gtests, libcore tests, JDWP tests (host & device)
Test: Device/emulator boot test
Bug: 67628039
Bug: 12687968
Change-Id: I66379803adc05f6e090833e4e698a4b749fcaff0
diff --git a/runtime/gc/space/region_space-inl.h b/runtime/gc/space/region_space-inl.h
index 436eb2c..e30b63a 100644
--- a/runtime/gc/space/region_space-inl.h
+++ b/runtime/gc/space/region_space-inl.h
@@ -407,7 +407,7 @@
       --num_non_free_regions_;
     }
   }
-  if (end_addr < Limit()) {
+  if (kIsDebugBuild && end_addr < Limit()) {
     // If we aren't at the end of the space, check that the next region is not a large tail.
     Region* following_reg = RefToRegionLocked(reinterpret_cast<mirror::Object*>(end_addr));
     DCHECK(!following_reg->IsLargeTail());
diff --git a/runtime/gc/space/region_space.cc b/runtime/gc/space/region_space.cc
index 1ed81d0..b2a0a97 100644
--- a/runtime/gc/space/region_space.cc
+++ b/runtime/gc/space/region_space.cc
@@ -209,6 +209,41 @@
   return result;
 }
 
+void RegionSpace::ZeroLiveBytesForLargeObject(mirror::Object* obj) {
+  // This method is only used when Generational CC collection is enabled.
+  DCHECK(kEnableGenerationalConcurrentCopyingCollection);
+
+  // This code uses logic similar to that used in RegionSpace::FreeLarge
+  // to traverse the regions supporting `obj`.
+  // TODO: Refactor.
+  DCHECK(IsLargeObject(obj));
+  DCHECK_ALIGNED(obj, kRegionSize);
+  size_t obj_size = obj->SizeOf<kDefaultVerifyFlags>();
+  DCHECK_GT(obj_size, space::RegionSpace::kRegionSize);
+  // Size of the memory area allocated for `obj`.
+  size_t obj_alloc_size = RoundUp(obj_size, space::RegionSpace::kRegionSize);
+  uint8_t* begin_addr = reinterpret_cast<uint8_t*>(obj);
+  uint8_t* end_addr = begin_addr + obj_alloc_size;
+  DCHECK_ALIGNED(end_addr, kRegionSize);
+
+  // Zero the live bytes of the large region and large tail regions containing the object.
+  MutexLock mu(Thread::Current(), region_lock_);
+  for (uint8_t* addr = begin_addr; addr < end_addr; addr += kRegionSize) {
+    Region* region = RefToRegionLocked(reinterpret_cast<mirror::Object*>(addr));
+    if (addr == begin_addr) {
+      DCHECK(region->IsLarge());
+    } else {
+      DCHECK(region->IsLargeTail());
+    }
+    region->ZeroLiveBytes();
+  }
+  if (kIsDebugBuild && end_addr < Limit()) {
+    // If we aren't at the end of the space, check that the next region is not a large tail.
+    Region* following_region = RefToRegionLocked(reinterpret_cast<mirror::Object*>(end_addr));
+    DCHECK(!following_region->IsLargeTail());
+  }
+}
+
 // Determine which regions to evacuate and mark them as
 // from-space. Mark the rest as unevacuated from-space.
 void RegionSpace::SetFromSpace(accounting::ReadBarrierTable* rb_table,
@@ -371,16 +406,7 @@
         while (i + regions_to_clear_bitmap < num_regions_) {
           Region* const cur = &regions_[i + regions_to_clear_bitmap];
           if (!cur->AllAllocatedBytesAreLive()) {
-#if 0
-            // FIXME: These tests fail the following assertion with Sticky-Bit (Generational) CC:
-            //
-            //   004-ThreadStress
-            //   061-out-of-memory
-            //   080-oom-throw
-            //   134-reg-promotion
-            //   617-clinit-oome
             DCHECK(!cur->IsLargeTail());
-#endif
             break;
           }
           CHECK(cur->IsInUnevacFromSpace());
diff --git a/runtime/gc/space/region_space.h b/runtime/gc/space/region_space.h
index d86304a..3f9644d 100644
--- a/runtime/gc/space/region_space.h
+++ b/runtime/gc/space/region_space.h
@@ -271,12 +271,7 @@
 
   // Zero live bytes for a large object, used by young gen CC for marking newly allocated large
   // objects.
-  void ZeroLiveBytesForLargeObject(mirror::Object* ref) {
-    // This method is only used when Generational CC collection is enabled.
-    DCHECK(kEnableGenerationalConcurrentCopyingCollection);
-    DCHECK(IsLargeObject(ref));
-    RefToRegionUnlocked(ref)->ZeroLiveBytes();
-  }
+  void ZeroLiveBytesForLargeObject(mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_);
 
   // Determine which regions to evacuate and tag them as
   // from-space. Tag the rest as unevacuated from-space.