Persistent bitmap for region space

Fix a bug where the region space was not safely walkable because holes
in the unevac regions could contain dangling class pointers.

No performance change; RAM overhead is 1.1-1.2% of the heap (non-LOS).
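
The fix adds SpaceBitmap::ClearRange, which zeroes the unaligned edges
of a bitmap range by hand and returns the whole pages in the middle to
the kernel. A minimal sketch of that strategy (names are illustrative;
assumes a 4 KiB page and Linux MADV_DONTNEED zero-fill semantics for
private anonymous memory):

  #include <sys/mman.h>
  #include <cstdint>
  #include <cstring>

  // Zero [begin, end): memset the partial pages at the edges and let the
  // kernel zero-fill the page-aligned middle on the next access.
  void ZeroAndRelease(uint8_t* begin, uint8_t* end) {
    constexpr uintptr_t kPage = 4096;
    uint8_t* page_begin = reinterpret_cast<uint8_t*>(
        (reinterpret_cast<uintptr_t>(begin) + kPage - 1) & ~(kPage - 1));
    uint8_t* page_end = reinterpret_cast<uint8_t*>(
        reinterpret_cast<uintptr_t>(end) & ~(kPage - 1));
    if (page_begin >= page_end) {
      std::memset(begin, 0, end - begin);  // No whole page in the range.
      return;
    }
    std::memset(begin, 0, page_begin - begin);  // Leading partial page.
    if (madvise(page_begin, page_end - page_begin, MADV_DONTNEED) == -1) {
      std::memset(page_begin, 0, page_end - page_begin);  // Fallback.
    }
    std::memset(page_end, 0, end - page_end);  // Trailing partial page.
  }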

Test: test-art-host
Test: https://android-review.googlesource.com/#/c/288907/

Bug: 31522820

Change-Id: Ic4f8b7175e117689cb1ce3e28b082cf63f1f7b5a
diff --git a/runtime/gc/accounting/space_bitmap.cc b/runtime/gc/accounting/space_bitmap.cc
index a968343..e2f5a1d 100644
--- a/runtime/gc/accounting/space_bitmap.cc
+++ b/runtime/gc/accounting/space_bitmap.cc
@@ -104,6 +104,50 @@
 }
 
 template<size_t kAlignment>
+void SpaceBitmap<kAlignment>::ClearRange(const mirror::Object* begin, const mirror::Object* end) {
+  uintptr_t begin_offset = reinterpret_cast<uintptr_t>(begin) - heap_begin_;
+  uintptr_t end_offset = reinterpret_cast<uintptr_t>(end) - heap_begin_;
+  // Align begin and end to word boundaries.
+  while (begin_offset < end_offset && OffsetBitIndex(begin_offset) != 0) {
+    Clear(reinterpret_cast<mirror::Object*>(heap_begin_ + begin_offset));
+    begin_offset += kAlignment;
+  }
+  while (begin_offset < end_offset && OffsetBitIndex(end_offset) != 0) {
+    end_offset -= kAlignment;
+    Clear(reinterpret_cast<mirror::Object*>(heap_begin_ + end_offset));
+  }
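+  // The remaining range [begin_offset, end_offset) is now word-aligned.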
+  const uintptr_t start_index = OffsetToIndex(begin_offset);
+  const uintptr_t end_index = OffsetToIndex(end_offset);
+  Atomic<uintptr_t>* const mem_begin = &bitmap_begin_[start_index];
+  Atomic<uintptr_t>* const mem_end = &bitmap_begin_[end_index];
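+  // madvise can only release whole pages, so find the page-aligned middle of the range.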
+  Atomic<uintptr_t>* const page_begin = AlignUp(mem_begin, kPageSize);
+  Atomic<uintptr_t>* const page_end = AlignDown(mem_end, kPageSize);
+  if (!kMadviseZeroes || page_begin >= page_end) {
+    // No possible area to madvise.
+    std::fill(reinterpret_cast<uint8_t*>(mem_begin),
+              reinterpret_cast<uint8_t*>(mem_end),
+              0);
+  } else {
+    // Spans one or more pages.
+    DCHECK_LE(mem_begin, page_begin);
+    DCHECK_LE(page_begin, page_end);
+    DCHECK_LE(page_end, mem_end);
+    std::fill(reinterpret_cast<uint8_t*>(mem_begin),
+              reinterpret_cast<uint8_t*>(page_begin),
+              0);
+    CHECK_NE(madvise(page_begin,
+                     reinterpret_cast<uint8_t*>(page_end) - reinterpret_cast<uint8_t*>(page_begin),
+                     MADV_DONTNEED),
+             -1) << "madvise failed";
+    std::fill(reinterpret_cast<uint8_t*>(page_end),
+              reinterpret_cast<uint8_t*>(mem_end),
+              0);
+  }
+}
+
+template<size_t kAlignment>
 void SpaceBitmap<kAlignment>::CopyFrom(SpaceBitmap* source_bitmap) {
   DCHECK_EQ(Size(), source_bitmap->Size());
   const size_t count = source_bitmap->Size() / sizeof(intptr_t);
diff --git a/runtime/gc/accounting/space_bitmap.h b/runtime/gc/accounting/space_bitmap.h
index 296663a..b136488 100644
--- a/runtime/gc/accounting/space_bitmap.h
+++ b/runtime/gc/accounting/space_bitmap.h
@@ -68,9 +68,13 @@
     return static_cast<T>(index * kAlignment * kBitsPerIntPtrT);
   }
 
+  ALWAYS_INLINE static constexpr uintptr_t OffsetBitIndex(uintptr_t offset) {
+    return (offset / kAlignment) % kBitsPerIntPtrT;
+  }
+
   // Bits are packed in the obvious way.
   static constexpr uintptr_t OffsetToMask(uintptr_t offset) {
-    return (static_cast<size_t>(1)) << ((offset / kAlignment) % kBitsPerIntPtrT);
+    return static_cast<size_t>(1) << OffsetBitIndex(offset);
   }
 
   bool Set(const mirror::Object* obj) ALWAYS_INLINE {
@@ -87,6 +91,9 @@
   // Fill the bitmap with zeroes.  Returns the bitmap's memory to the system as a side-effect.
   void Clear();
 
+  // Clear a range covered by the bitmap, using madvise to release whole pages if possible.
+  void ClearRange(const mirror::Object* begin, const mirror::Object* end);
+
   bool Test(const mirror::Object* obj) const;
 
   // Return true iff <obj> is within the range of pointers that this bitmap could potentially cover,
diff --git a/runtime/gc/accounting/space_bitmap_test.cc b/runtime/gc/accounting/space_bitmap_test.cc
index edb08ef..8c06cfd 100644
--- a/runtime/gc/accounting/space_bitmap_test.cc
+++ b/runtime/gc/accounting/space_bitmap_test.cc
@@ -62,7 +62,7 @@
 
   std::unique_ptr<ContinuousSpaceBitmap> space_bitmap(
       ContinuousSpaceBitmap::Create("test bitmap", heap_begin, heap_capacity));
-  EXPECT_TRUE(space_bitmap.get() != nullptr);
+  EXPECT_TRUE(space_bitmap != nullptr);
 
   // Set all the odd bits in the first BitsPerIntPtrT * 3 to one.
   for (size_t j = 0; j < kBitsPerIntPtrT * 3; ++j) {
@@ -87,6 +87,47 @@
   }
 }
 
+TEST_F(SpaceBitmapTest, ClearRange) {
+  uint8_t* heap_begin = reinterpret_cast<uint8_t*>(0x10000000);
+  size_t heap_capacity = 16 * MB;
+
+  std::unique_ptr<ContinuousSpaceBitmap> bitmap(
+      ContinuousSpaceBitmap::Create("test bitmap", heap_begin, heap_capacity));
+  EXPECT_TRUE(bitmap != nullptr);
+
+  // Set all of the bits in the bitmap.
+  for (size_t j = 0; j < heap_capacity; j += kObjectAlignment) {
+    const mirror::Object* obj = reinterpret_cast<mirror::Object*>(heap_begin + j);
+    bitmap->Set(obj);
+  }
+
+  std::vector<std::pair<uintptr_t, uintptr_t>> ranges = {
+      {0, 10 * KB + kObjectAlignment},
+      {kObjectAlignment, kObjectAlignment},
+      {kObjectAlignment, 2 * kObjectAlignment},
+      {kObjectAlignment, 5 * kObjectAlignment},
+      {1 * KB + kObjectAlignment, 2 * KB + 5 * kObjectAlignment},
+  };
+  // Try clearing a few ranges.
+  for (const std::pair<uintptr_t, uintptr_t>& range : ranges) {
+    const mirror::Object* obj_begin = reinterpret_cast<mirror::Object*>(heap_begin + range.first);
+    const mirror::Object* obj_end = reinterpret_cast<mirror::Object*>(heap_begin + range.second);
+    bitmap->ClearRange(obj_begin, obj_end);
+    // Bits before the range and for a page after it should still be set.
+    for (uintptr_t i = 0; i < range.first; i += kObjectAlignment) {
+      EXPECT_TRUE(bitmap->Test(reinterpret_cast<mirror::Object*>(heap_begin + i)));
+    }
+    for (uintptr_t i = range.second; i < range.second + kPageSize; i += kObjectAlignment) {
+      EXPECT_TRUE(bitmap->Test(reinterpret_cast<mirror::Object*>(heap_begin + i)));
+    }
+    // Everything inside should be cleared.
+    for (uintptr_t i = range.first; i < range.second; i += kObjectAlignment) {
+      EXPECT_FALSE(bitmap->Test(reinterpret_cast<mirror::Object*>(heap_begin + i)));
+      bitmap->Set(reinterpret_cast<mirror::Object*>(heap_begin + i));
+    }
+  }
+}
+
 class SimpleCounter {
  public:
   explicit SimpleCounter(size_t* counter) : count_(counter) {}
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index 1931caf..13af67e 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -170,10 +170,10 @@
       CHECK(space->IsZygoteSpace() || space->IsImageSpace());
       immune_spaces_.AddSpace(space);
     } else if (space == region_space_) {
-      accounting::ContinuousSpaceBitmap* bitmap =
-          accounting::ContinuousSpaceBitmap::Create("cc region space bitmap",
-                                                    space->Begin(), space->Capacity());
-      region_space_bitmap_ = bitmap;
+      // It is OK to clear the bitmap with mutators running since the only place it is
+      // read is VisitObjects, which has mutual exclusion with the CC collector.
+      region_space_bitmap_ = region_space_->GetMarkBitmap();
+      region_space_bitmap_->Clear();
     }
   }
 }
@@ -1601,9 +1601,8 @@
     SwapBitmaps();
     heap_->UnBindBitmaps();
 
-    // Delete the region bitmap.
+    // The bitmap was cleared at the start of the GC; there is nothing to do here.
     DCHECK(region_space_bitmap_ != nullptr);
-    delete region_space_bitmap_;
     region_space_bitmap_ = nullptr;
   }
 
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index ba18699..918b8db 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -2490,6 +2490,8 @@
     } else {
       if (collector_type_ == kCollectorTypeCC) {
         region_space_->GetMemMap()->Protect(PROT_READ | PROT_WRITE);
+        // Everything was evacuated out of the region space; clear the mark bitmap.
+        region_space_->GetMarkBitmap()->Clear();
       } else {
         bump_pointer_space_->GetMemMap()->Protect(PROT_READ | PROT_WRITE);
       }
diff --git a/runtime/gc/space/region_space-inl.h b/runtime/gc/space/region_space-inl.h
index 66fd62c..bbc634d 100644
--- a/runtime/gc/space/region_space-inl.h
+++ b/runtime/gc/space/region_space-inl.h
@@ -241,15 +241,31 @@
     } else if (r->IsLargeTail()) {
       // Do nothing.
     } else {
+      // For newly allocated and evacuated regions, live bytes will be -1; such
+      // regions (and fully live ones) are walked linearly instead of via the bitmap.
       uint8_t* pos = r->Begin();
       uint8_t* top = r->Top();
-      while (pos < top) {
-        mirror::Object* obj = reinterpret_cast<mirror::Object*>(pos);
-        if (obj->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>() != nullptr) {
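+      // A partially live unevac region may contain holes (dead objects) with dangling
+      // class pointers, so it must be walked via the live bitmap.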
+      const bool need_bitmap =
+          r->LiveBytes() != static_cast<size_t>(-1) &&
+          r->LiveBytes() != static_cast<size_t>(top - pos);
+      if (need_bitmap) {
+        GetLiveBitmap()->VisitMarkedRange(
+            reinterpret_cast<uintptr_t>(pos),
+            reinterpret_cast<uintptr_t>(top),
+            [callback, arg](mirror::Object* obj) {
           callback(obj, arg);
-          pos = reinterpret_cast<uint8_t*>(GetNextObject(obj));
-        } else {
-          break;
+        });
+      } else {
+        while (pos < top) {
+          mirror::Object* obj = reinterpret_cast<mirror::Object*>(pos);
+          if (obj->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>() != nullptr) {
+            callback(obj, arg);
+            pos = reinterpret_cast<uint8_t*>(GetNextObject(obj));
+          } else {
+            break;
+          }
         }
       }
     }
diff --git a/runtime/gc/space/region_space.cc b/runtime/gc/space/region_space.cc
index 23cae7c..35bc369 100644
--- a/runtime/gc/space/region_space.cc
+++ b/runtime/gc/space/region_space.cc
@@ -59,6 +59,8 @@
   for (size_t i = 0; i < num_regions_; ++i, region_addr += kRegionSize) {
     regions_[i] = Region(i, region_addr, region_addr + kRegionSize);
   }
+  mark_bitmap_.reset(
+      accounting::ContinuousSpaceBitmap::Create("region space live bitmap", Begin(), Capacity()));
   if (kIsDebugBuild) {
     CHECK_EQ(regions_[0].Begin(), Begin());
     for (size_t i = 0; i < num_regions_; ++i) {
@@ -215,7 +217,33 @@
       r->Clear();
       --num_non_free_regions_;
     } else if (r->IsInUnevacFromSpace()) {
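+      // Coalesce the run of fully live unevac regions starting at i so that their
+      // bitmap words can be cleared together with a single ClearRange call.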
+      size_t full_count = 0;
+      while (r->IsInUnevacFromSpace()) {
+        if (i + full_count >= num_regions_) {
+          break;
+        }
+        Region* const cur = &regions_[i + full_count];
+        if (cur->LiveBytes() != static_cast<size_t>(cur->Top() - cur->Begin())) {
+          break;
+        }
+        DCHECK(cur->IsInUnevacFromSpace());
+        if (full_count != 0) {
+          cur->SetUnevacFromSpaceAsToSpace();
+        }
+        ++full_count;
+      }
+      // r itself is the full_count == 0 region; the loop above does not transition it.
       r->SetUnevacFromSpaceAsToSpace();
+      if (full_count >= 1) {
+        GetLiveBitmap()->ClearRange(
+            reinterpret_cast<mirror::Object*>(r->Begin()),
+            reinterpret_cast<mirror::Object*>(r->Begin() + full_count * kRegionSize));
+        // Skip over the extra regions we coalesced; subtract one because the
+        // enclosing for loop also increments i.
+        i += full_count - 1;
+      }
     }
   }
   evac_region_ = nullptr;
diff --git a/runtime/gc/space/region_space.h b/runtime/gc/space/region_space.h
index 4e57a85..381ccfa 100644
--- a/runtime/gc/space/region_space.h
+++ b/runtime/gc/space/region_space.h
@@ -77,12 +77,10 @@
     return 0;
   }
   accounting::ContinuousSpaceBitmap* GetLiveBitmap() const OVERRIDE {
-    // No live bitmap.
-    return nullptr;
+    return mark_bitmap_.get();
   }
   accounting::ContinuousSpaceBitmap* GetMarkBitmap() const OVERRIDE {
-    // No mark bitmap.
-    return nullptr;
+    return mark_bitmap_.get();
   }
 
   void Clear() OVERRIDE REQUIRES(!region_lock_);
@@ -516,6 +514,9 @@
   Region* evac_region_;            // The region that's being evacuated to currently.
   Region full_region_;             // The dummy/sentinel region that looks full.
 
+  // Mark bitmap used by the GC.
+  std::unique_ptr<accounting::ContinuousSpaceBitmap> mark_bitmap_;
+
   DISALLOW_COPY_AND_ASSIGN(RegionSpace);
 };