ART: Add longest consecutive free bytes to region dump

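When dumping region info (e.g. with -XX:DumpRegionInfoAfterGC), walk
each non-large region's live objects and report the longest run of
consecutive free bytes found between them on the region's dump line.
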
Test: Run art with -XX:DumpRegionInfoAfterGC on some benchmarks
Bug: 119486919
Change-Id: I03284f7eb27b04ca5458b86bbaf527cb1538f64c
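
For reference, a region's dump line gains a field of this form (the
values below are illustrative, not taken from a real run):

  ratio over allocated bytes=0.75 longest_consecutive_free_bytes=65536 (64KB)
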
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index aa09cbe..6696cc1 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -543,6 +543,10 @@
     return total_bytes_freed_ever_;
   }
 
+  space::RegionSpace* GetRegionSpace() const {
+    return region_space_;
+  }
+
   // Implements java.lang.Runtime.maxMemory, returning the maximum amount of memory a program can
   // consume. For a regular VM this would relate to the -Xmx option and would return -1 if no Xmx
   // were specified. Android apps start with a growth limit (small heap size) which is
diff --git a/runtime/gc/space/region_space-inl.h b/runtime/gc/space/region_space-inl.h
index 9f5c117..86a0a6e 100644
--- a/runtime/gc/space/region_space-inl.h
+++ b/runtime/gc/space/region_space-inl.h
@@ -249,27 +249,47 @@
     } else if (r->IsLargeTail()) {
       // Do nothing.
     } else {
-      // For newly allocated and evacuated regions, live bytes will be -1.
-      uint8_t* pos = r->Begin();
-      uint8_t* top = r->Top();
-      const bool need_bitmap =
-          r->LiveBytes() != static_cast<size_t>(-1) &&
-          r->LiveBytes() != static_cast<size_t>(top - pos);
-      if (need_bitmap) {
-        GetLiveBitmap()->VisitMarkedRange(
-            reinterpret_cast<uintptr_t>(pos),
-            reinterpret_cast<uintptr_t>(top),
-            visitor);
+      WalkNonLargeRegion(visitor, r);
+    }
+  }
+}
+
+template<typename Visitor>
+inline void RegionSpace::WalkNonLargeRegion(Visitor&& visitor, const Region* r) {
+  DCHECK(!r->IsLarge() && !r->IsLargeTail());
+  // For newly allocated and evacuated regions, live bytes will be -1.
+  uint8_t* pos = r->Begin();
+  uint8_t* top = r->Top();
+  // We need the region space bitmap to iterate over a region's
+  // objects if its live bytes count is valid (i.e. not -1) and lower
+  // than its allocated bytes count: in that case, not all allocated
+  // objects are guaranteed to be "alive" (i.e. valid), so we depend
+  // on the region space bitmap to identify which ones to visit.
+  //
+  // On the other hand, when the live bytes count is invalid (i.e.
+  // the region is newly allocated or evacuated) or equal to the
+  // allocated bytes count, the allocated objects form a range of
+  // consecutive objects (modulo object alignment constraints) that
+  // can be visited iteratively: we can compute the next object's
+  // location from the current object's address and size, and the
+  // iteration stops at the first slot whose class pointer is null
+  // (i.e. at the first non-object).
+  const bool need_bitmap =
+      r->LiveBytes() != static_cast<size_t>(-1) &&
+      r->LiveBytes() != static_cast<size_t>(top - pos);
+  if (need_bitmap) {
+    GetLiveBitmap()->VisitMarkedRange(
+        reinterpret_cast<uintptr_t>(pos),
+        reinterpret_cast<uintptr_t>(top),
+        visitor);
+  } else {
+    while (pos < top) {
+      mirror::Object* obj = reinterpret_cast<mirror::Object*>(pos);
+      if (obj->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>() != nullptr) {
+        visitor(obj);
+        pos = reinterpret_cast<uint8_t*>(GetNextObject(obj));
       } else {
-        while (pos < top) {
-          mirror::Object* obj = reinterpret_cast<mirror::Object*>(pos);
-          if (obj->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>() != nullptr) {
-            visitor(obj);
-            pos = reinterpret_cast<uint8_t*>(GetNextObject(obj));
-          } else {
-            break;
-          }
-        }
+        break;
       }
     }
   }
diff --git a/runtime/gc/space/region_space.cc b/runtime/gc/space/region_space.cc
index 07783ba..79b370c 100644
--- a/runtime/gc/space/region_space.cc
+++ b/runtime/gc/space/region_space.cc
@@ -840,6 +840,9 @@
   if (live_bytes_ != static_cast<size_t>(-1)) {
     os << " ratio over allocated bytes="
        << (static_cast<float>(live_bytes_) / RoundUp(BytesAllocated(), kRegionSize));
+    uint64_t longest_consecutive_free_bytes = GetLongestConsecutiveFreeBytes();
+    os << " longest_consecutive_free_bytes=" << longest_consecutive_free_bytes
+       << " (" << PrettySize(longest_consecutive_free_bytes) << ")";
   }
 
   os << " is_newly_allocated=" << std::boolalpha << is_newly_allocated_ << std::noboolalpha
@@ -847,6 +850,29 @@
      << " thread=" << thread_ << '\n';
 }
 
+uint64_t RegionSpace::Region::GetLongestConsecutiveFreeBytes() const {
+  if (IsFree()) {
+    return kRegionSize;
+  }
+  if (IsLarge() || IsLargeTail()) {
+    return 0u;
+  }
+  uintptr_t max_gap = 0u;
+  uintptr_t prev_object_end = reinterpret_cast<uintptr_t>(Begin());
+  // Iterate through all live objects and find the largest free gap.
+  auto visitor = [&max_gap, &prev_object_end](mirror::Object* obj)
+    REQUIRES_SHARED(Locks::mutator_lock_) {
+    uintptr_t current = reinterpret_cast<uintptr_t>(obj);
+    uintptr_t diff = current - prev_object_end;
+    max_gap = std::max(diff, max_gap);
+    uintptr_t object_end = reinterpret_cast<uintptr_t>(obj) + obj->SizeOf();
+    prev_object_end = RoundUp(object_end, kAlignment);
+  };
+  space::RegionSpace* region_space = art::Runtime::Current()->GetHeap()->GetRegionSpace();
+  region_space->WalkNonLargeRegion(visitor, this);
+  return static_cast<uint64_t>(max_gap);
+}
+
 size_t RegionSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size) {
   size_t num_bytes = obj->SizeOf();
   if (usable_size != nullptr) {
diff --git a/runtime/gc/space/region_space.h b/runtime/gc/space/region_space.h
index 75c99ec..8a7dd4d 100644
--- a/runtime/gc/space/region_space.h
+++ b/runtime/gc/space/region_space.h
@@ -370,9 +370,6 @@
  private:
   RegionSpace(const std::string& name, MemMap&& mem_map);
 
-  template<bool kToSpaceOnly, typename Visitor>
-  ALWAYS_INLINE void WalkInternal(Visitor&& visitor) NO_THREAD_SAFETY_ANALYSIS;
-
   class Region {
    public:
     Region()
@@ -616,6 +613,8 @@
       DCHECK_LE(Top(), end_);
     }
 
+    uint64_t GetLongestConsecutiveFreeBytes() const;
+
    private:
     size_t idx_;                        // The region's index in the region space.
     size_t live_bytes_;                 // The live bytes. Used to compute the live percent.
@@ -640,6 +639,14 @@
     friend class RegionSpace;
   };
 
+  template<bool kToSpaceOnly, typename Visitor>
+  ALWAYS_INLINE void WalkInternal(Visitor&& visitor) NO_THREAD_SAFETY_ANALYSIS;
+
+  // The visitor is invoked on objects in increasing address order.
+  template<typename Visitor>
+  ALWAYS_INLINE void WalkNonLargeRegion(Visitor&& visitor, const Region* r)
+      NO_THREAD_SAFETY_ANALYSIS;
+
   Region* RefToRegion(mirror::Object* ref) REQUIRES(!region_lock_) {
     MutexLock mu(Thread::Current(), region_lock_);
     return RefToRegionLocked(ref);