Use collector-specific helper classes

Changed the collectors' helper visitor classes to inner classes, replacing the long friend-class lists with private member-class declarations. Also converted some single-use visitors to lambdas.

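For illustration, a minimal sketch of the two patterns used throughout
this change (hypothetical Collector and ScanObject names, not the actual
ART types):

    // collector.h: forward-declare the helper as a private member type
    // instead of befriending a top-level class.
    class Collector {
     private:
      class ScanVisitor;  // Defined in collector.cc; sees private members.
      void ScanObject(mirror::Object* obj);
    };

    // collector.cc: define the helper under its qualified name.
    class Collector::ScanVisitor {
     public:
      explicit ScanVisitor(Collector* collector) : collector_(collector) {}
      void operator()(mirror::Object* obj) const {
        collector_->ScanObject(obj);
      }
     private:
      Collector* const collector_;
    };

    // Single-use visitors become lambdas at the call site instead:
    bitmap->VisitMarkedRange(begin, end,
                             [this](mirror::Object* obj) { ScanObject(obj); });

This keeps the helpers out of the gc::collector namespace and lets them
drop their collector-name prefixes.
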
Bug: 29413717
Bug: 19534862

Change-Id: I631c8bfe5f795eda4623c5bb4f357f2dd12358e2
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index 3f8f628..dd75006 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -192,7 +192,7 @@
 }
 
 // Used to switch the thread roots of a thread from from-space refs to to-space refs.
-class ThreadFlipVisitor : public Closure {
+class ConcurrentCopying::ThreadFlipVisitor : public Closure {
  public:
   ThreadFlipVisitor(ConcurrentCopying* concurrent_copying, bool use_tlab)
       : concurrent_copying_(concurrent_copying), use_tlab_(use_tlab) {
@@ -229,7 +229,7 @@
 };
 
 // Called back from Runtime::FlipThreadRoots() during a pause.
-class FlipCallback : public Closure {
+class ConcurrentCopying::FlipCallback : public Closure {
  public:
   explicit FlipCallback(ConcurrentCopying* concurrent_copying)
       : concurrent_copying_(concurrent_copying) {
@@ -304,10 +304,9 @@
 }
 
 // Used to visit objects in the immune spaces.
-class ConcurrentCopyingImmuneSpaceObjVisitor {
+class ConcurrentCopying::ImmuneSpaceObjVisitor {
  public:
-  explicit ConcurrentCopyingImmuneSpaceObjVisitor(ConcurrentCopying* cc)
-      : collector_(cc) {}
+  explicit ImmuneSpaceObjVisitor(ConcurrentCopying* cc) : collector_(cc) {}
 
   void operator()(mirror::Object* obj) const SHARED_REQUIRES(Locks::mutator_lock_)
       SHARED_REQUIRES(Locks::heap_bitmap_lock_) {
@@ -388,7 +387,7 @@
   for (auto& space : immune_spaces_.GetSpaces()) {
     DCHECK(space->IsImageSpace() || space->IsZygoteSpace());
     accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap();
-    ConcurrentCopyingImmuneSpaceObjVisitor visitor(this);
+    ImmuneSpaceObjVisitor visitor(this);
     live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
                                   reinterpret_cast<uintptr_t>(space->Limit()),
                                   visitor);
@@ -487,7 +486,7 @@
   Runtime::Current()->BroadcastForNewSystemWeaks();
 }
 
-class DisableMarkingCheckpoint : public Closure {
+class ConcurrentCopying::DisableMarkingCheckpoint : public Closure {
  public:
   explicit DisableMarkingCheckpoint(ConcurrentCopying* concurrent_copying)
       : concurrent_copying_(concurrent_copying) {
@@ -683,9 +682,9 @@
 
 // The following visitors are used to verify that there's no references to the from-space left after
 // marking.
-class ConcurrentCopyingVerifyNoFromSpaceRefsVisitor : public SingleRootVisitor {
+class ConcurrentCopying::VerifyNoFromSpaceRefsVisitor : public SingleRootVisitor {
  public:
-  explicit ConcurrentCopyingVerifyNoFromSpaceRefsVisitor(ConcurrentCopying* collector)
+  explicit VerifyNoFromSpaceRefsVisitor(ConcurrentCopying* collector)
       : collector_(collector) {}
 
   void operator()(mirror::Object* ref) const
@@ -712,16 +711,16 @@
   ConcurrentCopying* const collector_;
 };
 
-class ConcurrentCopyingVerifyNoFromSpaceRefsFieldVisitor {
+class ConcurrentCopying::VerifyNoFromSpaceRefsFieldVisitor {
  public:
-  explicit ConcurrentCopyingVerifyNoFromSpaceRefsFieldVisitor(ConcurrentCopying* collector)
+  explicit VerifyNoFromSpaceRefsFieldVisitor(ConcurrentCopying* collector)
       : collector_(collector) {}
 
   void operator()(mirror::Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
       SHARED_REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE {
     mirror::Object* ref =
         obj->GetFieldObject<mirror::Object, kDefaultVerifyFlags, kWithoutReadBarrier>(offset);
-    ConcurrentCopyingVerifyNoFromSpaceRefsVisitor visitor(collector_);
+    VerifyNoFromSpaceRefsVisitor visitor(collector_);
     visitor(ref);
   }
   void operator()(mirror::Class* klass, mirror::Reference* ref) const
@@ -739,7 +738,7 @@
 
   void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
       SHARED_REQUIRES(Locks::mutator_lock_) {
-    ConcurrentCopyingVerifyNoFromSpaceRefsVisitor visitor(collector_);
+    VerifyNoFromSpaceRefsVisitor visitor(collector_);
     visitor(root->AsMirrorPtr());
   }
 
@@ -747,9 +746,9 @@
   ConcurrentCopying* const collector_;
 };
 
-class ConcurrentCopyingVerifyNoFromSpaceRefsObjectVisitor {
+class ConcurrentCopying::VerifyNoFromSpaceRefsObjectVisitor {
  public:
-  explicit ConcurrentCopyingVerifyNoFromSpaceRefsObjectVisitor(ConcurrentCopying* collector)
+  explicit VerifyNoFromSpaceRefsObjectVisitor(ConcurrentCopying* collector)
       : collector_(collector) {}
   void operator()(mirror::Object* obj) const
       SHARED_REQUIRES(Locks::mutator_lock_) {
@@ -761,7 +760,7 @@
     ConcurrentCopying* collector = reinterpret_cast<ConcurrentCopying*>(arg);
     space::RegionSpace* region_space = collector->RegionSpace();
     CHECK(!region_space->IsInFromSpace(obj)) << "Scanning object " << obj << " in from space";
-    ConcurrentCopyingVerifyNoFromSpaceRefsFieldVisitor visitor(collector);
+    VerifyNoFromSpaceRefsFieldVisitor visitor(collector);
     obj->VisitReferences(visitor, visitor);
     if (kUseBakerReadBarrier) {
       CHECK(obj->GetReadBarrierPointer() == ReadBarrier::WhitePtr())
@@ -785,16 +784,15 @@
       CHECK(!thread->GetIsGcMarking());
     }
   }
-  ConcurrentCopyingVerifyNoFromSpaceRefsObjectVisitor visitor(this);
+  VerifyNoFromSpaceRefsObjectVisitor visitor(this);
   // Roots.
   {
     ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
-    ConcurrentCopyingVerifyNoFromSpaceRefsVisitor ref_visitor(this);
+    VerifyNoFromSpaceRefsVisitor ref_visitor(this);
     Runtime::Current()->VisitRoots(&ref_visitor);
   }
   // The to-space.
-  region_space_->WalkToSpace(ConcurrentCopyingVerifyNoFromSpaceRefsObjectVisitor::ObjectCallback,
-                             this);
+  region_space_->WalkToSpace(VerifyNoFromSpaceRefsObjectVisitor::ObjectCallback, this);
   // Non-moving spaces.
   {
     WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
@@ -802,7 +800,7 @@
   }
   // The alloc stack.
   {
-    ConcurrentCopyingVerifyNoFromSpaceRefsVisitor ref_visitor(this);
+    VerifyNoFromSpaceRefsVisitor ref_visitor(this);
     for (auto* it = heap_->allocation_stack_->Begin(), *end = heap_->allocation_stack_->End();
         it < end; ++it) {
       mirror::Object* const obj = it->AsMirrorPtr();
@@ -817,9 +815,9 @@
 }
 
 // The following visitors are used to assert the to-space invariant.
-class ConcurrentCopyingAssertToSpaceInvariantRefsVisitor {
+class ConcurrentCopying::AssertToSpaceInvariantRefsVisitor {
  public:
-  explicit ConcurrentCopyingAssertToSpaceInvariantRefsVisitor(ConcurrentCopying* collector)
+  explicit AssertToSpaceInvariantRefsVisitor(ConcurrentCopying* collector)
       : collector_(collector) {}
 
   void operator()(mirror::Object* ref) const
@@ -835,16 +833,16 @@
   ConcurrentCopying* const collector_;
 };
 
-class ConcurrentCopyingAssertToSpaceInvariantFieldVisitor {
+class ConcurrentCopying::AssertToSpaceInvariantFieldVisitor {
  public:
-  explicit ConcurrentCopyingAssertToSpaceInvariantFieldVisitor(ConcurrentCopying* collector)
+  explicit AssertToSpaceInvariantFieldVisitor(ConcurrentCopying* collector)
       : collector_(collector) {}
 
   void operator()(mirror::Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
       SHARED_REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE {
     mirror::Object* ref =
         obj->GetFieldObject<mirror::Object, kDefaultVerifyFlags, kWithoutReadBarrier>(offset);
-    ConcurrentCopyingAssertToSpaceInvariantRefsVisitor visitor(collector_);
+    AssertToSpaceInvariantRefsVisitor visitor(collector_);
     visitor(ref);
   }
   void operator()(mirror::Class* klass, mirror::Reference* ref ATTRIBUTE_UNUSED) const
@@ -861,7 +859,7 @@
 
   void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
       SHARED_REQUIRES(Locks::mutator_lock_) {
-    ConcurrentCopyingAssertToSpaceInvariantRefsVisitor visitor(collector_);
+    AssertToSpaceInvariantRefsVisitor visitor(collector_);
     visitor(root->AsMirrorPtr());
   }
 
@@ -869,9 +867,9 @@
   ConcurrentCopying* const collector_;
 };
 
-class ConcurrentCopyingAssertToSpaceInvariantObjectVisitor {
+class ConcurrentCopying::AssertToSpaceInvariantObjectVisitor {
  public:
-  explicit ConcurrentCopyingAssertToSpaceInvariantObjectVisitor(ConcurrentCopying* collector)
+  explicit AssertToSpaceInvariantObjectVisitor(ConcurrentCopying* collector)
       : collector_(collector) {}
   void operator()(mirror::Object* obj) const
       SHARED_REQUIRES(Locks::mutator_lock_) {
@@ -884,7 +882,7 @@
     space::RegionSpace* region_space = collector->RegionSpace();
     CHECK(!region_space->IsInFromSpace(obj)) << "Scanning object " << obj << " in from space";
     collector->AssertToSpaceInvariant(nullptr, MemberOffset(0), obj);
-    ConcurrentCopyingAssertToSpaceInvariantFieldVisitor visitor(collector);
+    AssertToSpaceInvariantFieldVisitor visitor(collector);
     obj->VisitReferences(visitor, visitor);
   }
 
@@ -892,7 +890,7 @@
   ConcurrentCopying* const collector_;
 };
 
-class RevokeThreadLocalMarkStackCheckpoint : public Closure {
+class ConcurrentCopying::RevokeThreadLocalMarkStackCheckpoint : public Closure {
  public:
   RevokeThreadLocalMarkStackCheckpoint(ConcurrentCopying* concurrent_copying,
                                        bool disable_weak_ref_access)
@@ -1112,7 +1110,7 @@
     region_space_->AddLiveBytes(to_ref, alloc_size);
   }
   if (ReadBarrier::kEnableToSpaceInvariantChecks || kIsDebugBuild) {
-    ConcurrentCopyingAssertToSpaceInvariantObjectVisitor visitor(this);
+    AssertToSpaceInvariantObjectVisitor visitor(this);
     visitor(to_ref);
   }
 }
@@ -1484,9 +1482,9 @@
 }
 
 // Used to scan ref fields of an object.
-class ConcurrentCopyingRefFieldsVisitor {
+class ConcurrentCopying::RefFieldsVisitor {
  public:
-  explicit ConcurrentCopyingRefFieldsVisitor(ConcurrentCopying* collector)
+  explicit RefFieldsVisitor(ConcurrentCopying* collector)
       : collector_(collector) {}
 
   void operator()(mirror::Object* obj, MemberOffset offset, bool /* is_static */)
@@ -1522,7 +1520,7 @@
 // Scan ref fields of an object.
 inline void ConcurrentCopying::Scan(mirror::Object* to_ref) {
   DCHECK(!region_space_->IsInFromSpace(to_ref));
-  ConcurrentCopyingRefFieldsVisitor visitor(this);
+  RefFieldsVisitor visitor(this);
   // Disable the read barrier for a performance reason.
   to_ref->VisitReferences</*kVisitNativeRoots*/true, kDefaultVerifyFlags, kWithoutReadBarrier>(
       visitor, visitor);
diff --git a/runtime/gc/collector/concurrent_copying.h b/runtime/gc/collector/concurrent_copying.h
index afdc0f1..a986a7a 100644
--- a/runtime/gc/collector/concurrent_copying.h
+++ b/runtime/gc/collector/concurrent_copying.h
@@ -243,16 +243,21 @@
   accounting::ReadBarrierTable* rb_table_;
   bool force_evacuate_all_;  // True if all regions are evacuated.
 
-  friend class ConcurrentCopyingRefFieldsVisitor;
-  friend class ConcurrentCopyingImmuneSpaceObjVisitor;
-  friend class ConcurrentCopyingVerifyNoFromSpaceRefsVisitor;
-  friend class ConcurrentCopyingVerifyNoFromSpaceRefsObjectVisitor;
-  friend class ConcurrentCopyingClearBlackPtrsVisitor;
-  friend class ConcurrentCopyingLostCopyVisitor;
-  friend class ThreadFlipVisitor;
-  friend class FlipCallback;
-  friend class ConcurrentCopyingComputeUnevacFromSpaceLiveRatioVisitor;
-  friend class RevokeThreadLocalMarkStackCheckpoint;
+  class AssertToSpaceInvariantFieldVisitor;
+  class AssertToSpaceInvariantObjectVisitor;
+  class AssertToSpaceInvariantRefsVisitor;
+  class ClearBlackPtrsVisitor;
+  class ComputeUnevacFromSpaceLiveRatioVisitor;
+  class DisableMarkingCheckpoint;
+  class FlipCallback;
+  class ImmuneSpaceObjVisitor;
+  class LostCopyVisitor;
+  class RefFieldsVisitor;
+  class RevokeThreadLocalMarkStackCheckpoint;
+  class ThreadFlipVisitor;
+  class VerifyNoFromSpaceRefsFieldVisitor;
+  class VerifyNoFromSpaceRefsObjectVisitor;
+  class VerifyNoFromSpaceRefsVisitor;
 
   DISALLOW_IMPLICIT_CONSTRUCTORS(ConcurrentCopying);
 };
diff --git a/runtime/gc/collector/mark_compact.cc b/runtime/gc/collector/mark_compact.cc
index 61a9c9c..43482eb 100644
--- a/runtime/gc/collector/mark_compact.cc
+++ b/runtime/gc/collector/mark_compact.cc
@@ -52,8 +52,9 @@
 
 MarkCompact::MarkCompact(Heap* heap, const std::string& name_prefix)
     : GarbageCollector(heap, name_prefix + (name_prefix.empty() ? "" : " ") + "mark compact"),
-      space_(nullptr), collector_name_(name_), updating_references_(false) {
-}
+      space_(nullptr),
+      collector_name_(name_),
+      updating_references_(false) {}
 
 void MarkCompact::RunPhases() {
   Thread* self = Thread::Current();
@@ -85,30 +86,20 @@
   ++live_objects_in_space_;
 }
 
-class CalculateObjectForwardingAddressVisitor {
- public:
-  explicit CalculateObjectForwardingAddressVisitor(MarkCompact* collector)
-      : collector_(collector) {}
-  void operator()(mirror::Object* obj) const REQUIRES(Locks::mutator_lock_,
-                                                                      Locks::heap_bitmap_lock_) {
-    DCHECK_ALIGNED(obj, space::BumpPointerSpace::kAlignment);
-    DCHECK(collector_->IsMarked(obj) != nullptr);
-    collector_->ForwardObject(obj);
-  }
-
- private:
-  MarkCompact* const collector_;
-};
 
 void MarkCompact::CalculateObjectForwardingAddresses() {
   TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
   // The bump pointer in the space where the next forwarding address will be.
   bump_pointer_ = reinterpret_cast<uint8_t*>(space_->Begin());
   // Visit all the marked objects in the bitmap.
-  CalculateObjectForwardingAddressVisitor visitor(this);
   objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                                reinterpret_cast<uintptr_t>(space_->End()),
-                                               visitor);
+                                               [this](mirror::Object* obj)
+      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
+    DCHECK_ALIGNED(obj, space::BumpPointerSpace::kAlignment);
+    DCHECK(IsMarked(obj) != nullptr);
+    ForwardObject(obj);
+  });
 }
 
 void MarkCompact::InitializePhase() {
@@ -129,17 +120,6 @@
       false, GetTimings(), GetCurrentIteration()->GetClearSoftReferences(), this);
 }
 
-class MCBitmapSetSlowPathVisitor {
- public:
-  void operator()(const mirror::Object* obj) const SHARED_REQUIRES(Locks::mutator_lock_) {
-    // Marking a large object, make sure its aligned as a sanity check.
-    if (!IsAligned<kPageSize>(obj)) {
-      Runtime::Current()->GetHeap()->DumpSpaces(LOG(ERROR));
-      LOG(FATAL) << obj;
-    }
-  }
-};
-
 inline mirror::Object* MarkCompact::MarkObject(mirror::Object* obj) {
   if (obj == nullptr) {
     return nullptr;
@@ -155,8 +135,15 @@
       }
     } else {
       DCHECK(!space_->HasAddress(obj));
-      MCBitmapSetSlowPathVisitor visitor;
-      if (!mark_bitmap_->Set(obj, visitor)) {
+      auto slow_path = [this](const mirror::Object* ref)
+          SHARED_REQUIRES(Locks::mutator_lock_) {
+        // Marking a large object, make sure it's aligned as a sanity check.
+        if (!IsAligned<kPageSize>(ref)) {
+          Runtime::Current()->GetHeap()->DumpSpaces(LOG(ERROR));
+          LOG(FATAL) << ref;
+        }
+      };
+      if (!mark_bitmap_->Set(obj, slow_path)) {
         // This object was not previously marked.
         MarkStackPush(obj);
       }
@@ -296,10 +283,9 @@
   }
 }
 
-class UpdateRootVisitor : public RootVisitor {
+class MarkCompact::UpdateRootVisitor : public RootVisitor {
  public:
-  explicit UpdateRootVisitor(MarkCompact* collector) : collector_(collector) {
-  }
+  explicit UpdateRootVisitor(MarkCompact* collector) : collector_(collector) {}
 
   void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
       OVERRIDE REQUIRES(Locks::mutator_lock_)
@@ -332,10 +318,10 @@
   MarkCompact* const collector_;
 };
 
-class UpdateObjectReferencesVisitor {
+class MarkCompact::UpdateObjectReferencesVisitor {
  public:
-  explicit UpdateObjectReferencesVisitor(MarkCompact* collector) : collector_(collector) {
-  }
+  explicit UpdateObjectReferencesVisitor(MarkCompact* collector) : collector_(collector) {}
+
   void operator()(mirror::Object* obj) const SHARED_REQUIRES(Locks::heap_bitmap_lock_)
           REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE {
     collector_->UpdateObjectReferences(obj);
@@ -423,10 +409,9 @@
   }
 }
 
-class UpdateReferenceVisitor {
+class MarkCompact::UpdateReferenceVisitor {
  public:
-  explicit UpdateReferenceVisitor(MarkCompact* collector) : collector_(collector) {
-  }
+  explicit UpdateReferenceVisitor(MarkCompact* collector) : collector_(collector) {}
 
   void operator()(mirror::Object* obj, MemberOffset offset, bool /*is_static*/) const
       ALWAYS_INLINE REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
@@ -501,19 +486,6 @@
   return space != space_ && !immune_spaces_.ContainsSpace(space);
 }
 
-class MoveObjectVisitor {
- public:
-  explicit MoveObjectVisitor(MarkCompact* collector) : collector_(collector) {
-  }
-  void operator()(mirror::Object* obj) const SHARED_REQUIRES(Locks::heap_bitmap_lock_)
-          REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE {
-      collector_->MoveObject(obj, obj->SizeOf());
-  }
-
- private:
-  MarkCompact* const collector_;
-};
-
 void MarkCompact::MoveObject(mirror::Object* obj, size_t len) {
   // Look at the forwarding address stored in the lock word to know where to copy.
   DCHECK(space_->HasAddress(obj)) << obj;
@@ -534,10 +506,13 @@
 void MarkCompact::MoveObjects() {
   TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
   // Move the objects in the before forwarding bitmap.
-  MoveObjectVisitor visitor(this);
   objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                                reinterpret_cast<uintptr_t>(space_->End()),
-                                               visitor);
+                                               [this](mirror::Object* obj)
+      SHARED_REQUIRES(Locks::heap_bitmap_lock_)
+      REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE {
+    MoveObject(obj, obj->SizeOf());
+  });
   CHECK(lock_words_to_restore_.empty());
 }
 
@@ -572,10 +547,9 @@
   heap_->GetReferenceProcessor()->DelayReferenceReferent(klass, reference, this);
 }
 
-class MarkCompactMarkObjectVisitor {
+class MarkCompact::MarkObjectVisitor {
  public:
-  explicit MarkCompactMarkObjectVisitor(MarkCompact* collector) : collector_(collector) {
-  }
+  explicit MarkObjectVisitor(MarkCompact* collector) : collector_(collector) {}
 
   void operator()(mirror::Object* obj, MemberOffset offset, bool /*is_static*/) const ALWAYS_INLINE
       REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
@@ -608,7 +582,7 @@
 
 // Visit all of the references of an object and update.
 void MarkCompact::ScanObject(mirror::Object* obj) {
-  MarkCompactMarkObjectVisitor visitor(this);
+  MarkObjectVisitor visitor(this);
   obj->VisitReferences(visitor, visitor);
 }
 
diff --git a/runtime/gc/collector/mark_compact.h b/runtime/gc/collector/mark_compact.h
index 4831157..16abfb7 100644
--- a/runtime/gc/collector/mark_compact.h
+++ b/runtime/gc/collector/mark_compact.h
@@ -222,13 +222,10 @@
   bool updating_references_;
 
  private:
-  friend class BitmapSetSlowPathVisitor;
-  friend class CalculateObjectForwardingAddressVisitor;
-  friend class MarkCompactMarkObjectVisitor;
-  friend class MoveObjectVisitor;
-  friend class UpdateObjectReferencesVisitor;
-  friend class UpdateReferenceVisitor;
-  friend class UpdateRootVisitor;
+  class MarkObjectVisitor;
+  class UpdateObjectReferencesVisitor;
+  class UpdateReferenceVisitor;
+  class UpdateRootVisitor;
 
   DISALLOW_IMPLICIT_CONSTRUCTORS(MarkCompact);
 };
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc
index ac5931f..9f54f1c 100644
--- a/runtime/gc/collector/mark_sweep.cc
+++ b/runtime/gc/collector/mark_sweep.cc
@@ -266,7 +266,7 @@
   PreCleanCards();
 }
 
-class ScanObjectVisitor {
+class MarkSweep::ScanObjectVisitor {
  public:
   explicit ScanObjectVisitor(MarkSweep* const mark_sweep) ALWAYS_INLINE
       : mark_sweep_(mark_sweep) {}
@@ -393,12 +393,14 @@
   return IsMarked(ref->AsMirrorPtr());
 }
 
-class MarkSweepMarkObjectSlowPath {
+class MarkSweep::MarkObjectSlowPath {
  public:
-  explicit MarkSweepMarkObjectSlowPath(MarkSweep* mark_sweep,
-                                       mirror::Object* holder = nullptr,
-                                       MemberOffset offset = MemberOffset(0))
-      : mark_sweep_(mark_sweep), holder_(holder), offset_(offset) {}
+  explicit MarkObjectSlowPath(MarkSweep* mark_sweep,
+                              mirror::Object* holder = nullptr,
+                              MemberOffset offset = MemberOffset(0))
+      : mark_sweep_(mark_sweep),
+        holder_(holder),
+        offset_(offset) {}
 
   void operator()(const mirror::Object* obj) const NO_THREAD_SAFETY_ANALYSIS {
     if (kProfileLargeObjects) {
@@ -480,7 +482,7 @@
     if (kCountMarkedObjects) {
       ++mark_slowpath_count_;
     }
-    MarkSweepMarkObjectSlowPath visitor(this, holder, offset);
+    MarkObjectSlowPath visitor(this, holder, offset);
     // TODO: We already know that the object is not in the current_space_bitmap_ but MarkBitmap::Set
     // will check again.
     if (!mark_bitmap_->Set(obj, visitor)) {
@@ -515,7 +517,7 @@
   if (LIKELY(object_bitmap->HasAddress(obj))) {
     return !object_bitmap->AtomicTestAndSet(obj);
   }
-  MarkSweepMarkObjectSlowPath visitor(this);
+  MarkObjectSlowPath visitor(this);
   return !mark_bitmap_->AtomicTestAndSet(obj, visitor);
 }
 
@@ -534,7 +536,7 @@
   }
 }
 
-class VerifyRootMarkedVisitor : public SingleRootVisitor {
+class MarkSweep::VerifyRootMarkedVisitor : public SingleRootVisitor {
  public:
   explicit VerifyRootMarkedVisitor(MarkSweep* collector) : collector_(collector) { }
 
@@ -563,7 +565,7 @@
   }
 }
 
-class VerifyRootVisitor : public SingleRootVisitor {
+class MarkSweep::VerifyRootVisitor : public SingleRootVisitor {
  public:
   void VisitRoot(mirror::Object* root, const RootInfo& info) OVERRIDE
       SHARED_REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
@@ -610,7 +612,7 @@
       this, static_cast<VisitRootFlags>(flags | kVisitRootFlagNonMoving));
 }
 
-class DelayReferenceReferentVisitor {
+class MarkSweep::DelayReferenceReferentVisitor {
  public:
   explicit DelayReferenceReferentVisitor(MarkSweep* collector) : collector_(collector) {}
 
@@ -625,7 +627,7 @@
 };
 
 template <bool kUseFinger = false>
-class MarkStackTask : public Task {
+class MarkSweep::MarkStackTask : public Task {
  public:
   MarkStackTask(ThreadPool* thread_pool,
                 MarkSweep* mark_sweep,
@@ -783,7 +785,7 @@
   }
 };
 
-class CardScanTask : public MarkStackTask<false> {
+class MarkSweep::CardScanTask : public MarkStackTask<false> {
  public:
   CardScanTask(ThreadPool* thread_pool,
                MarkSweep* mark_sweep,
@@ -948,7 +950,7 @@
   }
 }
 
-class RecursiveMarkTask : public MarkStackTask<false> {
+class MarkSweep::RecursiveMarkTask : public MarkStackTask<false> {
  public:
   RecursiveMarkTask(ThreadPool* thread_pool,
                     MarkSweep* mark_sweep,
@@ -1061,7 +1063,7 @@
   Runtime::Current()->SweepSystemWeaks(this);
 }
 
-class VerifySystemWeakVisitor : public IsMarkedVisitor {
+class MarkSweep::VerifySystemWeakVisitor : public IsMarkedVisitor {
  public:
   explicit VerifySystemWeakVisitor(MarkSweep* mark_sweep) : mark_sweep_(mark_sweep) {}
 
@@ -1090,7 +1092,7 @@
   Runtime::Current()->SweepSystemWeaks(&visitor);
 }
 
-class CheckpointMarkThreadRoots : public Closure, public RootVisitor {
+class MarkSweep::CheckpointMarkThreadRoots : public Closure, public RootVisitor {
  public:
   CheckpointMarkThreadRoots(MarkSweep* mark_sweep,
                             bool revoke_ros_alloc_thread_local_buffers_at_checkpoint)
diff --git a/runtime/gc/collector/mark_sweep.h b/runtime/gc/collector/mark_sweep.h
index 7168f96..9747031 100644
--- a/runtime/gc/collector/mark_sweep.h
+++ b/runtime/gc/collector/mark_sweep.h
@@ -353,17 +353,17 @@
   std::unique_ptr<MemMap> sweep_array_free_buffer_mem_map_;
 
  private:
-  friend class CardScanTask;
-  friend class CheckBitmapVisitor;
-  friend class CheckReferenceVisitor;
-  friend class CheckpointMarkThreadRoots;
-  friend class Heap;
-  friend class FifoMarkStackChunk;
-  friend class MarkObjectVisitor;
-  template<bool kUseFinger> friend class MarkStackTask;
-  friend class MarkSweepMarkObjectSlowPath;
-  friend class VerifyRootMarkedVisitor;
-  friend class VerifyRootVisitor;
+  class CardScanTask;
+  class CheckpointMarkThreadRoots;
+  class DelayReferenceReferentVisitor;
+  class MarkObjectSlowPath;
+  template<bool kUseFinger> class MarkStackTask;
+  class RecursiveMarkTask;
+  class ScanObjectParallelVisitor;
+  class ScanObjectVisitor;
+  class VerifyRootMarkedVisitor;
+  class VerifyRootVisitor;
+  class VerifySystemWeakVisitor;
 
   DISALLOW_IMPLICIT_CONSTRUCTORS(MarkSweep);
 };
diff --git a/runtime/gc/collector/semi_space-inl.h b/runtime/gc/collector/semi_space-inl.h
index e87b5ff..78fb2d2 100644
--- a/runtime/gc/collector/semi_space-inl.h
+++ b/runtime/gc/collector/semi_space-inl.h
@@ -26,21 +26,6 @@
 namespace gc {
 namespace collector {
 
-class BitmapSetSlowPathVisitor {
- public:
-  explicit BitmapSetSlowPathVisitor(SemiSpace* semi_space) : semi_space_(semi_space) {
-  }
-
-  void operator()(const mirror::Object* obj) const {
-    CHECK(!semi_space_->to_space_->HasAddress(obj)) << "Marking " << obj << " in to_space_";
-    // Marking a large object, make sure its aligned as a sanity check.
-    CHECK_ALIGNED(obj, kPageSize);
-  }
-
- private:
-  SemiSpace* const semi_space_;
-};
-
 inline mirror::Object* SemiSpace::GetForwardingAddressInFromSpace(mirror::Object* obj) const {
   DCHECK(from_space_->HasAddress(obj));
   LockWord lock_word = obj->GetLockWord(false);
@@ -76,8 +61,12 @@
     obj_ptr->Assign(forward_address);
   } else if (!collect_from_space_only_ && !immune_spaces_.IsInImmuneRegion(obj)) {
     DCHECK(!to_space_->HasAddress(obj)) << "Tried to mark " << obj << " in to-space";
-    BitmapSetSlowPathVisitor visitor(this);
-    if (!mark_bitmap_->Set(obj, visitor)) {
+    auto slow_path = [this](const mirror::Object* ref) {
+      CHECK(!to_space_->HasAddress(ref)) << "Marking " << ref << " in to_space_";
+      // Marking a large object, make sure it's aligned as a sanity check.
+      CHECK_ALIGNED(ref, kPageSize);
+    };
+    if (!mark_bitmap_->Set(obj, slow_path)) {
       // This object was not previously marked.
       MarkStackPush(obj);
     }
diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc
index f37daa5..7a4c025 100644
--- a/runtime/gc/collector/semi_space.cc
+++ b/runtime/gc/collector/semi_space.cc
@@ -282,22 +282,11 @@
   }
 }
 
-class SemiSpaceScanObjectVisitor {
- public:
-  explicit SemiSpaceScanObjectVisitor(SemiSpace* ss) : semi_space_(ss) {}
-  void operator()(Object* obj) const REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
-    DCHECK(obj != nullptr);
-    semi_space_->ScanObject(obj);
-  }
- private:
-  SemiSpace* const semi_space_;
-};
-
 // Used to verify that there's no references to the from-space.
-class SemiSpaceVerifyNoFromSpaceReferencesVisitor {
+class SemiSpace::VerifyNoFromSpaceReferencesVisitor {
  public:
-  explicit SemiSpaceVerifyNoFromSpaceReferencesVisitor(space::ContinuousMemMapAllocSpace* from_space) :
-      from_space_(from_space) {}
+  explicit VerifyNoFromSpaceReferencesVisitor(space::ContinuousMemMapAllocSpace* from_space)
+      : from_space_(from_space) {}
 
   void operator()(Object* obj, MemberOffset offset, bool /* is_static */) const
       SHARED_REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE {
@@ -331,23 +320,10 @@
 
 void SemiSpace::VerifyNoFromSpaceReferences(Object* obj) {
   DCHECK(!from_space_->HasAddress(obj)) << "Scanning object " << obj << " in from space";
-  SemiSpaceVerifyNoFromSpaceReferencesVisitor visitor(from_space_);
+  VerifyNoFromSpaceReferencesVisitor visitor(from_space_);
   obj->VisitReferences(visitor, VoidFunctor());
 }
 
-class SemiSpaceVerifyNoFromSpaceReferencesObjectVisitor {
- public:
-  explicit SemiSpaceVerifyNoFromSpaceReferencesObjectVisitor(SemiSpace* ss) : semi_space_(ss) {}
-  void operator()(Object* obj) const
-      SHARED_REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
-    DCHECK(obj != nullptr);
-    semi_space_->VerifyNoFromSpaceReferences(obj);
-  }
-
- private:
-  SemiSpace* const semi_space_;
-};
-
 void SemiSpace::MarkReachableObjects() {
   TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
   {
@@ -390,10 +366,12 @@
       } else {
         TimingLogger::ScopedTiming t2("VisitLiveBits", GetTimings());
         accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap();
-        SemiSpaceScanObjectVisitor visitor(this);
         live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
                                       reinterpret_cast<uintptr_t>(space->End()),
-                                      visitor);
+                                      [this](mirror::Object* obj)
+           REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
+          ScanObject(obj);
+        });
       }
       if (kIsDebugBuild) {
         // Verify that there are no from-space references that
@@ -401,10 +379,13 @@
         // card table) didn't miss any from-space references in the
         // space.
         accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap();
-        SemiSpaceVerifyNoFromSpaceReferencesObjectVisitor visitor(this);
         live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
                                       reinterpret_cast<uintptr_t>(space->End()),
-                                      visitor);
+                                      [this](Object* obj)
+            SHARED_REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
+          DCHECK(obj != nullptr);
+          VerifyNoFromSpaceReferences(obj);
+        });
       }
     }
   }
@@ -424,10 +405,12 @@
     // classes (primitive array classes) that could move though they
     // don't contain any other references.
     accounting::LargeObjectBitmap* large_live_bitmap = los->GetLiveBitmap();
-    SemiSpaceScanObjectVisitor visitor(this);
     large_live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(los->Begin()),
                                         reinterpret_cast<uintptr_t>(los->End()),
-                                        visitor);
+                                        [this](mirror::Object* obj)
+        REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
+      ScanObject(obj);
+    });
   }
   // Recursively process the mark stack.
   ProcessMarkStack();
@@ -697,10 +680,9 @@
   heap_->GetReferenceProcessor()->DelayReferenceReferent(klass, reference, this);
 }
 
-class SemiSpaceMarkObjectVisitor {
+class SemiSpace::MarkObjectVisitor {
  public:
-  explicit SemiSpaceMarkObjectVisitor(SemiSpace* collector) : collector_(collector) {
-  }
+  explicit MarkObjectVisitor(SemiSpace* collector) : collector_(collector) {}
 
   void operator()(Object* obj, MemberOffset offset, bool /* is_static */) const ALWAYS_INLINE
       REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
@@ -739,7 +721,7 @@
 // Visit all of the references of an object and update.
 void SemiSpace::ScanObject(Object* obj) {
   DCHECK(!from_space_->HasAddress(obj)) << "Scanning object " << obj << " in from space";
-  SemiSpaceMarkObjectVisitor visitor(this);
+  MarkObjectVisitor visitor(this);
   obj->VisitReferences(visitor, visitor);
 }
 
diff --git a/runtime/gc/collector/semi_space.h b/runtime/gc/collector/semi_space.h
index 0199e1a..694e536 100644
--- a/runtime/gc/collector/semi_space.h
+++ b/runtime/gc/collector/semi_space.h
@@ -272,7 +272,9 @@
   bool swap_semi_spaces_;
 
  private:
-  friend class BitmapSetSlowPathVisitor;
+  class BitmapSetSlowPathVisitor;
+  class MarkObjectVisitor;
+  class VerifyNoFromSpaceReferencesVisitor;
   DISALLOW_IMPLICIT_CONSTRUCTORS(SemiSpace);
 };