Merge "Use mutator lock to guard adding and removing heap spaces"
diff --git a/compiler/image_test.cc b/compiler/image_test.cc
index b841675..b65fb36 100644
--- a/compiler/image_test.cc
+++ b/compiler/image_test.cc
@@ -181,7 +181,7 @@
     ASSERT_NE(0U, bitmap_section.Size());
 
     gc::Heap* heap = Runtime::Current()->GetHeap();
-    ASSERT_TRUE(!heap->GetContinuousSpaces().empty());
+    ASSERT_TRUE(heap->HaveContinuousSpaces());
     gc::space::ContinuousSpace* space = heap->GetNonMovingSpace();
    ASSERT_TRUE(space != nullptr);
    ASSERT_FALSE(space->IsImageSpace());
diff --git a/runtime/gc/accounting/mod_union_table_test.cc b/runtime/gc/accounting/mod_union_table_test.cc
index edab1b0..349d6ff 100644
--- a/runtime/gc/accounting/mod_union_table_test.cc
+++ b/runtime/gc/accounting/mod_union_table_test.cc
@@ -22,6 +22,7 @@
 #include "mirror/array-inl.h"
 #include "space_bitmap-inl.h"
 #include "thread-inl.h"
+#include "thread_list.h"
 
 namespace art {
 namespace gc {
@@ -184,7 +185,11 @@
   std::unique_ptr<space::DlMallocSpace> other_space(space::DlMallocSpace::Create(
       "other space", 128 * KB, 4 * MB, 4 * MB, nullptr, false));
   ASSERT_TRUE(other_space.get() != nullptr);
-  heap->AddSpace(other_space.get());
+  {
+    ScopedThreadSuspension sts(self, kSuspended);
+    ScopedSuspendAll ssa("Add other space");
+    heap->AddSpace(other_space.get());
+  }
   std::unique_ptr<ModUnionTable> table(ModUnionTableFactory::Create(
       type, space, other_space.get()));
   ASSERT_TRUE(table.get() != nullptr);
@@ -253,6 +258,8 @@
   std::ostringstream oss2;
   table->Dump(oss2);
   // Remove the space we added so it doesn't persist to the next test.
+  ScopedThreadSuspension sts(self, kSuspended);
+  ScopedSuspendAll ssa("Remove other space");
   heap->RemoveSpace(other_space.get());
 }
 
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index bcfcb89..af7acbc 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -2066,8 +2066,9 @@
 }
 
 void ConcurrentCopying::FinishPhase() {
+  Thread* const self = Thread::Current();
   {
-    MutexLock mu(Thread::Current(), mark_stack_lock_);
+    MutexLock mu(self, mark_stack_lock_);
     CHECK_EQ(pooled_mark_stacks_.size(), kMarkStackPoolSize);
   }
   region_space_ = nullptr;
@@ -2075,7 +2076,8 @@
     MutexLock mu(Thread::Current(), skipped_blocks_lock_);
     skipped_blocks_map_.clear();
   }
-  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
+  ReaderMutexLock mu(self, *Locks::mutator_lock_);
+  WriterMutexLock mu2(self, *Locks::heap_bitmap_lock_);
   heap_->ClearMarkedObjects();
 }
 
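Heap::ClearMarkedObjects() now carries SHARED_REQUIRES(Locks::mutator_lock_) because it iterates the guarded space list, so FinishPhase acquires the two locks in order: mutator lock shared, then heap bitmap lock exclusive. The shape of that ordering, as a sketch with a hypothetical helper:

  void ClearMarkedObjectsLocked(gc::Heap* heap) {
    Thread* const self = Thread::Current();
    // Mutator lock first (shared): guards continuous_spaces_.
    ReaderMutexLock mu(self, *Locks::mutator_lock_);
    // Then the heap bitmap lock (exclusive): guards the mark bitmaps.
    WriterMutexLock mu2(self, *Locks::heap_bitmap_lock_);
    heap->ClearMarkedObjects();
  }
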
diff --git a/runtime/gc/collector/garbage_collector.h b/runtime/gc/collector/garbage_collector.h
index 954c80e..580486a 100644
--- a/runtime/gc/collector/garbage_collector.h
+++ b/runtime/gc/collector/garbage_collector.h
@@ -153,7 +153,9 @@
   void ResetCumulativeStatistics() REQUIRES(!pause_histogram_lock_);
   // Swap the live and mark bitmaps of spaces that are active for the collector. For partial GC,
   // this is the allocation space, for full GC then we swap the zygote bitmaps too.
-  void SwapBitmaps() REQUIRES(Locks::heap_bitmap_lock_);
+  void SwapBitmaps()
+      REQUIRES(Locks::heap_bitmap_lock_)
+      SHARED_REQUIRES(Locks::mutator_lock_);
   uint64_t GetTotalPausedTimeNs() REQUIRES(!pause_histogram_lock_);
   int64_t GetTotalFreedBytes() const {
     return total_freed_bytes_;
diff --git a/runtime/gc/collector/mark_compact.h b/runtime/gc/collector/mark_compact.h
index 8a12094..4831157 100644
--- a/runtime/gc/collector/mark_compact.h
+++ b/runtime/gc/collector/mark_compact.h
@@ -106,7 +106,7 @@
       REQUIRES(Locks::mutator_lock_);
 
   // Sweeps unmarked objects to complete the garbage collection.
-  void Sweep(bool swap_bitmaps) REQUIRES(Locks::heap_bitmap_lock_);
+  void Sweep(bool swap_bitmaps) REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
 
   // Sweeps unmarked objects to complete the garbage collection.
   void SweepLargeObjects(bool swap_bitmaps) REQUIRES(Locks::heap_bitmap_lock_);
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc
index 5427f88..64c8e9a 100644
--- a/runtime/gc/collector/mark_sweep.cc
+++ b/runtime/gc/collector/mark_sweep.cc
@@ -1467,7 +1467,9 @@
   }
   CHECK(mark_stack_->IsEmpty());  // Ensure that the mark stack is empty.
   mark_stack_->Reset();
-  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
+  Thread* const self = Thread::Current();
+  ReaderMutexLock mu(self, *Locks::mutator_lock_);
+  WriterMutexLock mu2(self, *Locks::heap_bitmap_lock_);
   heap_->ClearMarkedObjects();
 }
 
diff --git a/runtime/gc/collector/mark_sweep.h b/runtime/gc/collector/mark_sweep.h
index 245f96b..b61bef7 100644
--- a/runtime/gc/collector/mark_sweep.h
+++ b/runtime/gc/collector/mark_sweep.h
@@ -85,7 +85,7 @@
   void Init();
 
   // Find the default mark bitmap.
-  void FindDefaultSpaceBitmap();
+  void FindDefaultSpaceBitmap() SHARED_REQUIRES(Locks::mutator_lock_);
 
   // Marks all objects in the root set at the start of a garbage collection.
   void MarkRoots(Thread* self)
diff --git a/runtime/gc/collector/semi_space.h b/runtime/gc/collector/semi_space.h
index a905904..0199e1a 100644
--- a/runtime/gc/collector/semi_space.h
+++ b/runtime/gc/collector/semi_space.h
@@ -135,7 +135,9 @@
       REQUIRES(Locks::mutator_lock_);
 
   // Sweeps unmarked objects to complete the garbage collection.
-  virtual void Sweep(bool swap_bitmaps) REQUIRES(Locks::heap_bitmap_lock_);
+  virtual void Sweep(bool swap_bitmaps)
+      REQUIRES(Locks::heap_bitmap_lock_)
+      SHARED_REQUIRES(Locks::mutator_lock_);
 
   // Sweeps unmarked objects to complete the garbage collection.
   void SweepLargeObjects(bool swap_bitmaps) REQUIRES(Locks::heap_bitmap_lock_);
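For readers unfamiliar with the annotations being added across these collector headers, they expand to clang -Wthread-safety attributes, roughly:

  // REQUIRES(mu)        - caller must hold mu exclusively at entry.
  // SHARED_REQUIRES(mu) - caller must hold mu shared (or exclusively).
  // REQUIRES(!mu)       - caller must not hold mu at entry.
  // GUARDED_BY(mu)      - the field may only be accessed with mu held.
  //
  // So the Sweep() declaration above compiles cleanly only at call
  // sites that hold heap_bitmap_lock_ exclusively and mutator_lock_
  // at least shared.
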
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 8cd8d73..137540a 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -801,6 +801,7 @@
   if (!Runtime::Current()->IsAotCompiler()) {
     return false;
   }
+  ScopedObjectAccess soa(Thread::Current());
   for (const auto& space : continuous_spaces_) {
     if (space->IsImageSpace() || space->IsZygoteSpace()) {
       return false;
@@ -1381,15 +1382,18 @@
   uint64_t total_alloc_space_allocated = 0;
   uint64_t total_alloc_space_size = 0;
   uint64_t managed_reclaimed = 0;
-  for (const auto& space : continuous_spaces_) {
-    if (space->IsMallocSpace()) {
-      gc::space::MallocSpace* malloc_space = space->AsMallocSpace();
-      if (malloc_space->IsRosAllocSpace() || !CareAboutPauseTimes()) {
-        // Don't trim dlmalloc spaces if we care about pauses since this can hold the space lock
-        // for a long period of time.
-        managed_reclaimed += malloc_space->Trim();
+  {
+    ScopedObjectAccess soa(self);
+    for (const auto& space : continuous_spaces_) {
+      if (space->IsMallocSpace()) {
+        gc::space::MallocSpace* malloc_space = space->AsMallocSpace();
+        if (malloc_space->IsRosAllocSpace() || !CareAboutPauseTimes()) {
+          // Don't trim dlmalloc spaces if we care about pauses since this can hold the space lock
+          // for a long period of time.
+          managed_reclaimed += malloc_space->Trim();
+        }
+        total_alloc_space_size += malloc_space->Size();
       }
-      total_alloc_space_size += malloc_space->Size();
     }
   }
   total_alloc_space_allocated = GetBytesAllocated();
@@ -1520,6 +1524,7 @@
 }
 
 void Heap::DumpSpaces(std::ostream& stream) const {
+  ScopedObjectAccess soa(Thread::Current());
   for (const auto& space : continuous_spaces_) {
     accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap();
     accounting::ContinuousSpaceBitmap* mark_bitmap = space->GetMarkBitmap();
@@ -1598,6 +1603,9 @@
 }
 
 space::RosAllocSpace* Heap::GetRosAllocSpace(gc::allocator::RosAlloc* rosalloc) const {
+  if (rosalloc_space_ != nullptr && rosalloc_space_->GetRosAlloc() == rosalloc) {
+    return rosalloc_space_;
+  }
   for (const auto& space : continuous_spaces_) {
     if (space->AsContinuousSpace()->IsRosAllocSpace()) {
       if (space->AsContinuousSpace()->AsRosAllocSpace()->GetRosAlloc() == rosalloc) {
@@ -3530,7 +3538,8 @@
 
 void Heap::ClampGrowthLimit() {
   // Use heap bitmap lock to guard against races with BindLiveToMarkBitmap.
-  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
+  ScopedObjectAccess soa(Thread::Current());
+  WriterMutexLock mu(soa.Self(), *Locks::heap_bitmap_lock_);
   capacity_ = growth_limit_;
   for (const auto& space : continuous_spaces_) {
     if (space->IsMallocSpace()) {
@@ -3546,6 +3555,7 @@
 
 void Heap::ClearGrowthLimit() {
   growth_limit_ = capacity_;
+  ScopedObjectAccess soa(Thread::Current());
   for (const auto& space : continuous_spaces_) {
     if (space->IsMallocSpace()) {
       gc::space::MallocSpace* malloc_space = space->AsMallocSpace();
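The ScopedObjectAccess additions above are what satisfy the new contracts: transitioning a thread to kRunnable counts as holding the mutator lock shared, so wrapping each space-list walk in a soa block discharges SHARED_REQUIRES(Locks::mutator_lock_). Schematically (a sketch, not any specific hunk):

  {
    ScopedObjectAccess soa(Thread::Current());  // -> kRunnable; shares mutator_lock_
    for (const auto& space : heap->GetContinuousSpaces()) {  // legal under GUARDED_BY
      // ... read per-space state ...
    }
  }  // previous thread state restored; shared hold released
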
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index 1b7e2c9..c02e2d3 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -308,7 +308,10 @@
   void ThreadFlipEnd(Thread* self) REQUIRES(!*thread_flip_lock_);
 
   // Clear all of the mark bits, doesn't clear bitmaps which have the same live bits as mark bits.
-  void ClearMarkedObjects() REQUIRES(Locks::heap_bitmap_lock_);
+  // Mutator lock is required for GetContinuousSpaces.
+  void ClearMarkedObjects()
+      REQUIRES(Locks::heap_bitmap_lock_)
+      SHARED_REQUIRES(Locks::mutator_lock_);
 
   // Initiates an explicit garbage collection.
   void CollectGarbage(bool clear_soft_references)
@@ -359,8 +362,12 @@
   // due to usage by tests.
   void SetSpaceAsDefault(space::ContinuousSpace* continuous_space)
       REQUIRES(!Locks::heap_bitmap_lock_);
-  void AddSpace(space::Space* space) REQUIRES(!Locks::heap_bitmap_lock_);
-  void RemoveSpace(space::Space* space) REQUIRES(!Locks::heap_bitmap_lock_);
+  void AddSpace(space::Space* space)
+      REQUIRES(!Locks::heap_bitmap_lock_)
+      REQUIRES(Locks::mutator_lock_);
+  void RemoveSpace(space::Space* space)
+      REQUIRES(!Locks::heap_bitmap_lock_)
+      REQUIRES(Locks::mutator_lock_);
 
   // Set target ideal heap utilization ratio, implements
   // dalvik.system.VMRuntime.setTargetHeapUtilization.
@@ -378,7 +385,13 @@
   void UpdateProcessState(ProcessState process_state)
       REQUIRES(!*pending_task_lock_, !*gc_complete_lock_);
 
-  const std::vector<space::ContinuousSpace*>& GetContinuousSpaces() const {
+  bool HaveContinuousSpaces() const NO_THREAD_SAFETY_ANALYSIS {
+    // No lock needed: spaces are only added or removed with all mutators
+    // suspended, so a racy empty() check is benign.
+    return !continuous_spaces_.empty();
+  }
+
+  const std::vector<space::ContinuousSpace*>& GetContinuousSpaces() const
+      SHARED_REQUIRES(Locks::mutator_lock_) {
     return continuous_spaces_;
   }
 
@@ -518,10 +531,13 @@
   // get the space that corresponds to an object's address. Current implementation searches all
   // spaces in turn. If fail_ok is false then failing to find a space will cause an abort.
   // TODO: consider using faster data structure like binary tree.
-  space::ContinuousSpace* FindContinuousSpaceFromObject(const mirror::Object*, bool fail_ok) const;
+  space::ContinuousSpace* FindContinuousSpaceFromObject(const mirror::Object*, bool fail_ok) const
+      SHARED_REQUIRES(Locks::mutator_lock_);
   space::DiscontinuousSpace* FindDiscontinuousSpaceFromObject(const mirror::Object*,
-                                                              bool fail_ok) const;
-  space::Space* FindSpaceFromObject(const mirror::Object*, bool fail_ok) const;
+                                                              bool fail_ok) const
+      SHARED_REQUIRES(Locks::mutator_lock_);
+  space::Space* FindSpaceFromObject(const mirror::Object*, bool fail_ok) const
+      SHARED_REQUIRES(Locks::mutator_lock_);
 
   void DumpForSigQuit(std::ostream& os) REQUIRES(!*gc_complete_lock_);
 
@@ -577,7 +593,9 @@
       REQUIRES(Locks::heap_bitmap_lock_);
 
   // Unbind any bound bitmaps.
-  void UnBindBitmaps() REQUIRES(Locks::heap_bitmap_lock_);
+  void UnBindBitmaps()
+      REQUIRES(Locks::heap_bitmap_lock_)
+      SHARED_REQUIRES(Locks::mutator_lock_);
 
   // Returns the boot image spaces. There may be multiple boot image spaces.
   const std::vector<space::ImageSpace*>& GetBootImageSpaces() const {
@@ -604,7 +622,8 @@
   }
 
   // Return the corresponding rosalloc space.
-  space::RosAllocSpace* GetRosAllocSpace(gc::allocator::RosAlloc* rosalloc) const;
+  space::RosAllocSpace* GetRosAllocSpace(gc::allocator::RosAlloc* rosalloc) const
+      SHARED_REQUIRES(Locks::mutator_lock_);
 
   space::MallocSpace* GetNonMovingSpace() const {
     return non_moving_space_;
@@ -962,7 +981,8 @@
   void ProcessCards(TimingLogger* timings,
                     bool use_rem_sets,
                     bool process_alloc_space_cards,
-                    bool clear_alloc_space_cards);
+                    bool clear_alloc_space_cards)
+      SHARED_REQUIRES(Locks::mutator_lock_);
 
   // Push an object onto the allocation stack.
   void PushOnAllocationStack(Thread* self, mirror::Object** obj)
@@ -1005,10 +1025,10 @@
       REQUIRES(!*gc_complete_lock_, !*pending_task_lock_, !*backtrace_lock_);
 
   // All-known continuous spaces, where objects lie within fixed bounds.
-  std::vector<space::ContinuousSpace*> continuous_spaces_;
+  std::vector<space::ContinuousSpace*> continuous_spaces_ GUARDED_BY(Locks::mutator_lock_);
 
   // All-known discontinuous spaces, where objects may be placed throughout virtual memory.
-  std::vector<space::DiscontinuousSpace*> discontinuous_spaces_;
+  std::vector<space::DiscontinuousSpace*> discontinuous_spaces_ GUARDED_BY(Locks::mutator_lock_);
 
   // All-known alloc spaces, where objects may be or have been allocated.
   std::vector<space::AllocSpace*> alloc_spaces_;
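With the vectors GUARDED_BY(Locks::mutator_lock_), the analysis forces every accessor through one of two doors: hold the lock (the common case), or opt out explicitly as HaveContinuousSpaces() does, relying on the benign race of an empty() check against suspend-all writers. A sketch of a conforming reader (HasAnImageSpace is hypothetical):

  bool HasAnImageSpace(gc::Heap* heap) SHARED_REQUIRES(Locks::mutator_lock_) {
    // GetContinuousSpaces() carries SHARED_REQUIRES itself; this
    // function's own annotation discharges it at compile time.
    for (gc::space::ContinuousSpace* space : heap->GetContinuousSpaces()) {
      if (space->IsImageSpace()) {
        return true;
      }
    }
    return false;
  }
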
diff --git a/runtime/gc/space/dlmalloc_space.cc b/runtime/gc/space/dlmalloc_space.cc
index e754a52..455d28e 100644
--- a/runtime/gc/space/dlmalloc_space.cc
+++ b/runtime/gc/space/dlmalloc_space.cc
@@ -319,7 +319,7 @@
 namespace allocator {
 
 // Implement the dlmalloc morecore callback.
-void* ArtDlMallocMoreCore(void* mspace, intptr_t increment) {
+void* ArtDlMallocMoreCore(void* mspace, intptr_t increment) SHARED_REQUIRES(Locks::mutator_lock_) {
   Runtime* runtime = Runtime::Current();
   Heap* heap = runtime->GetHeap();
   ::art::gc::space::DlMallocSpace* dlmalloc_space = heap->GetDlMallocSpace();
diff --git a/runtime/gc/space/rosalloc_space.cc b/runtime/gc/space/rosalloc_space.cc
index 49126d2..fd4d0a1 100644
--- a/runtime/gc/space/rosalloc_space.cc
+++ b/runtime/gc/space/rosalloc_space.cc
@@ -247,7 +247,10 @@
 size_t RosAllocSpace::Trim() {
   VLOG(heap) << "RosAllocSpace::Trim() ";
   {
-    MutexLock mu(Thread::Current(), lock_);
+    Thread* const self = Thread::Current();
+    // SOA required for RosAlloc::Trim() -> ArtRosAllocMoreCore() ->
+    // Heap::GetRosAllocSpace(), which reads the guarded space list.
+    ScopedObjectAccess soa(self);
+    MutexLock mu(self, lock_);
     // Trim to release memory at the end of the space.
     rosalloc_->Trim();
   }
@@ -373,7 +376,8 @@
 namespace allocator {
 
 // Callback from rosalloc when it needs to increase the footprint.
-void* ArtRosAllocMoreCore(allocator::RosAlloc* rosalloc, intptr_t increment) {
+void* ArtRosAllocMoreCore(allocator::RosAlloc* rosalloc, intptr_t increment)
+    SHARED_REQUIRES(Locks::mutator_lock_) {
   Heap* heap = Runtime::Current()->GetHeap();
   art::gc::space::RosAllocSpace* rosalloc_space = heap->GetRosAllocSpace(rosalloc);
   DCHECK(rosalloc_space != nullptr);
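The morecore callbacks pick up SHARED_REQUIRES because growing or trimming a malloc space re-enters the heap to find the owning space, and that lookup reads the guarded vector; that is also why RosAllocSpace::Trim() above enters a ScopedObjectAccess before taking lock_. The chain, as comments:

  // rosalloc_->Trim()
  //   -> ArtRosAllocMoreCore(rosalloc, increment)  // increment < 0 on trim
  //     -> Heap::GetRosAllocSpace(rosalloc)        // SHARED_REQUIRES(mutator_lock_)
  //       -> walks continuous_spaces_ (GUARDED_BY mutator_lock_), unless
  //          the main-space fast path added in heap.cc hits first.
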
diff --git a/runtime/gc/space/space_create_test.cc b/runtime/gc/space/space_create_test.cc
index aea2d9f..170f927 100644
--- a/runtime/gc/space/space_create_test.cc
+++ b/runtime/gc/space/space_create_test.cc
@@ -170,7 +170,11 @@
 
   gc::Heap* heap = Runtime::Current()->GetHeap();
   space::Space* old_space = space;
-  heap->RemoveSpace(old_space);
+  {
+    ScopedThreadSuspension sts(self, kSuspended);
+    ScopedSuspendAll ssa("Remove space");
+    heap->RemoveSpace(old_space);
+  }
   heap->RevokeAllThreadLocalBuffers();
   space::ZygoteSpace* zygote_space = space->CreateZygoteSpace("alloc space",
                                                               heap->IsLowMemoryMode(),
diff --git a/runtime/gc/space/space_test.h b/runtime/gc/space/space_test.h
index e588eb3..20ef44a 100644
--- a/runtime/gc/space/space_test.h
+++ b/runtime/gc/space/space_test.h
@@ -27,6 +27,7 @@
 #include "mirror/class_loader.h"
 #include "mirror/object-inl.h"
 #include "scoped_thread_state_change.h"
+#include "thread_list.h"
 #include "zygote_space.h"
 
 namespace art {
@@ -43,7 +44,11 @@
     if (revoke) {
       heap->RevokeAllThreadLocalBuffers();
     }
-    heap->AddSpace(space);
+    {
+      ScopedThreadSuspension sts(Thread::Current(), kSuspended);
+      ScopedSuspendAll ssa("Add space");
+      heap->AddSpace(space);
+    }
     heap->SetSpaceAsDefault(space);
   }
 
diff --git a/runtime/native/dalvik_system_VMDebug.cc b/runtime/native/dalvik_system_VMDebug.cc
index 8febb62..8f108fa 100644
--- a/runtime/native/dalvik_system_VMDebug.cc
+++ b/runtime/native/dalvik_system_VMDebug.cc
@@ -314,32 +314,33 @@
   size_t largeObjectsSize = 0;
   size_t largeObjectsUsed = 0;
   gc::Heap* heap = Runtime::Current()->GetHeap();
-  for (gc::space::ContinuousSpace* space : heap->GetContinuousSpaces()) {
-    if (space->IsImageSpace()) {
-      // Currently don't include the image space.
-    } else if (space->IsZygoteSpace()) {
-      gc::space::ZygoteSpace* zygote_space = space->AsZygoteSpace();
-      zygoteSize += zygote_space->Size();
-      zygoteUsed += zygote_space->GetBytesAllocated();
-    } else if (space->IsMallocSpace()) {
-      // This is a malloc space.
-      gc::space::MallocSpace* malloc_space = space->AsMallocSpace();
-      allocSize += malloc_space->GetFootprint();
-      allocUsed += malloc_space->GetBytesAllocated();
-    } else if (space->IsBumpPointerSpace()) {
-      ScopedObjectAccess soa(env);
-      gc::space::BumpPointerSpace* bump_pointer_space = space->AsBumpPointerSpace();
-      allocSize += bump_pointer_space->Size();
-      allocUsed += bump_pointer_space->GetBytesAllocated();
+  {
+    ScopedObjectAccess soa(env);
+    for (gc::space::ContinuousSpace* space : heap->GetContinuousSpaces()) {
+      if (space->IsImageSpace()) {
+        // Currently don't include the image space.
+      } else if (space->IsZygoteSpace()) {
+        gc::space::ZygoteSpace* zygote_space = space->AsZygoteSpace();
+        zygoteSize += zygote_space->Size();
+        zygoteUsed += zygote_space->GetBytesAllocated();
+      } else if (space->IsMallocSpace()) {
+        // This is a malloc space.
+        gc::space::MallocSpace* malloc_space = space->AsMallocSpace();
+        allocSize += malloc_space->GetFootprint();
+        allocUsed += malloc_space->GetBytesAllocated();
+      } else if (space->IsBumpPointerSpace()) {
+        gc::space::BumpPointerSpace* bump_pointer_space = space->AsBumpPointerSpace();
+        allocSize += bump_pointer_space->Size();
+        allocUsed += bump_pointer_space->GetBytesAllocated();
+      }
+    }
+    for (gc::space::DiscontinuousSpace* space : heap->GetDiscontinuousSpaces()) {
+      if (space->IsLargeObjectSpace()) {
+        largeObjectsSize += space->AsLargeObjectSpace()->GetBytesAllocated();
+        largeObjectsUsed += largeObjectsSize;
+      }
     }
   }
-  for (gc::space::DiscontinuousSpace* space : heap->GetDiscontinuousSpaces()) {
-    if (space->IsLargeObjectSpace()) {
-      largeObjectsSize += space->AsLargeObjectSpace()->GetBytesAllocated();
-      largeObjectsUsed += largeObjectsSize;
-    }
-  }
-
   size_t allocFree = allocSize - allocUsed;
   size_t zygoteFree = zygoteSize - zygoteUsed;
   size_t largeObjectsFree = largeObjectsSize - largeObjectsUsed;
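Hoisting the ScopedObjectAccess out of the BumpPointerSpace branch to enclose both loops is required once GetContinuousSpaces()/GetDiscontinuousSpaces() demand the shared mutator lock; it also reads all of the space stats within a single runnable region instead of toggling thread state per space. The resulting pattern, sketched:

  ScopedObjectAccess soa(env);  // env is the JNIEnv* parameter
  for (gc::space::ContinuousSpace* space : heap->GetContinuousSpaces()) { /* ... */ }
  for (gc::space::DiscontinuousSpace* space : heap->GetDiscontinuousSpaces()) { /* ... */ }
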
diff --git a/runtime/oat_file_manager.cc b/runtime/oat_file_manager.cc
index b34b5505..de90f0a 100644
--- a/runtime/oat_file_manager.cc
+++ b/runtime/oat_file_manager.cc
@@ -30,6 +30,7 @@
 #include "oat_file_assistant.h"
 #include "scoped_thread_state_change.h"
 #include "thread-inl.h"
+#include "thread_list.h"
 
 namespace art {
 
@@ -376,7 +377,11 @@
           std::string temp_error_msg;
          // Adding an image space races with other threads that may be reading the
          // spaces array, so it must be done with all threads suspended.
-          runtime->GetHeap()->AddSpace(image_space.get());
+          {
+            ScopedThreadSuspension sts(self, kSuspended);
+            ScopedSuspendAll ssa("Add image space");
+            runtime->GetHeap()->AddSpace(image_space.get());
+          }
           added_image_space = true;
           if (!runtime->GetClassLinker()->AddImageSpace(image_space.get(),
                                                         h_loader,
@@ -386,7 +391,11 @@
                                                         /*out*/&temp_error_msg)) {
             LOG(INFO) << "Failed to add image file " << temp_error_msg;
             dex_files.clear();
-            runtime->GetHeap()->RemoveSpace(image_space.get());
+            {
+              ScopedThreadSuspension sts(self, kSuspended);
+              ScopedSuspendAll ssa("Remove image space");
+              runtime->GetHeap()->RemoveSpace(image_space.get());
+            }
             added_image_space = false;
             // Non-fatal, don't update error_msg.
           }