Move ArtFields and ArtMethods to be a length prefixed array

Fixes race conditions where updates to the method and field arrays
could be observed in the wrong order by the GC.
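
A minimal illustrative sketch of the idea (not ART's actual
LengthPrefixedArray; the class, layout, and main() below are made up
for illustration): the element count lives in the same allocation as
the elements, so publishing a single pointer hands the GC the length
and the storage together instead of as two fields that can be read in
the wrong order.

    #include <cstddef>
    #include <cstdint>
    #include <cstdlib>
    #include <iostream>
    #include <new>

    template <typename T>
    class LengthPrefixedArraySketch {
     public:
      explicit LengthPrefixedArraySketch(uint64_t length) : length_(length) {}

      // Bytes needed for the 8-byte header plus `length` elements.
      static size_t ComputeSize(size_t length, size_t element_size = sizeof(T)) {
        return sizeof(LengthPrefixedArraySketch<T>) + length * element_size;
      }

      // Elements are laid out immediately after the header.
      T& At(size_t index, size_t element_size = sizeof(T)) {
        uint8_t* base = reinterpret_cast<uint8_t*>(this) + sizeof(*this);
        return *reinterpret_cast<T*>(base + index * element_size);
      }

      uint64_t Length() const { return length_; }

     private:
      uint64_t length_;  // Count and elements always travel together.
    };

    int main() {
      // One allocation for header + 3 ints; construct the header with placement new.
      void* storage = std::malloc(LengthPrefixedArraySketch<int>::ComputeSize(3));
      auto* arr = new (storage) LengthPrefixedArraySketch<int>(3);
      for (size_t i = 0; i < arr->Length(); ++i) {
        arr->At(i) = static_cast<int>(i) * 10;
      }
      std::cout << "len=" << arr->Length() << " last=" << arr->At(2) << "\n";
      std::free(storage);
      return 0;
    }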

Bug: 22832610
Change-Id: Ia21d6698f73ba207a6392c3d6b9be2658933073f
diff --git a/compiler/image_writer.cc b/compiler/image_writer.cc
index 0b26077..b85a129 100644
--- a/compiler/image_writer.cc
+++ b/compiler/image_writer.cc
@@ -825,35 +825,68 @@
         field_offset = MemberOffset(field_offset.Uint32Value() +
                                     sizeof(mirror::HeapReference<mirror::Object>));
       }
-      // Visit and assign offsets for fields.
+      // Visit and assign offsets for fields and field arrays.
       auto* as_klass = h_obj->AsClass();
-      ArtField* fields[] = { as_klass->GetSFields(), as_klass->GetIFields() };
-      size_t num_fields[] = { as_klass->NumStaticFields(), as_klass->NumInstanceFields() };
-      for (size_t i = 0; i < 2; ++i) {
-        for (size_t j = 0; j < num_fields[i]; ++j) {
-          auto* field = fields[i] + j;
-          auto it = native_object_reloc_.find(field);
-          CHECK(it == native_object_reloc_.end()) << "Field at index " << i << ":" << j
-              << " already assigned " << PrettyField(field);
-          native_object_reloc_.emplace(
-              field, NativeObjectReloc { bin_slot_sizes_[kBinArtField], kBinArtField });
-          bin_slot_sizes_[kBinArtField] += sizeof(ArtField);
+      LengthPrefixedArray<ArtField>* fields[] = {
+          as_klass->GetSFieldsPtr(), as_klass->GetIFieldsPtr(),
+      };
+      for (LengthPrefixedArray<ArtField>* cur_fields : fields) {
+        // Total array length including header.
+        if (cur_fields != nullptr) {
+          const size_t header_size = LengthPrefixedArray<ArtField>::ComputeSize(0);
+          // Forward the entire array at once.
+          auto it = native_object_relocations_.find(cur_fields);
+          CHECK(it == native_object_relocations_.end()) << "Field array " << cur_fields
+                                                  << " already forwarded";
+          size_t& offset = bin_slot_sizes_[kBinArtField];
+          native_object_relocations_.emplace(
+              cur_fields, NativeObjectRelocation {
+                  offset, kNativeObjectRelocationTypeArtFieldArray });
+          offset += header_size;
+          // Forward individual fields so that we can quickly find where they belong.
+          for (size_t i = 0, count = cur_fields->Length(); i < count; ++i) {
+            // Need to forward arrays separately from fields.
+            ArtField* field = &cur_fields->At(i);
+            auto it2 = native_object_relocations_.find(field);
+            CHECK(it2 == native_object_relocations_.end()) << "Field at index=" << i
+                << " already assigned " << PrettyField(field) << " static=" << field->IsStatic();
+            native_object_relocations_.emplace(
+                field, NativeObjectRelocation {offset, kNativeObjectRelocationTypeArtField });
+            offset += sizeof(ArtField);
+          }
         }
       }
       // Visit and assign offsets for methods.
-      IterationRange<StrideIterator<ArtMethod>> method_arrays[] = {
-          as_klass->GetDirectMethods(target_ptr_size_),
-          as_klass->GetVirtualMethods(target_ptr_size_)
+      LengthPrefixedArray<ArtMethod>* method_arrays[] = {
+          as_klass->GetDirectMethodsPtr(), as_klass->GetVirtualMethodsPtr(),
       };
-      for (auto& array : method_arrays) {
+      for (LengthPrefixedArray<ArtMethod>* array : method_arrays) {
+        if (array == nullptr) {
+          continue;
+        }
         bool any_dirty = false;
         size_t count = 0;
-        for (auto& m : array) {
+        const size_t method_size = ArtMethod::ObjectSize(target_ptr_size_);
+        auto iteration_range = MakeIterationRangeFromLengthPrefixedArray(array, method_size);
+        for (auto& m : iteration_range) {
           any_dirty = any_dirty || WillMethodBeDirty(&m);
           ++count;
         }
-        for (auto& m : array) {
-          AssignMethodOffset(&m, any_dirty ? kBinArtMethodDirty : kBinArtMethodClean);
+        NativeObjectRelocationType type = any_dirty ? kNativeObjectRelocationTypeArtMethodDirty :
+            kNativeObjectRelocationTypeArtMethodClean;
+        Bin bin_type = BinTypeForNativeRelocationType(type);
+        // Forward the entire array at once, but header first.
+        const size_t header_size = LengthPrefixedArray<ArtMethod>::ComputeSize(0, method_size);
+        auto it = native_object_relocations_.find(array);
+        CHECK(it == native_object_relocations_.end()) << "Method array " << array
+            << " already forwarded";
+        size_t& offset = bin_slot_sizes_[bin_type];
+        native_object_relocations_.emplace(array, NativeObjectRelocation { offset,
+            any_dirty ? kNativeObjectRelocationTypeArtMethodArrayDirty :
+                kNativeObjectRelocationTypeArtMethodArrayClean });
+        offset += header_size;
+        for (auto& m : iteration_range) {
+          AssignMethodOffset(&m, type);
         }
         (any_dirty ? dirty_methods_ : clean_methods_) += count;
       }
@@ -871,12 +904,13 @@
   }
 }
 
-void ImageWriter::AssignMethodOffset(ArtMethod* method, Bin bin) {
-  auto it = native_object_reloc_.find(method);
-  CHECK(it == native_object_reloc_.end()) << "Method " << method << " already assigned "
+void ImageWriter::AssignMethodOffset(ArtMethod* method, NativeObjectRelocationType type) {
+  auto it = native_object_relocations_.find(method);
+  CHECK(it == native_object_relocations_.end()) << "Method " << method << " already assigned "
       << PrettyMethod(method);
-  native_object_reloc_.emplace(method, NativeObjectReloc { bin_slot_sizes_[bin], bin });
-  bin_slot_sizes_[bin] += ArtMethod::ObjectSize(target_ptr_size_);
+  size_t& offset = bin_slot_sizes_[BinTypeForNativeRelocationType(type)];
+  native_object_relocations_.emplace(method, NativeObjectRelocation { offset, type });
+  offset += ArtMethod::ObjectSize(target_ptr_size_);
 }
 
 void ImageWriter::WalkFieldsCallback(mirror::Object* obj, void* arg) {
@@ -930,10 +964,20 @@
       runtime->GetCalleeSaveMethod(Runtime::kRefsOnly);
   image_methods_[ImageHeader::kRefsAndArgsSaveMethod] =
       runtime->GetCalleeSaveMethod(Runtime::kRefsAndArgs);
+
+  // Add room for fake length prefixed array.
+  const auto image_method_type = kNativeObjectRelocationTypeArtMethodArrayClean;
+  auto it = native_object_relocations_.find(&image_method_array_);
+  CHECK(it == native_object_relocations_.end());
+  size_t& offset = bin_slot_sizes_[BinTypeForNativeRelocationType(image_method_type)];
+  native_object_relocations_.emplace(&image_method_array_,
+                                     NativeObjectRelocation { offset, image_method_type });
+  CHECK_EQ(sizeof(image_method_array_), 8u);
+  offset += sizeof(image_method_array_);
   for (auto* m : image_methods_) {
     CHECK(m != nullptr);
     CHECK(m->IsRuntimeMethod());
-    AssignMethodOffset(m, kBinArtMethodDirty);
+    AssignMethodOffset(m, kNativeObjectRelocationTypeArtMethodClean);
   }
 
   // Calculate cumulative bin slot sizes.
@@ -953,10 +997,10 @@
   image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots.Get()));
 
   // Update the native relocations by adding their bin sums.
-  for (auto& pair : native_object_reloc_) {
-    auto& native_reloc = pair.second;
-    native_reloc.offset += image_objects_offset_begin_ +
-        bin_slot_previous_sizes_[native_reloc.bin_type];
+  for (auto& pair : native_object_relocations_) {
+    NativeObjectRelocation& relocation = pair.second;
+    Bin bin_type = BinTypeForNativeRelocationType(relocation.type);
+    relocation.offset += image_objects_offset_begin_ + bin_slot_previous_sizes_[bin_type];
   }
 
   // Calculate how big the intern table will be after being serialized.
@@ -1025,8 +1069,8 @@
 }
 
 ArtMethod* ImageWriter::GetImageMethodAddress(ArtMethod* method) {
-  auto it = native_object_reloc_.find(method);
-  CHECK(it != native_object_reloc_.end()) << PrettyMethod(method) << " @ " << method;
+  auto it = native_object_relocations_.find(method);
+  CHECK(it != native_object_relocations_.end()) << PrettyMethod(method) << " @ " << method;
   CHECK_GE(it->second.offset, image_end_) << "ArtMethods should be after Objects";
   return reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset);
 }
@@ -1064,20 +1108,34 @@
 
 void ImageWriter::CopyAndFixupNativeData() {
   // Copy ArtFields and methods to their locations and update the array for convenience.
-  for (auto& pair : native_object_reloc_) {
-    auto& native_reloc = pair.second;
-    if (native_reloc.bin_type == kBinArtField) {
-      auto* dest = image_->Begin() + native_reloc.offset;
-      DCHECK_GE(dest, image_->Begin() + image_end_);
-      memcpy(dest, pair.first, sizeof(ArtField));
-      reinterpret_cast<ArtField*>(dest)->SetDeclaringClass(
-          GetImageAddress(reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass()));
-    } else {
-      CHECK(IsArtMethodBin(native_reloc.bin_type)) << native_reloc.bin_type;
-      auto* dest = image_->Begin() + native_reloc.offset;
-      DCHECK_GE(dest, image_->Begin() + image_end_);
-      CopyAndFixupMethod(reinterpret_cast<ArtMethod*>(pair.first),
-                         reinterpret_cast<ArtMethod*>(dest));
+  for (auto& pair : native_object_relocations_) {
+    NativeObjectRelocation& relocation = pair.second;
+    auto* dest = image_->Begin() + relocation.offset;
+    DCHECK_GE(dest, image_->Begin() + image_end_);
+    switch (relocation.type) {
+      case kNativeObjectRelocationTypeArtField: {
+        memcpy(dest, pair.first, sizeof(ArtField));
+        reinterpret_cast<ArtField*>(dest)->SetDeclaringClass(
+            GetImageAddress(reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass()));
+        break;
+      }
+      case kNativeObjectRelocationTypeArtMethodClean:
+      case kNativeObjectRelocationTypeArtMethodDirty: {
+        CopyAndFixupMethod(reinterpret_cast<ArtMethod*>(pair.first),
+                           reinterpret_cast<ArtMethod*>(dest));
+        break;
+      }
+      // For arrays, copy just the header since the elements will get copied by their corresponding
+      // relocations.
+      case kNativeObjectRelocationTypeArtFieldArray: {
+        memcpy(dest, pair.first, LengthPrefixedArray<ArtField>::ComputeSize(0));
+        break;
+      }
+      case kNativeObjectRelocationTypeArtMethodArrayClean:
+      case kNativeObjectRelocationTypeArtMethodArrayDirty: {
+        memcpy(dest, pair.first, LengthPrefixedArray<ArtMethod>::ComputeSize(0));
+        break;
+      }
     }
   }
   // Fixup the image method roots.
@@ -1086,12 +1144,12 @@
   for (size_t i = 0; i < ImageHeader::kImageMethodsCount; ++i) {
     auto* m = image_methods_[i];
     CHECK(m != nullptr);
-    auto it = native_object_reloc_.find(m);
-    CHECK(it != native_object_reloc_.end()) << "No fowarding for " << PrettyMethod(m);
-    auto& native_reloc = it->second;
-    CHECK(methods_section.Contains(native_reloc.offset)) << native_reloc.offset << " not in "
+    auto it = native_object_relocations_.find(m);
+    CHECK(it != native_object_relocations_.end()) << "No forwarding for " << PrettyMethod(m);
+    NativeObjectRelocation& relocation = it->second;
+    CHECK(methods_section.Contains(relocation.offset)) << relocation.offset << " not in "
         << methods_section;
-    CHECK(IsArtMethodBin(native_reloc.bin_type)) << native_reloc.bin_type;
+    CHECK(relocation.IsArtMethodRelocation()) << relocation.type;
     auto* dest = reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset);
     image_header->SetImageMethod(static_cast<ImageHeader::ImageMethod>(i), dest);
   }
@@ -1143,9 +1201,9 @@
   for (size_t i = 0, count = num_elements; i < count; ++i) {
     auto* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
     if (elem != nullptr) {
-      auto it = native_object_reloc_.find(elem);
-      if (it == native_object_reloc_.end()) {
-        if (IsArtMethodBin(array_type)) {
+      auto it = native_object_relocations_.find(elem);
+      if (it == native_object_relocations_.end()) {
+        if (array_type == kBinArtMethodClean || array_type == kBinArtMethodDirty) {
           auto* method = reinterpret_cast<ArtMethod*>(elem);
           LOG(FATAL) << "No relocation entry for ArtMethod " << PrettyMethod(method) << " @ "
               << method << " idx=" << i << "/" << num_elements << " with declaring class "
@@ -1237,51 +1295,38 @@
   }
 };
 
+void* ImageWriter::NativeLocationInImage(void* obj) {
+  if (obj == nullptr) {
+    return nullptr;
+  }
+  auto it = native_object_relocations_.find(obj);
+  CHECK(it != native_object_relocations_.end()) << obj;
+  const NativeObjectRelocation& relocation = it->second;
+  return reinterpret_cast<void*>(image_begin_ + relocation.offset);
+}
+
 void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) {
-  // Copy and fix up ArtFields in the class.
-  ArtField* fields[2] = { orig->GetSFields(), orig->GetIFields() };
-  size_t num_fields[2] = { orig->NumStaticFields(), orig->NumInstanceFields() };
   // Update the field arrays.
-  for (size_t i = 0; i < 2; ++i) {
-    if (num_fields[i] == 0) {
-      CHECK(fields[i] == nullptr);
-      continue;
-    }
-    auto it = native_object_reloc_.find(fields[i]);
-    CHECK(it != native_object_reloc_.end()) << PrettyClass(orig) << " : " << PrettyField(fields[i]);
-    auto* image_fields = reinterpret_cast<ArtField*>(image_begin_ + it->second.offset);
-    if (i == 0) {
-      copy->SetSFieldsUnchecked(image_fields);
-    } else {
-      copy->SetIFieldsUnchecked(image_fields);
-    }
-  }
-  // Update direct / virtual method arrays.
-  auto* direct_methods = orig->GetDirectMethodsPtr();
-  if (direct_methods != nullptr) {
-    auto it = native_object_reloc_.find(direct_methods);
-    CHECK(it != native_object_reloc_.end()) << PrettyClass(orig);
-    copy->SetDirectMethodsPtrUnchecked(
-        reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset));
-  }
-  auto* virtual_methods = orig->GetVirtualMethodsPtr();
-  if (virtual_methods != nullptr) {
-    auto it = native_object_reloc_.find(virtual_methods);
-    CHECK(it != native_object_reloc_.end()) << PrettyClass(orig);
-    copy->SetVirtualMethodsPtr(
-        reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset));
-  }
+  copy->SetSFieldsPtrUnchecked(reinterpret_cast<LengthPrefixedArray<ArtField>*>(
+      NativeLocationInImage(orig->GetSFieldsPtr())));
+  copy->SetIFieldsPtrUnchecked(reinterpret_cast<LengthPrefixedArray<ArtField>*>(
+      NativeLocationInImage(orig->GetIFieldsPtr())));
+  // Update direct and virtual method arrays.
+  copy->SetDirectMethodsPtrUnchecked(reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
+      NativeLocationInImage(orig->GetDirectMethodsPtr())));
+  copy->SetVirtualMethodsPtr(reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
+      NativeLocationInImage(orig->GetVirtualMethodsPtr())));
   // Fix up embedded tables.
   if (orig->ShouldHaveEmbeddedImtAndVTable()) {
     for (int32_t i = 0; i < orig->GetEmbeddedVTableLength(); ++i) {
-      auto it = native_object_reloc_.find(orig->GetEmbeddedVTableEntry(i, target_ptr_size_));
-      CHECK(it != native_object_reloc_.end()) << PrettyClass(orig);
+      auto it = native_object_relocations_.find(orig->GetEmbeddedVTableEntry(i, target_ptr_size_));
+      CHECK(it != native_object_relocations_.end()) << PrettyClass(orig);
       copy->SetEmbeddedVTableEntryUnchecked(
           i, reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset), target_ptr_size_);
     }
     for (size_t i = 0; i < mirror::Class::kImtSize; ++i) {
-      auto it = native_object_reloc_.find(orig->GetEmbeddedImTableEntry(i, target_ptr_size_));
-      CHECK(it != native_object_reloc_.end()) << PrettyClass(orig);
+      auto it = native_object_relocations_.find(orig->GetEmbeddedImTableEntry(i, target_ptr_size_));
+      CHECK(it != native_object_relocations_.end()) << PrettyClass(orig);
       copy->SetEmbeddedImTableEntry(
           i, reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset), target_ptr_size_);
     }
@@ -1322,9 +1367,9 @@
       auto* dest = down_cast<mirror::AbstractMethod*>(copy);
       auto* src = down_cast<mirror::AbstractMethod*>(orig);
       ArtMethod* src_method = src->GetArtMethod();
-      auto it = native_object_reloc_.find(src_method);
-      CHECK(it != native_object_reloc_.end()) << "Missing relocation for AbstractMethod.artMethod "
-          << PrettyMethod(src_method);
+      auto it = native_object_relocations_.find(src_method);
+      CHECK(it != native_object_relocations_.end())
+          << "Missing relocation for AbstractMethod.artMethod " << PrettyMethod(src_method);
       dest->SetArtMethod(
           reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset));
     }
@@ -1504,4 +1549,19 @@
       bin_slot_sizes_[kBinArtMethodClean] + intern_table_bytes_, kPageSize);
 }
 
+ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocationType type) {
+  switch (type) {
+    case kNativeObjectRelocationTypeArtField:
+    case kNativeObjectRelocationTypeArtFieldArray:
+      return kBinArtField;
+    case kNativeObjectRelocationTypeArtMethodClean:
+    case kNativeObjectRelocationTypeArtMethodArrayClean:
+      return kBinArtMethodClean;
+    case kNativeObjectRelocationTypeArtMethodDirty:
+    case kNativeObjectRelocationTypeArtMethodArrayDirty:
+      return kBinArtMethodDirty;
+  }
+  UNREACHABLE();
+}
+
 }  // namespace art
diff --git a/compiler/image_writer.h b/compiler/image_writer.h
index cabd918..eb6aa6f 100644
--- a/compiler/image_writer.h
+++ b/compiler/image_writer.h
@@ -30,6 +30,7 @@
 #include "base/macros.h"
 #include "driver/compiler_driver.h"
 #include "gc/space/space.h"
+#include "length_prefixed_array.h"
 #include "lock_word.h"
 #include "mem_map.h"
 #include "oat_file.h"
@@ -54,7 +55,8 @@
         quick_to_interpreter_bridge_offset_(0), compile_pic_(compile_pic),
         target_ptr_size_(InstructionSetPointerSize(compiler_driver_.GetInstructionSet())),
         bin_slot_sizes_(), bin_slot_previous_sizes_(), bin_slot_count_(),
-        intern_table_bytes_(0u), dirty_methods_(0u), clean_methods_(0u) {
+        intern_table_bytes_(0u), image_method_array_(ImageHeader::kImageMethodsCount),
+        dirty_methods_(0u), clean_methods_(0u) {
     CHECK_NE(image_begin, 0U);
     std::fill(image_methods_, image_methods_ + arraysize(image_methods_), nullptr);
   }
@@ -129,9 +131,18 @@
     // Number of bins which are for mirror objects.
     kBinMirrorCount = kBinArtField,
   };
-
   friend std::ostream& operator<<(std::ostream& stream, const Bin& bin);
 
+  enum NativeObjectRelocationType {
+    kNativeObjectRelocationTypeArtField,
+    kNativeObjectRelocationTypeArtFieldArray,
+    kNativeObjectRelocationTypeArtMethodClean,
+    kNativeObjectRelocationTypeArtMethodArrayClean,
+    kNativeObjectRelocationTypeArtMethodDirty,
+    kNativeObjectRelocationTypeArtMethodArrayDirty,
+  };
+  friend std::ostream& operator<<(std::ostream& stream, const NativeObjectRelocationType& type);
+
   static constexpr size_t kBinBits = MinimumBitsToStore<uint32_t>(kBinMirrorCount - 1);
   // uint32 = typeof(lockword_)
   // Subtract read barrier bits since we want these to remain 0, or else it may result in DCHECK
@@ -204,10 +215,6 @@
     return offset == 0u ? nullptr : oat_data_begin_ + offset;
   }
 
-  static bool IsArtMethodBin(Bin bin) {
-    return bin == kBinArtMethodClean || bin == kBinArtMethodDirty;
-  }
-
   // Returns true if the class was in the original requested image classes list.
   bool IsImageClass(mirror::Class* klass) SHARED_REQUIRES(Locks::mutator_lock_);
 
@@ -284,7 +291,12 @@
   bool WillMethodBeDirty(ArtMethod* m) const SHARED_REQUIRES(Locks::mutator_lock_);
 
   // Assign the offset for an ArtMethod.
-  void AssignMethodOffset(ArtMethod* method, Bin bin) SHARED_REQUIRES(Locks::mutator_lock_);
+  void AssignMethodOffset(ArtMethod* method, NativeObjectRelocationType type)
+      SHARED_REQUIRES(Locks::mutator_lock_);
+
+  static Bin BinTypeForNativeRelocationType(NativeObjectRelocationType type);
+
+  void* NativeLocationInImage(void* obj);
 
   const CompilerDriver& compiler_driver_;
 
@@ -356,14 +368,21 @@
   // ArtField, ArtMethod relocating map. These are allocated as array of structs but we want to
   // have one entry per art field for convenience. ArtFields are placed right after the end of the
   // image objects (aka sum of bin_slot_sizes_). ArtMethods are placed right after the ArtFields.
-  struct NativeObjectReloc {
+  struct NativeObjectRelocation {
     uintptr_t offset;
-    Bin bin_type;
+    NativeObjectRelocationType type;
+
+    bool IsArtMethodRelocation() const {
+      return type == kNativeObjectRelocationTypeArtMethodClean ||
+          type == kNativeObjectRelocationTypeArtMethodDirty;
+    }
   };
-  std::unordered_map<void*, NativeObjectReloc> native_object_reloc_;
+  std::unordered_map<void*, NativeObjectRelocation> native_object_relocations_;
 
   // Runtime ArtMethods which aren't reachable from any Class but need to be copied into the image.
   ArtMethod* image_methods_[ImageHeader::kImageMethodsCount];
+  // Fake length prefixed array for image methods.
+  LengthPrefixedArray<ArtMethod> image_method_array_;
 
   // Counters for measurements, used for logging only.
   uint64_t dirty_methods_;
diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc
index b8b6a5f..b61cfc9 100644
--- a/oatdump/oatdump.cc
+++ b/oatdump/oatdump.cc
@@ -1615,16 +1615,12 @@
           // TODO: Dump fields.
           // Dump methods after.
           const auto& methods_section = image_header_.GetMethodsSection();
-          const auto pointer_size =
+          const size_t pointer_size =
               InstructionSetPointerSize(oat_dumper_->GetOatInstructionSet());
-          const auto method_size = ArtMethod::ObjectSize(pointer_size);
-          for (size_t pos = 0; pos < methods_section.Size(); pos += method_size) {
-            auto* method = reinterpret_cast<ArtMethod*>(
-                image_space->Begin() + pos + methods_section.Offset());
-            indent_os << method << " " << " ArtMethod: " << PrettyMethod(method) << "\n";
-            DumpMethod(method, this, indent_os);
-            indent_os << "\n";
-          }
+          DumpArtMethodVisitor visitor(this);
+          methods_section.VisitPackedArtMethods(&visitor,
+                                                image_space->Begin(),
+                                                ArtMethod::ObjectSize(pointer_size));
         }
       }
       // Dump the large objects separately.
@@ -1663,6 +1659,21 @@
   }
 
  private:
+  class DumpArtMethodVisitor : public ArtMethodVisitor {
+   public:
+    explicit DumpArtMethodVisitor(ImageDumper* image_dumper) : image_dumper_(image_dumper) {}
+
+    virtual void Visit(ArtMethod* method) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
+      std::ostream& indent_os = image_dumper_->vios_.Stream();
+      indent_os << method << " " << " ArtMethod: " << PrettyMethod(method) << "\n";
+      image_dumper_->DumpMethod(method, image_dumper_, indent_os);
+      indent_os << "\n";
+    }
+
+   private:
+    ImageDumper* const image_dumper_;
+  };
+
   static void PrettyObjectValue(std::ostream& os, mirror::Class* type, mirror::Object* value)
       SHARED_REQUIRES(Locks::mutator_lock_) {
     CHECK(type != nullptr);
@@ -1739,9 +1750,8 @@
     if (super != nullptr) {
       DumpFields(os, obj, super);
     }
-    ArtField* fields = klass->GetIFields();
-    for (size_t i = 0, count = klass->NumInstanceFields(); i < count; i++) {
-      PrintField(os, &fields[i], obj);
+    for (ArtField& field : klass->GetIFields()) {
+      PrintField(os, &field, obj);
     }
   }
 
@@ -1837,13 +1847,11 @@
       }
     } else if (obj->IsClass()) {
       mirror::Class* klass = obj->AsClass();
-      ArtField* sfields = klass->GetSFields();
-      const size_t num_fields = klass->NumStaticFields();
-      if (num_fields != 0) {
+      if (klass->NumStaticFields() != 0) {
         os << "STATICS:\n";
         ScopedIndentation indent2(&state->vios_);
-        for (size_t i = 0; i < num_fields; i++) {
-          PrintField(os, &sfields[i], sfields[i].GetDeclaringClass());
+        for (ArtField& field : klass->GetSFields()) {
+          PrintField(os, &field, field.GetDeclaringClass());
         }
       }
     } else {
diff --git a/patchoat/patchoat.cc b/patchoat/patchoat.cc
index 1ed6597..283eea9 100644
--- a/patchoat/patchoat.cc
+++ b/patchoat/patchoat.cc
@@ -419,24 +419,44 @@
   return true;
 }
 
-void PatchOat::PatchArtFields(const ImageHeader* image_header) {
-  const auto& section = image_header->GetImageSection(ImageHeader::kSectionArtFields);
-  for (size_t pos = 0; pos < section.Size(); pos += sizeof(ArtField)) {
-    auto* src = reinterpret_cast<ArtField*>(heap_->Begin() + section.Offset() + pos);
-    auto* dest = RelocatedCopyOf(src);
-    dest->SetDeclaringClass(RelocatedAddressOfPointer(src->GetDeclaringClass()));
+class PatchOatArtFieldVisitor : public ArtFieldVisitor {
+ public:
+  explicit PatchOatArtFieldVisitor(PatchOat* patch_oat) : patch_oat_(patch_oat) {}
+
+  void Visit(ArtField* field) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
+    ArtField* const dest = patch_oat_->RelocatedCopyOf(field);
+    dest->SetDeclaringClass(patch_oat_->RelocatedAddressOfPointer(field->GetDeclaringClass()));
   }
+
+ private:
+  PatchOat* const patch_oat_;
+};
+
+void PatchOat::PatchArtFields(const ImageHeader* image_header) {
+  PatchOatArtFieldVisitor visitor(this);
+  const auto& section = image_header->GetImageSection(ImageHeader::kSectionArtFields);
+  section.VisitPackedArtFields(&visitor, heap_->Begin());
 }
 
+class PatchOatArtMethodVisitor : public ArtMethodVisitor {
+ public:
+  explicit PatchOatArtMethodVisitor(PatchOat* patch_oat) : patch_oat_(patch_oat) {}
+
+  void Visit(ArtMethod* method) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
+    ArtMethod* const dest = patch_oat_->RelocatedCopyOf(method);
+    patch_oat_->FixupMethod(method, dest);
+  }
+
+ private:
+  PatchOat* const patch_oat_;
+};
+
 void PatchOat::PatchArtMethods(const ImageHeader* image_header) {
   const auto& section = image_header->GetMethodsSection();
   const size_t pointer_size = InstructionSetPointerSize(isa_);
-  size_t method_size = ArtMethod::ObjectSize(pointer_size);
-  for (size_t pos = 0; pos < section.Size(); pos += method_size) {
-    auto* src = reinterpret_cast<ArtMethod*>(heap_->Begin() + section.Offset() + pos);
-    auto* dest = RelocatedCopyOf(src);
-    FixupMethod(src, dest);
-  }
+  const size_t method_size = ArtMethod::ObjectSize(pointer_size);
+  PatchOatArtMethodVisitor visitor(this);
+  section.VisitPackedArtMethods(&visitor, heap_->Begin(), method_size);
 }
 
 class FixupRootVisitor : public RootVisitor {
@@ -601,8 +621,8 @@
   if (object->IsClass<kVerifyNone>()) {
     auto* klass = object->AsClass();
     auto* copy_klass = down_cast<mirror::Class*>(copy);
-    copy_klass->SetSFieldsUnchecked(RelocatedAddressOfPointer(klass->GetSFields()));
-    copy_klass->SetIFieldsUnchecked(RelocatedAddressOfPointer(klass->GetIFields()));
+    copy_klass->SetSFieldsPtrUnchecked(RelocatedAddressOfPointer(klass->GetSFieldsPtr()));
+    copy_klass->SetIFieldsPtrUnchecked(RelocatedAddressOfPointer(klass->GetIFieldsPtr()));
     copy_klass->SetDirectMethodsPtrUnchecked(
         RelocatedAddressOfPointer(klass->GetDirectMethodsPtr()));
     copy_klass->SetVirtualMethodsPtr(RelocatedAddressOfPointer(klass->GetVirtualMethodsPtr()));
diff --git a/patchoat/patchoat.h b/patchoat/patchoat.h
index 466dacb..43cdaea 100644
--- a/patchoat/patchoat.h
+++ b/patchoat/patchoat.h
@@ -207,6 +207,8 @@
   TimingLogger* timings_;
 
   friend class FixupRootVisitor;
+  friend class PatchOatArtFieldVisitor;
+  friend class PatchOatArtMethodVisitor;
   DISALLOW_IMPLICIT_CONSTRUCTORS(PatchOat);
 };
 
diff --git a/runtime/arch/stub_test.cc b/runtime/arch/stub_test.cc
index 195b3b3..e6710ed 100644
--- a/runtime/arch/stub_test.cc
+++ b/runtime/arch/stub_test.cc
@@ -2063,37 +2063,34 @@
   // Play with it...
 
   // Static fields.
-  ArtField* fields = c->GetSFields();
-  size_t num_fields = c->NumStaticFields();
-  for (size_t i = 0; i < num_fields; ++i) {
-    ArtField* f = &fields[i];
-    Primitive::Type type = f->GetTypeAsPrimitiveType();
+  for (ArtField& f : c->GetSFields()) {
+    Primitive::Type type = f.GetTypeAsPrimitiveType();
     if (test_type != type) {
      continue;
     }
     switch (type) {
       case Primitive::Type::kPrimBoolean:
-        GetSetBooleanStatic(f, self, m, test);
+        GetSetBooleanStatic(&f, self, m, test);
         break;
       case Primitive::Type::kPrimByte:
-        GetSetByteStatic(f, self, m, test);
+        GetSetByteStatic(&f, self, m, test);
         break;
       case Primitive::Type::kPrimChar:
-        GetSetCharStatic(f, self, m, test);
+        GetSetCharStatic(&f, self, m, test);
         break;
       case Primitive::Type::kPrimShort:
-        GetSetShortStatic(f, self, m, test);
+        GetSetShortStatic(&f, self, m, test);
         break;
       case Primitive::Type::kPrimInt:
-        GetSet32Static(f, self, m, test);
+        GetSet32Static(&f, self, m, test);
         break;
       case Primitive::Type::kPrimLong:
-        GetSet64Static(f, self, m, test);
+        GetSet64Static(&f, self, m, test);
         break;
       case Primitive::Type::kPrimNot:
         // Don't try array.
-        if (f->GetTypeDescriptor()[0] != '[') {
-          GetSetObjStatic(f, self, m, test);
+        if (f.GetTypeDescriptor()[0] != '[') {
+          GetSetObjStatic(&f, self, m, test);
         }
         break;
       default:
@@ -2102,37 +2099,34 @@
   }
 
   // Instance fields.
-  fields = c->GetIFields();
-  num_fields = c->NumInstanceFields();
-  for (size_t i = 0; i < num_fields; ++i) {
-    ArtField* f = &fields[i];
-    Primitive::Type type = f->GetTypeAsPrimitiveType();
+  for (ArtField& f : c->GetIFields()) {
+    Primitive::Type type = f.GetTypeAsPrimitiveType();
     if (test_type != type) {
       continue;
     }
     switch (type) {
       case Primitive::Type::kPrimBoolean:
-        GetSetBooleanInstance(&obj, f, self, m, test);
+        GetSetBooleanInstance(&obj, &f, self, m, test);
         break;
       case Primitive::Type::kPrimByte:
-        GetSetByteInstance(&obj, f, self, m, test);
+        GetSetByteInstance(&obj, &f, self, m, test);
         break;
       case Primitive::Type::kPrimChar:
-        GetSetCharInstance(&obj, f, self, m, test);
+        GetSetCharInstance(&obj, &f, self, m, test);
         break;
       case Primitive::Type::kPrimShort:
-        GetSetShortInstance(&obj, f, self, m, test);
+        GetSetShortInstance(&obj, &f, self, m, test);
         break;
       case Primitive::Type::kPrimInt:
-        GetSet32Instance(&obj, f, self, m, test);
+        GetSet32Instance(&obj, &f, self, m, test);
         break;
       case Primitive::Type::kPrimLong:
-        GetSet64Instance(&obj, f, self, m, test);
+        GetSet64Instance(&obj, &f, self, m, test);
         break;
       case Primitive::Type::kPrimNot:
         // Don't try array.
-        if (f->GetTypeDescriptor()[0] != '[') {
-          GetSetObjInstance(&obj, f, self, m, test);
+        if (f.GetTypeDescriptor()[0] != '[') {
+          GetSetObjInstance(&obj, &f, self, m, test);
         }
         break;
       default:
diff --git a/runtime/art_field.cc b/runtime/art_field.cc
index e4a5834..3737e0d 100644
--- a/runtime/art_field.cc
+++ b/runtime/art_field.cc
@@ -49,10 +49,9 @@
 
 ArtField* ArtField::FindInstanceFieldWithOffset(mirror::Class* klass, uint32_t field_offset) {
   DCHECK(klass != nullptr);
-  auto* instance_fields = klass->GetIFields();
-  for (size_t i = 0, count = klass->NumInstanceFields(); i < count; ++i) {
-    if (instance_fields[i].GetOffset().Uint32Value() == field_offset) {
-      return &instance_fields[i];
+  for (ArtField& field : klass->GetIFields()) {
+    if (field.GetOffset().Uint32Value() == field_offset) {
+      return &field;
     }
   }
   // We did not find field in the class: look into superclass.
@@ -62,10 +61,9 @@
 
 ArtField* ArtField::FindStaticFieldWithOffset(mirror::Class* klass, uint32_t field_offset) {
   DCHECK(klass != nullptr);
-  auto* static_fields = klass->GetSFields();
-  for (size_t i = 0, count = klass->NumStaticFields(); i < count; ++i) {
-    if (static_fields[i].GetOffset().Uint32Value() == field_offset) {
-      return &static_fields[i];
+  for (ArtField& field : klass->GetSFields()) {
+    if (field.GetOffset().Uint32Value() == field_offset) {
+      return &field;
     }
   }
   return nullptr;
diff --git a/runtime/asm_support.h b/runtime/asm_support.h
index 350a0d4..35acd42 100644
--- a/runtime/asm_support.h
+++ b/runtime/asm_support.h
@@ -141,10 +141,10 @@
 #define MIRROR_CLASS_ACCESS_FLAGS_OFFSET (36 + MIRROR_OBJECT_HEADER_SIZE)
 ADD_TEST_EQ(MIRROR_CLASS_ACCESS_FLAGS_OFFSET,
             art::mirror::Class::AccessFlagsOffset().Int32Value())
-#define MIRROR_CLASS_OBJECT_SIZE_OFFSET (112 + MIRROR_OBJECT_HEADER_SIZE)
+#define MIRROR_CLASS_OBJECT_SIZE_OFFSET (96 + MIRROR_OBJECT_HEADER_SIZE)
 ADD_TEST_EQ(MIRROR_CLASS_OBJECT_SIZE_OFFSET,
             art::mirror::Class::ObjectSizeOffset().Int32Value())
-#define MIRROR_CLASS_STATUS_OFFSET (124 + MIRROR_OBJECT_HEADER_SIZE)
+#define MIRROR_CLASS_STATUS_OFFSET (108 + MIRROR_OBJECT_HEADER_SIZE)
 ADD_TEST_EQ(MIRROR_CLASS_STATUS_OFFSET,
             art::mirror::Class::StatusOffset().Int32Value())
 
diff --git a/runtime/base/iteration_range.h b/runtime/base/iteration_range.h
index 6a0ef1f..cf02d32 100644
--- a/runtime/base/iteration_range.h
+++ b/runtime/base/iteration_range.h
@@ -49,6 +49,11 @@
   return IterationRange<Iter>(begin_it, end_it);
 }
 
+template <typename Iter>
+static inline IterationRange<Iter> MakeEmptyIterationRange(const Iter& it) {
+  return IterationRange<Iter>(it, it);
+}
+
 }  // namespace art
 
 #endif  // ART_RUNTIME_BASE_ITERATION_RANGE_H_
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 56fae81..62ba907 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1058,12 +1058,11 @@
   CHECK(obj->GetClass()->GetClass() != nullptr) << "Null class class " << obj;
   if (obj->IsClass()) {
     auto klass = obj->AsClass();
-    ArtField* fields[2] = { klass->GetSFields(), klass->GetIFields() };
-    size_t num_fields[2] = { klass->NumStaticFields(), klass->NumInstanceFields() };
-    for (size_t i = 0; i < 2; ++i) {
-      for (size_t j = 0; j < num_fields[i]; ++j) {
-        CHECK_EQ(fields[i][j].GetDeclaringClass(), klass);
-      }
+    for (ArtField& field : klass->GetIFields()) {
+      CHECK_EQ(field.GetDeclaringClass(), klass);
+    }
+    for (ArtField& field : klass->GetSFields()) {
+      CHECK_EQ(field.GetDeclaringClass(), klass);
     }
     auto* runtime = Runtime::Current();
     auto* image_space = runtime->GetHeap()->GetImageSpace();
@@ -2282,23 +2281,31 @@
   }
 }
 
-ArtField* ClassLinker::AllocArtFieldArray(Thread* self, size_t length) {
-  auto* const la = Runtime::Current()->GetLinearAlloc();
-  auto* ptr = reinterpret_cast<ArtField*>(la->AllocArray<ArtField>(self, length));
-  CHECK(ptr!= nullptr);
-  std::uninitialized_fill_n(ptr, length, ArtField());
-  return ptr;
+LengthPrefixedArray<ArtField>* ClassLinker::AllocArtFieldArray(Thread* self, size_t length) {
+  if (length == 0) {
+    return nullptr;
+  }
+  auto* ret = new(Runtime::Current()->GetLinearAlloc()->Alloc(
+      self, LengthPrefixedArray<ArtField>::ComputeSize(length))) LengthPrefixedArray<ArtField>(
+          length);
+  CHECK(ret != nullptr);
+  std::uninitialized_fill_n(&ret->At(0), length, ArtField());
+  return ret;
 }
 
-ArtMethod* ClassLinker::AllocArtMethodArray(Thread* self, size_t length) {
-  const size_t method_size = ArtMethod::ObjectSize(image_pointer_size_);
-  uintptr_t ptr = reinterpret_cast<uintptr_t>(
-      Runtime::Current()->GetLinearAlloc()->Alloc(self, method_size * length));
-  CHECK_NE(ptr, 0u);
-  for (size_t i = 0; i < length; ++i) {
-    new(reinterpret_cast<void*>(ptr + i * method_size)) ArtMethod;
+LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self, size_t length) {
+  if (length == 0) {
+    return nullptr;
   }
-  return reinterpret_cast<ArtMethod*>(ptr);
+  const size_t method_size = ArtMethod::ObjectSize(image_pointer_size_);
+  auto* ret = new (Runtime::Current()->GetLinearAlloc()->Alloc(
+      self, LengthPrefixedArray<ArtMethod>::ComputeSize(length, method_size)))
+          LengthPrefixedArray<ArtMethod>(length);
+  CHECK(ret != nullptr);
+  for (size_t i = 0; i < length; ++i) {
+    new(reinterpret_cast<void*>(&ret->At(i, method_size))) ArtMethod;
+  }
+  return ret;
 }
 
 void ClassLinker::LoadClassMembers(Thread* self, const DexFile& dex_file,
@@ -2313,8 +2320,7 @@
     // We allow duplicate definitions of the same field in a class_data_item
     // but ignore the repeated indexes here, b/21868015.
     ClassDataItemIterator it(dex_file, class_data);
-    ArtField* sfields =
-        it.NumStaticFields() != 0 ? AllocArtFieldArray(self, it.NumStaticFields()) : nullptr;
+    LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, it.NumStaticFields());
     size_t num_sfields = 0;
     uint32_t last_field_idx = 0u;
     for (; it.HasNextStaticField(); it.Next()) {
@@ -2322,17 +2328,15 @@
       DCHECK_GE(field_idx, last_field_idx);  // Ordering enforced by DexFileVerifier.
       if (num_sfields == 0 || LIKELY(field_idx > last_field_idx)) {
         DCHECK_LT(num_sfields, it.NumStaticFields());
-        LoadField(it, klass, &sfields[num_sfields]);
+        LoadField(it, klass, &sfields->At(num_sfields));
         ++num_sfields;
         last_field_idx = field_idx;
       }
     }
-    klass->SetSFields(sfields);
-    klass->SetNumStaticFields(num_sfields);
+    klass->SetSFieldsPtr(sfields);
     DCHECK_EQ(klass->NumStaticFields(), num_sfields);
     // Load instance fields.
-    ArtField* ifields =
-        it.NumInstanceFields() != 0 ? AllocArtFieldArray(self, it.NumInstanceFields()) : nullptr;
+    LengthPrefixedArray<ArtField>* ifields = AllocArtFieldArray(self, it.NumInstanceFields());
     size_t num_ifields = 0u;
     last_field_idx = 0u;
     for (; it.HasNextInstanceField(); it.Next()) {
@@ -2340,7 +2344,7 @@
       DCHECK_GE(field_idx, last_field_idx);  // Ordering enforced by DexFileVerifier.
       if (num_ifields == 0 || LIKELY(field_idx > last_field_idx)) {
         DCHECK_LT(num_ifields, it.NumInstanceFields());
-        LoadField(it, klass, &ifields[num_ifields]);
+        LoadField(it, klass, &ifields->At(num_ifields));
         ++num_ifields;
         last_field_idx = field_idx;
       }
@@ -2352,18 +2356,11 @@
           << ", unique instance fields: " << num_ifields << "/" << it.NumInstanceFields() << ")";
       // NOTE: Not shrinking the over-allocated sfields/ifields.
     }
-    klass->SetIFields(ifields);
-    klass->SetNumInstanceFields(num_ifields);
+    klass->SetIFieldsPtr(ifields);
     DCHECK_EQ(klass->NumInstanceFields(), num_ifields);
     // Load methods.
-    if (it.NumDirectMethods() != 0) {
-      klass->SetDirectMethodsPtr(AllocArtMethodArray(self, it.NumDirectMethods()));
-    }
-    klass->SetNumDirectMethods(it.NumDirectMethods());
-    if (it.NumVirtualMethods() != 0) {
-      klass->SetVirtualMethodsPtr(AllocArtMethodArray(self, it.NumVirtualMethods()));
-    }
-    klass->SetNumVirtualMethods(it.NumVirtualMethods());
+    klass->SetDirectMethodsPtr(AllocArtMethodArray(self, it.NumDirectMethods()));
+    klass->SetVirtualMethodsPtr(AllocArtMethodArray(self, it.NumVirtualMethods()));
     size_t class_def_method_index = 0;
     uint32_t last_dex_method_index = DexFile::kDexNoIndex;
     size_t last_class_def_method_index = 0;
@@ -2807,12 +2804,10 @@
   return nullptr;
 }
 
-void ClassLinker::UpdateClassVirtualMethods(mirror::Class* klass, ArtMethod* new_methods,
-                                            size_t new_num_methods) {
-  // TODO: Fix the race condition here. b/22832610
-  klass->SetNumVirtualMethods(new_num_methods);
+void ClassLinker::UpdateClassVirtualMethods(mirror::Class* klass,
+                                            LengthPrefixedArray<ArtMethod>* new_methods) {
   klass->SetVirtualMethodsPtr(new_methods);
-  // Need to mark the card so that the remembered sets and mod union tables get update.
+  // Need to mark the card so that the remembered sets and mod union tables get updated.
   Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(klass);
 }
 
@@ -3257,25 +3252,24 @@
 
   // Instance fields are inherited, but we add a couple of static fields...
   const size_t num_fields = 2;
-  ArtField* sfields = AllocArtFieldArray(self, num_fields);
-  klass->SetSFields(sfields);
-  klass->SetNumStaticFields(num_fields);
+  LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, num_fields);
+  klass->SetSFieldsPtr(sfields);
 
   // 1. Create a static field 'interfaces' that holds the _declared_ interfaces implemented by
   // our proxy, so Class.getInterfaces doesn't return the flattened set.
-  ArtField* interfaces_sfield = &sfields[0];
-  interfaces_sfield->SetDexFieldIndex(0);
-  interfaces_sfield->SetDeclaringClass(klass.Get());
-  interfaces_sfield->SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
+  ArtField& interfaces_sfield = sfields->At(0);
+  interfaces_sfield.SetDexFieldIndex(0);
+  interfaces_sfield.SetDeclaringClass(klass.Get());
+  interfaces_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
 
   // 2. Create a static field 'throws' that holds exceptions thrown by our methods.
-  ArtField* throws_sfield = &sfields[1];
-  throws_sfield->SetDexFieldIndex(1);
-  throws_sfield->SetDeclaringClass(klass.Get());
-  throws_sfield->SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
+  ArtField& throws_sfield = sfields->At(1);
+  throws_sfield.SetDexFieldIndex(1);
+  throws_sfield.SetDeclaringClass(klass.Get());
+  throws_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
 
   // Proxies have 1 direct method, the constructor
-  auto* directs = AllocArtMethodArray(self, 1);
+  LengthPrefixedArray<ArtMethod>* directs = AllocArtMethodArray(self, 1);
   // Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we
   // want to throw OOM in the future.
   if (UNLIKELY(directs == nullptr)) {
@@ -3283,13 +3277,12 @@
     return nullptr;
   }
   klass->SetDirectMethodsPtr(directs);
-  klass->SetNumDirectMethods(1u);
   CreateProxyConstructor(klass, klass->GetDirectMethodUnchecked(0, image_pointer_size_));
 
   // Create virtual method using specified prototypes.
   auto h_methods = hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Method>*>(methods));
   DCHECK_EQ(h_methods->GetClass(), mirror::Method::ArrayClass())
-    << PrettyClass(h_methods->GetClass());
+      << PrettyClass(h_methods->GetClass());
   const size_t num_virtual_methods = h_methods->GetLength();
   auto* virtuals = AllocArtMethodArray(self, num_virtual_methods);
   // Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we
@@ -3299,7 +3292,6 @@
     return nullptr;
   }
   klass->SetVirtualMethodsPtr(virtuals);
-  klass->SetNumVirtualMethods(num_virtual_methods);
   for (size_t i = 0; i < num_virtual_methods; ++i) {
     auto* virtual_method = klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
     auto* prototype = h_methods->Get(i)->GetArtMethod();
@@ -3331,12 +3323,12 @@
   CHECK_NE(klass.Get(), new_class.Get());
   klass.Assign(new_class.Get());
 
-  CHECK_EQ(interfaces_sfield->GetDeclaringClass(), klass.Get());
-  interfaces_sfield->SetObject<false>(klass.Get(),
-                                      soa.Decode<mirror::ObjectArray<mirror::Class>*>(interfaces));
-  CHECK_EQ(throws_sfield->GetDeclaringClass(), klass.Get());
-  throws_sfield->SetObject<false>(klass.Get(),
-      soa.Decode<mirror::ObjectArray<mirror::ObjectArray<mirror::Class> >*>(throws));
+  CHECK_EQ(interfaces_sfield.GetDeclaringClass(), klass.Get());
+  interfaces_sfield.SetObject<false>(klass.Get(),
+                                     soa.Decode<mirror::ObjectArray<mirror::Class>*>(interfaces));
+  CHECK_EQ(throws_sfield.GetDeclaringClass(), klass.Get());
+  throws_sfield.SetObject<false>(
+      klass.Get(), soa.Decode<mirror::ObjectArray<mirror::ObjectArray<mirror::Class> >*>(throws));
 
   {
     // Lock on klass is released. Lock new class object.
@@ -3346,7 +3338,7 @@
 
   // sanity checks
   if (kIsDebugBuild) {
-    CHECK(klass->GetIFields() == nullptr);
+    CHECK(klass->GetIFieldsPtr() == nullptr);
     CheckProxyConstructor(klass->GetDirectMethod(0, image_pointer_size_));
 
     for (size_t i = 0; i < num_virtual_methods; ++i) {
@@ -3958,19 +3950,17 @@
 
 void ClassLinker::FixupTemporaryDeclaringClass(mirror::Class* temp_class,
                                                mirror::Class* new_class) {
-  ArtField* fields = new_class->GetIFields();
   DCHECK_EQ(temp_class->NumInstanceFields(), 0u);
-  for (size_t i = 0, count = new_class->NumInstanceFields(); i < count; i++) {
-    if (fields[i].GetDeclaringClass() == temp_class) {
-      fields[i].SetDeclaringClass(new_class);
+  for (ArtField& field : new_class->GetIFields()) {
+    if (field.GetDeclaringClass() == temp_class) {
+      field.SetDeclaringClass(new_class);
     }
   }
 
-  fields = new_class->GetSFields();
   DCHECK_EQ(temp_class->NumStaticFields(), 0u);
-  for (size_t i = 0, count = new_class->NumStaticFields(); i < count; i++) {
-    if (fields[i].GetDeclaringClass() == temp_class) {
-      fields[i].SetDeclaringClass(new_class);
+  for (ArtField& field : new_class->GetSFields()) {
+    if (field.GetDeclaringClass() == temp_class) {
+      field.SetDeclaringClass(new_class);
     }
   }
 
@@ -4057,10 +4047,10 @@
     // same ArtFields with the same If this occurs, it causes bugs in remembered sets since the GC
     // may not see any references to the from space and clean the card. Though there was references
     // to the from space that got marked by the first class.
-    klass->SetNumDirectMethods(0);
-    klass->SetNumVirtualMethods(0);
-    klass->SetNumStaticFields(0);
-    klass->SetNumInstanceFields(0);
+    klass->SetDirectMethodsPtrUnchecked(nullptr);
+    klass->SetVirtualMethodsPtr(nullptr);
+    klass->SetSFieldsPtrUnchecked(nullptr);
+    klass->SetIFieldsPtrUnchecked(nullptr);
     if (UNLIKELY(h_new_class.Get() == nullptr)) {
       self->AssertPendingOOMException();
       mirror::Class::SetStatus(klass, mirror::Class::kStatusError, self);
@@ -4863,7 +4853,7 @@
       const bool super_interface = is_super && extend_super_iftable;
       auto method_array(hs2.NewHandle(iftable->GetMethodArray(i)));
 
-      ArtMethod* input_virtual_methods = nullptr;
+      LengthPrefixedArray<ArtMethod>* input_virtual_methods = nullptr;
       Handle<mirror::PointerArray> input_vtable_array = NullHandle<mirror::PointerArray>();
       int32_t input_array_length = 0;
       if (super_interface) {
@@ -4898,8 +4888,7 @@
         // matter which direction we go.  We walk it backward anyway.)
         for (k = input_array_length - 1; k >= 0; --k) {
           ArtMethod* vtable_method = input_virtual_methods != nullptr ?
-              reinterpret_cast<ArtMethod*>(
-                  reinterpret_cast<uintptr_t>(input_virtual_methods) + method_size * k) :
+              &input_virtual_methods->At(k, method_size) :
               input_vtable_array->GetElementPtrSize<ArtMethod*>(k, image_pointer_size_);
           ArtMethod* vtable_method_for_name_comparison =
               vtable_method->GetInterfaceMethodIfProxy(image_pointer_size_);
@@ -4955,13 +4944,17 @@
     const size_t old_method_count = klass->NumVirtualMethods();
     const size_t new_method_count = old_method_count + miranda_methods.size();
     // Attempt to realloc to save RAM if possible.
-    ArtMethod* old_virtuals = klass->GetVirtualMethodsPtr();
+    LengthPrefixedArray<ArtMethod>* old_virtuals = klass->GetVirtualMethodsPtr();
     // The Realloced virtual methods aren't visible from the class roots, so there is no issue
     // where GCs could attempt to mark stale pointers due to memcpy. And since we overwrite the
     // realloced memory with out->CopyFrom, we are guaranteed to have objects in the to space since
     // CopyFrom has internal read barriers.
-    auto* virtuals = reinterpret_cast<ArtMethod*>(runtime->GetLinearAlloc()->Realloc(
-        self, old_virtuals, old_method_count * method_size, new_method_count * method_size));
+    const size_t old_size = old_virtuals != nullptr ?
+        LengthPrefixedArray<ArtMethod>::ComputeSize(old_method_count, method_size) : 0u;
+    const size_t new_size = LengthPrefixedArray<ArtMethod>::ComputeSize(new_method_count,
+                                                                        method_size);
+    auto* virtuals = new(runtime->GetLinearAlloc()->Realloc(
+        self, old_virtuals, old_size, new_size))LengthPrefixedArray<ArtMethod>(new_method_count);
     if (UNLIKELY(virtuals == nullptr)) {
       self->AssertPendingOOMException();
       self->EndAssertNoThreadSuspension(old_cause);
@@ -4970,7 +4963,7 @@
     ScopedArenaUnorderedMap<ArtMethod*, ArtMethod*> move_table(allocator.Adapter());
     if (virtuals != old_virtuals) {
       // Maps from heap allocated miranda method to linear alloc miranda method.
-      StrideIterator<ArtMethod> out(reinterpret_cast<uintptr_t>(virtuals), method_size);
+      StrideIterator<ArtMethod> out = virtuals->Begin(method_size);
       // Copy over the old methods + miranda methods.
       for (auto& m : klass->GetVirtualMethods(image_pointer_size_)) {
         move_table.emplace(&m, &*out);
@@ -4980,8 +4973,7 @@
         ++out;
       }
     }
-    StrideIterator<ArtMethod> out(
-        reinterpret_cast<uintptr_t>(virtuals) + old_method_count * method_size, method_size);
+    StrideIterator<ArtMethod> out(virtuals->Begin(method_size) + old_method_count);
     // Copy over miranda methods before copying vtable since CopyOf may cause thread suspension and
     // we want the roots of the miranda methods to get visited.
     for (ArtMethod* mir_method : miranda_methods) {
@@ -4990,7 +4982,7 @@
       move_table.emplace(mir_method, &*out);
       ++out;
     }
-    UpdateClassVirtualMethods(klass.Get(), virtuals, new_method_count);
+    UpdateClassVirtualMethods(klass.Get(), virtuals);
     // Done copying methods, they are all roots in the class now, so we can end the no thread
     // suspension assert.
     self->EndAssertNoThreadSuspension(old_cause);
@@ -5003,8 +4995,7 @@
       self->AssertPendingOOMException();
       return false;
     }
-    out = StrideIterator<ArtMethod>(
-        reinterpret_cast<uintptr_t>(virtuals) + old_method_count * method_size, method_size);
+    out = StrideIterator<ArtMethod>(virtuals->Begin(method_size) + old_method_count);
     size_t vtable_pos = old_vtable_count;
     for (size_t i = old_method_count; i < new_method_count; ++i) {
       // Leave the declaring class alone as type indices are relative to it
@@ -5058,7 +5049,7 @@
     }
     // Put some random garbage in old virtuals to help find stale pointers.
     if (virtuals != old_virtuals) {
-      memset(old_virtuals, 0xFEu, ArtMethod::ObjectSize(image_pointer_size_) * old_method_count);
+      memset(old_virtuals, 0xFEu, old_size);
     }
   } else {
     self->EndAssertNoThreadSuspension(old_cause);
@@ -5120,7 +5111,8 @@
                              size_t* class_size) {
   self->AllowThreadSuspension();
   const size_t num_fields = is_static ? klass->NumStaticFields() : klass->NumInstanceFields();
-  ArtField* const fields = is_static ? klass->GetSFields() : klass->GetIFields();
+  LengthPrefixedArray<ArtField>* const fields = is_static ? klass->GetSFieldsPtr() :
+      klass->GetIFieldsPtr();
 
   // Initialize field_offset
   MemberOffset field_offset(0);
@@ -5143,7 +5135,7 @@
   const char* old_no_suspend_cause = self->StartAssertNoThreadSuspension(
       "Naked ArtField references in deque");
   for (size_t i = 0; i < num_fields; i++) {
-    grouped_and_sorted_fields.push_back(&fields[i]);
+    grouped_and_sorted_fields.push_back(&fields->At(i));
   }
   std::sort(grouped_and_sorted_fields.begin(), grouped_and_sorted_fields.end(),
             LinkFieldsComparator());
@@ -5188,7 +5180,8 @@
     // We know there are no non-reference fields in the Reference classes, and we know
     // that 'referent' is alphabetically last, so this is easy...
     CHECK_EQ(num_reference_fields, num_fields) << PrettyClass(klass.Get());
-    CHECK_STREQ(fields[num_fields - 1].GetName(), "referent") << PrettyClass(klass.Get());
+    CHECK_STREQ(fields->At(num_fields - 1).GetName(), "referent")
+        << PrettyClass(klass.Get());
     --num_reference_fields;
   }
 
@@ -5222,15 +5215,15 @@
                                     sizeof(mirror::HeapReference<mirror::Object>));
     MemberOffset current_ref_offset = start_ref_offset;
     for (size_t i = 0; i < num_fields; i++) {
-      ArtField* field = &fields[i];
+      ArtField* field = &fields->At(i);
       VLOG(class_linker) << "LinkFields: " << (is_static ? "static" : "instance")
           << " class=" << PrettyClass(klass.Get()) << " field=" << PrettyField(field) << " offset="
           << field->GetOffsetDuringLinking();
       if (i != 0) {
-        ArtField* const prev_field = &fields[i - 1];
+        ArtField* const prev_field = &fields->At(i - 1);
         // NOTE: The field names can be the same. This is not possible in the Java language
         // but it's valid Java/dex bytecode and for example proguard can generate such bytecode.
-        CHECK_LE(strcmp(prev_field->GetName(), field->GetName()), 0);
+        DCHECK_LE(strcmp(prev_field->GetName(), field->GetName()), 0);
       }
       Primitive::Type type = field->GetTypeAsPrimitiveType();
       bool is_primitive = type != Primitive::kPrimNot;
@@ -5868,7 +5861,8 @@
 }
 
 ArtMethod* ClassLinker::CreateRuntimeMethod() {
-  ArtMethod* method = AllocArtMethodArray(Thread::Current(), 1);
+  const size_t method_size = ArtMethod::ObjectSize(image_pointer_size_);
+  ArtMethod* method = &AllocArtMethodArray(Thread::Current(), 1)->At(0, method_size);
   CHECK(method != nullptr);
   method->SetDexMethodIndex(DexFile::kDexNoIndex);
   CHECK(method->IsRuntimeMethod());
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index c53ff61..17d6be6 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -344,9 +344,9 @@
   mirror::ObjectArray<mirror::String>* AllocStringArray(Thread* self, size_t length)
       SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);
 
-  ArtField* AllocArtFieldArray(Thread* self, size_t length);
+  LengthPrefixedArray<ArtField>* AllocArtFieldArray(Thread* self, size_t length);
 
-  ArtMethod* AllocArtMethodArray(Thread* self, size_t length);
+  LengthPrefixedArray<ArtMethod>* AllocArtMethodArray(Thread* self, size_t length);
 
   mirror::PointerArray* AllocPointerArray(Thread* self, size_t length)
       SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);
@@ -694,9 +694,10 @@
   bool CanWeInitializeClass(mirror::Class* klass, bool can_init_statics, bool can_init_parents)
       SHARED_REQUIRES(Locks::mutator_lock_);
 
-  void UpdateClassVirtualMethods(mirror::Class* klass, ArtMethod* new_methods,
-                                 size_t new_num_methods)
-      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!Locks::classlinker_classes_lock_);
+  void UpdateClassVirtualMethods(mirror::Class* klass,
+                                 LengthPrefixedArray<ArtMethod>* new_methods)
+      SHARED_REQUIRES(Locks::mutator_lock_)
+      REQUIRES(!Locks::classlinker_classes_lock_);
 
   std::vector<const DexFile*> boot_class_path_;
   std::vector<std::unique_ptr<const DexFile>> opened_dex_files_;
diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc
index 3f8259a..4212dda 100644
--- a/runtime/class_linker_test.cc
+++ b/runtime/class_linker_test.cc
@@ -500,14 +500,10 @@
     addOffset(OFFSETOF_MEMBER(mirror::Class, ifields_), "iFields");
     addOffset(OFFSETOF_MEMBER(mirror::Class, iftable_), "ifTable");
     addOffset(OFFSETOF_MEMBER(mirror::Class, name_), "name");
-    addOffset(OFFSETOF_MEMBER(mirror::Class, num_direct_methods_), "numDirectMethods");
-    addOffset(OFFSETOF_MEMBER(mirror::Class, num_instance_fields_), "numInstanceFields");
     addOffset(OFFSETOF_MEMBER(mirror::Class, num_reference_instance_fields_),
               "numReferenceInstanceFields");
     addOffset(OFFSETOF_MEMBER(mirror::Class, num_reference_static_fields_),
               "numReferenceStaticFields");
-    addOffset(OFFSETOF_MEMBER(mirror::Class, num_static_fields_), "numStaticFields");
-    addOffset(OFFSETOF_MEMBER(mirror::Class, num_virtual_methods_), "numVirtualMethods");
     addOffset(OFFSETOF_MEMBER(mirror::Class, object_size_), "objectSize");
     addOffset(OFFSETOF_MEMBER(mirror::Class, primitive_type_), "primitiveType");
     addOffset(OFFSETOF_MEMBER(mirror::Class, reference_instance_offsets_),
@@ -841,21 +837,21 @@
   NullHandle<mirror::ClassLoader> class_loader;
   mirror::Class* c;
   c = class_linker_->FindClass(soa.Self(), "Ljava/lang/Boolean;", class_loader);
-  EXPECT_STREQ("value", c->GetIFields()[0].GetName());
+  EXPECT_STREQ("value", c->GetIFieldsPtr()->At(0).GetName());
   c = class_linker_->FindClass(soa.Self(), "Ljava/lang/Byte;", class_loader);
-  EXPECT_STREQ("value", c->GetIFields()[0].GetName());
+  EXPECT_STREQ("value", c->GetIFieldsPtr()->At(0).GetName());
   c = class_linker_->FindClass(soa.Self(), "Ljava/lang/Character;", class_loader);
-  EXPECT_STREQ("value", c->GetIFields()[0].GetName());
+  EXPECT_STREQ("value", c->GetIFieldsPtr()->At(0).GetName());
   c = class_linker_->FindClass(soa.Self(), "Ljava/lang/Double;", class_loader);
-  EXPECT_STREQ("value", c->GetIFields()[0].GetName());
+  EXPECT_STREQ("value", c->GetIFieldsPtr()->At(0).GetName());
   c = class_linker_->FindClass(soa.Self(), "Ljava/lang/Float;", class_loader);
-  EXPECT_STREQ("value", c->GetIFields()[0].GetName());
+  EXPECT_STREQ("value", c->GetIFieldsPtr()->At(0).GetName());
   c = class_linker_->FindClass(soa.Self(), "Ljava/lang/Integer;", class_loader);
-  EXPECT_STREQ("value", c->GetIFields()[0].GetName());
+  EXPECT_STREQ("value", c->GetIFieldsPtr()->At(0).GetName());
   c = class_linker_->FindClass(soa.Self(), "Ljava/lang/Long;", class_loader);
-  EXPECT_STREQ("value", c->GetIFields()[0].GetName());
+  EXPECT_STREQ("value", c->GetIFieldsPtr()->At(0).GetName());
   c = class_linker_->FindClass(soa.Self(), "Ljava/lang/Short;", class_loader);
-  EXPECT_STREQ("value", c->GetIFields()[0].GetName());
+  EXPECT_STREQ("value", c->GetIFieldsPtr()->At(0).GetName());
 }
 
 TEST_F(ClassLinkerTest, TwoClassLoadersOneClass) {
diff --git a/runtime/gc/accounting/space_bitmap.cc b/runtime/gc/accounting/space_bitmap.cc
index 7914b66..b43f77f 100644
--- a/runtime/gc/accounting/space_bitmap.cc
+++ b/runtime/gc/accounting/space_bitmap.cc
@@ -195,11 +195,9 @@
     WalkInstanceFields(visited, callback, obj, super, arg);
   }
   // Walk instance fields
-  auto* fields = klass->GetIFields();
-  for (size_t i = 0, count = klass->NumInstanceFields(); i < count; ++i) {
-    ArtField* field = &fields[i];
-    if (!field->IsPrimitiveType()) {
-      mirror::Object* value = field->GetObj(obj);
+  for (ArtField& field : klass->GetIFields()) {
+    if (!field.IsPrimitiveType()) {
+      mirror::Object* value = field.GetObj(obj);
       if (value != nullptr) {
         WalkFieldsInOrder(visited, callback, value, arg);
       }
@@ -222,11 +220,9 @@
   WalkInstanceFields(visited, callback, obj, klass, arg);
   // Walk static fields of a Class
   if (obj->IsClass()) {
-    auto* sfields = klass->GetSFields();
-    for (size_t i = 0, count = klass->NumStaticFields(); i < count; ++i) {
-      ArtField* field = &sfields[i];
-      if (!field->IsPrimitiveType()) {
-        mirror::Object* value = field->GetObj(nullptr);
+    for (ArtField& field : klass->GetSFields()) {
+      if (!field.IsPrimitiveType()) {
+        mirror::Object* value = field.GetObj(nullptr);
         if (value != nullptr) {
           WalkFieldsInOrder(visited, callback, value, arg);
         }
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 5f617bd..59e39df 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -2912,14 +2912,10 @@
           if (!obj->IsObjectArray()) {
             mirror::Class* klass = is_static ? obj->AsClass() : obj->GetClass();
             CHECK(klass != nullptr);
-            auto* fields = is_static ? klass->GetSFields() : klass->GetIFields();
-            auto num_fields = is_static ? klass->NumStaticFields() : klass->NumInstanceFields();
-            CHECK_EQ(fields == nullptr, num_fields == 0u);
-            for (size_t i = 0; i < num_fields; ++i) {
-              ArtField* cur = &fields[i];
-              if (cur->GetOffset().Int32Value() == offset.Int32Value()) {
+            for (ArtField& field : is_static ? klass->GetSFields() : klass->GetIFields()) {
+              if (field.GetOffset().Int32Value() == offset.Int32Value()) {
                 LOG(ERROR) << (is_static ? "Static " : "") << "field in the live stack is "
-                          << PrettyField(cur);
+                           << PrettyField(&field);
                 break;
               }
             }
diff --git a/runtime/image.cc b/runtime/image.cc
index 44193da..ba1e58b 100644
--- a/runtime/image.cc
+++ b/runtime/image.cc
@@ -24,7 +24,7 @@
 namespace art {
 
 const uint8_t ImageHeader::kImageMagic[] = { 'a', 'r', 't', '\n' };
-const uint8_t ImageHeader::kImageVersion[] = { '0', '1', '7', '\0' };
+const uint8_t ImageHeader::kImageVersion[] = { '0', '1', '8', '\0' };
 
 ImageHeader::ImageHeader(uint32_t image_begin,
                          uint32_t image_size,
@@ -147,4 +147,26 @@
   return os << "size=" << section.Size() << " range=" << section.Offset() << "-" << section.End();
 }
 
+void ImageSection::VisitPackedArtFields(ArtFieldVisitor* visitor, uint8_t* base) const {
+  for (size_t pos = 0; pos < Size(); ) {
+    auto* array = reinterpret_cast<LengthPrefixedArray<ArtField>*>(base + Offset() + pos);
+    for (size_t i = 0; i < array->Length(); ++i) {
+      visitor->Visit(&array->At(i, sizeof(ArtField)));
+    }
+    pos += array->ComputeSize(array->Length(), sizeof(ArtField));
+  }
+}
+
+void ImageSection::VisitPackedArtMethods(ArtMethodVisitor* visitor,
+                                         uint8_t* base,
+                                         size_t method_size) const {
+  for (size_t pos = 0; pos < Size(); ) {
+    auto* array = reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(base + Offset() + pos);
+    for (size_t i = 0; i < array->Length(); ++i) {
+      visitor->Visit(&array->At(i, method_size));
+    }
+    pos += array->ComputeSize(array->Length(), method_size);
+  }
+}
+
 }  // namespace art
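
As an aside on how these sections get consumed: a packed section is just a run of LengthPrefixedArray blobs laid end to end, so the walk reads one length prefix, visits that many elements at the caller-supplied stride, and then advances by ComputeSize() to the next array. The standalone sketch below mirrors that loop with stand-in names (Elem, WalkPackedSection, the lambda visitor); it is illustrative only and not part of the runtime API.

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

// Stand-in element type (plays the role of ArtField / ArtMethod).
struct Elem { uint32_t id; };

// A packed section is a sequence of length-prefixed arrays laid end to end:
//   [uint64_t length][length * element_size bytes][uint64_t length][...] ...
// Walk it the same way the VisitPackedArtFields/Methods loops above do.
template <typename Visitor>
void WalkPackedSection(const uint8_t* base, size_t section_size, size_t element_size,
                       Visitor visit) {
  for (size_t pos = 0; pos < section_size; ) {
    uint64_t length = 0;
    std::memcpy(&length, base + pos, sizeof(length));       // read the length prefix
    const uint8_t* data = base + pos + sizeof(length);      // first element
    for (uint64_t i = 0; i < length; ++i) {
      Elem e;
      std::memcpy(&e, data + i * element_size, sizeof(Elem));
      visit(e);
    }
    // Advance past header + elements, i.e. what ComputeSize() returns.
    pos += sizeof(length) + static_cast<size_t>(length) * element_size;
  }
}

int main() {
  // Build a fake section holding two arrays (2 and 3 elements).
  std::vector<uint8_t> section;
  auto append = [&](const std::vector<Elem>& elems) {
    uint64_t length = elems.size();
    const uint8_t* p = reinterpret_cast<const uint8_t*>(&length);
    section.insert(section.end(), p, p + sizeof(length));
    p = reinterpret_cast<const uint8_t*>(elems.data());
    section.insert(section.end(), p, p + elems.size() * sizeof(Elem));
  };
  append({{1}, {2}});
  append({{3}, {4}, {5}});
  WalkPackedSection(section.data(), section.size(), sizeof(Elem),
                    [](Elem e) { std::printf("visited %u\n", e.id); });
  return 0;
}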
diff --git a/runtime/image.h b/runtime/image.h
index cc98ba6..eb26f7f 100644
--- a/runtime/image.h
+++ b/runtime/image.h
@@ -24,6 +24,23 @@
 
 namespace art {
 
+class ArtField;
+class ArtMethod;
+
+class ArtMethodVisitor {
+ public:
+  virtual ~ArtMethodVisitor() {}
+
+  virtual void Visit(ArtMethod* method) = 0;
+};
+
+class ArtFieldVisitor {
+ public:
+  virtual ~ArtFieldVisitor() {}
+
+  virtual void Visit(ArtField* field) = 0;
+};
+
 class PACKED(4) ImageSection {
  public:
   ImageSection() : offset_(0), size_(0) { }
@@ -47,6 +64,12 @@
     return offset - offset_ < size_;
   }
 
+  // Visit ArtMethods in the section starting at base.
+  void VisitPackedArtMethods(ArtMethodVisitor* visitor, uint8_t* base, size_t method_size) const;
+
+  // Visit ArtFields in the section starting at base.
+  void VisitPackedArtFields(ArtFieldVisitor* visitor, uint8_t* base) const;
+
  private:
   uint32_t offset_;
   uint32_t size_;
diff --git a/runtime/interpreter/unstarted_runtime.cc b/runtime/interpreter/unstarted_runtime.cc
index 22701ac..c559389 100644
--- a/runtime/interpreter/unstarted_runtime.cc
+++ b/runtime/interpreter/unstarted_runtime.cc
@@ -229,20 +229,16 @@
   mirror::Class* klass = shadow_frame->GetVRegReference(arg_offset)->AsClass();
   mirror::String* name2 = shadow_frame->GetVRegReference(arg_offset + 1)->AsString();
   ArtField* found = nullptr;
-  ArtField* fields = klass->GetIFields();
-  for (int32_t i = 0, count = klass->NumInstanceFields(); i < count; ++i) {
-    ArtField* f = &fields[i];
-    if (name2->Equals(f->GetName())) {
-      found = f;
+  for (ArtField& field : klass->GetIFields()) {
+    if (name2->Equals(field.GetName())) {
+      found = &field;
       break;
     }
   }
   if (found == nullptr) {
-    fields = klass->GetSFields();
-    for (int32_t i = 0, count = klass->NumStaticFields(); i < count; ++i) {
-      ArtField* f = &fields[i];
-      if (name2->Equals(f->GetName())) {
-        found = f;
+    for (ArtField& field : klass->GetSFields()) {
+      if (name2->Equals(field.GetName())) {
+        found = &field;
         break;
       }
     }
diff --git a/runtime/jit/jit_code_cache_test.cc b/runtime/jit/jit_code_cache_test.cc
index cd123b9..555ad7c 100644
--- a/runtime/jit/jit_code_cache_test.cc
+++ b/runtime/jit/jit_code_cache_test.cc
@@ -50,7 +50,7 @@
   ASSERT_TRUE(code_cache->ContainsCodePtr(reserved_code));
   ASSERT_EQ(code_cache->NumMethods(), 1u);
   ClassLinker* const cl = Runtime::Current()->GetClassLinker();
-  auto* method = cl->AllocArtMethodArray(soa.Self(), 1);
+  ArtMethod* method = &cl->AllocArtMethodArray(soa.Self(), 1)->At(0, 0);
   ASSERT_FALSE(code_cache->ContainsMethod(method));
   method->SetEntryPointFromQuickCompiledCode(reserved_code);
   ASSERT_TRUE(code_cache->ContainsMethod(method));
diff --git a/runtime/length_prefixed_array.h b/runtime/length_prefixed_array.h
new file mode 100644
index 0000000..82176e3
--- /dev/null
+++ b/runtime/length_prefixed_array.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_LENGTH_PREFIXED_ARRAY_H_
+#define ART_RUNTIME_LENGTH_PREFIXED_ARRAY_H_
+
+#include <stddef.h>  // for offsetof()
+
+#include "linear_alloc.h"
+#include "stride_iterator.h"
+#include "base/iteration_range.h"
+
+namespace art {
+
+template<typename T>
+class LengthPrefixedArray {
+ public:
+  explicit LengthPrefixedArray(uint64_t length) : length_(length) {}
+
+  T& At(size_t index, size_t element_size = sizeof(T)) {
+    DCHECK_LT(index, length_);
+    return *reinterpret_cast<T*>(&data_[0] + index * element_size);
+  }
+
+  StrideIterator<T> Begin(size_t element_size = sizeof(T)) {
+    return StrideIterator<T>(reinterpret_cast<T*>(&data_[0]), element_size);
+  }
+
+  StrideIterator<T> End(size_t element_size = sizeof(T)) {
+    return StrideIterator<T>(reinterpret_cast<T*>(&data_[0] + element_size * length_),
+                             element_size);
+  }
+
+  static size_t OffsetOfElement(size_t index, size_t element_size = sizeof(T)) {
+    return offsetof(LengthPrefixedArray<T>, data_) + index * element_size;
+  }
+
+  static size_t ComputeSize(size_t num_elements, size_t element_size = sizeof(T)) {
+    return sizeof(LengthPrefixedArray<T>) + num_elements * element_size;
+  }
+
+  uint64_t Length() const {
+    return length_;
+  }
+
+ private:
+  uint64_t length_;  // 64 bits for padding reasons.
+  uint8_t data_[0];
+};
+
+// Returns empty iteration range if the array is null.
+template<typename T>
+IterationRange<StrideIterator<T>> MakeIterationRangeFromLengthPrefixedArray(
+    LengthPrefixedArray<T>* arr, size_t element_size) {
+  return arr != nullptr ?
+      MakeIterationRange(arr->Begin(element_size), arr->End(element_size)) :
+      MakeEmptyIterationRange(StrideIterator<T>(nullptr, 0));
+}
+
+}  // namespace art
+
+#endif  // ART_RUNTIME_LENGTH_PREFIXED_ARRAY_H_
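
The point of the header above is that the element count lives in the same native allocation as the elements, and that elements may be wider than sizeof(T) (an ArtMethod's size depends on the target pointer size), which is why At(), Begin() and End() take an explicit element_size. Below is a minimal, self-contained sketch of that usage pattern over raw storage; the local LengthPrefixedArray model and the Method struct are simplified stand-ins, not the runtime types.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <new>
#include <vector>

// Simplified model of the runtime class: a 64-bit length followed by raw
// element storage addressed with an explicit element size.
template <typename T>
class LengthPrefixedArray {
 public:
  explicit LengthPrefixedArray(uint64_t length) : length_(length) {}

  T& At(size_t index, size_t element_size = sizeof(T)) {
    assert(index < length_);
    return *reinterpret_cast<T*>(&data_[0] + index * element_size);
  }

  static size_t ComputeSize(size_t num_elements, size_t element_size = sizeof(T)) {
    return sizeof(LengthPrefixedArray<T>) + num_elements * element_size;
  }

  uint64_t Length() const { return length_; }

 private:
  uint64_t length_;
  uint8_t data_[1];  // element storage follows; the runtime uses data_[0]
};

struct Method { uint32_t dex_index; };  // stand-in for ArtMethod

int main() {
  // Pretend each element occupies 24 bytes (e.g. pointer-size-dependent
  // payload), i.e. wider than sizeof(Method).
  const size_t element_size = 24;
  const size_t count = 3;
  std::vector<uint8_t> storage(LengthPrefixedArray<Method>::ComputeSize(count, element_size));
  auto* array = new (storage.data()) LengthPrefixedArray<Method>(count);
  assert(array->Length() == count);
  for (size_t i = 0; i < count; ++i) {
    array->At(i, element_size).dex_index = static_cast<uint32_t>(100 + i);
  }
  for (size_t i = 0; i < count; ++i) {
    std::printf("method %zu: dex_index=%u\n", i, array->At(i, element_size).dex_index);
  }
  return 0;
}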
diff --git a/runtime/linear_alloc.h b/runtime/linear_alloc.h
index 743ee77..1b21527 100644
--- a/runtime/linear_alloc.h
+++ b/runtime/linear_alloc.h
@@ -33,7 +33,7 @@
   // Realloc never frees the input pointer, it is the caller's job to do this if necessary.
   void* Realloc(Thread* self, void* ptr, size_t old_size, size_t new_size) REQUIRES(!lock_);
 
-  // Allocate and construct an array of structs of type T.
+  // Allocate an array of structs of type T.
   template<class T>
   T* AllocArray(Thread* self, size_t elements) REQUIRES(!lock_) {
     return reinterpret_cast<T*>(Alloc(self, elements * sizeof(T)));
diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h
index 6568487..887e204 100644
--- a/runtime/mirror/class-inl.h
+++ b/runtime/mirror/class-inl.h
@@ -28,6 +28,7 @@
 #include "dex_file.h"
 #include "gc/heap-inl.h"
 #include "iftable.h"
+#include "length_prefixed_array.h"
 #include "object_array-inl.h"
 #include "read_barrier-inl.h"
 #include "reference-inl.h"
@@ -61,25 +62,28 @@
   return GetFieldObject<DexCache, kVerifyFlags>(OFFSET_OF_OBJECT_MEMBER(Class, dex_cache_));
 }
 
-inline ArtMethod* Class::GetDirectMethodsPtr() {
+inline LengthPrefixedArray<ArtMethod>* Class::GetDirectMethodsPtr() {
   DCHECK(IsLoaded() || IsErroneous());
   return GetDirectMethodsPtrUnchecked();
 }
 
-inline ArtMethod* Class::GetDirectMethodsPtrUnchecked() {
-  return reinterpret_cast<ArtMethod*>(GetField64(OFFSET_OF_OBJECT_MEMBER(Class, direct_methods_)));
+inline LengthPrefixedArray<ArtMethod>* Class::GetDirectMethodsPtrUnchecked() {
+  return reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
+      GetField64(OFFSET_OF_OBJECT_MEMBER(Class, direct_methods_)));
 }
 
-inline ArtMethod* Class::GetVirtualMethodsPtrUnchecked() {
-  return reinterpret_cast<ArtMethod*>(GetField64(OFFSET_OF_OBJECT_MEMBER(Class, virtual_methods_)));
+inline LengthPrefixedArray<ArtMethod>* Class::GetVirtualMethodsPtrUnchecked() {
+  return reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
+      GetField64(OFFSET_OF_OBJECT_MEMBER(Class, virtual_methods_)));
 }
 
-inline void Class::SetDirectMethodsPtr(ArtMethod* new_direct_methods) {
+inline void Class::SetDirectMethodsPtr(LengthPrefixedArray<ArtMethod>* new_direct_methods) {
   DCHECK(GetDirectMethodsPtrUnchecked() == nullptr);
   SetDirectMethodsPtrUnchecked(new_direct_methods);
 }
 
-inline void Class::SetDirectMethodsPtrUnchecked(ArtMethod* new_direct_methods) {
+inline void Class::SetDirectMethodsPtrUnchecked(
+    LengthPrefixedArray<ArtMethod>* new_direct_methods) {
   SetField64<false>(OFFSET_OF_OBJECT_MEMBER(Class, direct_methods_),
                     reinterpret_cast<uint64_t>(new_direct_methods));
 }
@@ -88,25 +92,23 @@
   CheckPointerSize(pointer_size);
   auto* methods = GetDirectMethodsPtrUnchecked();
   DCHECK(methods != nullptr);
-  return reinterpret_cast<ArtMethod*>(reinterpret_cast<uintptr_t>(methods) +
-      ArtMethod::ObjectSize(pointer_size) * i);
+  return &methods->At(i, ArtMethod::ObjectSize(pointer_size));
 }
 
 inline ArtMethod* Class::GetDirectMethod(size_t i, size_t pointer_size) {
   CheckPointerSize(pointer_size);
   auto* methods = GetDirectMethodsPtr();
   DCHECK(methods != nullptr);
-  return reinterpret_cast<ArtMethod*>(reinterpret_cast<uintptr_t>(methods) +
-      ArtMethod::ObjectSize(pointer_size) * i);
+  return &methods->At(i, ArtMethod::ObjectSize(pointer_size));
 }
 
 template<VerifyObjectFlags kVerifyFlags>
-inline ArtMethod* Class::GetVirtualMethodsPtr() {
+inline LengthPrefixedArray<ArtMethod>* Class::GetVirtualMethodsPtr() {
   DCHECK(IsLoaded<kVerifyFlags>() || IsErroneous<kVerifyFlags>());
   return GetVirtualMethodsPtrUnchecked();
 }
 
-inline void Class::SetVirtualMethodsPtr(ArtMethod* new_virtual_methods) {
+inline void Class::SetVirtualMethodsPtr(LengthPrefixedArray<ArtMethod>* new_virtual_methods) {
   // TODO: we reassign virtual methods to grow the table for miranda
   // methods.. they should really just be assigned once.
   SetField64<false>(OFFSET_OF_OBJECT_MEMBER(Class, virtual_methods_),
@@ -129,10 +131,9 @@
 
 inline ArtMethod* Class::GetVirtualMethodUnchecked(size_t i, size_t pointer_size) {
   CheckPointerSize(pointer_size);
-  auto* methods = GetVirtualMethodsPtrUnchecked();
+  LengthPrefixedArray<ArtMethod>* methods = GetVirtualMethodsPtrUnchecked();
   DCHECK(methods != nullptr);
-  return reinterpret_cast<ArtMethod*>(reinterpret_cast<uintptr_t>(methods) +
-      ArtMethod::ObjectSize(pointer_size) * i);
+  return &methods->At(i, ArtMethod::ObjectSize(pointer_size));
 }
 
 inline PointerArray* Class::GetVTable() {
@@ -423,9 +424,9 @@
   SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(Class, iftable_), new_iftable);
 }
 
-inline ArtField* Class::GetIFields() {
+inline LengthPrefixedArray<ArtField>* Class::GetIFieldsPtr() {
   DCHECK(IsLoaded() || IsErroneous());
-  return GetFieldPtr<ArtField*>(OFFSET_OF_OBJECT_MEMBER(Class, ifields_));
+  return GetFieldPtr<LengthPrefixedArray<ArtField>*>(OFFSET_OF_OBJECT_MEMBER(Class, ifields_));
 }
 
 inline MemberOffset Class::GetFirstReferenceInstanceFieldOffset() {
@@ -458,46 +459,44 @@
   return MemberOffset(base);
 }
 
-inline void Class::SetIFields(ArtField* new_ifields) {
-  DCHECK(GetIFieldsUnchecked() == nullptr);
+inline void Class::SetIFieldsPtr(LengthPrefixedArray<ArtField>* new_ifields) {
+  DCHECK(GetIFieldsPtrUnchecked() == nullptr);
   return SetFieldPtr<false>(OFFSET_OF_OBJECT_MEMBER(Class, ifields_), new_ifields);
 }
 
-inline void Class::SetIFieldsUnchecked(ArtField* new_ifields) {
+inline void Class::SetIFieldsPtrUnchecked(LengthPrefixedArray<ArtField>* new_ifields) {
   SetFieldPtr<false, true, kVerifyNone>(OFFSET_OF_OBJECT_MEMBER(Class, ifields_), new_ifields);
 }
 
-inline ArtField* Class::GetSFieldsUnchecked() {
-  return GetFieldPtr<ArtField*>(OFFSET_OF_OBJECT_MEMBER(Class, sfields_));
+inline LengthPrefixedArray<ArtField>* Class::GetSFieldsPtrUnchecked() {
+  return GetFieldPtr<LengthPrefixedArray<ArtField>*>(OFFSET_OF_OBJECT_MEMBER(Class, sfields_));
 }
 
-inline ArtField* Class::GetIFieldsUnchecked() {
-  return GetFieldPtr<ArtField*>(OFFSET_OF_OBJECT_MEMBER(Class, ifields_));
+inline LengthPrefixedArray<ArtField>* Class::GetIFieldsPtrUnchecked() {
+  return GetFieldPtr<LengthPrefixedArray<ArtField>*>(OFFSET_OF_OBJECT_MEMBER(Class, ifields_));
 }
 
-inline ArtField* Class::GetSFields() {
+inline LengthPrefixedArray<ArtField>* Class::GetSFieldsPtr() {
   DCHECK(IsLoaded() || IsErroneous()) << GetStatus();
-  return GetSFieldsUnchecked();
+  return GetSFieldsPtrUnchecked();
 }
 
-inline void Class::SetSFields(ArtField* new_sfields) {
+inline void Class::SetSFieldsPtr(LengthPrefixedArray<ArtField>* new_sfields) {
   DCHECK((IsRetired() && new_sfields == nullptr) ||
          GetFieldPtr<ArtField*>(OFFSET_OF_OBJECT_MEMBER(Class, sfields_)) == nullptr);
   SetFieldPtr<false>(OFFSET_OF_OBJECT_MEMBER(Class, sfields_), new_sfields);
 }
 
-inline void Class::SetSFieldsUnchecked(ArtField* new_sfields) {
+inline void Class::SetSFieldsPtrUnchecked(LengthPrefixedArray<ArtField>* new_sfields) {
   SetFieldPtr<false, true, kVerifyNone>(OFFSET_OF_OBJECT_MEMBER(Class, sfields_), new_sfields);
 }
 
 inline ArtField* Class::GetStaticField(uint32_t i) {
-  DCHECK_LT(i, NumStaticFields());
-  return &GetSFields()[i];
+  return &GetSFieldsPtr()->At(i);
 }
 
 inline ArtField* Class::GetInstanceField(uint32_t i) {
-  DCHECK_LT(i, NumInstanceFields());
-  return &GetIFields()[i];
+  return &GetIFieldsPtr()->At(i);
 }
 
 template<VerifyObjectFlags kVerifyFlags>
@@ -813,85 +812,54 @@
 
 template<class Visitor>
 void mirror::Class::VisitNativeRoots(Visitor& visitor, size_t pointer_size) {
-  ArtField* const sfields = GetSFieldsUnchecked();
-  // Since we visit class roots while we may be writing these fields, check against null.
-  if (sfields != nullptr) {
-    for (size_t i = 0, count = NumStaticFields(); i < count; ++i) {
-      auto* f = &sfields[i];
-      // Visit roots first in case the declaring class gets moved.
-      f->VisitRoots(visitor);
-      if (kIsDebugBuild && IsResolved()) {
-        CHECK_EQ(f->GetDeclaringClass(), this) << GetStatus();
-      }
+  for (ArtField& field : GetSFieldsUnchecked()) {
+    // Visit roots first in case the declaring class gets moved.
+    field.VisitRoots(visitor);
+    if (kIsDebugBuild && IsResolved()) {
+      CHECK_EQ(field.GetDeclaringClass(), this) << GetStatus();
     }
   }
-  ArtField* const ifields = GetIFieldsUnchecked();
-  if (ifields != nullptr) {
-    for (size_t i = 0, count = NumInstanceFields(); i < count; ++i) {
-      auto* f = &ifields[i];
-      // Visit roots first in case the declaring class gets moved.
-      f->VisitRoots(visitor);
-      if (kIsDebugBuild && IsResolved()) {
-        CHECK_EQ(f->GetDeclaringClass(), this) << GetStatus();
-      }
+  for (ArtField& field : GetIFieldsUnchecked()) {
+    // Visit roots first in case the declaring class gets moved.
+    field.VisitRoots(visitor);
+    if (kIsDebugBuild && IsResolved()) {
+      CHECK_EQ(field.GetDeclaringClass(), this) << GetStatus();
     }
   }
-  // We may see GetDirectMethodsPtr() == null with NumDirectMethods() != 0 if the root marking
-  // thread reads a null DirectMethodsBegin() but a non null DirectMethodsBegin() due to a race
-  // SetDirectMethodsPtr from class linking. Same for virtual methods.
-  // In this case, it is safe to avoid marking the roots since we must be either the CC or CMS. If
-  // we are CMS then the roots are already marked through other sources, otherwise the roots are
-  // already marked due to the to-space invariant.
-  // Unchecked versions since we may visit roots of classes that aren't yet loaded.
-  if (GetDirectMethodsPtrUnchecked() != nullptr) {
-    for (auto& m : GetDirectMethods(pointer_size)) {
-      m.VisitRoots(visitor);
-    }
+  for (ArtMethod& method : GetDirectMethods(pointer_size)) {
+    method.VisitRoots(visitor);
   }
-  if (GetVirtualMethodsPtrUnchecked() != nullptr) {
-    for (auto& m : GetVirtualMethods(pointer_size)) {
-      m.VisitRoots(visitor);
-    }
+  for (ArtMethod& method : GetVirtualMethods(pointer_size)) {
+    method.VisitRoots(visitor);
   }
 }
 
-inline StrideIterator<ArtMethod> Class::DirectMethodsBegin(size_t pointer_size)  {
-  CheckPointerSize(pointer_size);
-  auto* methods = GetDirectMethodsPtrUnchecked();
-  auto stride = ArtMethod::ObjectSize(pointer_size);
-  return StrideIterator<ArtMethod>(reinterpret_cast<uintptr_t>(methods), stride);
-}
-
-inline StrideIterator<ArtMethod> Class::DirectMethodsEnd(size_t pointer_size) {
-  CheckPointerSize(pointer_size);
-  auto* methods = GetDirectMethodsPtrUnchecked();
-  auto stride = ArtMethod::ObjectSize(pointer_size);
-  auto count = NumDirectMethods();
-  return StrideIterator<ArtMethod>(reinterpret_cast<uintptr_t>(methods) + stride * count, stride);
-}
-
 inline IterationRange<StrideIterator<ArtMethod>> Class::GetDirectMethods(size_t pointer_size) {
   CheckPointerSize(pointer_size);
-  return MakeIterationRange(DirectMethodsBegin(pointer_size), DirectMethodsEnd(pointer_size));
-}
-
-inline StrideIterator<ArtMethod> Class::VirtualMethodsBegin(size_t pointer_size)  {
-  CheckPointerSize(pointer_size);
-  auto* methods = GetVirtualMethodsPtrUnchecked();
-  auto stride = ArtMethod::ObjectSize(pointer_size);
-  return StrideIterator<ArtMethod>(reinterpret_cast<uintptr_t>(methods), stride);
-}
-
-inline StrideIterator<ArtMethod> Class::VirtualMethodsEnd(size_t pointer_size) {
-  CheckPointerSize(pointer_size);
-  auto* methods = GetVirtualMethodsPtrUnchecked();
-  auto stride = ArtMethod::ObjectSize(pointer_size);
-  auto count = NumVirtualMethods();
-  return StrideIterator<ArtMethod>(reinterpret_cast<uintptr_t>(methods) + stride * count, stride);
+  return MakeIterationRangeFromLengthPrefixedArray(GetDirectMethodsPtrUnchecked(),
+                                                   ArtMethod::ObjectSize(pointer_size));
 }
 
 inline IterationRange<StrideIterator<ArtMethod>> Class::GetVirtualMethods(size_t pointer_size) {
-  return MakeIterationRange(VirtualMethodsBegin(pointer_size), VirtualMethodsEnd(pointer_size));
+  CheckPointerSize(pointer_size);
+  return MakeIterationRangeFromLengthPrefixedArray(GetVirtualMethodsPtrUnchecked(),
+                                                   ArtMethod::ObjectSize(pointer_size));
+}
+
+inline IterationRange<StrideIterator<ArtField>> Class::GetIFields() {
+  return MakeIterationRangeFromLengthPrefixedArray(GetIFieldsPtr(), sizeof(ArtField));
+}
+
+inline IterationRange<StrideIterator<ArtField>> Class::GetSFields() {
+  return MakeIterationRangeFromLengthPrefixedArray(GetSFieldsPtr(), sizeof(ArtField));
+}
+
+inline IterationRange<StrideIterator<ArtField>> Class::GetIFieldsUnchecked() {
+  return MakeIterationRangeFromLengthPrefixedArray(GetIFieldsPtrUnchecked(), sizeof(ArtField));
+}
+
+inline IterationRange<StrideIterator<ArtField>> Class::GetSFieldsUnchecked() {
+  return MakeIterationRangeFromLengthPrefixedArray(GetSFieldsPtrUnchecked(), sizeof(ArtField));
 }
 
 inline MemberOffset Class::EmbeddedImTableOffset(size_t pointer_size) {
@@ -940,6 +908,26 @@
   }
 }
 
+inline uint32_t Class::NumDirectMethods() {
+  LengthPrefixedArray<ArtMethod>* arr = GetDirectMethodsPtrUnchecked();
+  return arr != nullptr ? arr->Length() : 0u;
+}
+
+inline uint32_t Class::NumVirtualMethods() {
+  LengthPrefixedArray<ArtMethod>* arr = GetVirtualMethodsPtrUnchecked();
+  return arr != nullptr ? arr->Length() : 0u;
+}
+
+inline uint32_t Class::NumInstanceFields() {
+  LengthPrefixedArray<ArtField>* arr = GetIFieldsPtrUnchecked();
+  return arr != nullptr ? arr->Length() : 0u;
+}
+
+inline uint32_t Class::NumStaticFields() {
+  LengthPrefixedArray<ArtField>* arr = GetSFieldsPtrUnchecked();
+  return arr != nullptr ? arr->Length() : 0u;
+}
+
 }  // namespace mirror
 }  // namespace art
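
The range-for loops used throughout these hunks rest on two pieces: StrideIterator, which steps by an explicit byte stride rather than sizeof(T), and MakeIterationRangeFromLengthPrefixedArray, which maps a null array to an empty range so callers no longer need the explicit null checks VisitNativeRoots used to carry. A simplified, self-contained sketch of that idiom follows; the local StrideIterator, IterationRange and MakeRange are stand-ins for the runtime versions.

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Simplified stand-ins for the runtime's StrideIterator / IterationRange /
// MakeIterationRangeFromLengthPrefixedArray.
template <typename T>
class StrideIterator {
 public:
  StrideIterator(T* ptr, size_t stride)
      : ptr_(reinterpret_cast<uintptr_t>(ptr)), stride_(stride) {}
  bool operator!=(const StrideIterator& other) const { return ptr_ != other.ptr_; }
  StrideIterator& operator++() { ptr_ += stride_; return *this; }
  T& operator*() const { return *reinterpret_cast<T*>(ptr_); }
 private:
  uintptr_t ptr_;
  size_t stride_;
};

template <typename Iter>
struct IterationRange {
  Iter begin_;
  Iter end_;
  Iter begin() const { return begin_; }
  Iter end() const { return end_; }
};

// A null array becomes an empty range, so callers can always range-for over
// the result without an explicit null check.
template <typename T>
IterationRange<StrideIterator<T>> MakeRange(T* data, size_t count, size_t element_size) {
  if (data == nullptr) {
    return { StrideIterator<T>(nullptr, 0), StrideIterator<T>(nullptr, 0) };
  }
  T* end = reinterpret_cast<T*>(reinterpret_cast<uint8_t*>(data) + count * element_size);
  return { StrideIterator<T>(data, element_size), StrideIterator<T>(end, element_size) };
}

struct Field { uint32_t offset; };  // stand-in for ArtField

int main() {
  Field fields[4] = {{0}, {4}, {8}, {12}};
  for (Field& field : MakeRange(fields, 4, sizeof(Field))) {   // normal iteration
    std::printf("field at offset %u\n", field.offset);
  }
  for (Field& field : MakeRange<Field>(nullptr, 0, sizeof(Field))) {  // empty range
    std::printf("never reached %u\n", field.offset);
  }
  return 0;
}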
 
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index d95bcd8..f138936 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -41,7 +41,7 @@
 class ArtMethod;
 struct ClassOffsets;
 template<class T> class Handle;
-template<class T> class Handle;
+template<typename T> class LengthPrefixedArray;
 class Signature;
 class StringPiece;
 template<size_t kNumReferences> class PACKED(4) StackHandleScope;
@@ -656,21 +656,15 @@
   // Also updates the dex_cache_strings_ variable from new_dex_cache.
   void SetDexCache(DexCache* new_dex_cache) SHARED_REQUIRES(Locks::mutator_lock_);
 
-  ALWAYS_INLINE StrideIterator<ArtMethod> DirectMethodsBegin(size_t pointer_size)
-      SHARED_REQUIRES(Locks::mutator_lock_);
-
-  ALWAYS_INLINE StrideIterator<ArtMethod> DirectMethodsEnd(size_t pointer_size)
-      SHARED_REQUIRES(Locks::mutator_lock_);
-
   ALWAYS_INLINE IterationRange<StrideIterator<ArtMethod>> GetDirectMethods(size_t pointer_size)
       SHARED_REQUIRES(Locks::mutator_lock_);
 
-  ArtMethod* GetDirectMethodsPtr() SHARED_REQUIRES(Locks::mutator_lock_);
+  LengthPrefixedArray<ArtMethod>* GetDirectMethodsPtr() SHARED_REQUIRES(Locks::mutator_lock_);
 
-  void SetDirectMethodsPtr(ArtMethod* new_direct_methods)
+  void SetDirectMethodsPtr(LengthPrefixedArray<ArtMethod>* new_direct_methods)
       SHARED_REQUIRES(Locks::mutator_lock_);
   // Used by image writer.
-  void SetDirectMethodsPtrUnchecked(ArtMethod* new_direct_methods)
+  void SetDirectMethodsPtrUnchecked(LengthPrefixedArray<ArtMethod>* new_direct_methods)
       SHARED_REQUIRES(Locks::mutator_lock_);
 
   ALWAYS_INLINE ArtMethod* GetDirectMethod(size_t i, size_t pointer_size)
@@ -683,35 +677,20 @@
         SHARED_REQUIRES(Locks::mutator_lock_);
 
   // Returns the number of static, private, and constructor methods.
-  ALWAYS_INLINE uint32_t NumDirectMethods() SHARED_REQUIRES(Locks::mutator_lock_) {
-    return GetField32(OFFSET_OF_OBJECT_MEMBER(Class, num_direct_methods_));
-  }
-  void SetNumDirectMethods(uint32_t num) SHARED_REQUIRES(Locks::mutator_lock_) {
-    return SetField32<false>(OFFSET_OF_OBJECT_MEMBER(Class, num_direct_methods_), num);
-  }
+  ALWAYS_INLINE uint32_t NumDirectMethods() SHARED_REQUIRES(Locks::mutator_lock_);
 
   template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
-  ALWAYS_INLINE ArtMethod* GetVirtualMethodsPtr() SHARED_REQUIRES(Locks::mutator_lock_);
-
-  ALWAYS_INLINE StrideIterator<ArtMethod> VirtualMethodsBegin(size_t pointer_size)
-      SHARED_REQUIRES(Locks::mutator_lock_);
-
-  ALWAYS_INLINE StrideIterator<ArtMethod> VirtualMethodsEnd(size_t pointer_size)
+  ALWAYS_INLINE LengthPrefixedArray<ArtMethod>* GetVirtualMethodsPtr()
       SHARED_REQUIRES(Locks::mutator_lock_);
 
   ALWAYS_INLINE IterationRange<StrideIterator<ArtMethod>> GetVirtualMethods(size_t pointer_size)
       SHARED_REQUIRES(Locks::mutator_lock_);
 
-  void SetVirtualMethodsPtr(ArtMethod* new_virtual_methods)
+  void SetVirtualMethodsPtr(LengthPrefixedArray<ArtMethod>* new_virtual_methods)
       SHARED_REQUIRES(Locks::mutator_lock_);
 
   // Returns the number of non-inherited virtual methods.
-  ALWAYS_INLINE uint32_t NumVirtualMethods() SHARED_REQUIRES(Locks::mutator_lock_) {
-    return GetField32(OFFSET_OF_OBJECT_MEMBER(Class, num_virtual_methods_));
-  }
-  void SetNumVirtualMethods(uint32_t num) SHARED_REQUIRES(Locks::mutator_lock_) {
-    return SetField32<false>(OFFSET_OF_OBJECT_MEMBER(Class, num_virtual_methods_), num);
-  }
+  ALWAYS_INLINE uint32_t NumVirtualMethods() SHARED_REQUIRES(Locks::mutator_lock_);
 
   template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
   ArtMethod* GetVirtualMethod(size_t i, size_t pointer_size)
@@ -859,21 +838,19 @@
   ALWAYS_INLINE void SetIfTable(IfTable* new_iftable) SHARED_REQUIRES(Locks::mutator_lock_);
 
   // Get instance fields of the class (See also GetSFields).
-  ArtField* GetIFields() SHARED_REQUIRES(Locks::mutator_lock_);
+  LengthPrefixedArray<ArtField>* GetIFieldsPtr() SHARED_REQUIRES(Locks::mutator_lock_);
 
-  void SetIFields(ArtField* new_ifields) SHARED_REQUIRES(Locks::mutator_lock_);
+  ALWAYS_INLINE IterationRange<StrideIterator<ArtField>> GetIFields()
+      SHARED_REQUIRES(Locks::mutator_lock_);
+
+  void SetIFieldsPtr(LengthPrefixedArray<ArtField>* new_ifields)
+      SHARED_REQUIRES(Locks::mutator_lock_);
 
   // Unchecked edition has no verification flags.
-  void SetIFieldsUnchecked(ArtField* new_sfields) SHARED_REQUIRES(Locks::mutator_lock_);
+  void SetIFieldsPtrUnchecked(LengthPrefixedArray<ArtField>* new_ifields)
+      SHARED_REQUIRES(Locks::mutator_lock_);
 
-  uint32_t NumInstanceFields() SHARED_REQUIRES(Locks::mutator_lock_) {
-    return GetField32(OFFSET_OF_OBJECT_MEMBER(Class, num_instance_fields_));
-  }
-
-  void SetNumInstanceFields(uint32_t num) SHARED_REQUIRES(Locks::mutator_lock_) {
-    return SetField32<false>(OFFSET_OF_OBJECT_MEMBER(Class, num_instance_fields_), num);
-  }
-
+  uint32_t NumInstanceFields() SHARED_REQUIRES(Locks::mutator_lock_);
   ArtField* GetInstanceField(uint32_t i) SHARED_REQUIRES(Locks::mutator_lock_);
 
   // Returns the number of instance fields containing reference types.
@@ -927,20 +904,18 @@
       SHARED_REQUIRES(Locks::mutator_lock_);
 
   // Gets the static fields of the class.
-  ArtField* GetSFields() SHARED_REQUIRES(Locks::mutator_lock_);
+  LengthPrefixedArray<ArtField>* GetSFieldsPtr() SHARED_REQUIRES(Locks::mutator_lock_);
+  ALWAYS_INLINE IterationRange<StrideIterator<ArtField>> GetSFields()
+      SHARED_REQUIRES(Locks::mutator_lock_);
 
-  void SetSFields(ArtField* new_sfields) SHARED_REQUIRES(Locks::mutator_lock_);
+  void SetSFieldsPtr(LengthPrefixedArray<ArtField>* new_sfields)
+      SHARED_REQUIRES(Locks::mutator_lock_);
 
   // Unchecked edition has no verification flags.
-  void SetSFieldsUnchecked(ArtField* new_sfields) SHARED_REQUIRES(Locks::mutator_lock_);
+  void SetSFieldsPtrUnchecked(LengthPrefixedArray<ArtField>* new_sfields)
+      SHARED_REQUIRES(Locks::mutator_lock_);
 
-  uint32_t NumStaticFields() SHARED_REQUIRES(Locks::mutator_lock_) {
-    return GetField32(OFFSET_OF_OBJECT_MEMBER(Class, num_static_fields_));
-  }
-
-  void SetNumStaticFields(uint32_t num) SHARED_REQUIRES(Locks::mutator_lock_) {
-    return SetField32<false>(OFFSET_OF_OBJECT_MEMBER(Class, num_static_fields_), num);
-  }
+  uint32_t NumStaticFields() SHARED_REQUIRES(Locks::mutator_lock_);
 
   // TODO: uint16_t
   ArtField* GetStaticField(uint32_t i) SHARED_REQUIRES(Locks::mutator_lock_);
@@ -1129,10 +1104,10 @@
     return pointer_size;
   }
 
-  ALWAYS_INLINE ArtMethod* GetDirectMethodsPtrUnchecked()
+  ALWAYS_INLINE LengthPrefixedArray<ArtMethod>* GetDirectMethodsPtrUnchecked()
       SHARED_REQUIRES(Locks::mutator_lock_);
 
-  ALWAYS_INLINE ArtMethod* GetVirtualMethodsPtrUnchecked()
+  ALWAYS_INLINE LengthPrefixedArray<ArtMethod>* GetVirtualMethodsPtrUnchecked()
       SHARED_REQUIRES(Locks::mutator_lock_);
 
  private:
@@ -1154,8 +1129,12 @@
   void CheckObjectAlloc() SHARED_REQUIRES(Locks::mutator_lock_);
 
   // Unchecked editions is for root visiting.
-  ArtField* GetSFieldsUnchecked() SHARED_REQUIRES(Locks::mutator_lock_);
-  ArtField* GetIFieldsUnchecked() SHARED_REQUIRES(Locks::mutator_lock_);
+  LengthPrefixedArray<ArtField>* GetSFieldsPtrUnchecked() SHARED_REQUIRES(Locks::mutator_lock_);
+  IterationRange<StrideIterator<ArtField>> GetSFieldsUnchecked()
+      SHARED_REQUIRES(Locks::mutator_lock_);
+  LengthPrefixedArray<ArtField>* GetIFieldsPtrUnchecked() SHARED_REQUIRES(Locks::mutator_lock_);
+  IterationRange<StrideIterator<ArtField>> GetIFieldsUnchecked()
+      SHARED_REQUIRES(Locks::mutator_lock_);
 
   bool ProxyDescriptorEquals(const char* match) SHARED_REQUIRES(Locks::mutator_lock_);
 
@@ -1216,7 +1195,7 @@
   // Note: Shuffled back.
   uint32_t access_flags_;
 
-  // static, private, and <init> methods. Pointer to an ArtMethod array.
+  // static, private, and <init> methods. Pointer to an ArtMethod length-prefixed array.
   uint64_t direct_methods_;
 
   // instance fields
@@ -1229,10 +1208,11 @@
   // ArtField arrays are allocated as an array of fields, and not an array of fields pointers.
   uint64_t ifields_;
 
-  // Static fields
+  // Static fields. Pointer to an ArtField length-prefixed array.
   uint64_t sfields_;
 
-  // Virtual methods defined in this class; invoked through vtable. Pointer to an ArtMethod array.
+  // Virtual methods defined in this class; invoked through vtable. Pointer to an ArtMethod
+  // length-prefixed array.
   uint64_t virtual_methods_;
 
   // Total size of the Class instance; used when allocating storage on gc heap.
@@ -1250,24 +1230,12 @@
   // TODO: really 16bits
   int32_t dex_type_idx_;
 
-  // Number of direct fields.
-  uint32_t num_direct_methods_;
-
-  // Number of instance fields.
-  uint32_t num_instance_fields_;
-
   // Number of instance fields that are object refs.
   uint32_t num_reference_instance_fields_;
 
   // Number of static fields that are object refs,
   uint32_t num_reference_static_fields_;
 
-  // Number of static fields.
-  uint32_t num_static_fields_;
-
-  // Number of virtual methods.
-  uint32_t num_virtual_methods_;
-
   // Total object size; used when allocating storage on gc heap.
   // (For interfaces and abstract classes this will be zero.)
   // See also class_size_.
diff --git a/runtime/mirror/field.cc b/runtime/mirror/field.cc
index 02e4484..ff6847c 100644
--- a/runtime/mirror/field.cc
+++ b/runtime/mirror/field.cc
@@ -61,10 +61,10 @@
     DCHECK_EQ(declaring_class->NumStaticFields(), 2U);
     // 0 == Class[] interfaces; 1 == Class[][] throws;
     if (GetDexFieldIndex() == 0) {
-      return &declaring_class->GetSFields()[0];
+      return &declaring_class->GetSFieldsPtr()->At(0);
     } else {
       DCHECK_EQ(GetDexFieldIndex(), 1U);
-      return &declaring_class->GetSFields()[1];
+      return &declaring_class->GetSFieldsPtr()->At(1);
     }
   }
   mirror::DexCache* const dex_cache = declaring_class->GetDexCache();
diff --git a/runtime/mirror/object.cc b/runtime/mirror/object.cc
index 80decaa..87fb5ba 100644
--- a/runtime/mirror/object.cc
+++ b/runtime/mirror/object.cc
@@ -208,15 +208,13 @@
     return;
   }
   for (Class* cur = c; cur != nullptr; cur = cur->GetSuperClass()) {
-    ArtField* fields = cur->GetIFields();
-    for (size_t i = 0, count = cur->NumInstanceFields(); i < count; ++i) {
+    for (ArtField& field : cur->GetIFields()) {
       StackHandleScope<1> hs(Thread::Current());
       Handle<Object> h_object(hs.NewHandle(new_value));
-      ArtField* field = &fields[i];
-      if (field->GetOffset().Int32Value() == field_offset.Int32Value()) {
-        CHECK_NE(field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
+      if (field.GetOffset().Int32Value() == field_offset.Int32Value()) {
+        CHECK_NE(field.GetTypeAsPrimitiveType(), Primitive::kPrimNot);
         // TODO: resolve the field type for moving GC.
-        mirror::Class* field_type = field->GetType<!kMovingCollector>();
+        mirror::Class* field_type = field.GetType<!kMovingCollector>();
         if (field_type != nullptr) {
           CHECK(field_type->IsAssignableFrom(new_value->GetClass()));
         }
@@ -229,13 +227,11 @@
     return;
   }
   if (IsClass()) {
-    ArtField* fields = AsClass()->GetSFields();
-    for (size_t i = 0, count = AsClass()->NumStaticFields(); i < count; ++i) {
-      ArtField* field = &fields[i];
-      if (field->GetOffset().Int32Value() == field_offset.Int32Value()) {
-        CHECK_NE(field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
+    for (ArtField& field : AsClass()->GetSFields()) {
+      if (field.GetOffset().Int32Value() == field_offset.Int32Value()) {
+        CHECK_NE(field.GetTypeAsPrimitiveType(), Primitive::kPrimNot);
         // TODO: resolve the field type for moving GC.
-        mirror::Class* field_type = field->GetType<!kMovingCollector>();
+        mirror::Class* field_type = field.GetType<!kMovingCollector>();
         if (field_type != nullptr) {
           CHECK(field_type->IsAssignableFrom(new_value->GetClass()));
         }
diff --git a/runtime/native/java_lang_Class.cc b/runtime/native/java_lang_Class.cc
index eddb2d1..1ca98e5 100644
--- a/runtime/native/java_lang_Class.cc
+++ b/runtime/native/java_lang_Class.cc
@@ -110,20 +110,18 @@
     Thread* self, mirror::Class* klass, bool public_only, bool force_resolve)
       SHARED_REQUIRES(Locks::mutator_lock_) {
   StackHandleScope<1> hs(self);
-  auto* ifields = klass->GetIFields();
-  auto* sfields = klass->GetSFields();
-  const auto num_ifields = klass->NumInstanceFields();
-  const auto num_sfields = klass->NumStaticFields();
-  size_t array_size = num_ifields + num_sfields;
+  IterationRange<StrideIterator<ArtField>> ifields = klass->GetIFields();
+  IterationRange<StrideIterator<ArtField>> sfields = klass->GetSFields();
+  size_t array_size = klass->NumInstanceFields() + klass->NumStaticFields();
   if (public_only) {
     // Lets go subtract all the non public fields.
-    for (size_t i = 0; i < num_ifields; ++i) {
-      if (!ifields[i].IsPublic()) {
+    for (ArtField& field : ifields) {
+      if (!field.IsPublic()) {
         --array_size;
       }
     }
-    for (size_t i = 0; i < num_sfields; ++i) {
-      if (!sfields[i].IsPublic()) {
+    for (ArtField& field : sfields) {
+      if (!field.IsPublic()) {
         --array_size;
       }
     }
@@ -134,34 +132,32 @@
   if (object_array.Get() == nullptr) {
     return nullptr;
   }
-  for (size_t i = 0; i < num_ifields; ++i) {
-    auto* art_field = &ifields[i];
-    if (!public_only || art_field->IsPublic()) {
-      auto* field = mirror::Field::CreateFromArtField(self, art_field, force_resolve);
-      if (field == nullptr) {
+  for (ArtField& field : ifields) {
+    if (!public_only || field.IsPublic()) {
+      auto* reflect_field = mirror::Field::CreateFromArtField(self, &field, force_resolve);
+      if (reflect_field == nullptr) {
         if (kIsDebugBuild) {
           self->AssertPendingException();
         }
         // Maybe null due to OOME or type resolving exception.
         return nullptr;
       }
-      object_array->SetWithoutChecks<false>(array_idx++, field);
+      object_array->SetWithoutChecks<false>(array_idx++, reflect_field);
     }
   }
-  for (size_t i = 0; i < num_sfields; ++i) {
-    auto* art_field = &sfields[i];
-    if (!public_only || art_field->IsPublic()) {
-      auto* field = mirror::Field::CreateFromArtField(self, art_field, force_resolve);
-      if (field == nullptr) {
+  for (ArtField& field : sfields) {
+    if (!public_only || field.IsPublic()) {
+      auto* reflect_field = mirror::Field::CreateFromArtField(self, &field, force_resolve);
+      if (reflect_field == nullptr) {
         if (kIsDebugBuild) {
           self->AssertPendingException();
         }
         return nullptr;
       }
-      object_array->SetWithoutChecks<false>(array_idx++, field);
+      object_array->SetWithoutChecks<false>(array_idx++, reflect_field);
     }
   }
-  CHECK_EQ(array_idx, array_size);
+  DCHECK_EQ(array_idx, array_size);
   return object_array.Get();
 }
 
@@ -188,16 +184,19 @@
 // the dex cache for lookups? I think CompareModifiedUtf8ToUtf16AsCodePointValues should be fairly
 // fast.
 ALWAYS_INLINE static inline ArtField* FindFieldByName(
-    Thread* self ATTRIBUTE_UNUSED, mirror::String* name, ArtField* fields, size_t num_fields)
+    Thread* self ATTRIBUTE_UNUSED, mirror::String* name, LengthPrefixedArray<ArtField>* fields)
     SHARED_REQUIRES(Locks::mutator_lock_) {
+  if (fields == nullptr) {
+    return nullptr;
+  }
   size_t low = 0;
-  size_t high = num_fields;
+  size_t high = fields->Length();
   const uint16_t* const data = name->GetValue();
   const size_t length = name->GetLength();
   while (low < high) {
     auto mid = (low + high) / 2;
-    ArtField* const field = &fields[mid];
-    int result = CompareModifiedUtf8ToUtf16AsCodePointValues(field->GetName(), data, length);
+    ArtField& field = fields->At(mid);
+    int result = CompareModifiedUtf8ToUtf16AsCodePointValues(field.GetName(), data, length);
     // Alternate approach, only a few % faster at the cost of more allocations.
     // int result = field->GetStringName(self, true)->CompareTo(name);
     if (result < 0) {
@@ -205,12 +204,12 @@
     } else if (result > 0) {
       high = mid;
     } else {
-      return field;
+      return &field;
     }
   }
   if (kIsDebugBuild) {
-    for (size_t i = 0; i < num_fields; ++i) {
-      CHECK_NE(fields[i].GetName(), name->ToModifiedUtf8());
+    for (ArtField& field : MakeIterationRangeFromLengthPrefixedArray(fields, sizeof(ArtField))) {
+      CHECK_NE(field.GetName(), name->ToModifiedUtf8());
     }
   }
   return nullptr;
@@ -219,13 +218,11 @@
 ALWAYS_INLINE static inline mirror::Field* GetDeclaredField(
     Thread* self, mirror::Class* c, mirror::String* name)
     SHARED_REQUIRES(Locks::mutator_lock_) {
-  auto* instance_fields = c->GetIFields();
-  auto* art_field = FindFieldByName(self, name, instance_fields, c->NumInstanceFields());
+  ArtField* art_field = FindFieldByName(self, name, c->GetIFieldsPtr());
   if (art_field != nullptr) {
     return mirror::Field::CreateFromArtField(self, art_field, true);
   }
-  auto* static_fields = c->GetSFields();
-  art_field = FindFieldByName(self, name, static_fields, c->NumStaticFields());
+  art_field = FindFieldByName(self, name, c->GetSFieldsPtr());
   if (art_field != nullptr) {
     return mirror::Field::CreateFromArtField(self, art_field, true);
   }
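
For context on the lookup above: FindFieldByName can binary-search each field array because LinkFields keeps the entries sorted by name (the DCHECK_LE on strcmp earlier in this patch). Below is a standalone sketch of the same halving search over a plain sorted array, with strcmp standing in for the modified-UTF8/UTF-16 comparison; Field and FindByName are illustrative names only.

#include <cstddef>
#include <cstdio>
#include <cstring>

struct Field {       // illustrative stand-in, not ArtField
  const char* name;
  int offset;
};

// Halving search over entries sorted by name, mirroring FindFieldByName's
// low/high loop over a LengthPrefixedArray<ArtField>.
const Field* FindByName(const Field* fields, size_t count, const char* name) {
  size_t low = 0;
  size_t high = count;
  while (low < high) {
    const size_t mid = low + (high - low) / 2;
    const int result = std::strcmp(fields[mid].name, name);
    if (result < 0) {
      low = mid + 1;
    } else if (result > 0) {
      high = mid;
    } else {
      return &fields[mid];
    }
  }
  return nullptr;  // not present; the runtime then falls back to the other field array
}

int main() {
  // Must stay sorted by name, as LinkFields guarantees for the real arrays.
  const Field fields[] = { {"alpha", 0}, {"count", 4}, {"value", 8} };
  const Field* f = FindByName(fields, 3, "value");
  std::printf("%s -> offset %d\n", f != nullptr ? f->name : "miss", f != nullptr ? f->offset : -1);
  return 0;
}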
diff --git a/runtime/proxy_test.cc b/runtime/proxy_test.cc
index c33b126..bc9ba37 100644
--- a/runtime/proxy_test.cc
+++ b/runtime/proxy_test.cc
@@ -160,10 +160,9 @@
   ASSERT_TRUE(proxyClass->IsProxyClass());
   ASSERT_TRUE(proxyClass->IsInitialized());
 
-  ArtField* instance_fields = proxyClass->GetIFields();
-  EXPECT_TRUE(instance_fields == nullptr);
+  EXPECT_TRUE(proxyClass->GetIFieldsPtr() == nullptr);
 
-  ArtField* static_fields = proxyClass->GetSFields();
+  LengthPrefixedArray<ArtField>* static_fields = proxyClass->GetSFieldsPtr();
   ASSERT_TRUE(static_fields != nullptr);
   ASSERT_EQ(2u, proxyClass->NumStaticFields());
 
@@ -175,7 +174,7 @@
   ASSERT_TRUE(throwsFieldClass.Get() != nullptr);
 
   // Test "Class[] interfaces" field.
-  ArtField* field = &static_fields[0];
+  ArtField* field = &static_fields->At(0);
   EXPECT_STREQ("interfaces", field->GetName());
   EXPECT_STREQ("[Ljava/lang/Class;", field->GetTypeDescriptor());
   EXPECT_EQ(interfacesFieldClass.Get(), field->GetType<true>());
@@ -184,7 +183,7 @@
   EXPECT_FALSE(field->IsPrimitiveType());
 
   // Test "Class[][] throws" field.
-  field = &static_fields[1];
+  field = &static_fields->At(1);
   EXPECT_STREQ("throws", field->GetName());
   EXPECT_STREQ("[[Ljava/lang/Class;", field->GetTypeDescriptor());
   EXPECT_EQ(throwsFieldClass.Get(), field->GetType<true>());
@@ -215,30 +214,30 @@
   ASSERT_TRUE(proxyClass1->IsProxyClass());
   ASSERT_TRUE(proxyClass1->IsInitialized());
 
-  ArtField* static_fields0 = proxyClass0->GetSFields();
+  LengthPrefixedArray<ArtField>* static_fields0 = proxyClass0->GetSFieldsPtr();
   ASSERT_TRUE(static_fields0 != nullptr);
-  ASSERT_EQ(2u, proxyClass0->NumStaticFields());
-  ArtField* static_fields1 = proxyClass1->GetSFields();
+  ASSERT_EQ(2u, static_fields0->Length());
+  LengthPrefixedArray<ArtField>* static_fields1 = proxyClass1->GetSFieldsPtr();
   ASSERT_TRUE(static_fields1 != nullptr);
-  ASSERT_EQ(2u, proxyClass1->NumStaticFields());
+  ASSERT_EQ(2u, static_fields1->Length());
 
-  EXPECT_EQ(static_fields0[0].GetDeclaringClass(), proxyClass0.Get());
-  EXPECT_EQ(static_fields0[1].GetDeclaringClass(), proxyClass0.Get());
-  EXPECT_EQ(static_fields1[0].GetDeclaringClass(), proxyClass1.Get());
-  EXPECT_EQ(static_fields1[1].GetDeclaringClass(), proxyClass1.Get());
+  EXPECT_EQ(static_fields0->At(0).GetDeclaringClass(), proxyClass0.Get());
+  EXPECT_EQ(static_fields0->At(1).GetDeclaringClass(), proxyClass0.Get());
+  EXPECT_EQ(static_fields1->At(0).GetDeclaringClass(), proxyClass1.Get());
+  EXPECT_EQ(static_fields1->At(1).GetDeclaringClass(), proxyClass1.Get());
 
   Handle<mirror::Field> field00 =
-      hs.NewHandle(mirror::Field::CreateFromArtField(soa.Self(), &static_fields0[0], true));
+      hs.NewHandle(mirror::Field::CreateFromArtField(soa.Self(), &static_fields0->At(0), true));
   Handle<mirror::Field> field01 =
-      hs.NewHandle(mirror::Field::CreateFromArtField(soa.Self(), &static_fields0[1], true));
+      hs.NewHandle(mirror::Field::CreateFromArtField(soa.Self(), &static_fields0->At(1), true));
   Handle<mirror::Field> field10 =
-      hs.NewHandle(mirror::Field::CreateFromArtField(soa.Self(), &static_fields1[0], true));
+      hs.NewHandle(mirror::Field::CreateFromArtField(soa.Self(), &static_fields1->At(0), true));
   Handle<mirror::Field> field11 =
-      hs.NewHandle(mirror::Field::CreateFromArtField(soa.Self(), &static_fields1[1], true));
-  EXPECT_EQ(field00->GetArtField(), &static_fields0[0]);
-  EXPECT_EQ(field01->GetArtField(), &static_fields0[1]);
-  EXPECT_EQ(field10->GetArtField(), &static_fields1[0]);
-  EXPECT_EQ(field11->GetArtField(), &static_fields1[1]);
+      hs.NewHandle(mirror::Field::CreateFromArtField(soa.Self(), &static_fields1->At(1), true));
+  EXPECT_EQ(field00->GetArtField(), &static_fields0->At(0));
+  EXPECT_EQ(field01->GetArtField(), &static_fields0->At(1));
+  EXPECT_EQ(field10->GetArtField(), &static_fields1->At(0));
+  EXPECT_EQ(field11->GetArtField(), &static_fields1->At(1));
 }
 
 }  // namespace art
diff --git a/runtime/reflection.cc b/runtime/reflection.cc
index ee2e2c5..100d199 100644
--- a/runtime/reflection.cc
+++ b/runtime/reflection.cc
@@ -780,7 +780,7 @@
   mirror::Class* klass = o->GetClass();
   mirror::Class* src_class = nullptr;
   ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
-  ArtField* primitive_field = &klass->GetIFields()[0];
+  ArtField* primitive_field = &klass->GetIFieldsPtr()->At(0);
   if (klass->DescriptorEquals("Ljava/lang/Boolean;")) {
     src_class = class_linker->FindPrimitiveClass('Z');
     boxed_value.SetZ(primitive_field->GetBoolean(o));
diff --git a/runtime/stride_iterator.h b/runtime/stride_iterator.h
index d8d21aa..c69f30e 100644
--- a/runtime/stride_iterator.h
+++ b/runtime/stride_iterator.h
@@ -29,11 +29,12 @@
   StrideIterator& operator=(const StrideIterator&) = default;
   StrideIterator& operator=(StrideIterator&&) = default;
 
-  StrideIterator(uintptr_t ptr, size_t stride)
-      : ptr_(ptr), stride_(stride) {
-  }
+  StrideIterator(T* ptr, size_t stride)
+      : ptr_(reinterpret_cast<uintptr_t>(ptr)),
+        stride_(stride) {}
 
   bool operator==(const StrideIterator& other) const {
+    DCHECK_EQ(stride_, other.stride_);
     return ptr_ == other.ptr_;
   }
 
@@ -52,6 +53,12 @@
     return temp;
   }
 
+  StrideIterator operator+(ssize_t delta) const {
+    auto temp = *this;
+    temp.ptr_ += static_cast<ssize_t>(stride_) * delta;
+    return temp;
+  }
+
   T& operator*() const {
     return *reinterpret_cast<T*>(ptr_);
   }