Revert "Revert "JIT root tables.""

Also contains Revert "Support kJitTableAddress in x86/arm/arm64."

This reverts commit 4acd03638fcdb4e5d1666f8eec7eb3bf6d6be035.
This reverts commit 997d1217830c0a18b70faeabd53c04700a87d7d9.

Test: ART_USE_READ_BARRIER=true/false test-art-host test-art-target

Change-Id: I77cb1e9bf8f1b4c58b72d3cf5ca31ced2aaa1ea3
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 8b450e1..a5f248d 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -1375,4 +1375,30 @@
   return klass->GetDisableIntrinsicFlagOffset().Uint32Value();
 }
 
+void CodeGenerator::EmitJitRoots(uint8_t* code,
+                                 Handle<mirror::ObjectArray<mirror::Object>> roots,
+                                 const uint8_t* roots_data,
+                                 Handle<mirror::DexCache> outer_dex_cache) {
+  DCHECK_EQ(static_cast<size_t>(roots->GetLength()), GetNumberOfJitRoots());
+  StackHandleScope<1> hs(Thread::Current());
+  MutableHandle<mirror::DexCache> h_dex_cache(hs.NewHandle<mirror::DexCache>(nullptr));
+  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
+  size_t index = 0;
+  for (auto& entry : jit_string_roots_) {
+    const DexFile& entry_dex_file = *entry.first.dex_file;
+    // Avoid the expensive FindDexCache call by checking if the string is
+    // in the compiled method's dex file.
+    h_dex_cache.Assign(IsSameDexFile(*outer_dex_cache->GetDexFile(), entry_dex_file)
+        ? outer_dex_cache.Get()
+        : class_linker->FindDexCache(hs.Self(), entry_dex_file));
+    mirror::String* string = class_linker->LookupString(
+        entry_dex_file, entry.first.string_index, h_dex_cache);
+    DCHECK(string != nullptr) << "JIT roots require strings to have been loaded";
+    roots->Set(index, string);
+    entry.second = index;
+    ++index;
+  }
+  EmitJitRootPatches(code, roots_data);
+}
+
 }  // namespace art
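
The new EmitJitRoots above is the second half of a two-pass scheme: during code generation each backend records a (dex file, string index) key in jit_string_roots_ with a placeholder index; this pass then resolves each string, stores it in the `roots` array, and records the final index so that EmitJitRootPatches can rewrite the emitted literals. A minimal standalone sketch of the scheme, using hypothetical stand-in types rather than ART's:

    #include <cstddef>
    #include <map>
    #include <string>
    #include <vector>

    // Stand-in for ART's StringReference: (dex file name, string index).
    using StringRef = std::pair<std::string, unsigned>;

    int main() {
      // Pass 1 (code generation): keys recorded with a placeholder index of 0.
      std::map<StringRef, size_t> jit_string_roots = {
          {{"core.dex", 42}, 0u}, {{"app.dex", 7}, 0u}};

      // Pass 2 (EmitJitRoots): resolve each string, fill the roots array and
      // remember the slot so the backend can patch the code literals.
      std::vector<std::string> roots(jit_string_roots.size());
      size_t index = 0;
      for (auto& entry : jit_string_roots) {
        roots[index] = "<resolved " + entry.first.first + ">";  // roots->Set(...)
        entry.second = index;  // final index, consumed by EmitJitRootPatches
        ++index;
      }
      return roots.size() == 2 ? 0 : 1;
    }
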
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index bf246ad..a5d19ab 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -32,6 +32,7 @@
 #include "optimizing_compiler_stats.h"
 #include "read_barrier_option.h"
 #include "stack_map_stream.h"
+#include "string_reference.h"
 #include "utils/label.h"
 
 namespace art {
@@ -335,6 +336,17 @@
 
   void BuildStackMaps(MemoryRegion region, const DexFile::CodeItem& code_item);
   size_t ComputeStackMapsSize();
+  size_t GetNumberOfJitRoots() const {
+    return jit_string_roots_.size();
+  }
+
+  // Fills the `roots` array with the roots collected during code generation.
+  // Also emits the literal patches referencing those roots.
+  void EmitJitRoots(uint8_t* code,
+                    Handle<mirror::ObjectArray<mirror::Object>> roots,
+                    const uint8_t* roots_data,
+                    Handle<mirror::DexCache> outer_dex_cache)
+      REQUIRES_SHARED(Locks::mutator_lock_);
 
   bool IsLeafMethod() const {
     return is_leaf_;
@@ -515,6 +527,25 @@
   virtual HLoadClass::LoadKind GetSupportedLoadClassKind(
       HLoadClass::LoadKind desired_class_load_kind) = 0;
 
+  static LocationSummary::CallKind GetLoadStringCallKind(HLoadString* load) {
+    switch (load->GetLoadKind()) {
+      case HLoadString::LoadKind::kBssEntry:
+        DCHECK(load->NeedsEnvironment());
+        return LocationSummary::kCallOnSlowPath;
+      case HLoadString::LoadKind::kDexCacheViaMethod:
+        DCHECK(load->NeedsEnvironment());
+        return LocationSummary::kCallOnMainOnly;
+      case HLoadString::LoadKind::kJitTableAddress:
+        DCHECK(!load->NeedsEnvironment());
+        return kEmitCompilerReadBarrier
+            ? LocationSummary::kCallOnSlowPath
+            : LocationSummary::kNoCall;
+      default:
+        DCHECK(!load->NeedsEnvironment());
+        return LocationSummary::kNoCall;
+    }
+  }
+
   // Check if the desired_dispatch_info is supported. If it is, return it,
   // otherwise return a fall-back info that should be used instead.
   virtual HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
@@ -571,6 +603,8 @@
         fpu_callee_save_mask_(fpu_callee_save_mask),
         stack_map_stream_(graph->GetArena()),
         block_order_(nullptr),
+        jit_string_roots_(StringReferenceValueComparator(),
+                          graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
         disasm_info_(nullptr),
         stats_(stats),
         graph_(graph),
@@ -637,6 +671,12 @@
     return current_slow_path_;
   }
 
+  // Emit the patches associated with JIT roots. Only applies to JIT compiled code.
+  virtual void EmitJitRootPatches(uint8_t* code ATTRIBUTE_UNUSED,
+                                  const uint8_t* roots_data ATTRIBUTE_UNUSED) {
+    DCHECK_EQ(jit_string_roots_.size(), 0u);
+  }
+
   // Frame size required for this method.
   uint32_t frame_size_;
   uint32_t core_spill_mask_;
@@ -662,6 +702,11 @@
   // The order to use for code generation.
   const ArenaVector<HBasicBlock*>* block_order_;
 
+  // Maps a StringReference (dex_file, string_index) to the index in the literal table.
+  // Entries are initially added with a 0 index, and `EmitJitRoots` will compute all the
+  // indices.
+  ArenaSafeMap<StringReference, size_t, StringReferenceValueComparator> jit_string_roots_;
+
   DisassemblyInformation* disasm_info_;
 
  private:
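
GetLoadStringCallKind centralizes a decision that each backend previously duplicated in VisitLoadString. The interesting case is kJitTableAddress: it never needs an environment, but it still needs a slow path when a compiler read barrier is emitted, because the GC root load may then divert to the read-barrier entrypoint. A sketch of the mapping with stand-in enums (an assumed simplification, not ART's types):

    #include <cassert>

    // Stand-in enums; the mapping mirrors GetLoadStringCallKind above.
    enum class LoadKind { kBootImageAddress, kBssEntry, kDexCacheViaMethod, kJitTableAddress };
    enum class CallKind { kNoCall, kCallOnSlowPath, kCallOnMainOnly };

    CallKind LoadStringCallKind(LoadKind kind, bool emit_read_barrier) {
      switch (kind) {
        case LoadKind::kBssEntry:
          return CallKind::kCallOnSlowPath;   // may have to resolve the string
        case LoadKind::kDexCacheViaMethod:
          return CallKind::kCallOnMainOnly;   // runtime call on the main path
        case LoadKind::kJitTableAddress:
          // The string is already in the root table; only a read barrier can
          // force a call, and then only on the slow path.
          return emit_read_barrier ? CallKind::kCallOnSlowPath : CallKind::kNoCall;
        default:
          return CallKind::kNoCall;           // direct addresses need no call
      }
    }

    int main() {
      assert(LoadStringCallKind(LoadKind::kJitTableAddress, false) == CallKind::kNoCall);
      assert(LoadStringCallKind(LoadKind::kJitTableAddress, true) == CallKind::kCallOnSlowPath);
      return 0;
    }
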
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 7c72d00..035c02e 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -1222,7 +1222,9 @@
                                graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
       pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
       boot_image_address_patches_(std::less<uint32_t>(),
-                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
+                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+      jit_string_patches_(StringReferenceValueComparator(),
+                          graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
   // Always save the LR register to mimic Quick.
   AddAllocatedRegister(Location::RegisterLocation(LR));
 }
@@ -5901,6 +5903,9 @@
     case HLoadString::LoadKind::kBssEntry:
       DCHECK(!Runtime::Current()->UseJitCompilation());
       break;
+    case HLoadString::LoadKind::kJitTableAddress:
+      DCHECK(Runtime::Current()->UseJitCompilation());
+      break;
     case HLoadString::LoadKind::kDexCacheViaMethod:
       break;
   }
@@ -5908,13 +5913,8 @@
 }
 
 void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
-  LocationSummary::CallKind call_kind = load->NeedsEnvironment()
-      ? ((load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod)
-          ? LocationSummary::kCallOnMainOnly
-          : LocationSummary::kCallOnSlowPath)
-      : LocationSummary::kNoCall;
+  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
-
   HLoadString::LoadKind load_kind = load->GetLoadKind();
   if (load_kind == HLoadString::LoadKind::kDexCacheViaMethod) {
     locations->SetOut(Location::RegisterLocation(R0));
@@ -5987,6 +5987,13 @@
       __ Bind(slow_path->GetExitLabel());
       return;
     }
+    case HLoadString::LoadKind::kJitTableAddress: {
+      __ LoadLiteral(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
+                                                                load->GetStringIndex()));
+      // /* GcRoot<mirror::String> */ out = *out
+      GenerateGcRootFieldLoad(load, out_loc, out, /* offset */ 0, kCompilerReadBarrierOption);
+      return;
+    }
     default:
       break;
   }
@@ -7343,6 +7350,14 @@
   return DeduplicateUint32Literal(address, &uint32_literals_);
 }
 
+Literal* CodeGeneratorARM::DeduplicateJitStringLiteral(const DexFile& dex_file,
+                                                       uint32_t string_index) {
+  jit_string_roots_.Overwrite(StringReference(&dex_file, string_index), /* placeholder */ 0u);
+  return jit_string_patches_.GetOrCreate(
+      StringReference(&dex_file, string_index),
+      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
+}
+
 template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
 inline void CodeGeneratorARM::EmitPcRelativeLinkerPatches(
     const ArenaDeque<PcRelativePatchInfo>& infos,
@@ -7659,6 +7674,21 @@
   }
 }
 
+void CodeGeneratorARM::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
+  for (const auto& entry : jit_string_patches_) {
+    const auto& it = jit_string_roots_.find(entry.first);
+    DCHECK(it != jit_string_roots_.end());
+    size_t index_in_table = it->second;
+    Literal* literal = entry.second;
+    DCHECK(literal->GetLabel()->IsBound());
+    uint32_t literal_offset = literal->GetLabel()->Position();
+    uintptr_t address =
+        reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
+    uint8_t* data = code + literal_offset;
+    reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
+  }
+}
+
 #undef __
 #undef QUICK_ENTRY_POINT
 
diff --git a/compiler/optimizing/code_generator_arm.h b/compiler/optimizing/code_generator_arm.h
index f95dd57..8ace3da 100644
--- a/compiler/optimizing/code_generator_arm.h
+++ b/compiler/optimizing/code_generator_arm.h
@@ -488,9 +488,12 @@
   Literal* DeduplicateBootImageTypeLiteral(const DexFile& dex_file, uint32_t type_index);
   Literal* DeduplicateBootImageAddressLiteral(uint32_t address);
   Literal* DeduplicateDexCacheAddressLiteral(uint32_t address);
+  Literal* DeduplicateJitStringLiteral(const DexFile& dex_file, uint32_t string_index);
 
   void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;
 
+  void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) OVERRIDE;
+
   // Fast path implementation of ReadBarrier::Barrier for a heap
   // reference field load when Baker's read barriers are used.
   void GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
@@ -591,9 +594,9 @@
 
   using Uint32ToLiteralMap = ArenaSafeMap<uint32_t, Literal*>;
   using MethodToLiteralMap = ArenaSafeMap<MethodReference, Literal*, MethodReferenceComparator>;
-  using BootStringToLiteralMap = ArenaSafeMap<StringReference,
-                                              Literal*,
-                                              StringReferenceValueComparator>;
+  using StringToLiteralMap = ArenaSafeMap<StringReference,
+                                          Literal*,
+                                          StringReferenceValueComparator>;
   using BootTypeToLiteralMap = ArenaSafeMap<TypeReference,
                                             Literal*,
                                             TypeReferenceValueComparator>;
@@ -605,7 +608,6 @@
   PcRelativePatchInfo* NewPcRelativePatch(const DexFile& dex_file,
                                           uint32_t offset_or_index,
                                           ArenaDeque<PcRelativePatchInfo>* patches);
-
   template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
   static void EmitPcRelativeLinkerPatches(const ArenaDeque<PcRelativePatchInfo>& infos,
                                           ArenaVector<LinkerPatch>* linker_patches);
@@ -630,7 +632,7 @@
   // PC-relative patch info for each HArmDexCacheArraysBase.
   ArenaDeque<PcRelativePatchInfo> pc_relative_dex_cache_patches_;
   // Deduplication map for boot string literals for kBootImageLinkTimeAddress.
-  BootStringToLiteralMap boot_image_string_patches_;
+  StringToLiteralMap boot_image_string_patches_;
   // PC-relative String patch info; type depends on configuration (app .bss or boot image PIC).
   ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
   // Deduplication map for boot type literals for kBootImageLinkTimeAddress.
@@ -640,6 +642,9 @@
   // Deduplication map for patchable boot image addresses.
   Uint32ToLiteralMap boot_image_address_patches_;
 
+  // Patches for string literals in JIT compiled code.
+  StringToLiteralMap jit_string_patches_;
+
   DISALLOW_COPY_AND_ASSIGN(CodeGeneratorARM);
 };
 
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 35b1605..ea57990 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -1166,7 +1166,9 @@
                                graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
       pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
       boot_image_address_patches_(std::less<uint32_t>(),
-                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
+                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+      jit_string_patches_(StringReferenceValueComparator(),
+                          graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
   // Save the link register (containing the return address) to mimic Quick.
   AddAllocatedRegister(LocationFrom(lr));
 }
@@ -4138,6 +4140,14 @@
   return DeduplicateUint64Literal(address);
 }
 
+vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitStringLiteral(
+    const DexFile& dex_file, uint32_t string_index) {
+  jit_string_roots_.Overwrite(StringReference(&dex_file, string_index), /* placeholder */ 0u);
+  return jit_string_patches_.GetOrCreate(
+      StringReference(&dex_file, string_index),
+      [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
+}
+
 void CodeGeneratorARM64::EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label,
                                              vixl::aarch64::Register reg) {
   DCHECK(reg.IsX());
@@ -4523,16 +4533,15 @@
       break;
     case HLoadString::LoadKind::kDexCacheViaMethod:
       break;
+    case HLoadString::LoadKind::kJitTableAddress:
+      DCHECK(Runtime::Current()->UseJitCompilation());
+      break;
   }
   return desired_string_load_kind;
 }
 
 void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
-  LocationSummary::CallKind call_kind = load->NeedsEnvironment()
-      ? ((load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod)
-          ? LocationSummary::kCallOnMainOnly
-          : LocationSummary::kCallOnSlowPath)
-      : LocationSummary::kNoCall;
+  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
   if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
     InvokeRuntimeCallingConvention calling_convention;
@@ -4558,6 +4567,7 @@
 
 void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
   Register out = OutputRegister(load);
+  Location out_loc = load->GetLocations()->Out();
 
   switch (load->GetLoadKind()) {
     case HLoadString::LoadKind::kBootImageLinkTimeAddress:
@@ -4594,9 +4604,9 @@
       // Add LDR with its PC-relative String patch.
       vixl::aarch64::Label* ldr_label =
           codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
-      // /* GcRoot<mirror::Class> */ out = *(base_address + offset)  /* PC-relative */
+      // /* GcRoot<mirror::String> */ out = *(base_address + offset)  /* PC-relative */
       GenerateGcRootFieldLoad(load,
-                              load->GetLocations()->Out(),
+                              out_loc,
                               temp,
                               /* offset placeholder */ 0u,
                               ldr_label,
@@ -4608,6 +4618,17 @@
       __ Bind(slow_path->GetExitLabel());
       return;
     }
+    case HLoadString::LoadKind::kJitTableAddress: {
+      __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
+                                                        load->GetStringIndex()));
+      GenerateGcRootFieldLoad(load,
+                              out_loc,
+                              out.X(),
+                              /* offset */ 0,
+                              /* fixup_label */ nullptr,
+                              kCompilerReadBarrierOption);
+      return;
+    }
     default:
       break;
   }
@@ -5727,7 +5748,19 @@
   }
 }
 
-
+void CodeGeneratorARM64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
+  for (const auto& entry : jit_string_patches_) {
+    const auto& it = jit_string_roots_.find(entry.first);
+    DCHECK(it != jit_string_roots_.end());
+    size_t index_in_table = it->second;
+    vixl::aarch64::Literal<uint32_t>* literal = entry.second;
+    uint32_t literal_offset = literal->GetOffset();
+    uintptr_t address =
+        reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
+    uint8_t* data = code + literal_offset;
+    reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
+  }
+}
 
 #undef __
 #undef QUICK_ENTRY_POINT
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index 0e8d4fd..a2ab607 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -565,6 +565,8 @@
                                                                     uint32_t type_index);
   vixl::aarch64::Literal<uint32_t>* DeduplicateBootImageAddressLiteral(uint64_t address);
   vixl::aarch64::Literal<uint64_t>* DeduplicateDexCacheAddressLiteral(uint64_t address);
+  vixl::aarch64::Literal<uint32_t>* DeduplicateJitStringLiteral(const DexFile& dex_file,
+                                                                uint32_t string_index);
 
   void EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label, vixl::aarch64::Register reg);
   void EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
@@ -576,6 +578,8 @@
 
   void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;
 
+  void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) OVERRIDE;
+
   // Fast path implementation of ReadBarrier::Barrier for a heap
   // reference field load when Baker's read barriers are used.
   void GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
@@ -673,9 +677,9 @@
   using MethodToLiteralMap = ArenaSafeMap<MethodReference,
                                           vixl::aarch64::Literal<uint64_t>*,
                                           MethodReferenceComparator>;
-  using BootStringToLiteralMap = ArenaSafeMap<StringReference,
-                                              vixl::aarch64::Literal<uint32_t>*,
-                                              StringReferenceValueComparator>;
+  using StringToLiteralMap = ArenaSafeMap<StringReference,
+                                          vixl::aarch64::Literal<uint32_t>*,
+                                          StringReferenceValueComparator>;
   using BootTypeToLiteralMap = ArenaSafeMap<TypeReference,
                                             vixl::aarch64::Literal<uint32_t>*,
                                             TypeReferenceValueComparator>;
@@ -739,7 +743,7 @@
   // PC-relative DexCache access info.
   ArenaDeque<PcRelativePatchInfo> pc_relative_dex_cache_patches_;
   // Deduplication map for boot string literals for kBootImageLinkTimeAddress.
-  BootStringToLiteralMap boot_image_string_patches_;
+  StringToLiteralMap boot_image_string_patches_;
   // PC-relative String patch info; type depends on configuration (app .bss or boot image PIC).
   ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
   // Deduplication map for boot type literals for kBootImageLinkTimeAddress.
@@ -749,6 +753,9 @@
   // Deduplication map for patchable boot image addresses.
   Uint32ToLiteralMap boot_image_address_patches_;
 
+  // Patches for string literals in JIT compiled code.
+  StringToLiteralMap jit_string_patches_;
+
   DISALLOW_COPY_AND_ASSIGN(CodeGeneratorARM64);
 };
 
diff --git a/compiler/optimizing/code_generator_mips.cc b/compiler/optimizing/code_generator_mips.cc
index f169eb0..0936ac0 100644
--- a/compiler/optimizing/code_generator_mips.cc
+++ b/compiler/optimizing/code_generator_mips.cc
@@ -5215,6 +5215,11 @@
     case HLoadString::LoadKind::kDexCacheViaMethod:
       fallback_load = false;
       break;
+    case HLoadString::LoadKind::kJitTableAddress:
+      DCHECK(Runtime::Current()->UseJitCompilation());
+      // TODO: implement.
+      fallback_load = true;
+      break;
   }
   if (fallback_load) {
     desired_string_load_kind = HLoadString::LoadKind::kDexCacheViaMethod;
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 2a9e21d..c7addce 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -1020,6 +1020,7 @@
       simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
       string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
       type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+      jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
       constant_area_start_(-1),
       fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
       method_address_offset_(-1) {
@@ -6226,16 +6227,15 @@
       break;
     case HLoadString::LoadKind::kDexCacheViaMethod:
       break;
+    case HLoadString::LoadKind::kJitTableAddress:
+      DCHECK(Runtime::Current()->UseJitCompilation());
+      break;
   }
   return desired_string_load_kind;
 }
 
 void LocationsBuilderX86::VisitLoadString(HLoadString* load) {
-  LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
-      ? ((load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod)
-          ? LocationSummary::kCallOnMainOnly
-          : LocationSummary::kCallOnSlowPath)
-      : LocationSummary::kNoCall;
+  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
   HLoadString::LoadKind load_kind = load->GetLoadKind();
   if (load_kind == HLoadString::LoadKind::kBootImageLinkTimePcRelative ||
@@ -6260,6 +6260,14 @@
   }
 }
 
+Label* CodeGeneratorX86::NewJitRootStringPatch(const DexFile& dex_file, uint32_t dex_index) {
+  jit_string_roots_.Overwrite(StringReference(&dex_file, dex_index), /* placeholder */ 0u);
+  // Add a patch entry and return the label.
+  jit_string_patches_.emplace_back(dex_file, dex_index);
+  PatchInfo<Label>* info = &jit_string_patches_.back();
+  return &info->label;
+}
+
 void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) {
   LocationSummary* locations = load->GetLocations();
   Location out_loc = locations->Out();
@@ -6288,7 +6296,7 @@
       Register method_address = locations->InAt(0).AsRegister<Register>();
       Address address = Address(method_address, CodeGeneratorX86::kDummy32BitOffset);
       Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
-      // /* GcRoot<mirror::Class> */ out = *address  /* PC-relative */
+      // /* GcRoot<mirror::String> */ out = *address  /* PC-relative */
       GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
       SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86(load);
       codegen_->AddSlowPath(slow_path);
@@ -6297,6 +6305,14 @@
       __ Bind(slow_path->GetExitLabel());
       return;
     }
+    case HLoadString::LoadKind::kJitTableAddress: {
+      Address address = Address::Absolute(CodeGeneratorX86::kDummy32BitOffset);
+      Label* fixup_label = codegen_->NewJitRootStringPatch(
+          load->GetDexFile(), load->GetStringIndex());
+      // /* GcRoot<mirror::String> */ out = *address
+      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
+      return;
+    }
     default:
       break;
   }
@@ -7722,6 +7738,20 @@
   }
 }
 
+void CodeGeneratorX86::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
+  for (const PatchInfo<Label>& info : jit_string_patches_) {
+    const auto& it = jit_string_roots_.find(StringReference(&info.dex_file, info.index));
+    DCHECK(it != jit_string_roots_.end());
+    size_t index_in_table = it->second;
+    uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
+    uintptr_t address =
+        reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
+    typedef __attribute__((__aligned__(1))) uint32_t unaligned_uint32_t;
+    reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
+       dchecked_integral_cast<uint32_t>(address);
+  }
+}
+
 #undef __
 
 }  // namespace x86
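
The x86 and x86_64 variants find the patch site from a Label bound just past the 32-bit immediate, backing up by kLabelPositionToLiteralOffsetAdjustment, and the store must tolerate arbitrary alignment, hence the aligned(1) typedef. A sketch of just that mechanism; the adjustment value of 4 is an assumption based on the immediate size:

    #include <cstdint>
    #include <cstring>

    int main() {
      uint8_t code[8] = {};
      // Hypothetical: label bound right after a 32-bit immediate ending at byte 7.
      uint32_t label_position = 7;
      uint32_t code_offset = label_position - 4;  // back up to the immediate

      // GCC/Clang extension: a uint32_t type with alignment 1, allowing an
      // unaligned store without undefined behavior.
      typedef __attribute__((__aligned__(1))) uint32_t unaligned_uint32_t;
      reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] = 0x12345678u;

      uint32_t readback;
      std::memcpy(&readback, code + code_offset, sizeof(readback));
      return readback == 0x12345678u ? 0 : 1;
    }
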
diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h
index 164231b..16ea6b5 100644
--- a/compiler/optimizing/code_generator_x86.h
+++ b/compiler/optimizing/code_generator_x86.h
@@ -414,12 +414,15 @@
   void RecordTypePatch(HLoadClass* load_class);
   Label* NewStringBssEntryPatch(HLoadString* load_string);
   Label* NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file, uint32_t element_offset);
+  Label* NewJitRootStringPatch(const DexFile& dex_file, uint32_t dex_index);
 
   void MoveFromReturnRegister(Location trg, Primitive::Type type) OVERRIDE;
 
   // Emit linker patches.
   void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;
 
+  void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) OVERRIDE;
+
   // Emit a write barrier.
   void MarkGCCard(Register temp,
                   Register card,
@@ -616,6 +619,9 @@
   // Type patch locations.
   ArenaDeque<PatchInfo<Label>> type_patches_;
 
+  // Patches for string root accesses in JIT compiled code.
+  ArenaDeque<PatchInfo<Label>> jit_string_patches_;
+
   // Offset to the start of the constant area in the assembled code.
   // Used for fixups to the constant area.
   int32_t constant_area_start_;
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index cb89e50..7be887f 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -1266,7 +1266,8 @@
         simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
         string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
         type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
-        fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
+        fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+        jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
   AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
 }
 
@@ -5638,16 +5639,15 @@
       break;
     case HLoadString::LoadKind::kDexCacheViaMethod:
       break;
+    case HLoadString::LoadKind::kJitTableAddress:
+      DCHECK(Runtime::Current()->UseJitCompilation());
+      break;
   }
   return desired_string_load_kind;
 }
 
 void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
-  LocationSummary::CallKind call_kind = load->NeedsEnvironment()
-      ? ((load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod)
-          ? LocationSummary::kCallOnMainOnly
-          : LocationSummary::kCallOnSlowPath)
-      : LocationSummary::kNoCall;
+  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
   if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
     locations->SetOut(Location::RegisterLocation(RAX));
@@ -5667,6 +5667,14 @@
   }
 }
 
+Label* CodeGeneratorX86_64::NewJitRootStringPatch(const DexFile& dex_file, uint32_t dex_index) {
+  jit_string_roots_.Overwrite(StringReference(&dex_file, dex_index), /* placeholder */ 0u);
+  // Add a patch entry and return the label.
+  jit_string_patches_.emplace_back(dex_file, dex_index);
+  PatchInfo<Label>* info = &jit_string_patches_.back();
+  return &info->label;
+}
+
 void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
   LocationSummary* locations = load->GetLocations();
   Location out_loc = locations->Out();
@@ -5698,6 +5706,15 @@
       __ Bind(slow_path->GetExitLabel());
       return;
     }
+    case HLoadString::LoadKind::kJitTableAddress: {
+      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
+                                          /* no_rip */ true);
+      Label* fixup_label =
+          codegen_->NewJitRootStringPatch(load->GetDexFile(), load->GetStringIndex());
+      // /* GcRoot<mirror::String> */ out = *address
+      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
+      return;
+    }
     default:
       break;
   }
@@ -7076,6 +7093,20 @@
   }
 }
 
+void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
+  for (const PatchInfo<Label>& info : jit_string_patches_) {
+    const auto& it = jit_string_roots_.find(StringReference(&info.dex_file, info.index));
+    DCHECK(it != jit_string_roots_.end());
+    size_t index_in_table = it->second;
+    uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
+    uintptr_t address =
+        reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
+    typedef __attribute__((__aligned__(1))) uint32_t unaligned_uint32_t;
+    reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
+       dchecked_integral_cast<uint32_t>(address);
+  }
+}
+
 #undef __
 
 }  // namespace x86_64
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index e5a4152..0f70b15 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -412,11 +412,14 @@
   void RecordTypePatch(HLoadClass* load_class);
   Label* NewStringBssEntryPatch(HLoadString* load_string);
   Label* NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file, uint32_t element_offset);
+  Label* NewJitRootStringPatch(const DexFile& dex_file, uint32_t dex_index);
 
   void MoveFromReturnRegister(Location trg, Primitive::Type type) OVERRIDE;
 
   void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;
 
+  void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) OVERRIDE;
+
   const X86_64InstructionSetFeatures& GetInstructionSetFeatures() const {
     return isa_features_;
   }
@@ -602,6 +605,9 @@
   // Fixups for jump tables need to be handled specially.
   ArenaVector<JumpTableRIPFixup*> fixups_to_jump_tables_;
 
+  // Patches for string literals in JIT compiled code.
+  ArenaDeque<PatchInfo<Label>> jit_string_patches_;
+
   DISALLOW_COPY_AND_ASSIGN(CodeGeneratorX86_64);
 };
 
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index e0c582a..215ed54 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -5690,7 +5690,10 @@
     // all other types are unavailable.
     kDexCacheViaMethod,
 
-    kLast = kDexCacheViaMethod
+    // Load from the root table associated with the JIT compiled method.
+    kJitTableAddress,
+
+    kLast = kJitTableAddress,
   };
 
   HLoadString(HCurrentMethod* current_method,
@@ -5748,7 +5751,8 @@
     LoadKind load_kind = GetLoadKind();
     if (load_kind == LoadKind::kBootImageLinkTimeAddress ||
         load_kind == LoadKind::kBootImageLinkTimePcRelative ||
-        load_kind == LoadKind::kBootImageAddress) {
+        load_kind == LoadKind::kBootImageAddress ||
+        load_kind == LoadKind::kJitTableAddress) {
       return false;
     }
     return !IsInDexCache();
@@ -5801,7 +5805,8 @@
     return load_kind == LoadKind::kBootImageLinkTimeAddress ||
         load_kind == LoadKind::kBootImageLinkTimePcRelative ||
         load_kind == LoadKind::kBssEntry ||
-        load_kind == LoadKind::kDexCacheViaMethod;
+        load_kind == LoadKind::kDexCacheViaMethod ||
+        load_kind == LoadKind::kJitTableAddress;
   }
 
   static bool HasAddress(LoadKind load_kind) {
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 6f84cdc..1add660 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -117,6 +117,7 @@
 
   size_t GetSize() const { return size_; }
   const ArenaVector<uint8_t>& GetMemory() const { return memory_; }
+  uint8_t* GetData() { return memory_.data(); }
 
  private:
   ArenaVector<uint8_t> memory_;
@@ -1126,7 +1127,7 @@
                                     jit::JitCodeCache* code_cache,
                                     ArtMethod* method,
                                     bool osr) {
-  StackHandleScope<2> hs(self);
+  StackHandleScope<3> hs(self);
   Handle<mirror::ClassLoader> class_loader(hs.NewHandle(
       method->GetDeclaringClass()->GetClassLoader()));
   Handle<mirror::DexCache> dex_cache(hs.NewHandle(method->GetDexCache()));
@@ -1172,22 +1173,43 @@
   }
 
   size_t stack_map_size = codegen->ComputeStackMapsSize();
-  uint8_t* stack_map_data = code_cache->ReserveData(self, stack_map_size, method);
-  if (stack_map_data == nullptr) {
+  size_t number_of_roots = codegen->GetNumberOfJitRoots();
+  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
+  // We allocate an object array to ensure the JIT roots that we will collect in EmitJitRoots
+  // will be visible to the GC between EmitJitRoots and CommitCode. Once CommitCode is
+  // executed, this array is not needed.
+  Handle<mirror::ObjectArray<mirror::Object>> roots(
+      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(
+          self, class_linker->GetClassRoot(ClassLinker::kObjectArrayClass), number_of_roots)));
+  if (roots.Get() == nullptr) {
+    // Out of memory; clear the exception so it does not surface as an uncaught Java exception.
+    DCHECK(self->IsExceptionPending());
+    self->ClearException();
+    return false;
+  }
+  uint8_t* stack_map_data = nullptr;
+  uint8_t* roots_data = nullptr;
+  code_cache->ReserveData(
+      self, stack_map_size, number_of_roots, method, &stack_map_data, &roots_data);
+  if (stack_map_data == nullptr || roots_data == nullptr) {
     return false;
   }
   MaybeRecordStat(MethodCompilationStat::kCompiled);
   codegen->BuildStackMaps(MemoryRegion(stack_map_data, stack_map_size), *code_item);
+  codegen->EmitJitRoots(code_allocator.GetData(), roots, roots_data, dex_cache);
+
   const void* code = code_cache->CommitCode(
       self,
       method,
       stack_map_data,
+      roots_data,
       codegen->HasEmptyFrame() ? 0 : codegen->GetFrameSize(),
       codegen->GetCoreSpillMask(),
       codegen->GetFpuSpillMask(),
       code_allocator.GetMemory().data(),
       code_allocator.GetSize(),
-      osr);
+      osr,
+      roots);
 
   if (code == nullptr) {
     code_cache->ClearData(self, stack_map_data);
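
The reworked compile path allocates the handle-held ObjectArray first, so the roots collected by EmitJitRoots stay visible to the GC until CommitCode copies them into the root table; ReserveData then hands back two pointers into a single data allocation and signals failure through null out-pointers rather than a return value. A sketch of that contract, with malloc standing in for the code cache's data allocator:

    #include <cstdint>
    #include <cstdlib>

    // Sketch of the new ReserveData contract: one underlying allocation, with
    // the root table placed before the stack maps, both returned via out-params.
    static void ReserveData(size_t stack_map_size, size_t table_size,
                            uint8_t** stack_map_data, uint8_t** roots_data) {
      uint8_t* result = static_cast<uint8_t*>(malloc(table_size + stack_map_size));
      if (result == nullptr) {
        *stack_map_data = nullptr;
        *roots_data = nullptr;
        return;
      }
      *roots_data = result;                   // table first...
      *stack_map_data = result + table_size;  // ...stack maps right after it
    }

    int main() {
      uint8_t* stack_map_data = nullptr;
      uint8_t* roots_data = nullptr;
      ReserveData(/* stack_map_size */ 40, /* table_size */ 16,
                  &stack_map_data, &roots_data);
      if (stack_map_data == nullptr || roots_data == nullptr) {
        return 1;  // mirrors the compiler bailing out above
      }
      free(roots_data);  // roots_data is the start of the single allocation
      return 0;
    }
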
diff --git a/compiler/optimizing/sharpening.cc b/compiler/optimizing/sharpening.cc
index 63e4ca6..15254ed 100644
--- a/compiler/optimizing/sharpening.cc
+++ b/compiler/optimizing/sharpening.cc
@@ -281,7 +281,8 @@
         : hs.NewHandle(class_linker->FindDexCache(soa.Self(), dex_file));
 
     if (codegen_->GetCompilerOptions().IsBootImage()) {
-      // Compiling boot image. Resolve the string and allocate it if needed.
+      // Compiling boot image. Resolve the string and allocate it if needed, to ensure
+      // the string will be added to the boot image.
       DCHECK(!runtime->UseJitCompilation());
       mirror::String* string = class_linker->ResolveString(dex_file, string_index, dex_cache);
       CHECK(string != nullptr);
@@ -297,10 +298,14 @@
     } else if (runtime->UseJitCompilation()) {
       // TODO: Make sure we don't set the "compile PIC" flag for JIT as that's bogus.
       // DCHECK(!codegen_->GetCompilerOptions().GetCompilePic());
-      mirror::String* string = dex_cache->GetResolvedString(string_index);
-      if (string != nullptr && runtime->GetHeap()->ObjectIsInBootImageSpace(string)) {
-        desired_load_kind = HLoadString::LoadKind::kBootImageAddress;
-        address = reinterpret_cast64<uint64_t>(string);
+      mirror::String* string = class_linker->LookupString(dex_file, string_index, dex_cache);
+      if (string != nullptr) {
+        if (runtime->GetHeap()->ObjectIsInBootImageSpace(string)) {
+          desired_load_kind = HLoadString::LoadKind::kBootImageAddress;
+          address = reinterpret_cast64<uint64_t>(string);
+        } else {
+          desired_load_kind = HLoadString::LoadKind::kJitTableAddress;
+        }
       }
     } else {
       // AOT app compilation. Try to lookup the string without allocating if not found.
@@ -322,6 +327,7 @@
     case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
     case HLoadString::LoadKind::kBssEntry:
     case HLoadString::LoadKind::kDexCacheViaMethod:
+    case HLoadString::LoadKind::kJitTableAddress:
       load_string->SetLoadKindWithStringReference(load_kind, dex_file, string_index);
       break;
     case HLoadString::LoadKind::kBootImageAddress:
diff --git a/runtime/jit/jit_code_cache.cc b/runtime/jit/jit_code_cache.cc
index a26d850..2143adc 100644
--- a/runtime/jit/jit_code_cache.cc
+++ b/runtime/jit/jit_code_cache.cc
@@ -80,8 +80,18 @@
 
   std::string error_str;
   // Map name specific for android_os_Debug.cpp accounting.
+  // Map in low 4gb to simplify accessing root tables for x86_64.
+  // We could do PC-relative addressing to avoid this problem, but that
+  // would require reserving the code and data areas before submitting, which
+  // would widen the window during which the code memory is RWX.
   MemMap* data_map = MemMap::MapAnonymous(
-      "data-code-cache", nullptr, max_capacity, kProtAll, false, false, &error_str, use_ashmem);
+      "data-code-cache", nullptr,
+      max_capacity,
+      kProtAll,
+      /* low_4gb */ true,
+      /* reuse */ false,
+      &error_str,
+      use_ashmem);
   if (data_map == nullptr) {
     std::ostringstream oss;
     oss << "Failed to create read write execute cache: " << error_str << " size=" << max_capacity;
@@ -197,34 +207,40 @@
 
 uint8_t* JitCodeCache::CommitCode(Thread* self,
                                   ArtMethod* method,
-                                  const uint8_t* vmap_table,
+                                  uint8_t* stack_map,
+                                  uint8_t* roots_data,
                                   size_t frame_size_in_bytes,
                                   size_t core_spill_mask,
                                   size_t fp_spill_mask,
                                   const uint8_t* code,
                                   size_t code_size,
-                                  bool osr) {
+                                  bool osr,
+                                  Handle<mirror::ObjectArray<mirror::Object>> roots) {
   uint8_t* result = CommitCodeInternal(self,
                                        method,
-                                       vmap_table,
+                                       stack_map,
+                                       roots_data,
                                        frame_size_in_bytes,
                                        core_spill_mask,
                                        fp_spill_mask,
                                        code,
                                        code_size,
-                                       osr);
+                                       osr,
+                                       roots);
   if (result == nullptr) {
     // Retry.
     GarbageCollectCache(self);
     result = CommitCodeInternal(self,
                                 method,
-                                vmap_table,
+                                stack_map,
+                                roots_data,
                                 frame_size_in_bytes,
                                 core_spill_mask,
                                 fp_spill_mask,
                                 code,
                                 code_size,
-                                osr);
+                                osr,
+                                roots);
   }
   return result;
 }
@@ -243,20 +259,67 @@
   return reinterpret_cast<uintptr_t>(code) - RoundUp(sizeof(OatQuickMethodHeader), alignment);
 }
 
+static uint32_t ComputeRootTableSize(uint32_t number_of_roots) {
+  return sizeof(uint32_t) + number_of_roots * sizeof(GcRoot<mirror::Object>);
+}
+
+static uint32_t GetNumberOfRoots(const uint8_t* stack_map) {
+  // The length of the table is stored just before the stack map (and therefore at the end of
+  // the table itself), in order to be able to fetch it from a `stack_map` pointer.
+  return reinterpret_cast<const uint32_t*>(stack_map)[-1];
+}
+
+static void FillRootTable(uint8_t* roots_data, Handle<mirror::ObjectArray<mirror::Object>> roots)
+    REQUIRES_SHARED(Locks::mutator_lock_) {
+  GcRoot<mirror::Object>* gc_roots = reinterpret_cast<GcRoot<mirror::Object>*>(roots_data);
+  uint32_t length = roots->GetLength();
+  // Put all roots in `roots_data`.
+  for (uint32_t i = 0; i < length; ++i) {
+    gc_roots[i] = GcRoot<mirror::Object>(roots->Get(i));
+  }
+  // Store the length of the table at the end. This will allow fetching it from a `stack_map`
+  // pointer.
+  reinterpret_cast<uint32_t*>(gc_roots + length)[0] = length;
+}
+
+static uint8_t* GetRootTable(const void* code_ptr, uint32_t* number_of_roots = nullptr) {
+  OatQuickMethodHeader* method_header = OatQuickMethodHeader::FromCodePointer(code_ptr);
+  uint8_t* data = method_header->GetOptimizedCodeInfoPtr();
+  uint32_t roots = GetNumberOfRoots(data);
+  if (number_of_roots != nullptr) {
+    *number_of_roots = roots;
+  }
+  return data - ComputeRootTableSize(roots);
+}
+
+void JitCodeCache::SweepRootTables(IsMarkedVisitor* visitor) {
+  MutexLock mu(Thread::Current(), lock_);
+  for (const auto& entry : method_code_map_) {
+    uint32_t number_of_roots = 0;
+    uint8_t* roots_data = GetRootTable(entry.first, &number_of_roots);
+    GcRoot<mirror::Object>* roots = reinterpret_cast<GcRoot<mirror::Object>*>(roots_data);
+    for (uint32_t i = 0; i < number_of_roots; ++i) {
+      // This does not need a read barrier because this is called by GC.
+      mirror::Object* object = roots[i].Read<kWithoutReadBarrier>();
+      DCHECK(object != nullptr);
+      mirror::Object* new_string = visitor->IsMarked(object);
+      // We know the string is marked because it's a strongly-interned string that
+      // is always alive.
+      // TODO: Do not use IsMarked for j.l.Class, and adjust once we move this method
+      // out of the weak access/creation pause. b/32167580
+      DCHECK(new_string != nullptr);
+      DCHECK(new_string->IsString());
+      roots[i] = GcRoot<mirror::Object>(new_string);
+    }
+  }
+}
+
 void JitCodeCache::FreeCode(const void* code_ptr, ArtMethod* method ATTRIBUTE_UNUSED) {
   uintptr_t allocation = FromCodeToAllocation(code_ptr);
-  const OatQuickMethodHeader* method_header = OatQuickMethodHeader::FromCodePointer(code_ptr);
   // Notify native debugger that we are about to remove the code.
   // It does nothing if we are not using native debugger.
   DeleteJITCodeEntryForAddress(reinterpret_cast<uintptr_t>(code_ptr));
-
-  // Use the offset directly to prevent sanity check that the method is
-  // compiled with optimizing.
-  // TODO(ngeoffray): Clean up.
-  if (method_header->vmap_table_offset_ != 0) {
-    const uint8_t* data = method_header->code_ - method_header->vmap_table_offset_;
-    FreeData(const_cast<uint8_t*>(data));
-  }
+  FreeData(GetRootTable(code_ptr));
   FreeCode(reinterpret_cast<uint8_t*>(allocation));
 }
 
@@ -308,13 +371,16 @@
 
 uint8_t* JitCodeCache::CommitCodeInternal(Thread* self,
                                           ArtMethod* method,
-                                          const uint8_t* vmap_table,
+                                          uint8_t* stack_map,
+                                          uint8_t* roots_data,
                                           size_t frame_size_in_bytes,
                                           size_t core_spill_mask,
                                           size_t fp_spill_mask,
                                           const uint8_t* code,
                                           size_t code_size,
-                                          bool osr) {
+                                          bool osr,
+                                          Handle<mirror::ObjectArray<mirror::Object>> roots) {
+  DCHECK(stack_map != nullptr);
   size_t alignment = GetInstructionSetAlignment(kRuntimeISA);
   // Ensure the header ends up at expected instruction alignment.
   size_t header_size = RoundUp(sizeof(OatQuickMethodHeader), alignment);
@@ -338,7 +404,7 @@
       std::copy(code, code + code_size, code_ptr);
       method_header = OatQuickMethodHeader::FromCodePointer(code_ptr);
       new (method_header) OatQuickMethodHeader(
-          (vmap_table == nullptr) ? 0 : code_ptr - vmap_table,
+          code_ptr - stack_map,
           frame_size_in_bytes,
           core_spill_mask,
           fp_spill_mask,
@@ -353,6 +419,8 @@
   {
     MutexLock mu(self, lock_);
     method_code_map_.Put(code_ptr, method);
+    // Fill the root table before updating the entry point.
+    FillRootTable(roots_data, roots);
     if (osr) {
       number_of_osr_compilations_++;
       osr_code_map_.Put(method, code_ptr);
@@ -408,8 +476,14 @@
   FreeData(reinterpret_cast<uint8_t*>(data));
 }
 
-uint8_t* JitCodeCache::ReserveData(Thread* self, size_t size, ArtMethod* method) {
-  size = RoundUp(size, sizeof(void*));
+void JitCodeCache::ReserveData(Thread* self,
+                               size_t stack_map_size,
+                               size_t number_of_roots,
+                               ArtMethod* method,
+                               uint8_t** stack_map_data,
+                               uint8_t** roots_data) {
+  size_t table_size = ComputeRootTableSize(number_of_roots);
+  size_t size = RoundUp(stack_map_size + table_size, sizeof(void*));
   uint8_t* result = nullptr;
 
   {
@@ -436,7 +510,8 @@
               << " for stack maps of "
               << ArtMethod::PrettyMethod(method);
   }
-  return result;
+  *roots_data = result;
+  *stack_map_data = result + table_size;
 }
 
 class MarkCodeVisitor FINAL : public StackVisitor {
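
The helpers above all rely on one layout invariant: the region reserved for a method is [roots... | length | stack maps...], with the length word immediately before the stack maps, so both GetNumberOfRoots and GetRootTable can be derived from a stack map pointer alone. A compilable illustration of that recovery, with plain uint32_t slots standing in for GcRoot<mirror::Object>:

    #include <cstdint>
    #include <cstdlib>

    // Same formula as ComputeRootTableSize: the roots plus the trailing length.
    static uint32_t RootTableSize(uint32_t number_of_roots) {
      return sizeof(uint32_t) + number_of_roots * sizeof(uint32_t);
    }

    int main() {
      const uint32_t number_of_roots = 3;
      const size_t stack_map_size = 40;
      const size_t table_size = RootTableSize(number_of_roots);

      // Layout: [ root0 | root1 | root2 | length | stack maps ... ]
      uint8_t* region = static_cast<uint8_t*>(calloc(table_size + stack_map_size, 1));
      uint8_t* roots_data = region;
      uint8_t* stack_map_data = region + table_size;

      // FillRootTable writes the length just before the stack maps...
      reinterpret_cast<uint32_t*>(roots_data)[number_of_roots] = number_of_roots;
      // ...so GetNumberOfRoots can read it back as stack_map[-1], and
      // GetRootTable can subtract the table size to find the roots again.
      uint32_t recovered = reinterpret_cast<const uint32_t*>(stack_map_data)[-1];
      uint8_t* table = stack_map_data - RootTableSize(recovered);

      bool ok = (recovered == number_of_roots) && (table == roots_data);
      free(region);
      return ok ? 0 : 1;
    }
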
diff --git a/runtime/jit/jit_code_cache.h b/runtime/jit/jit_code_cache.h
index e15c93a..a97ef68 100644
--- a/runtime/jit/jit_code_cache.h
+++ b/runtime/jit/jit_code_cache.h
@@ -92,13 +92,15 @@
   // Allocate and write code and its metadata to the code cache.
   uint8_t* CommitCode(Thread* self,
                       ArtMethod* method,
-                      const uint8_t* vmap_table,
+                      uint8_t* stack_map,
+                      uint8_t* roots_data,
                       size_t frame_size_in_bytes,
                       size_t core_spill_mask,
                       size_t fp_spill_mask,
                       const uint8_t* code,
                       size_t code_size,
-                      bool osr)
+                      bool osr,
+                      Handle<mirror::ObjectArray<mirror::Object>> roots)
       REQUIRES_SHARED(Locks::mutator_lock_)
       REQUIRES(!lock_);
 
@@ -108,8 +110,14 @@
   // Return true if the code cache contains this method.
   bool ContainsMethod(ArtMethod* method) REQUIRES(!lock_);
 
-  // Reserve a region of data of size at least "size". Returns null if there is no more room.
-  uint8_t* ReserveData(Thread* self, size_t size, ArtMethod* method)
+  // Allocate a region of data containing `size` bytes of stack maps, plus space
+  // for storing `number_of_roots` roots. Sets the out-pointers to null if there is no more room.
+  void ReserveData(Thread* self,
+                   size_t size,
+                   size_t number_of_roots,
+                   ArtMethod* method,
+                   uint8_t** stack_map_data,
+                   uint8_t** roots_data)
       REQUIRES_SHARED(Locks::mutator_lock_)
       REQUIRES(!lock_);
 
@@ -188,6 +196,10 @@
 
   bool IsOsrCompiled(ArtMethod* method) REQUIRES(!lock_);
 
+  void SweepRootTables(IsMarkedVisitor* visitor)
+      REQUIRES(!lock_)
+      REQUIRES_SHARED(Locks::mutator_lock_);
+
  private:
   // Take ownership of maps.
   JitCodeCache(MemMap* code_map,
@@ -201,13 +213,15 @@
   // allocation fails. Return null if the allocation fails.
   uint8_t* CommitCodeInternal(Thread* self,
                               ArtMethod* method,
-                              const uint8_t* vmap_table,
+                              uint8_t* stack_map,
+                              uint8_t* roots_data,
                               size_t frame_size_in_bytes,
                               size_t core_spill_mask,
                               size_t fp_spill_mask,
                               const uint8_t* code,
                               size_t code_size,
-                              bool osr)
+                              bool osr,
+                              Handle<mirror::ObjectArray<mirror::Object>> roots)
       REQUIRES(!lock_)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
diff --git a/runtime/oat_quick_method_header.h b/runtime/oat_quick_method_header.h
index ee5002f..4afca7d 100644
--- a/runtime/oat_quick_method_header.h
+++ b/runtime/oat_quick_method_header.h
@@ -67,6 +67,11 @@
     return data;
   }
 
+  uint8_t* GetOptimizedCodeInfoPtr() {
+    DCHECK(IsOptimized());
+    return code_ - vmap_table_offset_;
+  }
+
   CodeInfo GetOptimizedCodeInfo() const {
     return CodeInfo(GetOptimizedCodeInfoPtr());
   }
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index ca65c2b..d90e896 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -81,6 +81,7 @@
 #include "intern_table.h"
 #include "interpreter/interpreter.h"
 #include "jit/jit.h"
+#include "jit/jit_code_cache.h"
 #include "jni_internal.h"
 #include "linear_alloc.h"
 #include "mirror/array.h"
@@ -491,6 +492,14 @@
   GetMonitorList()->SweepMonitorList(visitor);
   GetJavaVM()->SweepJniWeakGlobals(visitor);
   GetHeap()->SweepAllocationRecords(visitor);
+  if (GetJit() != nullptr) {
+    // Visit JIT literal tables. Objects in these tables are classes and strings
+    // and only classes can be affected by class unloading. The strings always
+    // stay alive as they are strongly interned.
+    // TODO: Move this closer to CleanupClassLoaders, to avoid blocking weak accesses
+    // from mutators. See b/32167580.
+    GetJit()->GetCodeCache()->SweepRootTables(visitor);
+  }
 
   // All other generic system-weak holders.
   for (gc::AbstractSystemWeakHolder* holder : system_weak_holders_) {