Rewrite class initialization check elimination.

Split the notion of type being in dex cache away from the
class being initialized. Include static invokes in the class
initialization elimination pass.

Change-Id: Ie3760d8fd55b987f9507f32ef51456a57d79e3fb
diff --git a/compiler/dex/compiler_enums.h b/compiler/dex/compiler_enums.h
index beeb3ad..0b76999 100644
--- a/compiler/dex/compiler_enums.h
+++ b/compiler/dex/compiler_enums.h
@@ -313,7 +313,8 @@
   kMIRNullCheckOnly,
   kMIRIgnoreRangeCheck,
   kMIRRangeCheckOnly,
-  kMIRIgnoreClInitCheck,
+  kMIRClassIsInitialized,
+  kMIRClassIsInDexCache,
   kMirIgnoreDivZeroCheck,
   kMIRInlined,                        // Invoke is inlined (ie dead).
   kMIRInlinedPred,                    // Invoke is inlined via prediction.
diff --git a/compiler/dex/global_value_numbering_test.cc b/compiler/dex/global_value_numbering_test.cc
index d1bca29..35d5b99 100644
--- a/compiler/dex/global_value_numbering_test.cc
+++ b/compiler/dex/global_value_numbering_test.cc
@@ -161,7 +161,8 @@
       MirSFieldLoweringInfo field_info(def->field_idx);
       // Mark even unresolved fields as initialized.
       field_info.flags_ = MirSFieldLoweringInfo::kFlagIsStatic |
-          MirSFieldLoweringInfo::kFlagIsInitialized;
+          MirSFieldLoweringInfo::kFlagClassIsInitialized;
+      // NOTE: MirSFieldLoweringInfo::kFlagClassIsInDexCache isn't used by GVN.
       if (def->declaring_dex_file != 0u) {
         field_info.declaring_dex_file_ = reinterpret_cast<const DexFile*>(def->declaring_dex_file);
         field_info.declaring_field_idx_ = def->declaring_field_idx;
diff --git a/compiler/dex/local_value_numbering.cc b/compiler/dex/local_value_numbering.cc
index 5456f4d..a171d7c 100644
--- a/compiler/dex/local_value_numbering.cc
+++ b/compiler/dex/local_value_numbering.cc
@@ -1314,7 +1314,8 @@
 uint16_t LocalValueNumbering::HandleSGet(MIR* mir, uint16_t opcode) {
   const MirSFieldLoweringInfo& field_info = gvn_->GetMirGraph()->GetSFieldLoweringInfo(mir);
   if (!field_info.IsResolved() || field_info.IsVolatile() ||
-      (!field_info.IsInitialized() && (mir->optimization_flags & MIR_IGNORE_CLINIT_CHECK) == 0)) {
+      (!field_info.IsClassInitialized() &&
+       (mir->optimization_flags & MIR_CLASS_IS_INITIALIZED) == 0)) {
     // Volatile SGETs (and unresolved fields are potentially volatile) have acquire semantics
     // and class initialization can call arbitrary functions, we need to wipe aliasing values.
     HandleInvokeOrClInitOrAcquireOp(mir);
@@ -1350,7 +1351,8 @@
 
 void LocalValueNumbering::HandleSPut(MIR* mir, uint16_t opcode) {
   const MirSFieldLoweringInfo& field_info = gvn_->GetMirGraph()->GetSFieldLoweringInfo(mir);
-  if (!field_info.IsInitialized() && (mir->optimization_flags & MIR_IGNORE_CLINIT_CHECK) == 0) {
+  if (!field_info.IsClassInitialized() &&
+      (mir->optimization_flags & MIR_CLASS_IS_INITIALIZED) == 0) {
     // Class initialization can call arbitrary functions, we need to wipe aliasing values.
     HandleInvokeOrClInitOrAcquireOp(mir);
   }
diff --git a/compiler/dex/local_value_numbering_test.cc b/compiler/dex/local_value_numbering_test.cc
index 33d6c14..824c323 100644
--- a/compiler/dex/local_value_numbering_test.cc
+++ b/compiler/dex/local_value_numbering_test.cc
@@ -114,7 +114,8 @@
       MirSFieldLoweringInfo field_info(def->field_idx);
       // Mark even unresolved fields as initialized.
       field_info.flags_ = MirSFieldLoweringInfo::kFlagIsStatic |
-          MirSFieldLoweringInfo::kFlagIsInitialized;
+          MirSFieldLoweringInfo::kFlagClassIsInitialized;
+      // NOTE: MirSFieldLoweringInfo::kFlagClassIsInDexCache isn't used by LVN.
       if (def->declaring_dex_file != 0u) {
         field_info.declaring_dex_file_ = reinterpret_cast<const DexFile*>(def->declaring_dex_file);
         field_info.declaring_field_idx_ = def->declaring_field_idx;
@@ -172,7 +173,7 @@
   void MakeSFieldUninitialized(uint32_t sfield_index) {
     CHECK_LT(sfield_index, cu_.mir_graph->sfield_lowering_infos_.size());
     cu_.mir_graph->sfield_lowering_infos_[sfield_index].flags_ &=
-        ~MirSFieldLoweringInfo::kFlagIsInitialized;
+        ~MirSFieldLoweringInfo::kFlagClassIsInitialized;
   }
 
   void PerformLVN() {
diff --git a/compiler/dex/mir_dataflow.cc b/compiler/dex/mir_dataflow.cc
index 0a6924c..e6a8cea 100644
--- a/compiler/dex/mir_dataflow.cc
+++ b/compiler/dex/mir_dataflow.cc
@@ -319,46 +319,46 @@
   DF_UA | DF_UB | DF_NULL_CHK_B | DF_REF_B | DF_IFIELD | DF_LVN,
 
   // 60 SGET vAA, field@BBBB
-  DF_DA | DF_SFIELD | DF_UMS,
+  DF_DA | DF_SFIELD | DF_CLINIT | DF_UMS,
 
   // 61 SGET_WIDE vAA, field@BBBB
-  DF_DA | DF_A_WIDE | DF_SFIELD | DF_UMS,
+  DF_DA | DF_A_WIDE | DF_SFIELD | DF_CLINIT | DF_UMS,
 
   // 62 SGET_OBJECT vAA, field@BBBB
-  DF_DA | DF_REF_A | DF_SFIELD | DF_UMS,
+  DF_DA | DF_REF_A | DF_SFIELD | DF_CLINIT | DF_UMS,
 
   // 63 SGET_BOOLEAN vAA, field@BBBB
-  DF_DA | DF_SFIELD | DF_UMS,
+  DF_DA | DF_SFIELD | DF_CLINIT | DF_UMS,
 
   // 64 SGET_BYTE vAA, field@BBBB
-  DF_DA | DF_SFIELD | DF_UMS,
+  DF_DA | DF_SFIELD | DF_CLINIT | DF_UMS,
 
   // 65 SGET_CHAR vAA, field@BBBB
-  DF_DA | DF_SFIELD | DF_UMS,
+  DF_DA | DF_SFIELD | DF_CLINIT | DF_UMS,
 
   // 66 SGET_SHORT vAA, field@BBBB
-  DF_DA | DF_SFIELD | DF_UMS,
+  DF_DA | DF_SFIELD | DF_CLINIT | DF_UMS,
 
   // 67 SPUT vAA, field@BBBB
-  DF_UA | DF_SFIELD | DF_UMS,
+  DF_UA | DF_SFIELD | DF_CLINIT | DF_UMS,
 
   // 68 SPUT_WIDE vAA, field@BBBB
-  DF_UA | DF_A_WIDE | DF_SFIELD | DF_UMS,
+  DF_UA | DF_A_WIDE | DF_SFIELD | DF_CLINIT | DF_UMS,
 
   // 69 SPUT_OBJECT vAA, field@BBBB
-  DF_UA | DF_REF_A | DF_SFIELD | DF_UMS,
+  DF_UA | DF_REF_A | DF_SFIELD | DF_CLINIT | DF_UMS,
 
   // 6A SPUT_BOOLEAN vAA, field@BBBB
-  DF_UA | DF_SFIELD | DF_UMS,
+  DF_UA | DF_SFIELD | DF_CLINIT | DF_UMS,
 
   // 6B SPUT_BYTE vAA, field@BBBB
-  DF_UA | DF_SFIELD | DF_UMS,
+  DF_UA | DF_SFIELD | DF_CLINIT | DF_UMS,
 
   // 6C SPUT_CHAR vAA, field@BBBB
-  DF_UA | DF_SFIELD | DF_UMS,
+  DF_UA | DF_SFIELD | DF_CLINIT | DF_UMS,
 
   // 6D SPUT_SHORT vAA, field@BBBB
-  DF_UA | DF_SFIELD | DF_UMS,
+  DF_UA | DF_SFIELD | DF_CLINIT | DF_UMS,
 
   // 6E INVOKE_VIRTUAL {vD, vE, vF, vG, vA}
   DF_FORMAT_35C | DF_NULL_CHK_OUT0 | DF_UMS,
@@ -370,7 +370,7 @@
   DF_FORMAT_35C | DF_NULL_CHK_OUT0 | DF_UMS,
 
   // 71 INVOKE_STATIC {vD, vE, vF, vG, vA}
-  DF_FORMAT_35C | DF_UMS,
+  DF_FORMAT_35C | DF_CLINIT | DF_UMS,
 
   // 72 INVOKE_INTERFACE {vD, vE, vF, vG, vA}
   DF_FORMAT_35C | DF_NULL_CHK_OUT0 | DF_UMS,
@@ -388,7 +388,7 @@
   DF_FORMAT_3RC | DF_NULL_CHK_OUT0 | DF_UMS,
 
   // 77 INVOKE_STATIC_RANGE {vCCCC .. vNNNN}
-  DF_FORMAT_3RC | DF_UMS,
+  DF_FORMAT_3RC | DF_CLINIT | DF_UMS,
 
   // 78 INVOKE_INTERFACE_RANGE {vCCCC .. vNNNN}
   DF_FORMAT_3RC | DF_NULL_CHK_OUT0 | DF_UMS,
@@ -718,10 +718,10 @@
   DF_UA | DF_UB | DF_NULL_CHK_B | DF_REF_B | DF_IFIELD | DF_LVN,
 
   // E5 SGET_VOLATILE
-  DF_DA | DF_SFIELD | DF_UMS,
+  DF_DA | DF_SFIELD | DF_CLINIT | DF_UMS,
 
   // E6 SPUT_VOLATILE
-  DF_UA | DF_SFIELD | DF_UMS,
+  DF_UA | DF_SFIELD | DF_CLINIT | DF_UMS,
 
   // E7 IGET_OBJECT_VOLATILE
   DF_DA | DF_UB | DF_NULL_CHK_B | DF_REF_A | DF_REF_B | DF_IFIELD | DF_LVN,
@@ -733,10 +733,10 @@
   DF_UA | DF_A_WIDE | DF_UB | DF_NULL_CHK_B | DF_REF_B | DF_IFIELD | DF_LVN,
 
   // EA SGET_WIDE_VOLATILE
-  DF_DA | DF_A_WIDE | DF_SFIELD | DF_UMS,
+  DF_DA | DF_A_WIDE | DF_SFIELD | DF_CLINIT | DF_UMS,
 
   // EB SPUT_WIDE_VOLATILE
-  DF_UA | DF_A_WIDE | DF_SFIELD | DF_UMS,
+  DF_UA | DF_A_WIDE | DF_SFIELD | DF_CLINIT | DF_UMS,
 
   // EC BREAKPOINT
   DF_NOP,
@@ -790,10 +790,10 @@
   DF_UA | DF_UB | DF_NULL_CHK_B | DF_REF_A | DF_REF_B | DF_IFIELD | DF_LVN,
 
   // FD SGET_OBJECT_VOLATILE
-  DF_DA | DF_REF_A | DF_SFIELD | DF_UMS,
+  DF_DA | DF_REF_A | DF_SFIELD | DF_CLINIT | DF_UMS,
 
   // FE SPUT_OBJECT_VOLATILE
-  DF_UA | DF_REF_A | DF_SFIELD | DF_UMS,
+  DF_UA | DF_REF_A | DF_SFIELD | DF_CLINIT | DF_UMS,
 
   // FF UNUSED_FF
   DF_NOP,
diff --git a/compiler/dex/mir_field_info.cc b/compiler/dex/mir_field_info.cc
index 68247b7..1db3b5b 100644
--- a/compiler/dex/mir_field_info.cc
+++ b/compiler/dex/mir_field_info.cc
@@ -62,7 +62,7 @@
     compiler_driver->GetResolvedFieldDexFileLocation(resolved_field,
         &it->declaring_dex_file_, &it->declaring_class_idx_, &it->declaring_field_idx_);
     bool is_volatile = compiler_driver->IsFieldVolatile(resolved_field);
-    it->field_offset_ = resolved_field->GetOffset();
+    it->field_offset_ = compiler_driver->GetFieldOffset(resolved_field);
     std::pair<bool, bool> fast_path = compiler_driver->IsFastInstanceField(
         dex_cache.Get(), referrer_class.Get(), resolved_field, field_idx);
     it->flags_ = 0u |  // Without kFlagIsStatic.
@@ -94,7 +94,7 @@
   Handle<mirror::DexCache> dex_cache(hs.NewHandle(compiler_driver->GetDexCache(mUnit)));
   Handle<mirror::ClassLoader> class_loader(
       hs.NewHandle(compiler_driver->GetClassLoader(soa, mUnit)));
-  Handle<mirror::Class> referrer_class(hs.NewHandle(
+  Handle<mirror::Class> referrer_class_handle(hs.NewHandle(
       compiler_driver->ResolveCompilingMethodsClass(soa, dex_cache, class_loader, mUnit)));
   // Even if the referrer class is unresolved (i.e. we're compiling a method without class
   // definition) we still want to resolve fields and record all available info.
@@ -110,16 +110,27 @@
         &it->declaring_dex_file_, &it->declaring_class_idx_, &it->declaring_field_idx_);
     bool is_volatile = compiler_driver->IsFieldVolatile(resolved_field) ? 1u : 0u;
 
-    bool is_referrers_class, is_initialized;
+    mirror::Class* referrer_class = referrer_class_handle.Get();
     std::pair<bool, bool> fast_path = compiler_driver->IsFastStaticField(
-        dex_cache.Get(), referrer_class.Get(), resolved_field, field_idx, &it->field_offset_,
-        &it->storage_index_, &is_referrers_class, &is_initialized);
-    it->flags_ = kFlagIsStatic |
+        dex_cache.Get(), referrer_class, resolved_field, field_idx, &it->storage_index_);
+    uint16_t flags = kFlagIsStatic |
         (is_volatile ? kFlagIsVolatile : 0u) |
         (fast_path.first ? kFlagFastGet : 0u) |
-        (fast_path.second ? kFlagFastPut : 0u) |
-        (is_referrers_class ? kFlagIsReferrersClass : 0u) |
-        (is_initialized ? kFlagIsInitialized : 0u);
+        (fast_path.second ? kFlagFastPut : 0u);
+    if (fast_path.first) {
+      it->field_offset_ = compiler_driver->GetFieldOffset(resolved_field);
+      bool is_referrers_class =
+          compiler_driver->IsStaticFieldInReferrerClass(referrer_class, resolved_field);
+      bool is_class_initialized =
+          compiler_driver->IsStaticFieldsClassInitialized(referrer_class, resolved_field);
+      bool is_class_in_dex_cache = !is_referrers_class &&  // If referrer's class, we don't care.
+          compiler_driver->CanAssumeTypeIsPresentInDexCache(*dex_cache->GetDexFile(),
+                                                            it->storage_index_);
+      flags |= (is_referrers_class ? kFlagIsReferrersClass : 0u) |
+          (is_class_initialized ? kFlagClassIsInitialized : 0u) |
+          (is_class_in_dex_cache ? kFlagClassIsInDexCache : 0u);
+    }
+    it->flags_ = flags;
   }
 }
 
diff --git a/compiler/dex/mir_field_info.h b/compiler/dex/mir_field_info.h
index 1842a16..c628182 100644
--- a/compiler/dex/mir_field_info.h
+++ b/compiler/dex/mir_field_info.h
@@ -173,8 +173,12 @@
     return (flags_ & kFlagIsReferrersClass) != 0u;
   }
 
-  bool IsInitialized() const {
-    return (flags_ & kFlagIsInitialized) != 0u;
+  bool IsClassInitialized() const {
+    return (flags_ & kFlagClassIsInitialized) != 0u;
+  }
+
+  bool IsClassInDexCache() const {
+    return (flags_ & kFlagClassIsInDexCache) != 0u;
   }
 
   MemberOffset FieldOffset() const {
@@ -190,14 +194,16 @@
     kBitFastGet = kFieldInfoBitEnd,
     kBitFastPut,
     kBitIsReferrersClass,
-    kBitIsInitialized,
+    kBitClassIsInitialized,
+    kBitClassIsInDexCache,
     kSFieldLoweringInfoBitEnd
   };
   COMPILE_ASSERT(kSFieldLoweringInfoBitEnd <= 16, too_many_flags);
   static constexpr uint16_t kFlagFastGet = 1u << kBitFastGet;
   static constexpr uint16_t kFlagFastPut = 1u << kBitFastPut;
   static constexpr uint16_t kFlagIsReferrersClass = 1u << kBitIsReferrersClass;
-  static constexpr uint16_t kFlagIsInitialized = 1u << kBitIsInitialized;
+  static constexpr uint16_t kFlagClassIsInitialized = 1u << kBitClassIsInitialized;
+  static constexpr uint16_t kFlagClassIsInDexCache = 1u << kBitClassIsInDexCache;
 
   // The member offset of the field, 0u if unresolved.
   MemberOffset field_offset_;
diff --git a/compiler/dex/mir_graph.cc b/compiler/dex/mir_graph.cc
index 3fa80b9..e0f471e 100644
--- a/compiler/dex/mir_graph.cc
+++ b/compiler/dex/mir_graph.cc
@@ -934,7 +934,7 @@
                 bb->first_mir_insn ? " | " : " ");
         for (mir = bb->first_mir_insn; mir; mir = mir->next) {
             int opcode = mir->dalvikInsn.opcode;
-            fprintf(file, "    {%04x %s %s %s %s %s %s %s\\l}%s\\\n", mir->offset,
+            fprintf(file, "    {%04x %s %s %s %s %s %s %s %s\\l}%s\\\n", mir->offset,
                       mir->ssa_rep ? GetDalvikDisassembly(mir) :
                       !MIR::DecodedInstruction::IsPseudoMirOp(opcode) ?
                         Instruction::Name(mir->dalvikInsn.opcode) :
@@ -944,7 +944,8 @@
                       (mir->optimization_flags & MIR_IGNORE_SUSPEND_CHECK) != 0 ? " no_suspendcheck" : " ",
                       (mir->optimization_flags & MIR_STORE_NON_TEMPORAL) != 0 ? " non_temporal" : " ",
                       (mir->optimization_flags & MIR_CALLEE) != 0 ? " inlined" : " ",
-                      (mir->optimization_flags & MIR_IGNORE_CLINIT_CHECK) != 0 ? " no_clinit" : " ",
+                      (mir->optimization_flags & MIR_CLASS_IS_INITIALIZED) != 0 ? " cl_inited" : " ",
+                      (mir->optimization_flags & MIR_CLASS_IS_IN_DEX_CACHE) != 0 ? " cl_in_cache" : " ",
                       mir->next ? " | " : " ");
         }
         fprintf(file, "  }\"];\n\n");
diff --git a/compiler/dex/mir_graph.h b/compiler/dex/mir_graph.h
index 5c74e9e..fd4c473 100644
--- a/compiler/dex/mir_graph.h
+++ b/compiler/dex/mir_graph.h
@@ -69,6 +69,7 @@
   kUsesMethodStar,       // Implicit use of Method*.
   kUsesIField,           // Accesses an instance field (IGET/IPUT).
   kUsesSField,           // Accesses a static field (SGET/SPUT).
+  kCanInitializeClass,   // Can trigger class initialization (SGET/SPUT/INVOKE_STATIC).
   kDoLVN,                // Worth computing local value numbers.
 };
 
@@ -105,6 +106,7 @@
 #define DF_UMS                  (UINT64_C(1) << kUsesMethodStar)
 #define DF_IFIELD               (UINT64_C(1) << kUsesIField)
 #define DF_SFIELD               (UINT64_C(1) << kUsesSField)
+#define DF_CLINIT               (UINT64_C(1) << kCanInitializeClass)
 #define DF_LVN                  (UINT64_C(1) << kDoLVN)
 
 #define DF_HAS_USES             (DF_UA | DF_UB | DF_UC)
@@ -146,7 +148,8 @@
 #define MIR_NULL_CHECK_ONLY             (1 << kMIRNullCheckOnly)
 #define MIR_IGNORE_RANGE_CHECK          (1 << kMIRIgnoreRangeCheck)
 #define MIR_RANGE_CHECK_ONLY            (1 << kMIRRangeCheckOnly)
-#define MIR_IGNORE_CLINIT_CHECK         (1 << kMIRIgnoreClInitCheck)
+#define MIR_CLASS_IS_INITIALIZED        (1 << kMIRClassIsInitialized)
+#define MIR_CLASS_IS_IN_DEX_CACHE       (1 << kMIRClassIsInDexCache)
 #define MIR_IGNORE_DIV_ZERO_CHECK       (1 << kMirIgnoreDivZeroCheck)
 #define MIR_INLINED                     (1 << kMIRInlined)
 #define MIR_INLINED_PRED                (1 << kMIRInlinedPred)
diff --git a/compiler/dex/mir_method_info.cc b/compiler/dex/mir_method_info.cc
index cc2bd95..b234950 100644
--- a/compiler/dex/mir_method_info.cc
+++ b/compiler/dex/mir_method_info.cc
@@ -76,14 +76,16 @@
     int fast_path_flags = compiler_driver->IsFastInvoke(
         soa, dex_cache, class_loader, mUnit, referrer_class.Get(), resolved_method, &invoke_type,
         &target_method, devirt_target, &it->direct_code_, &it->direct_method_);
-    bool needs_clinit =
-        compiler_driver->NeedsClassInitialization(referrer_class.Get(), resolved_method);
+    bool is_referrers_class = (referrer_class.Get() == resolved_method->GetDeclaringClass());
+    bool is_class_initialized =
+        compiler_driver->IsMethodsClassInitialized(referrer_class.Get(), resolved_method);
     uint16_t other_flags = it->flags_ &
-        ~(kFlagFastPath | kFlagNeedsClassInitialization | (kInvokeTypeMask << kBitSharpTypeBegin));
+        ~(kFlagFastPath | kFlagClassIsInitialized | (kInvokeTypeMask << kBitSharpTypeBegin));
     it->flags_ = other_flags |
         (fast_path_flags != 0 ? kFlagFastPath : 0u) |
         (static_cast<uint16_t>(invoke_type) << kBitSharpTypeBegin) |
-        (needs_clinit ? kFlagNeedsClassInitialization : 0u);
+        (is_referrers_class ? kFlagIsReferrersClass : 0u) |
+        (is_class_initialized ? kFlagClassIsInitialized : 0u);
     it->target_dex_file_ = target_method.dex_file;
     it->target_method_idx_ = target_method.dex_method_index;
     it->stats_flags_ = fast_path_flags;
diff --git a/compiler/dex/mir_method_info.h b/compiler/dex/mir_method_info.h
index efe92f3..e64b028 100644
--- a/compiler/dex/mir_method_info.h
+++ b/compiler/dex/mir_method_info.h
@@ -123,8 +123,12 @@
     return (flags_ & kFlagFastPath) != 0u;
   }
 
-  bool NeedsClassInitialization() const {
-    return (flags_ & kFlagNeedsClassInitialization) != 0u;
+  bool IsReferrersClass() const {
+    return (flags_ & kFlagIsReferrersClass) != 0u;
+  }
+
+  bool IsClassInitialized() const {
+    return (flags_ & kFlagClassIsInitialized) != 0u;
   }
 
   InvokeType GetInvokeType() const {
@@ -162,12 +166,14 @@
     kBitInvokeTypeEnd = kBitInvokeTypeBegin + 3,  // 3 bits for invoke type.
     kBitSharpTypeBegin,
     kBitSharpTypeEnd = kBitSharpTypeBegin + 3,  // 3 bits for sharp type.
-    kBitNeedsClassInitialization = kBitSharpTypeEnd,
-    kMethodLoweringInfoEnd
+    kBitIsReferrersClass = kBitSharpTypeEnd,
+    kBitClassIsInitialized,
+    kMethodLoweringInfoBitEnd
   };
-  COMPILE_ASSERT(kMethodLoweringInfoEnd <= 16, too_many_flags);
+  COMPILE_ASSERT(kMethodLoweringInfoBitEnd <= 16, too_many_flags);
   static constexpr uint16_t kFlagFastPath = 1u << kBitFastPath;
-  static constexpr uint16_t kFlagNeedsClassInitialization = 1u << kBitNeedsClassInitialization;
+  static constexpr uint16_t kFlagIsReferrersClass = 1u << kBitIsReferrersClass;
+  static constexpr uint16_t kFlagClassIsInitialized = 1u << kBitClassIsInitialized;
   static constexpr uint16_t kInvokeTypeMask = 7u;
   COMPILE_ASSERT((1u << (kBitInvokeTypeEnd - kBitInvokeTypeBegin)) - 1u == kInvokeTypeMask,
                  assert_invoke_type_bits_ok);
@@ -185,7 +191,7 @@
   uint16_t vtable_idx_;
   int stats_flags_;
 
-  friend class ClassInitCheckEliminationTest;
+  friend class MirOptimizationTest;
 };
 
 }  // namespace art
diff --git a/compiler/dex/mir_optimization.cc b/compiler/dex/mir_optimization.cc
index 96505ab..8e583cc 100644
--- a/compiler/dex/mir_optimization.cc
+++ b/compiler/dex/mir_optimization.cc
@@ -778,8 +778,8 @@
       const MirSFieldLoweringInfo& field_info = GetSFieldLoweringInfo(throw_insn);
       bool fast = ((df_attributes & DF_DA)  ? field_info.FastPut() : field_info.FastGet());
       // Don't combine if the SGET/SPUT can call <clinit>().
-      bool clinit = !field_info.IsInitialized() &&
-          (throw_insn->optimization_flags & MIR_IGNORE_CLINIT_CHECK) == 0;
+      bool clinit = !field_info.IsClassInitialized() &&
+          (throw_insn->optimization_flags & MIR_CLASS_IS_INITIALIZED) == 0;
       ok = fast && !clinit;
     } else if ((df_attributes & DF_HAS_RANGE_CHKS) != 0) {
       // Only AGET/APUT have range checks. We have processed the AGET/APUT null check above.
@@ -1116,7 +1116,7 @@
 
 bool MIRGraph::EliminateClassInitChecksGate() {
   if ((cu_->disable_opt & (1 << kClassInitCheckElimination)) != 0 ||
-      !cu_->mir_graph->HasStaticFieldAccess()) {
+      (merged_df_flags_ & DF_CLINIT) == 0) {
     return false;
   }
 
@@ -1127,6 +1127,7 @@
   const size_t end = (GetNumDalvikInsns() + 1u) / 2u;
   temp_insn_data_ = static_cast<uint16_t*>(
       temp_scoped_alloc_->Alloc(end * sizeof(*temp_insn_data_), kArenaAllocGrowableArray));
+  std::fill_n(temp_insn_data_, end, 0xffffu);
 
   uint32_t unique_class_count = 0u;
   {
@@ -1162,8 +1163,7 @@
           if (mir->dalvikInsn.opcode >= Instruction::SGET &&
               mir->dalvikInsn.opcode <= Instruction::SPUT_SHORT) {
             const MirSFieldLoweringInfo& field_info = GetSFieldLoweringInfo(mir);
-            uint16_t index = 0xffffu;
-            if (!field_info.IsInitialized()) {
+            if (!field_info.IsReferrersClass()) {
               DCHECK_LT(class_to_index_map.size(), 0xffffu);
               MapEntry entry = {
                   // Treat unresolved fields as if each had its own class.
@@ -1173,10 +1173,24 @@
                                           : field_info.FieldIndex(),
                   static_cast<uint16_t>(class_to_index_map.size())
               };
-              index = class_to_index_map.insert(entry).first->index;
+              uint16_t index = class_to_index_map.insert(entry).first->index;
+              // Using offset/2 for index into temp_insn_data_.
+              temp_insn_data_[mir->offset / 2u] = index;
             }
-            // Using offset/2 for index into temp_insn_data_.
-            temp_insn_data_[mir->offset / 2u] = index;
+          } else if (mir->dalvikInsn.opcode == Instruction::INVOKE_STATIC ||
+              mir->dalvikInsn.opcode == Instruction::INVOKE_STATIC_RANGE) {
+            const MirMethodLoweringInfo& method_info = GetMethodLoweringInfo(mir);
+            DCHECK(method_info.IsStatic());
+            if (method_info.FastPath() && !method_info.IsReferrersClass()) {
+              MapEntry entry = {
+                  method_info.DeclaringDexFile(),
+                  method_info.DeclaringClassIndex(),
+                  static_cast<uint16_t>(class_to_index_map.size())
+              };
+              uint16_t index = class_to_index_map.insert(entry).first->index;
+              // Using offset/2 for index into temp_insn_data_.
+              temp_insn_data_[mir->offset / 2u] = index;
+            }
           }
         }
       }
@@ -1191,7 +1205,8 @@
     return false;
   }
 
-  temp_bit_vector_size_ = unique_class_count;
+  // 2 bits for each class: is class initialized, is class in dex cache.
+  temp_bit_vector_size_ = 2u * unique_class_count;
   temp_bit_vector_ = new (temp_scoped_alloc_.get()) ArenaBitVector(
       temp_scoped_alloc_.get(), temp_bit_vector_size_, false, kBitMapClInitCheck);
   temp_bit_matrix_ = static_cast<ArenaBitVector**>(
@@ -1219,26 +1234,18 @@
   DCHECK(classes_to_check != nullptr);
   if (bb->block_type == kEntryBlock) {
     classes_to_check->SetInitialBits(temp_bit_vector_size_);
-  } else if (bb->predecessors.size() == 1) {
-    BasicBlock* pred_bb = GetBasicBlock(bb->predecessors[0]);
-    // pred_bb must have already been processed at least once.
-    DCHECK(pred_bb != nullptr);
-    DCHECK(temp_bit_matrix_[pred_bb->id] != nullptr);
-    classes_to_check->Copy(temp_bit_matrix_[pred_bb->id]);
   } else {
     // Starting state is union of all incoming arcs.
     bool copied_first = false;
     for (BasicBlockId pred_id : bb->predecessors) {
-      BasicBlock* pred_bb = GetBasicBlock(pred_id);
-      DCHECK(pred_bb != nullptr);
-      if (temp_bit_matrix_[pred_bb->id] == nullptr) {
+      if (temp_bit_matrix_[pred_id] == nullptr) {
         continue;
       }
       if (!copied_first) {
         copied_first = true;
-        classes_to_check->Copy(temp_bit_matrix_[pred_bb->id]);
+        classes_to_check->Copy(temp_bit_matrix_[pred_id]);
       } else {
-        classes_to_check->Union(temp_bit_matrix_[pred_bb->id]);
+        classes_to_check->Union(temp_bit_matrix_[pred_id]);
       }
     }
     DCHECK(copied_first);  // At least one predecessor must have been processed before this bb.
@@ -1247,22 +1254,46 @@
 
   // Walk through the instruction in the block, updating as necessary
   for (MIR* mir = bb->first_mir_insn; mir != nullptr; mir = mir->next) {
-    if (mir->dalvikInsn.opcode >= Instruction::SGET &&
-        mir->dalvikInsn.opcode <= Instruction::SPUT_SHORT) {
-      uint16_t index = temp_insn_data_[mir->offset / 2u];
-      if (index != 0xffffu) {
-        if (mir->dalvikInsn.opcode >= Instruction::SGET &&
-            mir->dalvikInsn.opcode <= Instruction::SPUT_SHORT) {
-          if (!classes_to_check->IsBitSet(index)) {
-            // Eliminate the class init check.
-            mir->optimization_flags |= MIR_IGNORE_CLINIT_CHECK;
-          } else {
-            // Do the class init check.
-            mir->optimization_flags &= ~MIR_IGNORE_CLINIT_CHECK;
-          }
+    uint16_t index = temp_insn_data_[mir->offset / 2u];
+    if (index != 0xffffu) {
+      bool check_initialization = false;
+      bool check_dex_cache = false;
+
+      // NOTE: index != 0xffff does not guarantee that this is an SGET/SPUT/INVOKE_STATIC.
+      // Dex instructions with width 1 can have the same offset/2.
+
+      if (mir->dalvikInsn.opcode >= Instruction::SGET &&
+          mir->dalvikInsn.opcode <= Instruction::SPUT_SHORT) {
+        check_initialization = true;
+        check_dex_cache = true;
+      } else if (mir->dalvikInsn.opcode == Instruction::INVOKE_STATIC ||
+               mir->dalvikInsn.opcode == Instruction::INVOKE_STATIC_RANGE) {
+        check_initialization = true;
+        // NOTE: INVOKE_STATIC doesn't guarantee that the type will be in the dex cache.
+      }
+
+      if (check_dex_cache) {
+        uint32_t check_dex_cache_index = 2u * index + 1u;
+        if (!classes_to_check->IsBitSet(check_dex_cache_index)) {
+          // Eliminate the class init check.
+          mir->optimization_flags |= MIR_CLASS_IS_IN_DEX_CACHE;
+        } else {
+          // Do the class init check.
+          mir->optimization_flags &= ~MIR_CLASS_IS_IN_DEX_CACHE;
+        }
+        classes_to_check->ClearBit(check_dex_cache_index);
+      }
+      if (check_initialization) {
+        uint32_t check_clinit_index = 2u * index;
+        if (!classes_to_check->IsBitSet(check_clinit_index)) {
+          // Eliminate the class init check.
+          mir->optimization_flags |= MIR_CLASS_IS_INITIALIZED;
+        } else {
+          // Do the class init check.
+          mir->optimization_flags &= ~MIR_CLASS_IS_INITIALIZED;
         }
         // Mark the class as initialized.
-        classes_to_check->ClearBit(index);
+        classes_to_check->ClearBit(check_clinit_index);
       }
     }
   }
@@ -1425,8 +1456,8 @@
     }
 
     if (sharp_type == kStatic) {
-      bool needs_clinit = method_info.NeedsClassInitialization() &&
-          ((mir->optimization_flags & MIR_IGNORE_CLINIT_CHECK) == 0);
+      bool needs_clinit = !method_info.IsClassInitialized() &&
+          ((mir->optimization_flags & MIR_CLASS_IS_INITIALIZED) == 0);
       if (needs_clinit) {
         continue;
       }
diff --git a/compiler/dex/mir_optimization_test.cc b/compiler/dex/mir_optimization_test.cc
index 337d4ef..8874faf 100644
--- a/compiler/dex/mir_optimization_test.cc
+++ b/compiler/dex/mir_optimization_test.cc
@@ -36,10 +36,21 @@
     BasicBlockId predecessors[kMaxPredecessors];
   };
 
+  struct MethodDef {
+    uint16_t method_idx;
+    uintptr_t declaring_dex_file;
+    uint16_t declaring_class_idx;
+    uint16_t declaring_method_idx;
+    InvokeType invoke_type;
+    InvokeType sharp_type;
+    bool is_referrers_class;
+    bool is_initialized;
+  };
+
   struct MIRDef {
     BasicBlockId bbid;
     Instruction::Code opcode;
-    uint32_t field_info;
+    uint32_t field_or_method_info;
     uint32_t vA;
     uint32_t vB;
     uint32_t vC;
@@ -68,6 +79,19 @@
 #define DEF_BB(type, succ, pred) \
     { type, succ, pred }
 
+#define DEF_SGET_SPUT(bb, opcode, vA, field_info) \
+    { bb, opcode, field_info, vA, 0u, 0u }
+#define DEF_IGET_IPUT(bb, opcode, vA, vB, field_info) \
+    { bb, opcode, field_info, vA, vB, 0u }
+#define DEF_AGET_APUT(bb, opcode, vA, vB, vC) \
+    { bb, opcode, 0u, vA, vB, vC }
+#define DEF_INVOKE(bb, opcode, vC, method_info) \
+    { bb, opcode, method_info, 0u, 0u, vC }
+#define DEF_OTHER1(bb, opcode, vA) \
+    { bb, opcode, 0u, vA, 0u, 0u }
+#define DEF_OTHER2(bb, opcode, vA, vB) \
+    { bb, opcode, 0u, vA, vB, 0u }
+
   void DoPrepareBasicBlocks(const BBDef* defs, size_t count) {
     cu_.mir_graph->block_id_map_.clear();
     cu_.mir_graph->block_list_.clear();
@@ -172,6 +196,35 @@
     check_bb->successor_blocks.push_back(successor_block_info);
   }
 
+  void DoPrepareMethods(const MethodDef* defs, size_t count) {
+    cu_.mir_graph->method_lowering_infos_.clear();
+    cu_.mir_graph->method_lowering_infos_.reserve(count);
+    for (size_t i = 0u; i != count; ++i) {
+      const MethodDef* def = &defs[i];
+      MirMethodLoweringInfo method_info(def->method_idx, def->invoke_type);
+      if (def->declaring_dex_file != 0u) {
+        method_info.declaring_dex_file_ = reinterpret_cast<const DexFile*>(def->declaring_dex_file);
+        method_info.declaring_class_idx_ = def->declaring_class_idx;
+        method_info.declaring_method_idx_ = def->declaring_method_idx;
+      }
+      ASSERT_EQ(def->invoke_type != kStatic, def->sharp_type != kStatic);
+      method_info.flags_ =
+          ((def->invoke_type == kStatic) ? MirMethodLoweringInfo::kFlagIsStatic : 0u) |
+          MirMethodLoweringInfo::kFlagFastPath |
+          (static_cast<uint16_t>(def->invoke_type) << MirMethodLoweringInfo::kBitInvokeTypeBegin) |
+          (static_cast<uint16_t>(def->sharp_type) << MirMethodLoweringInfo::kBitSharpTypeBegin) |
+          ((def->is_referrers_class) ? MirMethodLoweringInfo::kFlagIsReferrersClass : 0u) |
+          ((def->is_initialized) ? MirMethodLoweringInfo::kFlagClassIsInitialized : 0u);
+      ASSERT_EQ(def->declaring_dex_file != 0u, method_info.IsResolved());
+      cu_.mir_graph->method_lowering_infos_.push_back(method_info);
+    }
+  }
+
+  template <size_t count>
+  void PrepareMethods(const MethodDef (&defs)[count]) {
+    DoPrepareMethods(defs, count);
+  }
+
   void DoPrepareMIRs(const MIRDef* defs, size_t count) {
     mir_count_ = count;
     mirs_ = reinterpret_cast<MIR*>(cu_.arena.Alloc(sizeof(MIR) * count, kArenaAllocMIR));
@@ -184,11 +237,16 @@
       BasicBlock* bb = cu_.mir_graph->block_list_[def->bbid];
       bb->AppendMIR(mir);
       if (def->opcode >= Instruction::SGET && def->opcode <= Instruction::SPUT_SHORT) {
-        ASSERT_LT(def->field_info, cu_.mir_graph->sfield_lowering_infos_.size());
-        mir->meta.sfield_lowering_info = def->field_info;
+        ASSERT_LT(def->field_or_method_info, cu_.mir_graph->sfield_lowering_infos_.size());
+        mir->meta.sfield_lowering_info = def->field_or_method_info;
       } else if (def->opcode >= Instruction::IGET && def->opcode <= Instruction::IPUT_SHORT) {
-        ASSERT_LT(def->field_info, cu_.mir_graph->ifield_lowering_infos_.size());
-        mir->meta.ifield_lowering_info = def->field_info;
+        ASSERT_LT(def->field_or_method_info, cu_.mir_graph->ifield_lowering_infos_.size());
+        mir->meta.ifield_lowering_info = def->field_or_method_info;
+      } else if (def->opcode >= Instruction::INVOKE_VIRTUAL &&
+          def->opcode < Instruction::INVOKE_INTERFACE_RANGE &&
+          def->opcode != Instruction::RETURN_VOID_BARRIER) {
+        ASSERT_LT(def->field_or_method_info, cu_.mir_graph->method_lowering_infos_.size());
+        mir->meta.method_lowering_info = def->field_or_method_info;
       }
       mir->dalvikInsn.vA = def->vA;
       mir->dalvikInsn.vB = def->vB;
@@ -251,7 +309,7 @@
         field_info.flags_ = MirSFieldLoweringInfo::kFlagIsStatic;
       }
       ASSERT_EQ(def->declaring_dex_file != 0u, field_info.IsResolved());
-      ASSERT_FALSE(field_info.IsInitialized());
+      ASSERT_FALSE(field_info.IsClassInitialized());
       cu_.mir_graph->sfield_lowering_infos_.push_back(field_info);
     }
   }
@@ -326,12 +384,13 @@
 
   NullCheckEliminationTest()
       : MirOptimizationTest() {
+    static const MethodDef methods[] = {
+        { 0u, 1u, 0u, 0u, kDirect, kDirect, false, false },  // Dummy.
+    };
+    PrepareMethods(methods);
   }
 };
 
-#define DEF_SGET_SPUT_V0(bb, opcode, field_info) \
-    { bb, opcode, field_info, 0u, 0u, 0u }
-
 TEST_F(ClassInitCheckEliminationTest, SingleBlock) {
   static const SFieldDef sfields[] = {
       { 0u, 1u, 0u, 0u },
@@ -342,17 +401,17 @@
       { 5u, 0u, 0u, 0u },  // Unresolved.
   };
   static const MIRDef mirs[] = {
-      DEF_SGET_SPUT_V0(3u, Instruction::SPUT, 5u),  // Unresolved.
-      DEF_SGET_SPUT_V0(3u, Instruction::SPUT, 0u),
-      DEF_SGET_SPUT_V0(3u, Instruction::SGET, 1u),
-      DEF_SGET_SPUT_V0(3u, Instruction::SGET, 2u),
-      DEF_SGET_SPUT_V0(3u, Instruction::SGET, 5u),  // Unresolved.
-      DEF_SGET_SPUT_V0(3u, Instruction::SGET, 0u),
-      DEF_SGET_SPUT_V0(3u, Instruction::SGET, 1u),
-      DEF_SGET_SPUT_V0(3u, Instruction::SGET, 2u),
-      DEF_SGET_SPUT_V0(3u, Instruction::SGET, 5u),  // Unresolved.
-      DEF_SGET_SPUT_V0(3u, Instruction::SGET, 3u),
-      DEF_SGET_SPUT_V0(3u, Instruction::SGET, 4u),
+      DEF_SGET_SPUT(3u, Instruction::SPUT, 0u, 5u),  // Unresolved.
+      DEF_SGET_SPUT(3u, Instruction::SPUT, 0u, 0u),
+      DEF_SGET_SPUT(3u, Instruction::SGET, 0u, 1u),
+      DEF_SGET_SPUT(3u, Instruction::SGET, 0u, 2u),
+      DEF_SGET_SPUT(3u, Instruction::SGET, 0u, 5u),  // Unresolved.
+      DEF_SGET_SPUT(3u, Instruction::SGET, 0u, 0u),
+      DEF_SGET_SPUT(3u, Instruction::SGET, 0u, 1u),
+      DEF_SGET_SPUT(3u, Instruction::SGET, 0u, 2u),
+      DEF_SGET_SPUT(3u, Instruction::SGET, 0u, 5u),  // Unresolved.
+      DEF_SGET_SPUT(3u, Instruction::SGET, 0u, 3u),
+      DEF_SGET_SPUT(3u, Instruction::SGET, 0u, 4u),
   };
   static const bool expected_ignore_clinit_check[] = {
       false, false, false, false, true, true, true, true, true, false, true
@@ -365,7 +424,50 @@
   ASSERT_EQ(arraysize(expected_ignore_clinit_check), mir_count_);
   for (size_t i = 0u; i != arraysize(mirs); ++i) {
     EXPECT_EQ(expected_ignore_clinit_check[i],
-              (mirs_[i].optimization_flags & MIR_IGNORE_CLINIT_CHECK) != 0) << i;
+              (mirs_[i].optimization_flags & MIR_CLASS_IS_INITIALIZED) != 0) << i;
+    EXPECT_EQ(expected_ignore_clinit_check[i],
+              (mirs_[i].optimization_flags & MIR_CLASS_IS_IN_DEX_CACHE) != 0) << i;
+  }
+}
+
+TEST_F(ClassInitCheckEliminationTest, SingleBlockWithInvokes) {
+  static const SFieldDef sfields[] = {
+      { 0u, 1u, 0u, 0u },
+      { 1u, 1u, 1u, 1u },
+      { 2u, 1u, 2u, 2u },
+  };
+  static const MethodDef methods[] = {
+      { 0u, 1u, 0u, 0u, kStatic, kStatic, false, false },
+      { 1u, 1u, 1u, 1u, kStatic, kStatic, false, false },
+      { 2u, 1u, 2u, 2u, kStatic, kStatic, false, false },
+  };
+  static const MIRDef mirs[] = {
+      DEF_SGET_SPUT(3u, Instruction::SGET, 0u, 0u),
+      DEF_INVOKE(3u, Instruction::INVOKE_STATIC, 0u /* dummy */, 0u),
+      DEF_INVOKE(3u, Instruction::INVOKE_STATIC, 0u /* dummy */, 1u),
+      DEF_SGET_SPUT(3u, Instruction::SGET, 0u, 1u),
+      DEF_INVOKE(3u, Instruction::INVOKE_STATIC, 0u /* dummy */, 2u),
+      DEF_INVOKE(3u, Instruction::INVOKE_STATIC, 0u /* dummy */, 2u),
+  };
+  static const bool expected_class_initialized[] = {
+      false, true, false, true, false, true
+  };
+  static const bool expected_class_in_dex_cache[] = {
+      false, false, false, false, false, false
+  };
+
+  PrepareSFields(sfields);
+  PrepareMethods(methods);
+  PrepareSingleBlock();
+  PrepareMIRs(mirs);
+  PerformClassInitCheckElimination();
+  ASSERT_EQ(arraysize(expected_class_initialized), mir_count_);
+  ASSERT_EQ(arraysize(expected_class_in_dex_cache), mir_count_);
+  for (size_t i = 0u; i != arraysize(mirs); ++i) {
+    EXPECT_EQ(expected_class_initialized[i],
+              (mirs_[i].optimization_flags & MIR_CLASS_IS_INITIALIZED) != 0) << i;
+    EXPECT_EQ(expected_class_in_dex_cache[i],
+              (mirs_[i].optimization_flags & MIR_CLASS_IS_IN_DEX_CACHE) != 0) << i;
   }
 }
 
@@ -385,32 +487,32 @@
   };
   static const MIRDef mirs[] = {
       // NOTE: MIRs here are ordered by unique tests. They will be put into appropriate blocks.
-      DEF_SGET_SPUT_V0(3u, Instruction::SGET, 10u),  // Unresolved.
-      DEF_SGET_SPUT_V0(3u, Instruction::SPUT, 10u),  // Unresolved.
-      DEF_SGET_SPUT_V0(3u, Instruction::SPUT, 0u),
-      DEF_SGET_SPUT_V0(6u, Instruction::SGET, 0u),  // Eliminated (BB #3 dominates #6).
-      DEF_SGET_SPUT_V0(4u, Instruction::SPUT, 1u),
-      DEF_SGET_SPUT_V0(6u, Instruction::SGET, 1u),  // Not eliminated (BB #4 doesn't dominate #6).
-      DEF_SGET_SPUT_V0(3u, Instruction::SGET, 2u),
-      DEF_SGET_SPUT_V0(4u, Instruction::SGET, 2u),  // Eliminated (BB #3 dominates #4).
-      DEF_SGET_SPUT_V0(3u, Instruction::SGET, 3u),
-      DEF_SGET_SPUT_V0(5u, Instruction::SGET, 3u),  // Eliminated (BB #3 dominates #5).
-      DEF_SGET_SPUT_V0(3u, Instruction::SGET, 4u),
-      DEF_SGET_SPUT_V0(6u, Instruction::SGET, 4u),  // Eliminated (BB #3 dominates #6).
-      DEF_SGET_SPUT_V0(4u, Instruction::SGET, 5u),
-      DEF_SGET_SPUT_V0(6u, Instruction::SGET, 5u),  // Not eliminated (BB #4 doesn't dominate #6).
-      DEF_SGET_SPUT_V0(5u, Instruction::SGET, 6u),
-      DEF_SGET_SPUT_V0(6u, Instruction::SGET, 6u),  // Not eliminated (BB #5 doesn't dominate #6).
-      DEF_SGET_SPUT_V0(4u, Instruction::SGET, 7u),
-      DEF_SGET_SPUT_V0(5u, Instruction::SGET, 7u),
-      DEF_SGET_SPUT_V0(6u, Instruction::SGET, 7u),  // Eliminated (initialized in both #3 and #4).
-      DEF_SGET_SPUT_V0(4u, Instruction::SGET, 8u),
-      DEF_SGET_SPUT_V0(5u, Instruction::SGET, 9u),
-      DEF_SGET_SPUT_V0(6u, Instruction::SGET, 8u),  // Eliminated (with sfield[9] in BB #5).
-      DEF_SGET_SPUT_V0(6u, Instruction::SPUT, 9u),  // Eliminated (with sfield[8] in BB #4).
+      DEF_SGET_SPUT(3u, Instruction::SGET, 0u, 10u),  // Unresolved.
+      DEF_SGET_SPUT(3u, Instruction::SPUT, 0u, 10u),  // Unresolved.
+      DEF_SGET_SPUT(3u, Instruction::SPUT, 0u, 0u),
+      DEF_SGET_SPUT(6u, Instruction::SGET, 0u, 0u),  // Eliminated (BB #3 dominates #6).
+      DEF_SGET_SPUT(4u, Instruction::SPUT, 0u, 1u),
+      DEF_SGET_SPUT(6u, Instruction::SGET, 0u, 1u),  // Not eliminated (BB #4 doesn't dominate #6).
+      DEF_SGET_SPUT(3u, Instruction::SGET, 0u, 2u),
+      DEF_SGET_SPUT(4u, Instruction::SGET, 0u, 2u),  // Eliminated (BB #3 dominates #4).
+      DEF_SGET_SPUT(3u, Instruction::SGET, 0u, 3u),
+      DEF_SGET_SPUT(5u, Instruction::SGET, 0u, 3u),  // Eliminated (BB #3 dominates #5).
+      DEF_SGET_SPUT(3u, Instruction::SGET, 0u, 4u),
+      DEF_SGET_SPUT(6u, Instruction::SGET, 0u, 4u),  // Eliminated (BB #3 dominates #6).
+      DEF_SGET_SPUT(4u, Instruction::SGET, 0u, 5u),
+      DEF_SGET_SPUT(6u, Instruction::SGET, 0u, 5u),  // Not eliminated (BB #4 doesn't dominate #6).
+      DEF_SGET_SPUT(5u, Instruction::SGET, 0u, 6u),
+      DEF_SGET_SPUT(6u, Instruction::SGET, 0u, 6u),  // Not eliminated (BB #5 doesn't dominate #6).
+      DEF_SGET_SPUT(4u, Instruction::SGET, 0u, 7u),
+      DEF_SGET_SPUT(5u, Instruction::SGET, 0u, 7u),
+      DEF_SGET_SPUT(6u, Instruction::SGET, 0u, 7u),  // Eliminated (initialized in both #3 and #4).
+      DEF_SGET_SPUT(4u, Instruction::SGET, 0u, 8u),
+      DEF_SGET_SPUT(5u, Instruction::SGET, 0u, 9u),
+      DEF_SGET_SPUT(6u, Instruction::SGET, 0u, 8u),  // Eliminated (with sfield[9] in BB #5).
+      DEF_SGET_SPUT(6u, Instruction::SPUT, 0u, 9u),  // Eliminated (with sfield[8] in BB #4).
   };
   static const bool expected_ignore_clinit_check[] = {
-      false, true,          // Unresolved: sfield[10], method[2]
+      false, true,          // Unresolved: sfield[10]
+      false, true,          // Unresolved: sfield[10]
       false, true,          // sfield[0]
       false, false,         // sfield[1]
       false, true,          // sfield[2]
@@ -429,7 +531,70 @@
   ASSERT_EQ(arraysize(expected_ignore_clinit_check), mir_count_);
   for (size_t i = 0u; i != arraysize(mirs); ++i) {
     EXPECT_EQ(expected_ignore_clinit_check[i],
-              (mirs_[i].optimization_flags & MIR_IGNORE_CLINIT_CHECK) != 0) << i;
+              (mirs_[i].optimization_flags & MIR_CLASS_IS_INITIALIZED) != 0) << i;
+    EXPECT_EQ(expected_ignore_clinit_check[i],
+              (mirs_[i].optimization_flags & MIR_CLASS_IS_IN_DEX_CACHE) != 0) << i;
+  }
+}
+
+TEST_F(ClassInitCheckEliminationTest, DiamondWithInvokes) {
+  static const SFieldDef sfields[] = {
+      { 0u, 1u, 0u, 0u },
+      { 1u, 1u, 1u, 1u },
+      { 2u, 1u, 2u, 2u },
+      { 3u, 1u, 3u, 3u },
+      { 4u, 1u, 4u, 4u },
+  };
+  static const MethodDef methods[] = {
+      { 0u, 1u, 0u, 0u, kStatic, kStatic, false, false },
+      { 1u, 1u, 1u, 1u, kStatic, kStatic, false, false },
+      { 2u, 1u, 2u, 2u, kStatic, kStatic, false, false },
+      { 3u, 1u, 3u, 3u, kStatic, kStatic, false, false },
+      { 4u, 1u, 4u, 4u, kStatic, kStatic, false, false },
+  };
+  static const MIRDef mirs[] = {
+      // NOTE: MIRs here are ordered by unique tests. They will be put into appropriate blocks.
+      DEF_SGET_SPUT(3u, Instruction::SPUT, 0u, 0u),
+      DEF_INVOKE(6u, Instruction::INVOKE_STATIC, 0u /* dummy */, 0u),
+      DEF_INVOKE(3u, Instruction::INVOKE_STATIC, 0u /* dummy */, 1u),
+      DEF_SGET_SPUT(6u, Instruction::SPUT, 0u, 1u),
+      DEF_SGET_SPUT(4u, Instruction::SGET, 0u, 2u),
+      DEF_INVOKE(5u, Instruction::INVOKE_STATIC, 0u /* dummy */, 2u),
+      DEF_SGET_SPUT(6u, Instruction::SPUT, 0u, 2u),
+      DEF_INVOKE(4u, Instruction::INVOKE_STATIC, 0u /* dummy */, 3u),
+      DEF_SGET_SPUT(5u, Instruction::SPUT, 0u, 3u),
+      DEF_SGET_SPUT(6u, Instruction::SGET, 0u, 3u),
+      DEF_SGET_SPUT(4u, Instruction::SPUT, 0u, 4u),
+      DEF_SGET_SPUT(5u, Instruction::SGET, 0u, 4u),
+      DEF_INVOKE(6u, Instruction::INVOKE_STATIC, 0u /* dummy */, 4u),
+  };
+  static const bool expected_class_initialized[] = {
+      false, true,    // BB #3 SPUT, BB#6 INVOKE_STATIC
+      false, true,    // BB #3 INVOKE_STATIC, BB#6 SPUT
+      false, false, true,   // BB #4 SGET, BB #5 INVOKE_STATIC, BB #6 SPUT
+      false, false, true,   // BB #4 INVOKE_STATIC, BB #5 SPUT, BB #6 SGET
+      false, false, true,   // BB #4 SPUT, BB #5 SGET, BB #6 INVOKE_STATIC
+  };
+  static const bool expected_class_in_dex_cache[] = {
+      false, false,   // BB #3 SPUT, BB#6 INVOKE_STATIC
+      false, false,   // BB #3 INVOKE_STATIC, BB#6 SPUT
+      false, false, false,  // BB #4 SGET, BB #5 INVOKE_STATIC, BB #6 SPUT
+      false, false, false,  // BB #4 INVOKE_STATIC, BB #5 SPUT, BB #6 SGET
+      false, false, false,  // BB #4 SPUT, BB #5 SGET, BB #6 INVOKE_STATIC
+  };
+
+  PrepareSFields(sfields);
+  PrepareMethods(methods);
+  PrepareDiamond();
+  PrepareMIRs(mirs);
+  PerformClassInitCheckElimination();
+  ASSERT_EQ(arraysize(expected_class_initialized), mir_count_);
+  ASSERT_EQ(arraysize(expected_class_in_dex_cache), mir_count_);
+  for (size_t i = 0u; i != arraysize(mirs); ++i) {
+    EXPECT_EQ(expected_class_initialized[i],
+              (mirs_[i].optimization_flags & MIR_CLASS_IS_INITIALIZED) != 0) << i;
+    EXPECT_EQ(expected_class_in_dex_cache[i],
+              (mirs_[i].optimization_flags & MIR_CLASS_IS_IN_DEX_CACHE) != 0) << i;
   }
 }
 
@@ -437,15 +602,18 @@
   static const SFieldDef sfields[] = {
       { 0u, 1u, 0u, 0u },
       { 1u, 1u, 1u, 1u },
+      { 2u, 1u, 2u, 2u },
   };
   static const MIRDef mirs[] = {
-      DEF_SGET_SPUT_V0(3u, Instruction::SGET, 0u),
-      DEF_SGET_SPUT_V0(4u, Instruction::SGET, 1u),
-      DEF_SGET_SPUT_V0(5u, Instruction::SGET, 0u),  // Eliminated.
-      DEF_SGET_SPUT_V0(5u, Instruction::SGET, 1u),  // Eliminated.
+      DEF_SGET_SPUT(3u, Instruction::SGET, 0u, 0u),
+      DEF_SGET_SPUT(4u, Instruction::SGET, 0u, 0u),  // Eliminated.
+      DEF_SGET_SPUT(3u, Instruction::SGET, 0u, 1u),
+      DEF_SGET_SPUT(5u, Instruction::SGET, 0u, 1u),  // Eliminated.
+      DEF_SGET_SPUT(4u, Instruction::SGET, 0u, 2u),
+      DEF_SGET_SPUT(5u, Instruction::SGET, 0u, 2u),  // Eliminated.
   };
   static const bool expected_ignore_clinit_check[] = {
-      false, false, true, true
+      false, true, false, true, false, true,
   };
 
   PrepareSFields(sfields);
@@ -455,7 +623,49 @@
   ASSERT_EQ(arraysize(expected_ignore_clinit_check), mir_count_);
   for (size_t i = 0u; i != arraysize(mirs); ++i) {
     EXPECT_EQ(expected_ignore_clinit_check[i],
-              (mirs_[i].optimization_flags & MIR_IGNORE_CLINIT_CHECK) != 0) << i;
+              (mirs_[i].optimization_flags & MIR_CLASS_IS_INITIALIZED) != 0) << i;
+    EXPECT_EQ(expected_ignore_clinit_check[i],
+              (mirs_[i].optimization_flags & MIR_CLASS_IS_IN_DEX_CACHE) != 0) << i;
+  }
+}
+
+TEST_F(ClassInitCheckEliminationTest, LoopWithInvokes) {
+  static const SFieldDef sfields[] = {
+      { 0u, 1u, 0u, 0u },
+  };
+  static const MethodDef methods[] = {
+      { 0u, 1u, 0u, 0u, kStatic, kStatic, false, false },
+      { 1u, 1u, 1u, 1u, kStatic, kStatic, false, false },
+      { 2u, 1u, 2u, 2u, kStatic, kStatic, false, false },
+  };
+  static const MIRDef mirs[] = {
+      DEF_INVOKE(3u, Instruction::INVOKE_STATIC, 0u /* dummy */, 0u),
+      DEF_INVOKE(4u, Instruction::INVOKE_STATIC, 0u /* dummy */, 0u),
+      DEF_INVOKE(3u, Instruction::INVOKE_STATIC, 0u /* dummy */, 1u),
+      DEF_INVOKE(5u, Instruction::INVOKE_STATIC, 0u /* dummy */, 1u),
+      DEF_INVOKE(4u, Instruction::INVOKE_STATIC, 0u /* dummy */, 2u),
+      DEF_INVOKE(5u, Instruction::INVOKE_STATIC, 0u /* dummy */, 2u),
+      DEF_SGET_SPUT(5u, Instruction::SGET, 0u, 0u),
+  };
+  static const bool expected_class_initialized[] = {
+      false, true, false, true, false, true, true,
+  };
+  static const bool expected_class_in_dex_cache[] = {
+      false, false, false, false, false, false, false,
+  };
+
+  PrepareSFields(sfields);
+  PrepareMethods(methods);
+  PrepareLoop();
+  PrepareMIRs(mirs);
+  PerformClassInitCheckElimination();
+  ASSERT_EQ(arraysize(expected_class_initialized), mir_count_);
+  ASSERT_EQ(arraysize(expected_class_in_dex_cache), mir_count_);
+  for (size_t i = 0u; i != arraysize(mirs); ++i) {
+    EXPECT_EQ(expected_class_initialized[i],
+              (mirs_[i].optimization_flags & MIR_CLASS_IS_INITIALIZED) != 0) << i;
+    EXPECT_EQ(expected_class_in_dex_cache[i],
+              (mirs_[i].optimization_flags & MIR_CLASS_IS_IN_DEX_CACHE) != 0) << i;
   }
 }
 
@@ -467,16 +677,16 @@
       { 3u, 1u, 3u, 3u },
   };
   static const MIRDef mirs[] = {
-      DEF_SGET_SPUT_V0(3u, Instruction::SGET, 0u),  // Before the exception edge.
-      DEF_SGET_SPUT_V0(3u, Instruction::SGET, 1u),  // Before the exception edge.
-      DEF_SGET_SPUT_V0(4u, Instruction::SGET, 2u),  // After the exception edge.
-      DEF_SGET_SPUT_V0(4u, Instruction::SGET, 3u),  // After the exception edge.
-      DEF_SGET_SPUT_V0(5u, Instruction::SGET, 0u),  // In catch handler; clinit check eliminated.
-      DEF_SGET_SPUT_V0(5u, Instruction::SGET, 2u),  // In catch handler; clinit check not eliminated.
-      DEF_SGET_SPUT_V0(6u, Instruction::SGET, 0u),  // Class init check eliminated.
-      DEF_SGET_SPUT_V0(6u, Instruction::SGET, 1u),  // Class init check eliminated.
-      DEF_SGET_SPUT_V0(6u, Instruction::SGET, 2u),  // Class init check eliminated.
-      DEF_SGET_SPUT_V0(6u, Instruction::SGET, 3u),  // Class init check not eliminated.
+      DEF_SGET_SPUT(3u, Instruction::SGET, 0u, 0u),  // Before the exception edge.
+      DEF_SGET_SPUT(3u, Instruction::SGET, 0u, 1u),  // Before the exception edge.
+      DEF_SGET_SPUT(4u, Instruction::SGET, 0u, 2u),  // After the exception edge.
+      DEF_SGET_SPUT(4u, Instruction::SGET, 0u, 3u),  // After the exception edge.
+      DEF_SGET_SPUT(5u, Instruction::SGET, 0u, 0u),  // In catch handler; eliminated.
+      DEF_SGET_SPUT(5u, Instruction::SGET, 0u, 2u),  // In catch handler; not eliminated.
+      DEF_SGET_SPUT(6u, Instruction::SGET, 0u, 0u),  // Class init check eliminated.
+      DEF_SGET_SPUT(6u, Instruction::SGET, 0u, 1u),  // Class init check eliminated.
+      DEF_SGET_SPUT(6u, Instruction::SGET, 0u, 2u),  // Class init check eliminated.
+      DEF_SGET_SPUT(6u, Instruction::SGET, 0u, 3u),  // Class init check not eliminated.
   };
   static const bool expected_ignore_clinit_check[] = {
       false, false, false, false, true, false, true, true, true, false
@@ -489,21 +699,12 @@
   ASSERT_EQ(arraysize(expected_ignore_clinit_check), mir_count_);
   for (size_t i = 0u; i != arraysize(mirs); ++i) {
     EXPECT_EQ(expected_ignore_clinit_check[i],
-              (mirs_[i].optimization_flags & MIR_IGNORE_CLINIT_CHECK) != 0) << i;
+              (mirs_[i].optimization_flags & MIR_CLASS_IS_INITIALIZED) != 0) << i;
+    EXPECT_EQ(expected_ignore_clinit_check[i],
+              (mirs_[i].optimization_flags & MIR_CLASS_IS_IN_DEX_CACHE) != 0) << i;
   }
 }
 
-#define DEF_IGET_IPUT(bb, opcode, vA, vB, field_info) \
-    { bb, opcode, field_info, vA, vB, 0u }
-#define DEF_AGET_APUT(bb, opcode, vA, vB, vC) \
-    { bb, opcode, 0u, vA, vB, vC }
-#define DEF_INVOKE(bb, opcode, vC) \
-    { bb, opcode, 0u, 0u, 0u, vC }
-#define DEF_OTHER1(bb, opcode, vA) \
-    { bb, opcode, 0u, vA, 0u, 0u }
-#define DEF_OTHER2(bb, opcode, vA, vB) \
-    { bb, opcode, 0u, vA, vB, 0u }
-
 TEST_F(NullCheckEliminationTest, SingleBlock) {
   static const IFieldDef ifields[] = {
       { 0u, 1u, 0u, 0u },
@@ -525,10 +726,10 @@
       DEF_IGET_IPUT(3u, Instruction::IPUT, 11u, 105u, 1u),
       DEF_IGET_IPUT(3u, Instruction::IPUT, 12u, 106u, 0u),
       DEF_IGET_IPUT(3u, Instruction::IGET, 13u, 106u, 1u),
-      DEF_INVOKE(3u, Instruction::INVOKE_DIRECT, 107),
+      DEF_INVOKE(3u, Instruction::INVOKE_DIRECT, 107, 0u /* dummy */),
       DEF_IGET_IPUT(3u, Instruction::IGET, 15u, 107u, 1u),
       DEF_IGET_IPUT(3u, Instruction::IGET, 16u, 108u, 0u),
-      DEF_INVOKE(3u, Instruction::INVOKE_DIRECT, 108),
+      DEF_INVOKE(3u, Instruction::INVOKE_DIRECT, 108, 0u /* dummy */),
       DEF_AGET_APUT(3u, Instruction::AGET, 18u, 109u, 110u),
       DEF_AGET_APUT(3u, Instruction::APUT, 19u, 109u, 111u),
       DEF_OTHER2(3u, Instruction::ARRAY_LENGTH, 20u, 112u),
@@ -583,7 +784,7 @@
       DEF_IGET_IPUT(6u, Instruction::IPUT, 7u, 103u, 1u),  // Not eliminated (going through BB #5).
       DEF_IGET_IPUT(5u, Instruction::IGET, 8u, 104u, 1u),
       DEF_IGET_IPUT(6u, Instruction::IGET, 9u, 104u, 0u),  // Not eliminated (going through BB #4).
-      DEF_INVOKE(4u, Instruction::INVOKE_DIRECT, 105u),
+      DEF_INVOKE(4u, Instruction::INVOKE_DIRECT, 105u, 0u /* dummy */),
       DEF_IGET_IPUT(5u, Instruction::IGET, 11u, 105u, 1u),
       DEF_IGET_IPUT(6u, Instruction::IPUT, 12u, 105u, 0u),  // Eliminated.
       DEF_IGET_IPUT(3u, Instruction::IGET_OBJECT, 13u, 106u, 2u),
diff --git a/compiler/dex/quick/gen_common.cc b/compiler/dex/quick/gen_common.cc
index 2abfcc3..d76a870 100644
--- a/compiler/dex/quick/gen_common.cc
+++ b/compiler/dex/quick/gen_common.cc
@@ -522,15 +522,21 @@
 //
 class StaticFieldSlowPath : public Mir2Lir::LIRSlowPath {
  public:
+  // There are up to two branches to the static field slow path, the "unresolved" when the type
+  // entry in the dex cache is null, and the "uninit" when the class is not yet initialized.
+  // At least one will be non-null here, otherwise we wouldn't generate the slow path.
   StaticFieldSlowPath(Mir2Lir* m2l, LIR* unresolved, LIR* uninit, LIR* cont, int storage_index,
-                      RegStorage r_base) :
-    LIRSlowPath(m2l, m2l->GetCurrentDexPc(), unresolved, cont), uninit_(uninit),
-               storage_index_(storage_index), r_base_(r_base) {
+                      RegStorage r_base)
+      : LIRSlowPath(m2l, m2l->GetCurrentDexPc(), unresolved != nullptr ? unresolved : uninit, cont),
+        second_branch_(unresolved != nullptr ? uninit : nullptr),
+        storage_index_(storage_index), r_base_(r_base) {
   }
 
   void Compile() {
-    LIR* unresolved_target = GenerateTargetLabel();
-    uninit_->target = unresolved_target;
+    LIR* target = GenerateTargetLabel();
+    if (second_branch_ != nullptr) {
+      second_branch_->target = target;
+    }
     m2l_->CallRuntimeHelperImm(kQuickInitializeStaticStorage, storage_index_, true);
     // Copy helper's result into r_base, a no-op on all but MIPS.
     m2l_->OpRegCopy(r_base_,  m2l_->TargetReg(kRet0, kRef));
@@ -539,7 +545,9 @@
   }
 
  private:
-  LIR* const uninit_;
+  // Second branch to the slow path, or null if there's only one branch.
+  LIR* const second_branch_;
+
   const int storage_index_;
   const RegStorage r_base_;
 };
@@ -577,30 +585,38 @@
       int32_t offset_of_field = ObjArray::OffsetOfElement(field_info.StorageIndex()).Int32Value();
       LoadRefDisp(r_base, offset_of_field, r_base, kNotVolatile);
       // r_base now points at static storage (Class*) or NULL if the type is not yet resolved.
-      if (!field_info.IsInitialized() &&
-          (mir->optimization_flags & MIR_IGNORE_CLINIT_CHECK) == 0) {
-        // Check if r_base is NULL or a not yet initialized class.
-
-        // The slow path is invoked if the r_base is NULL or the class pointed
-        // to by it is not initialized.
-        LIR* unresolved_branch = OpCmpImmBranch(kCondEq, r_base, 0, NULL);
+      LIR* unresolved_branch = nullptr;
+      if (!field_info.IsClassInDexCache() &&
+          (mir->optimization_flags & MIR_CLASS_IS_IN_DEX_CACHE) == 0) {
+        // Check if r_base is NULL.
+        unresolved_branch = OpCmpImmBranch(kCondEq, r_base, 0, NULL);
+      }
+      LIR* uninit_branch = nullptr;
+      if (!field_info.IsClassInitialized() &&
+          (mir->optimization_flags & MIR_CLASS_IS_INITIALIZED) == 0) {
+        // Check if r_base points to a not-yet-initialized class.
         RegStorage r_tmp = TargetReg(kArg2, kNotWide);
         LockTemp(r_tmp);
-        LIR* uninit_branch = OpCmpMemImmBranch(kCondLt, r_tmp, r_base,
+        uninit_branch = OpCmpMemImmBranch(kCondLt, r_tmp, r_base,
                                           mirror::Class::StatusOffset().Int32Value(),
                                           mirror::Class::kStatusInitialized, nullptr, nullptr);
+        FreeTemp(r_tmp);
+      }
+      if (unresolved_branch != nullptr || uninit_branch != nullptr) {
+        // The slow path is invoked if the r_base is NULL or the class pointed
+        // to by it is not initialized.
         LIR* cont = NewLIR0(kPseudoTargetLabel);
-
         AddSlowPath(new (arena_) StaticFieldSlowPath(this, unresolved_branch, uninit_branch, cont,
                                                      field_info.StorageIndex(), r_base));
 
-        FreeTemp(r_tmp);
-        // Ensure load of status and store of value don't re-order.
-        // TODO: Presumably the actual value store is control-dependent on the status load,
-        // and will thus not be reordered in any case, since stores are never speculated.
-        // Does later code "know" that the class is now initialized?  If so, we still
-        // need the barrier to guard later static loads.
-        GenMemBarrier(kLoadAny);
+        if (uninit_branch != nullptr) {
+          // Ensure load of status and store of value don't re-order.
+          // TODO: Presumably the actual value store is control-dependent on the status load,
+          // and will thus not be reordered in any case, since stores are never speculated.
+          // Does later code "know" that the class is now initialized?  If so, we still
+          // need the barrier to guard later static loads.
+          GenMemBarrier(kLoadAny);
+        }
       }
       FreeTemp(r_method);
     }
@@ -684,26 +700,34 @@
       int32_t offset_of_field = ObjArray::OffsetOfElement(field_info.StorageIndex()).Int32Value();
       LoadRefDisp(r_base, offset_of_field, r_base, kNotVolatile);
       // r_base now points at static storage (Class*) or NULL if the type is not yet resolved.
-      if (!field_info.IsInitialized() &&
-          (mir->optimization_flags & MIR_IGNORE_CLINIT_CHECK) == 0) {
-        // Check if r_base is NULL or a not yet initialized class.
-
-        // The slow path is invoked if the r_base is NULL or the class pointed
-        // to by it is not initialized.
-        LIR* unresolved_branch = OpCmpImmBranch(kCondEq, r_base, 0, NULL);
+      LIR* unresolved_branch = nullptr;
+      if (!field_info.IsClassInDexCache() &&
+          (mir->optimization_flags & MIR_CLASS_IS_IN_DEX_CACHE) == 0) {
+        // Check if r_base is NULL.
+        unresolved_branch = OpCmpImmBranch(kCondEq, r_base, 0, NULL);
+      }
+      LIR* uninit_branch = nullptr;
+      if (!field_info.IsClassInitialized() &&
+          (mir->optimization_flags & MIR_CLASS_IS_INITIALIZED) == 0) {
+        // Check if r_base points to a not-yet-initialized class.
         RegStorage r_tmp = TargetReg(kArg2, kNotWide);
         LockTemp(r_tmp);
-        LIR* uninit_branch = OpCmpMemImmBranch(kCondLt, r_tmp, r_base,
+        uninit_branch = OpCmpMemImmBranch(kCondLt, r_tmp, r_base,
                                           mirror::Class::StatusOffset().Int32Value(),
                                           mirror::Class::kStatusInitialized, nullptr, nullptr);
+        FreeTemp(r_tmp);
+      }
+      if (unresolved_branch != nullptr || uninit_branch != nullptr) {
+        // The slow path is invoked if the r_base is NULL or the class pointed
+        // to by it is not initialized.
         LIR* cont = NewLIR0(kPseudoTargetLabel);
-
         AddSlowPath(new (arena_) StaticFieldSlowPath(this, unresolved_branch, uninit_branch, cont,
                                                      field_info.StorageIndex(), r_base));
 
-        FreeTemp(r_tmp);
-        // Ensure load of status and load of value don't re-order.
-        GenMemBarrier(kLoadAny);
+        if (uninit_branch != nullptr) {
+          // Ensure load of status and load of value don't re-order.
+          GenMemBarrier(kLoadAny);
+        }
       }
       FreeTemp(r_method);
     }
diff --git a/compiler/driver/compiler_driver-inl.h b/compiler/driver/compiler_driver-inl.h
index 3325568..1805d59 100644
--- a/compiler/driver/compiler_driver-inl.h
+++ b/compiler/driver/compiler_driver-inl.h
@@ -93,6 +93,10 @@
   return field->IsVolatile();
 }
 
+inline MemberOffset CompilerDriver::GetFieldOffset(mirror::ArtField* field) {
+  return field->GetOffset();
+}
+
 inline std::pair<bool, bool> CompilerDriver::IsFastInstanceField(
     mirror::DexCache* dex_cache, mirror::Class* referrer_class,
     mirror::ArtField* resolved_field, uint16_t field_idx) {
@@ -107,16 +111,12 @@
 
 inline std::pair<bool, bool> CompilerDriver::IsFastStaticField(
     mirror::DexCache* dex_cache, mirror::Class* referrer_class,
-    mirror::ArtField* resolved_field, uint16_t field_idx, MemberOffset* field_offset,
-    uint32_t* storage_index, bool* is_referrers_class, bool* is_initialized) {
+    mirror::ArtField* resolved_field, uint16_t field_idx, uint32_t* storage_index) {
   DCHECK(resolved_field->IsStatic());
   if (LIKELY(referrer_class != nullptr)) {
     mirror::Class* fields_class = resolved_field->GetDeclaringClass();
     if (fields_class == referrer_class) {
-      *field_offset = resolved_field->GetOffset();
       *storage_index = fields_class->GetDexTypeIndex();
-      *is_referrers_class = true;  // implies no worrying about class initialization
-      *is_initialized = true;
       return std::make_pair(true, true);
     }
     if (referrer_class->CanAccessResolvedField(fields_class, resolved_field,
@@ -148,23 +148,30 @@
         }
       }
       if (storage_idx != DexFile::kDexNoIndex) {
-        *field_offset = resolved_field->GetOffset();
         *storage_index = storage_idx;
-        *is_referrers_class = false;
-        *is_initialized = fields_class->IsInitialized() &&
-            CanAssumeTypeIsPresentInDexCache(*dex_file, storage_idx);
         return std::make_pair(true, !resolved_field->IsFinal());
       }
     }
   }
   // Conservative defaults.
-  *field_offset = MemberOffset(0u);
   *storage_index = DexFile::kDexNoIndex;
-  *is_referrers_class = false;
-  *is_initialized = false;
   return std::make_pair(false, false);
 }
 
+inline bool CompilerDriver::IsStaticFieldInReferrerClass(mirror::Class* referrer_class,
+                                                         mirror::ArtField* resolved_field) {
+  DCHECK(resolved_field->IsStatic());
+  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
+  return referrer_class == fields_class;
+}
+
+inline bool CompilerDriver::IsStaticFieldsClassInitialized(mirror::Class* referrer_class,
+                                                           mirror::ArtField* resolved_field) {
+  DCHECK(resolved_field->IsStatic());
+  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
+  return fields_class == referrer_class || fields_class->IsInitialized();
+}
+
 inline mirror::ArtMethod* CompilerDriver::ResolveMethod(
     ScopedObjectAccess& soa, Handle<mirror::DexCache> dex_cache,
     Handle<mirror::ClassLoader> class_loader, const DexCompilationUnit* mUnit,
@@ -312,14 +319,13 @@
   return stats_flags;
 }
 
-inline bool CompilerDriver::NeedsClassInitialization(mirror::Class* referrer_class,
-                                                     mirror::ArtMethod* resolved_method) {
+inline bool CompilerDriver::IsMethodsClassInitialized(mirror::Class* referrer_class,
+                                                      mirror::ArtMethod* resolved_method) {
   if (!resolved_method->IsStatic()) {
-    return false;
+    return true;
   }
   mirror::Class* methods_class = resolved_method->GetDeclaringClass();
-  // NOTE: Unlike in IsFastStaticField(), we don't check CanAssumeTypeIsPresentInDexCache() here.
-  return methods_class != referrer_class && !methods_class->IsInitialized();
+  return methods_class == referrer_class || methods_class->IsInitialized();
 }
 
 }  // namespace art
diff --git a/compiler/driver/compiler_driver.cc b/compiler/driver/compiler_driver.cc
index a60c5bc..eb5739f 100644
--- a/compiler/driver/compiler_driver.cc
+++ b/compiler/driver/compiler_driver.cc
@@ -1094,11 +1094,17 @@
   if (resolved_field != nullptr && referrer_class != nullptr) {
     *is_volatile = IsFieldVolatile(resolved_field);
     std::pair<bool, bool> fast_path = IsFastStaticField(
-        dex_cache, referrer_class, resolved_field, field_idx, field_offset,
-        storage_index, is_referrers_class, is_initialized);
+        dex_cache, referrer_class, resolved_field, field_idx, storage_index);
     result = is_put ? fast_path.second : fast_path.first;
   }
-  if (!result) {
+  if (result) {
+    *field_offset = GetFieldOffset(resolved_field);
+    *is_referrers_class = IsStaticFieldInReferrerClass(referrer_class, resolved_field);
+    // *is_referrers_class == true implies no worrying about class initialization.
+    *is_initialized = (*is_referrers_class) ||
+        (IsStaticFieldsClassInitialized(referrer_class, resolved_field) &&
+         CanAssumeTypeIsPresentInDexCache(*mUnit->GetDexFile(), *storage_index));
+  } else {
     // Conservative defaults.
     *is_volatile = true;
     *field_offset = MemberOffset(static_cast<size_t>(-1));
diff --git a/compiler/driver/compiler_driver.h b/compiler/driver/compiler_driver.h
index 0796f48..6764aef 100644
--- a/compiler/driver/compiler_driver.h
+++ b/compiler/driver/compiler_driver.h
@@ -233,6 +233,7 @@
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   bool IsFieldVolatile(mirror::ArtField* field) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+  MemberOffset GetFieldOffset(mirror::ArtField* field) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   // Can we fast-path an IGET/IPUT access to an instance field? If yes, compute the field offset.
   std::pair<bool, bool> IsFastInstanceField(
@@ -240,13 +241,20 @@
       mirror::ArtField* resolved_field, uint16_t field_idx)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
-  // Can we fast-path an SGET/SPUT access to a static field? If yes, compute the field offset,
-  // the type index of the declaring class in the referrer's dex file and whether the declaring
-  // class is the referrer's class or at least can be assumed to be initialized.
+  // Can we fast-path an SGET/SPUT access to a static field? If yes, compute the type index
+  // of the declaring class in the referrer's dex file.
   std::pair<bool, bool> IsFastStaticField(
       mirror::DexCache* dex_cache, mirror::Class* referrer_class,
-      mirror::ArtField* resolved_field, uint16_t field_idx, MemberOffset* field_offset,
-      uint32_t* storage_index, bool* is_referrers_class, bool* is_initialized)
+      mirror::ArtField* resolved_field, uint16_t field_idx, uint32_t* storage_index)
+    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+  // Is the static field in the referrer's class?
+  bool IsStaticFieldInReferrerClass(mirror::Class* referrer_class, mirror::ArtField* resolved_field)
+    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+  // Is the static field's class initialized?
+  bool IsStaticFieldsClassInitialized(mirror::Class* referrer_class,
+                                      mirror::ArtField* resolved_field)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   // Resolve a method. Returns nullptr on failure, including incompatible class change.
@@ -277,8 +285,10 @@
       uintptr_t* direct_code, uintptr_t* direct_method)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
-  // Does invokation of the resolved method need class initialization?
-  bool NeedsClassInitialization(mirror::Class* referrer_class, mirror::ArtMethod* resolved_method)
+  // Is the method's class initialized for an invoke?
+  // For static invokes, this determines whether we need to consider a potential call to <clinit>().
+  // For non-static invokes, assuming a non-null reference, the class is always initialized.
+  bool IsMethodsClassInitialized(mirror::Class* referrer_class, mirror::ArtMethod* resolved_method)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   void ProcessedInstanceField(bool resolved);