Add more compilation stats to optimizing

Optimizing is getting closer to having full coverage, and this provides a
nice overview of why certain methods are not compiled/optimized.

Also, clean up some of the builder methods.

Change-Id: Id2f31441a788b797b0efea7ec78bee27bb654186
diff --git a/compiler/optimizing/builder.cc b/compiler/optimizing/builder.cc
index 0a3f830..d39f1c7 100644
--- a/compiler/optimizing/builder.cc
+++ b/compiler/optimizing/builder.cc
@@ -224,31 +224,35 @@
   current_block_ = nullptr;
 }
 
-static bool ShouldSkipCompilation(const CompilerDriver& compiler_driver,
-                                  const DexCompilationUnit& dex_compilation_unit,
-                                  size_t number_of_dex_instructions,
-                                  size_t number_of_blocks ATTRIBUTE_UNUSED,
-                                  size_t number_of_branches) {
-  const CompilerOptions& compiler_options = compiler_driver.GetCompilerOptions();
+void HGraphBuilder::MaybeRecordStat(MethodCompilationStat compilation_stat) {
+  if (compilation_stats_ != nullptr) {
+    compilation_stats_->RecordStat(compilation_stat);
+  }
+}
+
+bool HGraphBuilder::SkipCompilation(size_t number_of_dex_instructions,
+                                    size_t number_of_blocks ATTRIBUTE_UNUSED,
+                                    size_t number_of_branches) {
+  const CompilerOptions& compiler_options = compiler_driver_->GetCompilerOptions();
   CompilerOptions::CompilerFilter compiler_filter = compiler_options.GetCompilerFilter();
   if (compiler_filter == CompilerOptions::kEverything) {
     return false;
   }
 
   if (compiler_options.IsHugeMethod(number_of_dex_instructions)) {
-    LOG(INFO) << "Skip compilation of huge method "
-              << PrettyMethod(dex_compilation_unit.GetDexMethodIndex(),
-                              *dex_compilation_unit.GetDexFile())
-              << ": " << number_of_dex_instructions << " dex instructions";
+    VLOG(compiler) << "Skip compilation of huge method "
+                   << PrettyMethod(dex_compilation_unit_->GetDexMethodIndex(), *dex_file_)
+                   << ": " << number_of_dex_instructions << " dex instructions";
+    MaybeRecordStat(MethodCompilationStat::kNotCompiledHugeMethod);
     return true;
   }
 
   // If it's large and contains no branches, it's likely to be machine generated initialization.
   if (compiler_options.IsLargeMethod(number_of_dex_instructions) && (number_of_branches == 0)) {
-    LOG(INFO) << "Skip compilation of large method with no branch "
-              << PrettyMethod(dex_compilation_unit.GetDexMethodIndex(),
-                              *dex_compilation_unit.GetDexFile())
-              << ": " << number_of_dex_instructions << " dex instructions";
+    VLOG(compiler) << "Skip compilation of large method with no branch "
+                   << PrettyMethod(dex_compilation_unit_->GetDexMethodIndex(), *dex_file_)
+                   << ": " << number_of_dex_instructions << " dex instructions";
+    MaybeRecordStat(MethodCompilationStat::kNotCompiledLargeMethodNoBranches);
     return true;
   }
 
@@ -283,14 +287,9 @@
       code_ptr, code_end, &number_of_dex_instructions, &number_of_blocks, &number_of_branches);
 
   // Note that the compiler driver is null when unit testing.
-  if (compiler_driver_ != nullptr) {
-    if (ShouldSkipCompilation(*compiler_driver_,
-                              *dex_compilation_unit_,
-                              number_of_dex_instructions,
-                              number_of_blocks,
-                              number_of_branches)) {
-      return nullptr;
-    }
+  if ((compiler_driver_ != nullptr)
+      && SkipCompilation(number_of_dex_instructions, number_of_blocks, number_of_branches)) {
+    return nullptr;
   }
 
   // Also create blocks for catch handlers.
@@ -319,7 +318,9 @@
     // Update the current block if dex_pc starts a new block.
     MaybeUpdateCurrentBlock(dex_pc);
     const Instruction& instruction = *Instruction::At(code_ptr);
-    if (!AnalyzeDexInstruction(instruction, dex_pc)) return nullptr;
+    if (!AnalyzeDexInstruction(instruction, dex_pc)) {
+      return nullptr;
+    }
     dex_pc += instruction.SizeInCodeUnits();
     code_ptr += instruction.SizeInCodeUnits();
   }
@@ -593,8 +594,9 @@
   if (!compiler_driver_->ComputeInvokeInfo(dex_compilation_unit_, dex_pc, true, true,
                                            &optimized_invoke_type, &target_method, &table_index,
                                            &direct_code, &direct_method)) {
-    LOG(INFO) << "Did not compile " << PrettyMethod(method_idx, *dex_file_)
-              << " because a method call could not be resolved";
+    VLOG(compiler) << "Did not compile " << PrettyMethod(method_idx, *dex_file_)
+                   << " because a method call could not be resolved";
+    MaybeRecordStat(MethodCompilationStat::kNotCompiledUnresolvedMethod);
     return false;
   }
   DCHECK(optimized_invoke_type != kSuper);
@@ -636,6 +638,7 @@
       LOG(WARNING) << "Non sequential register pair in " << dex_compilation_unit_->GetSymbol()
                    << " at " << dex_pc;
       // We do not implement non sequential register pair.
+      MaybeRecordStat(MethodCompilationStat::kNotCompiledNonSequentialRegPair);
       return false;
     }
     HInstruction* arg = LoadLocal(is_range ? register_index + i : args[i], type);
@@ -664,9 +667,11 @@
       compiler_driver_->ComputeInstanceFieldInfo(field_index, dex_compilation_unit_, is_put, soa)));
 
   if (resolved_field.Get() == nullptr) {
+    MaybeRecordStat(MethodCompilationStat::kNotCompiledUnresolvedField);
     return false;
   }
   if (resolved_field->IsVolatile()) {
+    MaybeRecordStat(MethodCompilationStat::kNotCompiledVolatile);
     return false;
   }
 
@@ -721,10 +726,12 @@
                                                             &is_initialized,
                                                             &field_type);
   if (!fast_path) {
+    MaybeRecordStat(MethodCompilationStat::kNotCompiledUnresolvedField);
     return false;
   }
 
   if (is_volatile) {
+    MaybeRecordStat(MethodCompilationStat::kNotCompiledVolatile);
     return false;
   }
 
@@ -947,6 +954,7 @@
       dex_compilation_unit_->GetDexMethodIndex(), *dex_file_, type_index,
       &type_known_final, &type_known_abstract, &is_referrers_class);
   if (!can_access) {
+    MaybeRecordStat(MethodCompilationStat::kNotCompiledCantAccesType);
     return false;
   }
   HInstruction* object = LoadLocal(reference, Primitive::kPrimNot);
@@ -967,7 +975,7 @@
   return true;
 }
 
-bool HGraphBuilder::BuildPackedSwitch(const Instruction& instruction, uint32_t dex_pc) {
+void HGraphBuilder::BuildPackedSwitch(const Instruction& instruction, uint32_t dex_pc) {
   SwitchTable table(instruction, dex_pc, false);
 
   // Value to test against.
@@ -984,10 +992,9 @@
     BuildSwitchCaseHelper(instruction, i, i == num_entries, table, value, starting_key + i - 1,
                           table.GetEntryAt(i), dex_pc);
   }
-  return true;
 }
 
-bool HGraphBuilder::BuildSparseSwitch(const Instruction& instruction, uint32_t dex_pc) {
+void HGraphBuilder::BuildSparseSwitch(const Instruction& instruction, uint32_t dex_pc) {
   SwitchTable table(instruction, dex_pc, true);
 
   // Value to test against.
@@ -1001,7 +1008,6 @@
     BuildSwitchCaseHelper(instruction, i, i == static_cast<size_t>(num_entries) - 1, table, value,
                           table.GetEntryAt(i), table.GetEntryAt(i + num_entries), dex_pc);
   }
-  return true;
 }
 
 void HGraphBuilder::BuildSwitchCaseHelper(const Instruction& instruction, size_t index,
@@ -1928,6 +1934,7 @@
           dex_compilation_unit_->GetDexMethodIndex(), *dex_file_, type_index,
           &type_known_final, &type_known_abstract, &is_referrers_class);
       if (!can_access) {
+        MaybeRecordStat(MethodCompilationStat::kNotCompiledCantAccesType);
         return false;
       }
       current_block_->AddInstruction(
@@ -1989,20 +1996,21 @@
     }
 
     case Instruction::PACKED_SWITCH: {
-      if (!BuildPackedSwitch(instruction, dex_pc)) {
-        return false;
-      }
+      BuildPackedSwitch(instruction, dex_pc);
       break;
     }
 
     case Instruction::SPARSE_SWITCH: {
-      if (!BuildSparseSwitch(instruction, dex_pc)) {
-        return false;
-      }
+      BuildSparseSwitch(instruction, dex_pc);
       break;
     }
 
     default:
+      VLOG(compiler) << "Did not compile "
+                     << PrettyMethod(dex_compilation_unit_->GetDexMethodIndex(), *dex_file_)
+                     << " because of unhandled instruction "
+                     << instruction.Name();
+      MaybeRecordStat(MethodCompilationStat::kNotCompiledUnhandledInstruction);
       return false;
   }
   return true;
diff --git a/compiler/optimizing/builder.h b/compiler/optimizing/builder.h
index 73c2f50..75c8634 100644
--- a/compiler/optimizing/builder.h
+++ b/compiler/optimizing/builder.h
@@ -21,6 +21,7 @@
 #include "dex_file-inl.h"
 #include "driver/compiler_driver.h"
 #include "driver/dex_compilation_unit.h"
+#include "optimizing_compiler_stats.h"
 #include "primitive.h"
 #include "utils/arena_object.h"
 #include "utils/growable_array.h"
@@ -36,7 +37,8 @@
   HGraphBuilder(ArenaAllocator* arena,
                 DexCompilationUnit* dex_compilation_unit,
                 const DexFile* dex_file,
-                CompilerDriver* driver)
+                CompilerDriver* driver,
+                OptimizingCompilerStats* compiler_stats)
       : arena_(arena),
         branch_targets_(arena, 0),
         locals_(arena, 0),
@@ -51,7 +53,8 @@
         compiler_driver_(driver),
         return_type_(Primitive::GetType(dex_compilation_unit_->GetShorty()[0])),
         code_start_(nullptr),
-        latest_result_(nullptr) {}
+        latest_result_(nullptr),
+        compilation_stats_(compiler_stats) {}
 
   // Only for unit testing.
   HGraphBuilder(ArenaAllocator* arena, Primitive::Type return_type = Primitive::kPrimInt)
@@ -69,7 +72,8 @@
         compiler_driver_(nullptr),
         return_type_(return_type),
         code_start_(nullptr),
-        latest_result_(nullptr) {}
+        latest_result_(nullptr),
+        compilation_stats_(nullptr) {}
 
   HGraph* BuildGraph(const DexFile::CodeItem& code);
 
@@ -205,16 +209,22 @@
                       uint32_t dex_pc);
 
   // Builds an instruction sequence for a packed switch statement.
-  bool BuildPackedSwitch(const Instruction& instruction, uint32_t dex_pc);
+  void BuildPackedSwitch(const Instruction& instruction, uint32_t dex_pc);
 
   // Builds an instruction sequence for a sparse switch statement.
-  bool BuildSparseSwitch(const Instruction& instruction, uint32_t dex_pc);
+  void BuildSparseSwitch(const Instruction& instruction, uint32_t dex_pc);
 
   void BuildSwitchCaseHelper(const Instruction& instruction, size_t index,
                              bool is_last_case, const SwitchTable& table,
                              HInstruction* value, int32_t case_value_int,
                              int32_t target_offset, uint32_t dex_pc);
 
+  bool SkipCompilation(size_t number_of_dex_instructions,
+                       size_t number_of_blocks,
+                       size_t number_of_branches);
+
+  void MaybeRecordStat(MethodCompilationStat compilation_stat);
+
   ArenaAllocator* const arena_;
 
   // A list of the size of the dex code holding block information for
@@ -245,6 +255,8 @@
   // used by move-result instructions.
   HInstruction* latest_result_;
 
+  OptimizingCompilerStats* compilation_stats_;
+
   DISALLOW_COPY_AND_ASSIGN(HGraphBuilder);
 };
 
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 11fc9bf..a8abb01 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -121,9 +121,8 @@
   // Whether we should run any optimization or register allocation. If false, will
   // just run the code generation after the graph was built.
   const bool run_optimizations_;
-  mutable AtomicInteger total_compiled_methods_;
-  mutable AtomicInteger unoptimized_compiled_methods_;
-  mutable AtomicInteger optimized_compiled_methods_;
+
+  mutable OptimizingCompilerStats compilation_stats_;
 
   std::unique_ptr<std::ostream> visualizer_output_;
 
@@ -136,24 +135,14 @@
     : Compiler(driver, kMaximumCompilationTimeBeforeWarning),
       run_optimizations_(
           driver->GetCompilerOptions().GetCompilerFilter() != CompilerOptions::kTime),
-      total_compiled_methods_(0),
-      unoptimized_compiled_methods_(0),
-      optimized_compiled_methods_(0) {
+      compilation_stats_() {
   if (kIsVisualizerEnabled) {
     visualizer_output_.reset(new std::ofstream("art.cfg"));
   }
 }
 
 OptimizingCompiler::~OptimizingCompiler() {
-  if (total_compiled_methods_ == 0) {
-    LOG(INFO) << "Did not compile any method.";
-  } else {
-    size_t unoptimized_percent = (unoptimized_compiled_methods_ * 100 / total_compiled_methods_);
-    size_t optimized_percent = (optimized_compiled_methods_ * 100 / total_compiled_methods_);
-    LOG(INFO) << "Compiled " << total_compiled_methods_ << " methods: "
-              << unoptimized_percent << "% (" << unoptimized_compiled_methods_ << ") unoptimized, "
-              << optimized_percent << "% (" << optimized_compiled_methods_ << ") optimized.";
-  }
+  compilation_stats_.Log();
 }
 
 bool OptimizingCompiler::CanCompileMethod(uint32_t method_idx ATTRIBUTE_UNUSED,
@@ -246,7 +235,7 @@
                                             jobject class_loader,
                                             const DexFile& dex_file) const {
   UNUSED(invoke_type);
-  total_compiled_methods_++;
+  compilation_stats_.RecordStat(MethodCompilationStat::kAttemptCompilation);
   InstructionSet instruction_set = GetCompilerDriver()->GetInstructionSet();
   // Always use the thumb2 assembler: some runtime functionality (like implicit stack
   // overflow checks) assume thumb2.
@@ -256,10 +245,12 @@
 
   // Do not attempt to compile on architectures we do not support.
   if (!IsInstructionSetSupported(instruction_set)) {
+    compilation_stats_.RecordStat(MethodCompilationStat::kNotCompiledUnsupportedIsa);
     return nullptr;
   }
 
   if (Compiler::IsPathologicalCase(*code_item, method_idx, dex_file)) {
+    compilation_stats_.RecordStat(MethodCompilationStat::kNotCompiledPathological);
     return nullptr;
   }
 
@@ -276,7 +267,10 @@
 
   ArenaPool pool;
   ArenaAllocator arena(&pool);
-  HGraphBuilder builder(&arena, &dex_compilation_unit, &dex_file, GetCompilerDriver());
+  HGraphBuilder builder(&arena,
+                        &dex_compilation_unit,
+                        &dex_file, GetCompilerDriver(),
+                        &compilation_stats_);
 
   HGraph* graph = builder.BuildGraph(*code_item);
   if (graph == nullptr) {
@@ -287,6 +281,7 @@
   CodeGenerator* codegen = CodeGenerator::Create(&arena, graph, instruction_set);
   if (codegen == nullptr) {
     CHECK(!shouldCompile) << "Could not find code generator for optimizing compiler";
+    compilation_stats_.RecordStat(MethodCompilationStat::kNotCompiledNoCodegen);
     return nullptr;
   }
 
@@ -296,13 +291,13 @@
 
   CodeVectorAllocator allocator;
 
-  if (run_optimizations_
-      && CanOptimize(*code_item)
-      && RegisterAllocator::CanAllocateRegistersFor(*graph, instruction_set)) {
+  bool can_optimize = CanOptimize(*code_item);
+  bool can_allocate_registers = RegisterAllocator::CanAllocateRegistersFor(*graph, instruction_set);
+  if (run_optimizations_ && can_optimize && can_allocate_registers) {
     VLOG(compiler) << "Optimizing " << PrettyMethod(method_idx, dex_file);
-    optimized_compiled_methods_++;
     if (!TryBuildingSsa(graph, dex_compilation_unit, visualizer)) {
       // We could not transform the graph to SSA, bailout.
+      compilation_stats_.RecordStat(MethodCompilationStat::kNotCompiledCannotBuildSSA);
       return nullptr;
     }
     RunOptimizations(graph, visualizer);
@@ -327,6 +322,7 @@
     std::vector<uint8_t> stack_map;
     codegen->BuildStackMaps(&stack_map);
 
+    compilation_stats_.RecordStat(MethodCompilationStat::kCompiledOptimized);
     return new CompiledMethod(GetCompilerDriver(),
                               instruction_set,
                               allocator.GetMemory(),
@@ -340,7 +336,15 @@
     UNREACHABLE();
   } else {
     VLOG(compiler) << "Compile baseline " << PrettyMethod(method_idx, dex_file);
-    unoptimized_compiled_methods_++;
+
+    if (!run_optimizations_) {
+      compilation_stats_.RecordStat(MethodCompilationStat::kNotOptimizedDisabled);
+    } else if (!can_optimize) {
+      compilation_stats_.RecordStat(MethodCompilationStat::kNotOptimizedTryCatch);
+    } else if (!can_allocate_registers) {
+      compilation_stats_.RecordStat(MethodCompilationStat::kNotOptimizedRegisterAllocator);
+    }
+
     codegen->CompileBaseline(&allocator);
 
     std::vector<uint8_t> mapping_table;
@@ -353,6 +357,7 @@
     std::vector<uint8_t> gc_map;
     codegen->BuildNativeGCMap(&gc_map, dex_compilation_unit);
 
+    compilation_stats_.RecordStat(MethodCompilationStat::kCompiledBaseline);
     return new CompiledMethod(GetCompilerDriver(),
                               instruction_set,
                               allocator.GetMemory(),
diff --git a/compiler/optimizing/optimizing_compiler.h b/compiler/optimizing/optimizing_compiler.h
index a415eca..d076fb5 100644
--- a/compiler/optimizing/optimizing_compiler.h
+++ b/compiler/optimizing/optimizing_compiler.h
@@ -24,6 +24,6 @@
 
 Compiler* CreateOptimizingCompiler(CompilerDriver* driver);
 
-}
+}  // namespace art
 
 #endif  // ART_COMPILER_OPTIMIZING_OPTIMIZING_COMPILER_H_
diff --git a/compiler/optimizing/optimizing_compiler_stats.h b/compiler/optimizing/optimizing_compiler_stats.h
new file mode 100644
index 0000000..829982e
--- /dev/null
+++ b/compiler/optimizing/optimizing_compiler_stats.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_COMPILER_OPTIMIZING_OPTIMIZING_COMPILER_STATS_H_
+#define ART_COMPILER_OPTIMIZING_OPTIMIZING_COMPILER_STATS_H_
+
+#include <sstream>
+#include <string>
+
+#include "atomic.h"
+
+namespace art {
+
+enum MethodCompilationStat {
+  kAttemptCompilation = 0,
+  kCompiledBaseline,
+  kCompiledOptimized,
+  kNotCompiledUnsupportedIsa,
+  kNotCompiledPathological,
+  kNotCompiledHugeMethod,
+  kNotCompiledLargeMethodNoBranches,
+  kNotCompiledCannotBuildSSA,
+  kNotCompiledNoCodegen,
+  kNotCompiledUnresolvedMethod,
+  kNotCompiledUnresolvedField,
+  kNotCompiledNonSequentialRegPair,
+  kNotCompiledVolatile,
+  kNotOptimizedTryCatch,
+  kNotOptimizedDisabled,
+  kNotCompiledCantAccesType,
+  kNotOptimizedRegisterAllocator,
+  kNotCompiledUnhandledInstruction,
+  kLastStat
+};
+
+class OptimizingCompilerStats {
+ public:
+  OptimizingCompilerStats() {}
+
+  void RecordStat(MethodCompilationStat stat) {
+    compile_stats_[stat]++;
+  }
+
+  void Log() const {
+    if (compile_stats_[kAttemptCompilation] == 0) {
+      LOG(INFO) << "Did not compile any method.";
+    } else {
+      size_t unoptimized_percent =
+          compile_stats_[kCompiledBaseline] * 100 / compile_stats_[kAttemptCompilation];
+      size_t optimized_percent =
+          compile_stats_[kCompiledOptimized] * 100 / compile_stats_[kAttemptCompilation];
+      std::ostringstream oss;
+      oss << "Attempted compilation of " << compile_stats_[kAttemptCompilation] << " methods: "
+          << unoptimized_percent << "% (" << compile_stats_[kCompiledBaseline] << ") unoptimized, "
+          << optimized_percent << "% (" << compile_stats_[kCompiledOptimized] << ") optimized.\n";
+      for (int i = 0; i < kLastStat; i++) {
+        if (compile_stats_[i] != 0) {
+          oss << PrintMethodCompilationStat(i) << ": " << compile_stats_[i] << "\n";
+        }
+      }
+      LOG(INFO) << oss.str();
+    }
+  }
+
+ private:
+  std::string PrintMethodCompilationStat(int stat) const {
+    switch (stat) {
+      case kAttemptCompilation : return "kAttemptCompilation";
+      case kCompiledBaseline : return "kCompiledBaseline";
+      case kCompiledOptimized : return "kCompiledOptimized";
+      case kNotCompiledUnsupportedIsa : return "kNotCompiledUnsupportedIsa";
+      case kNotCompiledPathological : return "kNotCompiledPathological";
+      case kNotCompiledHugeMethod : return "kNotCompiledHugeMethod";
+      case kNotCompiledLargeMethodNoBranches : return "kNotCompiledLargeMethodNoBranches";
+      case kNotCompiledCannotBuildSSA : return "kNotCompiledCannotBuildSSA";
+      case kNotCompiledNoCodegen : return "kNotCompiledNoCodegen";
+      case kNotCompiledUnresolvedMethod : return "kNotCompiledUnresolvedMethod";
+      case kNotCompiledUnresolvedField : return "kNotCompiledUnresolvedField";
+      case kNotCompiledNonSequentialRegPair : return "kNotCompiledNonSequentialRegPair";
+      case kNotCompiledVolatile : return "kNotCompiledVolatile";
+      case kNotOptimizedDisabled : return "kNotOptimizedDisabled";
+      case kNotOptimizedTryCatch : return "kNotOptimizedTryCatch";
+      case kNotCompiledCantAccesType : return "kNotCompiledCantAccesType";
+      case kNotOptimizedRegisterAllocator : return "kNotOptimizedRegisterAllocator";
+      case kNotCompiledUnhandledInstruction : return "kNotCompiledUnhandledInstruction";
+      default: LOG(FATAL) << "invalid stat";
+    }
+    return "";
+  }
+
+  AtomicInteger compile_stats_[kLastStat];
+
+  DISALLOW_COPY_AND_ASSIGN(OptimizingCompilerStats);
+};
+
+}  // namespace art
+
+#endif  // ART_COMPILER_OPTIMIZING_OPTIMIZING_COMPILER_STATS_H_