Remove the use of Method from jni_compiler

Change-Id: Ibf1c72a806e7f1ba7a2d83960c3d57f41937d336
diff --git a/src/calling_convention.cc b/src/calling_convention.cc
index 5256ee1..bf27b6c 100644
--- a/src/calling_convention.cc
+++ b/src/calling_convention.cc
@@ -17,27 +17,23 @@
 // Managed runtime calling convention
 
 ManagedRuntimeCallingConvention* ManagedRuntimeCallingConvention::Create(
-    const Method* native_method, InstructionSet instruction_set) {
+    bool is_static, bool is_synchronized, const char* shorty, InstructionSet instruction_set) {
   if (instruction_set == kX86) {
-    return new x86::X86ManagedRuntimeCallingConvention(native_method);
+    return new x86::X86ManagedRuntimeCallingConvention(is_static, is_synchronized, shorty);
   } else {
     CHECK(instruction_set == kArm || instruction_set == kThumb2);
-    return new arm::ArmManagedRuntimeCallingConvention(native_method);
+    return new arm::ArmManagedRuntimeCallingConvention(is_static, is_synchronized, shorty);
   }
 }
 
-size_t ManagedRuntimeCallingConvention::FrameSize() {
-  return GetMethod()->GetFrameSizeInBytes();
-}
-
 bool ManagedRuntimeCallingConvention::HasNext() {
-  return itr_args_ < GetMethod()->NumArgs();
+  return itr_args_ < NumArgs();
 }
 
 void ManagedRuntimeCallingConvention::Next() {
   CHECK(HasNext());
   if (IsCurrentArgExplicit() &&  // don't query parameter type of implicit args
-      GetMethod()->IsParamALongOrDouble(itr_args_)) {
+      IsParamALongOrDouble(itr_args_)) {
     itr_longs_and_doubles_++;
     itr_slots_++;
   }
@@ -50,7 +46,7 @@
 
 bool ManagedRuntimeCallingConvention::IsCurrentArgExplicit() {
   // Static methods have no implicit arguments, others implicitly pass this
-  return GetMethod()->IsStatic() || (itr_args_ != 0);
+  return IsStatic() || (itr_args_ != 0);
 }
 
 bool ManagedRuntimeCallingConvention::IsCurrentArgPossiblyNull() {
@@ -58,28 +54,28 @@
 }
 
 size_t ManagedRuntimeCallingConvention::CurrentParamSize() {
-  return GetMethod()->ParamSize(itr_args_);
+  return ParamSize(itr_args_);
 }
 
 bool ManagedRuntimeCallingConvention::IsCurrentParamAReference() {
-  return GetMethod()->IsParamAReference(itr_args_);
+  return IsParamAReference(itr_args_);
 }
 
 // JNI calling convention
 
-JniCallingConvention* JniCallingConvention::Create(const Method* native_method,
-                                               InstructionSet instruction_set) {
+JniCallingConvention* JniCallingConvention::Create(bool is_static, bool is_synchronized,
+                                                   const char* shorty,
+                                                   InstructionSet instruction_set) {
   if (instruction_set == kX86) {
-    return new x86::X86JniCallingConvention(native_method);
+    return new x86::X86JniCallingConvention(is_static, is_synchronized, shorty);
   } else {
     CHECK(instruction_set == kArm || instruction_set == kThumb2);
-    return new arm::ArmJniCallingConvention(native_method);
+    return new arm::ArmJniCallingConvention(is_static, is_synchronized, shorty);
   }
 }
 
 size_t JniCallingConvention::ReferenceCount() const {
-  const Method* method = GetMethod();
-  return method->NumReferenceArgs() + (method->IsStatic() ? 1 : 0);
+  return NumReferenceArgs() + (IsStatic() ? 1 : 0);
 }
 
 FrameOffset JniCallingConvention::SavedLocalReferenceCookieOffset() const {
@@ -97,16 +93,16 @@
   if (itr_args_ <= kObjectOrClass) {
     return true;
   } else {
-    unsigned int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni(GetMethod());
-    return arg_pos < GetMethod()->NumArgs();
+    unsigned int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni();
+    return arg_pos < NumArgs();
   }
 }
 
 void JniCallingConvention::Next() {
   CHECK(HasNext());
   if (itr_args_ > kObjectOrClass) {
-    int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni(GetMethod());
-    if (GetMethod()->IsParamALongOrDouble(arg_pos)) {
+    int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni();
+    if (IsParamALongOrDouble(arg_pos)) {
       itr_longs_and_doubles_++;
       itr_slots_++;
     }
@@ -125,8 +121,8 @@
     case kObjectOrClass:
       return true;   // jobject or jclass
     default: {
-      int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni(GetMethod());
-      return GetMethod()->IsParamAReference(arg_pos);
+      int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni();
+      return IsParamAReference(arg_pos);
     }
   }
 }
@@ -147,15 +143,15 @@
   if (itr_args_ <= kObjectOrClass) {
     return kPointerSize;  // JNIEnv or jobject/jclass
   } else {
-    int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni(GetMethod());
-    return GetMethod()->ParamSize(arg_pos);
+    int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni();
+    return ParamSize(arg_pos);
   }
 }
 
-size_t JniCallingConvention::NumberOfExtraArgumentsForJni(const Method* method) {
+size_t JniCallingConvention::NumberOfExtraArgumentsForJni() {
   // The first argument is the JNIEnv*.
   // Static methods have an extra argument which is the jclass.
-  return method->IsStatic() ? 2 : 1;
+  return IsStatic() ? 2 : 1;
 }
 
 }  // namespace art
diff --git a/src/calling_convention.h b/src/calling_convention.h
index 7e87264..bfd7d21 100644
--- a/src/calling_convention.h
+++ b/src/calling_convention.h
@@ -5,7 +5,6 @@
 
 #include <vector>
 #include "managed_register.h"
-#include "object.h"
 #include "stack_indirect_reference_table.h"
 #include "thread.h"
 
@@ -14,9 +13,15 @@
 // Top-level abstraction for different calling conventions
 class CallingConvention {
  public:
-  bool IsReturnAReference() const { return method_->IsReturnAReference(); }
+  bool IsReturnAReference() const { return shorty_[0] == 'L'; }
 
-  size_t SizeOfReturnValue() const { return method_->ReturnSize(); }
+  size_t SizeOfReturnValue() const {
+    size_t result = Primitive::ComponentSize(Primitive::GetType(shorty_[0]));
+    if (result >= 1 && result < 4) {
+      result = 4;
+    }
+    return result;
+  }
 
   // Register that holds result of this method
   virtual ManagedRegister ReturnRegister() = 0;
@@ -42,11 +47,73 @@
   virtual ~CallingConvention() {}
 
  protected:
-  explicit CallingConvention(const Method* method)
-      : displacement_(0), method_(const_cast<Method*>(method)) {}
+  CallingConvention(bool is_static, bool is_synchronized, const char* shorty)
+      : displacement_(0), is_static_(is_static), is_synchronized_(is_synchronized),
+        shorty_(shorty) {
+    num_args_ = (is_static ? 0 : 1) + strlen(shorty) - 1;
+    num_ref_args_ = is_static ? 0 : 1;  // The implicit this pointer.
+    num_long_or_double_args_ = 0;
+    for (size_t i = 1; i < strlen(shorty); i++) {
+      char ch = shorty_[i];
+      if (ch == 'L') {
+        num_ref_args_++;
+      } else if ((ch == 'D') || (ch == 'J')) {
+        num_long_or_double_args_++;
+      }
+    }
+  }
 
-  const Method* GetMethod() const { return method_; }
+  bool IsStatic() const {
+    return is_static_;
+  }
+  bool IsSynchronized() const {
+    return is_synchronized_;
+  }
+  bool IsParamALongOrDouble(unsigned int param) const {
+    DCHECK_LT(param, NumArgs());
+    if (IsStatic()) {
+      param++;  // 0th argument must skip return value at start of the shorty
+    } else if (param == 0) {
+      return false;  // this argument
+    }
+    char ch = shorty_[param];
+    return (ch == 'J' || ch == 'D');
+  }
+  bool IsParamAReference(unsigned int param) const {
+    DCHECK_LT(param, NumArgs());
+    if (IsStatic()) {
+      param++;  // 0th argument must skip return value at start of the shorty
+    } else if (param == 0) {
+      return true;  // this argument
+    }
+    return shorty_[param] == 'L';
+  }
 
+  size_t NumArgs() const {
+    return num_args_;
+  }
+  size_t NumLongOrDoubleArgs() const {
+    return num_long_or_double_args_;
+  }
+  size_t NumReferenceArgs() const {
+    return num_ref_args_;
+  }
+  size_t ParamSize(unsigned int param) const {
+    DCHECK_LT(param, NumArgs());
+    if (IsStatic()) {
+      param++;  // 0th argument must skip return value at start of the shorty
+    } else if (param == 0) {
+      return kPointerSize;  // this argument
+    }
+    size_t result = Primitive::ComponentSize(Primitive::GetType(shorty_[param]));
+    if (result >= 1 && result < 4) {
+      result = 4;
+    }
+    return result;
+  }
+  const char* GetShorty() const {
+    return shorty_.c_str();
+  }
   // The slot number for current calling_convention argument.
   // Note that each slot is 32-bit. When the current argument is bigger
   // than 32 bits, return the first slot number for this argument.
@@ -61,7 +128,12 @@
   FrameOffset displacement_;
 
  private:
-  Method* method_;
+  const bool is_static_;
+  const bool is_synchronized_;
+  std::string shorty_;
+  size_t num_args_;
+  size_t num_ref_args_;
+  size_t num_long_or_double_args_;
 };
 
 // Abstraction for managed code's calling conventions
@@ -74,11 +146,10 @@
 // | { Method* }             | <-- SP
 class ManagedRuntimeCallingConvention : public CallingConvention {
  public:
-  static ManagedRuntimeCallingConvention* Create(const Method* native_method,
+  static ManagedRuntimeCallingConvention* Create(bool is_static, bool is_synchronized,
+                                                 const char* shorty,
                                                  InstructionSet instruction_set);
 
-  size_t FrameSize();
-
   // Register that holds the incoming method argument
   virtual ManagedRegister MethodRegister() = 0;
 
@@ -97,8 +168,8 @@
   virtual ~ManagedRuntimeCallingConvention() {}
 
  protected:
-  explicit ManagedRuntimeCallingConvention(const Method* method) :
-                                           CallingConvention(method) {}
+  ManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, const char* shorty) :
+      CallingConvention(is_static, is_synchronized, shorty) {}
 };
 
 // Abstraction for JNI calling conventions
@@ -117,7 +188,7 @@
 // callee saves for frames above this one.
 class JniCallingConvention : public CallingConvention {
  public:
-  static JniCallingConvention* Create(const Method* native_method,
+  static JniCallingConvention* Create(bool is_static, bool is_synchronized, const char* shorty,
                                       InstructionSet instruction_set);
 
   // Size of frame excluding space for outgoing args (its assumed Method* is
@@ -184,15 +255,15 @@
     kObjectOrClass = 1
   };
 
-  explicit JniCallingConvention(const Method* native_method) :
-      CallingConvention(native_method) {}
+  explicit JniCallingConvention(bool is_static, bool is_synchronized, const char* shorty) :
+      CallingConvention(is_static, is_synchronized, shorty) {}
 
   // Number of stack slots for outgoing arguments, above which the SIRT is
   // located
   virtual size_t NumberOfOutgoingStackArgs() = 0;
 
  protected:
-  static size_t NumberOfExtraArgumentsForJni(const Method* method);
+  size_t NumberOfExtraArgumentsForJni();
 };
 
 }  // namespace art
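
The core of the change is visible above: CallingConvention now derives everything it used to ask a resolved Method* for from three inputs only, is_static, is_synchronized and the method shorty. A minimal standalone sketch of that shorty bookkeeping (illustrative names, not the ART classes themselves; it mirrors the constructor and the IsParam* helpers):

#include <cstddef>
#include <cstdio>
#include <cstring>

// Illustrative re-implementation of the shorty scan done in the new
// CallingConvention constructor: shorty[0] is the return type, the rest are
// the parameters, and a non-static method adds an implicit 'this'.
struct ShortyInfo {
  size_t num_args;
  size_t num_ref_args;
  size_t num_long_or_double_args;
};

ShortyInfo ScanShorty(bool is_static, const char* shorty) {
  ShortyInfo info;
  info.num_args = (is_static ? 0 : 1) + strlen(shorty) - 1;  // skip return type
  info.num_ref_args = is_static ? 0 : 1;                     // implicit this
  info.num_long_or_double_args = 0;
  for (const char* p = shorty + 1; *p != '\0'; ++p) {
    if (*p == 'L') {
      info.num_ref_args++;
    } else if (*p == 'J' || *p == 'D') {
      info.num_long_or_double_args++;
    }
  }
  return info;
}

int main() {
  // e.g. a virtual method "long f(Object, double)" has shorty "JLD".
  ShortyInfo info = ScanShorty(/*is_static=*/false, "JLD");
  printf("args=%zu refs=%zu wide=%zu\n",
         info.num_args, info.num_ref_args, info.num_long_or_double_args);
  return 0;
}
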
diff --git a/src/calling_convention_arm.cc b/src/calling_convention_arm.cc
index fc8c662..c9af6f1 100644
--- a/src/calling_convention_arm.cc
+++ b/src/calling_convention_arm.cc
@@ -17,14 +17,14 @@
   return ArmManagedRegister::FromCoreRegister(IP);  // R12
 }
 
-static ManagedRegister ReturnRegisterForMethod(const Method* method) {
-  if (method->IsReturnAFloat()) {
+static ManagedRegister ReturnRegisterForShorty(const char* shorty) {
+  if (shorty[0] == 'F') {
     return ArmManagedRegister::FromCoreRegister(R0);
-  } else if (method->IsReturnADouble()) {
+  } else if (shorty[0] == 'D') {
     return ArmManagedRegister::FromRegisterPair(R0_R1);
-  } else if (method->IsReturnALong()) {
+  } else if (shorty[0] == 'J') {
     return ArmManagedRegister::FromRegisterPair(R0_R1);
-  } else if (method->IsReturnVoid()) {
+  } else if (shorty[0] == 'V') {
     return ArmManagedRegister::NoRegister();
   } else {
     return ArmManagedRegister::FromCoreRegister(R0);
@@ -32,11 +32,11 @@
 }
 
 ManagedRegister ArmManagedRuntimeCallingConvention::ReturnRegister() {
-  return ReturnRegisterForMethod(GetMethod());
+  return ReturnRegisterForShorty(GetShorty());
 }
 
 ManagedRegister ArmJniCallingConvention::ReturnRegister() {
-  return ReturnRegisterForMethod(GetMethod());
+  return ReturnRegisterForShorty(GetShorty());
 }
 
 // Managed runtime calling convention
@@ -56,7 +56,7 @@
     return true;
   } else {
     // handle funny case of a long/double straddling registers and the stack
-    return GetMethod()->IsParamALongOrDouble(itr_args_);
+    return IsParamALongOrDouble(itr_args_);
   }
 }
 
@@ -65,8 +65,7 @@
 };
 ManagedRegister ArmManagedRuntimeCallingConvention::CurrentParamRegister() {
   CHECK(IsCurrentParamInRegister());
-  const Method* method = GetMethod();
-  if (method->IsParamALongOrDouble(itr_args_)) {
+  if (IsParamALongOrDouble(itr_args_)) {
     if (itr_slots_ == 0) {
       return ArmManagedRegister::FromRegisterPair(R1_R2);
     } else if (itr_slots_ == 1) {
@@ -99,15 +98,16 @@
 
 // JNI calling convention
 
-ArmJniCallingConvention::ArmJniCallingConvention(const Method* method)
-    : JniCallingConvention(method) {
+ArmJniCallingConvention::ArmJniCallingConvention(bool is_static, bool is_synchronized,
+                                                 const char* shorty)
+    : JniCallingConvention(is_static, is_synchronized, shorty) {
   // Compute padding to ensure longs and doubles are not split in AAPCS
   // TODO: in terms of outgoing argument size this may be overly generous
   // due to padding appearing in the registers
   size_t padding = 0;
-  size_t check = method->IsStatic() ? 1 : 0;
-  for (size_t i = 0; i < method->NumArgs(); i++) {
-    if (((i & 1) == check) && method->IsParamALongOrDouble(i)) {
+  size_t check = IsStatic() ? 1 : 0;
+  for (size_t i = 0; i < NumArgs(); i++) {
+    if (((i & 1) == check) && IsParamALongOrDouble(i)) {
       padding += 4;
     }
   }
@@ -155,11 +155,10 @@
 // in even register numbers and stack slots
 void ArmJniCallingConvention::Next() {
   JniCallingConvention::Next();
-  const Method* method = GetMethod();
-  size_t arg_pos = itr_args_ - NumberOfExtraArgumentsForJni(method);
+  size_t arg_pos = itr_args_ - NumberOfExtraArgumentsForJni();
   if ((itr_args_ >= 2) &&
-      (arg_pos < GetMethod()->NumArgs()) &&
-      method->IsParamALongOrDouble(arg_pos)) {
+      (arg_pos < NumArgs()) &&
+      IsParamALongOrDouble(arg_pos)) {
     // itr_slots_ needs to be an even number, according to AAPCS.
     if ((itr_slots_ & 0x1u) != 0) {
       itr_slots_++;
@@ -180,9 +179,8 @@
 };
 ManagedRegister ArmJniCallingConvention::CurrentParamRegister() {
   CHECK_LT(itr_slots_, 4u);
-  const Method* method = GetMethod();
-  int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni(method);
-  if ((itr_args_ >= 2) && method->IsParamALongOrDouble(arg_pos)) {
+  int arg_pos = itr_args_ - NumberOfExtraArgumentsForJni();
+  if ((itr_args_ >= 2) && IsParamALongOrDouble(arg_pos)) {
     CHECK_EQ(itr_slots_, 2u);
     return ArmManagedRegister::FromRegisterPair(R2_R3);
   } else {
@@ -198,11 +196,9 @@
 }
 
 size_t ArmJniCallingConvention::NumberOfOutgoingStackArgs() {
-  const Method* method = GetMethod();
-  size_t static_args = method->IsStatic() ? 1 : 0;  // count jclass
+  size_t static_args = IsStatic() ? 1 : 0;  // count jclass
   // regular argument parameters and this
-  size_t param_args = method->NumArgs() +
-                      method->NumLongOrDoubleArgs();
+  size_t param_args = NumArgs() + NumLongOrDoubleArgs();
   // count JNIEnv* less arguments in registers
   return static_args + param_args + 1 - 4;
 }
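
ArmJniCallingConvention's constructor now recomputes the AAPCS padding for long/double arguments from the shorty helpers instead of Method. A rough standalone sketch of that computation (ad-hoc helper names, same index-parity rule as the constructor above; treat it as an illustration rather than the ART code):

#include <cstddef>
#include <cstdio>

// Mirrors IsParamALongOrDouble(): static methods index straight into the
// shorty past the return type, non-static methods treat argument 0 as 'this'.
static bool IsParamALongOrDouble(bool is_static, const char* shorty, size_t param) {
  if (is_static) {
    param++;       // skip the return type at shorty[0]
  } else if (param == 0) {
    return false;  // implicit this
  }
  char ch = shorty[param];
  return ch == 'J' || ch == 'D';
}

// Mirrors the padding loop: wide arguments at the "unaligned" index parity get
// 4 bytes of padding so they are not split under AAPCS.
static size_t ComputeArmJniPadding(bool is_static, const char* shorty, size_t num_args) {
  size_t padding = 0;
  size_t check = is_static ? 1 : 0;
  for (size_t i = 0; i < num_args; ++i) {
    if (((i & 1) == check) && IsParamALongOrDouble(is_static, shorty, i)) {
      padding += 4;
    }
  }
  return padding;
}

int main() {
  // static native long f(int, long): shorty "JIJ", two arguments; the long at
  // parameter index 1 triggers 4 bytes of padding here.
  printf("padding=%zu\n", ComputeArmJniPadding(true, "JIJ", 2));
  return 0;
}
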
diff --git a/src/calling_convention_arm.h b/src/calling_convention_arm.h
index 4ecf9df..bb4c30f 100644
--- a/src/calling_convention_arm.h
+++ b/src/calling_convention_arm.h
@@ -10,8 +10,8 @@
 
 class ArmManagedRuntimeCallingConvention : public ManagedRuntimeCallingConvention {
  public:
-  explicit ArmManagedRuntimeCallingConvention(const Method* method) :
-                                     ManagedRuntimeCallingConvention(method) {}
+  ArmManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, const char* shorty) :
+      ManagedRuntimeCallingConvention(is_static, is_synchronized, shorty) {}
   virtual ~ArmManagedRuntimeCallingConvention() {}
   // Calling convention
   virtual ManagedRegister ReturnRegister();
@@ -29,7 +29,7 @@
 
 class ArmJniCallingConvention : public JniCallingConvention {
  public:
-  explicit ArmJniCallingConvention(const Method* method);
+  explicit ArmJniCallingConvention(bool is_static, bool is_synchronized, const char* shorty);
   virtual ~ArmJniCallingConvention() {}
   // Calling convention
   virtual ManagedRegister ReturnRegister();
diff --git a/src/calling_convention_x86.cc b/src/calling_convention_x86.cc
index 07ccece..7becbb3 100644
--- a/src/calling_convention_x86.cc
+++ b/src/calling_convention_x86.cc
@@ -22,12 +22,12 @@
   return ManagedRegister::NoRegister();  // No free regs, so assembler uses push/pop
 }
 
-static ManagedRegister ReturnRegisterForMethod(const Method* method) {
-  if (method->IsReturnAFloatOrDouble()) {
+static ManagedRegister ReturnRegisterForShorty(const char* shorty) {
+  if (shorty[0] == 'F' || shorty[0] == 'D') {
     return X86ManagedRegister::FromX87Register(ST0);
-  } else if (method->IsReturnALong()) {
+  } else if (shorty[0] == 'J') {
     return X86ManagedRegister::FromRegisterPair(EAX_EDX);
-  } else if (method->IsReturnVoid()) {
+  } else if (shorty[0] == 'V') {
     return ManagedRegister::NoRegister();
   } else {
     return X86ManagedRegister::FromCpuRegister(EAX);
@@ -35,11 +35,11 @@
 }
 
 ManagedRegister X86ManagedRuntimeCallingConvention::ReturnRegister() {
-  return ReturnRegisterForMethod(GetMethod());
+  return ReturnRegisterForShorty(GetShorty());
 }
 
 ManagedRegister X86JniCallingConvention::ReturnRegister() {
-  return ReturnRegisterForMethod(GetMethod());
+  return ReturnRegisterForShorty(GetShorty());
 }
 
 // Managed runtime calling convention
@@ -85,7 +85,7 @@
 }
 
 bool X86JniCallingConvention::IsMethodRegisterClobberedPreCall() {
-  return GetMethod()->IsSynchronized();  // Monitor enter crushes the method register
+  return IsSynchronized();  // Monitor enter crushes the method register
 }
 
 bool X86JniCallingConvention::IsCurrentParamInRegister() {
@@ -107,10 +107,9 @@
 }
 
 size_t X86JniCallingConvention::NumberOfOutgoingStackArgs() {
-  size_t static_args = GetMethod()->IsStatic() ? 1 : 0;  // count jclass
+  size_t static_args = IsStatic() ? 1 : 0;  // count jclass
   // regular argument parameters and this
-  size_t param_args = GetMethod()->NumArgs() +
-                      GetMethod()->NumLongOrDoubleArgs();
+  size_t param_args = NumArgs() + NumLongOrDoubleArgs();
   return static_args + param_args + 2;  // count JNIEnv* and return pc (pushed after Method*)
 }
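
On x86 every argument is passed on the stack, so NumberOfOutgoingStackArgs() reduces to counting over the shorty. A small illustrative version, again outside the ART tree and with ad-hoc helper names:

#include <cstddef>
#include <cstdio>
#include <cstring>

// Rough equivalent of X86JniCallingConvention::NumberOfOutgoingStackArgs():
// one slot per argument, an extra slot per long/double, plus jclass for
// static methods, plus JNIEnv* and the return address.
static size_t X86OutgoingStackArgs(bool is_static, const char* shorty) {
  size_t num_args = (is_static ? 0 : 1) + strlen(shorty) - 1;
  size_t wide_args = 0;
  for (const char* p = shorty + 1; *p != '\0'; ++p) {
    if (*p == 'J' || *p == 'D') {
      wide_args++;
    }
  }
  size_t static_args = is_static ? 1 : 0;         // jclass
  return static_args + num_args + wide_args + 2;  // + JNIEnv* and return pc
}

int main() {
  // static native double f(int, double): shorty "DID" -> 6 slots.
  printf("slots=%zu\n", X86OutgoingStackArgs(true, "DID"));
  return 0;
}
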
 
diff --git a/src/calling_convention_x86.h b/src/calling_convention_x86.h
index f3c85e9..c265917 100644
--- a/src/calling_convention_x86.h
+++ b/src/calling_convention_x86.h
@@ -10,8 +10,9 @@
 
 class X86ManagedRuntimeCallingConvention : public ManagedRuntimeCallingConvention {
  public:
-  explicit X86ManagedRuntimeCallingConvention(const Method* method) :
-                                     ManagedRuntimeCallingConvention(method) {}
+  explicit X86ManagedRuntimeCallingConvention(bool is_static, bool is_synchronized,
+                                              const char* shorty) :
+      ManagedRuntimeCallingConvention(is_static, is_synchronized, shorty) {}
   virtual ~X86ManagedRuntimeCallingConvention() {}
   // Calling convention
   virtual ManagedRegister ReturnRegister();
@@ -29,8 +30,8 @@
 
 class X86JniCallingConvention : public JniCallingConvention {
  public:
-  explicit X86JniCallingConvention(const Method* method) :
-                                   JniCallingConvention(method) {}
+  X86JniCallingConvention(bool is_static, bool is_synchronized, const char* shorty) :
+      JniCallingConvention(is_static, is_synchronized, shorty) {}
   virtual ~X86JniCallingConvention() {}
   // Calling convention
   virtual ManagedRegister ReturnRegister();
diff --git a/src/common_test.h b/src/common_test.h
index 56b54a3..c55ef7f 100644
--- a/src/common_test.h
+++ b/src/common_test.h
@@ -205,7 +205,7 @@
 #elif defined(__arm__)
     instruction_set = kThumb2;
 #endif
-    runtime_->SetJniStubArray(JniCompiler::CreateJniStub(instruction_set));
+    runtime_->SetJniDlsymLookupStub(Compiler::CreateJniDlsymLookupStub(instruction_set));
     runtime_->SetAbstractMethodErrorStubArray(Compiler::CreateAbstractMethodErrorStub(instruction_set));
     for (int i = 0; i < Runtime::kLastTrampolineMethodType; i++) {
       Runtime::TrampolineType type = Runtime::TrampolineType(i);
@@ -352,7 +352,7 @@
     compiler_->CompileOne(method);
     MakeExecutable(method);
 
-    MakeExecutable(runtime_->GetJniStubArray());
+    MakeExecutable(runtime_->GetJniDlsymLookupStub());
   }
 
   void CompileDirectMethod(ClassLoader* class_loader,
diff --git a/src/compiled_method.cc b/src/compiled_method.cc
index 500a62b..4e21a91 100644
--- a/src/compiled_method.cc
+++ b/src/compiled_method.cc
@@ -10,7 +10,9 @@
                                const uint32_t core_spill_mask,
                                const uint32_t fp_spill_mask,
                                std::vector<uint32_t>& mapping_table,
-                               std::vector<uint16_t>& vmap_table) {
+                               std::vector<uint16_t>& vmap_table)
+    : instruction_set_(instruction_set), frame_size_in_bytes_(frame_size_in_bytes),
+      core_spill_mask_(core_spill_mask), fp_spill_mask_(fp_spill_mask) {
   CHECK_NE(short_code.size(), 0U);
   CHECK_GE(vmap_table.size(), 1U);  // should always contain an entry for LR
 
@@ -33,11 +35,7 @@
   DCHECK_EQ(vmap_table.size() + 1, length_prefixed_vmap_table.size());
   DCHECK_EQ(vmap_table.size(), length_prefixed_vmap_table[0]);
 
-  instruction_set_ = instruction_set;
   code_ = byte_code;
-  frame_size_in_bytes_ = frame_size_in_bytes;
-  core_spill_mask_ = core_spill_mask;
-  fp_spill_mask_ = fp_spill_mask;
   mapping_table_ = length_prefixed_mapping_table;
   vmap_table_ = length_prefixed_vmap_table;
 
@@ -48,14 +46,10 @@
                                std::vector<uint8_t>& code,
                                const size_t frame_size_in_bytes,
                                const uint32_t core_spill_mask,
-                               const uint32_t fp_spill_mask) {
+                               const uint32_t fp_spill_mask)
+    : instruction_set_(instruction_set), code_(code), frame_size_in_bytes_(frame_size_in_bytes),
+      core_spill_mask_(core_spill_mask), fp_spill_mask_(fp_spill_mask) {
   CHECK_NE(code.size(), 0U);
-
-  instruction_set_ = instruction_set;
-  code_ = code;
-  frame_size_in_bytes_ = frame_size_in_bytes;
-  core_spill_mask_ = core_spill_mask;
-  fp_spill_mask_ = fp_spill_mask;
 }
 
 CompiledMethod::~CompiledMethod() {}
diff --git a/src/compiled_method.h b/src/compiled_method.h
index 4e0870b..b09a762 100644
--- a/src/compiled_method.h
+++ b/src/compiled_method.h
@@ -54,11 +54,11 @@
                                  InstructionSet instruction_set);
 
  private:
-  InstructionSet instruction_set_;
+  const InstructionSet instruction_set_;
   std::vector<uint8_t> code_;
-  size_t frame_size_in_bytes_;
-  uint32_t core_spill_mask_;
-  uint32_t fp_spill_mask_;
+  const size_t frame_size_in_bytes_;
+  const uint32_t core_spill_mask_;
+  const uint32_t fp_spill_mask_;
   std::vector<uint32_t> mapping_table_;
   std::vector<uint16_t> vmap_table_;
 };
diff --git a/src/compiler.cc b/src/compiler.cc
index 96beaa6..b274420 100644
--- a/src/compiler.cc
+++ b/src/compiler.cc
@@ -24,11 +24,13 @@
   ByteArray* CreateAbstractMethodErrorStub();
   CompiledInvokeStub* ArmCreateInvokeStub(bool is_static, const char* shorty);
   ByteArray* ArmCreateResolutionTrampoline(Runtime::TrampolineType type);
+  ByteArray* CreateJniDlsymLookupStub();
 }
 namespace x86 {
   ByteArray* CreateAbstractMethodErrorStub();
   CompiledInvokeStub* X86CreateInvokeStub(bool is_static, const char* shorty);
   ByteArray* X86CreateResolutionTrampoline(Runtime::TrampolineType type);
+  ByteArray* CreateJniDlsymLookupStub();
 }
 
 Compiler::Compiler(InstructionSet instruction_set, bool image)
@@ -55,6 +57,19 @@
   }
 }
 
+ByteArray* Compiler::CreateJniDlsymLookupStub(InstructionSet instruction_set) {
+  switch (instruction_set) {
+    case kArm:
+    case kThumb2:
+      return arm::CreateJniDlsymLookupStub();
+    case kX86:
+      return x86::CreateJniDlsymLookupStub();
+    default:
+      LOG(FATAL) << "Unknown InstructionSet " << (int) instruction_set;
+      return NULL;
+  }
+}
+
 ByteArray* Compiler::CreateAbstractMethodErrorStub(InstructionSet instruction_set) {
   if (instruction_set == kX86) {
     return x86::CreateAbstractMethodErrorStub();
@@ -84,8 +99,7 @@
   const DexCache* dex_cache = method->GetDeclaringClass()->GetDexCache();
   const DexFile& dex_file = Runtime::Current()->GetClassLinker()->FindDexFile(dex_cache);
   uint32_t method_idx = method->GetDexMethodIndex();
-  CompileMethod(method->IsDirect(), method->IsNative(), method->IsStatic(), method->IsAbstract(),
-                method_idx, class_loader, dex_file);
+  CompileMethod(method->GetAccessFlags(), method_idx, class_loader, dex_file);
   SetCodeAndDirectMethods(class_loader);
 }
 
@@ -289,34 +303,26 @@
   }
   // Compile direct methods
   while (it.HasNextDirectMethod()) {
-    bool is_native = (it.GetMemberAccessFlags() & kAccNative) != 0;
-    bool is_static = (it.GetMemberAccessFlags() & kAccStatic) != 0;
-    bool is_abstract = (it.GetMemberAccessFlags() & kAccAbstract) != 0;
-    CompileMethod(true, is_native, is_static, is_abstract, it.GetMemberIndex(), class_loader,
-                  dex_file);
+    CompileMethod(it.GetMemberAccessFlags(), it.GetMemberIndex(), class_loader, dex_file);
     it.Next();
   }
   // Compile virtual methods
   while (it.HasNextVirtualMethod()) {
-    bool is_native = (it.GetMemberAccessFlags() & kAccNative) != 0;
-    bool is_static = (it.GetMemberAccessFlags() & kAccStatic) != 0;
-    bool is_abstract = (it.GetMemberAccessFlags() & kAccAbstract) != 0;
-    CompileMethod(false, is_native, is_static, is_abstract, it.GetMemberIndex(), class_loader,
-                  dex_file);
+    CompileMethod(it.GetMemberAccessFlags(), it.GetMemberIndex(), class_loader, dex_file);
     it.Next();
   }
   DCHECK(!it.HasNext());
 }
 
-void Compiler::CompileMethod(bool is_direct, bool is_native, bool is_static, bool is_abstract,
-                             uint32_t method_idx, const ClassLoader* class_loader,
-                             const DexFile& dex_file) {
+void Compiler::CompileMethod(uint32_t access_flags, uint32_t method_idx,
+                             const ClassLoader* class_loader, const DexFile& dex_file) {
   CompiledMethod* compiled_method = NULL;
-  if (is_native) {
-    compiled_method = jni_compiler_.Compile(is_direct, method_idx, class_loader, dex_file);
+  if ((access_flags & kAccNative) != 0) {
+    compiled_method = jni_compiler_.Compile(access_flags, method_idx, class_loader, dex_file);
     CHECK(compiled_method != NULL);
-  } else if (is_abstract) {
+  } else if ((access_flags & kAccAbstract) != 0) {
   } else {
+    bool is_direct = (access_flags & (kAccStatic | kAccPrivate | kAccConstructor)) != 0;
     compiled_method = oatCompileMethod(*this, is_direct, method_idx, class_loader, dex_file,
                                        kThumb2);
     // TODO: assert compiled_method is not NULL, currently NULL may be returned if the method
@@ -335,6 +341,7 @@
   }
 
   const char* shorty = dex_file.GetMethodShorty(dex_file.GetMethodId(method_idx));
+  bool is_static = (access_flags & kAccStatic) != 0;
   const CompiledInvokeStub* compiled_invoke_stub = FindInvokeStub(is_static, shorty);
   if (compiled_invoke_stub == NULL) {
     if (instruction_set_ == kX86) {
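
CompileMethod and JniCompiler::Compile now take the raw access flags and decode the properties they need locally rather than pulling them off a resolved Method*. A sketch of that decoding; the numeric values follow the DEX format specification and the names mirror ART's kAcc* constants:

#include <cstdint>
#include <cstdio>

const uint32_t kAccPrivate      = 0x0002;
const uint32_t kAccStatic       = 0x0008;
const uint32_t kAccSynchronized = 0x0020;
const uint32_t kAccNative       = 0x0100;
const uint32_t kAccAbstract     = 0x0400;
const uint32_t kAccConstructor  = 0x10000;

int main() {
  uint32_t access_flags = kAccStatic | kAccNative | kAccSynchronized;
  bool is_native       = (access_flags & kAccNative) != 0;
  bool is_abstract     = (access_flags & kAccAbstract) != 0;
  bool is_static       = (access_flags & kAccStatic) != 0;
  bool is_synchronized = (access_flags & kAccSynchronized) != 0;
  // Direct methods are static, private or constructors; everything else is virtual.
  bool is_direct = (access_flags & (kAccStatic | kAccPrivate | kAccConstructor)) != 0;
  printf("native=%d abstract=%d static=%d synchronized=%d direct=%d\n",
         is_native, is_abstract, is_static, is_synchronized, is_direct);
  return 0;
}
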
diff --git a/src/compiler.h b/src/compiler.h
index f29fbbc..18079644 100644
--- a/src/compiler.h
+++ b/src/compiler.h
@@ -54,6 +54,8 @@
   static ByteArray* CreateResolutionStub(InstructionSet instruction_set,
                                          Runtime::TrampolineType type);
 
+  static ByteArray* CreateJniDlsymLookupStub(InstructionSet instruction_set);
+
   // A method is uniquely located by its DexFile and index into the method_id table of that dex file
   typedef std::pair<const DexFile*, uint32_t> MethodReference;
 
@@ -93,8 +95,8 @@
   void CompileDexFile(const ClassLoader* class_loader, const DexFile& dex_file);
   void CompileClass(const DexFile::ClassDef& class_def, const ClassLoader* class_loader,
                     const DexFile& dex_file);
-  void CompileMethod(bool is_direct, bool is_native, bool is_static, bool is_abstract,
-                     uint32_t method_idx, const ClassLoader* class_loader, const DexFile& dex_file);
+  void CompileMethod(uint32_t access_flags, uint32_t method_idx, const ClassLoader* class_loader,
+                     const DexFile& dex_file);
 
   // After compiling, walk all the DexCaches and set the code and
   // method pointers of CodeAndDirectMethods entries in the DexCaches.
diff --git a/src/dex2oat.cc b/src/dex2oat.cc
index bcce5b6..6cf6df6 100644
--- a/src/dex2oat.cc
+++ b/src/dex2oat.cc
@@ -282,8 +282,8 @@
   }
 
   // if we loaded an existing image, we will reuse values from the image roots.
-  if (!runtime->HasJniStubArray()) {
-    runtime->SetJniStubArray(JniCompiler::CreateJniStub(kThumb2));
+  if (!runtime->HasJniDlsymLookupStub()) {
+    runtime->SetJniDlsymLookupStub(Compiler::CreateJniDlsymLookupStub(kThumb2));
   }
   if (!runtime->HasAbstractMethodErrorStubArray()) {
     runtime->SetAbstractMethodErrorStubArray(Compiler::CreateAbstractMethodErrorStub(kThumb2));
diff --git a/src/image_test.cc b/src/image_test.cc
index 88ddae9..812b281 100644
--- a/src/image_test.cc
+++ b/src/image_test.cc
@@ -69,7 +69,7 @@
   ASSERT_TRUE(runtime_.get() != NULL);
   class_linker_ = runtime_->GetClassLinker();
 
-  ASSERT_TRUE(runtime_->GetJniStubArray() != NULL);
+  ASSERT_TRUE(runtime_->GetJniDlsymLookupStub() != NULL);
 
   ASSERT_EQ(2U, Heap::GetSpaces().size());
   ASSERT_TRUE(Heap::GetSpaces()[0]->IsImageSpace());
diff --git a/src/image_writer.cc b/src/image_writer.cc
index 6e4f6ae..ef577af 100644
--- a/src/image_writer.cc
+++ b/src/image_writer.cc
@@ -140,7 +140,7 @@
   // build an Object[] of the roots needed to restore the runtime
   SirtRef<ObjectArray<Object> > image_roots(
       ObjectArray<Object>::Alloc(object_array_class, ImageHeader::kImageRootsMax));
-  image_roots->Set(ImageHeader::kJniStubArray, runtime->GetJniStubArray());
+  image_roots->Set(ImageHeader::kJniStubArray, runtime->GetJniDlsymLookupStub());
   image_roots->Set(ImageHeader::kAbstractMethodErrorStubArray,
                    runtime->GetAbstractMethodErrorStubArray());
   image_roots->Set(ImageHeader::kInstanceResolutionStubArray,
@@ -285,7 +285,7 @@
   if (orig->IsNative()) {
     // The native method's pointer is directed to a stub to lookup via dlsym.
     // Note this is not the code_ pointer, that is handled above.
-    ByteArray* orig_jni_stub_array_ = Runtime::Current()->GetJniStubArray();
+    ByteArray* orig_jni_stub_array_ = Runtime::Current()->GetJniDlsymLookupStub();
     ByteArray* copy_jni_stub_array_ = down_cast<ByteArray*>(GetImageAddress(orig_jni_stub_array_));
     copy->native_method_ = copy_jni_stub_array_->GetData();
   } else {
diff --git a/src/jni_compiler.cc b/src/jni_compiler.cc
index 5a4056a..6758026 100644
--- a/src/jni_compiler.cc
+++ b/src/jni_compiler.cc
@@ -19,27 +19,6 @@
 
 namespace art {
 
-namespace arm {
-ByteArray* CreateJniStub();
-}
-
-namespace x86 {
-ByteArray* CreateJniStub();
-}
-
-ByteArray* JniCompiler::CreateJniStub(InstructionSet instruction_set) {
-  switch (instruction_set) {
-    case kArm:
-    case kThumb2:
-      return arm::CreateJniStub();
-    case kX86:
-      return x86::CreateJniStub();
-    default:
-      LOG(FATAL) << "Unknown InstructionSet " << (int) instruction_set;
-      return NULL;
-  }
-}
-
 JniCompiler::JniCompiler(InstructionSet instruction_set) {
   if (instruction_set == kThumb2) {
     // currently only ARM code generation is supported
@@ -56,19 +35,17 @@
 //   registers, a reference to the method object is supplied as part of this
 //   convention.
 //
-CompiledMethod* JniCompiler::Compile(bool is_direct, uint32_t method_idx,
+CompiledMethod* JniCompiler::Compile(uint32_t access_flags, uint32_t method_idx,
                                      const ClassLoader* class_loader, const DexFile& dex_file) {
-  ClassLinker* linker = Runtime::Current()->GetClassLinker();
-  DexCache* dex_cache = linker->FindDexCache(dex_file);
-  Method* native_method = linker->ResolveMethod(dex_file, method_idx, dex_cache,
-                                                class_loader, is_direct);
-  CHECK(native_method->IsNative());
-
+  CHECK((access_flags & kAccNative) != 0);
+  const bool is_static = (access_flags & kAccStatic) != 0;
+  const bool is_synchronized = (access_flags & kAccSynchronized) != 0;
+  const char* shorty = dex_file.GetMethodShorty(dex_file.GetMethodId(method_idx));
   // Calling conventions used to iterate over parameters to method
   UniquePtr<JniCallingConvention> jni_conv(
-      JniCallingConvention::Create(native_method, instruction_set_));
+      JniCallingConvention::Create(is_static, is_synchronized, shorty, instruction_set_));
   UniquePtr<ManagedRuntimeCallingConvention> mr_conv(
-      ManagedRuntimeCallingConvention::Create(native_method, instruction_set_));
+      ManagedRuntimeCallingConvention::Create(is_static, is_synchronized, shorty, instruction_set_));
 
   // Assembler that holds generated instructions
   UniquePtr<Assembler> jni_asm(Assembler::Create(instruction_set_));
@@ -80,9 +57,6 @@
   const Offset monitor_enter(OFFSETOF_MEMBER(JNINativeInterface, MonitorEnter));
   const Offset monitor_exit(OFFSETOF_MEMBER(JNINativeInterface, MonitorExit));
 
-  // Cache of IsStatic as we call it often enough
-  const bool is_static = native_method->IsStatic();
-
   // 1. Build the frame saving all callee saves
   const size_t frame_size(jni_conv->FrameSize());
   const std::vector<ManagedRegister>& callee_save_regs = jni_conv->CalleeSaveRegisters();
@@ -162,7 +136,7 @@
   __ IncreaseFrameSize(out_arg_size);
 
   // 6. Acquire lock for synchronized methods.
-  if (native_method->IsSynchronized()) {
+  if (is_synchronized) {
     // Compute arguments in registers to preserve
     mr_conv->ResetIterator(FrameOffset(frame_size + out_arg_size));
     std::vector<ManagedRegister> live_argument_regs;
@@ -310,7 +284,7 @@
   }
 
   // 10. Release lock for synchronized methods.
-  if (native_method->IsSynchronized()) {
+  if (is_synchronized) {
     mr_conv->ResetIterator(FrameOffset(frame_size+out_arg_size));
     jni_conv->ResetIterator(FrameOffset(out_arg_size));
     jni_conv->Next();  // Skip JNIEnv*
@@ -424,7 +398,7 @@
   __ ExceptionPoll(jni_conv->InterproceduralScratchRegister());
 
   // 16. Remove activation
-  if (native_method->IsSynchronized()) {
+  if (is_synchronized) {
     __ RemoveFrame(frame_size, callee_save_regs);
   } else {
     // no need to restore callee save registers because we didn't
diff --git a/src/jni_compiler.h b/src/jni_compiler.h
index dbdfa68..f6e8cd4 100644
--- a/src/jni_compiler.h
+++ b/src/jni_compiler.h
@@ -6,12 +6,14 @@
 #include "compiled_method.h"
 #include "constants.h"
 #include "macros.h"
-#include "object.h"
+#include "thread.h"
 
 namespace art {
 
 class Assembler;
+class ClassLoader;
 class Compiler;
+class DexFile;
 class JniCallingConvention;
 class ManagedRegister;
 class ManagedRuntimeCallingConvention;
@@ -25,12 +27,8 @@
   explicit JniCompiler(InstructionSet instruction_set);
   ~JniCompiler();
 
-  CompiledMethod* Compile(bool is_direct, uint32_t method_idx, const ClassLoader* class_loader,
-                          const DexFile& dex_file);
-
-  // Stub to perform native method symbol lookup via dlsym
-  // TODO: remove from JniCompiler
-  static ByteArray* CreateJniStub(InstructionSet instruction_set);
+  CompiledMethod* Compile(uint32_t access_flags, uint32_t method_idx,
+                          const ClassLoader* class_loader, const DexFile& dex_file);
 
  private:
   // Copy a single parameter from the managed to the JNI calling convention
diff --git a/src/object.cc b/src/object.cc
index 3954b3d..50a2117 100644
--- a/src/object.cc
+++ b/src/object.cc
@@ -760,7 +760,7 @@
 
 bool Method::IsRegistered() const {
   void* native_method = GetFieldPtr<void*>(OFFSET_OF_OBJECT_MEMBER(Method, native_method_), false);
-  void* jni_stub = Runtime::Current()->GetJniStubArray()->GetData();
+  void* jni_stub = Runtime::Current()->GetJniDlsymLookupStub()->GetData();
   return native_method != jni_stub;
 }
 
@@ -774,7 +774,7 @@
 void Method::UnregisterNative() {
   CHECK(IsNative()) << PrettyMethod(this);
   // restore stub to lookup native pointer via dlsym
-  RegisterNative(Runtime::Current()->GetJniStubArray()->GetData());
+  RegisterNative(Runtime::Current()->GetJniDlsymLookupStub()->GetData());
 }
 
 void Class::SetStatus(Status new_status) {
diff --git a/src/primitive.h b/src/primitive.h
index 259378d..240892f 100644
--- a/src/primitive.h
+++ b/src/primitive.h
@@ -68,6 +68,7 @@
 
   static size_t ComponentSize(Type type) {
     switch (type) {
+      case kPrimVoid:    return 0;
       case kPrimBoolean:
       case kPrimByte:    return 1;
       case kPrimChar:
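
The new kPrimVoid case lets Primitive::ComponentSize be used for return types, which is what the new CallingConvention::SizeOfReturnValue() relies on. A self-contained approximation of that logic, with the switch below standing in for Primitive::GetType/ComponentSize:

#include <cstddef>
#include <cstdio>

// Size of a shorty return character, with void mapping to 0 (the new case).
static size_t ComponentSizeForShortyChar(char ch) {
  switch (ch) {
    case 'V': return 0;
    case 'Z': case 'B': return 1;
    case 'C': case 'S': return 2;
    case 'I': case 'F': return 4;
    case 'J': case 'D': return 8;
    default:  return sizeof(void*);  // 'L': a reference
  }
}

// Mirrors SizeOfReturnValue(): sub-word results are widened to a 32-bit slot,
// while void stays 0 and wide/reference types keep their size.
static size_t SizeOfReturnValue(const char* shorty) {
  size_t result = ComponentSizeForShortyChar(shorty[0]);
  if (result >= 1 && result < 4) {
    result = 4;
  }
  return result;
}

int main() {
  printf("V=%zu Z=%zu J=%zu\n",
         SizeOfReturnValue("V"), SizeOfReturnValue("Z"), SizeOfReturnValue("JI"));
  return 0;
}
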
diff --git a/src/runtime.cc b/src/runtime.cc
index c1060f7..083e3ae 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -731,16 +731,16 @@
   }
 }
 
-bool Runtime::HasJniStubArray() const {
+bool Runtime::HasJniDlsymLookupStub() const {
   return jni_stub_array_ != NULL;
 }
 
-ByteArray* Runtime::GetJniStubArray() const {
+ByteArray* Runtime::GetJniDlsymLookupStub() const {
   CHECK(jni_stub_array_ != NULL);
   return jni_stub_array_;
 }
 
-void Runtime::SetJniStubArray(ByteArray* jni_stub_array) {
+void Runtime::SetJniDlsymLookupStub(ByteArray* jni_stub_array) {
   CHECK(jni_stub_array != NULL)  << " jni_stub_array=" << jni_stub_array;
   CHECK(jni_stub_array_ == NULL || jni_stub_array_ == jni_stub_array)
       << "jni_stub_array_=" << jni_stub_array_ << " jni_stub_array=" << jni_stub_array;
diff --git a/src/runtime.h b/src/runtime.h
index 504f2fe..50f9ed1 100644
--- a/src/runtime.h
+++ b/src/runtime.h
@@ -164,9 +164,9 @@
 
   void VisitRoots(Heap::RootVisitor* visitor, void* arg) const;
 
-  bool HasJniStubArray() const;
-  ByteArray* GetJniStubArray() const;
-  void SetJniStubArray(ByteArray* jni_stub_array);
+  bool HasJniDlsymLookupStub() const;
+  ByteArray* GetJniDlsymLookupStub() const;
+  void SetJniDlsymLookupStub(ByteArray* jni_stub_array);
 
   bool HasAbstractMethodErrorStubArray() const;
   ByteArray* GetAbstractMethodErrorStubArray() const;
diff --git a/src/space.cc b/src/space.cc
index 4822ceb..6421188 100644
--- a/src/space.cc
+++ b/src/space.cc
@@ -137,7 +137,7 @@
   DCHECK_EQ(0, memcmp(&image_header, image_header_, sizeof(ImageHeader)));
 
   Object* jni_stub_array = image_header.GetImageRoot(ImageHeader::kJniStubArray);
-  runtime->SetJniStubArray(down_cast<ByteArray*>(jni_stub_array));
+  runtime->SetJniDlsymLookupStub(down_cast<ByteArray*>(jni_stub_array));
 
   Object* ame_stub_array = image_header.GetImageRoot(ImageHeader::kAbstractMethodErrorStubArray);
   runtime->SetAbstractMethodErrorStubArray(down_cast<ByteArray*>(ame_stub_array));
diff --git a/src/stack_indirect_reference_table.h b/src/stack_indirect_reference_table.h
index 8b98763..0246419 100644
--- a/src/stack_indirect_reference_table.h
+++ b/src/stack_indirect_reference_table.h
@@ -17,9 +17,11 @@
 #ifndef ART_SRC_STACK_INDIRECT_REFERENCE_TABLE_H_
 #define ART_SRC_STACK_INDIRECT_REFERENCE_TABLE_H_
 
-namespace art {
-
+#include "logging.h"
 #include "macros.h"
+#include "thread.h"
+
+namespace art {
 
 class Object;
 
diff --git a/src/stub_arm.cc b/src/stub_arm.cc
index ae9acd62..b88cf6e 100644
--- a/src/stub_arm.cc
+++ b/src/stub_arm.cc
@@ -83,14 +83,13 @@
   size_t cs = assembler->CodeSize();
   SirtRef<ByteArray> abstract_stub(ByteArray::Alloc(cs));
   CHECK(abstract_stub.get() != NULL);
-  CHECK(abstract_stub->GetClass()->GetDescriptor());
   MemoryRegion code(abstract_stub->GetData(), abstract_stub->GetLength());
   assembler->FinalizeInstructions(code);
 
   return abstract_stub.get();
 }
 
-ByteArray* CreateJniStub() {
+ByteArray* CreateJniDlsymLookupStub() {
   UniquePtr<ArmAssembler> assembler(static_cast<ArmAssembler*>(Assembler::Create(kArm)));
   // Build frame and save argument registers and LR.
   RegList save = (1 << R0) | (1 << R1) | (1 << R2) | (1 << R3) | (1 << LR);
diff --git a/src/stub_x86.cc b/src/stub_x86.cc
index 7660f6f..f5c92b9 100644
--- a/src/stub_x86.cc
+++ b/src/stub_x86.cc
@@ -55,7 +55,7 @@
   return abstract_stub.get();
 }
 
-ByteArray* CreateJniStub() {
+ByteArray* CreateJniDlsymLookupStub() {
   UniquePtr<X86Assembler> assembler(static_cast<X86Assembler*>(Assembler::Create(kX86)));
 
   // Pad stack to ensure 16-byte alignment