Opt Compiler: Materialise constants that cannot be encoded

The VIXL MacroAssembler deals gracefully with any immediate. However,
when a constant has multiple uses and cannot be encoded in the
instruction's immediate field, we are better off materialising it in a
register, so that the constant generation is shared between its uses.

E.g.:
  var += #Const;    // #Const cannot be encoded.
  var += #Const;

Before:                 After:
  mov wip0, #Const        mov w4, #Const
  add w0, w0, wip0        add w0, w0, w4
  mov wip0, #Const        add w0, w0, w4
  add w0, w0, wip0
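
For the ADD/SUB case, "cannot be encoded" means the value does not fit
the AArch64 ADD/SUB (immediate) form: an unsigned 12-bit immediate,
optionally shifted left by 12 bits. Below is a minimal standalone sketch
of that check, equivalent in spirit to vixl::Assembler::IsImmAddSub used
in this patch; the helper name is illustrative and not part of ART:

  #include <cstdint>

  // True if `value` fits the AArch64 ADD/SUB (immediate) encoding:
  // an unsigned 12-bit immediate, optionally shifted left by 12 bits.
  // Negative values do not fit this form; the macro assembler handles
  // them through other strategies (e.g. the SUB alias or a mov).
  bool IsEncodableAddSubImmediate(int64_t value) {
    uint64_t v = static_cast<uint64_t>(value);
    bool fits_imm12 = (v >> 12) == 0;                  // 0x0 .. 0xfff
    bool fits_imm12_shifted =
        ((v & 0xfff) == 0) && ((v >> 24) == 0);        // imm12 << 12
    return fits_imm12 || fits_imm12_shifted;
  }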

Change-Id: Ied8577c879845777e52867aced16b2b45e06ac6c
Signed-off-by: Serban Constantinescu <serban.constantinescu@arm.com>
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index c48cab4..8ee150b 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -63,6 +63,7 @@
 using helpers::VIXLRegCodeFromART;
 using helpers::WRegisterFrom;
 using helpers::XRegisterFrom;
+using helpers::ARM64EncodableConstantOrRegister;
 
 static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
 static constexpr int kCurrentMethodStackOffset = 0;
@@ -1106,7 +1107,7 @@
     case Primitive::kPrimInt:
     case Primitive::kPrimLong:
       locations->SetInAt(0, Location::RequiresRegister());
-      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
+      locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
       locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
       break;
 
@@ -1398,7 +1399,7 @@
   switch (in_type) {
     case Primitive::kPrimLong: {
       locations->SetInAt(0, Location::RequiresRegister());
-      locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
+      locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
       locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
       break;
     }
@@ -1468,7 +1469,7 @@
 void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
   locations->SetInAt(0, Location::RequiresRegister());
-  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
+  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
   if (instruction->NeedsMaterialization()) {
     locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
   }
@@ -2120,7 +2121,7 @@
   switch (neg->GetResultType()) {
     case Primitive::kPrimInt:
     case Primitive::kPrimLong:
-      locations->SetInAt(0, Location::RegisterOrConstant(neg->InputAt(0)));
+      locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
       locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
       break;
 
diff --git a/compiler/optimizing/common_arm64.h b/compiler/optimizing/common_arm64.h
index 9447d3b..fd8c0c6 100644
--- a/compiler/optimizing/common_arm64.h
+++ b/compiler/optimizing/common_arm64.h
@@ -183,6 +183,40 @@
   }
 }
 
+static bool CanEncodeConstantAsImmediate(HConstant* constant, HInstruction* instr) {
+  DCHECK(constant->IsIntConstant() || constant->IsLongConstant() || constant->IsNullConstant());
+
+  // For single uses we let VIXL handle the constant generation since it will
+  // use registers that are not managed by the register allocator (wip0, wip1).
+  if (constant->GetUses().HasOnlyOneUse()) {
+    return true;
+  }
+
+  int64_t value = CodeGenerator::GetInt64ValueOf(constant);
+
+  if (instr->IsAdd() || instr->IsSub() || instr->IsCondition() || instr->IsCompare()) {
+    // Uses aliases of ADD/SUB instructions.
+    return vixl::Assembler::IsImmAddSub(value);
+  } else if (instr->IsAnd() || instr->IsOr() || instr->IsXor()) {
+    // Uses logical operations.
+    return vixl::Assembler::IsImmLogical(value, vixl::kXRegSize);
+  } else {
+    DCHECK(instr->IsNeg());
+    // Uses mov -immediate.
+    return vixl::Assembler::IsImmMovn(value, vixl::kXRegSize);
+  }
+}
+
+static inline Location ARM64EncodableConstantOrRegister(HInstruction* constant,
+                                                        HInstruction* instr) {
+  if (constant->IsConstant()
+      && CanEncodeConstantAsImmediate(constant->AsConstant(), instr)) {
+    return Location::ConstantLocation(constant->AsConstant());
+  }
+
+  return Location::RequiresRegister();
+}
+
 }  // namespace helpers
 }  // namespace arm64
 }  // namespace art