ART: Fuse compare-with-0-and-branch in Arm64 utils-assembler

This patch replaces a compare-against-zero followed by a conditional branch (cmp + b.eq / b.ne) with the equivalent single-instruction forms cbz / cbnz.

Change-Id: I3d146a9921c471f08ba7304f1ca1b427d8e7dcf9
Signed-off-by: Serban Constantinescu <serban.constantinescu@arm.com>
diff --git a/compiler/utils/arm64/assembler_arm64.cc b/compiler/utils/arm64/assembler_arm64.cc
index 009b227..5b97ba0 100644
--- a/compiler/utils/arm64/assembler_arm64.cc
+++ b/compiler/utils/arm64/assembler_arm64.cc
@@ -595,8 +595,7 @@
     // FIXME: Who sets the flags here?
     LoadImmediate(out_reg.AsCoreRegister(), 0, EQ);
   }
-  ___ Cmp(reg_x(in_reg.AsCoreRegister()), 0);
-  ___ B(&exit, COND_OP(EQ));
+  ___ Cbz(reg_x(in_reg.AsCoreRegister()), &exit);
   LoadFromOffset(out_reg.AsCoreRegister(), in_reg.AsCoreRegister(), 0);
   ___ Bind(&exit);
 }
@@ -607,8 +606,7 @@
   Arm64Exception *current_exception = new Arm64Exception(scratch, stack_adjust);
   exception_blocks_.push_back(current_exception);
   LoadFromOffset(scratch.AsCoreRegister(), ETR, Thread::ExceptionOffset<8>().Int32Value());
-  ___ Cmp(reg_x(scratch.AsCoreRegister()), 0);
-  ___ B(current_exception->Entry(), COND_OP(NE));
+  ___ Cbnz(reg_x(scratch.AsCoreRegister()), current_exception->Entry());
 }
 
 void Arm64Assembler::EmitExceptionPoll(Arm64Exception *exception) {