InterpBreak cleanup (part 1)

Moved the suspend count variables out of the interpBreak structure and
made them plain fields of Thread.  They are already protected by a
mutex, and we need the space in interpBreak for additional subMode
flags.  This CL just does the move and widens subMode to 16 bits.
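
For reference, the suspend counts now live directly on Thread and are
updated while holding the existing suspend-count lock.  A rough sketch
of the new fields and the intended usage (illustrative only; the real
declarations and callers are in the Thread.h and Thread.cpp hunks
below):

    /* Thread.h: plain fields, guarded by gDvm.threadSuspendCountLock
     * rather than by atomic updates of interpBreak.all */
    volatile int suspendCount;
    volatile int dbgSuspendCount;

    /* typical caller (suspend path in Thread.cpp) */
    lockThreadSuspendCount();
    dvmAddToSuspendCounts(thread, 1, 1);   /* +1 total, +1 debugger part */
    unlockThreadSuspendCount();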

Change-Id: I4a6070b1ba4fb08a0f6e0aba6f150b30f9159eed
diff --git a/vm/Debugger.cpp b/vm/Debugger.cpp
index 781b96b..b7347f7 100644
--- a/vm/Debugger.cpp
+++ b/vm/Debugger.cpp
@@ -1792,7 +1792,7 @@
     if (thread == NULL)
         goto bail;
 
-    result = thread->interpBreak.ctl.suspendCount;
+    result = thread->suspendCount;
 
 bail:
     dvmUnlockThreadList();
@@ -2662,11 +2662,11 @@
      * by rejecting the method invocation request.  Without this, we will
      * be stuck waiting on a suspended thread.
      */
-    if (targetThread->interpBreak.ctl.suspendCount > 1) {
+    if (targetThread->suspendCount > 1) {
         LOGW("threadid=%d: suspend count on threadid=%d is %d, too deep "
              "for method exec\n",
             dvmThreadSelf()->threadId, targetThread->threadId,
-            targetThread->interpBreak.ctl.suspendCount);
+            targetThread->suspendCount);
         dvmUnlockThreadList();
         return ERR_THREAD_SUSPENDED;     /* probably not expected here */
     }
diff --git a/vm/Globals.h b/vm/Globals.h
index 0415305..0e3207d 100644
--- a/vm/Globals.h
+++ b/vm/Globals.h
@@ -456,7 +456,7 @@
     pthread_mutex_t _threadSuspendLock;
 
     /*
-     * Guards Thread->interpBreak.ctl.suspendCount for all threads, and
+     * Guards Thread->suspendCount for all threads, and
      * provides the lock for the condition variable that all suspended threads
      * sleep on (threadSuspendCountCond).
      *
diff --git a/vm/Thread.cpp b/vm/Thread.cpp
index 4d18e14..6c3f27e 100644
--- a/vm/Thread.cpp
+++ b/vm/Thread.cpp
@@ -1618,7 +1618,7 @@
 
     /* Remove this thread's suspendCount from global suspendCount sum */
     lockThreadSuspendCount();
-    dvmAddToSuspendCounts(self, -self->interpBreak.ctl.suspendCount, 0);
+    dvmAddToSuspendCounts(self, -self->suspendCount, 0);
     unlockThreadSuspendCount();
 }
 
@@ -2187,7 +2187,7 @@
     dvmAddToSuspendCounts(thread, 1, 1);
 
     LOG_THREAD("threadid=%d: suspend++, now=%d\n",
-        thread->threadId, thread->interpBreak.ctl.suspendCount);
+        thread->threadId, thread->suspendCount);
     unlockThreadSuspendCount();
 
     waitForThreadSuspend(dvmThreadSelf(), thread);
@@ -2211,7 +2211,7 @@
     //assert(thread->handle != dvmJdwpGetDebugThread(gDvm.jdwpState));
 
     lockThreadSuspendCount();
-    if (thread->interpBreak.ctl.suspendCount > 0) {
+    if (thread->suspendCount > 0) {
         dvmAddToSuspendCounts(thread, -1, -1);
     } else {
         LOG_THREAD("threadid=%d:  suspendCount already zero\n",
@@ -2219,9 +2219,9 @@
     }
 
     LOG_THREAD("threadid=%d: suspend--, now=%d\n",
-        thread->threadId, thread->interpBreak.ctl.suspendCount);
+        thread->threadId, thread->suspendCount);
 
-    if (thread->interpBreak.ctl.suspendCount == 0) {
+    if (thread->suspendCount == 0) {
         dvmBroadcastCond(&gDvm.threadSuspendCountCond);
     }
 
@@ -2253,7 +2253,7 @@
     /*
      * Suspend ourselves.
      */
-    assert(self->interpBreak.ctl.suspendCount > 0);
+    assert(self->suspendCount > 0);
     self->status = THREAD_SUSPENDED;
     LOG_THREAD("threadid=%d: self-suspending (dbg)\n", self->threadId);
 
@@ -2270,10 +2270,10 @@
         dvmJdwpClearWaitForEventThread(gDvm.jdwpState);
     }
 
-    while (self->interpBreak.ctl.suspendCount != 0) {
+    while (self->suspendCount != 0) {
         dvmWaitCond(&gDvm.threadSuspendCountCond,
                     &gDvm.threadSuspendCountLock);
-        if (self->interpBreak.ctl.suspendCount != 0) {
+        if (self->suspendCount != 0) {
             /*
              * The condition was signaled but we're still suspended.  This
              * can happen if the debugger lets go while a SIGQUIT thread
@@ -2281,12 +2281,10 @@
              * just long enough to try to grab the thread-suspend lock).
              */
             LOGD("threadid=%d: still suspended after undo (sc=%d dc=%d)\n",
-                self->threadId, self->interpBreak.ctl.suspendCount,
-                self->interpBreak.ctl.dbgSuspendCount);
+                self->threadId, self->suspendCount, self->dbgSuspendCount);
         }
     }
-    assert(self->interpBreak.ctl.suspendCount == 0 &&
-           self->interpBreak.ctl.dbgSuspendCount == 0);
+    assert(self->suspendCount == 0 && self->dbgSuspendCount == 0);
     self->status = THREAD_RUNNING;
     LOG_THREAD("threadid=%d: self-reviving (dbg), status=%d\n",
         self->threadId, self->status);
@@ -2562,7 +2560,7 @@
      * This can happen when a couple of threads have simultaneous events
      * of interest to the debugger.
      */
-    //assert(self->interpBreak.ctl.suspendCount == 0);
+    //assert(self->suspendCount == 0);
 
     /*
      * Increment everybody's suspend count (except our own).
@@ -2613,10 +2611,8 @@
         waitForThreadSuspend(self, thread);
 
         LOG_THREAD("threadid=%d:   threadid=%d status=%d sc=%d dc=%d\n",
-            self->threadId,
-            thread->threadId, thread->status,
-            thread->interpBreak.ctl.suspendCount,
-            thread->interpBreak.ctl.dbgSuspendCount);
+            self->threadId, thread->threadId, thread->status,
+            thread->suspendCount, thread->dbgSuspendCount);
     }
 
     dvmUnlockThreadList();
@@ -2657,7 +2653,7 @@
             continue;
         }
 
-        if (thread->interpBreak.ctl.suspendCount > 0) {
+        if (thread->suspendCount > 0) {
             dvmAddToSuspendCounts(thread, -1,
                                   (why == SUSPEND_FOR_DEBUG ||
                                   why == SUSPEND_FOR_DEBUG_EVENT)
@@ -2745,15 +2741,13 @@
 
         /* debugger events don't suspend JDWP thread */
         if (thread->handle == dvmJdwpGetDebugThread(gDvm.jdwpState)) {
-            assert(thread->interpBreak.ctl.dbgSuspendCount == 0);
+            assert(thread->dbgSuspendCount == 0);
             continue;
         }
 
-        assert(thread->interpBreak.ctl.suspendCount >=
-               thread->interpBreak.ctl.dbgSuspendCount);
-        dvmAddToSuspendCounts(thread,
-                              -thread->interpBreak.ctl.dbgSuspendCount,
-                              -thread->interpBreak.ctl.dbgSuspendCount);
+        assert(thread->suspendCount >= thread->dbgSuspendCount);
+        dvmAddToSuspendCounts(thread, -thread->dbgSuspendCount,
+                              -thread->dbgSuspendCount);
     }
     unlockThreadSuspendCount();
     dvmUnlockThreadList();
@@ -2798,7 +2792,7 @@
      *      we hold suspendCountLock).
      */
 
-    return (thread->interpBreak.ctl.suspendCount != 0 &&
+    return (thread->suspendCount != 0 &&
             thread->status != THREAD_RUNNING);
 }
 
@@ -2843,21 +2837,21 @@
 static bool fullSuspendCheck(Thread* self)
 {
     assert(self != NULL);
-    assert(self->interpBreak.ctl.suspendCount >= 0);
+    assert(self->suspendCount >= 0);
 
     /*
      * Grab gDvm.threadSuspendCountLock.  This gives us exclusive write
-     * access to self->interpBreak.ctl.suspendCount.
+     * access to self->suspendCount.
      */
     lockThreadSuspendCount();   /* grab gDvm.threadSuspendCountLock */
 
-    bool needSuspend = (self->interpBreak.ctl.suspendCount != 0);
+    bool needSuspend = (self->suspendCount != 0);
     if (needSuspend) {
         LOG_THREAD("threadid=%d: self-suspending\n", self->threadId);
         ThreadStatus oldStatus = self->status;      /* should be RUNNING */
         self->status = THREAD_SUSPENDED;
 
-        while (self->interpBreak.ctl.suspendCount != 0) {
+        while (self->suspendCount != 0) {
             /*
              * Wait for wakeup signal, releasing lock.  The act of releasing
              * and re-acquiring the lock provides the memory barriers we
@@ -2866,8 +2860,7 @@
             dvmWaitCond(&gDvm.threadSuspendCountCond,
                     &gDvm.threadSuspendCountLock);
         }
-        assert(self->interpBreak.ctl.suspendCount == 0 &&
-               self->interpBreak.ctl.dbgSuspendCount == 0);
+        assert(self->suspendCount == 0 && self->dbgSuspendCount == 0);
         self->status = oldStatus;
         LOG_THREAD("threadid=%d: self-reviving, status=%d\n",
             self->threadId, self->status);
@@ -2885,7 +2878,7 @@
 bool dvmCheckSuspendPending(Thread* self)
 {
     assert(self != NULL);
-    if (self->interpBreak.ctl.suspendCount == 0) {
+    if (self->suspendCount == 0) {
         return false;
     } else {
         return fullSuspendCheck(self);
@@ -2967,7 +2960,7 @@
         volatile void* raw = reinterpret_cast<volatile void*>(&self->status);
         volatile int32_t* addr = reinterpret_cast<volatile int32_t*>(raw);
         android_atomic_acquire_store(newStatus, addr);
-        if (self->interpBreak.ctl.suspendCount != 0) {
+        if (self->suspendCount != 0) {
             fullSuspendCheck(self);
         }
     } else {
@@ -3378,8 +3371,8 @@
         );
     dvmPrintDebugMessage(target,
         "  | group=\"%s\" sCount=%d dsCount=%d obj=%p self=%p\n",
-        groupName, thread->interpBreak.ctl.suspendCount,
-        thread->interpBreak.ctl.dbgSuspendCount, thread->threadObj, thread);
+        groupName, thread->suspendCount, thread->dbgSuspendCount,
+        thread->threadObj, thread);
     dvmPrintDebugMessage(target,
         "  | sysTid=%d nice=%d sched=%d/%d cgrp=%s handle=%d\n",
         thread->systemTid, getpriority(PRIO_PROCESS, thread->systemTid),
diff --git a/vm/Thread.h b/vm/Thread.h
index d655732..617e702 100644
--- a/vm/Thread.h
+++ b/vm/Thread.h
@@ -90,10 +90,9 @@
 typedef union InterpBreak {
     volatile int64_t   all;
     struct {
+        uint16_t   subMode;
         uint8_t    breakFlags;
-        uint8_t    subMode;
-        int8_t     suspendCount;
-        int8_t     dbgSuspendCount;
+        int8_t     unused;   /* for future expansion */
 #ifndef DVM_NO_ASM_INTERP
         void* curHandlerTable;
 #else
@@ -129,7 +128,10 @@
      * interpBreak contains info about the interpreter mode, as well as
      * a count of the number of times the thread has been suspended.  When
      * the count drops to zero, the thread resumes.
-     *
+     */
+    InterpBreak interpBreak;
+
+    /*
      * "dbgSuspendCount" is the portion of the suspend count that the
      * debugger is responsible for.  This has to be tracked separately so
      * that we can recover correctly if the debugger abruptly disconnects
@@ -142,7 +144,9 @@
      * Note the non-debug component will rarely be other than 1 or 0 -- (not
      * sure it's even possible with the way mutexes are currently used.)
      */
-    InterpBreak interpBreak;
+
+    volatile int suspendCount;
+    volatile int dbgSuspendCount;
 
     u1*         cardTable;
 
@@ -162,8 +166,6 @@
     /* thread handle, as reported by pthread_self() */
     pthread_t   handle;
 
-
-
     /* Assembly interpreter handler tables */
 #ifndef DVM_NO_ASM_INTERP
     void*       mainHandlerTable;   // Table of actual instruction handler
diff --git a/vm/compiler/Compiler.cpp b/vm/compiler/Compiler.cpp
index d730066..763c90e 100644
--- a/vm/compiler/Compiler.cpp
+++ b/vm/compiler/Compiler.cpp
@@ -150,7 +150,7 @@
 
     dvmLockMutex(&gDvmJit.compilerLock);
     while (workQueueLength() != 0 && !gDvmJit.haltCompilerThread &&
-           self->interpBreak.ctl.suspendCount == 0) {
+           self->suspendCount == 0) {
         /*
          * Use timed wait here - more than one mutator threads may be blocked
          * but the compiler thread will only signal once when the queue is
diff --git a/vm/compiler/template/armv5te/footer.S b/vm/compiler/template/armv5te/footer.S
index 4015ec9..16660ae 100644
--- a/vm/compiler/template/armv5te/footer.S
+++ b/vm/compiler/template/armv5te/footer.S
@@ -15,7 +15,7 @@
     str     r1, [rSELF, #offThread_curFrame]   @ curFrame = newFp
     str     r9, [r1, #(offStackSaveArea_localRefCookie - sizeofStackSaveArea)]
                                         @ newFp->localRefCookie=top
-    ldrb    lr, [rSELF, #offThread_subMode]
+    ldrh    lr, [rSELF, #offThread_subMode]
     SAVEAREA_FROM_FP(r10, r1)           @ r10<- new stack save area
 
     mov     r2, r0                      @ r2<- methodToCall
diff --git a/vm/compiler/template/out/CompilerTemplateAsm-armv5te-vfp.S b/vm/compiler/template/out/CompilerTemplateAsm-armv5te-vfp.S
index c7be29a..331d902 100644
--- a/vm/compiler/template/out/CompilerTemplateAsm-armv5te-vfp.S
+++ b/vm/compiler/template/out/CompilerTemplateAsm-armv5te-vfp.S
@@ -1866,7 +1866,7 @@
     str     r1, [rSELF, #offThread_curFrame]   @ curFrame = newFp
     str     r9, [r1, #(offStackSaveArea_localRefCookie - sizeofStackSaveArea)]
                                         @ newFp->localRefCookie=top
-    ldrb    lr, [rSELF, #offThread_subMode]
+    ldrh    lr, [rSELF, #offThread_subMode]
     SAVEAREA_FROM_FP(r10, r1)           @ r10<- new stack save area
 
     mov     r2, r0                      @ r2<- methodToCall
diff --git a/vm/compiler/template/out/CompilerTemplateAsm-armv5te.S b/vm/compiler/template/out/CompilerTemplateAsm-armv5te.S
index 6b57c00..044843e 100644
--- a/vm/compiler/template/out/CompilerTemplateAsm-armv5te.S
+++ b/vm/compiler/template/out/CompilerTemplateAsm-armv5te.S
@@ -1597,7 +1597,7 @@
     str     r1, [rSELF, #offThread_curFrame]   @ curFrame = newFp
     str     r9, [r1, #(offStackSaveArea_localRefCookie - sizeofStackSaveArea)]
                                         @ newFp->localRefCookie=top
-    ldrb    lr, [rSELF, #offThread_subMode]
+    ldrh    lr, [rSELF, #offThread_subMode]
     SAVEAREA_FROM_FP(r10, r1)           @ r10<- new stack save area
 
     mov     r2, r0                      @ r2<- methodToCall
diff --git a/vm/compiler/template/out/CompilerTemplateAsm-armv7-a-neon.S b/vm/compiler/template/out/CompilerTemplateAsm-armv7-a-neon.S
index 4c73919..ba798e0 100644
--- a/vm/compiler/template/out/CompilerTemplateAsm-armv7-a-neon.S
+++ b/vm/compiler/template/out/CompilerTemplateAsm-armv7-a-neon.S
@@ -1866,7 +1866,7 @@
     str     r1, [rSELF, #offThread_curFrame]   @ curFrame = newFp
     str     r9, [r1, #(offStackSaveArea_localRefCookie - sizeofStackSaveArea)]
                                         @ newFp->localRefCookie=top
-    ldrb    lr, [rSELF, #offThread_subMode]
+    ldrh    lr, [rSELF, #offThread_subMode]
     SAVEAREA_FROM_FP(r10, r1)           @ r10<- new stack save area
 
     mov     r2, r0                      @ r2<- methodToCall
diff --git a/vm/compiler/template/out/CompilerTemplateAsm-armv7-a.S b/vm/compiler/template/out/CompilerTemplateAsm-armv7-a.S
index a5e7a31..825ac40 100644
--- a/vm/compiler/template/out/CompilerTemplateAsm-armv7-a.S
+++ b/vm/compiler/template/out/CompilerTemplateAsm-armv7-a.S
@@ -1866,7 +1866,7 @@
     str     r1, [rSELF, #offThread_curFrame]   @ curFrame = newFp
     str     r9, [r1, #(offStackSaveArea_localRefCookie - sizeofStackSaveArea)]
                                         @ newFp->localRefCookie=top
-    ldrb    lr, [rSELF, #offThread_subMode]
+    ldrh    lr, [rSELF, #offThread_subMode]
     SAVEAREA_FROM_FP(r10, r1)           @ r10<- new stack save area
 
     mov     r2, r0                      @ r2<- methodToCall
diff --git a/vm/interp/Interp.cpp b/vm/interp/Interp.cpp
index bc2b4ec..40fbcbb 100644
--- a/vm/interp/Interp.cpp
+++ b/vm/interp/Interp.cpp
@@ -1504,9 +1504,6 @@
 {
     InterpBreak oldValue, newValue;
 
-    // Do not use this routine for suspend updates.  See below.
-    assert((newBreak & kInterpSuspendBreak) == 0);
-
     do {
         oldValue = newValue = thread->interpBreak;
         if (enable) {
@@ -1526,31 +1523,17 @@
  * Update the normal and debugger suspend counts for a thread.
  * threadSuspendCount must be acquired before calling this to
  * ensure a clean update of suspendCount, dbgSuspendCount and
- * sumThreadSuspendCount.  suspendCount & dbgSuspendCount must
- * use the atomic update to avoid conflict with writes to the
- * other fields in interpBreak.
+ * sumThreadSuspendCount.
  *
  * CLEANUP TODO: Currently only the JIT is using sumThreadSuspendCount.
  * Move under WITH_JIT ifdefs.
 */
 void dvmAddToSuspendCounts(Thread* thread, int delta, int dbgDelta)
 {
-    InterpBreak oldValue, newValue;
-
-    do {
-        oldValue = newValue = thread->interpBreak;
-        newValue.ctl.suspendCount += delta;
-        newValue.ctl.dbgSuspendCount += dbgDelta;
-        assert(newValue.ctl.suspendCount >= newValue.ctl.dbgSuspendCount);
-        if (newValue.ctl.suspendCount > 0) {
-            newValue.ctl.breakFlags |= kInterpSuspendBreak;
-        } else {
-            newValue.ctl.breakFlags &= ~kInterpSuspendBreak;
-        }
-        newValue.ctl.curHandlerTable = (newValue.ctl.breakFlags) ?
-            thread->altHandlerTable : thread->mainHandlerTable;
-    } while (dvmQuasiAtomicCas64(oldValue.all, newValue.all,
-             &thread->interpBreak.all) != 0);
+    thread->suspendCount += delta;
+    thread->dbgSuspendCount += dbgDelta;
+    dvmUpdateInterpBreak(thread, kInterpSuspendBreak, kSubModeNormal,
+                         (thread->suspendCount != 0) /* enable break? */);
 
     // Update the global suspend count total
     gDvm.sumThreadSuspendCount += delta;
@@ -1749,7 +1732,7 @@
 #endif
 
     /* Safe point handling */
-    if (self->interpBreak.ctl.suspendCount ||
+    if (self->suspendCount ||
         (self->interpBreak.ctl.breakFlags & kInterpSafePointCallback)) {
         // Are we are a safe point?
         int flags;
@@ -1776,7 +1759,7 @@
                 }
             }
             // Need to suspend?
-            if (self->interpBreak.ctl.suspendCount) {
+            if (self->suspendCount) {
                 dvmExportPC(pc, fp);
                 dvmCheckSuspendPending(self);
             }
diff --git a/vm/mterp/armv5te/OP_EXECUTE_INLINE.S b/vm/mterp/armv5te/OP_EXECUTE_INLINE.S
index a77ce12..806eb98 100644
--- a/vm/mterp/armv5te/OP_EXECUTE_INLINE.S
+++ b/vm/mterp/armv5te/OP_EXECUTE_INLINE.S
@@ -14,7 +14,7 @@
      * swap if profiler/debuggger active.
      */
     /* [opt] execute-inline vAA, {vC, vD, vE, vF}, inline@BBBB */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     FETCH(r10, 1)                       @ r10<- BBBB
     EXPORT_PC()                         @ can throw
     ands    r2, #kSubModeDebugProfile   @ Any going on?
diff --git a/vm/mterp/armv5te/OP_EXECUTE_INLINE_RANGE.S b/vm/mterp/armv5te/OP_EXECUTE_INLINE_RANGE.S
index b5b9c32..bb4b0e8 100644
--- a/vm/mterp/armv5te/OP_EXECUTE_INLINE_RANGE.S
+++ b/vm/mterp/armv5te/OP_EXECUTE_INLINE_RANGE.S
@@ -12,7 +12,7 @@
      * us if an exception was thrown.
      */
     /* [opt] execute-inline/range {vCCCC..v(CCCC+AA-1)}, inline@BBBB */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     FETCH(r10, 1)                       @ r10<- BBBB
     EXPORT_PC()                         @ can throw
     ands    r2, #kSubModeDebugProfile   @ Any going on?
diff --git a/vm/mterp/armv5te/OP_INVOKE_OBJECT_INIT_RANGE.S b/vm/mterp/armv5te/OP_INVOKE_OBJECT_INIT_RANGE.S
index cdb98df..fb0e657 100644
--- a/vm/mterp/armv5te/OP_INVOKE_OBJECT_INIT_RANGE.S
+++ b/vm/mterp/armv5te/OP_INVOKE_OBJECT_INIT_RANGE.S
@@ -15,7 +15,7 @@
     tst     r2, #CLASS_ISFINALIZABLE    @ is this class finalizable?
     bne     .L${opcode}_setFinal        @ yes, go
 .L${opcode}_finish:
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeDebuggerActive @ debugger active?
     bne     .L${opcode}_debugger        @ Yes - skip optimization
     FETCH_ADVANCE_INST(${cccc}+1)       @ advance to next instr, load rINST
diff --git a/vm/mterp/armv5te/OP_INVOKE_STATIC.S b/vm/mterp/armv5te/OP_INVOKE_STATIC.S
index 66a5be3..a89db03 100644
--- a/vm/mterp/armv5te/OP_INVOKE_STATIC.S
+++ b/vm/mterp/armv5te/OP_INVOKE_STATIC.S
@@ -35,7 +35,7 @@
      * we need to keep this instruction out of it.
      * r10: &resolved_methodToCall
      */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown            @ null, handle exception
     ands    r2, #kSubModeJitTraceBuild        @ trace under construction?
     beq     common_invokeMethod${routine}     @ no (r0=method, r9="this")
diff --git a/vm/mterp/armv5te/OP_INVOKE_STATIC_JUMBO.S b/vm/mterp/armv5te/OP_INVOKE_STATIC_JUMBO.S
index 1ff4152..171127d 100644
--- a/vm/mterp/armv5te/OP_INVOKE_STATIC_JUMBO.S
+++ b/vm/mterp/armv5te/OP_INVOKE_STATIC_JUMBO.S
@@ -32,7 +32,7 @@
      * we need to keep this instruction out of it.
      * r10: &resolved_methodToCall
      */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown            @ null, handle exception
     ands    r2, #kSubModeJitTraceBuild        @ trace under construction?
     beq     common_invokeMethodJumboNoThis    @ no (r0=method, r9="this")
diff --git a/vm/mterp/armv5te/OP_NEW_INSTANCE.S b/vm/mterp/armv5te/OP_NEW_INSTANCE.S
index eadd438..e280c4c 100644
--- a/vm/mterp/armv5te/OP_NEW_INSTANCE.S
+++ b/vm/mterp/armv5te/OP_NEW_INSTANCE.S
@@ -39,7 +39,7 @@
      * The JIT needs the class to be fully resolved before it can
      * include this instruction in a trace.
      */
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown      @ yes, handle the exception
     ands    r1, #kSubModeJitTraceBuild  @ under construction?
     bne     .L${opcode}_jitCheck
diff --git a/vm/mterp/armv5te/OP_NEW_INSTANCE_JUMBO.S b/vm/mterp/armv5te/OP_NEW_INSTANCE_JUMBO.S
index c5f7bdb..ce41bf3 100644
--- a/vm/mterp/armv5te/OP_NEW_INSTANCE_JUMBO.S
+++ b/vm/mterp/armv5te/OP_NEW_INSTANCE_JUMBO.S
@@ -41,7 +41,7 @@
      * The JIT needs the class to be fully resolved before it can
      * include this instruction in a trace.
      */
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown      @ yes, handle the exception
     ands    r1, #kSubModeJitTraceBuild  @ under construction?
     bne     .L${opcode}_jitCheck
diff --git a/vm/mterp/armv5te/footer.S b/vm/mterp/armv5te/footer.S
index 7a8ed04..f6c527f 100644
--- a/vm/mterp/armv5te/footer.S
+++ b/vm/mterp/armv5te/footer.S
@@ -485,7 +485,7 @@
  /* On entry: r0 is "Method* methodToCall, r9 is "this" */
 .LinvokeNewJumbo:
 #if defined(WITH_JIT)
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeJitTraceBuild
     blne    save_callsiteinfo
 #endif
@@ -507,7 +507,7 @@
 common_invokeMethodRange:
 .LinvokeNewRange:
 #if defined(WITH_JIT)
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeJitTraceBuild
     blne    save_callsiteinfo
 #endif
@@ -537,7 +537,7 @@
 common_invokeMethodNoRange:
 .LinvokeNewNoRange:
 #if defined(WITH_JIT)
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeJitTraceBuild
     blne    save_callsiteinfo
 #endif
@@ -587,7 +587,7 @@
     ldr     r9, [rSELF, #offThread_interpStackEnd]    @ r9<- interpStackEnd
     sub     r3, r10, r3, lsl #2         @ r3<- bottom (newsave - outsSize)
     cmp     r3, r9                      @ bottom < interpStackEnd?
-    ldrb    lr, [rSELF, #offThread_subMode]
+    ldrh    lr, [rSELF, #offThread_subMode]
     ldr     r3, [r0, #offMethod_accessFlags] @ r3<- methodToCall->accessFlags
     blo     .LstackOverflow             @ yes, this frame will overflow stack
 
@@ -666,7 +666,7 @@
 .LinvokeNative:
     @ Prep for the native call
     @ r0=methodToCall, r1=newFp, r10=newSaveArea
-    ldrb    lr, [rSELF, #offThread_subMode]
+    ldrh    lr, [rSELF, #offThread_subMode]
     ldr     r9, [rSELF, #offThread_jniLocal_topCookie]@r9<-thread->localRef->...
     str     r1, [rSELF, #offThread_curFrame]   @ curFrame = newFp
     str     r9, [r10, #offStackSaveArea_localRefCookie] @newFp->localRefCookie=top
@@ -774,7 +774,7 @@
  */
 common_returnFromMethod:
 .LreturnNew:
-    ldrb    lr, [rSELF, #offThread_subMode]
+    ldrh    lr, [rSELF, #offThread_subMode]
     SAVEAREA_FROM_FP(r0, rFP)
     ldr     r9, [r0, #offStackSaveArea_savedPc] @ r9 = saveArea->savedPc
     cmp     lr, #0                      @ any special subMode handling needed?
@@ -861,7 +861,7 @@
     mov     r1, rSELF                   @ r1<- self
     mov     r0, r9                      @ r0<- exception
     bl      dvmAddTrackedAlloc          @ don't let the exception be GCed
-    ldrb    r2, [rSELF, #offThread_subMode]  @ get subMode flags
+    ldrh    r2, [rSELF, #offThread_subMode]  @ get subMode flags
     mov     r3, #0                      @ r3<- NULL
     str     r3, [rSELF, #offThread_exception] @ self->exception = NULL
 
@@ -878,7 +878,7 @@
     ldr     r1, [rSELF, #offThread_method] @ r1<- self->method
     mov     r0, rSELF                   @ r0<- self
     ldr     r1, [r1, #offMethod_insns]  @ r1<- method->insns
-    ldrb    lr, [rSELF, #offThread_subMode]  @ lr<- subMode flags
+    ldrh    lr, [rSELF, #offThread_subMode]  @ lr<- subMode flags
     mov     r2, r9                      @ r2<- exception
     sub     r1, rPC, r1                 @ r1<- pc - method->insns
     mov     r1, r1, asr #1              @ r1<- offset in code units
@@ -996,7 +996,7 @@
      *     r0:  field pointer (must preserve)
      */
 common_verifyField:
-    ldrb    r3, [rSELF, #offThread_subMode]  @ r3 <- submode byte
+    ldrh    r3, [rSELF, #offThread_subMode]  @ r3 <- subMode flags
     ands    r3, #kSubModeJitTraceBuild
     bxeq    lr                          @ Not building trace, continue
     ldr     r1, [r10]                   @ r1<- reload resolved StaticField ptr
diff --git a/vm/mterp/common/asm-constants.h b/vm/mterp/common/asm-constants.h
index 5b9b006..f0a1a32 100644
--- a/vm/mterp/common/asm-constants.h
+++ b/vm/mterp/common/asm-constants.h
@@ -168,46 +168,44 @@
 MTERP_OFFSET(offThread_retval_j,          Thread, retval.j, 32)
 MTERP_OFFSET(offThread_retval_l,          Thread, retval.l, 32)
 //40
-MTERP_OFFSET(offThread_breakFlags, \
-                               Thread, interpBreak.ctl.breakFlags, 40)
 MTERP_OFFSET(offThread_subMode, \
-                               Thread, interpBreak.ctl.subMode, 41)
-MTERP_OFFSET(offThread_suspendCount, \
-                               Thread, interpBreak.ctl.suspendCount, 42)
-MTERP_OFFSET(offThread_dbgSuspendCount, \
-                               Thread, interpBreak.ctl.dbgSuspendCount, 43)
+                               Thread, interpBreak.ctl.subMode, 40)
+MTERP_OFFSET(offThread_breakFlags, \
+                               Thread, interpBreak.ctl.breakFlags, 42)
 MTERP_OFFSET(offThread_curHandlerTable, \
                                Thread, interpBreak.ctl.curHandlerTable, 44)
-MTERP_OFFSET(offThread_cardTable,         Thread, cardTable, 48)
-MTERP_OFFSET(offThread_interpStackEnd,    Thread, interpStackEnd, 52)
-MTERP_OFFSET(offThread_exception,         Thread, exception, 60)
-MTERP_OFFSET(offThread_debugIsMethodEntry, Thread, debugIsMethodEntry, 64)
-MTERP_OFFSET(offThread_interpStackSize,   Thread, interpStackSize, 68)
-MTERP_OFFSET(offThread_stackOverflowed,   Thread, stackOverflowed, 72)
-MTERP_OFFSET(offThread_mainHandlerTable,  Thread, mainHandlerTable, 80)
-MTERP_OFFSET(offThread_singleStepCount,   Thread, singleStepCount, 88)
+MTERP_OFFSET(offThread_suspendCount,      Thread, suspendCount, 48)
+MTERP_OFFSET(offThread_dbgSuspendCount,   Thread, dbgSuspendCount, 52)
+MTERP_OFFSET(offThread_cardTable,         Thread, cardTable, 56)
+MTERP_OFFSET(offThread_interpStackEnd,    Thread, interpStackEnd, 60)
+MTERP_OFFSET(offThread_exception,         Thread, exception, 68)
+MTERP_OFFSET(offThread_debugIsMethodEntry, Thread, debugIsMethodEntry, 72)
+MTERP_OFFSET(offThread_interpStackSize,   Thread, interpStackSize, 76)
+MTERP_OFFSET(offThread_stackOverflowed,   Thread, stackOverflowed, 80)
+MTERP_OFFSET(offThread_mainHandlerTable,  Thread, mainHandlerTable, 88)
+MTERP_OFFSET(offThread_singleStepCount,   Thread, singleStepCount, 96)
 
 #ifdef WITH_JIT
-MTERP_OFFSET(offThread_jitToInterpEntries,Thread, jitToInterpEntries, 92)
-MTERP_OFFSET(offThread_inJitCodeCache,    Thread, inJitCodeCache, 116)
-MTERP_OFFSET(offThread_pJitProfTable,     Thread, pJitProfTable, 120)
-MTERP_OFFSET(offThread_jitThreshold,      Thread, jitThreshold, 124)
-MTERP_OFFSET(offThread_jitResumeNPC,      Thread, jitResumeNPC, 128)
-MTERP_OFFSET(offThread_jitResumeNSP,      Thread, jitResumeNSP, 132)
-MTERP_OFFSET(offThread_jitResumeDPC,      Thread, jitResumeDPC, 136)
-MTERP_OFFSET(offThread_jitState,          Thread, jitState, 140)
-MTERP_OFFSET(offThread_icRechainCount,    Thread, icRechainCount, 144)
-MTERP_OFFSET(offThread_pProfileCountdown, Thread, pProfileCountdown, 148)
-MTERP_OFFSET(offThread_callsiteClass,     Thread, callsiteClass, 152)
-MTERP_OFFSET(offThread_methodToCall,      Thread, methodToCall, 156)
+MTERP_OFFSET(offThread_jitToInterpEntries,Thread, jitToInterpEntries, 100)
+MTERP_OFFSET(offThread_inJitCodeCache,    Thread, inJitCodeCache, 124)
+MTERP_OFFSET(offThread_pJitProfTable,     Thread, pJitProfTable, 128)
+MTERP_OFFSET(offThread_jitThreshold,      Thread, jitThreshold, 132)
+MTERP_OFFSET(offThread_jitResumeNPC,      Thread, jitResumeNPC, 136)
+MTERP_OFFSET(offThread_jitResumeNSP,      Thread, jitResumeNSP, 140)
+MTERP_OFFSET(offThread_jitResumeDPC,      Thread, jitResumeDPC, 144)
+MTERP_OFFSET(offThread_jitState,          Thread, jitState, 148)
+MTERP_OFFSET(offThread_icRechainCount,    Thread, icRechainCount, 152)
+MTERP_OFFSET(offThread_pProfileCountdown, Thread, pProfileCountdown, 156)
+MTERP_OFFSET(offThread_callsiteClass,     Thread, callsiteClass, 160)
+MTERP_OFFSET(offThread_methodToCall,      Thread, methodToCall, 164)
 MTERP_OFFSET(offThread_jniLocal_topCookie, \
-                                Thread, jniLocalRefTable.segmentState.all, 160)
+                                Thread, jniLocalRefTable.segmentState.all, 168)
 #if defined(WITH_SELF_VERIFICATION)
-MTERP_OFFSET(offThread_shadowSpace,       Thread, shadowSpace, 184)
+MTERP_OFFSET(offThread_shadowSpace,       Thread, shadowSpace, 192)
 #endif
 #else
 MTERP_OFFSET(offThread_jniLocal_topCookie, \
-                                Thread, jniLocalRefTable.segmentState.all, 92)
+                                Thread, jniLocalRefTable.segmentState.all, 100)
 #endif
 
 /* Object fields */
diff --git a/vm/mterp/out/InterpAsm-armv5te-vfp.S b/vm/mterp/out/InterpAsm-armv5te-vfp.S
index 115a7d2..a7bb274 100644
--- a/vm/mterp/out/InterpAsm-armv5te-vfp.S
+++ b/vm/mterp/out/InterpAsm-armv5te-vfp.S
@@ -7297,7 +7297,7 @@
      * swap if profiler/debuggger active.
      */
     /* [opt] execute-inline vAA, {vC, vD, vE, vF}, inline@BBBB */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     FETCH(r10, 1)                       @ r10<- BBBB
     EXPORT_PC()                         @ can throw
     ands    r2, #kSubModeDebugProfile   @ Any going on?
@@ -7331,7 +7331,7 @@
      * us if an exception was thrown.
      */
     /* [opt] execute-inline/range {vCCCC..v(CCCC+AA-1)}, inline@BBBB */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     FETCH(r10, 1)                       @ r10<- BBBB
     EXPORT_PC()                         @ can throw
     ands    r2, #kSubModeDebugProfile   @ Any going on?
@@ -7367,7 +7367,7 @@
     tst     r2, #CLASS_ISFINALIZABLE    @ is this class finalizable?
     bne     .LOP_INVOKE_OBJECT_INIT_RANGE_setFinal        @ yes, go
 .LOP_INVOKE_OBJECT_INIT_RANGE_finish:
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeDebuggerActive @ debugger active?
     bne     .LOP_INVOKE_OBJECT_INIT_RANGE_debugger        @ Yes - skip optimization
     FETCH_ADVANCE_INST(2+1)       @ advance to next instr, load rINST
@@ -10432,7 +10432,7 @@
     tst     r2, #CLASS_ISFINALIZABLE    @ is this class finalizable?
     bne     .LOP_INVOKE_OBJECT_INIT_JUMBO_setFinal        @ yes, go
 .LOP_INVOKE_OBJECT_INIT_JUMBO_finish:
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeDebuggerActive @ debugger active?
     bne     .LOP_INVOKE_OBJECT_INIT_JUMBO_debugger        @ Yes - skip optimization
     FETCH_ADVANCE_INST(4+1)       @ advance to next instr, load rINST
@@ -10987,7 +10987,7 @@
      * The JIT needs the class to be fully resolved before it can
      * include this instruction in a trace.
      */
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown      @ yes, handle the exception
     ands    r1, #kSubModeJitTraceBuild  @ under construction?
     bne     .LOP_NEW_INSTANCE_jitCheck
@@ -12115,7 +12115,7 @@
      * we need to keep this instruction out of it.
      * r10: &resolved_methodToCall
      */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown            @ null, handle exception
     ands    r2, #kSubModeJitTraceBuild        @ trace under construction?
     beq     common_invokeMethodNoRange     @ no (r0=method, r9="this")
@@ -12215,7 +12215,7 @@
      * we need to keep this instruction out of it.
      * r10: &resolved_methodToCall
      */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown            @ null, handle exception
     ands    r2, #kSubModeJitTraceBuild        @ trace under construction?
     beq     common_invokeMethodRange     @ no (r0=method, r9="this")
@@ -12979,7 +12979,7 @@
      * The JIT needs the class to be fully resolved before it can
      * include this instruction in a trace.
      */
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown      @ yes, handle the exception
     ands    r1, #kSubModeJitTraceBuild  @ under construction?
     bne     .LOP_NEW_INSTANCE_JUMBO_jitCheck
@@ -14018,7 +14018,7 @@
      * we need to keep this instruction out of it.
      * r10: &resolved_methodToCall
      */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown            @ null, handle exception
     ands    r2, #kSubModeJitTraceBuild        @ trace under construction?
     beq     common_invokeMethodJumboNoThis    @ no (r0=method, r9="this")
@@ -26689,7 +26689,7 @@
  /* On entry: r0 is "Method* methodToCall, r9 is "this" */
 .LinvokeNewJumbo:
 #if defined(WITH_JIT)
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeJitTraceBuild
     blne    save_callsiteinfo
 #endif
@@ -26711,7 +26711,7 @@
 common_invokeMethodRange:
 .LinvokeNewRange:
 #if defined(WITH_JIT)
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeJitTraceBuild
     blne    save_callsiteinfo
 #endif
@@ -26741,7 +26741,7 @@
 common_invokeMethodNoRange:
 .LinvokeNewNoRange:
 #if defined(WITH_JIT)
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeJitTraceBuild
     blne    save_callsiteinfo
 #endif
@@ -26791,7 +26791,7 @@
     ldr     r9, [rSELF, #offThread_interpStackEnd]    @ r9<- interpStackEnd
     sub     r3, r10, r3, lsl #2         @ r3<- bottom (newsave - outsSize)
     cmp     r3, r9                      @ bottom < interpStackEnd?
-    ldrb    lr, [rSELF, #offThread_subMode]
+    ldrh    lr, [rSELF, #offThread_subMode]
     ldr     r3, [r0, #offMethod_accessFlags] @ r3<- methodToCall->accessFlags
     blo     .LstackOverflow             @ yes, this frame will overflow stack
 
@@ -26870,7 +26870,7 @@
 .LinvokeNative:
     @ Prep for the native call
     @ r0=methodToCall, r1=newFp, r10=newSaveArea
-    ldrb    lr, [rSELF, #offThread_subMode]
+    ldrh    lr, [rSELF, #offThread_subMode]
     ldr     r9, [rSELF, #offThread_jniLocal_topCookie]@r9<-thread->localRef->...
     str     r1, [rSELF, #offThread_curFrame]   @ curFrame = newFp
     str     r9, [r10, #offStackSaveArea_localRefCookie] @newFp->localRefCookie=top
@@ -26978,7 +26978,7 @@
  */
 common_returnFromMethod:
 .LreturnNew:
-    ldrb    lr, [rSELF, #offThread_subMode]
+    ldrh    lr, [rSELF, #offThread_subMode]
     SAVEAREA_FROM_FP(r0, rFP)
     ldr     r9, [r0, #offStackSaveArea_savedPc] @ r9 = saveArea->savedPc
     cmp     lr, #0                      @ any special subMode handling needed?
@@ -27065,7 +27065,7 @@
     mov     r1, rSELF                   @ r1<- self
     mov     r0, r9                      @ r0<- exception
     bl      dvmAddTrackedAlloc          @ don't let the exception be GCed
-    ldrb    r2, [rSELF, #offThread_subMode]  @ get subMode flags
+    ldrh    r2, [rSELF, #offThread_subMode]  @ get subMode flags
     mov     r3, #0                      @ r3<- NULL
     str     r3, [rSELF, #offThread_exception] @ self->exception = NULL
 
@@ -27082,7 +27082,7 @@
     ldr     r1, [rSELF, #offThread_method] @ r1<- self->method
     mov     r0, rSELF                   @ r0<- self
     ldr     r1, [r1, #offMethod_insns]  @ r1<- method->insns
-    ldrb    lr, [rSELF, #offThread_subMode]  @ lr<- subMode flags
+    ldrh    lr, [rSELF, #offThread_subMode]  @ lr<- subMode flags
     mov     r2, r9                      @ r2<- exception
     sub     r1, rPC, r1                 @ r1<- pc - method->insns
     mov     r1, r1, asr #1              @ r1<- offset in code units
@@ -27200,7 +27200,7 @@
      *     r0:  field pointer (must preserve)
      */
 common_verifyField:
-    ldrb    r3, [rSELF, #offThread_subMode]  @ r3 <- submode byte
+    ldrh    r3, [rSELF, #offThread_subMode]  @ r3 <- subMode flags
     ands    r3, #kSubModeJitTraceBuild
     bxeq    lr                          @ Not building trace, continue
     ldr     r1, [r10]                   @ r1<- reload resolved StaticField ptr
diff --git a/vm/mterp/out/InterpAsm-armv5te.S b/vm/mterp/out/InterpAsm-armv5te.S
index 665178b..8566997 100644
--- a/vm/mterp/out/InterpAsm-armv5te.S
+++ b/vm/mterp/out/InterpAsm-armv5te.S
@@ -7619,7 +7619,7 @@
      * swap if profiler/debuggger active.
      */
     /* [opt] execute-inline vAA, {vC, vD, vE, vF}, inline@BBBB */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     FETCH(r10, 1)                       @ r10<- BBBB
     EXPORT_PC()                         @ can throw
     ands    r2, #kSubModeDebugProfile   @ Any going on?
@@ -7653,7 +7653,7 @@
      * us if an exception was thrown.
      */
     /* [opt] execute-inline/range {vCCCC..v(CCCC+AA-1)}, inline@BBBB */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     FETCH(r10, 1)                       @ r10<- BBBB
     EXPORT_PC()                         @ can throw
     ands    r2, #kSubModeDebugProfile   @ Any going on?
@@ -7689,7 +7689,7 @@
     tst     r2, #CLASS_ISFINALIZABLE    @ is this class finalizable?
     bne     .LOP_INVOKE_OBJECT_INIT_RANGE_setFinal        @ yes, go
 .LOP_INVOKE_OBJECT_INIT_RANGE_finish:
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeDebuggerActive @ debugger active?
     bne     .LOP_INVOKE_OBJECT_INIT_RANGE_debugger        @ Yes - skip optimization
     FETCH_ADVANCE_INST(2+1)       @ advance to next instr, load rINST
@@ -10754,7 +10754,7 @@
     tst     r2, #CLASS_ISFINALIZABLE    @ is this class finalizable?
     bne     .LOP_INVOKE_OBJECT_INIT_JUMBO_setFinal        @ yes, go
 .LOP_INVOKE_OBJECT_INIT_JUMBO_finish:
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeDebuggerActive @ debugger active?
     bne     .LOP_INVOKE_OBJECT_INIT_JUMBO_debugger        @ Yes - skip optimization
     FETCH_ADVANCE_INST(4+1)       @ advance to next instr, load rINST
@@ -11309,7 +11309,7 @@
      * The JIT needs the class to be fully resolved before it can
      * include this instruction in a trace.
      */
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown      @ yes, handle the exception
     ands    r1, #kSubModeJitTraceBuild  @ under construction?
     bne     .LOP_NEW_INSTANCE_jitCheck
@@ -12573,7 +12573,7 @@
      * we need to keep this instruction out of it.
      * r10: &resolved_methodToCall
      */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown            @ null, handle exception
     ands    r2, #kSubModeJitTraceBuild        @ trace under construction?
     beq     common_invokeMethodNoRange     @ no (r0=method, r9="this")
@@ -12673,7 +12673,7 @@
      * we need to keep this instruction out of it.
      * r10: &resolved_methodToCall
      */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown            @ null, handle exception
     ands    r2, #kSubModeJitTraceBuild        @ trace under construction?
     beq     common_invokeMethodRange     @ no (r0=method, r9="this")
@@ -13437,7 +13437,7 @@
      * The JIT needs the class to be fully resolved before it can
      * include this instruction in a trace.
      */
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown      @ yes, handle the exception
     ands    r1, #kSubModeJitTraceBuild  @ under construction?
     bne     .LOP_NEW_INSTANCE_JUMBO_jitCheck
@@ -14476,7 +14476,7 @@
      * we need to keep this instruction out of it.
      * r10: &resolved_methodToCall
      */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown            @ null, handle exception
     ands    r2, #kSubModeJitTraceBuild        @ trace under construction?
     beq     common_invokeMethodJumboNoThis    @ no (r0=method, r9="this")
@@ -27147,7 +27147,7 @@
  /* On entry: r0 is "Method* methodToCall, r9 is "this" */
 .LinvokeNewJumbo:
 #if defined(WITH_JIT)
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeJitTraceBuild
     blne    save_callsiteinfo
 #endif
@@ -27169,7 +27169,7 @@
 common_invokeMethodRange:
 .LinvokeNewRange:
 #if defined(WITH_JIT)
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeJitTraceBuild
     blne    save_callsiteinfo
 #endif
@@ -27199,7 +27199,7 @@
 common_invokeMethodNoRange:
 .LinvokeNewNoRange:
 #if defined(WITH_JIT)
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeJitTraceBuild
     blne    save_callsiteinfo
 #endif
@@ -27249,7 +27249,7 @@
     ldr     r9, [rSELF, #offThread_interpStackEnd]    @ r9<- interpStackEnd
     sub     r3, r10, r3, lsl #2         @ r3<- bottom (newsave - outsSize)
     cmp     r3, r9                      @ bottom < interpStackEnd?
-    ldrb    lr, [rSELF, #offThread_subMode]
+    ldrh    lr, [rSELF, #offThread_subMode]
     ldr     r3, [r0, #offMethod_accessFlags] @ r3<- methodToCall->accessFlags
     blo     .LstackOverflow             @ yes, this frame will overflow stack
 
@@ -27328,7 +27328,7 @@
 .LinvokeNative:
     @ Prep for the native call
     @ r0=methodToCall, r1=newFp, r10=newSaveArea
-    ldrb    lr, [rSELF, #offThread_subMode]
+    ldrh    lr, [rSELF, #offThread_subMode]
     ldr     r9, [rSELF, #offThread_jniLocal_topCookie]@r9<-thread->localRef->...
     str     r1, [rSELF, #offThread_curFrame]   @ curFrame = newFp
     str     r9, [r10, #offStackSaveArea_localRefCookie] @newFp->localRefCookie=top
@@ -27436,7 +27436,7 @@
  */
 common_returnFromMethod:
 .LreturnNew:
-    ldrb    lr, [rSELF, #offThread_subMode]
+    ldrh    lr, [rSELF, #offThread_subMode]
     SAVEAREA_FROM_FP(r0, rFP)
     ldr     r9, [r0, #offStackSaveArea_savedPc] @ r9 = saveArea->savedPc
     cmp     lr, #0                      @ any special subMode handling needed?
@@ -27523,7 +27523,7 @@
     mov     r1, rSELF                   @ r1<- self
     mov     r0, r9                      @ r0<- exception
     bl      dvmAddTrackedAlloc          @ don't let the exception be GCed
-    ldrb    r2, [rSELF, #offThread_subMode]  @ get subMode flags
+    ldrh    r2, [rSELF, #offThread_subMode]  @ get subMode flags
     mov     r3, #0                      @ r3<- NULL
     str     r3, [rSELF, #offThread_exception] @ self->exception = NULL
 
@@ -27540,7 +27540,7 @@
     ldr     r1, [rSELF, #offThread_method] @ r1<- self->method
     mov     r0, rSELF                   @ r0<- self
     ldr     r1, [r1, #offMethod_insns]  @ r1<- method->insns
-    ldrb    lr, [rSELF, #offThread_subMode]  @ lr<- subMode flags
+    ldrh    lr, [rSELF, #offThread_subMode]  @ lr<- subMode flags
     mov     r2, r9                      @ r2<- exception
     sub     r1, rPC, r1                 @ r1<- pc - method->insns
     mov     r1, r1, asr #1              @ r1<- offset in code units
@@ -27658,7 +27658,7 @@
      *     r0:  field pointer (must preserve)
      */
 common_verifyField:
-    ldrb    r3, [rSELF, #offThread_subMode]  @ r3 <- submode byte
+    ldrh    r3, [rSELF, #offThread_subMode]  @ r3 <- subMode flags
     ands    r3, #kSubModeJitTraceBuild
     bxeq    lr                          @ Not building trace, continue
     ldr     r1, [r10]                   @ r1<- reload resolved StaticField ptr
diff --git a/vm/mterp/out/InterpAsm-armv7-a-neon.S b/vm/mterp/out/InterpAsm-armv7-a-neon.S
index e1f54fb..99bc0af 100644
--- a/vm/mterp/out/InterpAsm-armv7-a-neon.S
+++ b/vm/mterp/out/InterpAsm-armv7-a-neon.S
@@ -7255,7 +7255,7 @@
      * swap if profiler/debuggger active.
      */
     /* [opt] execute-inline vAA, {vC, vD, vE, vF}, inline@BBBB */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     FETCH(r10, 1)                       @ r10<- BBBB
     EXPORT_PC()                         @ can throw
     ands    r2, #kSubModeDebugProfile   @ Any going on?
@@ -7289,7 +7289,7 @@
      * us if an exception was thrown.
      */
     /* [opt] execute-inline/range {vCCCC..v(CCCC+AA-1)}, inline@BBBB */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     FETCH(r10, 1)                       @ r10<- BBBB
     EXPORT_PC()                         @ can throw
     ands    r2, #kSubModeDebugProfile   @ Any going on?
@@ -7325,7 +7325,7 @@
     tst     r2, #CLASS_ISFINALIZABLE    @ is this class finalizable?
     bne     .LOP_INVOKE_OBJECT_INIT_RANGE_setFinal        @ yes, go
 .LOP_INVOKE_OBJECT_INIT_RANGE_finish:
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeDebuggerActive @ debugger active?
     bne     .LOP_INVOKE_OBJECT_INIT_RANGE_debugger        @ Yes - skip optimization
     FETCH_ADVANCE_INST(2+1)       @ advance to next instr, load rINST
@@ -10386,7 +10386,7 @@
     tst     r2, #CLASS_ISFINALIZABLE    @ is this class finalizable?
     bne     .LOP_INVOKE_OBJECT_INIT_JUMBO_setFinal        @ yes, go
 .LOP_INVOKE_OBJECT_INIT_JUMBO_finish:
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeDebuggerActive @ debugger active?
     bne     .LOP_INVOKE_OBJECT_INIT_JUMBO_debugger        @ Yes - skip optimization
     FETCH_ADVANCE_INST(4+1)       @ advance to next instr, load rINST
@@ -10941,7 +10941,7 @@
      * The JIT needs the class to be fully resolved before it can
      * include this instruction in a trace.
      */
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown      @ yes, handle the exception
     ands    r1, #kSubModeJitTraceBuild  @ under construction?
     bne     .LOP_NEW_INSTANCE_jitCheck
@@ -12053,7 +12053,7 @@
      * we need to keep this instruction out of it.
      * r10: &resolved_methodToCall
      */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown            @ null, handle exception
     ands    r2, #kSubModeJitTraceBuild        @ trace under construction?
     beq     common_invokeMethodNoRange     @ no (r0=method, r9="this")
@@ -12153,7 +12153,7 @@
      * we need to keep this instruction out of it.
      * r10: &resolved_methodToCall
      */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown            @ null, handle exception
     ands    r2, #kSubModeJitTraceBuild        @ trace under construction?
     beq     common_invokeMethodRange     @ no (r0=method, r9="this")
@@ -12917,7 +12917,7 @@
      * The JIT needs the class to be fully resolved before it can
      * include this instruction in a trace.
      */
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown      @ yes, handle the exception
     ands    r1, #kSubModeJitTraceBuild  @ under construction?
     bne     .LOP_NEW_INSTANCE_JUMBO_jitCheck
@@ -13956,7 +13956,7 @@
      * we need to keep this instruction out of it.
      * r10: &resolved_methodToCall
      */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown            @ null, handle exception
     ands    r2, #kSubModeJitTraceBuild        @ trace under construction?
     beq     common_invokeMethodJumboNoThis    @ no (r0=method, r9="this")
@@ -26627,7 +26627,7 @@
  /* On entry: r0 is "Method* methodToCall, r9 is "this" */
 .LinvokeNewJumbo:
 #if defined(WITH_JIT)
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeJitTraceBuild
     blne    save_callsiteinfo
 #endif
@@ -26649,7 +26649,7 @@
 common_invokeMethodRange:
 .LinvokeNewRange:
 #if defined(WITH_JIT)
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeJitTraceBuild
     blne    save_callsiteinfo
 #endif
@@ -26679,7 +26679,7 @@
 common_invokeMethodNoRange:
 .LinvokeNewNoRange:
 #if defined(WITH_JIT)
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeJitTraceBuild
     blne    save_callsiteinfo
 #endif
@@ -26729,7 +26729,7 @@
     ldr     r9, [rSELF, #offThread_interpStackEnd]    @ r9<- interpStackEnd
     sub     r3, r10, r3, lsl #2         @ r3<- bottom (newsave - outsSize)
     cmp     r3, r9                      @ bottom < interpStackEnd?
-    ldrb    lr, [rSELF, #offThread_subMode]
+    ldrh    lr, [rSELF, #offThread_subMode]
     ldr     r3, [r0, #offMethod_accessFlags] @ r3<- methodToCall->accessFlags
     blo     .LstackOverflow             @ yes, this frame will overflow stack
 
@@ -26808,7 +26808,7 @@
 .LinvokeNative:
     @ Prep for the native call
     @ r0=methodToCall, r1=newFp, r10=newSaveArea
-    ldrb    lr, [rSELF, #offThread_subMode]
+    ldrh    lr, [rSELF, #offThread_subMode]
     ldr     r9, [rSELF, #offThread_jniLocal_topCookie]@r9<-thread->localRef->...
     str     r1, [rSELF, #offThread_curFrame]   @ curFrame = newFp
     str     r9, [r10, #offStackSaveArea_localRefCookie] @newFp->localRefCookie=top
@@ -26916,7 +26916,7 @@
  */
 common_returnFromMethod:
 .LreturnNew:
-    ldrb    lr, [rSELF, #offThread_subMode]
+    ldrh    lr, [rSELF, #offThread_subMode]
     SAVEAREA_FROM_FP(r0, rFP)
     ldr     r9, [r0, #offStackSaveArea_savedPc] @ r9 = saveArea->savedPc
     cmp     lr, #0                      @ any special subMode handling needed?
@@ -27003,7 +27003,7 @@
     mov     r1, rSELF                   @ r1<- self
     mov     r0, r9                      @ r0<- exception
     bl      dvmAddTrackedAlloc          @ don't let the exception be GCed
-    ldrb    r2, [rSELF, #offThread_subMode]  @ get subMode flags
+    ldrh    r2, [rSELF, #offThread_subMode]  @ get subMode flags
     mov     r3, #0                      @ r3<- NULL
     str     r3, [rSELF, #offThread_exception] @ self->exception = NULL
 
@@ -27020,7 +27020,7 @@
     ldr     r1, [rSELF, #offThread_method] @ r1<- self->method
     mov     r0, rSELF                   @ r0<- self
     ldr     r1, [r1, #offMethod_insns]  @ r1<- method->insns
-    ldrb    lr, [rSELF, #offThread_subMode]  @ lr<- subMode flags
+    ldrh    lr, [rSELF, #offThread_subMode]  @ lr<- subMode flags
     mov     r2, r9                      @ r2<- exception
     sub     r1, rPC, r1                 @ r1<- pc - method->insns
     mov     r1, r1, asr #1              @ r1<- offset in code units
@@ -27138,7 +27138,7 @@
      *     r0:  field pointer (must preserve)
      */
 common_verifyField:
-    ldrb    r3, [rSELF, #offThread_subMode]  @ r3 <- submode byte
+    ldrh    r3, [rSELF, #offThread_subMode]  @ r3 <- subMode flags
     ands    r3, #kSubModeJitTraceBuild
     bxeq    lr                          @ Not building trace, continue
     ldr     r1, [r10]                   @ r1<- reload resolved StaticField ptr
diff --git a/vm/mterp/out/InterpAsm-armv7-a.S b/vm/mterp/out/InterpAsm-armv7-a.S
index 6a36b6d..ae30fd2 100644
--- a/vm/mterp/out/InterpAsm-armv7-a.S
+++ b/vm/mterp/out/InterpAsm-armv7-a.S
@@ -7255,7 +7255,7 @@
      * swap if profiler/debuggger active.
      */
     /* [opt] execute-inline vAA, {vC, vD, vE, vF}, inline@BBBB */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     FETCH(r10, 1)                       @ r10<- BBBB
     EXPORT_PC()                         @ can throw
     ands    r2, #kSubModeDebugProfile   @ Any going on?
@@ -7289,7 +7289,7 @@
      * us if an exception was thrown.
      */
     /* [opt] execute-inline/range {vCCCC..v(CCCC+AA-1)}, inline@BBBB */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     FETCH(r10, 1)                       @ r10<- BBBB
     EXPORT_PC()                         @ can throw
     ands    r2, #kSubModeDebugProfile   @ Any going on?
@@ -7325,7 +7325,7 @@
     tst     r2, #CLASS_ISFINALIZABLE    @ is this class finalizable?
     bne     .LOP_INVOKE_OBJECT_INIT_RANGE_setFinal        @ yes, go
 .LOP_INVOKE_OBJECT_INIT_RANGE_finish:
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeDebuggerActive @ debugger active?
     bne     .LOP_INVOKE_OBJECT_INIT_RANGE_debugger        @ Yes - skip optimization
     FETCH_ADVANCE_INST(2+1)       @ advance to next instr, load rINST
@@ -10386,7 +10386,7 @@
     tst     r2, #CLASS_ISFINALIZABLE    @ is this class finalizable?
     bne     .LOP_INVOKE_OBJECT_INIT_JUMBO_setFinal        @ yes, go
 .LOP_INVOKE_OBJECT_INIT_JUMBO_finish:
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeDebuggerActive @ debugger active?
     bne     .LOP_INVOKE_OBJECT_INIT_JUMBO_debugger        @ Yes - skip optimization
     FETCH_ADVANCE_INST(4+1)       @ advance to next instr, load rINST
@@ -10941,7 +10941,7 @@
      * The JIT needs the class to be fully resolved before it can
      * include this instruction in a trace.
      */
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown      @ yes, handle the exception
     ands    r1, #kSubModeJitTraceBuild  @ under construction?
     bne     .LOP_NEW_INSTANCE_jitCheck
@@ -12053,7 +12053,7 @@
      * we need to keep this instruction out of it.
      * r10: &resolved_methodToCall
      */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown            @ null, handle exception
     ands    r2, #kSubModeJitTraceBuild        @ trace under construction?
     beq     common_invokeMethodNoRange     @ no (r0=method, r9="this")
@@ -12153,7 +12153,7 @@
      * we need to keep this instruction out of it.
      * r10: &resolved_methodToCall
      */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown            @ null, handle exception
     ands    r2, #kSubModeJitTraceBuild        @ trace under construction?
     beq     common_invokeMethodRange     @ no (r0=method, r9="this")
@@ -12917,7 +12917,7 @@
      * The JIT needs the class to be fully resolved before it can
      * include this instruction in a trace.
      */
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown      @ yes, handle the exception
     ands    r1, #kSubModeJitTraceBuild  @ under construction?
     bne     .LOP_NEW_INSTANCE_JUMBO_jitCheck
@@ -13956,7 +13956,7 @@
      * we need to keep this instruction out of it.
      * r10: &resolved_methodToCall
      */
-    ldrb    r2, [rSELF, #offThread_subMode]
+    ldrh    r2, [rSELF, #offThread_subMode]
     beq     common_exceptionThrown            @ null, handle exception
     ands    r2, #kSubModeJitTraceBuild        @ trace under construction?
     beq     common_invokeMethodJumboNoThis    @ no (r0=method, r9="this")
@@ -26627,7 +26627,7 @@
  /* On entry: r0 is "Method* methodToCall, r9 is "this" */
 .LinvokeNewJumbo:
 #if defined(WITH_JIT)
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeJitTraceBuild
     blne    save_callsiteinfo
 #endif
@@ -26649,7 +26649,7 @@
 common_invokeMethodRange:
 .LinvokeNewRange:
 #if defined(WITH_JIT)
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeJitTraceBuild
     blne    save_callsiteinfo
 #endif
@@ -26679,7 +26679,7 @@
 common_invokeMethodNoRange:
 .LinvokeNewNoRange:
 #if defined(WITH_JIT)
-    ldrb    r1, [rSELF, #offThread_subMode]
+    ldrh    r1, [rSELF, #offThread_subMode]
     ands    r1, #kSubModeJitTraceBuild
     blne    save_callsiteinfo
 #endif
@@ -26729,7 +26729,7 @@
     ldr     r9, [rSELF, #offThread_interpStackEnd]    @ r9<- interpStackEnd
     sub     r3, r10, r3, lsl #2         @ r3<- bottom (newsave - outsSize)
     cmp     r3, r9                      @ bottom < interpStackEnd?
-    ldrb    lr, [rSELF, #offThread_subMode]
+    ldrh    lr, [rSELF, #offThread_subMode]
     ldr     r3, [r0, #offMethod_accessFlags] @ r3<- methodToCall->accessFlags
     blo     .LstackOverflow             @ yes, this frame will overflow stack
 
@@ -26808,7 +26808,7 @@
 .LinvokeNative:
     @ Prep for the native call
     @ r0=methodToCall, r1=newFp, r10=newSaveArea
-    ldrb    lr, [rSELF, #offThread_subMode]
+    ldrh    lr, [rSELF, #offThread_subMode]
     ldr     r9, [rSELF, #offThread_jniLocal_topCookie]@r9<-thread->localRef->...
     str     r1, [rSELF, #offThread_curFrame]   @ curFrame = newFp
     str     r9, [r10, #offStackSaveArea_localRefCookie] @newFp->localRefCookie=top
@@ -26916,7 +26916,7 @@
  */
 common_returnFromMethod:
 .LreturnNew:
-    ldrb    lr, [rSELF, #offThread_subMode]
+    ldrh    lr, [rSELF, #offThread_subMode]
     SAVEAREA_FROM_FP(r0, rFP)
     ldr     r9, [r0, #offStackSaveArea_savedPc] @ r9 = saveArea->savedPc
     cmp     lr, #0                      @ any special subMode handling needed?
@@ -27003,7 +27003,7 @@
     mov     r1, rSELF                   @ r1<- self
     mov     r0, r9                      @ r0<- exception
     bl      dvmAddTrackedAlloc          @ don't let the exception be GCed
-    ldrb    r2, [rSELF, #offThread_subMode]  @ get subMode flags
+    ldrh    r2, [rSELF, #offThread_subMode]  @ get subMode flags
     mov     r3, #0                      @ r3<- NULL
     str     r3, [rSELF, #offThread_exception] @ self->exception = NULL
 
@@ -27020,7 +27020,7 @@
     ldr     r1, [rSELF, #offThread_method] @ r1<- self->method
     mov     r0, rSELF                   @ r0<- self
     ldr     r1, [r1, #offMethod_insns]  @ r1<- method->insns
-    ldrb    lr, [rSELF, #offThread_subMode]  @ lr<- subMode flags
+    ldrh    lr, [rSELF, #offThread_subMode]  @ lr<- subMode flags
     mov     r2, r9                      @ r2<- exception
     sub     r1, rPC, r1                 @ r1<- pc - method->insns
     mov     r1, r1, asr #1              @ r1<- offset in code units
@@ -27138,7 +27138,7 @@
      *     r0:  field pointer (must preserve)
      */
 common_verifyField:
-    ldrb    r3, [rSELF, #offThread_subMode]  @ r3 <- submode byte
+    ldrh    r3, [rSELF, #offThread_subMode]  @ r3 <- submode halfword
     ands    r3, #kSubModeJitTraceBuild
     bxeq    lr                          @ Not building trace, continue
     ldr     r1, [r10]                   @ r1<- reload resolved StaticField ptr
diff --git a/vm/mterp/out/InterpAsm-x86.S b/vm/mterp/out/InterpAsm-x86.S
index 5ae3fc3..63fa3ce 100644
--- a/vm/mterp/out/InterpAsm-x86.S
+++ b/vm/mterp/out/InterpAsm-x86.S
@@ -26306,7 +26306,7 @@
     movl        rPC, offStackSaveArea_savedPc(%edx) # newSaveArea->savedPc<- rPC
 
     /* Any special actions to take? */
-    cmpb        $0, offThread_subMode(%ecx)
+    cmpw        $0, offThread_subMode(%ecx)
     jne         2f                     # Yes - handle them
 1:
     testl       $ACC_NATIVE, offMethod_accessFlags(%eax) # check for native call
@@ -26358,7 +26358,7 @@
     movl        %edx, LOCAL2_OFFSET(%ebp)  # save newSaveArea
     movl        LOCAL1_OFFSET(%ebp), rINST # rINST<- newFP
     movl        rINST, offThread_curFrame(%ecx)  # curFrame<- newFP
-    cmpb        $0, offThread_subMode(%ecx)  # Anything special going on?
+    cmpw        $0, offThread_subMode(%ecx)  # Anything special going on?
     jne         11f                     # yes - handle it
     movl        %ecx, OUT_ARG3(%esp)    # push parameter self
     movl        %eax, OUT_ARG2(%esp)    # push parameter methodToCall
@@ -26424,7 +26424,7 @@
     movl    rSELF,%ecx
     SAVEAREA_FROM_FP %eax                         # eax<- saveArea (old)
     movl    offStackSaveArea_prevFrame(%eax),rFP  # rFP<- prevFrame
-    cmpb    $0, offThread_subMode(%ecx)          # special action needed?
+    cmpw    $0, offThread_subMode(%ecx)          # special action needed?
     jne     19f                                   # go if so
 14:
     movl    (offStackSaveArea_method-sizeofStackSaveArea)(rFP),rINST
diff --git a/vm/mterp/x86/footer.S b/vm/mterp/x86/footer.S
index c9a88c1..3f09adf 100644
--- a/vm/mterp/x86/footer.S
+++ b/vm/mterp/x86/footer.S
@@ -375,7 +375,7 @@
     movl        rPC, offStackSaveArea_savedPc(%edx) # newSaveArea->savedPc<- rPC
 
     /* Any special actions to take? */
-    cmpb        $$0, offThread_subMode(%ecx)
+    cmpw        $$0, offThread_subMode(%ecx)
     jne         2f                     # Yes - handle them
 1:
     testl       $$ACC_NATIVE, offMethod_accessFlags(%eax) # check for native call
@@ -427,7 +427,7 @@
     movl        %edx, LOCAL2_OFFSET(%ebp)  # save newSaveArea
     movl        LOCAL1_OFFSET(%ebp), rINST # rINST<- newFP
     movl        rINST, offThread_curFrame(%ecx)  # curFrame<- newFP
-    cmpb        $$0, offThread_subMode(%ecx)  # Anything special going on?
+    cmpw        $$0, offThread_subMode(%ecx)  # Anything special going on?
     jne         11f                     # yes - handle it
     movl        %ecx, OUT_ARG3(%esp)    # push parameter self
     movl        %eax, OUT_ARG2(%esp)    # push parameter methodToCall
@@ -493,7 +493,7 @@
     movl    rSELF,%ecx
     SAVEAREA_FROM_FP %eax                         # eax<- saveArea (old)
     movl    offStackSaveArea_prevFrame(%eax),rFP  # rFP<- prevFrame
-    cmpb    $$0, offThread_subMode(%ecx)          # special action needed?
+    cmpw    $$0, offThread_subMode(%ecx)          # special action needed?
     jne     19f                                   # go if so
 14:
     movl    (offStackSaveArea_method-sizeofStackSaveArea)(rFP),rINST
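
Note on the load-width changes above: with subMode widened to 16 bits, the
byte-wide accesses (ldrb on ARM, cmpb on x86) have to become halfword
accesses (ldrh/cmpw), since a byte read only sees the low eight bits of the
field and would silently drop any subMode flag placed at bit 8 or above.
The standalone C++ sketch below illustrates that failure mode; it is not
part of this CL, and the flag value 0x0100 is hypothetical rather than one
of the real kSubMode* constants.

    // Illustration only: why an 8-bit read of a 16-bit subMode word misses
    // flags above bit 7 (the ldrb/cmpb analogy assumes little-endian, as on
    // the ARM and x86 targets touched in this CL).
    #include <cstdint>
    #include <cassert>

    int main() {
        uint16_t subMode  = 0x0100;            // hypothetical flag in the high byte
        uint8_t  byteView = (uint8_t)subMode;  // what a byte-wide access would see
        assert((subMode & 0x0100) != 0);       // 16-bit access (ldrh/cmpw) sees the flag
        assert(byteView == 0);                 // byte access (ldrb/cmpb) does not
        return 0;
    }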