Rework common_periodicChecks.
The function was rewritten to optimize the common path. The control flow
now matches the C version, which tests for debugger/profiler even if the
previous test for suspension came up true.
This also adds a minor optimization on the test for debugger attachment,
allowing us to skip a load from memory if the process is simply not
debuggable. (The optimization isn't yet enabled because a similar change
must be made to the x86 asm code.)
The VM apparently hadn't been built without debugging/profiling support
for a while, so this fixes those places (necessary to be able to test
all forms of the new code).
Bug 2634642.
Change-Id: I096b58c961bb73ee0d128ba776d68dbf29bba924
diff --git a/vm/compiler/Compiler.c b/vm/compiler/Compiler.c
index 9964285..649f6f0 100644
--- a/vm/compiler/Compiler.c
+++ b/vm/compiler/Compiler.c
@@ -724,7 +724,16 @@
dvmLockMutex(&gDvmJit.tableLock);
jitActive = gDvmJit.pProfTable != NULL;
+
+#if defined(WITH_DEBUGGER) && defined(WITH_PROFILER)
jitActivate = !(gDvm.debuggerActive || (gDvm.activeProfilers > 0));
+#elif defined(WITH_DEBUGGER)
+ jitActivate = !gDvm.debuggerActive;
+#elif defined(WITH_PROFILER)
+ jitActivate = !(gDvm.activeProfilers > 0);
+#else
+ jitActivate = true;
+#endif
if (jitActivate && !jitActive) {
gDvmJit.pProfTable = gDvmJit.pProfTableCopy;
diff --git a/vm/interp/InterpDefs.h b/vm/interp/InterpDefs.h
index 826c6d2..ac667cd 100644
--- a/vm/interp/InterpDefs.h
+++ b/vm/interp/InterpDefs.h
@@ -124,12 +124,16 @@
* These are available globally, from gDvm, or from another glue field
* (self/method). They're copied in here for speed.
*/
+ /* copy of self->interpStackEnd */
const u1* interpStackEnd;
+ /* points at self->suspendCount */
volatile int* pSelfSuspendCount;
#if defined(WITH_DEBUGGER)
+ /* points at gDvm.debuggerActive, or NULL if debugger not enabled */
volatile u1* pDebuggerActive;
#endif
#if defined(WITH_PROFILER)
+ /* points at gDvm.activeProfilers */
volatile int* pActiveProfilers;
#endif
/* ----------------------------------------------------------------------
diff --git a/vm/mterp/Mterp.c b/vm/mterp/Mterp.c
index ca2ca16..36f1e1c 100644
--- a/vm/mterp/Mterp.c
+++ b/vm/mterp/Mterp.c
@@ -87,7 +87,13 @@
glue->jitThreshold = gDvmJit.threshold;
#endif
#if defined(WITH_DEBUGGER)
- glue->pDebuggerActive = &gDvm.debuggerActive;
+ if (gDvm.jdwpConfigured) {
+ glue->pDebuggerActive = &gDvm.debuggerActive;
+ } else {
+ /* TODO: fix x86 impl before enabling this */
+ //glue->pDebuggerActive = NULL;
+ glue->pDebuggerActive = &gDvm.debuggerActive;
+ }
#endif
#if defined(WITH_PROFILER)
glue->pActiveProfilers = &gDvm.activeProfilers;
diff --git a/vm/mterp/armv5te/footer.S b/vm/mterp/armv5te/footer.S
index 50071ec..4e935cd 100644
--- a/vm/mterp/armv5te/footer.S
+++ b/vm/mterp/armv5te/footer.S
@@ -357,6 +357,11 @@
/*
* Common code when a backward branch is taken.
*
+ * TODO: we could avoid a branch by just setting r0 and falling through
+ * into the common_periodicChecks code, and having a test on r0 at the
+ * end determine if we should return to the caller or update & branch to
+ * the next instr.
+ *
* On entry:
* r9 is PC adjustment *in bytes*
*/
@@ -379,23 +384,25 @@
/*
* Need to see if the thread needs to be suspended or debugger/profiler
- * activity has begun.
+ * activity has begun. If so, we suspend the thread or side-exit to
+ * the debug interpreter as appropriate.
*
- * TODO: if JDWP isn't running, zero out pDebuggerActive pointer so we don't
- * have to do the second ldr.
+ * The common case is no activity on any of these, so we want to figure
+ * that out quickly. If something is up, we can then sort out what.
+ *
+ * We want to be fast if the VM was built without debugger or profiler
+ * support, but we also need to recognize that the system is usually
+ * shipped with both of these enabled.
*
* TODO: reduce this so we're just checking a single location.
*
* On entry:
- * r0 is reentry type, e.g. kInterpEntryInstr
+ * r0 is reentry type, e.g. kInterpEntryInstr (for debugger/profiling)
* r9 is trampoline PC adjustment *in bytes*
*/
common_periodicChecks:
ldr r3, [rGLUE, #offGlue_pSelfSuspendCount] @ r3<- &suspendCount
- @ speculatively store r0 before it is clobbered by dvmCheckSuspendPending
- str r0, [rGLUE, #offGlue_entryPoint]
-
#if defined(WITH_DEBUGGER)
ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
#endif
@@ -403,33 +410,42 @@
ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
#endif
- ldr r3, [r3] @ r3<- suspendCount (int)
+ ldr ip, [r3] @ ip<- suspendCount (int)
-#if defined(WITH_DEBUGGER)
- ldrb r1, [r1] @ r1<- debuggerActive (boolean)
-#endif
-#if defined (WITH_PROFILER)
+#if defined(WITH_DEBUGGER) && defined(WITH_PROFILER)
+ cmp r1, #0 @ debugger enabled?
+ ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
ldr r2, [r2] @ r2<- activeProfilers (int)
+ orrne ip, ip, r1 @ ip<- suspendCount | debuggerActive
+ orrs ip, ip, r2 @ ip<- suspend|debugger|profiler; set Z
+#elif defined(WITH_DEBUGGER)
+ cmp r1, #0 @ debugger enabled?
+ ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+ orrsne ip, ip, r1 @ yes, ip<- suspend | debugger; set Z
+ @ (if not enabled, Z was set by test for r1==0, which is what we want)
+#elif defined (WITH_PROFILER)
+ ldr r2, [r2] @ r2<- activeProfilers (int)
+ orrs ip, ip, r2 @ ip<- suspendCount | activeProfilers
+#else
+ cmp ip, #0 @ not ORing anything in; set Z
#endif
- cmp r3, #0 @ suspend pending?
- bne 2f @ yes, do full suspension check
+ bxeq lr @ all zero, return
+ /*
+ * One or more interesting events have happened. Figure out what.
+ *
+ * If debugging or profiling are compiled in, we need to disambiguate.
+ *
+ * r0 still holds the reentry type.
+ */
#if defined(WITH_DEBUGGER) || defined(WITH_PROFILER)
-# if defined(WITH_DEBUGGER) && defined(WITH_PROFILER)
- orrs r1, r1, r2 @ r1<- r1 | r2
- cmp r1, #0 @ debugger attached or profiler started?
-# elif defined(WITH_DEBUGGER)
- cmp r1, #0 @ debugger attached?
-# elif defined(WITH_PROFILER)
- cmp r2, #0 @ profiler started?
-# endif
- bne 3f @ debugger/profiler, switch interp
+ ldr ip, [r3] @ ip<- suspendCount (int)
+ cmp ip, #0 @ want suspend?
+ beq 1f @ no, must be debugger/profiler
#endif
- bx lr @ nothing to do, return
-
-2: @ check suspend
+ stmfd sp!, {r0, lr} @ preserve r0 and lr
#if defined(WITH_JIT)
/*
* Refresh the Jit's cached copy of profile table pointer. This pointer
@@ -444,12 +460,45 @@
ldr r0, [rGLUE, #offGlue_self] @ r0<- glue->self
EXPORT_PC() @ need for precise GC
#endif
- b dvmCheckSuspendPending @ suspend if necessary, then return
+ bl dvmCheckSuspendPending @ do full check, suspend if necessary
+ ldmfd sp!, {r0, lr} @ restore r0 and lr
-3: @ debugger/profiler enabled, bail out
+#if defined(WITH_DEBUGGER) || defined(WITH_PROFILER)
+
+ /*
+ * Reload the debugger/profiler enable flags. We're checking to see
+ * if either of these got set while we were suspended.
+ *
+ * We can't really avoid the #ifdefs here, because the fields don't
+ * exist when the feature is disabled.
+ */
+#if defined(WITH_DEBUGGER)
+ ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
+ cmp r1, #0 @ debugger enabled?
+ ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+#else
+ mov r1, #0
+#endif
+#if defined(WITH_PROFILER)
+ ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
+ ldr r2, [r2] @ r2<- activeProfilers (int)
+#else
+ mov r2, #0
+#endif
+
+ orrs r1, r1, r2
+ beq 2f
+
+1: @ debugger/profiler enabled, bail out; glue->entryPoint is set just below
+ str r0, [rGLUE, #offGlue_entryPoint] @ store r0, need for debug/prof
add rPC, rPC, r9 @ update rPC
mov r1, #1 @ "want switch" = true
- b common_gotoBail
+ b common_gotoBail @ side exit
+
+#endif /*WITH_DEBUGGER || WITH_PROFILER*/
+
+2:
+ bx lr @ nothing to do, return
/*
diff --git a/vm/mterp/out/InterpAsm-armv4t.S b/vm/mterp/out/InterpAsm-armv4t.S
index f7f1a69..2888ec5 100644
--- a/vm/mterp/out/InterpAsm-armv4t.S
+++ b/vm/mterp/out/InterpAsm-armv4t.S
@@ -10079,6 +10079,11 @@
/*
* Common code when a backward branch is taken.
*
+ * TODO: we could avoid a branch by just setting r0 and falling through
+ * into the common_periodicChecks code, and having a test on r0 at the
+ * end determine if we should return to the caller or update & branch to
+ * the next instr.
+ *
* On entry:
* r9 is PC adjustment *in bytes*
*/
@@ -10101,23 +10106,25 @@
/*
* Need to see if the thread needs to be suspended or debugger/profiler
- * activity has begun.
+ * activity has begun. If so, we suspend the thread or side-exit to
+ * the debug interpreter as appropriate.
*
- * TODO: if JDWP isn't running, zero out pDebuggerActive pointer so we don't
- * have to do the second ldr.
+ * The common case is no activity on any of these, so we want to figure
+ * that out quickly. If something is up, we can then sort out what.
+ *
+ * We want to be fast if the VM was built without debugger or profiler
+ * support, but we also need to recognize that the system is usually
+ * shipped with both of these enabled.
*
* TODO: reduce this so we're just checking a single location.
*
* On entry:
- * r0 is reentry type, e.g. kInterpEntryInstr
+ * r0 is reentry type, e.g. kInterpEntryInstr (for debugger/profiling)
* r9 is trampoline PC adjustment *in bytes*
*/
common_periodicChecks:
ldr r3, [rGLUE, #offGlue_pSelfSuspendCount] @ r3<- &suspendCount
- @ speculatively store r0 before it is clobbered by dvmCheckSuspendPending
- str r0, [rGLUE, #offGlue_entryPoint]
-
#if defined(WITH_DEBUGGER)
ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
#endif
@@ -10125,33 +10132,42 @@
ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
#endif
- ldr r3, [r3] @ r3<- suspendCount (int)
+ ldr ip, [r3] @ ip<- suspendCount (int)
-#if defined(WITH_DEBUGGER)
- ldrb r1, [r1] @ r1<- debuggerActive (boolean)
-#endif
-#if defined (WITH_PROFILER)
+#if defined(WITH_DEBUGGER) && defined(WITH_PROFILER)
+ cmp r1, #0 @ debugger enabled?
+ ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
ldr r2, [r2] @ r2<- activeProfilers (int)
+ orrne ip, ip, r1 @ ip<- suspendCount | debuggerActive
+ orrs ip, ip, r2 @ ip<- suspend|debugger|profiler; set Z
+#elif defined(WITH_DEBUGGER)
+ cmp r1, #0 @ debugger enabled?
+ ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+ orrsne ip, ip, r1 @ yes, ip<- suspend | debugger; set Z
+ @ (if not enabled, Z was set by test for r1==0, which is what we want)
+#elif defined (WITH_PROFILER)
+ ldr r2, [r2] @ r2<- activeProfilers (int)
+ orrs ip, ip, r2 @ ip<- suspendCount | activeProfilers
+#else
+ cmp ip, #0 @ not ORing anything in; set Z
#endif
- cmp r3, #0 @ suspend pending?
- bne 2f @ yes, do full suspension check
+ bxeq lr @ all zero, return
+ /*
+ * One or more interesting events have happened. Figure out what.
+ *
+ * If debugging or profiling are compiled in, we need to disambiguate.
+ *
+ * r0 still holds the reentry type.
+ */
#if defined(WITH_DEBUGGER) || defined(WITH_PROFILER)
-# if defined(WITH_DEBUGGER) && defined(WITH_PROFILER)
- orrs r1, r1, r2 @ r1<- r1 | r2
- cmp r1, #0 @ debugger attached or profiler started?
-# elif defined(WITH_DEBUGGER)
- cmp r1, #0 @ debugger attached?
-# elif defined(WITH_PROFILER)
- cmp r2, #0 @ profiler started?
-# endif
- bne 3f @ debugger/profiler, switch interp
+ ldr ip, [r3] @ ip<- suspendCount (int)
+ cmp ip, #0 @ want suspend?
+ beq 1f @ no, must be debugger/profiler
#endif
- bx lr @ nothing to do, return
-
-2: @ check suspend
+ stmfd sp!, {r0, lr} @ preserve r0 and lr
#if defined(WITH_JIT)
/*
* Refresh the Jit's cached copy of profile table pointer. This pointer
@@ -10166,12 +10182,45 @@
ldr r0, [rGLUE, #offGlue_self] @ r0<- glue->self
EXPORT_PC() @ need for precise GC
#endif
- b dvmCheckSuspendPending @ suspend if necessary, then return
+ bl dvmCheckSuspendPending @ do full check, suspend if necessary
+ ldmfd sp!, {r0, lr} @ restore r0 and lr
-3: @ debugger/profiler enabled, bail out
+#if defined(WITH_DEBUGGER) || defined(WITH_PROFILER)
+
+ /*
+ * Reload the debugger/profiler enable flags. We're checking to see
+ * if either of these got set while we were suspended.
+ *
+ * We can't really avoid the #ifdefs here, because the fields don't
+ * exist when the feature is disabled.
+ */
+#if defined(WITH_DEBUGGER)
+ ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
+ cmp r1, #0 @ debugger enabled?
+ ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+#else
+ mov r1, #0
+#endif
+#if defined(WITH_PROFILER)
+ ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
+ ldr r2, [r2] @ r2<- activeProfilers (int)
+#else
+ mov r2, #0
+#endif
+
+ orrs r1, r1, r2
+ beq 2f
+
+1: @ debugger/profiler enabled, bail out; glue->entryPoint is set just below
+ str r0, [rGLUE, #offGlue_entryPoint] @ store r0, need for debug/prof
add rPC, rPC, r9 @ update rPC
mov r1, #1 @ "want switch" = true
- b common_gotoBail
+ b common_gotoBail @ side exit
+
+#endif /*WITH_DEBUGGER || WITH_PROFILER*/
+
+2:
+ bx lr @ nothing to do, return
/*
diff --git a/vm/mterp/out/InterpAsm-armv5te-vfp.S b/vm/mterp/out/InterpAsm-armv5te-vfp.S
index e377858..773d0d4 100644
--- a/vm/mterp/out/InterpAsm-armv5te-vfp.S
+++ b/vm/mterp/out/InterpAsm-armv5te-vfp.S
@@ -9599,6 +9599,11 @@
/*
* Common code when a backward branch is taken.
*
+ * TODO: we could avoid a branch by just setting r0 and falling through
+ * into the common_periodicChecks code, and having a test on r0 at the
+ * end determine if we should return to the caller or update & branch to
+ * the next instr.
+ *
* On entry:
* r9 is PC adjustment *in bytes*
*/
@@ -9621,23 +9626,25 @@
/*
* Need to see if the thread needs to be suspended or debugger/profiler
- * activity has begun.
+ * activity has begun. If so, we suspend the thread or side-exit to
+ * the debug interpreter as appropriate.
*
- * TODO: if JDWP isn't running, zero out pDebuggerActive pointer so we don't
- * have to do the second ldr.
+ * The common case is no activity on any of these, so we want to figure
+ * that out quickly. If something is up, we can then sort out what.
+ *
+ * We want to be fast if the VM was built without debugger or profiler
+ * support, but we also need to recognize that the system is usually
+ * shipped with both of these enabled.
*
* TODO: reduce this so we're just checking a single location.
*
* On entry:
- * r0 is reentry type, e.g. kInterpEntryInstr
+ * r0 is reentry type, e.g. kInterpEntryInstr (for debugger/profiling)
* r9 is trampoline PC adjustment *in bytes*
*/
common_periodicChecks:
ldr r3, [rGLUE, #offGlue_pSelfSuspendCount] @ r3<- &suspendCount
- @ speculatively store r0 before it is clobbered by dvmCheckSuspendPending
- str r0, [rGLUE, #offGlue_entryPoint]
-
#if defined(WITH_DEBUGGER)
ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
#endif
@@ -9645,33 +9652,42 @@
ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
#endif
- ldr r3, [r3] @ r3<- suspendCount (int)
+ ldr ip, [r3] @ ip<- suspendCount (int)
-#if defined(WITH_DEBUGGER)
- ldrb r1, [r1] @ r1<- debuggerActive (boolean)
-#endif
-#if defined (WITH_PROFILER)
+#if defined(WITH_DEBUGGER) && defined(WITH_PROFILER)
+ cmp r1, #0 @ debugger enabled?
+ ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
ldr r2, [r2] @ r2<- activeProfilers (int)
+ orrne ip, ip, r1 @ ip<- suspendCount | debuggerActive
+ orrs ip, ip, r2 @ ip<- suspend|debugger|profiler; set Z
+#elif defined(WITH_DEBUGGER)
+ cmp r1, #0 @ debugger enabled?
+ ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+ orrsne ip, ip, r1 @ yes, ip<- suspend | debugger; set Z
+ @ (if not enabled, Z was set by test for r1==0, which is what we want)
+#elif defined (WITH_PROFILER)
+ ldr r2, [r2] @ r2<- activeProfilers (int)
+ orrs ip, ip, r2 @ ip<- suspendCount | activeProfilers
+#else
+ cmp ip, #0 @ not ORing anything in; set Z
#endif
- cmp r3, #0 @ suspend pending?
- bne 2f @ yes, do full suspension check
+ bxeq lr @ all zero, return
+ /*
+ * One or more interesting events have happened. Figure out what.
+ *
+ * If debugging or profiling are compiled in, we need to disambiguate.
+ *
+ * r0 still holds the reentry type.
+ */
#if defined(WITH_DEBUGGER) || defined(WITH_PROFILER)
-# if defined(WITH_DEBUGGER) && defined(WITH_PROFILER)
- orrs r1, r1, r2 @ r1<- r1 | r2
- cmp r1, #0 @ debugger attached or profiler started?
-# elif defined(WITH_DEBUGGER)
- cmp r1, #0 @ debugger attached?
-# elif defined(WITH_PROFILER)
- cmp r2, #0 @ profiler started?
-# endif
- bne 3f @ debugger/profiler, switch interp
+ ldr ip, [r3] @ ip<- suspendCount (int)
+ cmp ip, #0 @ want suspend?
+ beq 1f @ no, must be debugger/profiler
#endif
- bx lr @ nothing to do, return
-
-2: @ check suspend
+ stmfd sp!, {r0, lr} @ preserve r0 and lr
#if defined(WITH_JIT)
/*
* Refresh the Jit's cached copy of profile table pointer. This pointer
@@ -9686,12 +9702,45 @@
ldr r0, [rGLUE, #offGlue_self] @ r0<- glue->self
EXPORT_PC() @ need for precise GC
#endif
- b dvmCheckSuspendPending @ suspend if necessary, then return
+ bl dvmCheckSuspendPending @ do full check, suspend if necessary
+ ldmfd sp!, {r0, lr} @ restore r0 and lr
-3: @ debugger/profiler enabled, bail out
+#if defined(WITH_DEBUGGER) || defined(WITH_PROFILER)
+
+ /*
+ * Reload the debugger/profiler enable flags. We're checking to see
+ * if either of these got set while we were suspended.
+ *
+ * We can't really avoid the #ifdefs here, because the fields don't
+ * exist when the feature is disabled.
+ */
+#if defined(WITH_DEBUGGER)
+ ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
+ cmp r1, #0 @ debugger enabled?
+ ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+#else
+ mov r1, #0
+#endif
+#if defined(WITH_PROFILER)
+ ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
+ ldr r2, [r2] @ r2<- activeProfilers (int)
+#else
+ mov r2, #0
+#endif
+
+ orrs r1, r1, r2
+ beq 2f
+
+1: @ debugger/profiler enabled, bail out; glue->entryPoint is set just below
+ str r0, [rGLUE, #offGlue_entryPoint] @ store r0, need for debug/prof
add rPC, rPC, r9 @ update rPC
mov r1, #1 @ "want switch" = true
- b common_gotoBail
+ b common_gotoBail @ side exit
+
+#endif /*WITH_DEBUGGER || WITH_PROFILER*/
+
+2:
+ bx lr @ nothing to do, return
/*
diff --git a/vm/mterp/out/InterpAsm-armv5te.S b/vm/mterp/out/InterpAsm-armv5te.S
index 6c22d1d..f79049f 100644
--- a/vm/mterp/out/InterpAsm-armv5te.S
+++ b/vm/mterp/out/InterpAsm-armv5te.S
@@ -10075,6 +10075,11 @@
/*
* Common code when a backward branch is taken.
*
+ * TODO: we could avoid a branch by just setting r0 and falling through
+ * into the common_periodicChecks code, and having a test on r0 at the
+ * end determine if we should return to the caller or update & branch to
+ * the next instr.
+ *
* On entry:
* r9 is PC adjustment *in bytes*
*/
@@ -10097,23 +10102,25 @@
/*
* Need to see if the thread needs to be suspended or debugger/profiler
- * activity has begun.
+ * activity has begun. If so, we suspend the thread or side-exit to
+ * the debug interpreter as appropriate.
*
- * TODO: if JDWP isn't running, zero out pDebuggerActive pointer so we don't
- * have to do the second ldr.
+ * The common case is no activity on any of these, so we want to figure
+ * that out quickly. If something is up, we can then sort out what.
+ *
+ * We want to be fast if the VM was built without debugger or profiler
+ * support, but we also need to recognize that the system is usually
+ * shipped with both of these enabled.
*
* TODO: reduce this so we're just checking a single location.
*
* On entry:
- * r0 is reentry type, e.g. kInterpEntryInstr
+ * r0 is reentry type, e.g. kInterpEntryInstr (for debugger/profiling)
* r9 is trampoline PC adjustment *in bytes*
*/
common_periodicChecks:
ldr r3, [rGLUE, #offGlue_pSelfSuspendCount] @ r3<- &suspendCount
- @ speculatively store r0 before it is clobbered by dvmCheckSuspendPending
- str r0, [rGLUE, #offGlue_entryPoint]
-
#if defined(WITH_DEBUGGER)
ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
#endif
@@ -10121,33 +10128,42 @@
ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
#endif
- ldr r3, [r3] @ r3<- suspendCount (int)
+ ldr ip, [r3] @ ip<- suspendCount (int)
-#if defined(WITH_DEBUGGER)
- ldrb r1, [r1] @ r1<- debuggerActive (boolean)
-#endif
-#if defined (WITH_PROFILER)
+#if defined(WITH_DEBUGGER) && defined(WITH_PROFILER)
+ cmp r1, #0 @ debugger enabled?
+ ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
ldr r2, [r2] @ r2<- activeProfilers (int)
+ orrne ip, ip, r1 @ ip<- suspendCount | debuggerActive
+ orrs ip, ip, r2 @ ip<- suspend|debugger|profiler; set Z
+#elif defined(WITH_DEBUGGER)
+ cmp r1, #0 @ debugger enabled?
+ ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+ orrsne ip, ip, r1 @ yes, ip<- suspend | debugger; set Z
+ @ (if not enabled, Z was set by test for r1==0, which is what we want)
+#elif defined (WITH_PROFILER)
+ ldr r2, [r2] @ r2<- activeProfilers (int)
+ orrs ip, ip, r2 @ ip<- suspendCount | activeProfilers
+#else
+ cmp ip, #0 @ not ORing anything in; set Z
#endif
- cmp r3, #0 @ suspend pending?
- bne 2f @ yes, do full suspension check
+ bxeq lr @ all zero, return
+ /*
+ * One or more interesting events have happened. Figure out what.
+ *
+ * If debugging or profiling are compiled in, we need to disambiguate.
+ *
+ * r0 still holds the reentry type.
+ */
#if defined(WITH_DEBUGGER) || defined(WITH_PROFILER)
-# if defined(WITH_DEBUGGER) && defined(WITH_PROFILER)
- orrs r1, r1, r2 @ r1<- r1 | r2
- cmp r1, #0 @ debugger attached or profiler started?
-# elif defined(WITH_DEBUGGER)
- cmp r1, #0 @ debugger attached?
-# elif defined(WITH_PROFILER)
- cmp r2, #0 @ profiler started?
-# endif
- bne 3f @ debugger/profiler, switch interp
+ ldr ip, [r3] @ ip<- suspendCount (int)
+ cmp ip, #0 @ want suspend?
+ beq 1f @ no, must be debugger/profiler
#endif
- bx lr @ nothing to do, return
-
-2: @ check suspend
+ stmfd sp!, {r0, lr} @ preserve r0 and lr
#if defined(WITH_JIT)
/*
* Refresh the Jit's cached copy of profile table pointer. This pointer
@@ -10162,12 +10178,45 @@
ldr r0, [rGLUE, #offGlue_self] @ r0<- glue->self
EXPORT_PC() @ need for precise GC
#endif
- b dvmCheckSuspendPending @ suspend if necessary, then return
+ bl dvmCheckSuspendPending @ do full check, suspend if necessary
+ ldmfd sp!, {r0, lr} @ restore r0 and lr
-3: @ debugger/profiler enabled, bail out
+#if defined(WITH_DEBUGGER) || defined(WITH_PROFILER)
+
+ /*
+ * Reload the debugger/profiler enable flags. We're checking to see
+ * if either of these got set while we were suspended.
+ *
+ * We can't really avoid the #ifdefs here, because the fields don't
+ * exist when the feature is disabled.
+ */
+#if defined(WITH_DEBUGGER)
+ ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
+ cmp r1, #0 @ debugger enabled?
+ ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+#else
+ mov r1, #0
+#endif
+#if defined(WITH_PROFILER)
+ ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
+ ldr r2, [r2] @ r2<- activeProfilers (int)
+#else
+ mov r2, #0
+#endif
+
+ orrs r1, r1, r2
+ beq 2f
+
+1: @ debugger/profiler enabled, bail out; glue->entryPoint is set just below
+ str r0, [rGLUE, #offGlue_entryPoint] @ store r0, need for debug/prof
add rPC, rPC, r9 @ update rPC
mov r1, #1 @ "want switch" = true
- b common_gotoBail
+ b common_gotoBail @ side exit
+
+#endif /*WITH_DEBUGGER || WITH_PROFILER*/
+
+2:
+ bx lr @ nothing to do, return
/*
diff --git a/vm/mterp/out/InterpAsm-armv7-a-neon.S b/vm/mterp/out/InterpAsm-armv7-a-neon.S
index 9e00786..efad56a 100644
--- a/vm/mterp/out/InterpAsm-armv7-a-neon.S
+++ b/vm/mterp/out/InterpAsm-armv7-a-neon.S
@@ -9525,6 +9525,11 @@
/*
* Common code when a backward branch is taken.
*
+ * TODO: we could avoid a branch by just setting r0 and falling through
+ * into the common_periodicChecks code, and having a test on r0 at the
+ * end determine if we should return to the caller or update & branch to
+ * the next instr.
+ *
* On entry:
* r9 is PC adjustment *in bytes*
*/
@@ -9547,23 +9552,25 @@
/*
* Need to see if the thread needs to be suspended or debugger/profiler
- * activity has begun.
+ * activity has begun. If so, we suspend the thread or side-exit to
+ * the debug interpreter as appropriate.
*
- * TODO: if JDWP isn't running, zero out pDebuggerActive pointer so we don't
- * have to do the second ldr.
+ * The common case is no activity on any of these, so we want to figure
+ * that out quickly. If something is up, we can then sort out what.
+ *
+ * We want to be fast if the VM was built without debugger or profiler
+ * support, but we also need to recognize that the system is usually
+ * shipped with both of these enabled.
*
* TODO: reduce this so we're just checking a single location.
*
* On entry:
- * r0 is reentry type, e.g. kInterpEntryInstr
+ * r0 is reentry type, e.g. kInterpEntryInstr (for debugger/profiling)
* r9 is trampoline PC adjustment *in bytes*
*/
common_periodicChecks:
ldr r3, [rGLUE, #offGlue_pSelfSuspendCount] @ r3<- &suspendCount
- @ speculatively store r0 before it is clobbered by dvmCheckSuspendPending
- str r0, [rGLUE, #offGlue_entryPoint]
-
#if defined(WITH_DEBUGGER)
ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
#endif
@@ -9571,33 +9578,42 @@
ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
#endif
- ldr r3, [r3] @ r3<- suspendCount (int)
+ ldr ip, [r3] @ ip<- suspendCount (int)
-#if defined(WITH_DEBUGGER)
- ldrb r1, [r1] @ r1<- debuggerActive (boolean)
-#endif
-#if defined (WITH_PROFILER)
+#if defined(WITH_DEBUGGER) && defined(WITH_PROFILER)
+ cmp r1, #0 @ debugger enabled?
+ ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
ldr r2, [r2] @ r2<- activeProfilers (int)
+ orrne ip, ip, r1 @ ip<- suspendCount | debuggerActive
+ orrs ip, ip, r2 @ ip<- suspend|debugger|profiler; set Z
+#elif defined(WITH_DEBUGGER)
+ cmp r1, #0 @ debugger enabled?
+ ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+ orrsne ip, ip, r1 @ yes, ip<- suspend | debugger; set Z
+ @ (if not enabled, Z was set by test for r1==0, which is what we want)
+#elif defined (WITH_PROFILER)
+ ldr r2, [r2] @ r2<- activeProfilers (int)
+ orrs ip, ip, r2 @ ip<- suspendCount | activeProfilers
+#else
+ cmp ip, #0 @ not ORing anything in; set Z
#endif
- cmp r3, #0 @ suspend pending?
- bne 2f @ yes, do full suspension check
+ bxeq lr @ all zero, return
+ /*
+ * One or more interesting events have happened. Figure out what.
+ *
+ * If debugging or profiling are compiled in, we need to disambiguate.
+ *
+ * r0 still holds the reentry type.
+ */
#if defined(WITH_DEBUGGER) || defined(WITH_PROFILER)
-# if defined(WITH_DEBUGGER) && defined(WITH_PROFILER)
- orrs r1, r1, r2 @ r1<- r1 | r2
- cmp r1, #0 @ debugger attached or profiler started?
-# elif defined(WITH_DEBUGGER)
- cmp r1, #0 @ debugger attached?
-# elif defined(WITH_PROFILER)
- cmp r2, #0 @ profiler started?
-# endif
- bne 3f @ debugger/profiler, switch interp
+ ldr ip, [r3] @ ip<- suspendCount (int)
+ cmp ip, #0 @ want suspend?
+ beq 1f @ no, must be debugger/profiler
#endif
- bx lr @ nothing to do, return
-
-2: @ check suspend
+ stmfd sp!, {r0, lr} @ preserve r0 and lr
#if defined(WITH_JIT)
/*
* Refresh the Jit's cached copy of profile table pointer. This pointer
@@ -9612,12 +9628,45 @@
ldr r0, [rGLUE, #offGlue_self] @ r0<- glue->self
EXPORT_PC() @ need for precise GC
#endif
- b dvmCheckSuspendPending @ suspend if necessary, then return
+ bl dvmCheckSuspendPending @ do full check, suspend if necessary
+ ldmfd sp!, {r0, lr} @ restore r0 and lr
-3: @ debugger/profiler enabled, bail out
+#if defined(WITH_DEBUGGER) || defined(WITH_PROFILER)
+
+ /*
+ * Reload the debugger/profiler enable flags. We're checking to see
+ * if either of these got set while we were suspended.
+ *
+ * We can't really avoid the #ifdefs here, because the fields don't
+ * exist when the feature is disabled.
+ */
+#if defined(WITH_DEBUGGER)
+ ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
+ cmp r1, #0 @ debugger enabled?
+ ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+#else
+ mov r1, #0
+#endif
+#if defined(WITH_PROFILER)
+ ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
+ ldr r2, [r2] @ r2<- activeProfilers (int)
+#else
+ mov r2, #0
+#endif
+
+ orrs r1, r1, r2
+ beq 2f
+
+1: @ debugger/profiler enabled, bail out; glue->entryPoint is set just below
+ str r0, [rGLUE, #offGlue_entryPoint] @ store r0, need for debug/prof
add rPC, rPC, r9 @ update rPC
mov r1, #1 @ "want switch" = true
- b common_gotoBail
+ b common_gotoBail @ side exit
+
+#endif /*WITH_DEBUGGER || WITH_PROFILER*/
+
+2:
+ bx lr @ nothing to do, return
/*
diff --git a/vm/mterp/out/InterpAsm-armv7-a.S b/vm/mterp/out/InterpAsm-armv7-a.S
index e2e0def..657f5f0 100644
--- a/vm/mterp/out/InterpAsm-armv7-a.S
+++ b/vm/mterp/out/InterpAsm-armv7-a.S
@@ -9525,6 +9525,11 @@
/*
* Common code when a backward branch is taken.
*
+ * TODO: we could avoid a branch by just setting r0 and falling through
+ * into the common_periodicChecks code, and having a test on r0 at the
+ * end determine if we should return to the caller or update & branch to
+ * the next instr.
+ *
* On entry:
* r9 is PC adjustment *in bytes*
*/
@@ -9547,23 +9552,25 @@
/*
* Need to see if the thread needs to be suspended or debugger/profiler
- * activity has begun.
+ * activity has begun. If so, we suspend the thread or side-exit to
+ * the debug interpreter as appropriate.
*
- * TODO: if JDWP isn't running, zero out pDebuggerActive pointer so we don't
- * have to do the second ldr.
+ * The common case is no activity on any of these, so we want to figure
+ * that out quickly. If something is up, we can then sort out what.
+ *
+ * We want to be fast if the VM was built without debugger or profiler
+ * support, but we also need to recognize that the system is usually
+ * shipped with both of these enabled.
*
* TODO: reduce this so we're just checking a single location.
*
* On entry:
- * r0 is reentry type, e.g. kInterpEntryInstr
+ * r0 is reentry type, e.g. kInterpEntryInstr (for debugger/profiling)
* r9 is trampoline PC adjustment *in bytes*
*/
common_periodicChecks:
ldr r3, [rGLUE, #offGlue_pSelfSuspendCount] @ r3<- &suspendCount
- @ speculatively store r0 before it is clobbered by dvmCheckSuspendPending
- str r0, [rGLUE, #offGlue_entryPoint]
-
#if defined(WITH_DEBUGGER)
ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
#endif
@@ -9571,33 +9578,42 @@
ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
#endif
- ldr r3, [r3] @ r3<- suspendCount (int)
+ ldr ip, [r3] @ ip<- suspendCount (int)
-#if defined(WITH_DEBUGGER)
- ldrb r1, [r1] @ r1<- debuggerActive (boolean)
-#endif
-#if defined (WITH_PROFILER)
+#if defined(WITH_DEBUGGER) && defined(WITH_PROFILER)
+ cmp r1, #0 @ debugger enabled?
+ ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
ldr r2, [r2] @ r2<- activeProfilers (int)
+ orrne ip, ip, r1 @ ip<- suspendCount | debuggerActive
+ orrs ip, ip, r2 @ ip<- suspend|debugger|profiler; set Z
+#elif defined(WITH_DEBUGGER)
+ cmp r1, #0 @ debugger enabled?
+ ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+ orrsne ip, ip, r1 @ yes, ip<- suspend | debugger; set Z
+ @ (if not enabled, Z was set by test for r1==0, which is what we want)
+#elif defined (WITH_PROFILER)
+ ldr r2, [r2] @ r2<- activeProfilers (int)
+ orrs ip, ip, r2 @ ip<- suspendCount | activeProfilers
+#else
+ cmp ip, #0 @ not ORing anything in; set Z
#endif
- cmp r3, #0 @ suspend pending?
- bne 2f @ yes, do full suspension check
+ bxeq lr @ all zero, return
+ /*
+ * One or more interesting events have happened. Figure out what.
+ *
+ * If debugging or profiling are compiled in, we need to disambiguate.
+ *
+ * r0 still holds the reentry type.
+ */
#if defined(WITH_DEBUGGER) || defined(WITH_PROFILER)
-# if defined(WITH_DEBUGGER) && defined(WITH_PROFILER)
- orrs r1, r1, r2 @ r1<- r1 | r2
- cmp r1, #0 @ debugger attached or profiler started?
-# elif defined(WITH_DEBUGGER)
- cmp r1, #0 @ debugger attached?
-# elif defined(WITH_PROFILER)
- cmp r2, #0 @ profiler started?
-# endif
- bne 3f @ debugger/profiler, switch interp
+ ldr ip, [r3] @ ip<- suspendCount (int)
+ cmp ip, #0 @ want suspend?
+ beq 1f @ no, must be debugger/profiler
#endif
- bx lr @ nothing to do, return
-
-2: @ check suspend
+ stmfd sp!, {r0, lr} @ preserve r0 and lr
#if defined(WITH_JIT)
/*
* Refresh the Jit's cached copy of profile table pointer. This pointer
@@ -9612,12 +9628,45 @@
ldr r0, [rGLUE, #offGlue_self] @ r0<- glue->self
EXPORT_PC() @ need for precise GC
#endif
- b dvmCheckSuspendPending @ suspend if necessary, then return
+ bl dvmCheckSuspendPending @ do full check, suspend if necessary
+ ldmfd sp!, {r0, lr} @ restore r0 and lr
-3: @ debugger/profiler enabled, bail out
+#if defined(WITH_DEBUGGER) || defined(WITH_PROFILER)
+
+ /*
+ * Reload the debugger/profiler enable flags. We're checking to see
+ * if either of these got set while we were suspended.
+ *
+ * We can't really avoid the #ifdefs here, because the fields don't
+ * exist when the feature is disabled.
+ */
+#if defined(WITH_DEBUGGER)
+ ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
+ cmp r1, #0 @ debugger enabled?
+ ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+#else
+ mov r1, #0
+#endif
+#if defined(WITH_PROFILER)
+ ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
+ ldr r2, [r2] @ r2<- activeProfilers (int)
+#else
+ mov r2, #0
+#endif
+
+ orrs r1, r1, r2
+ beq 2f
+
+1: @ debugger/profiler enabled, bail out; glue->entryPoint was set above
+ str r0, [rGLUE, #offGlue_entryPoint] @ store r0, need for debug/prof
add rPC, rPC, r9 @ update rPC
mov r1, #1 @ "want switch" = true
- b common_gotoBail
+ b common_gotoBail @ side exit
+
+#endif /*WITH_DEBUGGER || WITH_PROFILER*/
+
+2:
+ bx lr @ nothing to do, return
/*
diff --git a/vm/mterp/out/InterpAsm-x86.S b/vm/mterp/out/InterpAsm-x86.S
index d2d8702..831586f 100644
--- a/vm/mterp/out/InterpAsm-x86.S
+++ b/vm/mterp/out/InterpAsm-x86.S
@@ -8809,6 +8809,9 @@
* TUNING: Might be worthwhile to inline this.
* TODO: Basic-block style Jit will need a hook here as well. Fold it into
* the suspendCount check so we can get both in 1 shot.
+ * TODO: to match the other interpreters, this should handle suspension
+ * and then check for debugger/profiling after dvmCheckSuspendPending
+ * returns.
*/
common_periodicChecks:
movl offGlue_pSelfSuspendCount(%ecx),%eax # eax <- &suspendCount
@@ -8823,9 +8826,11 @@
movl offGlue_pActiveProfilers(%ecx),%ecx # ecx <- &ActiveProfilers
#endif
#if defined(WITH_DEBUGGER) && defined(WITH_PROFILER)
+ # TODO: check for NULL before load
movzbl (%eax),%eax # eax <- debuggerActive (boolean)
orl (%ecx),%eax # eax <- debuggerActive || activeProfilers
#elif defined(WITH_DEBUGGER)
+ # TODO: check for NULL before load
movzbl (%eax),%eax # eax <- debuggerActive (boolean)
#elif defined(WITH_PROFILER)
movl (%ecx),%eax # eax <= activeProfilers
diff --git a/vm/mterp/out/InterpC-portdbg.c b/vm/mterp/out/InterpC-portdbg.c
index f576b3d..bcd4c46 100644
--- a/vm/mterp/out/InterpC-portdbg.c
+++ b/vm/mterp/out/InterpC-portdbg.c
@@ -1445,7 +1445,10 @@
StackSaveArea* debugSaveArea = SAVEAREA_FROM_FP(self->curFrame);
#endif
#if INTERP_TYPE == INTERP_DBG
- bool debugIsMethodEntry = interpState->debugIsMethodEntry;
+ bool debugIsMethodEntry = false;
+# if defined(WITH_DEBUGGER) || defined(WITH_PROFILER) // implied by INTERP_DBG??
+ debugIsMethodEntry = interpState->debugIsMethodEntry;
+# endif
#endif
#if defined(WITH_TRACKREF_CHECKS)
int debugTrackedRefStart = interpState->debugTrackedRefStart;
diff --git a/vm/mterp/out/InterpC-portstd.c b/vm/mterp/out/InterpC-portstd.c
index b2d6478..b6e30c8 100644
--- a/vm/mterp/out/InterpC-portstd.c
+++ b/vm/mterp/out/InterpC-portstd.c
@@ -1184,7 +1184,10 @@
StackSaveArea* debugSaveArea = SAVEAREA_FROM_FP(self->curFrame);
#endif
#if INTERP_TYPE == INTERP_DBG
- bool debugIsMethodEntry = interpState->debugIsMethodEntry;
+ bool debugIsMethodEntry = false;
+# if defined(WITH_DEBUGGER) || defined(WITH_PROFILER) // implied by INTERP_DBG??
+ debugIsMethodEntry = interpState->debugIsMethodEntry;
+# endif
#endif
#if defined(WITH_TRACKREF_CHECKS)
int debugTrackedRefStart = interpState->debugTrackedRefStart;
diff --git a/vm/mterp/portable/entry.c b/vm/mterp/portable/entry.c
index dbd5561..2aa7867 100644
--- a/vm/mterp/portable/entry.c
+++ b/vm/mterp/portable/entry.c
@@ -9,7 +9,10 @@
StackSaveArea* debugSaveArea = SAVEAREA_FROM_FP(self->curFrame);
#endif
#if INTERP_TYPE == INTERP_DBG
- bool debugIsMethodEntry = interpState->debugIsMethodEntry;
+ bool debugIsMethodEntry = false;
+# if defined(WITH_DEBUGGER) || defined(WITH_PROFILER) // implied by INTERP_DBG??
+ debugIsMethodEntry = interpState->debugIsMethodEntry;
+# endif
#endif
#if defined(WITH_TRACKREF_CHECKS)
int debugTrackedRefStart = interpState->debugTrackedRefStart;
diff --git a/vm/mterp/x86-atom/TODO.txt b/vm/mterp/x86-atom/TODO.txt
index 1db8116..825e7db 100644
--- a/vm/mterp/x86-atom/TODO.txt
+++ b/vm/mterp/x86-atom/TODO.txt
@@ -2,6 +2,9 @@
(hi) Correct stack overflow handling (dvmCleanupStackOverflow takes an
additional argument now)
+(hi) "debugger active" test in common_periodicChecks must handle
+ the case where glue->pDebuggerActive is a NULL pointer (used to
+ skip a memory load when debugger support is completely disabled)
(md) Correct OP_MONITOR_EXIT (need to adjust PC before throw)
diff --git a/vm/mterp/x86/footer.S b/vm/mterp/x86/footer.S
index 8ed6c66..b322ff7 100644
--- a/vm/mterp/x86/footer.S
+++ b/vm/mterp/x86/footer.S
@@ -276,6 +276,9 @@
* TUNING: Might be worthwhile to inline this.
* TODO: Basic-block style Jit will need a hook here as well. Fold it into
* the suspendCount check so we can get both in 1 shot.
+ * TODO: to match the other interpreters, this should handle suspension
+ * and then check for debugger/profiling after dvmCheckSuspendPending
+ * returns.
*/
common_periodicChecks:
movl offGlue_pSelfSuspendCount(%ecx),%eax # eax <- &suspendCount
@@ -290,9 +293,11 @@
movl offGlue_pActiveProfilers(%ecx),%ecx # ecx <- &ActiveProfilers
#endif
#if defined(WITH_DEBUGGER) && defined(WITH_PROFILER)
+ # TODO: check for NULL before load
movzbl (%eax),%eax # eax <- debuggerActive (boolean)
orl (%ecx),%eax # eax <- debuggerActive || activeProfilers
#elif defined(WITH_DEBUGGER)
+ # TODO: check for NULL before load
movzbl (%eax),%eax # eax <- debuggerActive (boolean)
#elif defined(WITH_PROFILER)
movl (%ecx),%eax # eax <= activeProfilers
diff --git a/vm/oo/Class.c b/vm/oo/Class.c
index ad4bb1c..b979ff9 100644
--- a/vm/oo/Class.c
+++ b/vm/oo/Class.c
@@ -4402,10 +4402,12 @@
return false;
}
+#ifdef WITH_PROFILER
u8 startWhen = 0;
if (gDvm.allocProf.enabled) {
startWhen = dvmGetRelativeTimeNsec();
}
+#endif
/*
* We're ready to go, and have exclusive access to the class.
@@ -4499,6 +4501,7 @@
clazz->status = CLASS_INITIALIZED;
LOGVV("Initialized class: %s\n", clazz->descriptor);
+#ifdef WITH_PROFILER
/*
* Update alloc counters. TODO: guard with mutex.
*/
@@ -4509,6 +4512,7 @@
gDvm.allocProf.classInitCount++;
self->allocProf.classInitCount++;
}
+#endif
}
bail_notify: