x86/x86-64: Fix IMT conflict trampoline for obsolete methods.

For obsolete methods we branch to the dex cache miss slow path from the
ACC_OBSOLETE_METHOD check, but the target method index (hidden argument
in XMM7 on x86, EAX on x86-64) had not yet been moved to the register
the slow path reads (ESI/R11), and on x86-64 RDX had not yet been
preserved. Hoist those steps above the obsolete-method check so the
slow path sees a consistent state.
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Bug: 123693178
Change-Id: I06c08151fc3045824610d3d2e177cc13243b7e0a
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index b0bed56..306c4eb 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -1804,6 +1804,7 @@
PUSH ESI
PUSH EDX
movl 16(%esp), %edi // Load referrer.
+ movd %xmm7, %esi // Get target method index stored in xmm7, remember it in ESI.
// If the method is obsolete, just go through the dex cache miss slow path.
// The obsolete flag is set with suspended threads, so we do not need an acquire operation here.
testl LITERAL(ACC_OBSOLETE_METHOD), ART_METHOD_ACCESS_FLAGS_OFFSET(%edi)
@@ -1814,8 +1815,7 @@
movl MIRROR_DEX_CACHE_RESOLVED_METHODS_OFFSET(%edi), %edi // Load the resolved methods.
pushl ART_METHOD_JNI_OFFSET_32(%eax) // Push ImtConflictTable.
CFI_ADJUST_CFA_OFFSET(4)
- movd %xmm7, %eax // Get target method index stored in xmm7.
- movl %eax, %esi // Remember method index in ESI.
+ movl %esi, %eax // Copy the method index from ESI.
andl LITERAL(METHOD_DEX_CACHE_SIZE_MINUS_ONE), %eax // Calculate DexCache method slot index.
leal 0(%edi, %eax, 2 * __SIZEOF_POINTER__), %edi // Load DexCache method slot address.
mov %ecx, %edx // Make EDX:EAX == ECX:EBX so that LOCK CMPXCHG8B makes no changes.
diff --git a/runtime/arch/x86_64/quick_entrypoints_x86_64.S b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
index a8a648f..39bf6e8 100644
--- a/runtime/arch/x86_64/quick_entrypoints_x86_64.S
+++ b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
@@ -1654,7 +1654,7 @@
* rdi is the conflict ArtMethod.
* rax is a hidden argument that holds the target interface method's dex method index.
*
- * Note that this stub writes to r10 and rdi.
+ * Note that this stub writes to r10, r11, rax and rdi.
*/
DEFINE_FUNCTION art_quick_imt_conflict_trampoline
#if defined(__APPLE__)
@@ -1662,6 +1662,8 @@
int3
#else
movq __SIZEOF_POINTER__(%rsp), %r10 // Load referrer.
+ mov %eax, %r11d // Remember method index in R11.
+ PUSH rdx // Preserve RDX as we need to clobber it by LOCK CMPXCHG16B.
// If the method is obsolete, just go through the dex cache miss slow path.
// The obsolete flag is set with suspended threads, so we do not need an acquire operation here.
testl LITERAL(ACC_OBSOLETE_METHOD), ART_METHOD_ACCESS_FLAGS_OFFSET(%r10)
@@ -1670,11 +1672,9 @@
movl MIRROR_CLASS_DEX_CACHE_OFFSET(%r10), %r10d // Load the DexCache (without read barrier).
UNPOISON_HEAP_REF r10d
movq MIRROR_DEX_CACHE_RESOLVED_METHODS_OFFSET(%r10), %r10 // Load the resolved methods.
- mov %eax, %r11d // Remember method index in R11.
andl LITERAL(METHOD_DEX_CACHE_SIZE_MINUS_ONE), %eax // Calculate DexCache method slot index.
shll LITERAL(1), %eax // Multiply by 2 as entries have size 2 * __SIZEOF_POINTER__.
leaq 0(%r10, %rax, __SIZEOF_POINTER__), %r10 // Load DexCache method slot address.
- PUSH rdx // Preserve RDX as we need to clobber it by LOCK CMPXCHG16B.
mov %rcx, %rdx // Make RDX:RAX == RCX:RBX so that LOCK CMPXCHG16B makes no changes.
mov %rbx, %rax // (The actual value does not matter.)
lock cmpxchg16b (%r10) // Relaxed atomic load RDX:RAX from the dex cache slot.