Merge "ART: Make GC Pause Listener more precise"
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 2af3e3a..d59f8b4 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -1515,7 +1515,7 @@
if (kind == Location::kRegister) {
scratch = LocationFrom(vixl_temps_.AcquireX());
} else {
- DCHECK(kind == Location::kFpuRegister);
+ DCHECK_EQ(kind, Location::kFpuRegister);
scratch = LocationFrom(codegen_->GetGraph()->HasSIMD()
? vixl_temps_.AcquireVRegisterOfSize(kQRegSize)
: vixl_temps_.AcquireD());
@@ -1743,9 +1743,9 @@
(cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
}
-// Allocate a scratch register from the VIXL pool, querying first into
-// the floating-point register pool, and then the the core register
-// pool. This is essentially a reimplementation of
+// Allocate a scratch register from the VIXL pool, querying first
+// the floating-point register pool, and then the core register
+// pool. This is essentially a reimplementation of
// vixl::aarch64::UseScratchRegisterScope::AcquireCPURegisterOfSize
// using a different allocation strategy.
static CPURegister AcquireFPOrCoreCPURegisterOfSize(vixl::aarch64::MacroAssembler* masm,
@@ -1893,7 +1893,7 @@
// ask for a scratch register of any type (core or FP).
//
// Also, we start by asking for a FP scratch register first, as the
- // demand of scratch core registers is higher. This is why we
+ // demand of scratch core registers is higher. This is why we
// use AcquireFPOrCoreCPURegisterOfSize instead of
// UseScratchRegisterScope::AcquireCPURegisterOfSize, which
// allocates core scratch registers first.
diff --git a/compiler/optimizing/codegen_test.cc b/compiler/optimizing/codegen_test.cc
index 4ba5c55..7e3c377 100644
--- a/compiler/optimizing/codegen_test.cc
+++ b/compiler/optimizing/codegen_test.cc
@@ -754,7 +754,28 @@
//
// Assertion failed (!available->IsEmpty())
//
- // in vixl::aarch64::UseScratchRegisterScope::AcquireNextAvailable.
+ // in vixl::aarch64::UseScratchRegisterScope::AcquireNextAvailable,
+ // because of the following situation:
+ //
+ // 1. a temp register (IP0) is allocated as a scratch register by
+ // the parallel move resolver to solve a cycle (swap):
+ //
+ // [ source=DS0 destination=DS257 type=PrimDouble instruction=null ]
+ // [ source=DS257 destination=DS0 type=PrimDouble instruction=null ]
+ //
+ // 2. within CodeGeneratorARM64::MoveLocation, another temp
+ // register (IP1) is allocated to generate the swap between two
+ // double stack slots;
+ //
+ // 3. VIXL requires a third temp register to emit the `Ldr` or
+ // `Str` operation from CodeGeneratorARM64::MoveLocation (as
+ // one of the stack slots' offsets cannot be encoded as an
+ // immediate), but the pool of (core) temp registers is now
+ // empty.
+ //
+ // The solution used so far is to use a floating-point temp register
+ // (D31) in step #2, so that IP1 is available for step #3.
+
HParallelMove* move = new (graph->GetArena()) HParallelMove(graph->GetArena());
move->AddMove(Location::DoubleStackSlot(0),
Location::DoubleStackSlot(257),
@@ -807,7 +828,6 @@
InternalCodeAllocator code_allocator;
codegen.Finalize(&code_allocator);
}
-
#endif
#ifdef ART_ENABLE_CODEGEN_mips
diff --git a/runtime/openjdkjvmti/OpenjdkJvmTi.cc b/runtime/openjdkjvmti/OpenjdkJvmTi.cc
index 0921cea..9be486e 100644
--- a/runtime/openjdkjvmti/OpenjdkJvmTi.cc
+++ b/runtime/openjdkjvmti/OpenjdkJvmTi.cc
@@ -1205,6 +1205,30 @@
return error;
}
+ error = add_extension(
+ reinterpret_cast<jvmtiExtensionFunction>(HeapExtensions::IterateThroughHeapExt),
+ "com.android.art.heap.iterate_through_heap_ext",
+ "Iterate through a heap. This is equivalent to the standard IterateThroughHeap function,"
+ " except for additionally passing the heap id of the current object. The jvmtiHeapCallbacks"
+ " structure is reused, with the callbacks field overloaded to a signature of "
+ "jint (*)(jlong, jlong, jlong*, jint length, void*, jint).",
+ 4,
+ { // NOLINT [whitespace/braces] [4]
+ { "heap_filter", JVMTI_KIND_IN, JVMTI_TYPE_JINT, false},
+ { "klass", JVMTI_KIND_IN, JVMTI_TYPE_JCLASS, true},
+ { "callbacks", JVMTI_KIND_IN_PTR, JVMTI_TYPE_CVOID, false},
+ { "user_data", JVMTI_KIND_IN_PTR, JVMTI_TYPE_CVOID, true}
+ },
+ 3,
+ { // NOLINT [whitespace/braces] [4]
+ JVMTI_ERROR_MUST_POSSESS_CAPABILITY,
+ JVMTI_ERROR_INVALID_CLASS,
+ JVMTI_ERROR_NULL_POINTER
+ });
+ if (error != ERR(NONE)) {
+ return error;
+ }
+
// Copy into output buffer.
*extension_count_ptr = ext_vector.size();
diff --git a/runtime/openjdkjvmti/ti_heap.cc b/runtime/openjdkjvmti/ti_heap.cc
index 9b4dcaa..99774c6 100644
--- a/runtime/openjdkjvmti/ti_heap.cc
+++ b/runtime/openjdkjvmti/ti_heap.cc
@@ -651,14 +651,17 @@
art::Runtime::Current()->RemoveSystemWeakHolder(&gIndexCachingTable);
}
+template <typename Callback>
struct IterateThroughHeapData {
- IterateThroughHeapData(HeapUtil* _heap_util,
+ IterateThroughHeapData(Callback _cb,
+ ObjectTagTable* _tag_table,
jvmtiEnv* _env,
art::ObjPtr<art::mirror::Class> klass,
jint _heap_filter,
const jvmtiHeapCallbacks* _callbacks,
const void* _user_data)
- : heap_util(_heap_util),
+ : cb(_cb),
+ tag_table(_tag_table),
heap_filter(_heap_filter),
filter_klass(klass),
env(_env),
@@ -667,7 +670,72 @@
stop_reports(false) {
}
- HeapUtil* heap_util;
+ static void ObjectCallback(art::mirror::Object* obj, void* arg)
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {
+ IterateThroughHeapData* ithd = reinterpret_cast<IterateThroughHeapData*>(arg);
+ ithd->ObjectCallback(obj);
+ }
+
+ void ObjectCallback(art::mirror::Object* obj)
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {
+ // Early return, as we can't really stop visiting.
+ if (stop_reports) {
+ return;
+ }
+
+ art::ScopedAssertNoThreadSuspension no_suspension("IterateThroughHeapCallback");
+
+ jlong tag = 0;
+ tag_table->GetTag(obj, &tag);
+
+ jlong class_tag = 0;
+ art::ObjPtr<art::mirror::Class> klass = obj->GetClass();
+ tag_table->GetTag(klass.Ptr(), &class_tag);
+ // For simplicity, even if we find a tag = 0, assume 0 = not tagged.
+
+ if (!heap_filter.ShouldReportByHeapFilter(tag, class_tag)) {
+ return;
+ }
+
+ if (filter_klass != nullptr) {
+ if (filter_klass != klass) {
+ return;
+ }
+ }
+
+ jlong size = obj->SizeOf();
+
+ jint length = -1;
+ if (obj->IsArrayInstance()) {
+ length = obj->AsArray()->GetLength();
+ }
+
+ jlong saved_tag = tag;
+ jint ret = cb(obj, callbacks, class_tag, size, &tag, length, const_cast<void*>(user_data));
+
+ if (tag != saved_tag) {
+ tag_table->Set(obj, tag);
+ }
+
+ stop_reports = (ret & JVMTI_VISIT_ABORT) != 0;
+
+ if (!stop_reports) {
+ jint string_ret = ReportString(obj, env, tag_table, callbacks, user_data);
+ stop_reports = (string_ret & JVMTI_VISIT_ABORT) != 0;
+ }
+
+ if (!stop_reports) {
+ jint array_ret = ReportPrimitiveArray(obj, env, tag_table, callbacks, user_data);
+ stop_reports = (array_ret & JVMTI_VISIT_ABORT) != 0;
+ }
+
+ if (!stop_reports) {
+ stop_reports = ReportPrimitiveField::Report(obj, tag_table, callbacks, user_data);
+ }
+ }
+
+ Callback cb;
+ ObjectTagTable* tag_table;
const HeapFilter heap_filter;
art::ObjPtr<art::mirror::Class> filter_klass;
jvmtiEnv* env;
@@ -677,85 +745,14 @@
bool stop_reports;
};
-static void IterateThroughHeapObjectCallback(art::mirror::Object* obj, void* arg)
- REQUIRES_SHARED(art::Locks::mutator_lock_) {
- IterateThroughHeapData* ithd = reinterpret_cast<IterateThroughHeapData*>(arg);
- // Early return, as we can't really stop visiting.
- if (ithd->stop_reports) {
- return;
- }
-
- art::ScopedAssertNoThreadSuspension no_suspension("IterateThroughHeapCallback");
-
- jlong tag = 0;
- ithd->heap_util->GetTags()->GetTag(obj, &tag);
-
- jlong class_tag = 0;
- art::ObjPtr<art::mirror::Class> klass = obj->GetClass();
- ithd->heap_util->GetTags()->GetTag(klass.Ptr(), &class_tag);
- // For simplicity, even if we find a tag = 0, assume 0 = not tagged.
-
- if (!ithd->heap_filter.ShouldReportByHeapFilter(tag, class_tag)) {
- return;
- }
-
- if (ithd->filter_klass != nullptr) {
- if (ithd->filter_klass != klass) {
- return;
- }
- }
-
- jlong size = obj->SizeOf();
-
- jint length = -1;
- if (obj->IsArrayInstance()) {
- length = obj->AsArray()->GetLength();
- }
-
- jlong saved_tag = tag;
- jint ret = ithd->callbacks->heap_iteration_callback(class_tag,
- size,
- &tag,
- length,
- const_cast<void*>(ithd->user_data));
-
- if (tag != saved_tag) {
- ithd->heap_util->GetTags()->Set(obj, tag);
- }
-
- ithd->stop_reports = (ret & JVMTI_VISIT_ABORT) != 0;
-
- if (!ithd->stop_reports) {
- jint string_ret = ReportString(obj,
- ithd->env,
- ithd->heap_util->GetTags(),
- ithd->callbacks,
- ithd->user_data);
- ithd->stop_reports = (string_ret & JVMTI_VISIT_ABORT) != 0;
- }
-
- if (!ithd->stop_reports) {
- jint array_ret = ReportPrimitiveArray(obj,
- ithd->env,
- ithd->heap_util->GetTags(),
- ithd->callbacks,
- ithd->user_data);
- ithd->stop_reports = (array_ret & JVMTI_VISIT_ABORT) != 0;
- }
-
- if (!ithd->stop_reports) {
- ithd->stop_reports = ReportPrimitiveField::Report(obj,
- ithd->heap_util->GetTags(),
- ithd->callbacks,
- ithd->user_data);
- }
-}
-
-jvmtiError HeapUtil::IterateThroughHeap(jvmtiEnv* env,
- jint heap_filter,
- jclass klass,
- const jvmtiHeapCallbacks* callbacks,
- const void* user_data) {
+template <typename T>
+static jvmtiError DoIterateThroughHeap(T fn,
+ jvmtiEnv* env,
+ ObjectTagTable* tag_table,
+ jint heap_filter,
+ jclass klass,
+ const jvmtiHeapCallbacks* callbacks,
+ const void* user_data) {
if (callbacks == nullptr) {
return ERR(NULL_POINTER);
}
@@ -763,18 +760,48 @@
art::Thread* self = art::Thread::Current();
art::ScopedObjectAccess soa(self); // Now we know we have the shared lock.
- IterateThroughHeapData ithd(this,
- env,
- soa.Decode<art::mirror::Class>(klass),
- heap_filter,
- callbacks,
- user_data);
+ using Iterator = IterateThroughHeapData<T>;
+ Iterator ithd(fn,
+ tag_table,
+ env,
+ soa.Decode<art::mirror::Class>(klass),
+ heap_filter,
+ callbacks,
+ user_data);
- art::Runtime::Current()->GetHeap()->VisitObjects(IterateThroughHeapObjectCallback, &ithd);
+ art::Runtime::Current()->GetHeap()->VisitObjects(Iterator::ObjectCallback, &ithd);
return ERR(NONE);
}
+jvmtiError HeapUtil::IterateThroughHeap(jvmtiEnv* env,
+ jint heap_filter,
+ jclass klass,
+ const jvmtiHeapCallbacks* callbacks,
+ const void* user_data) {
+ auto JvmtiIterateHeap = [](art::mirror::Object* obj ATTRIBUTE_UNUSED,
+ const jvmtiHeapCallbacks* cb_callbacks,
+ jlong class_tag,
+ jlong size,
+ jlong* tag,
+ jint length,
+ void* cb_user_data)
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {
+ return cb_callbacks->heap_iteration_callback(class_tag,
+ size,
+ tag,
+ length,
+ cb_user_data);
+ };
+ return DoIterateThroughHeap(JvmtiIterateHeap,
+ env,
+ ArtJvmTiEnv::AsArtJvmTiEnv(env)->object_tag_table.get(),
+ heap_filter,
+ klass,
+ callbacks,
+ user_data);
+}
+
class FollowReferencesHelper FINAL {
public:
FollowReferencesHelper(HeapUtil* h,
@@ -1406,6 +1433,33 @@
static constexpr jint kHeapIdZygote = 2;
static constexpr jint kHeapIdApp = 3;
+static jint GetHeapId(art::ObjPtr<art::mirror::Object> obj)
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {
+ if (obj == nullptr) {
+ return -1;
+ }
+
+ art::gc::Heap* const heap = art::Runtime::Current()->GetHeap();
+ const art::gc::space::ContinuousSpace* const space =
+ heap->FindContinuousSpaceFromObject(obj, true);
+ jint heap_type = kHeapIdApp;
+ if (space != nullptr) {
+ if (space->IsZygoteSpace()) {
+ heap_type = kHeapIdZygote;
+ } else if (space->IsImageSpace() && heap->ObjectIsInBootImageSpace(obj)) {
+ // Only count objects in the boot image as HPROF_HEAP_IMAGE, this leaves app image objects
+ // as HPROF_HEAP_APP. b/35762934
+ heap_type = kHeapIdImage;
+ }
+ } else {
+ const auto* los = heap->GetLargeObjectsSpace();
+ if (los->Contains(obj.Ptr()) && los->IsZygoteLargeObject(art::Thread::Current(), obj.Ptr())) {
+ heap_type = kHeapIdZygote;
+ }
+ }
+ return heap_type;
+}
+
jvmtiError HeapExtensions::GetObjectHeapId(jvmtiEnv* env, jlong tag, jint* heap_id, ...) {
if (heap_id == nullptr) {
return ERR(NULL_POINTER);
@@ -1416,28 +1470,10 @@
auto work = [&]() REQUIRES_SHARED(art::Locks::mutator_lock_) {
ObjectTagTable* tag_table = ArtJvmTiEnv::AsArtJvmTiEnv(env)->object_tag_table.get();
art::ObjPtr<art::mirror::Object> obj = tag_table->Find(tag);
- if (obj == nullptr) {
+ jint heap_type = GetHeapId(obj);
+ if (heap_type == -1) {
return ERR(NOT_FOUND);
}
-
- art::gc::Heap* const heap = art::Runtime::Current()->GetHeap();
- const art::gc::space::ContinuousSpace* const space =
- heap->FindContinuousSpaceFromObject(obj, true);
- jint heap_type = kHeapIdApp;
- if (space != nullptr) {
- if (space->IsZygoteSpace()) {
- heap_type = kHeapIdZygote;
- } else if (space->IsImageSpace() && heap->ObjectIsInBootImageSpace(obj)) {
- // Only count objects in the boot image as HPROF_HEAP_IMAGE, this leaves app image objects
- // as HPROF_HEAP_APP. b/35762934
- heap_type = kHeapIdImage;
- }
- } else {
- const auto* los = heap->GetLargeObjectsSpace();
- if (los->Contains(obj.Ptr()) && los->IsZygoteLargeObject(self, obj.Ptr())) {
- heap_type = kHeapIdZygote;
- }
- }
*heap_id = heap_type;
return ERR(NONE);
};
@@ -1491,4 +1527,36 @@
}
}
+jvmtiError HeapExtensions::IterateThroughHeapExt(jvmtiEnv* env,
+ jint heap_filter,
+ jclass klass,
+ const jvmtiHeapCallbacks* callbacks,
+ const void* user_data) {
+  if (ArtJvmTiEnv::AsArtJvmTiEnv(env)->capabilities.can_tag_objects != 1) {
+    return ERR(MUST_POSSESS_CAPABILITY);
+  }
+ }
+
+ // ART extension API: Also pass the heap id.
+ auto ArtIterateHeap = [](art::mirror::Object* obj,
+ const jvmtiHeapCallbacks* cb_callbacks,
+ jlong class_tag,
+ jlong size,
+ jlong* tag,
+ jint length,
+ void* cb_user_data)
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {
+ jint heap_id = GetHeapId(obj);
+ using ArtExtensionAPI = jint (*)(jlong, jlong, jlong*, jint length, void*, jint);
+ return reinterpret_cast<ArtExtensionAPI>(cb_callbacks->heap_iteration_callback)(
+ class_tag, size, tag, length, cb_user_data, heap_id);
+ };
+ return DoIterateThroughHeap(ArtIterateHeap,
+ env,
+ ArtJvmTiEnv::AsArtJvmTiEnv(env)->object_tag_table.get(),
+ heap_filter,
+ klass,
+ callbacks,
+ user_data);
+}
+
} // namespace openjdkjvmti
diff --git a/runtime/openjdkjvmti/ti_heap.h b/runtime/openjdkjvmti/ti_heap.h
index b4b71ba..0c973db 100644
--- a/runtime/openjdkjvmti/ti_heap.h
+++ b/runtime/openjdkjvmti/ti_heap.h
@@ -60,6 +60,12 @@
public:
static jvmtiError JNICALL GetObjectHeapId(jvmtiEnv* env, jlong tag, jint* heap_id, ...);
static jvmtiError JNICALL GetHeapName(jvmtiEnv* env, jint heap_id, char** heap_name, ...);
+
+ static jvmtiError JNICALL IterateThroughHeapExt(jvmtiEnv* env,
+ jint heap_filter,
+ jclass klass,
+ const jvmtiHeapCallbacks* callbacks,
+ const void* user_data);
};
} // namespace openjdkjvmti
diff --git a/test/913-heaps/heaps.cc b/test/913-heaps/heaps.cc
index f39c5f1..ec36ceb 100644
--- a/test/913-heaps/heaps.cc
+++ b/test/913-heaps/heaps.cc
@@ -823,6 +823,14 @@
using GetHeapName = jvmtiError(*)(jvmtiEnv*, jint, char**, ...);
static GetHeapName gGetHeapNameFn = nullptr;
+using IterateThroughHeapExt = jvmtiError(*)(jvmtiEnv*,
+ jint,
+ jclass,
+ const jvmtiHeapCallbacks*,
+ const void*);
+static IterateThroughHeapExt gIterateThroughHeapExt = nullptr;
+
+
static void FreeExtensionFunctionInfo(jvmtiExtensionFunctionInfo* extensions, jint count) {
for (size_t i = 0; i != static_cast<size_t>(count); ++i) {
jvmti_env->Deallocate(reinterpret_cast<unsigned char*>(extensions[i].id));
@@ -886,6 +894,38 @@
CHECK(extensions[i].errors != nullptr);
CHECK(extensions[i].errors[0] == JVMTI_ERROR_ILLEGAL_ARGUMENT);
}
+
+ if (strcmp("com.android.art.heap.iterate_through_heap_ext", extensions[i].id) == 0) {
+ CHECK(gIterateThroughHeapExt == nullptr);
+ gIterateThroughHeapExt = reinterpret_cast<IterateThroughHeapExt>(extensions[i].func);
+
+ CHECK_EQ(extensions[i].param_count, 4);
+
+ CHECK_EQ(strcmp("heap_filter", extensions[i].params[0].name), 0);
+ CHECK_EQ(extensions[i].params[0].base_type, JVMTI_TYPE_JINT);
+ CHECK_EQ(extensions[i].params[0].kind, JVMTI_KIND_IN);
+
+ CHECK_EQ(strcmp("klass", extensions[i].params[1].name), 0);
+ CHECK_EQ(extensions[i].params[1].base_type, JVMTI_TYPE_JCLASS);
+ CHECK_EQ(extensions[i].params[1].kind, JVMTI_KIND_IN);
+ CHECK_EQ(extensions[i].params[1].null_ok, true);
+
+ CHECK_EQ(strcmp("callbacks", extensions[i].params[2].name), 0);
+ CHECK_EQ(extensions[i].params[2].base_type, JVMTI_TYPE_CVOID);
+ CHECK_EQ(extensions[i].params[2].kind, JVMTI_KIND_IN_PTR);
+ CHECK_EQ(extensions[i].params[2].null_ok, false);
+
+ CHECK_EQ(strcmp("user_data", extensions[i].params[3].name), 0);
+ CHECK_EQ(extensions[i].params[3].base_type, JVMTI_TYPE_CVOID);
+ CHECK_EQ(extensions[i].params[3].kind, JVMTI_KIND_IN_PTR);
+ CHECK_EQ(extensions[i].params[3].null_ok, true);
+
+ CHECK_EQ(extensions[i].error_count, 3);
+ CHECK(extensions[i].errors != nullptr);
+ CHECK(extensions[i].errors[0] == JVMTI_ERROR_MUST_POSSESS_CAPABILITY);
+ CHECK(extensions[i].errors[1] == JVMTI_ERROR_INVALID_CLASS);
+ CHECK(extensions[i].errors[2] == JVMTI_ERROR_NULL_POINTER);
+ }
}
CHECK(gGetObjectHeapIdFn != nullptr);
@@ -1004,5 +1044,39 @@
}
}
+static bool gFoundExt = false;
+
+static jint JNICALL HeapIterationExtCallback(jlong class_tag ATTRIBUTE_UNUSED,
+ jlong size ATTRIBUTE_UNUSED,
+ jlong* tag_ptr,
+ jint length ATTRIBUTE_UNUSED,
+ void* user_data ATTRIBUTE_UNUSED,
+ jint heap_id) {
+ // We expect some tagged objects at or above the threshold, where the expected heap id is
+ // encoded into lowest byte.
+ constexpr jlong kThreshold = 30000000;
+ jlong tag = *tag_ptr;
+ if (tag >= kThreshold) {
+ jint expected_heap_id = static_cast<jint>(tag - kThreshold);
+ CHECK_EQ(expected_heap_id, heap_id);
+ gFoundExt = true;
+ }
+ return 0;
+}
+
+extern "C" JNIEXPORT void JNICALL Java_art_Test913_iterateThroughHeapExt(
+ JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
+ CHECK(gIterateThroughHeapExt != nullptr);
+
+ jvmtiHeapCallbacks callbacks;
+ memset(&callbacks, 0, sizeof(jvmtiHeapCallbacks));
+ callbacks.heap_iteration_callback =
+ reinterpret_cast<decltype(callbacks.heap_iteration_callback)>(HeapIterationExtCallback);
+
+ jvmtiError ret = gIterateThroughHeapExt(jvmti_env, 0, nullptr, &callbacks, nullptr);
+ JvmtiErrorToException(env, jvmti_env, ret);
+ CHECK(gFoundExt);
+}
+
} // namespace Test913Heaps
} // namespace art
diff --git a/test/913-heaps/src/art/Test913.java b/test/913-heaps/src/art/Test913.java
index 6694aad..97f48ee 100644
--- a/test/913-heaps/src/art/Test913.java
+++ b/test/913-heaps/src/art/Test913.java
@@ -261,6 +261,15 @@
checkGetObjectHeapIdInCallback(100000, objClassExpectedHeapId);
checkGetObjectHeapIdInCallback(100001, 3);
+ long baseTag = 30000000;
+ setTag(Object.class, baseTag + objClassExpectedHeapId);
+ setTag(Class.class, baseTag + objClassExpectedHeapId);
+ Object o = new Object();
+ extensionTestHolder.add(o);
+ setTag(o, baseTag + 3);
+
+ iterateThroughHeapExt();
+
extensionTestHolder = null;
}
@@ -719,4 +728,6 @@
public static native String[] followReferencesString(Object initialObject);
public static native String followReferencesPrimitiveArray(Object initialObject);
public static native String followReferencesPrimitiveFields(Object initialObject);
+
+ private static native void iterateThroughHeapExt();
}