/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "class_linker.h"
#include <unistd.h>
#include <algorithm>
#include <deque>
#include <forward_list>
#include <iostream>
#include <map>
#include <memory>
#include <queue>
#include <string>
#include <string_view>
#include <tuple>
#include <unordered_map>
#include <utility>
#include <vector>
#include "android-base/stringprintf.h"
#include "art_field-inl.h"
#include "art_method-inl.h"
#include "barrier.h"
#include "base/arena_allocator.h"
#include "base/casts.h"
#include "base/file_utils.h"
#include "base/leb128.h"
#include "base/logging.h"
#include "base/mutex-inl.h"
#include "base/os.h"
#include "base/quasi_atomic.h"
#include "base/scoped_arena_containers.h"
#include "base/scoped_flock.h"
#include "base/stl_util.h"
#include "base/string_view_cpp20.h"
#include "base/systrace.h"
#include "base/time_utils.h"
#include "base/unix_file/fd_file.h"
#include "base/utils.h"
#include "base/value_object.h"
#include "cha.h"
#include "class_linker-inl.h"
#include "class_loader_utils.h"
#include "class_root-inl.h"
#include "class_table-inl.h"
#include "compiler_callbacks.h"
#include "debug_print.h"
#include "debugger.h"
#include "dex/class_accessor-inl.h"
#include "dex/descriptors_names.h"
#include "dex/dex_file-inl.h"
#include "dex/dex_file_exception_helpers.h"
#include "dex/dex_file_loader.h"
#include "dex/signature-inl.h"
#include "dex/utf.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "experimental_flags.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap-inl.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap-visit-objects-inl.h"
#include "gc/heap.h"
#include "gc/scoped_gc_critical_section.h"
#include "gc/space/image_space.h"
#include "gc/space/space-inl.h"
#include "gc_root-inl.h"
#include "handle_scope-inl.h"
#include "hidden_api.h"
#include "image-inl.h"
#include "imt_conflict_table.h"
#include "imtable-inl.h"
#include "intern_table-inl.h"
#include "interpreter/interpreter.h"
#include "interpreter/mterp/nterp.h"
#include "jit/debugger_interface.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "jni/java_vm_ext.h"
#include "jni/jni_internal.h"
#include "linear_alloc.h"
#include "mirror/array-alloc-inl.h"
#include "mirror/array-inl.h"
#include "mirror/call_site.h"
#include "mirror/class-alloc-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class.h"
#include "mirror/class_ext.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/emulated_stack_frame.h"
#include "mirror/field.h"
#include "mirror/iftable-inl.h"
#include "mirror/method.h"
#include "mirror/method_handle_impl.h"
#include "mirror/method_handles_lookup.h"
#include "mirror/method_type.h"
#include "mirror/object-inl.h"
#include "mirror/object-refvisitor-inl.h"
#include "mirror/object.h"
#include "mirror/object_array-alloc-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/object_array.h"
#include "mirror/object_reference.h"
#include "mirror/object_reference-inl.h"
#include "mirror/proxy.h"
#include "mirror/reference-inl.h"
#include "mirror/stack_trace_element.h"
#include "mirror/string-inl.h"
#include "mirror/throwable.h"
#include "mirror/var_handle.h"
#include "native/dalvik_system_DexFile.h"
#include "nativehelper/scoped_local_ref.h"
#include "oat.h"
#include "oat_file-inl.h"
#include "oat_file.h"
#include "oat_file_assistant.h"
#include "oat_file_manager.h"
#include "object_lock.h"
#include "profile/profile_compilation_info.h"
#include "runtime.h"
#include "runtime_callbacks.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-inl.h"
#include "thread.h"
#include "thread_list.h"
#include "trace.h"
#include "transaction.h"
#include "verifier/class_verifier.h"
#include "well_known_classes.h"
#include "interpreter/interpreter_mterp_impl.h"
namespace art {
using android::base::StringPrintf;
static constexpr bool kCheckImageObjects = kIsDebugBuild;
static constexpr bool kVerifyArtMethodDeclaringClasses = kIsDebugBuild;
static void ThrowNoClassDefFoundError(const char* fmt, ...)
__attribute__((__format__(__printf__, 1, 2)))
REQUIRES_SHARED(Locks::mutator_lock_);
static void ThrowNoClassDefFoundError(const char* fmt, ...) {
va_list args;
va_start(args, fmt);
Thread* self = Thread::Current();
self->ThrowNewExceptionV("Ljava/lang/NoClassDefFoundError;", fmt, args);
va_end(args);
}
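// Returns true if the exception class named by `descriptor` can be found (with the
// calling method's class loader) and declares a <init>(Ljava/lang/String;)V
// constructor, i.e. it can be thrown with a detail message.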
static bool HasInitWithString(Thread* self, ClassLinker* class_linker, const char* descriptor)
REQUIRES_SHARED(Locks::mutator_lock_) {
ArtMethod* method = self->GetCurrentMethod(nullptr);
StackHandleScope<1> hs(self);
Handle<mirror::ClassLoader> class_loader(hs.NewHandle(method != nullptr ?
method->GetDeclaringClass()->GetClassLoader() : nullptr));
ObjPtr<mirror::Class> exception_class = class_linker->FindClass(self, descriptor, class_loader);
if (exception_class == nullptr) {
// No exception class means no <init>-with-string.
CHECK(self->IsExceptionPending());
self->ClearException();
return false;
}
ArtMethod* exception_init_method = exception_class->FindConstructor(
"(Ljava/lang/String;)V", class_linker->GetImagePointerSize());
return exception_init_method != nullptr;
}
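// Returns the verification error stored in the class's ClassExt data, or null if
// there is no ClassExt or no recorded error.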
static ObjPtr<mirror::Object> GetVerifyError(ObjPtr<mirror::Class> c)
REQUIRES_SHARED(Locks::mutator_lock_) {
ObjPtr<mirror::ClassExt> ext(c->GetExtData());
if (ext == nullptr) {
return nullptr;
} else {
return ext->GetVerifyError();
}
}
// Helper for ThrowEarlierClassFailure. Throws the stored error.
static void HandleEarlierVerifyError(Thread* self,
ClassLinker* class_linker,
ObjPtr<mirror::Class> c)
REQUIRES_SHARED(Locks::mutator_lock_) {
ObjPtr<mirror::Object> obj = GetVerifyError(c);
DCHECK(obj != nullptr);
self->AssertNoPendingException();
if (obj->IsClass()) {
// Previous error has been stored as a class. Create a new exception of that type.
// It's possible the exception doesn't have a <init>(String).
std::string temp;
const char* descriptor = obj->AsClass()->GetDescriptor(&temp);
if (HasInitWithString(self, class_linker, descriptor)) {
self->ThrowNewException(descriptor, c->PrettyDescriptor().c_str());
} else {
self->ThrowNewException(descriptor, nullptr);
}
} else {
// Previous error has been stored as an instance. Just rethrow.
ObjPtr<mirror::Class> throwable_class = GetClassRoot<mirror::Throwable>(class_linker);
ObjPtr<mirror::Class> error_class = obj->GetClass();
CHECK(throwable_class->IsAssignableFrom(error_class));
self->SetException(obj->AsThrowable());
}
self->AssertPendingException();
}
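// If `method` still uses the quick-to-interpreter bridge and is eligible for nterp,
// point its entrypoint at nterp directly when no clinit check is required, or at the
// resolution stub, which initializes the class before dispatching to nterp.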
static void ChangeInterpreterBridgeToNterp(ArtMethod* method, ClassLinker* class_linker)
REQUIRES_SHARED(Locks::mutator_lock_) {
Runtime* runtime = Runtime::Current();
if (class_linker->IsQuickToInterpreterBridge(method->GetEntryPointFromQuickCompiledCode()) &&
interpreter::CanMethodUseNterp(method)) {
if (method->GetDeclaringClass()->IsVisiblyInitialized() ||
!NeedsClinitCheckBeforeCall(method)) {
runtime->GetInstrumentation()->UpdateMethodsCode(method, interpreter::GetNterpEntryPoint());
} else {
// Install the resolution stub, which will initialize the class and then
// call the method with nterp.
runtime->GetInstrumentation()->UpdateMethodsCode(method, GetQuickResolutionStub());
}
}
}
// Ensures that methods have the kAccSkipAccessChecks bit set. We use the
// kAccVerificationAttempted bit on the class access flags to determine whether this has been done
// before.
static void EnsureSkipAccessChecksMethods(Handle<mirror::Class> klass, PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_) {
Runtime* runtime = Runtime::Current();
ClassLinker* class_linker = runtime->GetClassLinker();
if (!klass->WasVerificationAttempted()) {
klass->SetSkipAccessChecksFlagOnAllMethods(pointer_size);
klass->SetVerificationAttempted();
// Now that the class has passed verification, try to set nterp entrypoints
// to methods that currently use the switch interpreter.
if (interpreter::CanRuntimeUseNterp()) {
for (ArtMethod& m : klass->GetMethods(pointer_size)) {
ChangeInterpreterBridgeToNterp(&m, class_linker);
}
}
}
}
// Callback responsible for making a batch of classes visibly initialized
// after all threads have called it from a checkpoint, ensuring visibility.
class ClassLinker::VisiblyInitializedCallback final
: public Closure, public IntrusiveForwardListNode<VisiblyInitializedCallback> {
public:
explicit VisiblyInitializedCallback(ClassLinker* class_linker)
: class_linker_(class_linker),
num_classes_(0u),
thread_visibility_counter_(0),
barriers_() {
std::fill_n(classes_, kMaxClasses, nullptr);
}
bool IsEmpty() const {
DCHECK_LE(num_classes_, kMaxClasses);
return num_classes_ == 0u;
}
bool IsFull() const {
DCHECK_LE(num_classes_, kMaxClasses);
return num_classes_ == kMaxClasses;
}
void AddClass(Thread* self, ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
DCHECK_EQ(klass->GetStatus(), ClassStatus::kInitialized);
DCHECK(!IsFull());
classes_[num_classes_] = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, klass);
++num_classes_;
}
void AddBarrier(Barrier* barrier) {
barriers_.push_front(barrier);
}
std::forward_list<Barrier*> GetAndClearBarriers() {
std::forward_list<Barrier*> result;
result.swap(barriers_);
result.reverse(); // Return barriers in insertion order.
return result;
}
void MakeVisible(Thread* self) {
DCHECK_EQ(thread_visibility_counter_.load(std::memory_order_relaxed), 0);
size_t count = Runtime::Current()->GetThreadList()->RunCheckpoint(this);
AdjustThreadVisibilityCounter(self, count);
}
void Run(Thread* self) override {
self->ClearMakeVisiblyInitializedCounter();
AdjustThreadVisibilityCounter(self, -1);
}
private:
void AdjustThreadVisibilityCounter(Thread* self, ssize_t adjustment) {
ssize_t old = thread_visibility_counter_.fetch_add(adjustment, std::memory_order_relaxed);
if (old + adjustment == 0) {
// All threads passed the checkpoint. Mark classes as visibly initialized.
{
ScopedObjectAccess soa(self);
StackHandleScope<1u> hs(self);
MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
JavaVMExt* vm = self->GetJniEnv()->GetVm();
for (size_t i = 0, num = num_classes_; i != num; ++i) {
klass.Assign(ObjPtr<mirror::Class>::DownCast(self->DecodeJObject(classes_[i])));
vm->DeleteWeakGlobalRef(self, classes_[i]);
if (klass != nullptr) {
mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
class_linker_->FixupStaticTrampolines(self, klass.Get());
}
}
num_classes_ = 0u;
}
class_linker_->VisiblyInitializedCallbackDone(self, this);
}
}
static constexpr size_t kMaxClasses = 16;
ClassLinker* const class_linker_;
size_t num_classes_;
jweak classes_[kMaxClasses];
// The thread visibility counter starts at 0 and it is incremented by the number of
// threads that need to run this callback (by the thread that requests the callback
// to be run) and decremented once for each `Run()` execution. When it reaches 0,
// whether after the increment or after a decrement, we know that `Run()` was executed
// for all threads and therefore we can mark the classes as visibly initialized.
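// For illustration: with three threads to run the callback, `MakeVisible()` adds +3
// after `RunCheckpoint()` returns while each `Run()` adds -1; whichever adjustment
// brings the sum to 0 (the +3 if all three threads already ran, the last -1
// otherwise) performs the final marking.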
std::atomic<ssize_t> thread_visibility_counter_;
// List of barriers to `Pass()` for threads that wait for the callback to complete.
std::forward_list<Barrier*> barriers_;
};
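// Flushes the pending batch of initialized classes, if any, by running the
// visibly-initialized checkpoint on all threads. When `wait` is true, also blocks
// on a barrier until every callback currently in flight has completed. This is a
// no-op on x86/x86-64, where classes skip the kInitialized state entirely.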
void ClassLinker::MakeInitializedClassesVisiblyInitialized(Thread* self, bool wait) {
if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
return; // Nothing to do. Thanks to the x86 memory model, classes skip the initialized status.
}
std::optional<Barrier> maybe_barrier; // Avoid constructing the Barrier for `wait == false`.
if (wait) {
maybe_barrier.emplace(0);
}
int wait_count = 0;
VisiblyInitializedCallback* callback = nullptr;
{
MutexLock lock(self, visibly_initialized_callback_lock_);
if (visibly_initialized_callback_ != nullptr && !visibly_initialized_callback_->IsEmpty()) {
callback = visibly_initialized_callback_.release();
running_visibly_initialized_callbacks_.push_front(*callback);
}
if (wait) {
DCHECK(maybe_barrier.has_value());
Barrier* barrier = std::addressof(*maybe_barrier);
for (VisiblyInitializedCallback& cb : running_visibly_initialized_callbacks_) {
cb.AddBarrier(barrier);
++wait_count;
}
}
}
if (callback != nullptr) {
callback->MakeVisible(self);
}
if (wait_count != 0) {
DCHECK(maybe_barrier.has_value());
maybe_barrier->Increment(self, wait_count);
}
}
void ClassLinker::VisiblyInitializedCallbackDone(Thread* self,
VisiblyInitializedCallback* callback) {
MutexLock lock(self, visibly_initialized_callback_lock_);
// Pass the barriers if requested.
for (Barrier* barrier : callback->GetAndClearBarriers()) {
barrier->Pass(self);
}
// Remove the callback from the list of running callbacks.
auto before = running_visibly_initialized_callbacks_.before_begin();
auto it = running_visibly_initialized_callbacks_.begin();
DCHECK(it != running_visibly_initialized_callbacks_.end());
while (std::addressof(*it) != callback) {
before = it;
++it;
DCHECK(it != running_visibly_initialized_callbacks_.end());
}
running_visibly_initialized_callbacks_.erase_after(before);
// Reuse or destroy the callback object.
if (visibly_initialized_callback_ == nullptr) {
visibly_initialized_callback_.reset(callback);
} else {
delete callback;
}
}
void ClassLinker::ForceClassInitialized(Thread* self, Handle<mirror::Class> klass) {
ClassLinker::VisiblyInitializedCallback* cb = MarkClassInitialized(self, klass);
if (cb != nullptr) {
cb->MakeVisible(self);
}
ScopedThreadSuspension sts(self, ThreadState::kSuspended);
MakeInitializedClassesVisiblyInitialized(self, /*wait=*/true);
}
ClassLinker::VisiblyInitializedCallback* ClassLinker::MarkClassInitialized(
Thread* self, Handle<mirror::Class> klass) {
if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
// Thanks to the x86 memory model, we do not need any memory fences and
// we can immediately mark the class as visibly initialized.
mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
FixupStaticTrampolines(self, klass.Get());
return nullptr;
}
if (Runtime::Current()->IsActiveTransaction()) {
// Transactions are single-threaded, so we can mark the class as visibly initialized.
// (Otherwise we'd need to track the callback's entry in the transaction for rollback.)
mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
FixupStaticTrampolines(self, klass.Get());
return nullptr;
}
mirror::Class::SetStatus(klass, ClassStatus::kInitialized, self);
MutexLock lock(self, visibly_initialized_callback_lock_);
if (visibly_initialized_callback_ == nullptr) {
visibly_initialized_callback_.reset(new VisiblyInitializedCallback(this));
}
DCHECK(!visibly_initialized_callback_->IsFull());
visibly_initialized_callback_->AddClass(self, klass.Get());
if (visibly_initialized_callback_->IsFull()) {
VisiblyInitializedCallback* callback = visibly_initialized_callback_.release();
running_visibly_initialized_callbacks_.push_front(*callback);
return callback;
} else {
return nullptr;
}
}
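// Registers `native_method` as the implementation of `method` and returns the
// entrypoint actually installed (a RegisterNativeMethod callback may substitute a
// different one). For @CriticalNative methods whose declaring class is not yet
// visibly initialized, the entrypoint is stashed in
// `critical_native_code_with_clinit_check_` and installed once the class becomes
// visibly initialized.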
const void* ClassLinker::RegisterNative(
Thread* self, ArtMethod* method, const void* native_method) {
CHECK(method->IsNative()) << method->PrettyMethod();
CHECK(native_method != nullptr) << method->PrettyMethod();
void* new_native_method = nullptr;
Runtime* runtime = Runtime::Current();
runtime->GetRuntimeCallbacks()->RegisterNativeMethod(method,
native_method,
/*out*/&new_native_method);
if (method->IsCriticalNative()) {
MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
// Remove old registered method if any.
auto it = critical_native_code_with_clinit_check_.find(method);
if (it != critical_native_code_with_clinit_check_.end()) {
critical_native_code_with_clinit_check_.erase(it);
}
// To ensure correct memory visibility, we need the class to be visibly
// initialized before we can set the JNI entrypoint.
if (method->GetDeclaringClass()->IsVisiblyInitialized()) {
method->SetEntryPointFromJni(new_native_method);
} else {
critical_native_code_with_clinit_check_.emplace(method, new_native_method);
}
} else {
method->SetEntryPointFromJni(new_native_method);
}
return new_native_method;
}
void ClassLinker::UnregisterNative(Thread* self, ArtMethod* method) {
CHECK(method->IsNative()) << method->PrettyMethod();
// Restore stub to lookup native pointer via dlsym.
if (method->IsCriticalNative()) {
MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
auto it = critical_native_code_with_clinit_check_.find(method);
if (it != critical_native_code_with_clinit_check_.end()) {
critical_native_code_with_clinit_check_.erase(it);
}
method->SetEntryPointFromJni(GetJniDlsymLookupCriticalStub());
} else {
method->SetEntryPointFromJni(GetJniDlsymLookupStub());
}
}
const void* ClassLinker::GetRegisteredNative(Thread* self, ArtMethod* method) {
if (method->IsCriticalNative()) {
MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
auto it = critical_native_code_with_clinit_check_.find(method);
if (it != critical_native_code_with_clinit_check_.end()) {
return it->second;
}
const void* native_code = method->GetEntryPointFromJni();
return IsJniDlsymLookupCriticalStub(native_code) ? nullptr : native_code;
} else {
const void* native_code = method->GetEntryPointFromJni();
return IsJniDlsymLookupStub(native_code) ? nullptr : native_code;
}
}
void ClassLinker::ThrowEarlierClassFailure(ObjPtr<mirror::Class> c,
bool wrap_in_no_class_def,
bool log) {
// The class failed to initialize on a previous attempt, so we want to throw
// a NoClassDefFoundError (v2 2.17.5). The exception to this rule is if we
// failed in verification, in which case v2 5.4.1 says we need to re-throw
// the previous error.
Runtime* const runtime = Runtime::Current();
if (!runtime->IsAotCompiler()) { // Give info if this occurs at runtime.
std::string extra;
ObjPtr<mirror::Object> verify_error = GetVerifyError(c);
if (verify_error != nullptr) {
if (verify_error->IsClass()) {
extra = mirror::Class::PrettyDescriptor(verify_error->AsClass());
} else {
extra = verify_error->AsThrowable()->Dump();
}
}
if (log) {
LOG(INFO) << "Rejecting re-init on previously-failed class " << c->PrettyClass()
<< ": " << extra;
}
}
CHECK(c->IsErroneous()) << c->PrettyClass() << " " << c->GetStatus();
Thread* self = Thread::Current();
if (runtime->IsAotCompiler()) {
// At compile time, accurate errors and NCDFE are disabled to speed compilation.
ObjPtr<mirror::Throwable> pre_allocated = runtime->GetPreAllocatedNoClassDefFoundError();
self->SetException(pre_allocated);
} else {
ObjPtr<mirror::Object> verify_error = GetVerifyError(c);
if (verify_error != nullptr) {
// Rethrow stored error.
HandleEarlierVerifyError(self, this, c);
}
// TODO This might be wrong if we hit an OOME while allocating the ClassExt. In that case we
// might have meant to go down the earlier if statement with the original error but it got
// swallowed by the OOM so we end up here.
if (verify_error == nullptr || wrap_in_no_class_def) {
// If there isn't a recorded earlier error, or this is a repeat throw from initialization,
// the top-level exception must be a NoClassDefFoundError. The potentially already pending
// exception will be a cause.
self->ThrowNewWrappedException("Ljava/lang/NoClassDefFoundError;",
c->PrettyDescriptor().c_str());
}
}
}
static void VlogClassInitializationFailure(Handle<mirror::Class> klass)
REQUIRES_SHARED(Locks::mutator_lock_) {
if (VLOG_IS_ON(class_linker)) {
std::string temp;
LOG(INFO) << "Failed to initialize class " << klass->GetDescriptor(&temp) << " from "
<< klass->GetLocation() << "\n" << Thread::Current()->GetException()->Dump();
}
}
static void WrapExceptionInInitializer(Handle<mirror::Class> klass)
REQUIRES_SHARED(Locks::mutator_lock_) {
Thread* self = Thread::Current();
JNIEnv* env = self->GetJniEnv();
ScopedLocalRef<jthrowable> cause(env, env->ExceptionOccurred());
CHECK(cause.get() != nullptr);
// Boot classpath classes should not fail initialization. This is a consistency debug check;
// the invariant cannot be guaranteed in general, but a failure here in all likelihood leads
// to breakage down the line.
if (klass->GetClassLoader() == nullptr && !Runtime::Current()->IsAotCompiler()) {
std::string tmp;
// We want to LOG(FATAL) on debug builds since this really shouldn't be happening, but only
// when no AsyncExceptions are being thrown around, since those could have caused the error.
bool known_impossible = kIsDebugBuild && !Runtime::Current()->AreAsyncExceptionsThrown();
LOG(known_impossible ? FATAL : WARNING) << klass->GetDescriptor(&tmp)
<< " failed initialization: "
<< self->GetException()->Dump();
}
env->ExceptionClear();
bool is_error = env->IsInstanceOf(cause.get(), WellKnownClasses::java_lang_Error);
env->Throw(cause.get());
// We only wrap non-Error exceptions; an Error can just be used as-is.
if (!is_error) {
self->ThrowNewWrappedException("Ljava/lang/ExceptionInInitializerError;", nullptr);
}
VlogClassInitializationFailure(klass);
}
// Gap between two fields in object layout.
struct FieldGap {
uint32_t start_offset; // The offset from the start of the object.
uint32_t size; // The gap size: 1, 2, or 4 bytes.
};
struct FieldGapsComparator {
FieldGapsComparator() {
}
bool operator() (const FieldGap& lhs, const FieldGap& rhs)
NO_THREAD_SAFETY_ANALYSIS {
// Sort by gap size, largest first. Secondary sort by starting offset.
// Note that the priority queue returns the largest element, so operator()
// should return true if lhs is less than rhs.
return lhs.size < rhs.size || (lhs.size == rhs.size && lhs.start_offset > rhs.start_offset);
}
};
using FieldGaps = std::priority_queue<FieldGap, std::vector<FieldGap>, FieldGapsComparator>;
// Adds largest aligned gaps to queue of gaps.
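// For example, a gap spanning offsets [1, 8) is split into a 1-byte gap at offset 1,
// a 2-byte gap at offset 2 and a 4-byte gap at offset 4, each naturally aligned.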
static void AddFieldGap(uint32_t gap_start, uint32_t gap_end, FieldGaps* gaps) {
DCHECK(gaps != nullptr);
uint32_t current_offset = gap_start;
while (current_offset != gap_end) {
size_t remaining = gap_end - current_offset;
if (remaining >= sizeof(uint32_t) && IsAligned<4>(current_offset)) {
gaps->push(FieldGap {current_offset, sizeof(uint32_t)});
current_offset += sizeof(uint32_t);
} else if (remaining >= sizeof(uint16_t) && IsAligned<2>(current_offset)) {
gaps->push(FieldGap {current_offset, sizeof(uint16_t)});
current_offset += sizeof(uint16_t);
} else {
gaps->push(FieldGap {current_offset, sizeof(uint8_t)});
current_offset += sizeof(uint8_t);
}
DCHECK_LE(current_offset, gap_end) << "Overran gap";
}
}
// Shuffle fields forward, making use of gaps whenever possible.
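// The loop below stops at the first field smaller than n, so callers invoke this
// with decreasing n (e.g. 8, 4, 2, 1); each pass places the fields of its size and
// records any alignment gaps for the smaller passes that follow.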
template<int n>
static void ShuffleForward(size_t* current_field_idx,
MemberOffset* field_offset,
std::deque<ArtField*>* grouped_and_sorted_fields,
FieldGaps* gaps)
REQUIRES_SHARED(Locks::mutator_lock_) {
DCHECK(current_field_idx != nullptr);
DCHECK(grouped_and_sorted_fields != nullptr);
DCHECK(gaps != nullptr);
DCHECK(field_offset != nullptr);
DCHECK(IsPowerOfTwo(n));
while (!grouped_and_sorted_fields->empty()) {
ArtField* field = grouped_and_sorted_fields->front();
Primitive::Type type = field->GetTypeAsPrimitiveType();
if (Primitive::ComponentSize(type) < n) {
break;
}
if (!IsAligned<n>(field_offset->Uint32Value())) {
MemberOffset old_offset = *field_offset;
*field_offset = MemberOffset(RoundUp(field_offset->Uint32Value(), n));
AddFieldGap(old_offset.Uint32Value(), field_offset->Uint32Value(), gaps);
}
CHECK(type != Primitive::kPrimNot) << field->PrettyField(); // should be primitive types
grouped_and_sorted_fields->pop_front();
if (!gaps->empty() && gaps->top().size >= n) {
FieldGap gap = gaps->top();
gaps->pop();
DCHECK_ALIGNED(gap.start_offset, n);
field->SetOffset(MemberOffset(gap.start_offset));
if (gap.size > n) {
AddFieldGap(gap.start_offset + n, gap.start_offset + gap.size, gaps);
}
} else {
DCHECK_ALIGNED(field_offset->Uint32Value(), n);
field->SetOffset(*field_offset);
*field_offset = MemberOffset(field_offset->Uint32Value() + n);
}
++(*current_field_idx);
}
}
ClassLinker::ClassLinker(InternTable* intern_table, bool fast_class_not_found_exceptions)
: boot_class_table_(new ClassTable()),
failed_dex_cache_class_lookups_(0),
class_roots_(nullptr),
find_array_class_cache_next_victim_(0),
init_done_(false),
log_new_roots_(false),
intern_table_(intern_table),
fast_class_not_found_exceptions_(fast_class_not_found_exceptions),
jni_dlsym_lookup_trampoline_(nullptr),
jni_dlsym_lookup_critical_trampoline_(nullptr),
quick_resolution_trampoline_(nullptr),
quick_imt_conflict_trampoline_(nullptr),
quick_generic_jni_trampoline_(nullptr),
quick_to_interpreter_bridge_trampoline_(nullptr),
image_pointer_size_(kRuntimePointerSize),
visibly_initialized_callback_lock_("visibly initialized callback lock"),
visibly_initialized_callback_(nullptr),
critical_native_code_with_clinit_check_lock_("critical native code with clinit check lock"),
critical_native_code_with_clinit_check_(),
cha_(Runtime::Current()->IsAotCompiler() ? nullptr : new ClassHierarchyAnalysis()) {
// CHA is disabled for the AOT compiler; see b/34193647.
CHECK(intern_table_ != nullptr);
static_assert(kFindArrayCacheSize == arraysize(find_array_class_cache_),
"Array cache size wrong.");
std::fill_n(find_array_class_cache_, kFindArrayCacheSize, GcRoot<mirror::Class>(nullptr));
}
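// Checks that a manually set up boot class matches the result of FindSystemClass for
// the same descriptor, aborting with full class dumps on mismatch (which usually
// indicates mismatched libcore and art trees).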
void ClassLinker::CheckSystemClass(Thread* self, Handle<mirror::Class> c1, const char* descriptor) {
ObjPtr<mirror::Class> c2 = FindSystemClass(self, descriptor);
if (c2 == nullptr) {
LOG(FATAL) << "Could not find class " << descriptor;
UNREACHABLE();
}
if (c1.Get() != c2) {
std::ostringstream os1, os2;
c1->DumpClass(os1, mirror::Class::kDumpClassFullDetail);
c2->DumpClass(os2, mirror::Class::kDumpClassFullDetail);
LOG(FATAL) << "InitWithoutImage: Class mismatch for " << descriptor
<< ". This is most likely the result of a broken build. Make sure that "
<< "libcore and art projects match.\n\n"
<< os1.str() << "\n\n" << os2.str();
UNREACHABLE();
}
}
bool ClassLinker::InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> boot_class_path,
std::string* error_msg) {
VLOG(startup) << "ClassLinker::Init";
Thread* const self = Thread::Current();
Runtime* const runtime = Runtime::Current();
gc::Heap* const heap = runtime->GetHeap();
CHECK(!heap->HasBootImageSpace()) << "Runtime has image. We should use it.";
CHECK(!init_done_);
// Use the pointer size from the runtime since we are probably creating the image.
image_pointer_size_ = InstructionSetPointerSize(runtime->GetInstructionSet());
// java_lang_Class comes first; it's needed for AllocClass.
// The GC can't handle an object with a null class since we can't get the size of this object.
heap->IncrementDisableMovingGC(self);
StackHandleScope<64> hs(self); // 64 is picked arbitrarily.
auto class_class_size = mirror::Class::ClassClassSize(image_pointer_size_);
// Allocate the object as non-movable so that there are no cases where Object::IsClass returns
// the incorrect result when comparing to-space vs from-space.
Handle<mirror::Class> java_lang_Class(hs.NewHandle(ObjPtr<mirror::Class>::DownCast(
heap->AllocNonMovableObject(self, nullptr, class_class_size, VoidFunctor()))));
CHECK(java_lang_Class != nullptr);
java_lang_Class->SetClassFlags(mirror::kClassFlagClass);
java_lang_Class->SetClass(java_lang_Class.Get());
if (kUseBakerReadBarrier) {
java_lang_Class->AssertReadBarrierState();
}
java_lang_Class->SetClassSize(class_class_size);
java_lang_Class->SetPrimitiveType(Primitive::kPrimNot);
heap->DecrementDisableMovingGC(self);
// AllocClass(ObjPtr<mirror::Class>) can now be used
// Class[] is used for reflection support.
auto class_array_class_size = mirror::ObjectArray<mirror::Class>::ClassSize(image_pointer_size_);
Handle<mirror::Class> class_array_class(hs.NewHandle(
AllocClass(self, java_lang_Class.Get(), class_array_class_size)));
class_array_class->SetComponentType(java_lang_Class.Get());
// java_lang_Object comes next so that object_array_class can be created.
Handle<mirror::Class> java_lang_Object(hs.NewHandle(
AllocClass(self, java_lang_Class.Get(), mirror::Object::ClassSize(image_pointer_size_))));
CHECK(java_lang_Object != nullptr);
// backfill Object as the super class of Class.
java_lang_Class->SetSuperClass(java_lang_Object.Get());
mirror::Class::SetStatus(java_lang_Object, ClassStatus::kLoaded, self);
java_lang_Object->SetObjectSize(sizeof(mirror::Object));
// Allocate in non-movable so that it's possible to check if a JNI weak global ref has been
// cleared without triggering the read barrier and unintentionally mark the sentinel alive.
runtime->SetSentinel(heap->AllocNonMovableObject(self,
java_lang_Object.Get(),
java_lang_Object->GetObjectSize(),
VoidFunctor()));
// Initialize the SubtypeCheck bitstring for java.lang.Object and java.lang.Class.
if (kBitstringSubtypeCheckEnabled) {
// It might seem the lock here is unnecessary, however all the SubtypeCheck
// functions are annotated to require locks all the way down.
//
// We take the lock here to avoid using NO_THREAD_SAFETY_ANALYSIS.
MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Object.Get());
SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Class.Get());
}
// Object[] next to hold class roots.
Handle<mirror::Class> object_array_class(hs.NewHandle(
AllocClass(self, java_lang_Class.Get(),
mirror::ObjectArray<mirror::Object>::ClassSize(image_pointer_size_))));
object_array_class->SetComponentType(java_lang_Object.Get());
// Setup java.lang.String.
//
// We make this class non-movable for the unlikely case where it were to be
// moved by a sticky-bit (minor) collection when using the Generational
// Concurrent Copying (CC) collector, potentially creating a stale reference
// in the `klass_` field of one of its instances allocated in the Large-Object
// Space (LOS) -- see the comment about the dirty card scanning logic in
// art::gc::collector::ConcurrentCopying::MarkingPhase.
Handle<mirror::Class> java_lang_String(hs.NewHandle(
AllocClass</* kMovable= */ false>(
self, java_lang_Class.Get(), mirror::String::ClassSize(image_pointer_size_))));
java_lang_String->SetStringClass();
mirror::Class::SetStatus(java_lang_String, ClassStatus::kResolved, self);
// Setup java.lang.ref.Reference.
Handle<mirror::Class> java_lang_ref_Reference(hs.NewHandle(
AllocClass(self, java_lang_Class.Get(), mirror::Reference::ClassSize(image_pointer_size_))));
java_lang_ref_Reference->SetObjectSize(mirror::Reference::InstanceSize());
mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kResolved, self);
// Create storage for root classes, save away our work so far (requires descriptors).
class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
mirror::ObjectArray<mirror::Class>::Alloc(self,
object_array_class.Get(),
static_cast<int32_t>(ClassRoot::kMax)));
CHECK(!class_roots_.IsNull());
SetClassRoot(ClassRoot::kJavaLangClass, java_lang_Class.Get());
SetClassRoot(ClassRoot::kJavaLangObject, java_lang_Object.Get());
SetClassRoot(ClassRoot::kClassArrayClass, class_array_class.Get());
SetClassRoot(ClassRoot::kObjectArrayClass, object_array_class.Get());
SetClassRoot(ClassRoot::kJavaLangString, java_lang_String.Get());
SetClassRoot(ClassRoot::kJavaLangRefReference, java_lang_ref_Reference.Get());
// Fill in the empty iftable. Needs to be done after the kObjectArrayClass root is set.
java_lang_Object->SetIfTable(AllocIfTable(self, 0));
// Create array interface entries to populate once we can load system classes.
object_array_class->SetIfTable(AllocIfTable(self, 2));
DCHECK_EQ(GetArrayIfTable(), object_array_class->GetIfTable());
// Setup the primitive type classes.
CreatePrimitiveClass(self, Primitive::kPrimBoolean, ClassRoot::kPrimitiveBoolean);
CreatePrimitiveClass(self, Primitive::kPrimByte, ClassRoot::kPrimitiveByte);
CreatePrimitiveClass(self, Primitive::kPrimChar, ClassRoot::kPrimitiveChar);
CreatePrimitiveClass(self, Primitive::kPrimShort, ClassRoot::kPrimitiveShort);
CreatePrimitiveClass(self, Primitive::kPrimInt, ClassRoot::kPrimitiveInt);
CreatePrimitiveClass(self, Primitive::kPrimLong, ClassRoot::kPrimitiveLong);
CreatePrimitiveClass(self, Primitive::kPrimFloat, ClassRoot::kPrimitiveFloat);
CreatePrimitiveClass(self, Primitive::kPrimDouble, ClassRoot::kPrimitiveDouble);
CreatePrimitiveClass(self, Primitive::kPrimVoid, ClassRoot::kPrimitiveVoid);
// Allocate the primitive array classes. We need only the native pointer
// array at this point (int[] or long[], depending on architecture) but
// we shall perform the same setup steps for all primitive array classes.
AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveBoolean, ClassRoot::kBooleanArrayClass);
AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveByte, ClassRoot::kByteArrayClass);
AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveChar, ClassRoot::kCharArrayClass);
AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveShort, ClassRoot::kShortArrayClass);
AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveInt, ClassRoot::kIntArrayClass);
AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveLong, ClassRoot::kLongArrayClass);
AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveFloat, ClassRoot::kFloatArrayClass);
AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveDouble, ClassRoot::kDoubleArrayClass);
// now that these are registered, we can use AllocClass() and AllocObjectArray
// Set up DexCache. This cannot be done later since AppendToBootClassPath calls AllocDexCache.
Handle<mirror::Class> java_lang_DexCache(hs.NewHandle(
AllocClass(self, java_lang_Class.Get(), mirror::DexCache::ClassSize(image_pointer_size_))));
SetClassRoot(ClassRoot::kJavaLangDexCache, java_lang_DexCache.Get());
java_lang_DexCache->SetDexCacheClass();
java_lang_DexCache->SetObjectSize(mirror::DexCache::InstanceSize());
mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kResolved, self);
// Setup dalvik.system.ClassExt
Handle<mirror::Class> dalvik_system_ClassExt(hs.NewHandle(
AllocClass(self, java_lang_Class.Get(), mirror::ClassExt::ClassSize(image_pointer_size_))));
SetClassRoot(ClassRoot::kDalvikSystemClassExt, dalvik_system_ClassExt.Get());
mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kResolved, self);
// Set up array classes for string, field, method
Handle<mirror::Class> object_array_string(hs.NewHandle(
AllocClass(self, java_lang_Class.Get(),
mirror::ObjectArray<mirror::String>::ClassSize(image_pointer_size_))));
object_array_string->SetComponentType(java_lang_String.Get());
SetClassRoot(ClassRoot::kJavaLangStringArrayClass, object_array_string.Get());
LinearAlloc* linear_alloc = runtime->GetLinearAlloc();
// Create runtime resolution and imt conflict methods.
runtime->SetResolutionMethod(runtime->CreateResolutionMethod());
runtime->SetImtConflictMethod(runtime->CreateImtConflictMethod(linear_alloc));
runtime->SetImtUnimplementedMethod(runtime->CreateImtConflictMethod(linear_alloc));
// Setup boot_class_path_ and register class_path now that we can use AllocObjectArray to create
// DexCache instances. Needs to be after String, Field, Method arrays since AllocDexCache uses
// these roots.
if (boot_class_path.empty()) {
*error_msg = "Boot classpath is empty.";
return false;
}
for (auto& dex_file : boot_class_path) {
if (dex_file == nullptr) {
*error_msg = "Null dex file.";
return false;
}
AppendToBootClassPath(self, dex_file.get());
boot_dex_files_.push_back(std::move(dex_file));
}
// now we can use FindSystemClass
// Set up GenericJNI entrypoint. That is mainly a hack for common_compiler_test.h so that
// we do not need friend classes or a publicly exposed setter.
quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
if (!runtime->IsAotCompiler()) {
// We need to set up the generic trampolines since we don't have an image.
jni_dlsym_lookup_trampoline_ = GetJniDlsymLookupStub();
jni_dlsym_lookup_critical_trampoline_ = GetJniDlsymLookupCriticalStub();
quick_resolution_trampoline_ = GetQuickResolutionStub();
quick_imt_conflict_trampoline_ = GetQuickImtConflictStub();
quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
quick_to_interpreter_bridge_trampoline_ = GetQuickToInterpreterBridge();
}
// Object, String, ClassExt and DexCache need to be rerun through FindSystemClass to finish init
mirror::Class::SetStatus(java_lang_Object, ClassStatus::kNotReady, self);
CheckSystemClass(self, java_lang_Object, "Ljava/lang/Object;");
CHECK_EQ(java_lang_Object->GetObjectSize(), mirror::Object::InstanceSize());
mirror::Class::SetStatus(java_lang_String, ClassStatus::kNotReady, self);
CheckSystemClass(self, java_lang_String, "Ljava/lang/String;");
mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kNotReady, self);
CheckSystemClass(self, java_lang_DexCache, "Ljava/lang/DexCache;");
CHECK_EQ(java_lang_DexCache->GetObjectSize(), mirror::DexCache::InstanceSize());
mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kNotReady, self);
CheckSystemClass(self, dalvik_system_ClassExt, "Ldalvik/system/ClassExt;");
CHECK_EQ(dalvik_system_ClassExt->GetObjectSize(), mirror::ClassExt::InstanceSize());
// Run Class through FindSystemClass. This initializes the dex_cache_ fields and registers it
// in class_table_.
CheckSystemClass(self, java_lang_Class, "Ljava/lang/Class;");
// Setup core array classes, i.e. Object[], String[], Class[] and the primitive
// arrays - can't be done until Object has a vtable and component classes are loaded.
FinishCoreArrayClassSetup(ClassRoot::kObjectArrayClass);
FinishCoreArrayClassSetup(ClassRoot::kClassArrayClass);
FinishCoreArrayClassSetup(ClassRoot::kJavaLangStringArrayClass);
FinishCoreArrayClassSetup(ClassRoot::kBooleanArrayClass);
FinishCoreArrayClassSetup(ClassRoot::kByteArrayClass);
FinishCoreArrayClassSetup(ClassRoot::kCharArrayClass);
FinishCoreArrayClassSetup(ClassRoot::kShortArrayClass);
FinishCoreArrayClassSetup(ClassRoot::kIntArrayClass);
FinishCoreArrayClassSetup(ClassRoot::kLongArrayClass);
FinishCoreArrayClassSetup(ClassRoot::kFloatArrayClass);
FinishCoreArrayClassSetup(ClassRoot::kDoubleArrayClass);
// Setup the single, global copy of "iftable".
auto java_lang_Cloneable = hs.NewHandle(FindSystemClass(self, "Ljava/lang/Cloneable;"));
CHECK(java_lang_Cloneable != nullptr);
auto java_io_Serializable = hs.NewHandle(FindSystemClass(self, "Ljava/io/Serializable;"));
CHECK(java_io_Serializable != nullptr);
// We assume that Cloneable/Serializable don't have superinterfaces -- normally we'd have to
// crawl up and explicitly list all of the supers as well.
object_array_class->GetIfTable()->SetInterface(0, java_lang_Cloneable.Get());
object_array_class->GetIfTable()->SetInterface(1, java_io_Serializable.Get());
// Check Class[] and Object[]'s interfaces. GetDirectInterface may cause thread suspension.
CHECK_EQ(java_lang_Cloneable.Get(),
mirror::Class::GetDirectInterface(self, class_array_class.Get(), 0));
CHECK_EQ(java_io_Serializable.Get(),
mirror::Class::GetDirectInterface(self, class_array_class.Get(), 1));
CHECK_EQ(java_lang_Cloneable.Get(),
mirror::Class::GetDirectInterface(self, object_array_class.Get(), 0));
CHECK_EQ(java_io_Serializable.Get(),
mirror::Class::GetDirectInterface(self, object_array_class.Get(), 1));
CHECK_EQ(object_array_string.Get(),
FindSystemClass(self, GetClassRootDescriptor(ClassRoot::kJavaLangStringArrayClass)));
// End of special init trickery, all subsequent classes may be loaded via FindSystemClass.
// Create java.lang.reflect.Proxy root.
SetClassRoot(ClassRoot::kJavaLangReflectProxy,
FindSystemClass(self, "Ljava/lang/reflect/Proxy;"));
// Create java.lang.reflect.Field.class root.
ObjPtr<mirror::Class> class_root = FindSystemClass(self, "Ljava/lang/reflect/Field;");
CHECK(class_root != nullptr);
SetClassRoot(ClassRoot::kJavaLangReflectField, class_root);
// Create java.lang.reflect.Field array root.
class_root = FindSystemClass(self, "[Ljava/lang/reflect/Field;");
CHECK(class_root != nullptr);
SetClassRoot(ClassRoot::kJavaLangReflectFieldArrayClass, class_root);
// Create java.lang.reflect.Constructor.class root and array root.
class_root = FindSystemClass(self, "Ljava/lang/reflect/Constructor;");
CHECK(class_root != nullptr);
SetClassRoot(ClassRoot::kJavaLangReflectConstructor, class_root);
class_root = FindSystemClass(self, "[Ljava/lang/reflect/Constructor;");
CHECK(class_root != nullptr);
SetClassRoot(ClassRoot::kJavaLangReflectConstructorArrayClass, class_root);
// Create java.lang.reflect.Method.class root and array root.
class_root = FindSystemClass(self, "Ljava/lang/reflect/Method;");
CHECK(class_root != nullptr);
SetClassRoot(ClassRoot::kJavaLangReflectMethod, class_root);
class_root = FindSystemClass(self, "[Ljava/lang/reflect/Method;");
CHECK(class_root != nullptr);
SetClassRoot(ClassRoot::kJavaLangReflectMethodArrayClass, class_root);
// Create java.lang.invoke.CallSite.class root
class_root = FindSystemClass(self, "Ljava/lang/invoke/CallSite;");
CHECK(class_root != nullptr);
SetClassRoot(ClassRoot::kJavaLangInvokeCallSite, class_root);
// Create java.lang.invoke.MethodType.class root
class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodType;");
CHECK(class_root != nullptr);
SetClassRoot(ClassRoot::kJavaLangInvokeMethodType, class_root);
// Create java.lang.invoke.MethodHandleImpl.class root
class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandleImpl;");
CHECK(class_root != nullptr);
SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandleImpl, class_root);
SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandle, class_root->GetSuperClass());
// Create java.lang.invoke.MethodHandles.Lookup.class root
class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandles$Lookup;");
CHECK(class_root != nullptr);
SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandlesLookup, class_root);
// Create java.lang.invoke.VarHandle.class root
class_root = FindSystemClass(self, "Ljava/lang/invoke/VarHandle;");
CHECK(class_root != nullptr);
SetClassRoot(ClassRoot::kJavaLangInvokeVarHandle, class_root);
// Create java.lang.invoke.FieldVarHandle.class root
class_root = FindSystemClass(self, "Ljava/lang/invoke/FieldVarHandle;");
CHECK(class_root != nullptr);
SetClassRoot(ClassRoot::kJavaLangInvokeFieldVarHandle, class_root);
// Create java.lang.invoke.ArrayElementVarHandle.class root
class_root = FindSystemClass(self, "Ljava/lang/invoke/ArrayElementVarHandle;");
CHECK(class_root != nullptr);
SetClassRoot(ClassRoot::kJavaLangInvokeArrayElementVarHandle, class_root);
// Create java.lang.invoke.ByteArrayViewVarHandle.class root
class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteArrayViewVarHandle;");
CHECK(class_root != nullptr);
SetClassRoot(ClassRoot::kJavaLangInvokeByteArrayViewVarHandle, class_root);
// Create java.lang.invoke.ByteBufferViewVarHandle.class root
class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteBufferViewVarHandle;");
CHECK(class_root != nullptr);
SetClassRoot(ClassRoot::kJavaLangInvokeByteBufferViewVarHandle, class_root);
class_root = FindSystemClass(self, "Ldalvik/system/EmulatedStackFrame;");
CHECK(class_root != nullptr);
SetClassRoot(ClassRoot::kDalvikSystemEmulatedStackFrame, class_root);
// java.lang.ref classes need to be specially flagged, but otherwise are normal classes
// finish initializing Reference class
mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kNotReady, self);
CheckSystemClass(self, java_lang_ref_Reference, "Ljava/lang/ref/Reference;");
CHECK_EQ(java_lang_ref_Reference->GetObjectSize(), mirror::Reference::InstanceSize());
CHECK_EQ(java_lang_ref_Reference->GetClassSize(),
mirror::Reference::ClassSize(image_pointer_size_));
class_root = FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;");
CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagFinalizerReference);
class_root = FindSystemClass(self, "Ljava/lang/ref/PhantomReference;");
CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagPhantomReference);
class_root = FindSystemClass(self, "Ljava/lang/ref/SoftReference;");
CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagSoftReference);
class_root = FindSystemClass(self, "Ljava/lang/ref/WeakReference;");
CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagWeakReference);
// Setup the ClassLoader, verifying the object_size_.
class_root = FindSystemClass(self, "Ljava/lang/ClassLoader;");
class_root->SetClassLoaderClass();
CHECK_EQ(class_root->GetObjectSize(), mirror::ClassLoader::InstanceSize());
SetClassRoot(ClassRoot::kJavaLangClassLoader, class_root);
// Set up java.lang.Throwable, java.lang.ClassNotFoundException, and
// java.lang.StackTraceElement as a convenience.
SetClassRoot(ClassRoot::kJavaLangThrowable, FindSystemClass(self, "Ljava/lang/Throwable;"));
SetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
FindSystemClass(self, "Ljava/lang/ClassNotFoundException;"));
SetClassRoot(ClassRoot::kJavaLangStackTraceElement,
FindSystemClass(self, "Ljava/lang/StackTraceElement;"));
SetClassRoot(ClassRoot::kJavaLangStackTraceElementArrayClass,
FindSystemClass(self, "[Ljava/lang/StackTraceElement;"));
SetClassRoot(ClassRoot::kJavaLangClassLoaderArrayClass,
FindSystemClass(self, "[Ljava/lang/ClassLoader;"));
// Create conflict tables that depend on the class linker.
runtime->FixupConflictTables();
FinishInit(self);
VLOG(startup) << "ClassLinker::InitFromCompiler exiting";
return true;
}
static void CreateStringInitBindings(Thread* self, ClassLinker* class_linker)
REQUIRES_SHARED(Locks::mutator_lock_) {
// Find String.<init> -> StringFactory bindings.
ObjPtr<mirror::Class> string_factory_class =
class_linker->FindSystemClass(self, "Ljava/lang/StringFactory;");
CHECK(string_factory_class != nullptr);
ObjPtr<mirror::Class> string_class = GetClassRoot<mirror::String>(class_linker);
WellKnownClasses::InitStringInit(string_class, string_factory_class);
// Update the primordial thread.
self->InitStringEntryPoints();
}
void ClassLinker::FinishInit(Thread* self) {
VLOG(startup) << "ClassLinker::FinishInit entering";
CreateStringInitBindings(self, this);
// Let the heap know some key offsets into java.lang.ref instances.
// Note: we hard-code the field indexes here rather than using FindInstanceField,
// as the types of the fields can't be resolved prior to the runtime being
// fully initialized.
StackHandleScope<3> hs(self);
Handle<mirror::Class> java_lang_ref_Reference =
hs.NewHandle(GetClassRoot<mirror::Reference>(this));
Handle<mirror::Class> java_lang_ref_FinalizerReference =
hs.NewHandle(FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;"));
ArtField* pendingNext = java_lang_ref_Reference->GetInstanceField(0);
CHECK_STREQ(pendingNext->GetName(), "pendingNext");
CHECK_STREQ(pendingNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");
ArtField* queue = java_lang_ref_Reference->GetInstanceField(1);
CHECK_STREQ(queue->GetName(), "queue");
CHECK_STREQ(queue->GetTypeDescriptor(), "Ljava/lang/ref/ReferenceQueue;");
ArtField* queueNext = java_lang_ref_Reference->GetInstanceField(2);
CHECK_STREQ(queueNext->GetName(), "queueNext");
CHECK_STREQ(queueNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");
ArtField* referent = java_lang_ref_Reference->GetInstanceField(3);
CHECK_STREQ(referent->GetName(), "referent");
CHECK_STREQ(referent->GetTypeDescriptor(), "Ljava/lang/Object;");
ArtField* zombie = java_lang_ref_FinalizerReference->GetInstanceField(2);
CHECK_STREQ(zombie->GetName(), "zombie");
CHECK_STREQ(zombie->GetTypeDescriptor(), "Ljava/lang/Object;");
// ensure all class_roots_ are initialized
for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
ClassRoot class_root = static_cast<ClassRoot>(i);
ObjPtr<mirror::Class> klass = GetClassRoot(class_root);
CHECK(klass != nullptr);
DCHECK(klass->IsArrayClass() || klass->IsPrimitive() || klass->GetDexCache() != nullptr);
// Note: SetClassRoot does additional validation.
// If possible, add new checks there to catch errors early.
}
CHECK(GetArrayIfTable() != nullptr);
// Disable the slow paths in FindClass and CreatePrimitiveClass now
// that Object, Class, and Object[] are set up.
init_done_ = true;
// Under sanitization, the small carve-out to handle stack overflow might not be enough to
// initialize the StackOverflowError class (as it might require running the verifier). Instead,
// ensure that the class will be initialized.
if (kMemoryToolIsAvailable && !Runtime::Current()->IsAotCompiler()) {
verifier::ClassVerifier::Init(this); // Need to prepare the verifier.
ObjPtr<mirror::Class> soe_klass = FindSystemClass(self, "Ljava/lang/StackOverflowError;");
if (soe_klass == nullptr || !EnsureInitialized(self, hs.NewHandle(soe_klass), true, true)) {
// Strange, but don't crash.
LOG(WARNING) << "Could not prepare StackOverflowError.";
self->ClearException();
}
}
VLOG(startup) << "ClassLinker::FinishInit exiting";
}
void ClassLinker::RunRootClinits(Thread* self) {
for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); ++i) {
ObjPtr<mirror::Class> c = GetClassRoot(ClassRoot(i), this);
if (!c->IsArrayClass() && !c->IsPrimitive()) {
StackHandleScope<1> hs(self);
Handle<mirror::Class> h_class(hs.NewHandle(c));
if (!EnsureInitialized(self, h_class, true, true)) {
LOG(FATAL) << "Exception when initializing " << h_class->PrettyClass()
<< ": " << self->GetException()->Dump();
}
} else {
DCHECK(c->IsInitialized());
}
}
}
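// Scratch data for the debug-build check in InitFromBootImage verifying that no
// method in a secondary boot image still uses another image's trampolines as its
// entrypoint.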
struct TrampolineCheckData {
const void* quick_resolution_trampoline;
const void* quick_imt_conflict_trampoline;
const void* quick_generic_jni_trampoline;
const void* quick_to_interpreter_bridge_trampoline;
PointerSize pointer_size;
ArtMethod* m;
bool error;
};
bool ClassLinker::InitFromBootImage(std::string* error_msg) {
VLOG(startup) << __FUNCTION__ << " entering";
CHECK(!init_done_);
Runtime* const runtime = Runtime::Current();
Thread* const self = Thread::Current();
gc::Heap* const heap = runtime->GetHeap();
std::vector<gc::space::ImageSpace*> spaces = heap->GetBootImageSpaces();
CHECK(!spaces.empty());
const ImageHeader& image_header = spaces[0]->GetImageHeader();
uint32_t pointer_size_unchecked = image_header.GetPointerSizeUnchecked();
if (!ValidPointerSize(pointer_size_unchecked)) {
*error_msg = StringPrintf("Invalid image pointer size: %u", pointer_size_unchecked);
return false;
}
image_pointer_size_ = image_header.GetPointerSize();
if (!runtime->IsAotCompiler()) {
// Only the AOT compiler supports having an image with a different pointer size than the
// runtime. This happens on the host for compiling 32-bit tests since we use a 64-bit
// libart compiler. We may also use 32-bit dex2oat on a system with 64-bit apps.
if (image_pointer_size_ != kRuntimePointerSize) {
*error_msg = StringPrintf("Runtime must use current image pointer size: %zu vs %zu",
static_cast<size_t>(image_pointer_size_),
sizeof(void*));
return false;
}
}
DCHECK(!runtime->HasResolutionMethod());
runtime->SetResolutionMethod(image_header.GetImageMethod(ImageHeader::kResolutionMethod));
runtime->SetImtConflictMethod(image_header.GetImageMethod(ImageHeader::kImtConflictMethod));
runtime->SetImtUnimplementedMethod(
image_header.GetImageMethod(ImageHeader::kImtUnimplementedMethod));
runtime->SetCalleeSaveMethod(
image_header.GetImageMethod(ImageHeader::kSaveAllCalleeSavesMethod),
CalleeSaveType::kSaveAllCalleeSaves);
runtime->SetCalleeSaveMethod(
image_header.GetImageMethod(ImageHeader::kSaveRefsOnlyMethod),
CalleeSaveType::kSaveRefsOnly);
runtime->SetCalleeSaveMethod(
image_header.GetImageMethod(ImageHeader::kSaveRefsAndArgsMethod),
CalleeSaveType::kSaveRefsAndArgs);
runtime->SetCalleeSaveMethod(
image_header.GetImageMethod(ImageHeader::kSaveEverythingMethod),
CalleeSaveType::kSaveEverything);
runtime->SetCalleeSaveMethod(
image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForClinit),
CalleeSaveType::kSaveEverythingForClinit);
runtime->SetCalleeSaveMethod(
image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForSuspendCheck),
CalleeSaveType::kSaveEverythingForSuspendCheck);
std::vector<const OatFile*> oat_files =
runtime->GetOatFileManager().RegisterImageOatFiles(spaces);
DCHECK(!oat_files.empty());
const OatHeader& default_oat_header = oat_files[0]->GetOatHeader();
jni_dlsym_lookup_trampoline_ = default_oat_header.GetJniDlsymLookupTrampoline();
jni_dlsym_lookup_critical_trampoline_ = default_oat_header.GetJniDlsymLookupCriticalTrampoline();
quick_resolution_trampoline_ = default_oat_header.GetQuickResolutionTrampoline();
quick_imt_conflict_trampoline_ = default_oat_header.GetQuickImtConflictTrampoline();
quick_generic_jni_trampoline_ = default_oat_header.GetQuickGenericJniTrampoline();
quick_to_interpreter_bridge_trampoline_ = default_oat_header.GetQuickToInterpreterBridge();
if (kIsDebugBuild) {
// Check that the other images use the same trampoline.
for (size_t i = 1; i < oat_files.size(); ++i) {
const OatHeader& ith_oat_header = oat_files[i]->GetOatHeader();
const void* ith_jni_dlsym_lookup_trampoline =
ith_oat_header.GetJniDlsymLookupTrampoline();
const void* ith_jni_dlsym_lookup_critical_trampoline =
ith_oat_header.GetJniDlsymLookupCriticalTrampoline();
const void* ith_quick_resolution_trampoline =
ith_oat_header.GetQuickResolutionTrampoline();
const void* ith_quick_imt_conflict_trampoline =
ith_oat_header.GetQuickImtConflictTrampoline();
const void* ith_quick_generic_jni_trampoline =
ith_oat_header.GetQuickGenericJniTrampoline();
const void* ith_quick_to_interpreter_bridge_trampoline =
ith_oat_header.GetQuickToInterpreterBridge();
if (ith_jni_dlsym_lookup_trampoline != jni_dlsym_lookup_trampoline_ ||
ith_jni_dlsym_lookup_critical_trampoline != jni_dlsym_lookup_critical_trampoline_ ||
ith_quick_resolution_trampoline != quick_resolution_trampoline_ ||
ith_quick_imt_conflict_trampoline != quick_imt_conflict_trampoline_ ||
ith_quick_generic_jni_trampoline != quick_generic_jni_trampoline_ ||
ith_quick_to_interpreter_bridge_trampoline != quick_to_interpreter_bridge_trampoline_) {
// Make sure that all methods in this image do not contain those trampolines as
// entrypoints. Otherwise the class-linker won't be able to work with a single set.
TrampolineCheckData data;
data.error = false;
data.pointer_size = GetImagePointerSize();
data.quick_resolution_trampoline = ith_quick_resolution_trampoline;
data.quick_imt_conflict_trampoline = ith_quick_imt_conflict_trampoline;
data.quick_generic_jni_trampoline = ith_quick_generic_jni_trampoline;
data.quick_to_interpreter_bridge_trampoline = ith_quick_to_interpreter_bridge_trampoline;
ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
if (obj->IsClass()) {
ObjPtr<mirror::Class> klass = obj->AsClass();
for (ArtMethod& m : klass->GetMethods(data.pointer_size)) {
const void* entrypoint =
m.GetEntryPointFromQuickCompiledCodePtrSize(data.pointer_size);
if (entrypoint == data.quick_resolution_trampoline ||
entrypoint == data.quick_imt_conflict_trampoline ||
entrypoint == data.quick_generic_jni_trampoline ||
entrypoint == data.quick_to_interpreter_bridge_trampoline) {
data.m = &m;
data.error = true;
return;
}
}
}
};
spaces[i]->GetLiveBitmap()->Walk(visitor);
if (data.error) {
ArtMethod* m = data.m;
LOG(ERROR) << "Found a broken ArtMethod: " << ArtMethod::PrettyMethod(m);
*error_msg = "Found an ArtMethod with a bad entrypoint";
return false;
}
}
}
}
class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
ObjPtr<mirror::ObjectArray<mirror::Class>>::DownCast(
image_header.GetImageRoot(ImageHeader::kClassRoots)));
DCHECK_EQ(GetClassRoot<mirror::Class>(this)->GetClassFlags(), mirror::kClassFlagClass);
DCHECK_EQ(GetClassRoot<mirror::Object>(this)->GetObjectSize(), sizeof(mirror::Object));
ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects =
ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
image_header.GetImageRoot(ImageHeader::kBootImageLiveObjects));
runtime->SetSentinel(boot_image_live_objects->Get(ImageHeader::kClearedJniWeakSentinel));
DCHECK(runtime->GetSentinel().Read()->GetClass() == GetClassRoot<mirror::Object>(this));
for (size_t i = 0u, size = spaces.size(); i != size; ++i) {
// Boot class loader, use a null handle.
std::vector<std::unique_ptr<const DexFile>> dex_files;
if (!AddImageSpace(spaces[i],
ScopedNullHandle<mirror::ClassLoader>(),
/*out*/&dex_files,
error_msg)) {
return false;
}
// Append opened dex files at the end.
boot_dex_files_.insert(boot_dex_files_.end(),
std::make_move_iterator(dex_files.begin()),
std::make_move_iterator(dex_files.end()));
}
for (const std::unique_ptr<const DexFile>& dex_file : boot_dex_files_) {
OatDexFile::MadviseDexFile(*dex_file, MadviseState::kMadviseStateAtLoad);
}
FinishInit(self);
VLOG(startup) << __FUNCTION__ << " exiting";
return true;
}
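// Appends the given dex files to the boot class path and takes ownership of them.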
void ClassLinker::AddExtraBootDexFiles(
Thread* self,
std::vector<std::unique_ptr<const DexFile>>&& additional_dex_files) {
for (std::unique_ptr<const DexFile>& dex_file : additional_dex_files) {
AppendToBootClassPath(self, dex_file.get());
boot_dex_files_.push_back(std::move(dex_file));
}
}
bool ClassLinker::IsBootClassLoader(ScopedObjectAccessAlreadyRunnable& soa,
ObjPtr<mirror::ClassLoader> class_loader) {
return class_loader == nullptr ||
soa.Decode<mirror::Class>(WellKnownClasses::java_lang_BootClassLoader) ==
class_loader->GetClass();
}
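// Visitor that informs the Class Hierarchy Analysis (CHA) about classes that are about to be
// unloaded, so that their single-implementation information can be reset.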
class CHAOnDeleteUpdateClassVisitor {
public:
explicit CHAOnDeleteUpdateClassVisitor(LinearAlloc* alloc)
: allocator_(alloc), cha_(Runtime::Current()->GetClassLinker()->GetClassHierarchyAnalysis()),
pointer_size_(Runtime::Current()->GetClassLinker()->GetImagePointerSize()),
self_(Thread::Current()) {}
bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
// This class is going to be unloaded. Tell CHA about it.
cha_->ResetSingleImplementationInHierarchy(klass, allocator_, pointer_size_);
return true;
}
private:
const LinearAlloc* allocator_;
const ClassHierarchyAnalysis* cha_;
const PointerSize pointer_size_;
const Thread* self_;
};
/*
* A visitor used to verify that all references to strings interned in an AppImage have been
* properly recorded in the interned references list. Only used in debug builds.
*/
class CountInternedStringReferencesVisitor {
public:
CountInternedStringReferencesVisitor(const gc::space::ImageSpace& space,
const InternTable::UnorderedSet& image_interns)
: space_(space),
image_interns_(image_interns),
count_(0u) {}
void TestObject(ObjPtr<mirror::Object> referred_obj) const
REQUIRES_SHARED(Locks::mutator_lock_) {
if (referred_obj != nullptr &&
space_.HasAddress(referred_obj.Ptr()) &&
referred_obj->IsString()) {
ObjPtr<mirror::String> referred_str = referred_obj->AsString();
auto it = image_interns_.find(GcRoot<mirror::String>(referred_str));
if (it != image_interns_.end() && it->Read() == referred_str) {
++count_;
}
}
}
void VisitRootIfNonNull(
mirror::CompressedReference<mirror::Object>* root) const
REQUIRES_SHARED(Locks::mutator_lock_) {
if (!root->IsNull()) {
VisitRoot(root);
}
}
void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
REQUIRES_SHARED(Locks::mutator_lock_) {
TestObject(root->AsMirrorPtr());
}
// Visit instance and static fields.
void operator()(ObjPtr<mirror::Object> obj,
MemberOffset offset,
bool is_static ATTRIBUTE_UNUSED) const
REQUIRES_SHARED(Locks::mutator_lock_) {
// References within image or across images don't need a read barrier.
ObjPtr<mirror::Object> referred_obj =
obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
TestObject(referred_obj);
}
void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
ObjPtr<mirror::Reference> ref) const
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
operator()(ref, mirror::Reference::ReferentOffset(), /*is_static=*/ false);
}
size_t GetCount() const {
return count_;
}
private:
const gc::space::ImageSpace& space_;
const InternTable::UnorderedSet& image_interns_;
mutable size_t count_; // Modified from the `const` callbacks.
};
/*
* This function counts references to strings interned in the AppImage.
* This is used in debug builds to check against the number of recorded references.
*/
size_t CountInternedStringReferences(gc::space::ImageSpace& space,
const InternTable::UnorderedSet& image_interns)
REQUIRES_SHARED(Locks::mutator_lock_) {
const gc::accounting::ContinuousSpaceBitmap* bitmap = space.GetMarkBitmap();
const ImageHeader& image_header = space.GetImageHeader();
const uint8_t* target_base = space.GetMemMap()->Begin();
const ImageSection& objects_section = image_header.GetObjectsSection();
auto objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
auto objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End());
CountInternedStringReferencesVisitor visitor(space, image_interns);
bitmap->VisitMarkedRange(objects_begin,
objects_end,
[&space, &visitor](mirror::Object* obj)
REQUIRES_SHARED(Locks::mutator_lock_) {
if (space.HasAddress(obj)) {
if (obj->IsDexCache()) {
obj->VisitReferences</* kVisitNativeRoots= */ true,
kVerifyNone,
kWithoutReadBarrier>(visitor, visitor);
} else {
// Don't visit native roots for non-dex-cache objects, as they can't contain
// native references to strings. This is verified during compilation
// by ImageWriter::VerifyNativeGCRootInvariants.
obj->VisitReferences</* kVisitNativeRoots= */ false,
kVerifyNone,
kWithoutReadBarrier>(visitor, visitor);
}
}
});
return visitor.GetCount();
}
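// Iterates over the string reference offsets recorded in the app image, applies the visitor to
// each referenced string, and writes the returned string back if the visitor remapped it.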
template <typename Visitor>
static void VisitInternedStringReferences(
gc::space::ImageSpace* space,
const Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
const uint8_t* target_base = space->Begin();
const ImageSection& sro_section =
space->GetImageHeader().GetImageStringReferenceOffsetsSection();
const size_t num_string_offsets = sro_section.Size() / sizeof(AppImageReferenceOffsetInfo);
VLOG(image)
<< "ClassLinker:AppImage:InternStrings:imageStringReferenceOffsetCount = "
<< num_string_offsets;
const auto* sro_base =
reinterpret_cast<const AppImageReferenceOffsetInfo*>(target_base + sro_section.Offset());
for (size_t offset_index = 0; offset_index < num_string_offsets; ++offset_index) {
uint32_t base_offset = sro_base[offset_index].first;
uint32_t raw_member_offset = sro_base[offset_index].second;
DCHECK_ALIGNED(base_offset, 2);
DCHECK_ALIGNED(raw_member_offset, 2);
ObjPtr<mirror::Object> obj_ptr =
reinterpret_cast<mirror::Object*>(space->Begin() + base_offset);
MemberOffset member_offset(raw_member_offset);
ObjPtr<mirror::String> referred_string =
obj_ptr->GetFieldObject<mirror::String,
kVerifyNone,
kWithoutReadBarrier,
/* kIsVolatile= */ false>(member_offset);
DCHECK(referred_string != nullptr);
ObjPtr<mirror::String> visited = visitor(referred_string);
if (visited != referred_string) {
obj_ptr->SetFieldObject</* kTransactionActive= */ false,
/* kCheckTransaction= */ false,
kVerifyNone,
/* kIsVolatile= */ false>(member_offset, visited);
}
}
}
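// Debug check: every recorded string reference must resolve to a string in the image's interned
// string set, and the number of recorded references must match the number found by walking the
// image objects.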
static void VerifyInternedStringReferences(gc::space::ImageSpace* space)
REQUIRES_SHARED(Locks::mutator_lock_) {
InternTable::UnorderedSet image_interns;
const ImageSection& section = space->GetImageHeader().GetInternedStringsSection();
if (section.Size() > 0) {
size_t read_count;
const uint8_t* data = space->Begin() + section.Offset();
InternTable::UnorderedSet image_set(data, /*make_copy_of_data=*/ false, &read_count);
image_set.swap(image_interns);
}
size_t num_recorded_refs = 0u;
VisitInternedStringReferences(
space,
[&image_interns, &num_recorded_refs](ObjPtr<mirror::String> str)
REQUIRES_SHARED(Locks::mutator_lock_) {
auto it = image_interns.find(GcRoot<mirror::String>(str));
CHECK(it != image_interns.end());
CHECK(it->Read() == str);
++num_recorded_refs;
return str;
});
size_t num_found_refs = CountInternedStringReferences(*space, image_interns);
CHECK_EQ(num_recorded_refs, num_found_refs);
}
// new_class_set is the set of classes that were read from the class table section in the image.
// If there was no class table section, it is null.
// Note: using a class here to avoid having to make ClassLinker internals public.
class AppImageLoadingHelper {
public:
static void Update(
ClassLinker* class_linker,
gc::space::ImageSpace* space,
Handle<mirror::ClassLoader> class_loader,
Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
REQUIRES(!Locks::dex_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
static void HandleAppImageStrings(gc::space::ImageSpace* space)
REQUIRES_SHARED(Locks::mutator_lock_);
};
void AppImageLoadingHelper::Update(
ClassLinker* class_linker,
gc::space::ImageSpace* space,
Handle<mirror::ClassLoader> class_loader,
Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
REQUIRES(!Locks::dex_lock_)
REQUIRES_SHARED(Locks::mutator_lock_) {
ScopedTrace app_image_timing("AppImage:Updating");
if (kIsDebugBuild && ClassLinker::kAppImageMayContainStrings) {
// In debug build, verify the string references before applying
// the Runtime::LoadAppImageStartupCache() option.
VerifyInternedStringReferences(space);
}
Thread* const self = Thread::Current();
Runtime* const runtime = Runtime::Current();
gc::Heap* const heap = runtime->GetHeap();
const ImageHeader& header = space->GetImageHeader();
{
// Register dex caches with the class loader.
WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
const DexFile* const dex_file = dex_cache->GetDexFile();
{
WriterMutexLock mu2(self, *Locks::dex_lock_);
CHECK(class_linker->FindDexCacheDataLocked(*dex_file) == nullptr);
class_linker->RegisterDexFileLocked(*dex_file, dex_cache, class_loader.Get());
}
}
}
if (ClassLinker::kAppImageMayContainStrings) {
HandleAppImageStrings(space);
}
if (kVerifyArtMethodDeclaringClasses) {
ScopedTrace timing("AppImage:VerifyDeclaringClasses");
ReaderMutexLock rmu(self, *Locks::heap_bitmap_lock_);
gc::accounting::HeapBitmap* live_bitmap = heap->GetLiveBitmap();
header.VisitPackedArtMethods([&](ArtMethod& method)
REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
ObjPtr<mirror::Class> klass = method.GetDeclaringClassUnchecked();
if (klass != nullptr) {
CHECK(live_bitmap->Test(klass.Ptr())) << "Image method has unmarked declaring class";
}
}, space->Begin(), kRuntimePointerSize);
}
}
void AppImageLoadingHelper::HandleAppImageStrings(gc::space::ImageSpace* space) {
// Iterate over the string reference offsets stored in the image and intern
// the strings they point to.
ScopedTrace timing("AppImage:InternString");
Runtime* const runtime = Runtime::Current();
InternTable* const intern_table = runtime->GetInternTable();
// Add the image's interned strings to the runtime intern table, removing any conflicts. For
// conflicts, store the mapping to the already-interned string in a map for faster lookup.
// TODO: Optimize with a bitmap or bloom filter
SafeMap<mirror::String*, mirror::String*> intern_remap;
auto func = [&](InternTable::UnorderedSet& interns)
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(Locks::intern_table_lock_) {
const size_t non_boot_image_strings = intern_table->CountInterns(
/*visit_boot_images=*/false,
/*visit_non_boot_images=*/true);
VLOG(image) << "AppImage:stringsInInternTableSize = " << interns.size();
VLOG(image) << "AppImage:nonBootImageInternStrings = " << non_boot_image_strings;
// Visit the smaller of the two sets to compute the intersection.
if (interns.size() < non_boot_image_strings) {
for (auto it = interns.begin(); it != interns.end(); ) {
ObjPtr<mirror::String> string = it->Read();
ObjPtr<mirror::String> existing = intern_table->LookupWeakLocked(string);
if (existing == nullptr) {
existing = intern_table->LookupStrongLocked(string);
}
if (existing != nullptr) {
intern_remap.Put(string.Ptr(), existing.Ptr());
it = interns.erase(it);
} else {
++it;
}
}
} else {
intern_table->VisitInterns([&](const GcRoot<mirror::String>& root)
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(Locks::intern_table_lock_) {
auto it = interns.find(root);
if (it != interns.end()) {
ObjPtr<mirror::String> existing = root.Read();
intern_remap.Put(it->Read(), existing.Ptr());
it = interns.erase(it);
}
}, /*visit_boot_images=*/false, /*visit_non_boot_images=*/true);
}
// Consistency check: none of the remaining image strings should already be interned.
if (kIsDebugBuild) {
for (GcRoot<mirror::String>& root : interns) {
ObjPtr<mirror::String> string = root.Read();
CHECK(intern_table->LookupWeakLocked(string) == nullptr) << string->ToModifiedUtf8();
CHECK(intern_table->LookupStrongLocked(string) == nullptr) << string->ToModifiedUtf8();
}
}
};
intern_table->AddImageStringsToTable(space, func);
if (!intern_remap.empty()) {
VLOG(image) << "AppImage:conflictingInternStrings = " << intern_remap.size();
VisitInternedStringReferences(
space,
[&intern_remap](ObjPtr<mirror::String> str) REQUIRES_SHARED(Locks::mutator_lock_) {
auto it = intern_remap.find(str.Ptr());
if (it != intern_remap.end()) {
return ObjPtr<mirror::String>(it->second);
}
return str;
});
}
}
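// Opens the dex file with the given location from the oat file, verifying that the dex file
// checksum matches the one recorded in the oat file. Returns null and sets error_msg on failure.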
static std::unique_ptr<const DexFile> OpenOatDexFile(const OatFile* oat_file,
const char* location,
std::string* error_msg)
REQUIRES_SHARED(Locks::mutator_lock_) {
DCHECK(error_msg != nullptr);
std::unique_ptr<const DexFile> dex_file;
const OatDexFile* oat_dex_file = oat_file->GetOatDexFile(location, nullptr, error_msg);
if (oat_dex_file == nullptr) {
return std::unique_ptr<const DexFile>();
}
std::string inner_error_msg;
dex_file = oat_dex_file->OpenDexFile(&inner_error_msg);
if (dex_file == nullptr) {
*error_msg = StringPrintf("Failed to open dex file %s from within oat file %s error '%s'",
location,
oat_file->GetLocation().c_str(),
inner_error_msg.c_str());
return std::unique_ptr<const DexFile>();
}
if (dex_file->GetLocationChecksum() != oat_dex_file->GetDexFileLocationChecksum()) {
*error_msg = StringPrintf("Checksums do not match for %s: %x vs %x",
location,
dex_file->GetLocationChecksum(),
oat_dex_file->GetDexFileLocationChecksum());
return std::unique_ptr<const DexFile>();
}
return dex_file;
}
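// Opens the dex files referenced by the dex caches of the given image space and hooks the dex
// caches up to them. The opened dex files are returned through out_dex_files.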
bool ClassLinker::OpenImageDexFiles(gc::space::ImageSpace* space,
std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
std::string* error_msg) {
ScopedAssertNoThreadSuspension nts(__FUNCTION__);
const ImageHeader& header = space->GetImageHeader();
ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
DCHECK(dex_caches_object != nullptr);
ObjPtr<mirror::ObjectArray<mirror::DexCache>> dex_caches =
dex_caches_object->AsObjectArray<mirror::DexCache>();
const OatFile* oat_file = space->GetOatFile();
for (auto dex_cache : dex_caches->Iterate()) {
std::string dex_file_location(dex_cache->GetLocation()->ToModifiedUtf8());
std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
dex_file_location.c_str(),
error_msg);
if (dex_file == nullptr) {
return false;
}
dex_cache->SetDexFile(dex_file.get());
out_dex_files->push_back(std::move(dex_file));
}
return true;
}
// Helper class for ArtMethod checks when adding an image. Keeps all required functionality
// together and caches some intermediate results.
class ImageChecker final {
public:
static void CheckObjects(gc::Heap* heap, ClassLinker* class_linker)
REQUIRES_SHARED(Locks::mutator_lock_) {
ImageChecker ic(heap, class_linker);
auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
DCHECK(obj != nullptr);
CHECK(obj->GetClass() != nullptr) << "Null class in object " << obj;
CHECK(obj->GetClass()->GetClass() != nullptr) << "Null class class " << obj;
if (obj->IsClass()) {
auto klass = obj->AsClass();
for (ArtField& field : klass->GetIFields()) {
CHECK_EQ(field.GetDeclaringClass(), klass);
}
for (ArtField& field : klass->GetSFields()) {
CHECK_EQ(field.GetDeclaringClass(), klass);
}
const PointerSize pointer_size = ic.pointer_size_;
for (ArtMethod& m : klass->GetMethods(pointer_size)) {
ic.CheckArtMethod(&m, klass);
}
ObjPtr<mirror::PointerArray> vtable = klass->GetVTable();
if (vtable != nullptr) {
ic.CheckArtMethodPointerArray(vtable, nullptr);
}
if (klass->ShouldHaveImt()) {
ImTable* imt = klass->GetImt(pointer_size);
for (size_t i = 0; i < ImTable::kSize; ++i) {
ic.CheckArtMethod(imt->Get(i, pointer_size), nullptr);
}
}
if (klass->ShouldHaveEmbeddedVTable()) {
for (int32_t i = 0; i < klass->GetEmbeddedVTableLength(); ++i) {
ic.CheckArtMethod(klass->GetEmbeddedVTableEntry(i, pointer_size), nullptr);
}
}
ObjPtr<mirror::IfTable> iftable = klass->GetIfTable();
for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
if (iftable->GetMethodArrayCount(i) > 0) {
ic.CheckArtMethodPointerArray(iftable->GetMethodArray(i), nullptr);
}
}
}
};
heap->VisitObjects(visitor);
}
private:
ImageChecker(gc::Heap* heap, ClassLinker* class_linker)
: spaces_(heap->GetBootImageSpaces()),
pointer_size_(class_linker->GetImagePointerSize()) {
space_begin_.reserve(spaces_.size());
method_sections_.reserve(spaces_.size());
runtime_method_sections_.reserve(spaces_.size());
for (gc::space::ImageSpace* space : spaces_) {
space_begin_.push_back(space->Begin());
auto& header = space->GetImageHeader();
method_sections_.push_back(&header.GetMethodsSection());
runtime_method_sections_.push_back(&header.GetRuntimeMethodsSection());
}
}
void CheckArtMethod(ArtMethod* m, ObjPtr<mirror::Class> expected_class)
REQUIRES_SHARED(Locks::mutator_lock_) {
if (m->IsRuntimeMethod()) {
ObjPtr<mirror::Class> declaring_class = m->GetDeclaringClassUnchecked();
CHECK(declaring_class == nullptr) << declaring_class << " " << m->PrettyMethod();
} else if (m->IsCopied()) {
CHECK(m->GetDeclaringClass() != nullptr) << m->PrettyMethod();
} else if (expected_class != nullptr) {
CHECK_EQ(m->GetDeclaringClassUnchecked(), expected_class) << m->PrettyMethod();
}
if (!spaces_.empty()) {
bool contains = false;
for (size_t i = 0; !contains && i != space_begin_.size(); ++i) {
const size_t offset = reinterpret_cast<uint8_t*>(m) - space_begin_[i];
contains = method_sections_[i]->Contains(offset) ||
runtime_method_sections_[i]->Contains(offset);
}
CHECK(contains) << m << " not found";
}
}
void CheckArtMethodPointerArray(ObjPtr<mirror::PointerArray> arr,
ObjPtr<mirror::Class> expected_class)
REQUIRES_SHARED(Locks::mutator_lock_) {
CHECK(arr != nullptr);
for (int32_t j = 0; j < arr->GetLength(); ++j) {
auto* method = arr->GetElementPtrSize<ArtMethod*>(j, pointer_size_);
// expected_class == null means we are a dex cache.
if (expected_class != nullptr) {
CHECK(method != nullptr);
}
if (method != nullptr) {
CheckArtMethod(method, expected_class);
}
}
}
const std::vector<gc::space::ImageSpace*>& spaces_;
const PointerSize pointer_size_;
// Cached sections from the spaces.
std::vector<const uint8_t*> space_begin_;
std::vector<const ImageSection*> method_sections_;
std::vector<const ImageSection*> runtime_method_sections_;
};
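// Debug verification that all image methods with a non-boot-image declaring class can be looked
// up in the app class table, and that all direct interfaces of the app classes are resolved.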
static void VerifyAppImage(const ImageHeader& header,
const Handle<mirror::ClassLoader>& class_loader,
ClassTable* class_table,
gc::space::ImageSpace* space)
REQUIRES_SHARED(Locks::mutator_lock_) {
header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
ObjPtr<mirror::Class> klass = method.GetDeclaringClass();
if (klass != nullptr && !Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
CHECK_EQ(class_table->LookupByDescriptor(klass), klass)
<< mirror::Class::PrettyClass(klass);
}
}, space->Begin(), kRuntimePointerSize);
{
// Verify that all direct interfaces of classes in the class table are also resolved.
std::vector<ObjPtr<mirror::Class>> classes;
auto verify_direct_interfaces_in_table = [&](ObjPtr<mirror::Class> klass)
REQUIRES_SHARED(Locks::mutator_lock_) {
if (!klass->IsPrimitive() && klass->GetClassLoader() == class_loader.Get()) {
classes.push_back(klass);
}
return true;
};
class_table->Visit(verify_direct_interfaces_in_table);
Thread* self = Thread::Current();
for (ObjPtr<mirror::Class> klass : classes) {
for (uint32_t i = 0, num = klass->NumDirectInterfaces(); i != num; ++i) {
CHECK(klass->GetDirectInterface(self, klass, i) != nullptr)
<< klass->PrettyDescriptor() << " iface #" << i;
}
}
}
}
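// Adds an image space (boot image or app image) to the class linker: validates the image header
// and class roots against the runtime, opens the image's dex files, fixes up method entrypoints
// where required, and populates the class table of the corresponding class loader.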
bool ClassLinker::AddImageSpace(
gc::space::ImageSpace* space,
Handle<mirror::ClassLoader> class_loader,
std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
std::string* error_msg) {
DCHECK(out_dex_files != nullptr);
DCHECK(error_msg != nullptr);
const uint64_t start_time = NanoTime();
const bool app_image = class_loader != nullptr;
const ImageHeader& header = space->GetImageHeader();
ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
DCHECK(dex_caches_object != nullptr);
Runtime* const runtime = Runtime::Current();
gc::Heap* const heap = runtime->GetHeap();
Thread* const self = Thread::Current();
// Check that the image is what we are expecting.
if (image_pointer_size_ != space->GetImageHeader().GetPointerSize()) {
*error_msg = StringPrintf("Application image pointer size does not match runtime: %zu vs %zu",
static_cast<size_t>(space->GetImageHeader().GetPointerSize()),
image_pointer_size_);
return false;
}
size_t expected_image_roots = ImageHeader::NumberOfImageRoots(app_image);
if (static_cast<size_t>(header.GetImageRoots()->GetLength()) != expected_image_roots) {
*error_msg = StringPrintf("Expected %zu image roots but got %d",
expected_image_roots,
header.GetImageRoots()->GetLength());
return false;
}
StackHandleScope<3> hs(self);
Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches(
hs.NewHandle(dex_caches_object->AsObjectArray<mirror::DexCache>()));
Handle<mirror::ObjectArray<mirror::Class>> class_roots(hs.NewHandle(
header.GetImageRoot(ImageHeader::kClassRoots)->AsObjectArray<mirror::Class>()));
MutableHandle<mirror::ClassLoader> image_class_loader(hs.NewHandle(
app_image ? header.GetImageRoot(ImageHeader::kAppImageClassLoader)->AsClassLoader()
: nullptr));
DCHECK(class_roots != nullptr);
if (class_roots->GetLength() != static_cast<int32_t>(ClassRoot::kMax)) {
*error_msg = StringPrintf("Expected %d class roots but got %d",
class_roots->GetLength(),
static_cast<int32_t>(ClassRoot::kMax));
return false;
}
// Check against existing class roots to make sure they match the ones in the boot image.
ObjPtr<mirror::ObjectArray<mirror::Class>> existing_class_roots = GetClassRoots();
for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
if (class_roots->Get(i) != GetClassRoot(static_cast<ClassRoot>(i), existing_class_roots)) {
*error_msg = "App image class roots must have pointer equality with runtime ones.";
return false;
}
}
const OatFile* oat_file = space->GetOatFile();
if (oat_file->GetOatHeader().GetDexFileCount() !=
static_cast<uint32_t>(dex_caches->GetLength())) {
*error_msg = "Dex cache count and dex file count mismatch while trying to initialize from "
"image";
return false;
}
for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
std::string dex_file_location = dex_cache->GetLocation()->ToModifiedUtf8();
std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
dex_file_location.c_str(),
error_msg);
if (dex_file == nullptr) {
return false;
}
LinearAlloc* linear_alloc = GetOrCreateAllocatorForClassLoader(class_loader.Get());
DCHECK(linear_alloc != nullptr);
DCHECK_EQ(linear_alloc == Runtime::Current()->GetLinearAlloc(), !app_image);
{
// Native fields are all null. Initialize them and allocate native memory.
WriterMutexLock mu(self, *Locks::dex_lock_);
dex_cache->InitializeNativeFields(dex_file.get(), linear_alloc);
}
if (!app_image) {
// Register dex files, keep track of existing ones that are conflicts.
AppendToBootClassPath(dex_file.get(), dex_cache);
}
out_dex_files->push_back(std::move(dex_file));
}
if (app_image) {
ScopedObjectAccessUnchecked soa(Thread::Current());
ScopedAssertNoThreadSuspension sants("Checking app image", soa.Self());
if (IsBootClassLoader(soa, image_class_loader.Get())) {
*error_msg = "Unexpected BootClassLoader in app image";
return false;
}
}
if (kCheckImageObjects) {
if (!app_image) {
ImageChecker::CheckObjects(heap, this);
}
}
// Set entry point to interpreter if in InterpretOnly mode.
if (!runtime->IsAotCompiler() && runtime->GetInstrumentation()->InterpretOnly()) {
// Set image methods' entry point to interpreter.
header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
if (!method.IsRuntimeMethod()) {
DCHECK(method.GetDeclaringClass() != nullptr);
if (!method.IsNative() && !method.IsResolutionMethod()) {
method.SetEntryPointFromQuickCompiledCodePtrSize(GetQuickToInterpreterBridge(),
image_pointer_size_);
}
}
}, space->Begin(), image_pointer_size_);
}
if (!runtime->IsAotCompiler()) {
bool can_use_nterp = interpreter::CanRuntimeUseNterp();
header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
// In the image, the `data` pointer field of the ArtMethod contains the code
// item offset. Change this to the actual pointer to the code item.
if (method.HasCodeItem()) {
const dex::CodeItem* code_item = method.GetDexFile()->GetCodeItem(
reinterpret_cast32<uint32_t>(method.GetDataPtrSize(image_pointer_size_)));
method.SetCodeItem(code_item);
}
// If an image method's entry point points to the interpreter bridge, change it to the
// nterp entry point.
if (can_use_nterp) {
ChangeInterpreterBridgeToNterp(&method, this);
}
}, space->Begin(), image_pointer_size_);
}
if (runtime->IsVerificationSoftFail()) {
header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
if (!method.IsNative() && method.IsInvokable()) {
method.ClearSkipAccessChecks();
}
}, space->Begin(), image_pointer_size_);
}
ClassTable* class_table = nullptr;
{
WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
class_table = InsertClassTableForClassLoader(class_loader.Get());
}
// If we have a class table section, read it and use it for verification in
// UpdateAppImageClassLoadersAndDexCaches.
ClassTable::ClassSet temp_set;
const ImageSection& class_table_section = header.GetClassTableSection();
const bool added_class_table = class_table_section.Size() > 0u;
if (added_class_table) {
const uint64_t start_time2 = NanoTime();
size_t read_count = 0;
temp_set = ClassTable::ClassSet(space->Begin() + class_table_section.Offset(),
/*make_copy_of_data=*/ false,
&read_count);
VLOG(image) << "Adding class table classes took " << PrettyDuration(NanoTime() - start_time2);
}
if (app_image) {
AppImageLoadingHelper::Update(this, space, class_loader, dex_caches);
{
ScopedTrace trace("AppImage:UpdateClassLoaders");
// Update class loader and resolved strings. If added_class_table is false, the resolved
// strings were already forwarded in UpdateAppImageClassLoadersAndDexCaches.
ObjPtr<mirror::ClassLoader> loader(class_loader.Get());
for (const ClassTable::TableSlot& root : temp_set) {
// Note: We probably don't need the read barrier unless we copy the app image objects into
// the region space.
ObjPtr<mirror::Class> klass(root.Read());
// Do not update class loader for boot image classes where the app image
// class loader is only the initiating loader but not the defining loader.
// Avoid read barrier since we are comparing against null.
if (klass->GetClassLoader<kDefaultVerifyFlags, kWithoutReadBarrier>() != nullptr) {
klass->SetClassLoader(loader);
}
}
}
if (kBitstringSubtypeCheckEnabled) {
// Every class in the app image has initially SubtypeCheckInfo in the
// Uninitialized state.
//
// The SubtypeCheck invariants imply that a SubtypeCheckInfo is at least Initialized
// after class initialization is complete. The app image ClassStatus as-is
// are almost all ClassStatus::Initialized, and being in the
// SubtypeCheckInfo::kUninitialized state is violating that invariant.
//
// Force every app image class's SubtypeCheck to be at least kInitialized.
//
// See also ImageWriter::FixupClass.
ScopedTrace trace("AppImage:RecacluateSubtypeCheckBitstrings");
MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
for (const ClassTable::TableSlot& root : temp_set) {
SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(root.Read());
}
}
}
if (!oat_file->GetBssGcRoots().empty()) {
// Insert oat file to class table for visiting .bss GC roots.
class_table->InsertOatFile(oat_file);
}
if (added_class_table) {
WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
class_table->AddClassSet(std::move(temp_set));
}
if (kIsDebugBuild && app_image) {
// This verification needs to happen after the classes have been added to the class loader,
// since it ensures the classes are in the class table.
ScopedTrace trace("AppImage:Verify");
VerifyAppImage(header, class_loader, class_table, space);
}
VLOG(class_linker) << "Adding image space took " << PrettyDuration(NanoTime() - start_time);
return true;
}
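// Returns true if the given class is present in the class table of its defining class loader.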
bool ClassLinker::ClassInClassTable(ObjPtr<mirror::Class> klass) {
ClassTable* const class_table = ClassTableForClassLoader(klass->GetClassLoader());
return class_table != nullptr && class_table->Contains(klass);
}
void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
// Acquire tracing_enabled before locking the class linker lock to prevent a lock order
// violation. Since enabling tracing requires the mutator lock, there are no race conditions
// here.
const bool tracing_enabled = Trace::IsTracingEnabled();
Thread* const self = Thread::Current();
WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
if (kUseReadBarrier) {
// We do not track new roots for CC.
DCHECK_EQ(0, flags & (kVisitRootFlagNewRoots |
kVisitRootFlagClearRootLog |
kVisitRootFlagStartLoggingNewRoots |
kVisitRootFlagStopLoggingNewRoots));
}
if ((flags & kVisitRootFlagAllRoots) != 0) {
// Rationale for how root visiting deals with ArtField and ArtMethod roots.
// There are three GC cases to handle:
// Non moving concurrent:
// This case is easy to handle since the reference members of ArtMethod and ArtField are held
// live by the class and class roots.
//
// Moving non-concurrent:
// This case needs to call VisitNativeRoots in case the classes or dex cache arrays move.
// To prevent missing roots, this case needs to ensure that there are no
// suspend points between the point where we allocate ArtMethod arrays and the point where we
// place them in a class which is in the class table.
//
// Moving concurrent:
// Need to make sure to not copy ArtMethods without doing read barriers since the roots are
// marked concurrently and we don't hold the classlinker_classes_lock_ when we do the copy.
//
// Use an unbuffered visitor since the class table uses a temporary GcRoot for holding decoded
// ClassTable::TableSlot. The buffered root visiting would access a stale stack location for
// these objects.
UnbufferedRootVisitor root_visitor(visitor, RootInfo(kRootStickyClass));
boot_class_table_->VisitRoots(root_visitor);
// If tracing is enabled, then mark all the class loaders to prevent unloading.
if ((flags & kVisitRootFlagClassLoader) != 0 || tracing_enabled) {
for (const ClassLoaderData& data : class_loaders_) {
GcRoot<mirror::Object> root(GcRoot<mirror::Object>(self->DecodeJObject(data.weak_root)));
root.VisitRoot(visitor, RootInfo(kRootVMInternal));
}
}
} else if (!kUseReadBarrier && (flags & kVisitRootFlagNewRoots) != 0) {
for (auto& root : new_class_roots_) {
ObjPtr<mirror::Class> old_ref = root.Read<kWithoutReadBarrier>();
root.VisitRoot(visitor, RootInfo(kRootStickyClass));
ObjPtr<mirror::Class> new_ref = root.Read<kWithoutReadBarrier>();
// Concurrent moving GC marked new roots through the to-space invariant.
CHECK_EQ(new_ref, old_ref);
}
for (const OatFile* oat_file : new_bss_roots_boot_oat_files_) {
for (GcRoot<mirror::Object>& root : oat_file->GetBssGcRoots()) {
ObjPtr<mirror::Object> old_ref = root.Read<kWithoutReadBarrier>();
if (old_ref != nullptr) {
DCHECK(old_ref->IsClass());
root.VisitRoot(visitor, RootInfo(kRootStickyClass));
ObjPtr<mirror::Object> new_ref = root.Read<kWithoutReadBarrier>();
// Concurrent moving GC marked new roots through the to-space invariant.
CHECK_EQ(new_ref, old_ref);
}
}
}
}
if (!kUseReadBarrier && (flags & kVisitRootFlagClearRootLog) != 0) {
new_class_roots_.clear();
new_bss_roots_boot_oat_files_.clear();
}
if (!kUseReadBarrier && (flags & kVisitRootFlagStartLoggingNewRoots) != 0) {
log_new_roots_ = true;
} else if (!kUseReadBarrier && (flags & kVisitRootFlagStopLoggingNewRoots) != 0) {
log_new_roots_ = false;
}
// We deliberately ignore the class roots in the image since we
// handle image roots by using the MS/CMS rescanning of dirty cards.
}
// Keep in sync with InitCallback. Anything we visit needs its references reinitialized when a
// ClassLinker is reinitialized from a mapped image.
void ClassLinker::VisitRoots(RootVisitor* visitor, VisitRootFlags flags) {
class_roots_.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
VisitClassRoots(visitor, flags);
// Instead of visiting the find_array_class_cache_, drop it so that it doesn't prevent class
// unloading when we are marking roots.
DropFindArrayClassCache();
}
class VisitClassLoaderClassesVisitor : public ClassLoaderVisitor {
public:
explicit VisitClassLoaderClassesVisitor(ClassVisitor* visitor)
: visitor_(visitor),
done_(false) {}
void Visit(ObjPtr<mirror::ClassLoader> class_loader)
REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
ClassTable* const class_table = class_loader->GetClassTable();
if (!done_ && class_table != nullptr) {
DefiningClassLoaderFilterVisitor visitor(class_loader, visitor_);
if (!class_table->Visit(visitor)) {
// If the class table visit returns false, it means that we don't need to continue.
done_ = true;
}
}
}
private:
// Class visitor that limits the class visits from a ClassTable to the classes with
// the provided defining class loader. This filter is used to avoid multiple visits
// of the same class which can be recorded for multiple initiating class loaders.
class DefiningClassLoaderFilterVisitor : public ClassVisitor {
public:
DefiningClassLoaderFilterVisitor(ObjPtr<mirror::ClassLoader> defining_class_loader,
ClassVisitor* visitor)
: defining_class_loader_(defining_class_loader), visitor_(visitor) { }
bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
if (klass->GetClassLoader() != defining_class_loader_) {
return true;
}
return (*visitor_)(klass);
}
const ObjPtr<mirror::ClassLoader> defining_class_loader_;
ClassVisitor* const visitor_;
};
ClassVisitor* const visitor_;
// If done is true then we don't need to do any more visiting.
bool done_;
};
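// Visits boot class table classes first; if the visitor asks to continue, also visits the
// classes of all other class loaders.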
void ClassLinker::VisitClassesInternal(ClassVisitor* visitor) {
if (boot_class_table_->Visit(*visitor)) {
VisitClassLoaderClassesVisitor loader_visitor(visitor);
VisitClassLoaders(&loader_visitor);
}
}
void ClassLinker::VisitClasses(ClassVisitor* visitor) {
Thread* const self = Thread::Current();
ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
// Not safe to have thread suspension when we are holding a lock.
if (self != nullptr) {
ScopedAssertNoThreadSuspension nts(__FUNCTION__);
VisitClassesInternal(visitor);
} else {
VisitClassesInternal(visitor);
}
}
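// Accumulates all visited classes into a vector; used below when classes cannot move, so the
// raw ObjPtrs stay valid without a handle scope.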
class GetClassesInToVector : public ClassVisitor {
public:
bool operator()(ObjPtr<mirror::Class> klass) override {
classes_.push_back(klass);
return true;
}
std::vector<ObjPtr<mirror::Class>> classes_;
};
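// Stores visited classes into a preallocated ObjectArray so that they are held live through a
// handle even if the heap moves objects. Succeeded() reports whether the array was large enough
// to hold all visited classes.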
class GetClassInToObjectArray : public ClassVisitor {
public:
explicit GetClassInToObjectArray(mirror::ObjectArray<mirror::Class>* arr)
: arr_(arr), index_(0) {}
bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
++index_;
if (index_ <= arr_->GetLength()) {
arr_->Set(index_ - 1, klass);
return true;
}
return false;
}
bool Succeeded() const REQUIRES_SHARED(Locks::mutator_lock_) {
return index_ <= arr_->GetLength();
}
private:
mirror::ObjectArray<mirror::Class>* const arr_;
int32_t index_;
};
void ClassLinker::VisitClassesWithoutClassesLock(ClassVisitor* visitor) {
// TODO: it may be possible to avoid secondary storage if we iterate over dex caches. The problem
// is avoiding duplicates.
if (!kMovingClasses) {
ScopedAssertNoThreadSuspension nts(__FUNCTION__);
GetClassesInToVector accumulator;
VisitClasses(&accumulator);
for (ObjPtr<mirror::Class> klass : accumulator.classes_) {
if (!visitor->operator()(klass)) {
return;
}
}
} else {
Thread* const self = Thread::Current();
StackHandleScope<1> hs(self);
auto classes = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);
// We size the array assuming classes won't be added to the class table during the visit.
// If this assumption fails we iterate again.
while (true) {
size_t class_table_size;
{
ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
// Add 100 in case new classes get loaded when we are filling in the object array.
class_table_size = NumZygoteClasses() + NumNonZygoteClasses() + 100;
}
ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
classes.Assign(
mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, class_table_size));
CHECK(classes != nullptr); // OOME.
GetClassInToObjectArray accumulator(classes.Get());
VisitClasses(&accumulator);
if (accumulator.Succeeded()) {
break;
}
}
for (int32_t i = 0; i < classes->GetLength(); ++i) {
// If the class table shrank during creation of the classes array we expect null elements. If
// the class table grew then the loop repeats. If classes are created after the loop has
// finished then we don't visit.
ObjPtr<mirror::Class> klass = classes->Get(i);
if (klass != nullptr && !visitor->operator()(klass)) {
return;
}
}
}
}
ClassLinker::~ClassLinker() {
Thread* const self = Thread::Current();
for (const ClassLoaderData& data : class_loaders_) {
// CHA unloading analysis is not needed. No negative consequences are expected because
// all the class loaders are deleted at the same time.
DeleteClassLoader(self, data, /*cleanup_cha=*/ false);
}
class_loaders_.clear();
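// Take ownership of each pending callback so that it is freed when popped from the list.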
while (!running_visibly_initialized_callbacks_.empty()) {
std::unique_ptr<VisiblyInitializedCallback> callback(
std::addressof(running_visibly_initialized_callbacks_.front()));
running_visibly_initialized_callbacks_.pop_front();
}
}
void ClassLinker::DeleteClassLoader(Thread*