Turn the thread peer_ into an Object*.
Don't use a JNI global ref for the thread peer_ so that we can
support more threads than we can create global refs for. This fixes run-test 51.
Fix a race in thread destruction where a thread may be requested to
suspend while deleting itself.
Change-Id: Id8756a575becf80d2a0be0a213325034556927f1
diff --git a/src/debugger.cc b/src/debugger.cc
index e0e02cf..672b660 100644
--- a/src/debugger.cc
+++ b/src/debugger.cc
@@ -1497,7 +1497,7 @@
// query all threads, so it's easier if we just don't tell them about this thread.
return;
}
- Object* peer = soa_.Decode<Object*>(t->GetPeer());
+ Object* peer = t->GetPeer();
if (IsInDesiredThreadGroup(peer)) {
thread_ids_.push_back(gRegistry->Add(peer));
}
@@ -1632,7 +1632,7 @@
JDWP::ObjectId Dbg::GetThreadSelfId() {
ScopedObjectAccessUnchecked soa(Thread::Current());
- return gRegistry->Add(soa.Decode<Object*>(Thread::Current()->GetPeer()));
+ return gRegistry->Add(soa.Self()->GetPeer());
}
void Dbg::SuspendVM() {
@@ -2755,7 +2755,7 @@
void Dbg::PostThreadStartOrStop(Thread* t, uint32_t type) {
if (IsDebuggerActive()) {
ScopedObjectAccessUnchecked soa(Thread::Current());
- JDWP::ObjectId id = gRegistry->Add(soa.Decode<Object*>(t->GetPeer()));
+ JDWP::ObjectId id = gRegistry->Add(t->GetPeer());
gJdwpState->PostThreadChange(id, type == CHUNK_TYPE("THCR"));
// If this thread's just joined the party while we're already debugging, make sure it knows
// to give us updates when it's running.
diff --git a/src/native/dalvik_system_VMStack.cc b/src/native/dalvik_system_VMStack.cc
index 5ef512a..0e6e675 100644
--- a/src/native/dalvik_system_VMStack.cc
+++ b/src/native/dalvik_system_VMStack.cc
@@ -25,10 +25,11 @@
static jobject GetThreadStack(JNIEnv* env, jobject peer) {
bool timeout;
- Thread* self = Thread::Current();
- if (env->IsSameObject(peer, self->GetPeer())) {
+ {
ScopedObjectAccess soa(env);
- return self->CreateInternalStackTrace(soa);
+ if (soa.Decode<Object*>(peer) == soa.Self()->GetPeer()) {
+ return soa.Self()->CreateInternalStackTrace(soa);
+ }
}
// Suspend thread to build stack trace.
Thread* thread = Thread::SuspendForDebugger(peer, true, &timeout);
diff --git a/src/native/java_lang_Thread.cc b/src/native/java_lang_Thread.cc
index cf475e2..f14c03b 100644
--- a/src/native/java_lang_Thread.cc
+++ b/src/native/java_lang_Thread.cc
@@ -25,11 +25,12 @@
namespace art {
static jobject Thread_currentThread(JNIEnv* env, jclass) {
- return reinterpret_cast<JNIEnvExt*>(env)->self->GetPeer();
+ ScopedObjectAccess soa(env);
+ return soa.AddLocalReference<jobject>(soa.Self()->GetPeer());
}
static jboolean Thread_interrupted(JNIEnv* env, jclass) {
- return reinterpret_cast<JNIEnvExt*>(env)->self->Interrupted() ? JNI_TRUE : JNI_FALSE;
+ return static_cast<JNIEnvExt*>(env)->self->Interrupted() ? JNI_TRUE : JNI_FALSE;
}
static jboolean Thread_isInterrupted(JNIEnv* env, jobject java_thread) {
@@ -108,7 +109,7 @@
ScopedUtfChars name(env, java_name);
{
ScopedObjectAccess soa(env);
- if (soa.Env()->IsSameObject(peer, soa.Self()->GetPeer())) {
+ if (soa.Decode<Object*>(peer) == soa.Self()->GetPeer()) {
soa.Self()->SetThreadName(name.c_str());
return;
}
diff --git a/src/native/org_apache_harmony_dalvik_ddmc_DdmVmInternal.cc b/src/native/org_apache_harmony_dalvik_ddmc_DdmVmInternal.cc
index 0ed964b..9423795 100644
--- a/src/native/org_apache_harmony_dalvik_ddmc_DdmVmInternal.cc
+++ b/src/native/org_apache_harmony_dalvik_ddmc_DdmVmInternal.cc
@@ -61,7 +61,8 @@
Runtime::Current()->GetThreadList()->ForEach(ThreadFinder::Callback, &finder);
}
if (finder.thread != NULL) {
- return finder.thread->GetPeer();
+ ScopedObjectAccess soa(env);
+ return soa.AddLocalReference<jobject>(finder.thread->GetPeer());
} else {
return NULL;
}
diff --git a/src/runtime.cc b/src/runtime.cc
index ec2f569..8d88270 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -644,7 +644,7 @@
"Ljava/lang/ClassLoader;");
CHECK(contextClassLoader != NULL);
- contextClassLoader->SetObject(soa.Decode<Object*>(soa.Self()->GetPeer()), class_loader);
+ contextClassLoader->SetObject(soa.Self()->GetPeer(), class_loader);
}
void Runtime::Start() {
diff --git a/src/scoped_thread_state_change.h b/src/scoped_thread_state_change.h
index 39f5c3f..c0fb649 100644
--- a/src/scoped_thread_state_change.h
+++ b/src/scoped_thread_state_change.h
@@ -165,8 +165,7 @@
* passed in), or NULL on failure.
*/
template<typename T>
- T AddLocalReference(Object* obj) const
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ T AddLocalReference(Object* obj) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
DCHECK_EQ(thread_state_, kRunnable); // Don't work with raw objects in non-runnable states.
if (obj == NULL) {
return NULL;
diff --git a/src/thread.cc b/src/thread.cc
index c51d45f..72ceaf0 100644
--- a/src/thread.cc
+++ b/src/thread.cc
@@ -121,6 +121,13 @@
}
{
ScopedObjectAccess soa(self);
+
+ // Copy peer into self, deleting global reference when done.
+ CHECK(self->jpeer_ != NULL);
+ self->opeer_ = soa.Decode<Object*>(self->jpeer_);
+ self->GetJniEnv()->DeleteGlobalRef(self->jpeer_);
+ self->jpeer_ = NULL;
+
{
SirtRef<String> thread_name(self, self->GetThreadName(soa));
self->SetThreadName(thread_name->ToModifiedUtf8().c_str());
@@ -128,10 +135,10 @@
Dbg::PostThreadStart(self);
// Invoke the 'run' method of our java.lang.Thread.
- CHECK(self->peer_ != NULL);
- Object* receiver = soa.Decode<Object*>(self->peer_);
+ Object* receiver = self->opeer_;
jmethodID mid = WellKnownClasses::java_lang_Thread_run;
- AbstractMethod* m = receiver->GetClass()->FindVirtualMethodForVirtualOrInterface(soa.DecodeMethod(mid));
+ AbstractMethod* m =
+ receiver->GetClass()->FindVirtualMethodForVirtualOrInterface(soa.DecodeMethod(mid));
m->Invoke(self, receiver, NULL, NULL);
}
// Detach and delete self.
@@ -244,7 +251,7 @@
Thread* child_thread = new Thread(is_daemon);
// Use global JNI ref to hold peer live while child thread starts.
- child_thread->peer_ = env->NewGlobalRef(java_peer);
+ child_thread->jpeer_ = env->NewGlobalRef(java_peer);
stack_size = FixStackSize(stack_size);
// Thread.start is synchronized, so we know that vmData is 0, and know that we're not racing to
@@ -267,8 +274,8 @@
runtime->EndThreadBirth();
}
// Manually delete the global reference since Thread::Init will not have been run.
- env->DeleteGlobalRef(child_thread->peer_);
- child_thread->peer_ = NULL;
+ env->DeleteGlobalRef(child_thread->jpeer_);
+ child_thread->jpeer_ = NULL;
delete child_thread;
child_thread = NULL;
// TODO: remove from thread group?
@@ -302,7 +309,7 @@
CHECK_PTHREAD_CALL(pthread_setspecific, (Thread::pthread_key_self_, this), "attach self");
DCHECK_EQ(Thread::Current(), this);
- thin_lock_id_ = thread_list->AllocThreadId();
+ thin_lock_id_ = thread_list->AllocThreadId(this);
InitStackHwm();
jni_env_ = new JNIEnvExt(this, java_vm);
@@ -367,7 +374,10 @@
CHECK(IsExceptionPending());
return;
}
- peer_ = env->NewGlobalRef(peer.get());
+ {
+ ScopedObjectAccess soa(this);
+ opeer_ = soa.Decode<Object*>(peer.get());
+ }
env->CallNonvirtualVoidMethod(peer.get(),
WellKnownClasses::java_lang_Thread,
WellKnownClasses::java_lang_Thread_init,
@@ -382,19 +392,18 @@
ScopedObjectAccess soa(self);
SirtRef<String> peer_thread_name(soa.Self(), GetThreadName(soa));
if (peer_thread_name.get() == NULL) {
- Object* native_peer = soa.Decode<Object*>(peer.get());
// The Thread constructor should have set the Thread.name to a
// non-null value. However, because we can run without code
// available (in the compiler, in tests), we manually assign the
// fields the constructor should have set.
soa.DecodeField(WellKnownClasses::java_lang_Thread_daemon)->
- SetBoolean(native_peer, thread_is_daemon);
+ SetBoolean(opeer_, thread_is_daemon);
soa.DecodeField(WellKnownClasses::java_lang_Thread_group)->
- SetObject(native_peer, soa.Decode<Object*>(thread_group));
+ SetObject(opeer_, soa.Decode<Object*>(thread_group));
soa.DecodeField(WellKnownClasses::java_lang_Thread_name)->
- SetObject(native_peer, soa.Decode<Object*>(thread_name.get()));
+ SetObject(opeer_, soa.Decode<Object*>(thread_name.get()));
soa.DecodeField(WellKnownClasses::java_lang_Thread_priority)->
- SetInt(native_peer, thread_priority);
+ SetInt(opeer_, thread_priority);
peer_thread_name.reset(GetThreadName(soa));
}
// 'thread_name' may have been null, so don't trust 'peer_thread_name' to be non-null.
@@ -472,7 +481,7 @@
}
os << GetState()
<< ",Thread*=" << this
- << ",peer=" << peer_
+ << ",peer=" << opeer_
<< ",\"" << *name_ << "\""
<< "]";
}
@@ -484,8 +493,7 @@
String* Thread::GetThreadName(const ScopedObjectAccessUnchecked& soa) const {
Field* f = soa.DecodeField(WellKnownClasses::java_lang_Thread_name);
- Object* native_peer = soa.Decode<Object*>(peer_);
- return (peer_ != NULL) ? reinterpret_cast<String*>(f->GetObject(native_peer)) : NULL;
+ return (opeer_ != NULL) ? reinterpret_cast<String*>(f->GetObject(opeer_)) : NULL;
}
void Thread::GetThreadName(std::string& name) const {
@@ -536,7 +544,9 @@
<< delta << " " << debug_suspend_count_ << " " << this;
DCHECK_GE(suspend_count_, debug_suspend_count_) << this;
Locks::thread_suspend_count_lock_->AssertHeld(self);
-
+ if (this != self && !IsSuspended()) {
+ Locks::thread_list_lock_->AssertHeld(self);
+ }
if (UNLIKELY(delta < 0 && suspend_count_ <= 0)) {
UnsafeLogFatalForSuspendCount(self, this);
return;
@@ -718,14 +728,13 @@
bool is_daemon = false;
Thread* self = Thread::Current();
- if (thread != NULL && thread->peer_ != NULL) {
- ScopedObjectAccess soa(self);
- Object* native_peer = soa.Decode<Object*>(thread->peer_);
- priority = soa.DecodeField(WellKnownClasses::java_lang_Thread_priority)->GetInt(native_peer);
- is_daemon = soa.DecodeField(WellKnownClasses::java_lang_Thread_daemon)->GetBoolean(native_peer);
+ if (thread != NULL && thread->opeer_ != NULL) {
+ ScopedObjectAccessUnchecked soa(self);
+ priority = soa.DecodeField(WellKnownClasses::java_lang_Thread_priority)->GetInt(thread->opeer_);
+ is_daemon = soa.DecodeField(WellKnownClasses::java_lang_Thread_daemon)->GetBoolean(thread->opeer_);
Object* thread_group =
- soa.DecodeField(WellKnownClasses::java_lang_Thread_group)->GetObject(native_peer);
+ soa.DecodeField(WellKnownClasses::java_lang_Thread_group)->GetObject(thread->opeer_);
if (thread_group != NULL) {
Field* group_name_field = soa.DecodeField(WellKnownClasses::java_lang_ThreadGroup_name);
@@ -760,7 +769,7 @@
os << " | group=\"" << group_name << "\""
<< " sCount=" << thread->suspend_count_
<< " dsCount=" << thread->debug_suspend_count_
- << " obj=" << reinterpret_cast<void*>(thread->peer_)
+ << " obj=" << reinterpret_cast<void*>(thread->opeer_)
<< " self=" << reinterpret_cast<const void*>(thread) << "\n";
}
@@ -936,7 +945,8 @@
managed_stack_(),
jni_env_(NULL),
self_(NULL),
- peer_(NULL),
+ opeer_(NULL),
+ jpeer_(NULL),
stack_begin_(NULL),
stack_size_(0),
thin_lock_id_(0),
@@ -1002,29 +1012,24 @@
Thread* self = this;
DCHECK_EQ(self, Thread::Current());
- if (peer_ != NULL) {
+ if (opeer_ != NULL) {
+ ScopedObjectAccess soa(self);
// We may need to call user-supplied managed code, do this before final clean-up.
- HandleUncaughtExceptions();
- RemoveFromThreadGroup();
+ HandleUncaughtExceptions(soa);
+ RemoveFromThreadGroup(soa);
// this.vmData = 0;
- jni_env_->SetIntField(peer_, WellKnownClasses::java_lang_Thread_vmData, 0);
+ soa.DecodeField(WellKnownClasses::java_lang_Thread_vmData)->SetInt(opeer_, 0);
+ Dbg::PostThreadDeath(self);
- {
- ScopedObjectAccess soa(self);
- Dbg::PostThreadDeath(self);
- }
-
- // Thread.join() is implemented as an Object.wait() on the Thread.lock
- // object. Signal anyone who is waiting.
- ScopedLocalRef<jobject> lock(jni_env_,
- jni_env_->GetObjectField(peer_,
- WellKnownClasses::java_lang_Thread_lock));
+ // Thread.join() is implemented as an Object.wait() on the Thread.lock object. Signal anyone
+ // who is waiting.
+ Object* lock = soa.DecodeField(WellKnownClasses::java_lang_Thread_lock)->GetObject(opeer_);
// (This conditional is only needed for tests, where Thread.lock won't have been set.)
- if (lock.get() != NULL) {
- jni_env_->MonitorEnter(lock.get());
- jni_env_->CallVoidMethod(lock.get(), WellKnownClasses::java_lang_Object_notify);
- jni_env_->MonitorExit(lock.get());
+ if (lock != NULL) {
+ lock->MonitorEnter(self);
+ lock->Notify();
+ lock->MonitorExit(self);
}
}
@@ -1035,11 +1040,12 @@
}
Thread::~Thread() {
- if (jni_env_ != NULL && peer_ != NULL) {
+ if (jni_env_ != NULL && jpeer_ != NULL) {
// If pthread_create fails we don't have a jni env here.
- jni_env_->DeleteGlobalRef(peer_);
+ jni_env_->DeleteGlobalRef(jpeer_);
+ jpeer_ = NULL;
}
- peer_ = NULL;
+ opeer_ = NULL;
delete jni_env_;
jni_env_ = NULL;
@@ -1062,10 +1068,12 @@
TearDownAlternateSignalStack();
}
-void Thread::HandleUncaughtExceptions() {
+void Thread::HandleUncaughtExceptions(ScopedObjectAccess& soa) {
if (!IsExceptionPending()) {
return;
}
+ ScopedLocalRef<jobject> peer(jni_env_, soa.AddLocalReference<jobject>(opeer_));
+ ScopedThreadStateChange tsc(this, kNative);
// Get and clear the exception.
ScopedLocalRef<jthrowable> exception(jni_env_, jni_env_->ExceptionOccurred());
@@ -1073,31 +1081,32 @@
// If the thread has its own handler, use that.
ScopedLocalRef<jobject> handler(jni_env_,
- jni_env_->GetObjectField(peer_,
+ jni_env_->GetObjectField(peer.get(),
WellKnownClasses::java_lang_Thread_uncaughtHandler));
if (handler.get() == NULL) {
// Otherwise use the thread group's default handler.
- handler.reset(jni_env_->GetObjectField(peer_, WellKnownClasses::java_lang_Thread_group));
+ handler.reset(jni_env_->GetObjectField(peer.get(), WellKnownClasses::java_lang_Thread_group));
}
// Call the handler.
jni_env_->CallVoidMethod(handler.get(),
WellKnownClasses::java_lang_Thread$UncaughtExceptionHandler_uncaughtException,
- peer_, exception.get());
+ peer.get(), exception.get());
// If the handler threw, clear that exception too.
jni_env_->ExceptionClear();
}
-void Thread::RemoveFromThreadGroup() {
+void Thread::RemoveFromThreadGroup(ScopedObjectAccess& soa) {
// this.group.removeThread(this);
// group can be null if we're in the compiler or a test.
- ScopedLocalRef<jobject> group(jni_env_,
- jni_env_->GetObjectField(peer_,
- WellKnownClasses::java_lang_Thread_group));
- if (group.get() != NULL) {
+ Object* ogroup = soa.DecodeField(WellKnownClasses::java_lang_Thread_group)->GetObject(opeer_);
+ if (ogroup != NULL) {
+ ScopedLocalRef<jobject> group(soa.Env(), soa.AddLocalReference<jobject>(ogroup));
+ ScopedLocalRef<jobject> peer(soa.Env(), soa.AddLocalReference<jobject>(opeer_));
+ ScopedThreadStateChange tsc(soa.Self(), kNative);
jni_env_->CallVoidMethod(group.get(), WellKnownClasses::java_lang_ThreadGroup_removeThread,
- peer_);
+ peer.get());
}
}
@@ -1109,7 +1118,7 @@
return count;
}
-bool Thread::SirtContains(jobject obj) {
+bool Thread::SirtContains(jobject obj) const {
Object** sirt_entry = reinterpret_cast<Object**>(obj);
for (StackIndirectReferenceTable* cur = top_sirt_; cur; cur = cur->GetLink()) {
if (cur->Contains(sirt_entry)) {
@@ -1132,7 +1141,7 @@
}
}
-Object* Thread::DecodeJObject(jobject obj) {
+Object* Thread::DecodeJObject(jobject obj) const {
Locks::mutator_lock_->AssertSharedHeld(this);
if (obj == NULL) {
return NULL;
@@ -1151,7 +1160,7 @@
{
JavaVMExt* vm = Runtime::Current()->GetJavaVM();
IndirectReferenceTable& globals = vm->globals;
- MutexLock mu(this, vm->globals_lock);
+ MutexLock mu(const_cast<Thread*>(this), vm->globals_lock);
result = const_cast<Object*>(globals.Get(ref));
break;
}
@@ -1159,7 +1168,7 @@
{
JavaVMExt* vm = Runtime::Current()->GetJavaVM();
IndirectReferenceTable& weak_globals = vm->weak_globals;
- MutexLock mu(this, vm->weak_globals_lock);
+ MutexLock mu(const_cast<Thread*>(this), vm->weak_globals_lock);
result = const_cast<Object*>(weak_globals.Get(ref));
if (result == kClearedJniWeakGlobal) {
// This is a special case where it's okay to return NULL.
@@ -1967,6 +1976,9 @@
wrapperArg.arg = arg;
wrapperArg.visitor = visitor;
+ if (opeer_ != NULL) {
+ VerifyRootWrapperCallback(opeer_, &wrapperArg);
+ }
if (exception_ != NULL) {
VerifyRootWrapperCallback(exception_, &wrapperArg);
}
@@ -1988,6 +2000,9 @@
}
void Thread::VisitRoots(Heap::RootVisitor* visitor, void* arg) {
+ if (opeer_ != NULL) {
+ visitor(opeer_, arg);
+ }
if (exception_ != NULL) {
visitor(exception_, arg);
}
diff --git a/src/thread.h b/src/thread.h
index b56bcf1..4c065c5 100644
--- a/src/thread.h
+++ b/src/thread.h
@@ -148,7 +148,8 @@
// Dumps the SIGQUIT per-thread header. 'thread' can be NULL for a non-attached thread, in which
// case we use 'tid' to identify the thread, and we'll include as much information as we can.
static void DumpState(std::ostream& os, const Thread* thread, pid_t tid)
- LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_);
+ LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
ThreadState GetState() const {
return static_cast<ThreadState>(state_and_flags_.as_struct.state);
@@ -279,12 +280,14 @@
// Sets the thread's name.
void SetThreadName(const char* name) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- jobject GetPeer() const {
- return peer_;
+ Object* GetPeer() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ CHECK(jpeer_ == NULL);
+ return opeer_;
}
bool HasPeer() const {
- return peer_ != NULL;
+ CHECK(jpeer_ == NULL);
+ return opeer_ != NULL;
}
RuntimeStats* GetStats() {
@@ -386,7 +389,7 @@
}
// Convert a jobject into a Object*
- Object* DecodeJObject(jobject obj) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ Object* DecodeJObject(jobject obj) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Implements java.lang.Thread.interrupted.
bool Interrupted();
@@ -531,7 +534,7 @@
};
// Is the given obj in this thread's stack indirect reference table?
- bool SirtContains(jobject obj);
+ bool SirtContains(jobject obj) const;
void SirtVisitRoots(Heap::RootVisitor* visitor, void* arg);
@@ -618,7 +621,7 @@
}
friend class SignalCatcher; // For SetStateUnsafe.
- void DumpState(std::ostream& os) const;
+ void DumpState(std::ostream& os) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void DumpStack(std::ostream& os) const
LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -630,8 +633,9 @@
static void* CreateCallback(void* arg);
- void HandleUncaughtExceptions();
- void RemoveFromThreadGroup();
+ void HandleUncaughtExceptions(ScopedObjectAccess& soa)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ void RemoveFromThreadGroup(ScopedObjectAccess& soa) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void Init(ThreadList*, JavaVMExt*) EXCLUSIVE_LOCKS_REQUIRED(Locks::runtime_shutdown_lock_);
void InitCardTable();
@@ -698,8 +702,10 @@
// is hard. This field can be read off of Thread::Current to give the address.
Thread* self_;
- // Our managed peer (an instance of java.lang.Thread).
- jobject peer_;
+ // Our managed peer (an instance of java.lang.Thread). The jobject version is used during thread
+ // start up, until the thread is registered and the local opeer_ is used.
+ Object* opeer_;
+ jobject jpeer_;
// The "lowest addressable byte" of the stack
byte* stack_begin_;
diff --git a/src/thread_list.cc b/src/thread_list.cc
index a2a8fe8..d39d424 100644
--- a/src/thread_list.cc
+++ b/src/thread_list.cc
@@ -72,7 +72,9 @@
DumpUnattachedThreads(os);
}
-static void DumpUnattachedThread(std::ostream& os, pid_t tid) {
+static void DumpUnattachedThread(std::ostream& os, pid_t tid) NO_THREAD_SAFETY_ANALYSIS {
+ // TODO: No thread safety analysis as DumpState with a NULL thread won't access fields, should
+ // refactor DumpState to avoid skipping analysis.
Thread::DumpState(os, NULL, tid);
DumpKernelStack(os, tid, " kernel: ", false);
// TODO: Reenable this when the native code in system_server can handle it.
@@ -540,18 +542,23 @@
// suspend and so on, must happen at this point, and not in ~Thread.
self->Destroy();
- {
- // Remove this thread from the list.
+ uint32_t thin_lock_id = self->thin_lock_id_;
+ self->thin_lock_id_ = 0;
+ ReleaseThreadId(self, thin_lock_id);
+ while (self != NULL) {
+ // Remove and delete the Thread* while holding the thread_list_lock_ and
+ // thread_suspend_count_lock_ so that the unregistering thread cannot be suspended.
MutexLock mu(self, *Locks::thread_list_lock_);
CHECK(Contains(self));
- list_.remove(self);
+ // Note: we don't take the thread_suspend_count_lock_ here as to be suspending a thread other
+ // than yourself you need to hold the thread_list_lock_ (see Thread::ModifySuspendCount).
+ if (!self->IsSuspended()) {
+ list_.remove(self);
+ delete self;
+ self = NULL;
+ }
}
- // Delete the Thread* and release the thin lock id.
- uint32_t thin_lock_id = self->thin_lock_id_;
- ReleaseThreadId(thin_lock_id);
- delete self;
-
// Clear the TLS data, so that the underlying native thread is recognizably detached.
// (It may wish to reattach later.)
CHECK_PTHREAD_CALL(pthread_setspecific, (Thread::pthread_key_self_, NULL), "detach self");
@@ -581,8 +588,8 @@
}
}
-uint32_t ThreadList::AllocThreadId() {
- MutexLock mu(Thread::Current(), allocated_ids_lock_);
+uint32_t ThreadList::AllocThreadId(Thread* self) {
+ MutexLock mu(self, allocated_ids_lock_);
for (size_t i = 0; i < allocated_ids_.size(); ++i) {
if (!allocated_ids_[i]) {
allocated_ids_.set(i);
@@ -593,8 +600,8 @@
return 0;
}
-void ThreadList::ReleaseThreadId(uint32_t id) {
- MutexLock mu(Thread::Current(), allocated_ids_lock_);
+void ThreadList::ReleaseThreadId(Thread* self, uint32_t id) {
+ MutexLock mu(self, allocated_ids_lock_);
--id; // Zero is reserved to mean "invalid".
DCHECK(allocated_ids_[id]) << id;
allocated_ids_.reset(id);
diff --git a/src/thread_list.h b/src/thread_list.h
index d64183b..fb989ab 100644
--- a/src/thread_list.h
+++ b/src/thread_list.h
@@ -98,8 +98,8 @@
private:
typedef std::list<Thread*>::const_iterator It; // TODO: C++0x auto
- uint32_t AllocThreadId();
- void ReleaseThreadId(uint32_t id) LOCKS_EXCLUDED(allocated_ids_lock_);
+ uint32_t AllocThreadId(Thread* self);
+ void ReleaseThreadId(Thread* self, uint32_t id) LOCKS_EXCLUDED(allocated_ids_lock_);
bool Contains(Thread* thread) EXCLUSIVE_LOCKS_REQUIRED(Locks::thread_list_lock_);
bool Contains(pid_t tid) EXCLUSIVE_LOCKS_REQUIRED(Locks::thread_list_lock_);
diff --git a/src/well_known_classes.cc b/src/well_known_classes.cc
index 03b9cb2..cb7b70c 100644
--- a/src/well_known_classes.cc
+++ b/src/well_known_classes.cc
@@ -57,7 +57,6 @@
jmethodID WellKnownClasses::java_lang_Float_valueOf;
jmethodID WellKnownClasses::java_lang_Integer_valueOf;
jmethodID WellKnownClasses::java_lang_Long_valueOf;
-jmethodID WellKnownClasses::java_lang_Object_notify;
jmethodID WellKnownClasses::java_lang_ref_FinalizerReference_add;
jmethodID WellKnownClasses::java_lang_ref_ReferenceQueue_add;
jmethodID WellKnownClasses::java_lang_reflect_InvocationHandler_invoke;
@@ -152,9 +151,6 @@
java_lang_Daemons_requestHeapTrim = CacheMethod(env, java_lang_Daemons, true, "requestHeapTrim", "()V");
java_lang_Daemons_start = CacheMethod(env, java_lang_Daemons, true, "start", "()V");
- ScopedLocalRef<jclass> java_lang_Object(env, env->FindClass("java/lang/Object"));
- java_lang_Object_notify = CacheMethod(env, java_lang_Object.get(), false, "notify", "()V");
-
ScopedLocalRef<jclass> java_lang_ref_FinalizerReference(env, env->FindClass("java/lang/ref/FinalizerReference"));
java_lang_ref_FinalizerReference_add = CacheMethod(env, java_lang_ref_FinalizerReference.get(), true, "add", "(Ljava/lang/Object;)V");
ScopedLocalRef<jclass> java_lang_ref_ReferenceQueue(env, env->FindClass("java/lang/ref/ReferenceQueue"));
diff --git a/src/well_known_classes.h b/src/well_known_classes.h
index 1f4217d..90b33c1 100644
--- a/src/well_known_classes.h
+++ b/src/well_known_classes.h
@@ -68,7 +68,6 @@
static jmethodID java_lang_Float_valueOf;
static jmethodID java_lang_Integer_valueOf;
static jmethodID java_lang_Long_valueOf;
- static jmethodID java_lang_Object_notify;
static jmethodID java_lang_ref_FinalizerReference_add;
static jmethodID java_lang_ref_ReferenceQueue_add;
static jmethodID java_lang_reflect_InvocationHandler_invoke;