Move to newer Clang thread-safety annotations

Also enable -Wthread-safety-negative; a sketch of what this flag
catches follows below.

Changes:
Switch from the legacy lock annotations (LOCKS_EXCLUDED,
SHARED_LOCKS_REQUIRED) to capability-based annotations (REQUIRES,
SHARED_REQUIRES) and negative capabilities such as REQUIRES(!lock);
see the macro sketch below.

Future work:
Use capabilities to implement uninterruptible annotations that work
with AssertNoThreadSuspension.
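
For reference, a minimal sketch of how the new macros could map onto
Clang's capability attributes. This is an illustration, not ART's
actual macro definitions, which live elsewhere in the tree:

    // Hypothetical definitions mirroring Clang's capability attributes;
    // ART's real macros are defined elsewhere and may differ.
    #define CAPABILITY(x)        __attribute__((capability(x)))
    #define REQUIRES(...)        __attribute__((requires_capability(__VA_ARGS__)))
    #define SHARED_REQUIRES(...) __attribute__((requires_shared_capability(__VA_ARGS__)))
    #define ACQUIRE(...)         __attribute__((acquire_capability(__VA_ARGS__)))
    #define RELEASE(...)         __attribute__((release_capability(__VA_ARGS__)))

REQUIRES(!lock) declares a negative capability: the caller must prove
the lock is not held, replacing the older LOCKS_EXCLUDED(lock) hint.
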
Bug: 20072211
Change-Id: I42fcbe0300d98a831c89d1eff3ecd5a7e99ebf33
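
To illustrate what -Wthread-safety-negative catches, a standalone
sketch using the hypothetical macros above (Mu and the functions are
made up, not ART code; compile with -Wthread-safety
-Wthread-safety-negative):

    // Illustrative only; not part of this change.
    class CAPABILITY("mutex") Mu {
     public:
      void Lock() ACQUIRE();
      void Unlock() RELEASE();
    };

    Mu suspend_lock;

    // Like the constructors in this change: may acquire suspend_lock
    // internally, so callers must not already hold it.
    void ChangeState() REQUIRES(!suspend_lock);

    void Caller() REQUIRES(!suspend_lock) {
      suspend_lock.Lock();
      ChangeState();  // Clang warns here: the negative requirement
                      // '!suspend_lock' is not satisfied, because
                      // suspend_lock is held at this point.
      suspend_lock.Unlock();
    }
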
diff --git a/runtime/scoped_thread_state_change.h b/runtime/scoped_thread_state_change.h
index 1cc2df6..b90aa0e 100644
--- a/runtime/scoped_thread_state_change.h
+++ b/runtime/scoped_thread_state_change.h
@@ -34,7 +34,7 @@
class ScopedThreadStateChange {
public:
ScopedThreadStateChange(Thread* self, ThreadState new_thread_state)
- LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+ REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
: self_(self), thread_state_(new_thread_state), expected_has_no_thread_(false) {
if (UNLIKELY(self_ == nullptr)) {
// Value chosen arbitrarily and won't be used in the destructor since thread_ == null.
@@ -59,7 +59,7 @@
}
}
- ~ScopedThreadStateChange() LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE {
+ ~ScopedThreadStateChange() REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE {
if (UNLIKELY(self_ == nullptr)) {
if (!expected_has_no_thread_) {
Runtime* runtime = Runtime::Current();
@@ -130,7 +130,7 @@
* it's best if we don't grab a mutex.
*/
template<typename T>
- T AddLocalReference(mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ T AddLocalReference(mirror::Object* obj) const SHARED_REQUIRES(Locks::mutator_lock_) {
Locks::mutator_lock_->AssertSharedHeld(Self());
DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
DCHECK_NE(obj, Runtime::Current()->GetClearedJniWeakGlobal());
@@ -139,32 +139,32 @@
template<typename T>
T Decode(jobject obj) const
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ SHARED_REQUIRES(Locks::mutator_lock_) {
Locks::mutator_lock_->AssertSharedHeld(Self());
DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
return down_cast<T>(Self()->DecodeJObject(obj));
}
ArtField* DecodeField(jfieldID fid) const
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ SHARED_REQUIRES(Locks::mutator_lock_) {
Locks::mutator_lock_->AssertSharedHeld(Self());
DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
return reinterpret_cast<ArtField*>(fid);
}
- jfieldID EncodeField(ArtField* field) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ jfieldID EncodeField(ArtField* field) const SHARED_REQUIRES(Locks::mutator_lock_) {
Locks::mutator_lock_->AssertSharedHeld(Self());
DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
return reinterpret_cast<jfieldID>(field);
}
- ArtMethod* DecodeMethod(jmethodID mid) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ ArtMethod* DecodeMethod(jmethodID mid) const SHARED_REQUIRES(Locks::mutator_lock_) {
Locks::mutator_lock_->AssertSharedHeld(Self());
DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
return reinterpret_cast<ArtMethod*>(mid);
}
- jmethodID EncodeMethod(ArtMethod* method) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ jmethodID EncodeMethod(ArtMethod* method) const SHARED_REQUIRES(Locks::mutator_lock_) {
Locks::mutator_lock_->AssertSharedHeld(Self());
DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
return reinterpret_cast<jmethodID>(method);
@@ -176,12 +176,12 @@
protected:
explicit ScopedObjectAccessAlreadyRunnable(JNIEnv* env)
- LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+ REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
: self_(ThreadForEnv(env)), env_(down_cast<JNIEnvExt*>(env)), vm_(env_->vm) {
}
explicit ScopedObjectAccessAlreadyRunnable(Thread* self)
- LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+ REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
: self_(self), env_(down_cast<JNIEnvExt*>(self->GetJniEnv())),
vm_(env_ != nullptr ? env_->vm : nullptr) {
}
@@ -220,14 +220,14 @@
class ScopedObjectAccessUnchecked : public ScopedObjectAccessAlreadyRunnable {
public:
explicit ScopedObjectAccessUnchecked(JNIEnv* env)
- LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+ REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
: ScopedObjectAccessAlreadyRunnable(env), tsc_(Self(), kRunnable) {
Self()->VerifyStack();
Locks::mutator_lock_->AssertSharedHeld(Self());
}
explicit ScopedObjectAccessUnchecked(Thread* self)
- LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+ REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
: ScopedObjectAccessAlreadyRunnable(self), tsc_(self, kRunnable) {
Self()->VerifyStack();
Locks::mutator_lock_->AssertSharedHeld(Self());
@@ -250,13 +250,13 @@
class ScopedObjectAccess : public ScopedObjectAccessUnchecked {
public:
explicit ScopedObjectAccess(JNIEnv* env)
- LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
+ REQUIRES(!Locks::thread_suspend_count_lock_)
SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
: ScopedObjectAccessUnchecked(env) {
}
explicit ScopedObjectAccess(Thread* self)
- LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
+ REQUIRES(!Locks::thread_suspend_count_lock_)
SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
: ScopedObjectAccessUnchecked(self) {
}