/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_INL_H_
#define ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_INL_H_

#include "scoped_thread_state_change.h"

#include <android-base/logging.h>

#include "base/casts.h"
#include "jni_env_ext-inl.h"
#include "obj_ptr-inl.h"
#include "runtime.h"
#include "thread-inl.h"

namespace art {

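// Illustrative usage sketch (not from the original file; DoBlockingCall is a
// hypothetical helper): scope a transition out of kRunnable around a blocking
// operation and let the destructor restore the previous thread state.
//
//   void DoBlockingCall(Thread* self) {
//     ScopedThreadStateChange tsc(self, kNative);
//     // Blocking work here; the shared mutator lock is not held in kNative.
//   }  // ~ScopedThreadStateChange restores the prior state (e.g. kRunnable).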
inline ScopedThreadStateChange::ScopedThreadStateChange(Thread* self, ThreadState new_thread_state)
    : self_(self), thread_state_(new_thread_state), expected_has_no_thread_(false) {
  if (UNLIKELY(self_ == nullptr)) {
    // Value chosen arbitrarily; it won't be used in the destructor since self_ == null.
    old_thread_state_ = kTerminated;
    Runtime* runtime = Runtime::Current();
    CHECK(runtime == nullptr || !runtime->IsStarted() || runtime->IsShuttingDown(self_));
  } else {
    DCHECK_EQ(self, Thread::Current());
    // Read state without locks, ok as state is effectively thread local and we're not interested
    // in the suspend count (this will be handled in the runnable transitions).
    old_thread_state_ = self->GetState();
    if (old_thread_state_ != new_thread_state) {
      if (new_thread_state == kRunnable) {
        self_->TransitionFromSuspendedToRunnable();
      } else if (old_thread_state_ == kRunnable) {
        self_->TransitionFromRunnableToSuspended(new_thread_state);
      } else {
        // A transition between two effectively suspended states; ok to set the state directly.
        self_->SetState(new_thread_state);
      }
    }
  }
}

inline ScopedThreadStateChange::~ScopedThreadStateChange() {
  if (UNLIKELY(self_ == nullptr)) {
    if (!expected_has_no_thread_) {
      Runtime* runtime = Runtime::Current();
      bool shutting_down = (runtime == nullptr) || runtime->IsShuttingDown(nullptr);
      CHECK(shutting_down);
    }
  } else {
    if (old_thread_state_ != thread_state_) {
      if (old_thread_state_ == kRunnable) {
        self_->TransitionFromSuspendedToRunnable();
      } else if (thread_state_ == kRunnable) {
        self_->TransitionFromRunnableToSuspended(old_thread_state_);
      } else {
        // A transition between two effectively suspended states; ok to set the state directly.
        self_->SetState(old_thread_state_);
      }
    }
  }
}

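// Usage sketch for AddLocalReference (illustrative; `soa` is a surrounding
// ScopedObjectAccess and `field_value` a hypothetical ObjPtr obtained while
// runnable): wrap a raw object in a JNI local reference before returning it
// across the JNI boundary.
//
//   jobject local_ref = soa.AddLocalReference<jobject>(field_value);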
template<typename T>
inline T ScopedObjectAccessAlreadyRunnable::AddLocalReference(ObjPtr<mirror::Object> obj) const {
  Locks::mutator_lock_->AssertSharedHeld(Self());
  if (kIsDebugBuild) {
    CHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
    DCheckObjIsNotClearedJniWeakGlobal(obj);
  }
  return obj == nullptr ? nullptr : Env()->AddLocalReference<T>(obj);
}

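// Usage sketch for Decode (illustrative; `java_obj` stands for a jobject
// parameter of a JNI entry point): decode a JNI handle into an ObjPtr that is
// safe to use while the thread remains runnable.
//
//   ObjPtr<mirror::Object> obj = soa.Decode<mirror::Object>(java_obj);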
template<typename T>
inline ObjPtr<T> ScopedObjectAccessAlreadyRunnable::Decode(jobject obj) const {
  Locks::mutator_lock_->AssertSharedHeld(Self());
  DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
  return ObjPtr<T>::DownCast(Self()->DecodeJObject(obj));
}

inline bool ScopedObjectAccessAlreadyRunnable::IsRunnable() const {
  return self_->GetState() == kRunnable;
}

inline ScopedObjectAccessAlreadyRunnable::ScopedObjectAccessAlreadyRunnable(JNIEnv* env)
    : self_(ThreadForEnv(env)), env_(down_cast<JNIEnvExt*>(env)), vm_(env_->GetVm()) {}

inline ScopedObjectAccessAlreadyRunnable::ScopedObjectAccessAlreadyRunnable(Thread* self)
    : self_(self),
      env_(down_cast<JNIEnvExt*>(self->GetJniEnv())),
      vm_(env_ != nullptr ? env_->GetVm() : nullptr) {}

inline ScopedObjectAccessUnchecked::ScopedObjectAccessUnchecked(JNIEnv* env)
    : ScopedObjectAccessAlreadyRunnable(env), tsc_(Self(), kRunnable) {
  Self()->VerifyStack();
  Locks::mutator_lock_->AssertSharedHeld(Self());
}

inline ScopedObjectAccessUnchecked::ScopedObjectAccessUnchecked(Thread* self)
    : ScopedObjectAccessAlreadyRunnable(self), tsc_(self, kRunnable) {
  Self()->VerifyStack();
  Locks::mutator_lock_->AssertSharedHeld(Self());
}

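// Usage sketch for ScopedObjectAccess (illustrative; MyNativeMethod is a
// hypothetical JNI implementation): constructing the scope transitions the
// thread to kRunnable (acquiring the mutator lock shared), so decoding objects
// and creating local references are safe until the scope exits.
//
//   static jobject MyNativeMethod(JNIEnv* env, jobject java_this) {
//     ScopedObjectAccess soa(env);
//     ObjPtr<mirror::Object> receiver = soa.Decode<mirror::Object>(java_this);
//     return soa.AddLocalReference<jobject>(receiver);
//   }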
inline ScopedObjectAccess::ScopedObjectAccess(JNIEnv* env) : ScopedObjectAccessUnchecked(env) {}
inline ScopedObjectAccess::ScopedObjectAccess(Thread* self) : ScopedObjectAccessUnchecked(self) {}
inline ScopedObjectAccess::~ScopedObjectAccess() {}

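// Usage sketch for ScopedThreadSuspension (illustrative; `lock` and `cond_var`
// are hypothetical art::Mutex / art::ConditionVariable members): a runnable
// thread drops to a suspended state around a wait so it does not block GC or
// thread suspension; the destructor transitions it back to kRunnable.
//
//   ScopedThreadSuspension sts(self, kSuspended);
//   MutexLock mu(self, lock);
//   cond_var.Wait(self);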
inline ScopedThreadSuspension::ScopedThreadSuspension(Thread* self, ThreadState suspended_state)
    : self_(self), suspended_state_(suspended_state) {
  DCHECK(self_ != nullptr);
  self_->TransitionFromRunnableToSuspended(suspended_state);
}

inline ScopedThreadSuspension::~ScopedThreadSuspension() {
  DCHECK_EQ(self_->GetState(), suspended_state_);
  self_->TransitionFromSuspendedToRunnable();
}

}  // namespace art

#endif  // ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_INL_H_