/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <ctime>

#include "object.h"

#include "array-inl.h"
#include "art_field-inl.h"
#include "art_field.h"
#include "class-inl.h"
#include "class.h"
#include "class_linker-inl.h"
#include "dex/descriptors_names.h"
#include "dex/dex_file-inl.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/heap-inl.h"
#include "handle_scope-inl.h"
#include "iftable-inl.h"
#include "monitor.h"
#include "object-inl.h"
#include "object-refvisitor-inl.h"
#include "object_array-inl.h"
#include "runtime.h"
#include "throwable.h"
#include "well_known_classes.h"

namespace art {
namespace mirror {

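// Seed for identity hash code generation: a fixed constant perturbed by the
// wall-clock time, so that hash codes vary from run to run. See
// SetHashCodeSeed() below for overriding the seed explicitly.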
Atomic<uint32_t> Object::hash_code_seed(987654321U + std::time(nullptr));

class CopyReferenceFieldsWithReadBarrierVisitor {
 public:
  explicit CopyReferenceFieldsWithReadBarrierVisitor(ObjPtr<Object> dest_obj)
      : dest_obj_(dest_obj) {}

  void operator()(ObjPtr<Object> obj, MemberOffset offset, bool /* is_static */) const
      ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    // GetFieldObject() contains a read barrier.
    ObjPtr<Object> ref = obj->GetFieldObject<Object>(offset);
    // No write barrier here, as the large object space does not have card table
    // coverage. Instead, cards are marked separately.
    dest_obj_->SetFieldObjectWithoutWriteBarrier<false, false>(offset, ref);
  }

  void operator()(ObjPtr<mirror::Class> klass, mirror::Reference* ref) const
      ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    // Copy java.lang.ref.Reference.referent, which isn't visited in
    // Object::VisitReferences().
    DCHECK(klass->IsTypeOfReferenceClass());
    this->operator()(ref, mirror::Reference::ReferentOffset(), false);
  }

  // Unused since we don't copy class native roots.
  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
      const {}
  void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}

 private:
  ObjPtr<Object> const dest_obj_;
};

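// A note on the copy strategy in CopyObject() below: the instance data is
// copied with relaxed atomic loads and stores rather than memcpy, because
// memcpy may legally use overlapping or sub-word accesses that can tear fields
// being updated concurrently (b/32012820). The copy proceeds in three phases:
// word-sized chunks, an optional trailing 32-bit word on 64-bit targets, then
// single bytes, never reading past num_bytes (there may be a redzone there).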
Object* Object::CopyObject(ObjPtr<mirror::Object> dest,
                           ObjPtr<mirror::Object> src,
                           size_t num_bytes) {
  // Copy instance data. Don't assume memcpy copies by words (b/32012820).
  {
    const size_t offset = sizeof(Object);
    uint8_t* src_bytes = reinterpret_cast<uint8_t*>(src.Ptr()) + offset;
    uint8_t* dst_bytes = reinterpret_cast<uint8_t*>(dest.Ptr()) + offset;
    num_bytes -= offset;
    DCHECK_ALIGNED(src_bytes, sizeof(uintptr_t));
    DCHECK_ALIGNED(dst_bytes, sizeof(uintptr_t));
    // Use word-sized copies to begin.
    while (num_bytes >= sizeof(uintptr_t)) {
      reinterpret_cast<Atomic<uintptr_t>*>(dst_bytes)->store(
          reinterpret_cast<Atomic<uintptr_t>*>(src_bytes)->load(std::memory_order_relaxed),
          std::memory_order_relaxed);
      src_bytes += sizeof(uintptr_t);
      dst_bytes += sizeof(uintptr_t);
      num_bytes -= sizeof(uintptr_t);
    }
    // Copy a possible trailing 32-bit word.
    if (sizeof(uintptr_t) != sizeof(uint32_t) && num_bytes >= sizeof(uint32_t)) {
      reinterpret_cast<Atomic<uint32_t>*>(dst_bytes)->store(
          reinterpret_cast<Atomic<uint32_t>*>(src_bytes)->load(std::memory_order_relaxed),
          std::memory_order_relaxed);
      src_bytes += sizeof(uint32_t);
      dst_bytes += sizeof(uint32_t);
      num_bytes -= sizeof(uint32_t);
    }
    // Copy the remaining bytes one at a time; avoid reading past the end of
    // num_bytes since there may be a redzone there.
    while (num_bytes > 0) {
      reinterpret_cast<Atomic<uint8_t>*>(dst_bytes)->store(
          reinterpret_cast<Atomic<uint8_t>*>(src_bytes)->load(std::memory_order_relaxed),
          std::memory_order_relaxed);
      src_bytes += sizeof(uint8_t);
      dst_bytes += sizeof(uint8_t);
      num_bytes -= sizeof(uint8_t);
    }
  }

  if (kUseReadBarrier) {
    // We need a read barrier here. After copying the whole object above, copy
    // the reference fields one by one again with a read barrier to make sure
    // there are no from-space refs. TODO: Optimize this later?
    CopyReferenceFieldsWithReadBarrierVisitor visitor(dest);
    src->VisitReferences(visitor, visitor);
  }
  // Perform write barriers on copied object references.
  ObjPtr<Class> c = src->GetClass();
  if (c->IsArrayClass()) {
    if (!c->GetComponentType()->IsPrimitive()) {
      ObjectArray<Object>* array = dest->AsObjectArray<Object>();
      WriteBarrier::ForArrayWrite(dest, 0, array->GetLength());
    }
  } else {
    WriteBarrier::ForEveryFieldWrite(dest);
  }
  return dest.Ptr();
}

// An allocation pre-fence visitor that copies the object.
class CopyObjectVisitor {
 public:
  CopyObjectVisitor(Handle<Object>* orig, size_t num_bytes)
      : orig_(orig), num_bytes_(num_bytes) {}

  void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    Object::CopyObject(obj, orig_->Get(), num_bytes_);
  }

 private:
  Handle<Object>* const orig_;
  const size_t num_bytes_;
  DISALLOW_COPY_AND_ASSIGN(CopyObjectVisitor);
};

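// A minimal usage sketch for Clone() (hypothetical caller, assuming the
// mutator lock is held and `obj` is a non-class object):
//
//   mirror::Object* copy = obj->Clone(Thread::Current());
//
// Clone() allocates a new object of the same class (movable or non-movable to
// match the source) and fills it in via the pre-fence visitor above.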
Object* Object::Clone(Thread* self) {
  CHECK(!IsClass()) << "Can't clone classes.";
  // Object::SizeOf gets the right size even if we're an array. Using c->AllocObject() here would
  // be wrong.
  gc::Heap* heap = Runtime::Current()->GetHeap();
  size_t num_bytes = SizeOf();
  StackHandleScope<1> hs(self);
  Handle<Object> this_object(hs.NewHandle(this));
  ObjPtr<Object> copy;
  CopyObjectVisitor visitor(&this_object, num_bytes);
  if (heap->IsMovableObject(this)) {
    copy = heap->AllocObject<true>(self, GetClass(), num_bytes, visitor);
  } else {
    copy = heap->AllocNonMovableObject<true>(self, GetClass(), num_bytes, visitor);
  }
  if (this_object->GetClass()->IsFinalizable()) {
    heap->AddFinalizerReference(self, &copy);
  }
  return copy.Ptr();
}

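// Generates hash codes with a linear congruential generator over the shared
// seed; the constants 1103515245 and 12345 are the classic ANSI C rand()
// parameters, with the modulus of 2^32 supplied by uint32_t wraparound:
//
//   next = prev * 1103515245u + 12345u;
//
// Values that mask to zero are retried, presumably so that zero can serve as a
// "no hash code installed" sentinel elsewhere in the runtime.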
uint32_t Object::GenerateIdentityHashCode() {
  uint32_t expected_value, new_value;
  do {
    expected_value = hash_code_seed.load(std::memory_order_relaxed);
    new_value = expected_value * 1103515245 + 12345;
  } while (!hash_code_seed.CompareAndSetWeakRelaxed(expected_value, new_value) ||
      (expected_value & LockWord::kHashMask) == 0);
  return expected_value & LockWord::kHashMask;
}

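// Replaces the seed, e.g. so that ahead-of-time compilation can produce
// deterministic identity hash codes (assumed usage; see callers).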
void Object::SetHashCodeSeed(uint32_t new_seed) {
  hash_code_seed.store(new_seed, std::memory_order_relaxed);
}

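// Resolves the identity hash code through the lock word state machine: install
// a freshly generated hash via CAS if the object is unlocked, inflate a thin
// lock into a monitor so the hash can live alongside the lock state, or read
// the hash back out of an existing monitor or hash-state lock word.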
int32_t Object::IdentityHashCode() {
  ObjPtr<Object> current_this = this;  // The this pointer may get invalidated by thread suspension.
  while (true) {
    LockWord lw = current_this->GetLockWord(false);
    switch (lw.GetState()) {
      case LockWord::kUnlocked: {
        // Try to compare and swap in a new hash; if we succeed, we will return the hash on the
        // next loop iteration.
        LockWord hash_word = LockWord::FromHashCode(GenerateIdentityHashCode(), lw.GCState());
        DCHECK_EQ(hash_word.GetState(), LockWord::kHashCode);
        // Use a strong CAS to prevent spurious failures, since these can make the boot image
        // non-deterministic.
        if (current_this->CasLockWord(lw, hash_word, CASMode::kStrong, std::memory_order_relaxed)) {
          return hash_word.GetHashCode();
        }
        break;
      }
      case LockWord::kThinLocked: {
        // Inflate the thin lock to a monitor and stick the hash code inside the monitor. May
        // fail spuriously.
        Thread* self = Thread::Current();
        StackHandleScope<1> hs(self);
        Handle<mirror::Object> h_this(hs.NewHandle(current_this));
        Monitor::InflateThinLocked(self, h_this, lw, GenerateIdentityHashCode());
        // A GC may have occurred when we switched to kBlocked.
        current_this = h_this.Get();
        break;
      }
      case LockWord::kFatLocked: {
        // Already inflated, return the hash stored in the monitor.
        Monitor* monitor = lw.FatLockMonitor();
        DCHECK(monitor != nullptr);
        return monitor->GetHashCode();
      }
      case LockWord::kHashCode: {
        return lw.GetHashCode();
      }
      default: {
        LOG(FATAL) << "Invalid state during hashcode " << lw.GetState();
        break;
      }
    }
  }
  UNREACHABLE();
}

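// Validation helper: walks the instance fields of this object's class
// hierarchy (and its static fields, if this object is itself a Class) looking
// for the field at field_offset, then checks that new_value's class is
// assignable to the field's resolved type. Aborts if no field matches the
// offset.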
void Object::CheckFieldAssignmentImpl(MemberOffset field_offset, ObjPtr<Object> new_value) {
  ObjPtr<Class> c = GetClass();
  Runtime* runtime = Runtime::Current();
  if (runtime->GetClassLinker() == nullptr || !runtime->IsStarted() ||
      !runtime->GetHeap()->IsObjectValidationEnabled() || !c->IsResolved()) {
    return;
  }
  for (ObjPtr<Class> cur = c; cur != nullptr; cur = cur->GetSuperClass()) {
    for (ArtField& field : cur->GetIFields()) {
      if (field.GetOffset().Int32Value() == field_offset.Int32Value()) {
        CHECK_EQ(field.GetTypeAsPrimitiveType(), Primitive::kPrimNot);
        // TODO: resolve the field type for moving GC.
        ObjPtr<mirror::Class> field_type =
            kMovingCollector ? field.LookupResolvedType() : field.ResolveType();
        if (field_type != nullptr) {
          CHECK(field_type->IsAssignableFrom(new_value->GetClass()));
        }
        return;
      }
    }
  }
  if (c->IsArrayClass()) {
    // Bounds checks and assignability checks are done in the array setter.
    return;
  }
  if (IsClass()) {
    for (ArtField& field : AsClass()->GetSFields()) {
      if (field.GetOffset().Int32Value() == field_offset.Int32Value()) {
        CHECK_EQ(field.GetTypeAsPrimitiveType(), Primitive::kPrimNot);
        // TODO: resolve the field type for moving GC.
        ObjPtr<mirror::Class> field_type =
            kMovingCollector ? field.LookupResolvedType() : field.ResolveType();
        if (field_type != nullptr) {
          CHECK(field_type->IsAssignableFrom(new_value->GetClass()));
        }
        return;
      }
    }
  }
  LOG(FATAL) << "Failed to find field for assignment to " << reinterpret_cast<void*>(this)
             << " of type " << c->PrettyDescriptor() << " at offset " << field_offset;
  UNREACHABLE();
}

ArtField* Object::FindFieldByOffset(MemberOffset offset) {
  return IsClass() ? ArtField::FindStaticFieldWithOffset(AsClass(), offset.Uint32Value())
                   : ArtField::FindInstanceFieldWithOffset(GetClass(), offset.Uint32Value());
}

std::string Object::PrettyTypeOf(ObjPtr<mirror::Object> obj) {
  return (obj == nullptr) ? "null" : obj->PrettyTypeOf();
}

std::string Object::PrettyTypeOf() {
  // From-space version is the same as the to-space version since the dex file never changes.
  // Avoiding the read barrier here is important to prevent recursive AssertToSpaceInvariant
  // issues.
  ObjPtr<mirror::Class> klass = GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>();
  if (klass == nullptr) {
    return "(raw)";
  }
  std::string temp;
  std::string result(PrettyDescriptor(klass->GetDescriptor(&temp)));
  if (klass->IsClassClass()) {
    result += "<" + PrettyDescriptor(AsClass()->GetDescriptor(&temp)) + ">";
  }
  return result;
}

}  // namespace mirror
}  // namespace art