diff options
Diffstat (limited to 'runtime')
-rw-r--r-- | runtime/class_linker.cc | 17 | ||||
-rw-r--r-- | runtime/class_linker_test.cc | 3 | ||||
-rw-r--r-- | runtime/gc/collector/mark_sweep.cc | 32 | ||||
-rw-r--r-- | runtime/gc/collector/mark_sweep.h | 8 | ||||
-rw-r--r-- | runtime/gc/heap.cc | 8 | ||||
-rw-r--r-- | runtime/hprof/hprof.cc | 8 | ||||
-rw-r--r-- | runtime/indirect_reference_table.cc | 16 | ||||
-rw-r--r-- | runtime/indirect_reference_table.h | 12 | ||||
-rw-r--r-- | runtime/intern_table.cc | 8 | ||||
-rw-r--r-- | runtime/jni_internal.cc | 6 | ||||
-rw-r--r-- | runtime/mirror/string.cc | 4 | ||||
-rw-r--r-- | runtime/mirror/string.h | 1 | ||||
-rw-r--r-- | runtime/reference_table.cc | 10 | ||||
-rw-r--r-- | runtime/reference_table.h | 6 | ||||
-rw-r--r-- | runtime/root_visitor.h | 3 | ||||
-rw-r--r-- | runtime/runtime.cc | 13 | ||||
-rw-r--r-- | runtime/sirt_ref.h | 2 | ||||
-rw-r--r-- | runtime/stack.cc | 4 | ||||
-rw-r--r-- | runtime/stack.h | 12 | ||||
-rw-r--r-- | runtime/thread.cc | 146 | ||||
-rw-r--r-- | runtime/thread.h | 3 | ||||
-rw-r--r-- | runtime/thread_list.cc | 16 | ||||
-rw-r--r-- | runtime/throw_location.cc | 11 |
23 files changed, 175 insertions(+), 174 deletions(-)
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc index 37b62ad..179fb1a 100644 --- a/runtime/class_linker.cc +++ b/runtime/class_linker.cc @@ -1089,26 +1089,31 @@ void ClassLinker::InitFromImage() { // reinit references to when reinitializing a ClassLinker from a // mapped image. void ClassLinker::VisitRoots(RootVisitor* visitor, void* arg, bool clean_dirty) { - visitor(class_roots_, arg); + class_roots_ = reinterpret_cast<mirror::ObjectArray<mirror::Class>*>(visitor(class_roots_, arg)); + DCHECK(class_roots_ != nullptr); + Thread* self = Thread::Current(); { ReaderMutexLock mu(self, dex_lock_); - for (mirror::DexCache* dex_cache : dex_caches_) { - visitor(dex_cache, arg); + for (mirror::DexCache*& dex_cache : dex_caches_) { + dex_cache = reinterpret_cast<mirror::DexCache*>(visitor(dex_cache, arg)); + DCHECK(dex_cache != nullptr); } } { ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_); - for (const std::pair<size_t, mirror::Class*>& it : class_table_) { - visitor(it.second, arg); + for (std::pair<size_t const, mirror::Class*>& it : class_table_) { + it.second = reinterpret_cast<mirror::Class*>(visitor(it.second, arg)); + DCHECK(it.second != nullptr); } // We deliberately ignore the class roots in the image since we // handle image roots by using the MS/CMS rescanning of dirty cards. 
} - visitor(array_iftable_, arg); + array_iftable_ = reinterpret_cast<mirror::IfTable*>(visitor(array_iftable_, arg)); + DCHECK(array_iftable_ != nullptr); if (clean_dirty) { is_dirty_ = false; } diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc index 6442f5a..192111f 100644 --- a/runtime/class_linker_test.cc +++ b/runtime/class_linker_test.cc @@ -340,8 +340,9 @@ class ClassLinkerTest : public CommonTest { } } - static void TestRootVisitor(const mirror::Object* root, void*) { + static mirror::Object* TestRootVisitor(mirror::Object* root, void*) { EXPECT_TRUE(root != NULL); + return root; } }; diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc index 953fbf9..f724cdb 100644 --- a/runtime/gc/collector/mark_sweep.cc +++ b/runtime/gc/collector/mark_sweep.cc @@ -497,24 +497,18 @@ void MarkSweep::MarkRoot(const Object* obj) { } } -void MarkSweep::MarkRootParallelCallback(const Object* root, void* arg) { +Object* MarkSweep::MarkRootParallelCallback(Object* root, void* arg) { DCHECK(root != NULL); DCHECK(arg != NULL); reinterpret_cast<MarkSweep*>(arg)->MarkObjectNonNullParallel(root); + return root; } -void MarkSweep::MarkObjectCallback(const Object* root, void* arg) { - DCHECK(root != NULL); - DCHECK(arg != NULL); - MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg); - mark_sweep->MarkObjectNonNull(root); -} - -void MarkSweep::ReMarkObjectVisitor(const Object* root, void* arg) { - DCHECK(root != NULL); - DCHECK(arg != NULL); - MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg); - mark_sweep->MarkObjectNonNull(root); +Object* MarkSweep::MarkRootCallback(Object* root, void* arg) { + DCHECK(root != nullptr); + DCHECK(arg != nullptr); + reinterpret_cast<MarkSweep*>(arg)->MarkObjectNonNull(root); + return root; } void MarkSweep::VerifyRootCallback(const Object* root, void* arg, size_t vreg, @@ -542,20 +536,20 @@ void MarkSweep::VerifyRoots() { // Marks all objects in the root set. 
void MarkSweep::MarkRoots() { timings_.StartSplit("MarkRoots"); - Runtime::Current()->VisitNonConcurrentRoots(MarkObjectCallback, this); + Runtime::Current()->VisitNonConcurrentRoots(MarkRootCallback, this); timings_.EndSplit(); } void MarkSweep::MarkNonThreadRoots() { timings_.StartSplit("MarkNonThreadRoots"); - Runtime::Current()->VisitNonThreadRoots(MarkObjectCallback, this); + Runtime::Current()->VisitNonThreadRoots(MarkRootCallback, this); timings_.EndSplit(); } void MarkSweep::MarkConcurrentRoots() { timings_.StartSplit("MarkConcurrentRoots"); // Visit all runtime roots and clear dirty flags. - Runtime::Current()->VisitConcurrentRoots(MarkObjectCallback, this, false, true); + Runtime::Current()->VisitConcurrentRoots(MarkRootCallback, this, false, true); timings_.EndSplit(); } @@ -963,14 +957,14 @@ void MarkSweep::RecursiveMarkDirtyObjects(bool paused, byte minimum_age) { void MarkSweep::ReMarkRoots() { timings_.StartSplit("ReMarkRoots"); - Runtime::Current()->VisitRoots(ReMarkObjectVisitor, this, true, true); + Runtime::Current()->VisitRoots(MarkRootCallback, this, true, true); timings_.EndSplit(); } void MarkSweep::SweepJniWeakGlobals(IsMarkedTester is_marked, void* arg) { JavaVMExt* vm = Runtime::Current()->GetJavaVM(); WriterMutexLock mu(Thread::Current(), vm->weak_globals_lock); - for (const Object** entry : vm->weak_globals) { + for (Object** entry : vm->weak_globals) { if (!is_marked(*entry, arg)) { *entry = kClearedJniWeakGlobal; } @@ -1053,7 +1047,7 @@ void MarkSweep::VerifySystemWeaks() { JavaVMExt* vm = runtime->GetJavaVM(); ReaderMutexLock mu(Thread::Current(), vm->weak_globals_lock); - for (const Object** entry : vm->weak_globals) { + for (Object** entry : vm->weak_globals) { VerifyIsLive(*entry); } } diff --git a/runtime/gc/collector/mark_sweep.h b/runtime/gc/collector/mark_sweep.h index fdd0c86..8b6ac15 100644 --- a/runtime/gc/collector/mark_sweep.h +++ b/runtime/gc/collector/mark_sweep.h @@ -223,11 +223,11 @@ class MarkSweep : public 
GarbageCollector { SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_); - static void MarkObjectCallback(const mirror::Object* root, void* arg) + static mirror::Object* MarkRootCallback(mirror::Object* root, void* arg) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_); - static void MarkRootParallelCallback(const mirror::Object* root, void* arg); + static mirror::Object* MarkRootParallelCallback(mirror::Object* root, void* arg); // Marks an object. void MarkObject(const mirror::Object* obj) @@ -252,10 +252,6 @@ class MarkSweep : public GarbageCollector { static bool IsMarkedArrayCallback(const mirror::Object* object, void* arg) SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_); - static void ReMarkObjectVisitor(const mirror::Object* root, void* arg) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) - EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_); - static void VerifyImageRootVisitor(mirror::Object* root, void* arg) SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_); diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc index e0048a0..916d38e 100644 --- a/runtime/gc/heap.cc +++ b/runtime/gc/heap.cc @@ -1322,11 +1322,12 @@ void Heap::UpdateAndMarkModUnion(collector::MarkSweep* mark_sweep, base::TimingL image_mod_union_table_->MarkReferences(mark_sweep); } -static void RootMatchesObjectVisitor(const mirror::Object* root, void* arg) { +static mirror::Object* RootMatchesObjectVisitor(mirror::Object* root, void* arg) { mirror::Object* obj = reinterpret_cast<mirror::Object*>(arg); if (root == obj) { LOG(INFO) << "Object " << obj << " is a root"; } + return root; } class ScanVisitor { @@ -1414,9 +1415,10 @@ class VerifyReferenceVisitor { return heap_->IsLiveObjectLocked(obj); } - static void VerifyRoots(const mirror::Object* root, void* arg) { + static mirror::Object* VerifyRoots(mirror::Object* root, void* arg) { VerifyReferenceVisitor* visitor = reinterpret_cast<VerifyReferenceVisitor*>(arg); 
- (*visitor)(NULL, root, MemberOffset(0), true); + (*visitor)(nullptr, root, MemberOffset(0), true); + return root; } private: diff --git a/runtime/hprof/hprof.cc b/runtime/hprof/hprof.cc index 0b2e741..67620a0 100644 --- a/runtime/hprof/hprof.cc +++ b/runtime/hprof/hprof.cc @@ -484,11 +484,11 @@ class Hprof { } private: - static void RootVisitor(const mirror::Object* obj, void* arg) + static mirror::Object* RootVisitor(mirror::Object* obj, void* arg) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { - CHECK(arg != NULL); - Hprof* hprof = reinterpret_cast<Hprof*>(arg); - hprof->VisitRoot(obj); + DCHECK(arg != NULL); + reinterpret_cast<Hprof*>(arg)->VisitRoot(obj); + return obj; } static void HeapBitmapCallback(mirror::Object* obj, void* arg) diff --git a/runtime/indirect_reference_table.cc b/runtime/indirect_reference_table.cc index 8af4d7e..2bd8353 100644 --- a/runtime/indirect_reference_table.cc +++ b/runtime/indirect_reference_table.cc @@ -40,7 +40,7 @@ IndirectReferenceTable::IndirectReferenceTable(size_t initialCount, CHECK_LE(initialCount, maxCount); CHECK_NE(desiredKind, kSirtOrInvalid); - table_ = reinterpret_cast<const mirror::Object**>(malloc(initialCount * sizeof(const mirror::Object*))); + table_ = reinterpret_cast<mirror::Object**>(malloc(initialCount * sizeof(const mirror::Object*))); CHECK(table_ != NULL); memset(table_, 0xd1, initialCount * sizeof(const mirror::Object*)); @@ -75,7 +75,7 @@ bool IndirectReferenceTable::CheckEntry(const char* what, IndirectRef iref, int return true; } -IndirectRef IndirectReferenceTable::Add(uint32_t cookie, const mirror::Object* obj) { +IndirectRef IndirectReferenceTable::Add(uint32_t cookie, mirror::Object* obj) { IRTSegmentState prevState; prevState.all = cookie; size_t topIndex = segment_state_.parts.topIndex; @@ -101,7 +101,7 @@ IndirectRef IndirectReferenceTable::Add(uint32_t cookie, const mirror::Object* o } DCHECK_GT(newSize, alloc_entries_); - table_ = reinterpret_cast<const mirror::Object**>(realloc(table_, 
newSize * sizeof(const mirror::Object*))); + table_ = reinterpret_cast<mirror::Object**>(realloc(table_, newSize * sizeof(mirror::Object*))); slot_data_ = reinterpret_cast<IndirectRefSlot*>(realloc(slot_data_, newSize * sizeof(IndirectRefSlot))); if (table_ == NULL || slot_data_ == NULL) { @@ -126,7 +126,7 @@ IndirectRef IndirectReferenceTable::Add(uint32_t cookie, const mirror::Object* o if (numHoles > 0) { DCHECK_GT(topIndex, 1U); // Find the first hole; likely to be near the end of the list. - const mirror::Object** pScan = &table_[topIndex - 1]; + mirror::Object** pScan = &table_[topIndex - 1]; DCHECK(*pScan != NULL); while (*--pScan != NULL) { DCHECK_GE(pScan, table_ + prevState.parts.topIndex); @@ -194,7 +194,8 @@ bool IndirectReferenceTable::GetChecked(IndirectRef iref) const { return true; } -static int Find(mirror::Object* direct_pointer, int bottomIndex, int topIndex, const mirror::Object** table) { +static int Find(mirror::Object* direct_pointer, int bottomIndex, int topIndex, + mirror::Object** table) { for (int i = bottomIndex; i < topIndex; ++i) { if (table[i] == direct_pointer) { return i; @@ -310,13 +311,14 @@ bool IndirectReferenceTable::Remove(uint32_t cookie, IndirectRef iref) { void IndirectReferenceTable::VisitRoots(RootVisitor* visitor, void* arg) { for (auto ref : *this) { - visitor(*ref, arg); + *ref = visitor(const_cast<mirror::Object*>(*ref), arg); + DCHECK(*ref != nullptr); } } void IndirectReferenceTable::Dump(std::ostream& os) const { os << kind_ << " table dump:\n"; - std::vector<const mirror::Object*> entries(table_, table_ + Capacity()); + ReferenceTable::Table entries(table_, table_ + Capacity()); // Remove NULLs. 
for (int i = entries.size() - 1; i >= 0; --i) { if (entries[i] == NULL) { diff --git a/runtime/indirect_reference_table.h b/runtime/indirect_reference_table.h index 26f53db..51b238c 100644 --- a/runtime/indirect_reference_table.h +++ b/runtime/indirect_reference_table.h @@ -206,7 +206,7 @@ union IRTSegmentState { class IrtIterator { public: - explicit IrtIterator(const mirror::Object** table, size_t i, size_t capacity) + explicit IrtIterator(mirror::Object** table, size_t i, size_t capacity) : table_(table), i_(i), capacity_(capacity) { SkipNullsAndTombstones(); } @@ -217,7 +217,7 @@ class IrtIterator { return *this; } - const mirror::Object** operator*() { + mirror::Object** operator*() { return &table_[i_]; } @@ -233,7 +233,7 @@ class IrtIterator { } } - const mirror::Object** table_; + mirror::Object** table_; size_t i_; size_t capacity_; }; @@ -258,7 +258,7 @@ class IndirectReferenceTable { * Returns NULL if the table is full (max entries reached, or alloc * failed during expansion). */ - IndirectRef Add(uint32_t cookie, const mirror::Object* obj) + IndirectRef Add(uint32_t cookie, mirror::Object* obj) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); /* @@ -266,7 +266,7 @@ class IndirectReferenceTable { * * Returns kInvalidIndirectRefObject if iref is invalid. 
*/ - const mirror::Object* Get(IndirectRef iref) const { + mirror::Object* Get(IndirectRef iref) const { if (!GetChecked(iref)) { return kInvalidIndirectRefObject; } @@ -363,7 +363,7 @@ class IndirectReferenceTable { IRTSegmentState segment_state_; /* bottom of the stack */ - const mirror::Object** table_; + mirror::Object** table_; /* bit mask, ORed into all irefs */ IndirectRefKind kind_; /* extended debugging info */ diff --git a/runtime/intern_table.cc b/runtime/intern_table.cc index e3a75cf..6b0a51b 100644 --- a/runtime/intern_table.cc +++ b/runtime/intern_table.cc @@ -44,14 +44,14 @@ void InternTable::DumpForSigQuit(std::ostream& os) const { void InternTable::VisitRoots(RootVisitor* visitor, void* arg, bool clean_dirty) { MutexLock mu(Thread::Current(), intern_table_lock_); - for (const auto& strong_intern : strong_interns_) { - visitor(strong_intern.second, arg); + for (auto& strong_intern : strong_interns_) { + strong_intern.second = reinterpret_cast<mirror::String*>(visitor(strong_intern.second, arg)); + DCHECK(strong_intern.second != nullptr); } if (clean_dirty) { is_dirty_ = false; } - // Note: we deliberately don't visit the weak_interns_ table and the immutable - // image roots. + // Note: we deliberately don't visit the weak_interns_ table and the immutable image roots. 
} mirror::String* InternTable::Lookup(Table& table, mirror::String* s, diff --git a/runtime/jni_internal.cc b/runtime/jni_internal.cc index d72ddf6..b471599 100644 --- a/runtime/jni_internal.cc +++ b/runtime/jni_internal.cc @@ -314,14 +314,14 @@ static jfieldID FindFieldID(const ScopedObjectAccess& soa, jclass jni_class, con return soa.EncodeField(field); } -static void PinPrimitiveArray(const ScopedObjectAccess& soa, const Array* array) +static void PinPrimitiveArray(const ScopedObjectAccess& soa, Array* array) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { JavaVMExt* vm = soa.Vm(); MutexLock mu(soa.Self(), vm->pins_lock); vm->pin_table.Add(array); } -static void UnpinPrimitiveArray(const ScopedObjectAccess& soa, const Array* array) +static void UnpinPrimitiveArray(const ScopedObjectAccess& soa, Array* array) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { JavaVMExt* vm = soa.Vm(); MutexLock mu(soa.Self(), vm->pins_lock); @@ -1997,7 +1997,7 @@ class JNI { CHECK_NON_NULL_ARGUMENT(GetStringUTFRegion, java_string); ScopedObjectAccess soa(env); String* s = soa.Decode<String*>(java_string); - const CharArray* chars = s->GetCharArray(); + CharArray* chars = s->GetCharArray(); PinPrimitiveArray(soa, chars); if (is_copy != NULL) { *is_copy = JNI_FALSE; diff --git a/runtime/mirror/string.cc b/runtime/mirror/string.cc index 7d968c7..b82683e 100644 --- a/runtime/mirror/string.cc +++ b/runtime/mirror/string.cc @@ -33,6 +33,10 @@ const CharArray* String::GetCharArray() const { return GetFieldObject<const CharArray*>(ValueOffset(), false); } +CharArray* String::GetCharArray() { + return GetFieldObject<CharArray*>(ValueOffset(), false); +} + void String::ComputeHashCode() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { SetHashCode(ComputeUtf16Hash(GetCharArray(), GetOffset(), GetLength())); } diff --git a/runtime/mirror/string.h b/runtime/mirror/string.h index bf545ea..01d8f31 100644 --- a/runtime/mirror/string.h +++ b/runtime/mirror/string.h @@ -44,6 +44,7 @@ class MANAGED String : 
public Object { } const CharArray* GetCharArray() const; + CharArray* GetCharArray(); int32_t GetOffset() const { int32_t result = GetField32(OffsetOffset(), false); diff --git a/runtime/reference_table.cc b/runtime/reference_table.cc index 8e23cbb..e95fdb9 100644 --- a/runtime/reference_table.cc +++ b/runtime/reference_table.cc @@ -38,16 +38,16 @@ ReferenceTable::ReferenceTable(const char* name, size_t initial_size, size_t max ReferenceTable::~ReferenceTable() { } -void ReferenceTable::Add(const mirror::Object* obj) { +void ReferenceTable::Add(mirror::Object* obj) { DCHECK(obj != NULL); - if (entries_.size() == max_size_) { + if (entries_.size() >= max_size_) { LOG(FATAL) << "ReferenceTable '" << name_ << "' " << "overflowed (" << max_size_ << " entries)"; } entries_.push_back(obj); } -void ReferenceTable::Remove(const mirror::Object* obj) { +void ReferenceTable::Remove(mirror::Object* obj) { // We iterate backwards on the assumption that references are LIFO. for (int i = entries_.size() - 1; i >= 0; --i) { if (entries_[i] == obj) { @@ -232,8 +232,8 @@ void ReferenceTable::Dump(std::ostream& os, const Table& entries) { } void ReferenceTable::VisitRoots(RootVisitor* visitor, void* arg) { - for (const auto& ref : entries_) { - visitor(ref, arg); + for (auto& ref : entries_) { + ref = visitor(const_cast<mirror::Object*>(ref), arg); } } diff --git a/runtime/reference_table.h b/runtime/reference_table.h index e369fd0..37b3172 100644 --- a/runtime/reference_table.h +++ b/runtime/reference_table.h @@ -39,9 +39,9 @@ class ReferenceTable { ReferenceTable(const char* name, size_t initial_size, size_t max_size); ~ReferenceTable(); - void Add(const mirror::Object* obj); + void Add(mirror::Object* obj); - void Remove(const mirror::Object* obj); + void Remove(mirror::Object* obj); size_t Size() const; @@ -50,7 +50,7 @@ class ReferenceTable { void VisitRoots(RootVisitor* visitor, void* arg); private: - typedef std::vector<const mirror::Object*> Table; + typedef 
std::vector<mirror::Object*> Table; static void Dump(std::ostream& os, const Table& entries) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); friend class IndirectReferenceTable; // For Dump. diff --git a/runtime/root_visitor.h b/runtime/root_visitor.h index 3aa9b4b..a2d898b 100644 --- a/runtime/root_visitor.h +++ b/runtime/root_visitor.h @@ -23,7 +23,8 @@ class Object; } // namespace mirror class StackVisitor; -typedef void (RootVisitor)(const mirror::Object* root, void* arg); +typedef mirror::Object* (RootVisitor)(mirror::Object* root, void* arg) + __attribute__((warn_unused_result)); typedef void (VerifyRootVisitor)(const mirror::Object* root, void* arg, size_t vreg, const StackVisitor* visitor); typedef bool (IsMarkedTester)(const mirror::Object* object, void* arg); diff --git a/runtime/runtime.cc b/runtime/runtime.cc index 09cbd0b..477fcaf 100644 --- a/runtime/runtime.cc +++ b/runtime/runtime.cc @@ -1142,12 +1142,17 @@ void Runtime::VisitConcurrentRoots(RootVisitor* visitor, void* arg, bool only_di void Runtime::VisitNonThreadRoots(RootVisitor* visitor, void* arg) { java_vm_->VisitRoots(visitor, arg); - if (pre_allocated_OutOfMemoryError_ != NULL) { - visitor(pre_allocated_OutOfMemoryError_, arg); + if (pre_allocated_OutOfMemoryError_ != nullptr) { + pre_allocated_OutOfMemoryError_ = reinterpret_cast<mirror::Throwable*>( + visitor(pre_allocated_OutOfMemoryError_, arg)); + DCHECK(pre_allocated_OutOfMemoryError_ != nullptr); } - visitor(resolution_method_, arg); + resolution_method_ = reinterpret_cast<mirror::ArtMethod*>(visitor(resolution_method_, arg)); + DCHECK(resolution_method_ != nullptr); for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) { - visitor(callee_save_methods_[i], arg); + callee_save_methods_[i] = reinterpret_cast<mirror::ArtMethod*>( + visitor(callee_save_methods_[i], arg)); + DCHECK(callee_save_methods_[i] != nullptr); } } diff --git a/runtime/sirt_ref.h b/runtime/sirt_ref.h index 81f0dff..25d6fb3 100644 --- a/runtime/sirt_ref.h +++ 
b/runtime/sirt_ref.h @@ -30,7 +30,7 @@ class SirtRef { self_->PushSirt(&sirt_); } ~SirtRef() { - CHECK(self_->PopSirt() == &sirt_); + CHECK_EQ(self_->PopSirt(), &sirt_); } T& operator*() const { return *get(); } diff --git a/runtime/stack.cc b/runtime/stack.cc index 206bff3..1715664 100644 --- a/runtime/stack.cc +++ b/runtime/stack.cc @@ -148,8 +148,8 @@ uint32_t StackVisitor::GetVReg(mirror::ArtMethod* m, uint16_t vreg, VRegKind kin const DexFile::CodeItem* code_item = MethodHelper(m).GetCodeItem(); DCHECK(code_item != NULL) << PrettyMethod(m); // Can't be NULL or how would we compile its instructions? size_t frame_size = m->GetFrameSizeInBytes(); - return GetVReg(cur_quick_frame_, code_item, m->GetCoreSpillMask(), m->GetFpSpillMask(), - frame_size, vreg); + return *GetVRegAddr(cur_quick_frame_, code_item, m->GetCoreSpillMask(), m->GetFpSpillMask(), + frame_size, vreg); } } else { return cur_shadow_frame_->GetVReg(vreg); diff --git a/runtime/stack.h b/runtime/stack.h index 8ecf8f0..bd29ceb 100644 --- a/runtime/stack.h +++ b/runtime/stack.h @@ -150,7 +150,12 @@ class ShadowFrame { mirror::Object* GetVRegReference(size_t i) const { DCHECK_LT(i, NumberOfVRegs()); if (HasReferenceArray()) { - return References()[i]; + mirror::Object* ref = References()[i]; + // If the vreg reference is not equal to the vreg then the vreg reference is stale. + if (reinterpret_cast<uint32_t>(ref) != vregs_[i]) { + return nullptr; + } + return ref; } else { const uint32_t* vreg = &vregs_[i]; return *reinterpret_cast<mirror::Object* const*>(vreg); @@ -459,13 +464,14 @@ class StackVisitor { uintptr_t GetGPR(uint32_t reg) const; void SetGPR(uint32_t reg, uintptr_t value); - uint32_t GetVReg(mirror::ArtMethod** cur_quick_frame, const DexFile::CodeItem* code_item, + // This is a fast-path for getting/setting values in a quick frame. 
+ uint32_t* GetVRegAddr(mirror::ArtMethod** cur_quick_frame, const DexFile::CodeItem* code_item, uint32_t core_spills, uint32_t fp_spills, size_t frame_size, uint16_t vreg) const { int offset = GetVRegOffset(code_item, core_spills, fp_spills, frame_size, vreg); DCHECK_EQ(cur_quick_frame, GetCurrentQuickFrame()); byte* vreg_addr = reinterpret_cast<byte*>(cur_quick_frame) + offset; - return *reinterpret_cast<uint32_t*>(vreg_addr); + return reinterpret_cast<uint32_t*>(vreg_addr); } uintptr_t GetReturnPc() const; diff --git a/runtime/thread.cc b/runtime/thread.cc index a454195..d7d4b1f 100644 --- a/runtime/thread.cc +++ b/runtime/thread.cc @@ -1012,9 +1012,10 @@ void Thread::AssertNoPendingException() const { } } -static void MonitorExitVisitor(const mirror::Object* object, void* arg) NO_THREAD_SAFETY_ANALYSIS { +static mirror::Object* MonitorExitVisitor(mirror::Object* object, void* arg) + NO_THREAD_SAFETY_ANALYSIS { Thread* self = reinterpret_cast<Thread*>(arg); - mirror::Object* entered_monitor = const_cast<mirror::Object*>(object); + mirror::Object* entered_monitor = object; if (self->HoldsLock(entered_monitor)) { LOG(WARNING) << "Calling MonitorExit on object " << object << " (" << PrettyTypeOf(object) << ")" @@ -1022,6 +1023,7 @@ static void MonitorExitVisitor(const mirror::Object* object, void* arg) NO_THREA << *Thread::Current() << " which is detaching"; entered_monitor->MonitorExit(self); } + return object; } void Thread::Destroy() { @@ -1151,8 +1153,12 @@ void Thread::SirtVisitRoots(RootVisitor* visitor, void* arg) { size_t num_refs = cur->NumberOfReferences(); for (size_t j = 0; j < num_refs; j++) { mirror::Object* object = cur->GetReference(j); - if (object != NULL) { - visitor(object, arg); + if (object != nullptr) { + const mirror::Object* new_obj = visitor(object, arg); + DCHECK(new_obj != nullptr); + if (new_obj != object) { + cur->SetReference(j, const_cast<mirror::Object*>(new_obj)); + } } } } @@ -2019,8 +2025,11 @@ class ReferenceMapVisitor : public 
StackVisitor { // SIRT for JNI or References for interpreter. for (size_t reg = 0; reg < num_regs; ++reg) { mirror::Object* ref = shadow_frame->GetVRegReference(reg); - if (ref != NULL) { - visitor_(ref, reg, this); + if (ref != nullptr) { + mirror::Object* new_ref = visitor_(ref, reg, this); + if (new_ref != ref) { + shadow_frame->SetVRegReference(reg, new_ref); + } } } } else { @@ -2040,8 +2049,11 @@ class ReferenceMapVisitor : public StackVisitor { for (size_t reg = 0; reg < num_regs; ++reg) { if (TestBitmap(reg, reg_bitmap)) { mirror::Object* ref = shadow_frame->GetVRegReference(reg); - if (ref != NULL) { - visitor_(ref, reg, this); + if (ref != nullptr) { + mirror::Object* new_ref = visitor_(ref, reg, this); + if (new_ref != ref) { + shadow_frame->SetVRegReference(reg, new_ref); + } } } } @@ -2072,19 +2084,25 @@ class ReferenceMapVisitor : public StackVisitor { // Does this register hold a reference? if (TestBitmap(reg, reg_bitmap)) { uint32_t vmap_offset; - mirror::Object* ref; if (vmap_table.IsInContext(reg, kReferenceVReg, &vmap_offset)) { - uintptr_t val = GetGPR(vmap_table.ComputeRegister(core_spills, vmap_offset, - kReferenceVReg)); - ref = reinterpret_cast<mirror::Object*>(val); + int vmap_reg = vmap_table.ComputeRegister(core_spills, vmap_offset, kReferenceVReg); + mirror::Object* ref = reinterpret_cast<mirror::Object*>(GetGPR(vmap_reg)); + if (ref != nullptr) { + mirror::Object* new_ref = visitor_(ref, reg, this); + if (ref != new_ref) { + SetGPR(vmap_reg, reinterpret_cast<uintptr_t>(new_ref)); + } + } } else { - ref = reinterpret_cast<mirror::Object*>(GetVReg(cur_quick_frame, code_item, - core_spills, fp_spills, frame_size, - reg)); - } - - if (ref != NULL) { - visitor_(ref, reg, this); + uint32_t* reg_addr = + GetVRegAddr(cur_quick_frame, code_item, core_spills, fp_spills, frame_size, reg); + mirror::Object* ref = reinterpret_cast<mirror::Object*>(*reg_addr); + if (ref != nullptr) { + mirror::Object* new_ref = visitor_(ref, reg, this); + if (ref != 
new_ref) { + *reg_addr = reinterpret_cast<uint32_t>(new_ref); + } + } } } } @@ -2110,8 +2128,8 @@ class RootCallbackVisitor { public: RootCallbackVisitor(RootVisitor* visitor, void* arg) : visitor_(visitor), arg_(arg) {} - void operator()(const mirror::Object* obj, size_t, const StackVisitor*) const { - visitor_(obj, arg_); + mirror::Object* operator()(mirror::Object* obj, size_t, const StackVisitor*) const { + return visitor_(obj, arg_); } private: @@ -2135,67 +2153,17 @@ class VerifyCallbackVisitor { void* const arg_; }; -struct VerifyRootWrapperArg { - VerifyRootVisitor* visitor; - void* arg; -}; - -static void VerifyRootWrapperCallback(const mirror::Object* root, void* arg) { - VerifyRootWrapperArg* wrapperArg = reinterpret_cast<VerifyRootWrapperArg*>(arg); - wrapperArg->visitor(root, wrapperArg->arg, 0, NULL); -} - -void Thread::VerifyRoots(VerifyRootVisitor* visitor, void* arg) { - // We need to map from a RootVisitor to VerifyRootVisitor, so pass in nulls for arguments we - // don't have. 
- VerifyRootWrapperArg wrapperArg; - wrapperArg.arg = arg; - wrapperArg.visitor = visitor; - - if (opeer_ != NULL) { - VerifyRootWrapperCallback(opeer_, &wrapperArg); - } - if (exception_ != NULL) { - VerifyRootWrapperCallback(exception_, &wrapperArg); - } - throw_location_.VisitRoots(VerifyRootWrapperCallback, &wrapperArg); - if (class_loader_override_ != NULL) { - VerifyRootWrapperCallback(class_loader_override_, &wrapperArg); - } - jni_env_->locals.VisitRoots(VerifyRootWrapperCallback, &wrapperArg); - jni_env_->monitors.VisitRoots(VerifyRootWrapperCallback, &wrapperArg); - - SirtVisitRoots(VerifyRootWrapperCallback, &wrapperArg); - - // Visit roots on this thread's stack - Context* context = GetLongJumpContext(); - VerifyCallbackVisitor visitorToCallback(visitor, arg); - ReferenceMapVisitor<VerifyCallbackVisitor> mapper(this, context, visitorToCallback); - mapper.WalkStack(); - ReleaseLongJumpContext(context); - - std::deque<instrumentation::InstrumentationStackFrame>* instrumentation_stack = GetInstrumentationStack(); - typedef std::deque<instrumentation::InstrumentationStackFrame>::const_iterator It; - for (It it = instrumentation_stack->begin(), end = instrumentation_stack->end(); it != end; ++it) { - mirror::Object* this_object = (*it).this_object_; - if (this_object != NULL) { - VerifyRootWrapperCallback(this_object, &wrapperArg); - } - mirror::ArtMethod* method = (*it).method_; - VerifyRootWrapperCallback(method, &wrapperArg); - } -} - void Thread::VisitRoots(RootVisitor* visitor, void* arg) { - if (opeer_ != NULL) { - visitor(opeer_, arg); + if (opeer_ != nullptr) { + opeer_ = visitor(opeer_, arg); } - if (exception_ != NULL) { - visitor(exception_, arg); + if (exception_ != nullptr) { + exception_ = reinterpret_cast<mirror::Throwable*>(visitor(exception_, arg)); } throw_location_.VisitRoots(visitor, arg); - if (class_loader_override_ != NULL) { - visitor(class_loader_override_, arg); + if (class_loader_override_ != nullptr) { + class_loader_override_ = 
reinterpret_cast<mirror::ClassLoader*>( + visitor(class_loader_override_, arg)); } jni_env_->locals.VisitRoots(visitor, arg); jni_env_->monitors.VisitRoots(visitor, arg); @@ -2209,24 +2177,26 @@ void Thread::VisitRoots(RootVisitor* visitor, void* arg) { mapper.WalkStack(); ReleaseLongJumpContext(context); - for (const instrumentation::InstrumentationStackFrame& frame : *GetInstrumentationStack()) { - mirror::Object* this_object = frame.this_object_; - if (this_object != NULL) { - visitor(this_object, arg); + for (instrumentation::InstrumentationStackFrame& frame : *GetInstrumentationStack()) { + if (frame.this_object_ != nullptr) { + frame.this_object_ = visitor(frame.this_object_, arg); + DCHECK(frame.this_object_ != nullptr); } - mirror::ArtMethod* method = frame.method_; - visitor(method, arg); + frame.method_ = reinterpret_cast<mirror::ArtMethod*>(visitor(frame.method_, arg)); + DCHECK(frame.method_ != nullptr); } } -static void VerifyObject(const mirror::Object* root, void* arg) { - gc::Heap* heap = reinterpret_cast<gc::Heap*>(arg); - heap->VerifyObject(root); +static mirror::Object* VerifyRoot(mirror::Object* root, void* arg) { + DCHECK(root != nullptr); + DCHECK(arg != nullptr); + reinterpret_cast<gc::Heap*>(arg)->VerifyObject(root); + return root; } void Thread::VerifyStackImpl() { UniquePtr<Context> context(Context::Create()); - RootCallbackVisitor visitorToCallback(VerifyObject, Runtime::Current()->GetHeap()); + RootCallbackVisitor visitorToCallback(VerifyRoot, Runtime::Current()->GetHeap()); ReferenceMapVisitor<RootCallbackVisitor> mapper(this, context.get(), visitorToCallback); mapper.WalkStack(); } diff --git a/runtime/thread.h b/runtime/thread.h index f5f8f56..dbf9736 100644 --- a/runtime/thread.h +++ b/runtime/thread.h @@ -395,9 +395,6 @@ class PACKED(4) Thread { void VisitRoots(RootVisitor* visitor, void* arg) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); - void VerifyRoots(VerifyRootVisitor* visitor, void* arg) - 
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); - void VerifyStack() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); // diff --git a/runtime/thread_list.cc b/runtime/thread_list.cc index aba81fe..44cf810 100644 --- a/runtime/thread_list.cc +++ b/runtime/thread_list.cc @@ -569,10 +569,24 @@ void ThreadList::VisitRoots(RootVisitor* visitor, void* arg) const { } } +struct VerifyRootWrapperArg { + VerifyRootVisitor* visitor; + void* arg; +}; + +static mirror::Object* VerifyRootWrapperCallback(mirror::Object* root, void* arg) { + VerifyRootWrapperArg* wrapperArg = reinterpret_cast<VerifyRootWrapperArg*>(arg); + wrapperArg->visitor(root, wrapperArg->arg, 0, NULL); + return root; +} + void ThreadList::VerifyRoots(VerifyRootVisitor* visitor, void* arg) const { + VerifyRootWrapperArg wrapper; + wrapper.visitor = visitor; + wrapper.arg = arg; MutexLock mu(Thread::Current(), *Locks::thread_list_lock_); for (const auto& thread : list_) { - thread->VerifyRoots(visitor, arg); + thread->VisitRoots(VerifyRootWrapperCallback, &wrapper); } } diff --git a/runtime/throw_location.cc b/runtime/throw_location.cc index e428511..01497ef 100644 --- a/runtime/throw_location.cc +++ b/runtime/throw_location.cc @@ -34,11 +34,14 @@ std::string ThrowLocation::Dump() const { } void ThrowLocation::VisitRoots(RootVisitor* visitor, void* arg) { - if (this_object_ != NULL) { - visitor(this_object_, arg); + if (this_object_ != nullptr) { + this_object_ = const_cast<mirror::Object*>(visitor(this_object_, arg)); + DCHECK(this_object_ != nullptr); } - if (method_ != NULL) { - visitor(method_, arg); + if (method_ != nullptr) { + method_ = const_cast<mirror::ArtMethod*>( + reinterpret_cast<const mirror::ArtMethod*>(visitor(method_, arg))); + DCHECK(method_ != nullptr); } } |