| author | Mathieu Chartier <mathieuc@google.com> | 2015-06-03 17:32:42 -0700 |
|---|---|---|
| committer | Mathieu Chartier <mathieuc@google.com> | 2015-06-03 18:34:11 -0700 |
| commit | c991403cd8b869e4a38c11c6a58223b82b89a1b2 (patch) | |
| tree | afaa5b31c572a90db6e7e83f294fda9d16bbe7eb /runtime/gc | |
| parent | 8f23620d45399286564986d2541cda761b3fe0ac (diff) | |
Fix valgrind large_object_space_test
Also some cleanup.
Change-Id: I9c1a8093e6356f1b52e332009429b77fa5c1d448
Diffstat (limited to 'runtime/gc')
| -rw-r--r-- | runtime/gc/heap.cc | 2 |
| -rw-r--r-- | runtime/gc/space/large_object_space.cc | 34 |
2 files changed, 23 insertions, 13 deletions
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index f039f6b..20e791d 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -2252,8 +2252,8 @@ void Heap::PreZygoteFork() {
   // Set all the cards in the mod-union table since we don't know which objects contain references
   // to large objects.
   mod_union_table->SetCards();
-  large_object_space_->SetAllLargeObjectsAsZygoteObjects(self);
   AddModUnionTable(mod_union_table);
+  large_object_space_->SetAllLargeObjectsAsZygoteObjects(self);
   if (collector::SemiSpace::kUseRememberedSet) {
     // Add a new remembered set for the post-zygote non-moving space.
     accounting::RememberedSet* post_zygote_non_moving_space_rem_set =
diff --git a/runtime/gc/space/large_object_space.cc b/runtime/gc/space/large_object_space.cc
index 9436c3f..52192e2 100644
--- a/runtime/gc/space/large_object_space.cc
+++ b/runtime/gc/space/large_object_space.cc
@@ -46,8 +46,8 @@ class ValgrindLargeObjectMapSpace FINAL : public LargeObjectMapSpace {
     }
   }
 
-  virtual mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
-                                size_t* usable_size, size_t* bytes_tl_bulk_allocated)
+  mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
+                        size_t* usable_size, size_t* bytes_tl_bulk_allocated)
       OVERRIDE {
     mirror::Object* obj = LargeObjectMapSpace::Alloc(self, num_bytes + kValgrindRedZoneBytes * 2,
                                                      bytes_allocated,
@@ -63,26 +63,35 @@ class ValgrindLargeObjectMapSpace FINAL : public LargeObjectMapSpace {
     return object_without_rdz;
   }
 
-  virtual size_t AllocationSize(mirror::Object* obj, size_t* usable_size) OVERRIDE {
-    mirror::Object* object_with_rdz = reinterpret_cast<mirror::Object*>(
-        reinterpret_cast<uintptr_t>(obj) - kValgrindRedZoneBytes);
-    return LargeObjectMapSpace::AllocationSize(object_with_rdz, usable_size);
+  size_t AllocationSize(mirror::Object* obj, size_t* usable_size) OVERRIDE {
+    return LargeObjectMapSpace::AllocationSize(ObjectWithRedzone(obj), usable_size);
   }
 
-  virtual size_t Free(Thread* self, mirror::Object* obj) OVERRIDE {
-    mirror::Object* object_with_rdz = reinterpret_cast<mirror::Object*>(
-        reinterpret_cast<uintptr_t>(obj) - kValgrindRedZoneBytes);
+  bool IsZygoteLargeObject(Thread* self, mirror::Object* obj) const OVERRIDE {
+    return LargeObjectMapSpace::IsZygoteLargeObject(self, ObjectWithRedzone(obj));
+  }
+
+  size_t Free(Thread* self, mirror::Object* obj) OVERRIDE {
+    mirror::Object* object_with_rdz = ObjectWithRedzone(obj);
     VALGRIND_MAKE_MEM_UNDEFINED(object_with_rdz, AllocationSize(obj, nullptr));
     return LargeObjectMapSpace::Free(self, object_with_rdz);
   }
 
   bool Contains(const mirror::Object* obj) const OVERRIDE {
-    mirror::Object* object_with_rdz = reinterpret_cast<mirror::Object*>(
-        reinterpret_cast<uintptr_t>(obj) - kValgrindRedZoneBytes);
-    return LargeObjectMapSpace::Contains(object_with_rdz);
+    return LargeObjectMapSpace::Contains(ObjectWithRedzone(obj));
   }
 
  private:
+  static const mirror::Object* ObjectWithRedzone(const mirror::Object* obj) {
+    return reinterpret_cast<const mirror::Object*>(
+        reinterpret_cast<uintptr_t>(obj) - kValgrindRedZoneBytes);
+  }
+
+  static mirror::Object* ObjectWithRedzone(mirror::Object* obj) {
+    return reinterpret_cast<mirror::Object*>(
+        reinterpret_cast<uintptr_t>(obj) - kValgrindRedZoneBytes);
+  }
+
   static constexpr size_t kValgrindRedZoneBytes = kPageSize;
 };
 
@@ -253,6 +262,7 @@ class AllocationInfo {
   }
   // Updates the allocation size and whether or not it is free.
   void SetByteSize(size_t size, bool free) {
+    DCHECK_EQ(size & ~kFlagsMask, 0u);
     DCHECK_ALIGNED(size, FreeListSpace::kAlignment);
     alloc_size_ = (size / FreeListSpace::kAlignment) | (free ? kFlagFree : 0u);
   }
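For context on the pattern the cleanup consolidates: ValgrindLargeObjectMapSpace over-allocates each large object with a red zone on either side, hands the caller a pointer just past the leading red zone, and therefore has to translate that user-visible pointer back before delegating to LargeObjectMapSpace; the new IsZygoteLargeObject override applies the same translation, and the two ObjectWithRedzone() helpers centralize it. Below is a minimal standalone sketch of that pointer-translation idea. BaseSpace, RedZoneSpace, and the 64-byte red zone are illustrative stand-ins, not ART code, and the real class additionally uses Valgrind client requests (e.g. VALGRIND_MAKE_MEM_UNDEFINED in Free()), which this sketch omits.

```cpp
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <unordered_map>

// Illustrative stand-in for the underlying LargeObjectMapSpace: it only
// tracks the size of each allocation it hands out.
class BaseSpace {
 public:
  void* Alloc(size_t bytes) {
    void* p = std::malloc(bytes);
    sizes_[p] = bytes;
    return p;
  }
  size_t AllocationSize(void* p) const { return sizes_.at(p); }
  void Free(void* p) {
    sizes_.erase(p);
    std::free(p);
  }

 private:
  std::unordered_map<void*, size_t> sizes_;
};

// Wrapper that hides a red zone in front of every object, mirroring the
// pattern in ValgrindLargeObjectMapSpace (kRedZoneBytes plays the role of
// kValgrindRedZoneBytes; its value here is arbitrary).
class RedZoneSpace {
 public:
  void* Alloc(size_t bytes) {
    // Over-allocate: one red zone before and one after the user-visible object.
    uint8_t* with_rdz = static_cast<uint8_t*>(base_.Alloc(bytes + 2 * kRedZoneBytes));
    return with_rdz + kRedZoneBytes;  // The caller never sees the red zone.
  }
  size_t AllocationSize(void* obj) const {
    // Every query must translate back to the pointer the base space knows about.
    return base_.AllocationSize(WithRedZone(obj));
  }
  void Free(void* obj) { base_.Free(WithRedZone(obj)); }

 private:
  static void* WithRedZone(void* obj) {
    return reinterpret_cast<uint8_t*>(obj) - kRedZoneBytes;
  }
  static constexpr size_t kRedZoneBytes = 64;
  BaseSpace base_;
};

int main() {
  RedZoneSpace space;
  void* obj = space.Alloc(1000);
  // The reported size includes both red zones, as the ART wrapper's
  // AllocationSize also does, since Alloc() over-allocated by two red zones.
  assert(space.AllocationSize(obj) == 1000 + 2 * 64);
  space.Free(obj);
}
```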
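The new DCHECK_EQ in AllocationInfo::SetByteSize guards the packing scheme visible in the two lines that follow it: the byte size is divided by FreeListSpace::kAlignment and the quotient shares a single word with flag bits such as kFlagFree. The sketch below illustrates that size-plus-flags packing in isolation; the alignment value, the flag layout, and the class name are assumptions for illustration, not ART's actual definitions.

```cpp
#include <cassert>
#include <cstddef>
#include <cstdint>

// Illustrative packing of an allocation size plus a "free" flag into one
// 32-bit word, in the spirit of AllocationInfo::alloc_size_. The alignment
// and flag bit chosen here are assumptions, not ART's values.
class PackedAllocInfo {
 public:
  static constexpr size_t kAlignment = 16;           // assumed allocation granularity
  static constexpr uint32_t kFlagFree = 0x80000000;  // high bit marks a free block

  void SetByteSize(size_t size, bool free) {
    // The size must be a multiple of the alignment...
    assert(size % kAlignment == 0);
    // ...and small enough that size / kAlignment cannot spill into the flag bit,
    // which is the kind of invariant the new DCHECK_EQ asserts.
    assert((size / kAlignment) < kFlagFree);
    alloc_size_ = static_cast<uint32_t>(size / kAlignment) | (free ? kFlagFree : 0u);
  }
  size_t ByteSize() const { return (alloc_size_ & ~kFlagFree) * kAlignment; }
  bool IsFree() const { return (alloc_size_ & kFlagFree) != 0; }

 private:
  uint32_t alloc_size_ = 0;
};

int main() {
  PackedAllocInfo info;
  info.SetByteSize(4096, /*free=*/true);
  assert(info.ByteSize() == 4096);
  assert(info.IsFree());
}
```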