From a8e8f9c0a8e259a807d7b99a148d14104c24209d Mon Sep 17 00:00:00 2001 From: Mathieu Chartier Date: Wed, 9 Apr 2014 14:51:05 -0700 Subject: Refactor space bitmap to support different alignments. Required for: Using space bitmaps instead of std::set in mod union table + remembered set. Using a bitmap instead of set for large object marking. Bug: 13571028 Change-Id: Id024e9563d4ca4278f79607cdb2f81895121b113 --- runtime/gc/accounting/card_table-inl.h | 2 +- runtime/gc/accounting/card_table.h | 5 +- runtime/gc/accounting/heap_bitmap-inl.h | 10 +-- runtime/gc/accounting/heap_bitmap.cc | 10 ++- runtime/gc/accounting/heap_bitmap.h | 11 +-- runtime/gc/accounting/mod_union_table.cc | 7 +- runtime/gc/accounting/mod_union_table.h | 1 - runtime/gc/accounting/remembered_set.cc | 2 +- runtime/gc/accounting/space_bitmap-inl.h | 37 +++++++-- runtime/gc/accounting/space_bitmap.cc | 117 ++++++++++------------------- runtime/gc/accounting/space_bitmap.h | 61 +++++++++++---- runtime/gc/accounting/space_bitmap_test.cc | 23 +++--- runtime/gc/collector/garbage_collector.cc | 4 +- runtime/gc/collector/mark_sweep.cc | 24 +++--- runtime/gc/collector/mark_sweep.h | 4 +- runtime/gc/collector/semi_space-inl.h | 2 +- runtime/gc/collector/semi_space.cc | 14 ++-- runtime/gc/collector/semi_space.h | 5 +- runtime/gc/heap.cc | 25 +++--- runtime/gc/heap.h | 4 +- runtime/gc/heap_test.cc | 10 +-- runtime/gc/space/bump_pointer_space.cc | 2 +- runtime/gc/space/bump_pointer_space.h | 6 +- runtime/gc/space/dlmalloc_space.cc | 4 +- runtime/gc/space/image_space.cc | 10 +-- runtime/gc/space/image_space.h | 10 ++- runtime/gc/space/malloc_space.cc | 10 +-- runtime/gc/space/malloc_space.h | 2 +- runtime/gc/space/rosalloc_space.cc | 4 +- runtime/gc/space/space.cc | 19 +++-- runtime/gc/space/space.h | 22 +++--- runtime/gc/space/zygote_space.cc | 6 +- runtime/gc/space/zygote_space.h | 11 +-- 33 files changed, 251 insertions(+), 233 deletions(-) (limited to 'runtime/gc') diff --git a/runtime/gc/accounting/card_table-inl.h b/runtime/gc/accounting/card_table-inl.h index 564168e..a1d001e 100644 --- a/runtime/gc/accounting/card_table-inl.h +++ b/runtime/gc/accounting/card_table-inl.h @@ -43,7 +43,7 @@ static inline bool byte_cas(byte old_value, byte new_value, byte* address) { } template -inline size_t CardTable::Scan(SpaceBitmap* bitmap, byte* scan_begin, byte* scan_end, +inline size_t CardTable::Scan(ContinuousSpaceBitmap* bitmap, byte* scan_begin, byte* scan_end, const Visitor& visitor, const byte minimum_age) const { DCHECK(bitmap->HasAddress(scan_begin)); DCHECK(bitmap->HasAddress(scan_end - 1)); // scan_end is the byte after the last byte we scan. diff --git a/runtime/gc/accounting/card_table.h b/runtime/gc/accounting/card_table.h index 8b7bfd3..8d5dc07 100644 --- a/runtime/gc/accounting/card_table.h +++ b/runtime/gc/accounting/card_table.h @@ -38,7 +38,7 @@ class Heap; namespace accounting { -class SpaceBitmap; +template class SpaceBitmap; // Maintain a card table from the the write barrier. All writes of // non-NULL values to heap addresses should go through an entry in @@ -102,7 +102,8 @@ class CardTable { // For every dirty at least minumum age between begin and end invoke the visitor with the // specified argument. Returns how many cards the visitor was run on. 
template - size_t Scan(SpaceBitmap* bitmap, byte* scan_begin, byte* scan_end, const Visitor& visitor, + size_t Scan(SpaceBitmap* bitmap, byte* scan_begin, byte* scan_end, + const Visitor& visitor, const byte minimum_age = kCardDirty) const EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); diff --git a/runtime/gc/accounting/heap_bitmap-inl.h b/runtime/gc/accounting/heap_bitmap-inl.h index 04e85d2..ed7b427 100644 --- a/runtime/gc/accounting/heap_bitmap-inl.h +++ b/runtime/gc/accounting/heap_bitmap-inl.h @@ -37,16 +37,16 @@ inline void HeapBitmap::Visit(const Visitor& visitor) { } inline bool HeapBitmap::Test(const mirror::Object* obj) { - SpaceBitmap* bitmap = GetContinuousSpaceBitmap(obj); + ContinuousSpaceBitmap* bitmap = GetContinuousSpaceBitmap(obj); if (LIKELY(bitmap != nullptr)) { return bitmap->Test(obj); } else { - return GetDiscontinuousSpaceObjectSet(obj) != NULL; + return GetDiscontinuousSpaceObjectSet(obj) != nullptr; } } inline void HeapBitmap::Clear(const mirror::Object* obj) { - SpaceBitmap* bitmap = GetContinuousSpaceBitmap(obj); + ContinuousSpaceBitmap* bitmap = GetContinuousSpaceBitmap(obj); if (LIKELY(bitmap != nullptr)) { bitmap->Clear(obj); } else { @@ -57,7 +57,7 @@ inline void HeapBitmap::Clear(const mirror::Object* obj) { } inline void HeapBitmap::Set(const mirror::Object* obj) { - SpaceBitmap* bitmap = GetContinuousSpaceBitmap(obj); + ContinuousSpaceBitmap* bitmap = GetContinuousSpaceBitmap(obj); if (LIKELY(bitmap != NULL)) { bitmap->Set(obj); } else { @@ -67,7 +67,7 @@ inline void HeapBitmap::Set(const mirror::Object* obj) { } } -inline SpaceBitmap* HeapBitmap::GetContinuousSpaceBitmap(const mirror::Object* obj) const { +inline ContinuousSpaceBitmap* HeapBitmap::GetContinuousSpaceBitmap(const mirror::Object* obj) const { for (const auto& bitmap : continuous_space_bitmaps_) { if (bitmap->HasAddress(obj)) { return bitmap; diff --git a/runtime/gc/accounting/heap_bitmap.cc b/runtime/gc/accounting/heap_bitmap.cc index f94cf24..1db886c 100644 --- a/runtime/gc/accounting/heap_bitmap.cc +++ b/runtime/gc/accounting/heap_bitmap.cc @@ -16,13 +16,15 @@ #include "heap_bitmap.h" +#include "gc/accounting/space_bitmap-inl.h" #include "gc/space/space.h" namespace art { namespace gc { namespace accounting { -void HeapBitmap::ReplaceBitmap(SpaceBitmap* old_bitmap, SpaceBitmap* new_bitmap) { +void HeapBitmap::ReplaceBitmap(ContinuousSpaceBitmap* old_bitmap, + ContinuousSpaceBitmap* new_bitmap) { for (auto& bitmap : continuous_space_bitmaps_) { if (bitmap == old_bitmap) { bitmap = new_bitmap; @@ -42,7 +44,7 @@ void HeapBitmap::ReplaceObjectSet(ObjectSet* old_set, ObjectSet* new_set) { LOG(FATAL) << "object set " << static_cast(old_set) << " not found"; } -void HeapBitmap::AddContinuousSpaceBitmap(accounting::SpaceBitmap* bitmap) { +void HeapBitmap::AddContinuousSpaceBitmap(accounting::ContinuousSpaceBitmap* bitmap) { DCHECK(bitmap != NULL); // Check for interval overlap. 
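For orientation, the HeapBitmap lookups being retyped above amount to: probe each continuous-space bitmap for one that covers the address, and fall back to the discontinuous (large object) set on a miss. A minimal standalone sketch with stand-in types, not the ART classes:

#include <cstdint>
#include <set>
#include <vector>

// Stand-in for a per-space bitmap: it only knows which address range it
// covers and which addresses are marked.
struct SpaceBitmapStub {
  uintptr_t begin, end;
  std::set<uintptr_t> marked;
  bool HasAddress(uintptr_t addr) const { return addr >= begin && addr < end; }
  bool Test(uintptr_t addr) const { return marked.count(addr) != 0; }
};

// Dispatch pattern from HeapBitmap::Test above: continuous-space bitmaps
// first, then the discontinuous (large object) set.
struct HeapBitmapSketch {
  std::vector<SpaceBitmapStub*> continuous_space_bitmaps;
  std::set<uintptr_t> discontinuous_space_objects;

  bool Test(uintptr_t obj) const {
    for (const SpaceBitmapStub* bitmap : continuous_space_bitmaps) {
      if (bitmap->HasAddress(obj)) {
        return bitmap->Test(obj);
      }
    }
    return discontinuous_space_objects.count(obj) != 0;
  }
};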
@@ -55,14 +57,14 @@ void HeapBitmap::AddContinuousSpaceBitmap(accounting::SpaceBitmap* bitmap) { continuous_space_bitmaps_.push_back(bitmap); } -void HeapBitmap::RemoveContinuousSpaceBitmap(accounting::SpaceBitmap* bitmap) { +void HeapBitmap::RemoveContinuousSpaceBitmap(accounting::ContinuousSpaceBitmap* bitmap) { auto it = std::find(continuous_space_bitmaps_.begin(), continuous_space_bitmaps_.end(), bitmap); DCHECK(it != continuous_space_bitmaps_.end()); continuous_space_bitmaps_.erase(it); } void HeapBitmap::AddDiscontinuousObjectSet(ObjectSet* set) { - DCHECK(set != NULL); + DCHECK(set != nullptr); discontinuous_space_sets_.push_back(set); } diff --git a/runtime/gc/accounting/heap_bitmap.h b/runtime/gc/accounting/heap_bitmap.h index f729c0e..61a2429 100644 --- a/runtime/gc/accounting/heap_bitmap.h +++ b/runtime/gc/accounting/heap_bitmap.h @@ -34,7 +34,7 @@ class HeapBitmap { bool Test(const mirror::Object* obj) SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_); void Clear(const mirror::Object* obj) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_); void Set(const mirror::Object* obj) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_); - SpaceBitmap* GetContinuousSpaceBitmap(const mirror::Object* obj) const; + ContinuousSpaceBitmap* GetContinuousSpaceBitmap(const mirror::Object* obj) const; ObjectSet* GetDiscontinuousSpaceObjectSet(const mirror::Object* obj) const; void Walk(ObjectCallback* callback, void* arg) @@ -46,7 +46,7 @@ class HeapBitmap { SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); // Find and replace a bitmap pointer, this is used by for the bitmap swapping in the GC. - void ReplaceBitmap(SpaceBitmap* old_bitmap, SpaceBitmap* new_bitmap) + void ReplaceBitmap(ContinuousSpaceBitmap* old_bitmap, ContinuousSpaceBitmap* new_bitmap) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_); // Find and replace a object set pointer, this is used by for the bitmap swapping in the GC. @@ -58,13 +58,14 @@ class HeapBitmap { private: const Heap* const heap_; - void AddContinuousSpaceBitmap(SpaceBitmap* bitmap); - void RemoveContinuousSpaceBitmap(SpaceBitmap* bitmap); + void AddContinuousSpaceBitmap(ContinuousSpaceBitmap* bitmap); + void RemoveContinuousSpaceBitmap(ContinuousSpaceBitmap* bitmap); void AddDiscontinuousObjectSet(ObjectSet* set); void RemoveDiscontinuousObjectSet(ObjectSet* set); // Bitmaps covering continuous spaces. - std::vector> continuous_space_bitmaps_; + std::vector> + continuous_space_bitmaps_; // Sets covering discontinuous spaces. std::vector> discontinuous_space_sets_; diff --git a/runtime/gc/accounting/mod_union_table.cc b/runtime/gc/accounting/mod_union_table.cc index 34ca654..d744dee 100644 --- a/runtime/gc/accounting/mod_union_table.cc +++ b/runtime/gc/accounting/mod_union_table.cc @@ -19,6 +19,7 @@ #include "base/stl_util.h" #include "card_table-inl.h" #include "heap_bitmap.h" +#include "gc/accounting/space_bitmap-inl.h" #include "gc/collector/mark_sweep.h" #include "gc/collector/mark_sweep-inl.h" #include "gc/heap.h" @@ -222,7 +223,7 @@ void ModUnionTableReferenceCache::Verify() { // Check the references of each clean card which is also in the mod union table. 
CardTable* card_table = heap_->GetCardTable(); - SpaceBitmap* live_bitmap = space_->GetLiveBitmap(); + ContinuousSpaceBitmap* live_bitmap = space_->GetLiveBitmap(); for (const auto& ref_pair : references_) { const byte* card = ref_pair.first; if (*card == CardTable::kCardClean) { @@ -272,7 +273,7 @@ void ModUnionTableReferenceCache::UpdateAndMarkReferences(MarkHeapReferenceCallb uintptr_t end = start + CardTable::kCardSize; auto* space = heap_->FindContinuousSpaceFromObject(reinterpret_cast(start), false); DCHECK(space != nullptr); - SpaceBitmap* live_bitmap = space->GetLiveBitmap(); + ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap(); live_bitmap->VisitMarkedRange(start, end, add_visitor); // Update the corresponding references for the card. @@ -312,7 +313,7 @@ void ModUnionTableCardCache::UpdateAndMarkReferences(MarkHeapReferenceCallback* void* arg) { CardTable* card_table = heap_->GetCardTable(); ModUnionScanImageRootVisitor scan_visitor(callback, arg); - SpaceBitmap* bitmap = space_->GetLiveBitmap(); + ContinuousSpaceBitmap* bitmap = space_->GetLiveBitmap(); for (const byte* card_addr : cleared_cards_) { uintptr_t start = reinterpret_cast(card_table->AddrFromCard(card_addr)); DCHECK(space_->HasAddress(reinterpret_cast(start))); diff --git a/runtime/gc/accounting/mod_union_table.h b/runtime/gc/accounting/mod_union_table.h index c3a90e2..5ae7c77 100644 --- a/runtime/gc/accounting/mod_union_table.h +++ b/runtime/gc/accounting/mod_union_table.h @@ -44,7 +44,6 @@ class Heap; namespace accounting { -class SpaceBitmap; class HeapBitmap; // The mod-union table is the union of modified cards. It is used to allow the card table to be diff --git a/runtime/gc/accounting/remembered_set.cc b/runtime/gc/accounting/remembered_set.cc index 56f7caa..044216e 100644 --- a/runtime/gc/accounting/remembered_set.cc +++ b/runtime/gc/accounting/remembered_set.cc @@ -112,7 +112,7 @@ void RememberedSet::UpdateAndMarkReferences(MarkHeapReferenceCallback* callback, bool contains_reference_to_target_space = false; RememberedSetObjectVisitor obj_visitor(callback, target_space, &contains_reference_to_target_space, arg); - SpaceBitmap* bitmap = space_->GetLiveBitmap(); + ContinuousSpaceBitmap* bitmap = space_->GetLiveBitmap(); CardSet remove_card_set; for (byte* const card_addr : dirty_cards_) { contains_reference_to_target_space = false; diff --git a/runtime/gc/accounting/space_bitmap-inl.h b/runtime/gc/accounting/space_bitmap-inl.h index 880ff1f..08f7c87 100644 --- a/runtime/gc/accounting/space_bitmap-inl.h +++ b/runtime/gc/accounting/space_bitmap-inl.h @@ -17,14 +17,26 @@ #ifndef ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_INL_H_ #define ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_INL_H_ +#include "space_bitmap.h" + #include "base/logging.h" +#include "dex_file-inl.h" +#include "heap_bitmap.h" +#include "mirror/art_field-inl.h" +#include "mirror/class-inl.h" +#include "mirror/object-inl.h" +#include "mirror/object_array-inl.h" +#include "object_utils.h" +#include "space_bitmap-inl.h" +#include "UniquePtr.h" #include "utils.h" namespace art { namespace gc { namespace accounting { -inline bool SpaceBitmap::AtomicTestAndSet(const mirror::Object* obj) { +template +inline bool SpaceBitmap::AtomicTestAndSet(const mirror::Object* obj) { uintptr_t addr = reinterpret_cast(obj); DCHECK_GE(addr, heap_begin_); const uintptr_t offset = addr - heap_begin_; @@ -45,7 +57,8 @@ inline bool SpaceBitmap::AtomicTestAndSet(const mirror::Object* obj) { return false; } -inline bool SpaceBitmap::Test(const mirror::Object* obj) const { 
+template +inline bool SpaceBitmap::Test(const mirror::Object* obj) const { uintptr_t addr = reinterpret_cast(obj); DCHECK(HasAddress(obj)) << obj; DCHECK(bitmap_begin_ != NULL); @@ -54,8 +67,8 @@ inline bool SpaceBitmap::Test(const mirror::Object* obj) const { return (bitmap_begin_[OffsetToIndex(offset)] & OffsetToMask(offset)) != 0; } -template -void SpaceBitmap::VisitMarkedRange(uintptr_t visit_begin, uintptr_t visit_end, +template template +void SpaceBitmap::VisitMarkedRange(uintptr_t visit_begin, uintptr_t visit_end, const Visitor& visitor) const { DCHECK_LT(visit_begin, visit_end); #if 0 @@ -148,7 +161,8 @@ void SpaceBitmap::VisitMarkedRange(uintptr_t visit_begin, uintptr_t visit_end, #endif } -inline bool SpaceBitmap::Modify(const mirror::Object* obj, bool do_set) { +template template +inline bool SpaceBitmap::Modify(const mirror::Object* obj) { uintptr_t addr = reinterpret_cast(obj); DCHECK_GE(addr, heap_begin_); const uintptr_t offset = addr - heap_begin_; @@ -157,15 +171,24 @@ inline bool SpaceBitmap::Modify(const mirror::Object* obj, bool do_set) { DCHECK_LT(index, bitmap_size_ / kWordSize) << " bitmap_size_ = " << bitmap_size_; uword* address = &bitmap_begin_[index]; uword old_word = *address; - if (do_set) { + if (kSetBit) { *address = old_word | mask; } else { *address = old_word & ~mask; } - DCHECK_EQ(Test(obj), do_set); + DCHECK_EQ(Test(obj), kSetBit); return (old_word & mask) != 0; } +template +inline std::ostream& operator << (std::ostream& stream, const SpaceBitmap& bitmap) { + return stream + << bitmap.GetName() << "[" + << "begin=" << reinterpret_cast(bitmap.HeapBegin()) + << ",end=" << reinterpret_cast(bitmap.HeapLimit()) + << "]"; +} + } // namespace accounting } // namespace gc } // namespace art diff --git a/runtime/gc/accounting/space_bitmap.cc b/runtime/gc/accounting/space_bitmap.cc index 1957c21..7eed05a 100644 --- a/runtime/gc/accounting/space_bitmap.cc +++ b/runtime/gc/accounting/space_bitmap.cc @@ -14,51 +14,24 @@ * limitations under the License. 
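The Test() and Modify() paths above all reduce an object address to a word index plus a bit mask. A self-contained worked example of that arithmetic, assuming the 8-byte alignment used for object spaces:

#include <cstddef>
#include <cstdint>
#include <cstdio>

static constexpr size_t kAlignment = 8;                        // assumed object alignment
static constexpr size_t kBitsPerWord = sizeof(uintptr_t) * 8;  // 64 on a 64-bit build

// Which bitmap word holds the bit for this byte offset into the heap.
static size_t OffsetToIndex(size_t offset) {
  return offset / kAlignment / kBitsPerWord;
}
// Which bit inside that word represents the offset.
static uintptr_t OffsetToMask(size_t offset) {
  return static_cast<uintptr_t>(1) << ((offset / kAlignment) % kBitsPerWord);
}

int main() {
  uintptr_t heap_begin = 0x10000000;
  uintptr_t obj = heap_begin + 3 * kAlignment;  // the fourth 8-byte slot in the heap
  size_t offset = obj - heap_begin;
  // Each word therefore covers kAlignment * kBitsPerWord = 512 heap bytes on 64-bit.
  std::printf("index=%zu mask=%#lx\n", OffsetToIndex(offset),
              static_cast<unsigned long>(OffsetToMask(offset)));  // index=0 mask=0x8
  return 0;
}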
*/ -#include "base/logging.h" -#include "dex_file-inl.h" -#include "heap_bitmap.h" -#include "mirror/art_field-inl.h" -#include "mirror/class-inl.h" -#include "mirror/object-inl.h" -#include "mirror/object_array-inl.h" -#include "object_utils.h" #include "space_bitmap-inl.h" -#include "UniquePtr.h" -#include "utils.h" namespace art { namespace gc { namespace accounting { -std::string SpaceBitmap::GetName() const { - return name_; -} - -void SpaceBitmap::SetName(const std::string& name) { - name_ = name; -} - -std::string SpaceBitmap::Dump() const { - return StringPrintf("%s: %p-%p", name_.c_str(), - reinterpret_cast(HeapBegin()), - reinterpret_cast(HeapLimit())); -} - -void ObjectSet::Walk(ObjectCallback* callback, void* arg) { - for (const mirror::Object* obj : contained_) { - callback(const_cast(obj), arg); - } -} - -SpaceBitmap* SpaceBitmap::CreateFromMemMap(const std::string& name, MemMap* mem_map, - byte* heap_begin, size_t heap_capacity) { +template +SpaceBitmap* SpaceBitmap::CreateFromMemMap( + const std::string& name, MemMap* mem_map, byte* heap_begin, size_t heap_capacity) { CHECK(mem_map != nullptr); uword* bitmap_begin = reinterpret_cast(mem_map->Begin()); size_t bitmap_size = OffsetToIndex(RoundUp(heap_capacity, kAlignment * kBitsPerWord)) * kWordSize; return new SpaceBitmap(name, mem_map, bitmap_begin, bitmap_size, heap_begin); } -SpaceBitmap* SpaceBitmap::Create(const std::string& name, byte* heap_begin, size_t heap_capacity) { +template +SpaceBitmap* SpaceBitmap::Create( + const std::string& name, byte* heap_begin, size_t heap_capacity) { CHECK(heap_begin != NULL); // Round up since heap_capacity is not necessarily a multiple of kAlignment * kBitsPerWord. size_t bitmap_size = OffsetToIndex(RoundUp(heap_capacity, kAlignment * kBitsPerWord)) * kWordSize; @@ -72,10 +45,8 @@ SpaceBitmap* SpaceBitmap::Create(const std::string& name, byte* heap_begin, size return CreateFromMemMap(name, mem_map.release(), heap_begin, heap_capacity); } -// Clean up any resources associated with the bitmap. -SpaceBitmap::~SpaceBitmap() {} - -void SpaceBitmap::SetHeapLimit(uintptr_t new_end) { +template +void SpaceBitmap::SetHeapLimit(uintptr_t new_end) { DCHECK(IsAligned(new_end)); size_t new_size = OffsetToIndex(new_end - heap_begin_) * kWordSize; if (new_size < bitmap_size_) { @@ -85,7 +56,8 @@ void SpaceBitmap::SetHeapLimit(uintptr_t new_end) { // should be marked. } -void SpaceBitmap::Clear() { +template +void SpaceBitmap::Clear() { if (bitmap_begin_ != NULL) { // This returns the memory to the system. Successive page faults will return zeroed memory. int result = madvise(bitmap_begin_, bitmap_size_, MADV_DONTNEED); @@ -95,14 +67,14 @@ void SpaceBitmap::Clear() { } } -void SpaceBitmap::CopyFrom(SpaceBitmap* source_bitmap) { +template +inline void SpaceBitmap::CopyFrom(SpaceBitmap* source_bitmap) { DCHECK_EQ(Size(), source_bitmap->Size()); std::copy(source_bitmap->Begin(), source_bitmap->Begin() + source_bitmap->Size() / kWordSize, Begin()); } -// Visits set bits in address order. The callback is not permitted to -// change the bitmap bits or max during the traversal. -void SpaceBitmap::Walk(ObjectCallback* callback, void* arg) { +template +inline void SpaceBitmap::Walk(ObjectCallback* callback, void* arg) { CHECK(bitmap_begin_ != NULL); CHECK(callback != NULL); @@ -122,15 +94,11 @@ void SpaceBitmap::Walk(ObjectCallback* callback, void* arg) { } } -// Walk through the bitmaps in increasing address order, and find the -// object pointers that correspond to garbage objects. 
Call -// zero or more times with lists of these object pointers. -// -// The callback is not permitted to increase the max of either bitmap. -void SpaceBitmap::SweepWalk(const SpaceBitmap& live_bitmap, - const SpaceBitmap& mark_bitmap, - uintptr_t sweep_begin, uintptr_t sweep_end, - SpaceBitmap::SweepCallback* callback, void* arg) { +template +void SpaceBitmap::SweepWalk(const SpaceBitmap& live_bitmap, + const SpaceBitmap& mark_bitmap, + uintptr_t sweep_begin, uintptr_t sweep_end, + SpaceBitmap::SweepCallback* callback, void* arg) { CHECK(live_bitmap.bitmap_begin_ != NULL); CHECK(mark_bitmap.bitmap_begin_ != NULL); CHECK_EQ(live_bitmap.heap_begin_, mark_bitmap.heap_begin_); @@ -174,13 +142,10 @@ void SpaceBitmap::SweepWalk(const SpaceBitmap& live_bitmap, } } -static void WalkFieldsInOrder(SpaceBitmap* visited, ObjectCallback* callback, mirror::Object* obj, - void* arg); - -// Walk instance fields of the given Class. Separate function to allow recursion on the super -// class. -static void WalkInstanceFields(SpaceBitmap* visited, ObjectCallback* callback, mirror::Object* obj, - mirror::Class* klass, void* arg) +template +void SpaceBitmap::WalkInstanceFields(SpaceBitmap* visited, + ObjectCallback* callback, mirror::Object* obj, + mirror::Class* klass, void* arg) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { // Visit fields of parent classes first. mirror::Class* super = klass->GetSuperClass(); @@ -203,10 +168,10 @@ static void WalkInstanceFields(SpaceBitmap* visited, ObjectCallback* callback, m } } -// For an unvisited object, visit it then all its children found via fields. -static void WalkFieldsInOrder(SpaceBitmap* visited, ObjectCallback* callback, mirror::Object* obj, - void* arg) - SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { +template +void SpaceBitmap::WalkFieldsInOrder(SpaceBitmap* visited, + ObjectCallback* callback, + mirror::Object* obj, void* arg) { if (visited->Test(obj)) { return; } @@ -244,14 +209,13 @@ static void WalkFieldsInOrder(SpaceBitmap* visited, ObjectCallback* callback, mi } } -// Visits set bits with an in order traversal. The callback is not permitted to change the bitmap -// bits or max during the traversal. -void SpaceBitmap::InOrderWalk(ObjectCallback* callback, void* arg) { - UniquePtr visited(Create("bitmap for in-order walk", - reinterpret_cast(heap_begin_), - IndexToOffset(bitmap_size_ / kWordSize))); - CHECK(bitmap_begin_ != NULL); - CHECK(callback != NULL); +template +void SpaceBitmap::InOrderWalk(ObjectCallback* callback, void* arg) { + UniquePtr> visited( + Create("bitmap for in-order walk", reinterpret_cast(heap_begin_), + IndexToOffset(bitmap_size_ / kWordSize))); + CHECK(bitmap_begin_ != nullptr); + CHECK(callback != nullptr); uintptr_t end = Size() / kWordSize; for (uintptr_t i = 0; i < end; ++i) { // Need uint for unsigned shift. 
@@ -268,14 +232,15 @@ void SpaceBitmap::InOrderWalk(ObjectCallback* callback, void* arg) { } } -std::ostream& operator << (std::ostream& stream, const SpaceBitmap& bitmap) { - return stream - << bitmap.GetName() << "[" - << "begin=" << reinterpret_cast(bitmap.HeapBegin()) - << ",end=" << reinterpret_cast(bitmap.HeapLimit()) - << "]"; +void ObjectSet::Walk(ObjectCallback* callback, void* arg) { + for (const mirror::Object* obj : contained_) { + callback(const_cast(obj), arg); + } } +template class SpaceBitmap; +template class SpaceBitmap; + } // namespace accounting } // namespace gc } // namespace art diff --git a/runtime/gc/accounting/space_bitmap.h b/runtime/gc/accounting/space_bitmap.h index a88f3e4..891c8ed 100644 --- a/runtime/gc/accounting/space_bitmap.h +++ b/runtime/gc/accounting/space_bitmap.h @@ -38,11 +38,9 @@ namespace mirror { namespace gc { namespace accounting { +template class SpaceBitmap { public: - // Alignment of objects within spaces. - static const size_t kAlignment = 8; - typedef void ScanCallback(mirror::Object* obj, void* finger, void* arg); typedef void SweepCallback(size_t ptr_count, mirror::Object** ptrs, void* arg); @@ -57,30 +55,31 @@ class SpaceBitmap { static SpaceBitmap* CreateFromMemMap(const std::string& name, MemMap* mem_map, byte* heap_begin, size_t heap_capacity); - ~SpaceBitmap(); + ~SpaceBitmap() { + } // is the difference from .base to a pointer address. // is the index of .bits that contains the bit representing // . - static size_t OffsetToIndex(size_t offset) { + static size_t OffsetToIndex(size_t offset) ALWAYS_INLINE { return offset / kAlignment / kBitsPerWord; } - static uintptr_t IndexToOffset(size_t index) { + static uintptr_t IndexToOffset(size_t index) ALWAYS_INLINE { return static_cast(index * kAlignment * kBitsPerWord); } // Bits are packed in the obvious way. - static uword OffsetToMask(uintptr_t offset) { + static uword OffsetToMask(uintptr_t offset) ALWAYS_INLINE { return (static_cast(1)) << ((offset / kAlignment) % kBitsPerWord); } - inline bool Set(const mirror::Object* obj) { - return Modify(obj, true); + bool Set(const mirror::Object* obj) ALWAYS_INLINE { + return Modify(obj); } - inline bool Clear(const mirror::Object* obj) { - return Modify(obj, false); + bool Clear(const mirror::Object* obj) ALWAYS_INLINE { + return Modify(obj); } // Returns true if the object was previously marked. @@ -131,12 +130,19 @@ class SpaceBitmap { EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + // Visits set bits in address order. The callback is not permitted to change the bitmap bits or + // max during the traversal. void Walk(ObjectCallback* callback, void* arg) SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_); + // Visits set bits with an in order traversal. The callback is not permitted to change the bitmap + // bits or max during the traversal. void InOrderWalk(ObjectCallback* callback, void* arg) SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_); + // Walk through the bitmaps in increasing address order, and find the object pointers that + // correspond to garbage objects. Call zero or more times with lists of these object + // pointers. The callback is not permitted to increase the max of either bitmap. static void SweepWalk(const SpaceBitmap& live, const SpaceBitmap& mark, uintptr_t base, uintptr_t max, SweepCallback* thunk, void* arg); @@ -169,10 +175,18 @@ class SpaceBitmap { // Set the max address which can covered by the bitmap. 
void SetHeapLimit(uintptr_t new_end); - std::string GetName() const; - void SetName(const std::string& name); + std::string GetName() const { + return name_; + } + + void SetName(const std::string& name) { + name_ = name; + } - std::string Dump() const; + std::string Dump() const { + return StringPrintf("%s: %p-%p", name_.c_str(), reinterpret_cast(HeapBegin()), + reinterpret_cast(HeapLimit())); + } const void* GetObjectWordAddress(const mirror::Object* obj) const { uintptr_t addr = reinterpret_cast(obj); @@ -190,7 +204,17 @@ class SpaceBitmap { heap_begin_(reinterpret_cast(heap_begin)), name_(name) {} - bool Modify(const mirror::Object* obj, bool do_set); + template + bool Modify(const mirror::Object* obj); + + // For an unvisited object, visit it then all its children found via fields. + static void WalkFieldsInOrder(SpaceBitmap* visited, ObjectCallback* callback, mirror::Object* obj, + void* arg) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); + // Walk instance fields of the given Class. Separate function to allow recursion on the super + // class. + static void WalkInstanceFields(SpaceBitmap* visited, ObjectCallback* callback, + mirror::Object* obj, mirror::Class* klass, void* arg) + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); // Backing storage for bitmap. UniquePtr mem_map_; @@ -272,7 +296,12 @@ class ObjectSet { Objects contained_; }; -std::ostream& operator << (std::ostream& stream, const SpaceBitmap& bitmap); +typedef SpaceBitmap ContinuousSpaceBitmap; +// TODO: Replace usage of ObjectSet with LargeObjectBitmap. +typedef SpaceBitmap LargeObjectBitmap; + +template +std::ostream& operator << (std::ostream& stream, const SpaceBitmap& bitmap); } // namespace accounting } // namespace gc diff --git a/runtime/gc/accounting/space_bitmap_test.cc b/runtime/gc/accounting/space_bitmap_test.cc index 68994a8..7c18052 100644 --- a/runtime/gc/accounting/space_bitmap_test.cc +++ b/runtime/gc/accounting/space_bitmap_test.cc @@ -32,14 +32,15 @@ class SpaceBitmapTest : public CommonRuntimeTest {}; TEST_F(SpaceBitmapTest, Init) { byte* heap_begin = reinterpret_cast(0x10000000); size_t heap_capacity = 16 * MB; - UniquePtr space_bitmap(SpaceBitmap::Create("test bitmap", - heap_begin, heap_capacity)); + UniquePtr space_bitmap( + ContinuousSpaceBitmap::Create("test bitmap", heap_begin, heap_capacity)); EXPECT_TRUE(space_bitmap.get() != NULL); } class BitmapVerify { public: - BitmapVerify(SpaceBitmap* bitmap, const mirror::Object* begin, const mirror::Object* end) + BitmapVerify(ContinuousSpaceBitmap* bitmap, const mirror::Object* begin, + const mirror::Object* end) : bitmap_(bitmap), begin_(begin), end_(end) {} @@ -50,7 +51,7 @@ class BitmapVerify { EXPECT_EQ(bitmap_->Test(obj), ((reinterpret_cast(obj) & 0xF) != 0)); } - SpaceBitmap* bitmap_; + ContinuousSpaceBitmap* bitmap_; const mirror::Object* begin_; const mirror::Object* end_; }; @@ -59,14 +60,14 @@ TEST_F(SpaceBitmapTest, ScanRange) { byte* heap_begin = reinterpret_cast(0x10000000); size_t heap_capacity = 16 * MB; - UniquePtr space_bitmap(SpaceBitmap::Create("test bitmap", - heap_begin, heap_capacity)); + UniquePtr space_bitmap( + ContinuousSpaceBitmap::Create("test bitmap", heap_begin, heap_capacity)); EXPECT_TRUE(space_bitmap.get() != NULL); // Set all the odd bits in the first BitsPerWord * 3 to one. 
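The refactoring's core is that SpaceBitmap is now parameterized on alignment, with ContinuousSpaceBitmap and LargeObjectBitmap as the two concrete aliases and matching explicit instantiations at the end of space_bitmap.cc above. Roughly, using assumed alignment values (the exact constants are not reproduced here):

#include <cstddef>
#include <cstdint>
#include <string>

// Assumed alignments for illustration: 8-byte object slots for normal spaces,
// one page per large object.
static constexpr size_t kObjectAlignmentSketch = 8;
static constexpr size_t kLargeObjectAlignmentSketch = 4096;

template <size_t kAlignment>
class SpaceBitmapSketch {
 public:
  static size_t OffsetToIndex(size_t offset) {
    return offset / kAlignment / (sizeof(uintptr_t) * 8);
  }
  // Set/Clear/Test etc. as in the patch, all keyed off kAlignment.
  const std::string& GetName() const { return name_; }
 private:
  std::string name_;
};

// The two aliases the patch introduces (ContinuousSpaceBitmap, LargeObjectBitmap).
typedef SpaceBitmapSketch<kObjectAlignmentSketch> ContinuousSpaceBitmapSketch;
typedef SpaceBitmapSketch<kLargeObjectAlignmentSketch> LargeObjectBitmapSketch;

// Keeping the member definitions out of the header then requires explicit
// instantiation for exactly these two alignments.
template class SpaceBitmapSketch<kObjectAlignmentSketch>;
template class SpaceBitmapSketch<kLargeObjectAlignmentSketch>;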
for (size_t j = 0; j < kBitsPerWord * 3; ++j) { const mirror::Object* obj = - reinterpret_cast(heap_begin + j * SpaceBitmap::kAlignment); + reinterpret_cast(heap_begin + j * kObjectAlignment); if (reinterpret_cast(obj) & 0xF) { space_bitmap->Set(obj); } @@ -77,10 +78,10 @@ TEST_F(SpaceBitmapTest, ScanRange) { // words. for (size_t i = 0; i < static_cast(kBitsPerWord); ++i) { mirror::Object* start = - reinterpret_cast(heap_begin + i * SpaceBitmap::kAlignment); + reinterpret_cast(heap_begin + i * kObjectAlignment); for (size_t j = 0; j < static_cast(kBitsPerWord * 2); ++j) { mirror::Object* end = - reinterpret_cast(heap_begin + (i + j) * SpaceBitmap::kAlignment); + reinterpret_cast(heap_begin + (i + j) * kObjectAlignment); BitmapVerify(space_bitmap.get(), start, end); } } @@ -118,8 +119,8 @@ void compat_test() NO_THREAD_SAFETY_ANALYSIS { for (int i = 0; i < 5 ; ++i) { - UniquePtr space_bitmap(SpaceBitmap::Create("test bitmap", - heap_begin, heap_capacity)); + UniquePtr space_bitmap( + ContinuousSpaceBitmap::Create("test bitmap", heap_begin, heap_capacity)); for (int j = 0; j < 10000; ++j) { size_t offset = (r.next() % heap_capacity) & ~(0x7); diff --git a/runtime/gc/collector/garbage_collector.cc b/runtime/gc/collector/garbage_collector.cc index a700c73..d99136a 100644 --- a/runtime/gc/collector/garbage_collector.cc +++ b/runtime/gc/collector/garbage_collector.cc @@ -174,8 +174,8 @@ void GarbageCollector::SwapBitmaps() { if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyAlwaysCollect || (gc_type == kGcTypeFull && space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect)) { - accounting::SpaceBitmap* live_bitmap = space->GetLiveBitmap(); - accounting::SpaceBitmap* mark_bitmap = space->GetMarkBitmap(); + accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap(); + accounting::ContinuousSpaceBitmap* mark_bitmap = space->GetMarkBitmap(); if (live_bitmap != nullptr && live_bitmap != mark_bitmap) { heap_->GetLiveBitmap()->ReplaceBitmap(live_bitmap, mark_bitmap); heap_->GetMarkBitmap()->ReplaceBitmap(mark_bitmap, live_bitmap); diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc index bb41b57..f07e6f1 100644 --- a/runtime/gc/collector/mark_sweep.cc +++ b/runtime/gc/collector/mark_sweep.cc @@ -123,7 +123,6 @@ void MarkSweep::InitializePhase() { mark_immune_count_ = 0; mark_fastpath_count_ = 0; mark_slowpath_count_ = 0; - FindDefaultSpaceBitmap(); { // TODO: I don't think we should need heap bitmap lock to get the mark bitmap. ReaderMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_); @@ -293,7 +292,7 @@ void MarkSweep::ReclaimPhase() { void MarkSweep::FindDefaultSpaceBitmap() { TimingLogger::ScopedSplit split("FindDefaultMarkBitmap", &timings_); for (const auto& space : GetHeap()->GetContinuousSpaces()) { - accounting::SpaceBitmap* bitmap = space->GetMarkBitmap(); + accounting::ContinuousSpaceBitmap* bitmap = space->GetMarkBitmap(); if (bitmap != nullptr && space->GetGcRetentionPolicy() == space::kGcRetentionPolicyAlwaysCollect) { current_space_bitmap_ = bitmap; @@ -359,7 +358,7 @@ inline void MarkSweep::MarkObjectNonNull(Object* obj) { } // Try to take advantage of locality of references within a space, failing this find the space // the hard way. 
- accounting::SpaceBitmap* object_bitmap = current_space_bitmap_; + accounting::ContinuousSpaceBitmap* object_bitmap = current_space_bitmap_; if (UNLIKELY(!object_bitmap->HasAddress(obj))) { object_bitmap = mark_bitmap_->GetContinuousSpaceBitmap(obj); if (kCountMarkedObjects) { @@ -428,9 +427,9 @@ inline bool MarkSweep::MarkObjectParallel(const Object* obj) { } // Try to take advantage of locality of references within a space, failing this find the space // the hard way. - accounting::SpaceBitmap* object_bitmap = current_space_bitmap_; + accounting::ContinuousSpaceBitmap* object_bitmap = current_space_bitmap_; if (UNLIKELY(!object_bitmap->HasAddress(obj))) { - accounting::SpaceBitmap* new_bitmap = mark_bitmap_->GetContinuousSpaceBitmap(obj); + accounting::ContinuousSpaceBitmap* new_bitmap = mark_bitmap_->GetContinuousSpaceBitmap(obj); if (new_bitmap != NULL) { object_bitmap = new_bitmap; } else { @@ -476,7 +475,7 @@ void MarkSweep::VerifyRootCallback(const Object* root, void* arg, size_t vreg, void MarkSweep::VerifyRoot(const Object* root, size_t vreg, const StackVisitor* visitor, RootType root_type) { // See if the root is on any space bitmap. - if (GetHeap()->GetLiveBitmap()->GetContinuousSpaceBitmap(root) == nullptr) { + if (heap_->GetLiveBitmap()->GetContinuousSpaceBitmap(root) == nullptr) { space::LargeObjectSpace* large_object_space = GetHeap()->GetLargeObjectsSpace(); if (!large_object_space->Contains(root)) { LOG(ERROR) << "Found invalid root: " << root << " with type " << root_type; @@ -686,7 +685,8 @@ class MarkStackTask : public Task { class CardScanTask : public MarkStackTask { public: - CardScanTask(ThreadPool* thread_pool, MarkSweep* mark_sweep, accounting::SpaceBitmap* bitmap, + CardScanTask(ThreadPool* thread_pool, MarkSweep* mark_sweep, + accounting::ContinuousSpaceBitmap* bitmap, byte* begin, byte* end, byte minimum_age, size_t mark_stack_size, Object** mark_stack_obj) : MarkStackTask(thread_pool, mark_sweep, mark_stack_size, mark_stack_obj), @@ -697,7 +697,7 @@ class CardScanTask : public MarkStackTask { } protected: - accounting::SpaceBitmap* const bitmap_; + accounting::ContinuousSpaceBitmap* const bitmap_; byte* const begin_; byte* const end_; const byte minimum_age_; @@ -820,7 +820,7 @@ void MarkSweep::ScanGrayObjects(bool paused, byte minimum_age) { class RecursiveMarkTask : public MarkStackTask { public: RecursiveMarkTask(ThreadPool* thread_pool, MarkSweep* mark_sweep, - accounting::SpaceBitmap* bitmap, uintptr_t begin, uintptr_t end) + accounting::ContinuousSpaceBitmap* bitmap, uintptr_t begin, uintptr_t end) : MarkStackTask(thread_pool, mark_sweep, 0, NULL), bitmap_(bitmap), begin_(begin), @@ -828,7 +828,7 @@ class RecursiveMarkTask : public MarkStackTask { } protected: - accounting::SpaceBitmap* const bitmap_; + accounting::ContinuousSpaceBitmap* const bitmap_; const uintptr_t begin_; const uintptr_t end_; @@ -1045,8 +1045,8 @@ void MarkSweep::SweepArray(accounting::ObjectStack* allocations, bool swap_bitma // Start by sweeping the continuous spaces. 
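MarkObjectNonNull and MarkObjectParallel above first probe the cached bitmap of the space currently being scanned and only fall back to the heap-wide lookup on a miss, re-caching whichever bitmap matched. The shape of that fast path, with simplified stand-in types:

#include <cstddef>
#include <cstdint>
#include <vector>

// Minimal bitmap stand-in covering [begin, end), one mark bit per 8-byte slot.
struct SpaceBitmapStub {
  uintptr_t begin, end;
  std::vector<bool> bits;
  bool HasAddress(uintptr_t addr) const { return addr >= begin && addr < end; }
  bool Set(uintptr_t addr) {              // returns the previous value of the bit
    size_t i = (addr - begin) / 8;
    if (i >= bits.size()) bits.resize(i + 1, false);
    bool old = bits[i];
    bits[i] = true;
    return old;
  }
};

// Fast path: probe the cached current-space bitmap, fall back to searching all
// spaces, and keep the match cached for the next call.
bool MarkObjectSketch(uintptr_t obj, SpaceBitmapStub*& current_space_bitmap,
                      const std::vector<SpaceBitmapStub*>& all_space_bitmaps) {
  SpaceBitmapStub* bitmap = current_space_bitmap;
  if (!bitmap->HasAddress(obj)) {
    bitmap = nullptr;
    for (SpaceBitmapStub* candidate : all_space_bitmaps) {
      if (candidate->HasAddress(obj)) { bitmap = candidate; break; }
    }
    if (bitmap == nullptr) {
      return false;                       // e.g. a large object handled elsewhere
    }
    current_space_bitmap = bitmap;        // locality: the next object is likely nearby
  }
  return !bitmap->Set(obj);               // true if this call newly marked the object
}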
for (space::ContinuousSpace* space : sweep_spaces) { space::AllocSpace* alloc_space = space->AsAllocSpace(); - accounting::SpaceBitmap* live_bitmap = space->GetLiveBitmap(); - accounting::SpaceBitmap* mark_bitmap = space->GetMarkBitmap(); + accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap(); + accounting::ContinuousSpaceBitmap* mark_bitmap = space->GetMarkBitmap(); if (swap_bitmaps) { std::swap(live_bitmap, mark_bitmap); } diff --git a/runtime/gc/collector/mark_sweep.h b/runtime/gc/collector/mark_sweep.h index d49e427..6dbb270 100644 --- a/runtime/gc/collector/mark_sweep.h +++ b/runtime/gc/collector/mark_sweep.h @@ -22,6 +22,7 @@ #include "base/macros.h" #include "base/mutex.h" #include "garbage_collector.h" +#include "gc/accounting/space_bitmap.h" #include "immune_region.h" #include "object_callbacks.h" #include "offsets.h" @@ -45,7 +46,6 @@ class Heap; namespace accounting { template class AtomicStack; typedef AtomicStack ObjectStack; - class SpaceBitmap; } // namespace accounting namespace collector { @@ -283,7 +283,7 @@ class MarkSweep : public GarbageCollector { // Current space, we check this space first to avoid searching for the appropriate space for an // object. - accounting::SpaceBitmap* current_space_bitmap_; + accounting::ContinuousSpaceBitmap* current_space_bitmap_; // Cache the heap's mark bitmap to prevent having to do 2 loads during slow path marking. accounting::HeapBitmap* mark_bitmap_; diff --git a/runtime/gc/collector/semi_space-inl.h b/runtime/gc/collector/semi_space-inl.h index df731ff..8a9611f 100644 --- a/runtime/gc/collector/semi_space-inl.h +++ b/runtime/gc/collector/semi_space-inl.h @@ -65,7 +65,7 @@ inline void SemiSpace::MarkObject( } obj_ptr->Assign(forward_address); } else { - accounting::SpaceBitmap* object_bitmap = + accounting::ContinuousSpaceBitmap* object_bitmap = heap_->GetMarkBitmap()->GetContinuousSpaceBitmap(obj); if (LIKELY(object_bitmap != nullptr)) { if (generational_) { diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc index ccb38c4..c0e172e 100644 --- a/runtime/gc/collector/semi_space.cc +++ b/runtime/gc/collector/semi_space.cc @@ -333,7 +333,7 @@ void SemiSpace::MarkReachableObjects() { // remain in the space, that is, the remembered set (and the // card table) didn't miss any from-space references in the // space. - accounting::SpaceBitmap* live_bitmap = space->GetLiveBitmap(); + accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap(); SemiSpaceVerifyNoFromSpaceReferencesObjectVisitor visitor(this); live_bitmap->VisitMarkedRange(reinterpret_cast(space->Begin()), reinterpret_cast(space->End()), @@ -341,7 +341,7 @@ void SemiSpace::MarkReachableObjects() { } } else { DCHECK(rem_set == nullptr); - accounting::SpaceBitmap* live_bitmap = space->GetLiveBitmap(); + accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap(); SemiSpaceScanObjectVisitor visitor(this); live_bitmap->VisitMarkedRange(reinterpret_cast(space->Begin()), reinterpret_cast(space->End()), @@ -535,9 +535,9 @@ mirror::Object* SemiSpace::MarkNonForwardedObject(mirror::Object* obj) { // space. GetHeap()->WriteBarrierEveryFieldOf(forward_address); // Handle the bitmaps marking. 
- accounting::SpaceBitmap* live_bitmap = promo_dest_space->GetLiveBitmap(); + accounting::ContinuousSpaceBitmap* live_bitmap = promo_dest_space->GetLiveBitmap(); DCHECK(live_bitmap != nullptr); - accounting::SpaceBitmap* mark_bitmap = promo_dest_space->GetMarkBitmap(); + accounting::ContinuousSpaceBitmap* mark_bitmap = promo_dest_space->GetMarkBitmap(); DCHECK(mark_bitmap != nullptr); DCHECK(!live_bitmap->Test(forward_address)); if (!whole_heap_collection_) { @@ -710,8 +710,8 @@ void SemiSpace::ScanObject(Object* obj) { // Scan anything that's on the mark stack. void SemiSpace::ProcessMarkStack() { - space::MallocSpace* promo_dest_space = NULL; - accounting::SpaceBitmap* live_bitmap = NULL; + space::MallocSpace* promo_dest_space = nullptr; + accounting::ContinuousSpaceBitmap* live_bitmap = nullptr; if (generational_ && !whole_heap_collection_) { // If a bump pointer space only collection (and the promotion is // enabled,) we delay the live-bitmap marking of promoted objects @@ -719,7 +719,7 @@ void SemiSpace::ProcessMarkStack() { promo_dest_space = GetHeap()->GetPrimaryFreeListSpace(); live_bitmap = promo_dest_space->GetLiveBitmap(); DCHECK(live_bitmap != nullptr); - accounting::SpaceBitmap* mark_bitmap = promo_dest_space->GetMarkBitmap(); + accounting::ContinuousSpaceBitmap* mark_bitmap = promo_dest_space->GetMarkBitmap(); DCHECK(mark_bitmap != nullptr); DCHECK_EQ(live_bitmap, mark_bitmap); } diff --git a/runtime/gc/collector/semi_space.h b/runtime/gc/collector/semi_space.h index 3442751..4169ca9 100644 --- a/runtime/gc/collector/semi_space.h +++ b/runtime/gc/collector/semi_space.h @@ -21,6 +21,7 @@ #include "base/macros.h" #include "base/mutex.h" #include "garbage_collector.h" +#include "gc/accounting/space_bitmap.h" #include "immune_region.h" #include "object_callbacks.h" #include "offsets.h" @@ -42,7 +43,6 @@ class Heap; namespace accounting { template class AtomicStack; typedef AtomicStack ObjectStack; - class SpaceBitmap; } // namespace accounting namespace space { @@ -198,7 +198,8 @@ class SemiSpace : public GarbageCollector { // Destination and source spaces (can be any type of ContinuousMemMapAllocSpace which either has // a live bitmap or doesn't). space::ContinuousMemMapAllocSpace* to_space_; - accounting::SpaceBitmap* to_space_live_bitmap_; // Cached live bitmap as an optimization. + // Cached live bitmap as an optimization. + accounting::ContinuousSpaceBitmap* to_space_live_bitmap_; space::ContinuousMemMapAllocSpace* from_space_; Thread* self_; diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc index 479ea2e..78fc71f 100644 --- a/runtime/gc/heap.cc +++ b/runtime/gc/heap.cc @@ -576,8 +576,8 @@ void Heap::AddSpace(space::Space* space, bool set_as_default) { DCHECK(!space->IsDiscontinuousSpace()); space::ContinuousSpace* continuous_space = space->AsContinuousSpace(); // Continuous spaces don't necessarily have bitmaps. - accounting::SpaceBitmap* live_bitmap = continuous_space->GetLiveBitmap(); - accounting::SpaceBitmap* mark_bitmap = continuous_space->GetMarkBitmap(); + accounting::ContinuousSpaceBitmap* live_bitmap = continuous_space->GetLiveBitmap(); + accounting::ContinuousSpaceBitmap* mark_bitmap = continuous_space->GetMarkBitmap(); if (live_bitmap != nullptr) { DCHECK(mark_bitmap != nullptr); live_bitmap_->AddContinuousSpaceBitmap(live_bitmap); @@ -617,8 +617,8 @@ void Heap::RemoveSpace(space::Space* space) { DCHECK(!space->IsDiscontinuousSpace()); space::ContinuousSpace* continuous_space = space->AsContinuousSpace(); // Continuous spaces don't necessarily have bitmaps. 
- accounting::SpaceBitmap* live_bitmap = continuous_space->GetLiveBitmap(); - accounting::SpaceBitmap* mark_bitmap = continuous_space->GetMarkBitmap(); + accounting::ContinuousSpaceBitmap* live_bitmap = continuous_space->GetLiveBitmap(); + accounting::ContinuousSpaceBitmap* mark_bitmap = continuous_space->GetMarkBitmap(); if (live_bitmap != nullptr) { DCHECK(mark_bitmap != nullptr); live_bitmap_->RemoveContinuousSpaceBitmap(live_bitmap); @@ -1098,8 +1098,8 @@ bool Heap::IsLiveObjectLocked(mirror::Object* obj, bool search_allocation_stack, void Heap::DumpSpaces(std::ostream& stream) { for (const auto& space : continuous_spaces_) { - accounting::SpaceBitmap* live_bitmap = space->GetLiveBitmap(); - accounting::SpaceBitmap* mark_bitmap = space->GetMarkBitmap(); + accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap(); + accounting::ContinuousSpaceBitmap* mark_bitmap = space->GetMarkBitmap(); stream << space << " " << *space << "\n"; if (live_bitmap != nullptr) { stream << live_bitmap << " " << *live_bitmap << "\n"; @@ -1561,9 +1561,9 @@ class ZygoteCompactingCollector FINAL : public collector::SemiSpace { // Maps from bin sizes to locations. std::multimap bins_; // Live bitmap of the space which contains the bins. - accounting::SpaceBitmap* bin_live_bitmap_; + accounting::ContinuousSpaceBitmap* bin_live_bitmap_; // Mark bitmap of the space which contains the bins. - accounting::SpaceBitmap* bin_mark_bitmap_; + accounting::ContinuousSpaceBitmap* bin_mark_bitmap_; static void Callback(mirror::Object* obj, void* arg) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { @@ -1759,8 +1759,8 @@ void Heap::FlushAllocStack() { allocation_stack_->Reset(); } -void Heap::MarkAllocStack(accounting::SpaceBitmap* bitmap1, - accounting::SpaceBitmap* bitmap2, +void Heap::MarkAllocStack(accounting::ContinuousSpaceBitmap* bitmap1, + accounting::ContinuousSpaceBitmap* bitmap2, accounting::ObjectSet* large_objects, accounting::ObjectStack* stack) { DCHECK(bitmap1 != nullptr); @@ -2033,7 +2033,8 @@ class VerifyReferenceVisitor { accounting::CardTable::kCardSize); LOG(ERROR) << "Card " << reinterpret_cast(card_addr) << " covers " << cover_begin << "-" << cover_end; - accounting::SpaceBitmap* bitmap = heap_->GetLiveBitmap()->GetContinuousSpaceBitmap(obj); + accounting::ContinuousSpaceBitmap* bitmap = + heap_->GetLiveBitmap()->GetContinuousSpaceBitmap(obj); if (bitmap == nullptr) { LOG(ERROR) << "Object " << obj << " has no bitmap"; @@ -2868,7 +2869,7 @@ void Heap::RemoveRememberedSet(space::Space* space) { void Heap::ClearMarkedObjects() { // Clear all of the spaces' mark bitmaps. for (const auto& space : GetContinuousSpaces()) { - accounting::SpaceBitmap* mark_bitmap = space->GetMarkBitmap(); + accounting::ContinuousSpaceBitmap* mark_bitmap = space->GetMarkBitmap(); if (space->GetLiveBitmap() != mark_bitmap) { mark_bitmap->Clear(); } diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h index 912cf7d..874357f 100644 --- a/runtime/gc/heap.h +++ b/runtime/gc/heap.h @@ -470,7 +470,9 @@ class Heap { LOCKS_EXCLUDED(Locks::runtime_shutdown_lock_, Locks::thread_list_lock_); // Mark all the objects in the allocation stack in the specified bitmap. - void MarkAllocStack(accounting::SpaceBitmap* bitmap1, accounting::SpaceBitmap* bitmap2, + // TODO: Refactor? 
+ void MarkAllocStack(accounting::SpaceBitmap* bitmap1, + accounting::SpaceBitmap* bitmap2, accounting::ObjectSet* large_objects, accounting::ObjectStack* stack) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_); diff --git a/runtime/gc/heap_test.cc b/runtime/gc/heap_test.cc index 07e5088..a85ad4d 100644 --- a/runtime/gc/heap_test.cc +++ b/runtime/gc/heap_test.cc @@ -60,13 +60,11 @@ TEST_F(HeapTest, GarbageCollectClassLinkerInit) { TEST_F(HeapTest, HeapBitmapCapacityTest) { byte* heap_begin = reinterpret_cast(0x1000); - const size_t heap_capacity = accounting::SpaceBitmap::kAlignment * (sizeof(intptr_t) * 8 + 1); - UniquePtr bitmap(accounting::SpaceBitmap::Create("test bitmap", - heap_begin, - heap_capacity)); + const size_t heap_capacity = kObjectAlignment * (sizeof(intptr_t) * 8 + 1); + UniquePtr bitmap( + accounting::ContinuousSpaceBitmap::Create("test bitmap", heap_begin, heap_capacity)); mirror::Object* fake_end_of_heap_object = - reinterpret_cast(&heap_begin[heap_capacity - - accounting::SpaceBitmap::kAlignment]); + reinterpret_cast(&heap_begin[heap_capacity - kObjectAlignment]); bitmap->Set(fake_end_of_heap_object); } diff --git a/runtime/gc/space/bump_pointer_space.cc b/runtime/gc/space/bump_pointer_space.cc index 6bd0526..90ffe59 100644 --- a/runtime/gc/space/bump_pointer_space.cc +++ b/runtime/gc/space/bump_pointer_space.cc @@ -197,7 +197,7 @@ void BumpPointerSpace::Walk(ObjectCallback* callback, void* arg) { } } -accounting::SpaceBitmap::SweepCallback* BumpPointerSpace::GetSweepCallback() { +accounting::ContinuousSpaceBitmap::SweepCallback* BumpPointerSpace::GetSweepCallback() { LOG(FATAL) << "Unimplemented"; return nullptr; } diff --git a/runtime/gc/space/bump_pointer_space.h b/runtime/gc/space/bump_pointer_space.h index ecfeae5..e52a9a3 100644 --- a/runtime/gc/space/bump_pointer_space.h +++ b/runtime/gc/space/bump_pointer_space.h @@ -85,11 +85,11 @@ class BumpPointerSpace FINAL : public ContinuousMemMapAllocSpace { return GetMemMap()->Size(); } - accounting::SpaceBitmap* GetLiveBitmap() const OVERRIDE { + accounting::ContinuousSpaceBitmap* GetLiveBitmap() const OVERRIDE { return nullptr; } - accounting::SpaceBitmap* GetMarkBitmap() const OVERRIDE { + accounting::ContinuousSpaceBitmap* GetMarkBitmap() const OVERRIDE { return nullptr; } @@ -138,7 +138,7 @@ class BumpPointerSpace FINAL : public ContinuousMemMapAllocSpace { void Walk(ObjectCallback* callback, void* arg) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); - accounting::SpaceBitmap::SweepCallback* GetSweepCallback() OVERRIDE; + accounting::ContinuousSpaceBitmap::SweepCallback* GetSweepCallback() OVERRIDE; // Object alignment within the space. static constexpr size_t kAlignment = 8; diff --git a/runtime/gc/space/dlmalloc_space.cc b/runtime/gc/space/dlmalloc_space.cc index be88b33..41a0458 100644 --- a/runtime/gc/space/dlmalloc_space.cc +++ b/runtime/gc/space/dlmalloc_space.cc @@ -14,10 +14,10 @@ * limitations under the License. 
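The MarkAllocStack declaration just above takes two bitmaps plus a large-object set; as one would expect from that signature, each object recorded in the allocation stack is presumably set in whichever bitmap covers it, with large objects diverted to the set. An outline under that assumption (stand-in types, not the ART implementation):

#include <cstdint>
#include <set>
#include <vector>

// Minimal stand-ins: a bitmap knows its address range; large objects go to a set.
struct BitmapStub {
  uintptr_t begin, end;
  std::set<uintptr_t> marked;
  bool HasAddress(uintptr_t a) const { return a >= begin && a < end; }
  void Set(uintptr_t a) { marked.insert(a); }
};

// Every object recorded in the allocation stack is marked in whichever bitmap
// covers it; anything outside both ranges is treated as a large object.
void MarkAllocStackSketch(BitmapStub& bitmap1, BitmapStub& bitmap2,
                          std::set<uintptr_t>& large_objects,
                          const std::vector<uintptr_t>& alloc_stack) {
  for (uintptr_t obj : alloc_stack) {
    if (bitmap1.HasAddress(obj)) {
      bitmap1.Set(obj);
    } else if (bitmap2.HasAddress(obj)) {
      bitmap2.Set(obj);
    } else {
      large_objects.insert(obj);          // discontinuous / large object space
    }
  }
}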
*/ -#include "dlmalloc_space.h" - #include "dlmalloc_space-inl.h" + #include "gc/accounting/card_table.h" +#include "gc/accounting/space_bitmap-inl.h" #include "gc/heap.h" #include "mirror/class-inl.h" #include "mirror/object-inl.h" diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc index faa539f..91d8820 100644 --- a/runtime/gc/space/image_space.cc +++ b/runtime/gc/space/image_space.cc @@ -35,7 +35,7 @@ namespace space { Atomic ImageSpace::bitmap_index_(0); ImageSpace::ImageSpace(const std::string& name, MemMap* mem_map, - accounting::SpaceBitmap* live_bitmap) + accounting::ContinuousSpaceBitmap* live_bitmap) : MemMapSpace(name, mem_map, mem_map->Begin(), mem_map->End(), mem_map->End(), kGcRetentionPolicyNeverCollect) { DCHECK(live_bitmap != nullptr); @@ -197,10 +197,10 @@ ImageSpace* ImageSpace::Init(const char* image_file_name, bool validate_oat_file uint32_t bitmap_index = bitmap_index_.FetchAndAdd(1); std::string bitmap_name(StringPrintf("imagespace %s live-bitmap %u", image_file_name, bitmap_index)); - UniquePtr bitmap( - accounting::SpaceBitmap::CreateFromMemMap(bitmap_name, image_map.release(), - reinterpret_cast(map->Begin()), - map->Size())); + UniquePtr bitmap( + accounting::ContinuousSpaceBitmap::CreateFromMemMap(bitmap_name, image_map.release(), + reinterpret_cast(map->Begin()), + map->Size())); if (bitmap.get() == nullptr) { *error_msg = StringPrintf("Could not create bitmap '%s'", bitmap_name.c_str()); return nullptr; diff --git a/runtime/gc/space/image_space.h b/runtime/gc/space/image_space.h index 6b63d10..f6daf89 100644 --- a/runtime/gc/space/image_space.h +++ b/runtime/gc/space/image_space.h @@ -17,6 +17,7 @@ #ifndef ART_RUNTIME_GC_SPACE_IMAGE_SPACE_H_ #define ART_RUNTIME_GC_SPACE_IMAGE_SPACE_H_ +#include "gc/accounting/space_bitmap.h" #include "space.h" namespace art { @@ -59,11 +60,11 @@ class ImageSpace : public MemMapSpace { return GetName(); } - accounting::SpaceBitmap* GetLiveBitmap() const { + accounting::ContinuousSpaceBitmap* GetLiveBitmap() const OVERRIDE { return live_bitmap_.get(); } - accounting::SpaceBitmap* GetMarkBitmap() const { + accounting::ContinuousSpaceBitmap* GetMarkBitmap() const OVERRIDE { // ImageSpaces have the same bitmap for both live and marked. This helps reduce the number of // special cases to test against. return live_bitmap_.get(); @@ -100,9 +101,10 @@ class ImageSpace : public MemMapSpace { static Atomic bitmap_index_; - UniquePtr live_bitmap_; + UniquePtr live_bitmap_; - ImageSpace(const std::string& name, MemMap* mem_map, accounting::SpaceBitmap* live_bitmap); + ImageSpace(const std::string& name, MemMap* mem_map, + accounting::ContinuousSpaceBitmap* live_bitmap); // The OatFile associated with the image during early startup to // reserve space contiguous to the image. 
It is later released to diff --git a/runtime/gc/space/malloc_space.cc b/runtime/gc/space/malloc_space.cc index c3ca096..8f81446 100644 --- a/runtime/gc/space/malloc_space.cc +++ b/runtime/gc/space/malloc_space.cc @@ -48,15 +48,15 @@ MallocSpace::MallocSpace(const std::string& name, MemMap* mem_map, static const uintptr_t kGcCardSize = static_cast(accounting::CardTable::kCardSize); CHECK(IsAligned(reinterpret_cast(mem_map->Begin()))); CHECK(IsAligned(reinterpret_cast(mem_map->End()))); - live_bitmap_.reset(accounting::SpaceBitmap::Create( + live_bitmap_.reset(accounting::ContinuousSpaceBitmap::Create( StringPrintf("allocspace %s live-bitmap %d", name.c_str(), static_cast(bitmap_index)), Begin(), Capacity())); - DCHECK(live_bitmap_.get() != NULL) << "could not create allocspace live bitmap #" + DCHECK(live_bitmap_.get() != nullptr) << "could not create allocspace live bitmap #" << bitmap_index; - mark_bitmap_.reset(accounting::SpaceBitmap::Create( + mark_bitmap_.reset(accounting::ContinuousSpaceBitmap::Create( StringPrintf("allocspace %s mark-bitmap %d", name.c_str(), static_cast(bitmap_index)), Begin(), Capacity())); - DCHECK(live_bitmap_.get() != NULL) << "could not create allocspace mark bitmap #" + DCHECK(live_bitmap_.get() != nullptr) << "could not create allocspace mark bitmap #" << bitmap_index; } for (auto& freed : recent_freed_objects_) { @@ -238,7 +238,7 @@ void MallocSpace::SweepCallback(size_t num_ptrs, mirror::Object** ptrs, void* ar // If the bitmaps aren't swapped we need to clear the bits since the GC isn't going to re-swap // the bitmaps as an optimization. if (!context->swap_bitmaps) { - accounting::SpaceBitmap* bitmap = space->GetLiveBitmap(); + accounting::ContinuousSpaceBitmap* bitmap = space->GetLiveBitmap(); for (size_t i = 0; i < num_ptrs; ++i) { bitmap->Clear(ptrs[i]); } diff --git a/runtime/gc/space/malloc_space.h b/runtime/gc/space/malloc_space.h index dd4e5d4..d24016c 100644 --- a/runtime/gc/space/malloc_space.h +++ b/runtime/gc/space/malloc_space.h @@ -149,7 +149,7 @@ class MallocSpace : public ContinuousMemMapAllocSpace { SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) EXCLUSIVE_LOCKS_REQUIRED(lock_); - virtual accounting::SpaceBitmap::SweepCallback* GetSweepCallback() { + virtual accounting::ContinuousSpaceBitmap::SweepCallback* GetSweepCallback() { return &SweepCallback; } diff --git a/runtime/gc/space/rosalloc_space.cc b/runtime/gc/space/rosalloc_space.cc index afac2a2..5a7d941 100644 --- a/runtime/gc/space/rosalloc_space.cc +++ b/runtime/gc/space/rosalloc_space.cc @@ -15,10 +15,10 @@ * limitations under the License. 
*/ -#include "rosalloc_space.h" - #include "rosalloc_space-inl.h" + #include "gc/accounting/card_table.h" +#include "gc/accounting/space_bitmap-inl.h" #include "gc/heap.h" #include "mirror/class-inl.h" #include "mirror/object-inl.h" diff --git a/runtime/gc/space/space.cc b/runtime/gc/space/space.cc index 4af65a9..01e8b04 100644 --- a/runtime/gc/space/space.cc +++ b/runtime/gc/space/space.cc @@ -18,6 +18,7 @@ #include "base/logging.h" #include "gc/accounting/heap_bitmap.h" +#include "gc/accounting/space_bitmap-inl.h" #include "runtime.h" #include "thread-inl.h" @@ -77,8 +78,8 @@ DiscontinuousSpace::DiscontinuousSpace(const std::string& name, void ContinuousMemMapAllocSpace::Sweep(bool swap_bitmaps, size_t* freed_objects, size_t* freed_bytes) { DCHECK(freed_objects != nullptr); DCHECK(freed_bytes != nullptr); - accounting::SpaceBitmap* live_bitmap = GetLiveBitmap(); - accounting::SpaceBitmap* mark_bitmap = GetMarkBitmap(); + accounting::ContinuousSpaceBitmap* live_bitmap = GetLiveBitmap(); + accounting::ContinuousSpaceBitmap* mark_bitmap = GetMarkBitmap(); // If the bitmaps are bound then sweeping this space clearly won't do anything. if (live_bitmap == mark_bitmap) { return; @@ -94,11 +95,9 @@ void ContinuousMemMapAllocSpace::Sweep(bool swap_bitmaps, size_t* freed_objects, std::swap(live_bitmap, mark_bitmap); } // Bitmaps are pre-swapped for optimization which enables sweeping with the heap unlocked. - accounting::SpaceBitmap::SweepWalk(*live_bitmap, *mark_bitmap, - reinterpret_cast(Begin()), - reinterpret_cast(End()), - GetSweepCallback(), - reinterpret_cast(&scc)); + accounting::ContinuousSpaceBitmap::SweepWalk( + *live_bitmap, *mark_bitmap, reinterpret_cast(Begin()), + reinterpret_cast(End()), GetSweepCallback(), reinterpret_cast(&scc)); *freed_objects += scc.freed_objects; *freed_bytes += scc.freed_bytes; } @@ -106,9 +105,9 @@ void ContinuousMemMapAllocSpace::Sweep(bool swap_bitmaps, size_t* freed_objects, // Returns the old mark bitmap. void ContinuousMemMapAllocSpace::BindLiveToMarkBitmap() { CHECK(!HasBoundBitmaps()); - accounting::SpaceBitmap* live_bitmap = GetLiveBitmap(); + accounting::ContinuousSpaceBitmap* live_bitmap = GetLiveBitmap(); if (live_bitmap != mark_bitmap_.get()) { - accounting::SpaceBitmap* mark_bitmap = mark_bitmap_.release(); + accounting::ContinuousSpaceBitmap* mark_bitmap = mark_bitmap_.release(); Runtime::Current()->GetHeap()->GetMarkBitmap()->ReplaceBitmap(mark_bitmap, live_bitmap); temp_bitmap_.reset(mark_bitmap); mark_bitmap_.reset(live_bitmap); @@ -122,7 +121,7 @@ bool ContinuousMemMapAllocSpace::HasBoundBitmaps() const { void ContinuousMemMapAllocSpace::UnBindBitmaps() { CHECK(HasBoundBitmaps()); // At this point, the temp_bitmap holds our old mark bitmap. 
- accounting::SpaceBitmap* new_bitmap = temp_bitmap_.release(); + accounting::ContinuousSpaceBitmap* new_bitmap = temp_bitmap_.release(); Runtime::Current()->GetHeap()->GetMarkBitmap()->ReplaceBitmap(mark_bitmap_.get(), new_bitmap); CHECK_EQ(mark_bitmap_.release(), live_bitmap_.get()); mark_bitmap_.reset(new_bitmap); diff --git a/runtime/gc/space/space.h b/runtime/gc/space/space.h index c9022f1..2b27f87 100644 --- a/runtime/gc/space/space.h +++ b/runtime/gc/space/space.h @@ -34,10 +34,6 @@ namespace mirror { namespace gc { -namespace accounting { - class SpaceBitmap; -} // namespace accounting - class Heap; namespace space { @@ -268,8 +264,8 @@ class ContinuousSpace : public Space { return End() - Begin(); } - virtual accounting::SpaceBitmap* GetLiveBitmap() const = 0; - virtual accounting::SpaceBitmap* GetMarkBitmap() const = 0; + virtual accounting::ContinuousSpaceBitmap* GetLiveBitmap() const = 0; + virtual accounting::ContinuousSpaceBitmap* GetMarkBitmap() const = 0; // Maximum which the mapped space can grow to. virtual size_t Capacity() const { @@ -399,24 +395,24 @@ class ContinuousMemMapAllocSpace : public MemMapSpace, public AllocSpace { // Swap the live and mark bitmaps of this space. This is used by the GC for concurrent sweeping. void SwapBitmaps(); - // Reset the space back to an empty space and release memory. + // Clear the space back to an empty space. virtual void Clear() = 0; - accounting::SpaceBitmap* GetLiveBitmap() const { + accounting::ContinuousSpaceBitmap* GetLiveBitmap() const { return live_bitmap_.get(); } - accounting::SpaceBitmap* GetMarkBitmap() const { + accounting::ContinuousSpaceBitmap* GetMarkBitmap() const { return mark_bitmap_.get(); } void Sweep(bool swap_bitmaps, size_t* freed_objects, size_t* freed_bytes); - virtual accounting::SpaceBitmap::SweepCallback* GetSweepCallback() = 0; + virtual accounting::ContinuousSpaceBitmap::SweepCallback* GetSweepCallback() = 0; protected: - UniquePtr live_bitmap_; - UniquePtr mark_bitmap_; - UniquePtr temp_bitmap_; + UniquePtr live_bitmap_; + UniquePtr mark_bitmap_; + UniquePtr temp_bitmap_; ContinuousMemMapAllocSpace(const std::string& name, MemMap* mem_map, byte* begin, byte* end, byte* limit, GcRetentionPolicy gc_retention_policy) diff --git a/runtime/gc/space/zygote_space.cc b/runtime/gc/space/zygote_space.cc index a60ab38..1b06b63 100644 --- a/runtime/gc/space/zygote_space.cc +++ b/runtime/gc/space/zygote_space.cc @@ -40,8 +40,8 @@ class CountObjectsAllocated { }; ZygoteSpace* ZygoteSpace::Create(const std::string& name, MemMap* mem_map, - accounting::SpaceBitmap* live_bitmap, - accounting::SpaceBitmap* mark_bitmap) { + accounting::ContinuousSpaceBitmap* live_bitmap, + accounting::ContinuousSpaceBitmap* mark_bitmap) { DCHECK(live_bitmap != nullptr); DCHECK(mark_bitmap != nullptr); size_t objects_allocated = 0; @@ -105,7 +105,7 @@ void ZygoteSpace::SweepCallback(size_t num_ptrs, mirror::Object** ptrs, void* ar // If the bitmaps aren't swapped we need to clear the bits since the GC isn't going to re-swap // the bitmaps as an optimization. 
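BindLiveToMarkBitmap and UnBindBitmaps above temporarily alias a space's mark bitmap to its live bitmap (so every live object counts as marked during a collection that should not touch the space) and park the real mark bitmap in temp_bitmap_ until the GC finishes. The pointer shuffle reduced to its essentials, using std::unique_ptr in place of ART's UniquePtr:

#include <memory>

struct BitmapStub {};  // stand-in for accounting::ContinuousSpaceBitmap

struct SpaceBitmapsSketch {
  std::unique_ptr<BitmapStub> live_bitmap, mark_bitmap, temp_bitmap;

  // Bind: park the real mark bitmap and alias mark to live, so every live
  // object already counts as marked for this collection.
  void BindLiveToMarkBitmap() {
    if (mark_bitmap.get() != live_bitmap.get()) {
      temp_bitmap.reset(mark_bitmap.release());
      mark_bitmap.reset(live_bitmap.get());  // deliberate aliasing, see UnBindBitmaps
    }
  }

  // UnBind: drop the alias without deleting the live bitmap, then restore the
  // real mark bitmap. Must run before destruction or the alias would be freed twice.
  void UnBindBitmaps() {
    BitmapStub* real_mark = temp_bitmap.release();
    (void)mark_bitmap.release();
    mark_bitmap.reset(real_mark);
  }
};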
if (!context->swap_bitmaps) { - accounting::SpaceBitmap* bitmap = zygote_space->GetLiveBitmap(); + accounting::ContinuousSpaceBitmap* bitmap = zygote_space->GetLiveBitmap(); for (size_t i = 0; i < num_ptrs; ++i) { bitmap->Clear(ptrs[i]); } diff --git a/runtime/gc/space/zygote_space.h b/runtime/gc/space/zygote_space.h index 30370aa..50fc62b 100644 --- a/runtime/gc/space/zygote_space.h +++ b/runtime/gc/space/zygote_space.h @@ -17,16 +17,13 @@ #ifndef ART_RUNTIME_GC_SPACE_ZYGOTE_SPACE_H_ #define ART_RUNTIME_GC_SPACE_ZYGOTE_SPACE_H_ +#include "gc/accounting/space_bitmap.h" #include "malloc_space.h" #include "mem_map.h" namespace art { namespace gc { -namespace accounting { -class SpaceBitmap; -} - namespace space { // An zygote space is a space which you cannot allocate into or free from. @@ -34,8 +31,8 @@ class ZygoteSpace FINAL : public ContinuousMemMapAllocSpace { public: // Returns the remaining storage in the out_map field. static ZygoteSpace* Create(const std::string& name, MemMap* mem_map, - accounting::SpaceBitmap* live_bitmap, - accounting::SpaceBitmap* mark_bitmap) + accounting::ContinuousSpaceBitmap* live_bitmap, + accounting::ContinuousSpaceBitmap* mark_bitmap) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); void Dump(std::ostream& os) const; @@ -78,7 +75,7 @@ class ZygoteSpace FINAL : public ContinuousMemMapAllocSpace { } protected: - virtual accounting::SpaceBitmap::SweepCallback* GetSweepCallback() { + virtual accounting::ContinuousSpaceBitmap::SweepCallback* GetSweepCallback() { return &SweepCallback; } -- cgit v1.1
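Finally, the sizing rule used by Create and CreateFromMemMap earlier in the patch (bitmap_size = OffsetToIndex(RoundUp(heap_capacity, kAlignment * kBitsPerWord)) * kWordSize) works out to one bitmap byte per 64 heap bytes on a 64-bit build with 8-byte alignment. A small check of that arithmetic for the 16 MB heap used in space_bitmap_test.cc:

#include <cstddef>
#include <cstdint>
#include <cstdio>

int main() {
  const size_t kAlignment = 8;                     // assumed object alignment
  const size_t kWordSize = sizeof(uintptr_t);      // 8 bytes on a 64-bit build
  const size_t kBitsPerWord = kWordSize * 8;       // 64
  const size_t heap_capacity = 16 * 1024 * 1024;   // 16 MB, as in the bitmap tests
  const size_t bytes_per_word = kAlignment * kBitsPerWord;  // each bitmap word covers 512 heap bytes
  // RoundUp(heap_capacity, kAlignment * kBitsPerWord), then OffsetToIndex(...) * kWordSize.
  size_t rounded = (heap_capacity + bytes_per_word - 1) / bytes_per_word * bytes_per_word;
  size_t bitmap_size = rounded / bytes_per_word * kWordSize;
  std::printf("bitmap bytes for a 16 MB heap: %zu\n", bitmap_size);  // 262144, i.e. 256 KB
  return 0;
}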