diff options
author | Mathieu Chartier <mathieuc@google.com> | 2014-01-13 17:19:19 -0800 |
---|---|---|
committer | Mathieu Chartier <mathieuc@google.com> | 2014-01-31 09:45:09 -0800 |
commit | a1602f28c0e3127ad511712d4b08db89737ae901 (patch) | |
tree | 44d7abfcaa57de85fd3c3c40d5bff3173cc60387 /runtime/gc/collector/mark_sweep.cc | |
parent | a0e4b01b6b1ef5c88e340e6b2c09f83e535777a1 (diff) | |
download | art-a1602f28c0e3127ad511712d4b08db89737ae901.zip art-a1602f28c0e3127ad511712d4b08db89737ae901.tar.gz art-a1602f28c0e3127ad511712d4b08db89737ae901.tar.bz2 |
Add zygote space as its own space type.
Helps prevent errors caused by doing invalid operations on the
old alloc space.
Removed some duplicated code in mark_sweep.cc and semi_space.cc.
Change-Id: I67a772cab30d698744c918aad581053f282a4a99
Diffstat (limited to 'runtime/gc/collector/mark_sweep.cc')
-rw-r--r-- | runtime/gc/collector/mark_sweep.cc | 59 |
1 file changed, 10 insertions, 49 deletions
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc index 5d450a7..862d06f 100644 --- a/runtime/gc/collector/mark_sweep.cc +++ b/runtime/gc/collector/mark_sweep.cc @@ -89,7 +89,8 @@ constexpr bool kCheckLocks = kDebugLocking; void MarkSweep::ImmuneSpace(space::ContinuousSpace* space) { // Bind live to mark bitmap if necessary. if (space->GetLiveBitmap() != space->GetMarkBitmap()) { - BindLiveToMarkBitmap(space); + CHECK(space->IsContinuousMemMapAllocSpace()); + space->AsContinuousMemMapAllocSpace()->BindLiveToMarkBitmap(); } // Add the space to the immune region. @@ -143,11 +144,6 @@ MarkSweep::MarkSweep(Heap* heap, bool is_concurrent, const std::string& name_pre mark_stack_(NULL), immune_begin_(NULL), immune_end_(NULL), - soft_reference_list_(NULL), - weak_reference_list_(NULL), - finalizer_reference_list_(NULL), - phantom_reference_list_(NULL), - cleared_reference_list_(NULL), live_stack_freeze_size_(0), gc_barrier_(new Barrier(0)), large_object_lock_("mark sweep large object lock", kMarkSweepLargeObjectLock), @@ -161,11 +157,6 @@ void MarkSweep::InitializePhase() { mark_stack_ = heap_->mark_stack_.get(); DCHECK(mark_stack_ != nullptr); SetImmuneRange(nullptr, nullptr); - soft_reference_list_ = nullptr; - weak_reference_list_ = nullptr; - finalizer_reference_list_ = nullptr; - phantom_reference_list_ = nullptr; - cleared_reference_list_ = nullptr; class_count_ = 0; array_count_ = 0; other_count_ = 0; @@ -347,7 +338,8 @@ void MarkSweep::ReclaimPhase() { timings_.EndSplit(); // Unbind the live and mark bitmaps. 
- UnBindBitmaps(); + TimingLogger::ScopedSplit split("UnBindBitmaps", &timings_); + GetHeap()->UnBindBitmaps(); } } @@ -589,14 +581,6 @@ void MarkSweep::MarkConcurrentRoots() { timings_.EndSplit(); } -void MarkSweep::BindLiveToMarkBitmap(space::ContinuousSpace* space) { - CHECK(space->IsMallocSpace()); - space::MallocSpace* alloc_space = space->AsMallocSpace(); - accounting::SpaceBitmap* live_bitmap = space->GetLiveBitmap(); - accounting::SpaceBitmap* mark_bitmap = alloc_space->BindLiveToMarkBitmap(); - GetHeap()->GetMarkBitmap()->ReplaceBitmap(mark_bitmap, live_bitmap); -} - class ScanObjectVisitor { public: explicit ScanObjectVisitor(MarkSweep* const mark_sweep) ALWAYS_INLINE @@ -893,14 +877,8 @@ class RecursiveMarkTask : public MarkStackTask<false> { // recursively marks until the mark stack is emptied. void MarkSweep::RecursiveMark() { TimingLogger::ScopedSplit split("RecursiveMark", &timings_); - // RecursiveMark will build the lists of known instances of the Reference classes. - // See DelayReferenceReferent for details. - CHECK(soft_reference_list_ == NULL); - CHECK(weak_reference_list_ == NULL); - CHECK(finalizer_reference_list_ == NULL); - CHECK(phantom_reference_list_ == NULL); - CHECK(cleared_reference_list_ == NULL); - + // RecursiveMark will build the lists of known instances of the Reference classes. See + // DelayReferenceReferent for details. if (kUseRecursiveMark) { const bool partial = GetGcType() == kGcTypePartial; ScanObjectVisitor scan_visitor(this); @@ -1146,13 +1124,13 @@ void MarkSweep::Sweep(bool swap_bitmaps) { DCHECK(mark_stack_->IsEmpty()); TimingLogger::ScopedSplit("Sweep", &timings_); for (const auto& space : GetHeap()->GetContinuousSpaces()) { - if (space->IsMallocSpace()) { - space::MallocSpace* malloc_space = space->AsMallocSpace(); + if (space->IsContinuousMemMapAllocSpace()) { + space::ContinuousMemMapAllocSpace* alloc_space = space->AsContinuousMemMapAllocSpace(); TimingLogger::ScopedSplit split( - malloc_space->IsZygoteSpace() ? 
"SweepZygoteSpace" : "SweepAllocSpace", &timings_); + alloc_space->IsZygoteSpace() ? "SweepZygoteSpace" : "SweepMallocSpace", &timings_); size_t freed_objects = 0; size_t freed_bytes = 0; - malloc_space->Sweep(swap_bitmaps, &freed_objects, &freed_bytes); + alloc_space->Sweep(swap_bitmaps, &freed_objects, &freed_bytes); heap_->RecordFree(freed_objects, freed_bytes); freed_objects_.FetchAndAdd(freed_objects); freed_bytes_.FetchAndAdd(freed_bytes); @@ -1278,23 +1256,6 @@ inline bool MarkSweep::IsMarked(const Object* object) const return heap_->GetMarkBitmap()->Test(object); } -void MarkSweep::UnBindBitmaps() { - TimingLogger::ScopedSplit split("UnBindBitmaps", &timings_); - for (const auto& space : GetHeap()->GetContinuousSpaces()) { - if (space->IsMallocSpace()) { - space::MallocSpace* alloc_space = space->AsMallocSpace(); - if (alloc_space->temp_bitmap_.get() != NULL) { - // At this point, the temp_bitmap holds our old mark bitmap. - accounting::SpaceBitmap* new_bitmap = alloc_space->temp_bitmap_.release(); - GetHeap()->GetMarkBitmap()->ReplaceBitmap(alloc_space->mark_bitmap_.get(), new_bitmap); - CHECK_EQ(alloc_space->mark_bitmap_.release(), alloc_space->live_bitmap_.get()); - alloc_space->mark_bitmap_.reset(new_bitmap); - DCHECK(alloc_space->temp_bitmap_.get() == NULL); - } - } - } -} - void MarkSweep::FinishPhase() { TimingLogger::ScopedSplit split("FinishPhase", &timings_); // Can't enqueue references if we hold the mutator lock. |