summaryrefslogtreecommitdiffstats
path: root/runtime/gc/collector/semi_space.cc
diff options
context:
space:
mode:
authorMathieu Chartier <mathieuc@google.com>2014-02-18 11:38:45 -0800
committerMathieu Chartier <mathieuc@google.com>2014-02-20 10:00:24 -0800
commit3bb57c7b41bf5419fe895e7aa664d8d430205ba8 (patch)
tree55b28f12c3e5b9174499fe49acec195410880566 /runtime/gc/collector/semi_space.cc
parenta3537fb03e092a82f08ceb670a2eafa703203465 (diff)
downloadart-3bb57c7b41bf5419fe895e7aa664d8d430205ba8.zip
art-3bb57c7b41bf5419fe895e7aa664d8d430205ba8.tar.gz
art-3bb57c7b41bf5419fe895e7aa664d8d430205ba8.tar.bz2
Change ProcessReferences to not use RecursiveMarkObject.
Calling ProcessMarkStack in RecursiveMarkObject caused a lot of overhead due to timing logger splits. Changed the logic to be the same as prior to the reference queue refactoring which involves calling process mark stack after preserving soft references and enqueueing finalizer references. FinalizingGC longest pause is reduced by around 1/2 down to ~300ms. Benchmark score ~400000 -> ~600000. Also changed the timing logger splits in the GC to have (Paused) if the split is a paused part of the GC. Bug: 12129382 Change-Id: I7476d4f23670b19d70738e2fd48e37ec2f57e9f4
Diffstat (limited to 'runtime/gc/collector/semi_space.cc')
-rw-r--r--runtime/gc/collector/semi_space.cc26
1 file changed, 11 insertions, 15 deletions
diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc
index d64ec61..882867b 100644
--- a/runtime/gc/collector/semi_space.cc
+++ b/runtime/gc/collector/semi_space.cc
@@ -163,7 +163,7 @@ void SemiSpace::ProcessReferences(Thread* self) {
TimingLogger::ScopedSplit split("ProcessReferences", &timings_);
WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
GetHeap()->ProcessReferences(timings_, clear_soft_references_, &MarkedForwardingAddressCallback,
- &RecursiveMarkObjectCallback, this);
+ &MarkObjectCallback, &ProcessMarkStackCallback, this);
}
void SemiSpace::MarkingPhase() {
@@ -310,7 +310,7 @@ void SemiSpace::MarkReachableObjects() {
}
// Recursively process the mark stack.
- ProcessMarkStack(true);
+ ProcessMarkStack();
}
void SemiSpace::ReclaimPhase() {
@@ -571,13 +571,15 @@ Object* SemiSpace::MarkObject(Object* obj) {
return forward_address;
}
-mirror::Object* SemiSpace::RecursiveMarkObjectCallback(mirror::Object* root, void* arg) {
+void SemiSpace::ProcessMarkStackCallback(void* arg) {
+ DCHECK(arg != nullptr);
+ reinterpret_cast<SemiSpace*>(arg)->ProcessMarkStack();
+}
+
+mirror::Object* SemiSpace::MarkObjectCallback(mirror::Object* root, void* arg) {
DCHECK(root != nullptr);
DCHECK(arg != nullptr);
- SemiSpace* semi_space = reinterpret_cast<SemiSpace*>(arg);
- mirror::Object* ret = semi_space->MarkObject(root);
- semi_space->ProcessMarkStack(true);
- return ret;
+ return reinterpret_cast<SemiSpace*>(arg)->MarkObject(root);
}
void SemiSpace::MarkRootCallback(Object** root, void* arg, uint32_t /*thread_id*/,
@@ -587,12 +589,6 @@ void SemiSpace::MarkRootCallback(Object** root, void* arg, uint32_t /*thread_id*
*root = reinterpret_cast<SemiSpace*>(arg)->MarkObject(*root);
}
-Object* SemiSpace::MarkObjectCallback(Object* object, void* arg) {
- DCHECK(object != nullptr);
- DCHECK(arg != nullptr);
- return reinterpret_cast<SemiSpace*>(arg)->MarkObject(object);
-}
-
// Marks all objects in the root set.
void SemiSpace::MarkRoots() {
timings_.StartSplit("MarkRoots");
@@ -680,7 +676,7 @@ void SemiSpace::ScanObject(Object* obj) {
}
// Scan anything that's on the mark stack.
-void SemiSpace::ProcessMarkStack(bool paused) {
+void SemiSpace::ProcessMarkStack() {
space::MallocSpace* promo_dest_space = NULL;
accounting::SpaceBitmap* live_bitmap = NULL;
if (generational_ && !whole_heap_collection_) {
@@ -694,7 +690,7 @@ void SemiSpace::ProcessMarkStack(bool paused) {
DCHECK(mark_bitmap != nullptr);
DCHECK_EQ(live_bitmap, mark_bitmap);
}
- timings_.StartSplit(paused ? "(paused)ProcessMarkStack" : "ProcessMarkStack");
+ timings_.StartSplit("ProcessMarkStack");
while (!mark_stack_->IsEmpty()) {
Object* obj = mark_stack_->PopBack();
if (generational_ && !whole_heap_collection_ && promo_dest_space->HasAddress(obj)) {