author    Mathieu Chartier <mathieuc@google.com>    2014-03-04 23:26:43 +0000
committer Gerrit Code Review <noreply-gerritcodereview@google.com>    2014-03-04 23:26:43 +0000
commit    6f2083d3f909bce3988e71cd80374cee038ecba6 (patch)
tree      ff70112b6d4fcc5c07e153734e4cb05a10dcf99b /runtime/gc
parent    a6ed651cfe290b9558f4bb18e1c43250a1abcbbd (diff)
parent    893263b7d5bc2ca43a91ecb8071867f5134fc60a (diff)
Merge "Avoid marking old class linker and intern table roots during pause."
Diffstat (limited to 'runtime/gc')
-rw-r--r--  runtime/gc/collector/mark_sweep.cc  60
-rw-r--r--  runtime/gc/collector/mark_sweep.h   16
-rw-r--r--  runtime/gc/collector/semi_space.cc   2
-rw-r--r--  runtime/gc/heap.cc                   6
4 files changed, 48 insertions(+), 36 deletions(-)
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc
index c39e56f..4aff68a 100644
--- a/runtime/gc/collector/mark_sweep.cc
+++ b/runtime/gc/collector/mark_sweep.cc
@@ -86,6 +86,7 @@ static constexpr bool kCountJavaLangRefs = false;
// Turn off kCheckLocks when profiling the GC since it slows the GC down by up to 40%.
static constexpr bool kCheckLocks = kDebugLocking;
+static constexpr bool kVerifyRoots = kIsDebugBuild;
void MarkSweep::ImmuneSpace(space::ContinuousSpace* space) {
// Bind live to mark bitmap if necessary.
@@ -255,7 +256,8 @@ void MarkSweep::PreCleanCards() {
MarkThreadRoots(self);
// TODO: Only mark the dirty roots.
MarkNonThreadRoots();
- MarkConcurrentRoots();
+ MarkConcurrentRoots(
+ static_cast<VisitRootFlags>(kVisitRootFlagClearRootLog | kVisitRootFlagNewRoots));
// Process the newly aged cards.
RecursiveMarkDirtyObjects(false, accounting::CardTable::kCardDirty - 1);
// TODO: Empty allocation stack to reduce the number of objects we need to test / mark as live
@@ -286,17 +288,8 @@ void MarkSweep::MarkingPhase() {
heap_->SwapStacks(self);
WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
- if (Locks::mutator_lock_->IsExclusiveHeld(self)) {
- // If we exclusively hold the mutator lock, all threads must be suspended.
- MarkRoots();
- RevokeAllThreadLocalAllocationStacks(self);
- } else {
- MarkThreadRoots(self);
- // At this point the live stack should no longer have any mutators which push into it.
- MarkNonThreadRoots();
- }
+ MarkRoots(self);
live_stack_freeze_size_ = heap_->GetLiveStack()->Size();
- MarkConcurrentRoots();
UpdateAndMarkModUnion();
MarkReachableObjects();
// Pre-clean dirtied cards to reduce pauses.
@@ -583,17 +576,16 @@ inline void MarkSweep::MarkObject(const Object* obj) {
}
}
-void MarkSweep::MarkRoot(const Object* obj) {
- if (obj != NULL) {
- MarkObjectNonNull(obj);
- }
-}
-
void MarkSweep::MarkRootParallelCallback(mirror::Object** root, void* arg, uint32_t /*thread_id*/,
RootType /*root_type*/) {
reinterpret_cast<MarkSweep*>(arg)->MarkObjectNonNullParallel(*root);
}
+void MarkSweep::VerifyRootMarked(Object** root, void* arg, uint32_t /*thread_id*/,
+ RootType /*root_type*/) {
+ CHECK(reinterpret_cast<MarkSweep*>(arg)->IsMarked(*root));
+}
+
void MarkSweep::MarkRootCallback(Object** root, void* arg, uint32_t /*thread_id*/,
RootType /*root_type*/) {
reinterpret_cast<MarkSweep*>(arg)->MarkObjectNonNull(*root);
@@ -621,11 +613,20 @@ void MarkSweep::VerifyRoots() {
Runtime::Current()->GetThreadList()->VerifyRoots(VerifyRootCallback, this);
}
-// Marks all objects in the root set.
-void MarkSweep::MarkRoots() {
- timings_.StartSplit("MarkRoots");
- Runtime::Current()->VisitNonConcurrentRoots(MarkRootCallback, this);
- timings_.EndSplit();
+void MarkSweep::MarkRoots(Thread* self) {
+ if (Locks::mutator_lock_->IsExclusiveHeld(self)) {
+ // If we exclusively hold the mutator lock, all threads must be suspended.
+ timings_.StartSplit("MarkRoots");
+ Runtime::Current()->VisitRoots(MarkRootCallback, this);
+ timings_.EndSplit();
+ RevokeAllThreadLocalAllocationStacks(self);
+ } else {
+ MarkThreadRoots(self);
+ // At this point the live stack should no longer have any mutators which push into it.
+ MarkNonThreadRoots();
+ MarkConcurrentRoots(
+ static_cast<VisitRootFlags>(kVisitRootFlagAllRoots | kVisitRootFlagStartLoggingNewRoots));
+ }
}
void MarkSweep::MarkNonThreadRoots() {
@@ -634,10 +635,10 @@ void MarkSweep::MarkNonThreadRoots() {
timings_.EndSplit();
}
-void MarkSweep::MarkConcurrentRoots() {
+void MarkSweep::MarkConcurrentRoots(VisitRootFlags flags) {
timings_.StartSplit("MarkConcurrentRoots");
// Visit all runtime roots and clear dirty flags.
- Runtime::Current()->VisitConcurrentRoots(MarkRootCallback, this, false, true);
+ Runtime::Current()->VisitConcurrentRoots(MarkRootCallback, this, flags);
timings_.EndSplit();
}
@@ -1003,9 +1004,18 @@ void MarkSweep::RecursiveMarkDirtyObjects(bool paused, byte minimum_age) {
}
void MarkSweep::ReMarkRoots() {
+ Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
timings_.StartSplit("(Paused)ReMarkRoots");
- Runtime::Current()->VisitRoots(MarkRootCallback, this, true, true);
+ Runtime::Current()->VisitRoots(
+ MarkRootCallback, this, static_cast<VisitRootFlags>(kVisitRootFlagNewRoots |
+ kVisitRootFlagStopLoggingNewRoots |
+ kVisitRootFlagClearRootLog));
timings_.EndSplit();
+ if (kVerifyRoots) {
+ timings_.StartSplit("(Paused)VerifyRoots");
+ Runtime::Current()->VisitRoots(VerifyRootMarked, this);
+ timings_.EndSplit();
+ }
}
void MarkSweep::SweepSystemWeaks() {
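
[Note] The hunks above replace the old pair of boolean arguments to VisitConcurrentRoots/VisitRoots with a VisitRootFlags bitmask: the concurrent marking phase visits all roots and starts logging newly inserted ones, the card pre-clean pass visits only the logged new roots and clears the log, and the final remark pause visits the roots logged since, stops logging, and clears the log again. Below is a minimal sketch of how a flags-driven root visit with such a new-roots log can work. The flag names are taken from the calls above; LoggingRootContainer, Insert, and RootCallback are invented for illustration and are not ART's actual class linker or intern table code.

// Sketch only: an invented logging root container, not ART's implementation.
#include <cstdint>
#include <set>

class Object;

enum VisitRootFlags : uint8_t {
  kVisitRootFlagAllRoots = 0x1,
  kVisitRootFlagNewRoots = 0x2,
  kVisitRootFlagStartLoggingNewRoots = 0x4,
  kVisitRootFlagStopLoggingNewRoots = 0x8,
  kVisitRootFlagClearRootLog = 0x10,
};

using RootCallback = void (*)(Object** root, void* arg);

class LoggingRootContainer {
 public:
  // Called by mutators; while logging is enabled, remember which roots are new.
  void Insert(Object* obj) {
    roots_.insert(obj);
    if (log_new_roots_) {
      new_roots_.insert(obj);
    }
  }

  void VisitRoots(RootCallback callback, void* arg, VisitRootFlags flags) {
    if ((flags & kVisitRootFlagAllRoots) != 0) {
      for (Object* obj : roots_) {
        callback(&obj, arg);  // Non-moving sketch: the updated pointer is ignored.
      }
    } else if ((flags & kVisitRootFlagNewRoots) != 0) {
      // Only roots inserted since logging began need to be visited again.
      for (Object* obj : new_roots_) {
        callback(&obj, arg);
      }
    }
    if ((flags & kVisitRootFlagStartLoggingNewRoots) != 0) {
      log_new_roots_ = true;
    }
    if ((flags & kVisitRootFlagStopLoggingNewRoots) != 0) {
      log_new_roots_ = false;
    }
    if ((flags & kVisitRootFlagClearRootLog) != 0) {
      new_roots_.clear();
    }
  }

 private:
  std::set<Object*> roots_;
  std::set<Object*> new_roots_;
  bool log_new_roots_ = false;
};
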
diff --git a/runtime/gc/collector/mark_sweep.h b/runtime/gc/collector/mark_sweep.h
index c55b2b2..5c0a233 100644
--- a/runtime/gc/collector/mark_sweep.h
+++ b/runtime/gc/collector/mark_sweep.h
@@ -36,6 +36,7 @@ namespace mirror {
class StackVisitor;
class Thread;
+enum VisitRootFlags : uint8_t;
namespace gc {
@@ -85,8 +86,8 @@ class MarkSweep : public GarbageCollector {
// Find the default mark bitmap.
void FindDefaultMarkBitmap();
- // Marks the root set at the start of a garbage collection.
- void MarkRoots()
+ // Marks all objects in the root set at the start of a garbage collection.
+ void MarkRoots(Thread* self)
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -94,7 +95,7 @@ class MarkSweep : public GarbageCollector {
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void MarkConcurrentRoots()
+ void MarkConcurrentRoots(VisitRootFlags flags)
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -193,6 +194,11 @@ class MarkSweep : public GarbageCollector {
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
+ static void VerifyRootMarked(mirror::Object** root, void* arg, uint32_t /*thread_id*/,
+ RootType /*root_type*/)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+ EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
+
static void ProcessMarkStackPausedCallback(void* arg)
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
@@ -205,10 +211,6 @@ class MarkSweep : public GarbageCollector {
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
- void MarkRoot(const mirror::Object* obj)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
- EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
-
Barrier& GetBarrier() {
return *gc_barrier_;
}
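
[Note] The SHARED_LOCKS_REQUIRED / EXCLUSIVE_LOCKS_REQUIRED annotations used throughout this header are thread-safety attributes understood by Clang's -Wthread-safety static analysis. Their definitions are not part of this diff; assuming ART's usual macro-wrapper pattern, they would look roughly like the sketch below (an assumption, not the file's actual contents):

// Sketch of typical definitions; ART's real macros live in its base headers.
#if defined(__clang__)
#define EXCLUSIVE_LOCKS_REQUIRED(...) \
    __attribute__((exclusive_locks_required(__VA_ARGS__)))
#define SHARED_LOCKS_REQUIRED(...) \
    __attribute__((shared_locks_required(__VA_ARGS__)))
#else
// Other compilers do not implement the analysis, so the annotations expand to nothing.
#define EXCLUSIVE_LOCKS_REQUIRED(...)
#define SHARED_LOCKS_REQUIRED(...)
#endif
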
diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc
index 4668a19..a577f90 100644
--- a/runtime/gc/collector/semi_space.cc
+++ b/runtime/gc/collector/semi_space.cc
@@ -619,7 +619,7 @@ void SemiSpace::MarkRootCallback(Object** root, void* arg, uint32_t /*thread_id*
void SemiSpace::MarkRoots() {
timings_.StartSplit("MarkRoots");
// TODO: Visit up image roots as well?
- Runtime::Current()->VisitRoots(MarkRootCallback, this, false, true);
+ Runtime::Current()->VisitRoots(MarkRootCallback, this);
timings_.EndSplit();
}
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index fc591e7..b97b9ec 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -1899,11 +1899,11 @@ class VerifyReferenceVisitor {
// Search to see if any of the roots reference our object.
void* arg = const_cast<void*>(reinterpret_cast<const void*>(obj));
- Runtime::Current()->VisitRoots(&RootMatchesObjectVisitor, arg, false, false);
+ Runtime::Current()->VisitRoots(&RootMatchesObjectVisitor, arg);
// Search to see if any of the roots reference our reference.
arg = const_cast<void*>(reinterpret_cast<const void*>(ref));
- Runtime::Current()->VisitRoots(&RootMatchesObjectVisitor, arg, false, false);
+ Runtime::Current()->VisitRoots(&RootMatchesObjectVisitor, arg);
} else {
LOG(ERROR) << "Root " << ref << " is dead with type " << PrettyTypeOf(ref);
}
@@ -1975,7 +1975,7 @@ bool Heap::VerifyHeapReferences() {
// pointing to dead objects if they are not reachable.
VisitObjects(VerifyObjectVisitor::VisitCallback, &visitor);
// Verify the roots:
- Runtime::Current()->VisitRoots(VerifyReferenceVisitor::VerifyRoots, &visitor, false, false);
+ Runtime::Current()->VisitRoots(VerifyReferenceVisitor::VerifyRoots, &visitor);
if (visitor.Failed()) {
// Dump mod-union tables.
for (const auto& table_pair : mod_union_tables_) {
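
[Note] Putting the pieces together, the call sites in mark_sweep.cc apply the flags in a fixed order over one collection cycle. The following hedged illustration reuses the LoggingRootContainer sketch from earlier; SketchGcRootFlow and its parameters are invented names, and the real MarkSweep phases interleave further work (card pre-cleaning, mod-union table updates, reference processing) that is omitted here.

// Illustration only: mirrors the flag ordering from the diff, not ART's full control flow.
void SketchGcRootFlow(LoggingRootContainer& roots, RootCallback mark,
                      RootCallback verify_marked, void* collector) {
  // MarkingPhase (concurrent): mark every root, then start logging new ones.
  roots.VisitRoots(mark, collector, static_cast<VisitRootFlags>(
      kVisitRootFlagAllRoots | kVisitRootFlagStartLoggingNewRoots));
  // PreCleanCards (still concurrent): only newly logged roots need marking;
  // clear the log but keep logging for the final pause.
  roots.VisitRoots(mark, collector, static_cast<VisitRootFlags>(
      kVisitRootFlagClearRootLog | kVisitRootFlagNewRoots));
  // ReMarkRoots (paused): mark roots logged since pre-clean, stop logging,
  // and clear the log, so the pause only touches the delta.
  roots.VisitRoots(mark, collector, static_cast<VisitRootFlags>(
      kVisitRootFlagNewRoots | kVisitRootFlagStopLoggingNewRoots |
      kVisitRootFlagClearRootLog));
  // Debug builds (kVerifyRoots) then re-walk all roots checking they are marked.
  roots.VisitRoots(verify_marked, collector, kVisitRootFlagAllRoots);
}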