summary refs log tree commit diff stats
path: root/runtime/gc/collector/mark_sweep.h
diff options
context:
space:
mode:
author	Mathieu Chartier <mathieuc@google.com>	2014-02-18 14:37:05 -0800
committer	Mathieu Chartier <mathieuc@google.com>	2014-03-24 16:35:21 -0700
commit407f702da4f867c074fc3c8c688b8f8c32279eff (patch)
tree6a856b64f655f5aab1c538eab28e9c69f9010122 /runtime/gc/collector/mark_sweep.h
parentd201dec03334bcc25add81704981a78c19927d87 (diff)
downloadart-407f702da4f867c074fc3c8c688b8f8c32279eff.zip
art-407f702da4f867c074fc3c8c688b8f8c32279eff.tar.gz
art-407f702da4f867c074fc3c8c688b8f8c32279eff.tar.bz2
Refactor object reference visiting logic.

Refactored the reference visiting logic to be in mirror::Object instead of
MarkSweep.

Change-Id: I773249478dc463d83b465e85c2402320488577c0
Diffstat (limited to 'runtime/gc/collector/mark_sweep.h')
-rw-r--r--	runtime/gc/collector/mark_sweep.h	|	64
1 file changed, 15 insertions(+), 49 deletions(-)
diff --git a/runtime/gc/collector/mark_sweep.h b/runtime/gc/collector/mark_sweep.h
index 59f8e28..84b775a 100644
--- a/runtime/gc/collector/mark_sweep.h
+++ b/runtime/gc/collector/mark_sweep.h
@@ -33,6 +33,7 @@ namespace mirror {
class Class;
class Object;
template<class T> class ObjectArray;
+ class Reference;
} // namespace mirror
class StackVisitor;
@@ -162,10 +163,12 @@ class MarkSweep : public GarbageCollector {
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- // TODO: enable thread safety analysis when in use by multiple worker threads.
- template <typename MarkVisitor>
- void ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor)
- NO_THREAD_SAFETY_ANALYSIS;
+ // No thread safety analysis due to lambdas.
+ template<typename MarkVisitor, typename ReferenceVisitor>
+ void ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor,
+ const ReferenceVisitor& ref_visitor)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+ EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
void SweepSystemWeaks()
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
@@ -180,14 +183,14 @@ class MarkSweep : public GarbageCollector {
void VerifyIsLive(const mirror::Object* obj)
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
- template <bool kVisitClass, typename Visitor>
- static void VisitObjectReferences(mirror::Object* obj, const Visitor& visitor)
- SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
-
static mirror::Object* MarkObjectCallback(mirror::Object* obj, void* arg)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
+ static void MarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>* ref, void* arg)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+ EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
+
static void MarkRootCallback(mirror::Object** root, void* arg, uint32_t thread_id,
RootType root_type)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
@@ -214,6 +217,10 @@ class MarkSweep : public GarbageCollector {
return *gc_barrier_;
}
+ // Schedules an unmarked object for reference processing.
+ void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference)
+ SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
+
protected:
// Returns true if the object has its bit set in the mark bitmap.
bool IsMarked(const mirror::Object* object) const;
@@ -269,32 +276,6 @@ class MarkSweep : public GarbageCollector {
void VerifyRoot(const mirror::Object* root, size_t vreg, const StackVisitor* visitor)
NO_THREAD_SAFETY_ANALYSIS;
- template <bool kVisitClass, typename Visitor>
- static void VisitInstanceFieldsReferences(mirror::Class* klass, mirror::Object* obj,
- const Visitor& visitor)
- SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
-
- // Visit the header, static field references, and interface pointers of a class object.
- template <bool kVisitClass, typename Visitor>
- static void VisitClassReferences(mirror::Class* klass, mirror::Object* obj,
- const Visitor& visitor)
- SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
-
- template <bool kVisitClass, typename Visitor>
- static void VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor)
- SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
-
- template <bool kVisitClass, typename Visitor>
- static void VisitFieldsReferences(mirror::Object* obj, uint32_t ref_offsets, bool is_static,
- const Visitor& visitor)
- SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
-
- // Visit all of the references in an object array.
- template <typename Visitor>
- static void VisitObjectArrayReferences(mirror::ObjectArray<mirror::Object>* array,
- const Visitor& visitor)
- SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
-
// Push a single reference on a mark stack.
void PushOnMarkStack(mirror::Object* obj);
@@ -303,10 +284,6 @@ class MarkSweep : public GarbageCollector {
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- // Schedules an unmarked object for reference processing.
- void DelayReferenceReferent(mirror::Class* klass, mirror::Object* reference)
- SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
-
// Recursively blackens objects on the mark stack.
void ProcessMarkStack(bool paused)
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
@@ -316,17 +293,6 @@ class MarkSweep : public GarbageCollector {
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void EnqueueFinalizerReferences(mirror::Object** ref)
- EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- void PreserveSomeSoftReferences(mirror::Object** ref)
- EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- void ClearWhiteReferences(mirror::Object** list)
- SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
-
// Used to get around thread safety annotations. The call is from MarkingPhase and is guarded by
// IsExclusiveHeld.
void RevokeAllThreadLocalAllocationStacks(Thread* self) NO_THREAD_SAFETY_ANALYSIS;