summaryrefslogtreecommitdiffstats
path: root/runtime/gc/collector/mark_sweep.h
diff options
context:
space:
mode:
author    Ian Rogers <irogers@google.com>  2014-02-25 17:01:10 -0800
committer Ian Rogers <irogers@google.com>  2014-02-26 16:38:22 -0800
commit  6fac447555dc94a935b78198479cce645c837b89 (patch)
tree    bcf1449999084b1e1dec3dac287f6f3670d7eda0 /runtime/gc/collector/mark_sweep.h
parent  7f0ff7e7fff82566bca5f9353eaa2c4f81f0671a (diff)
downloadart-6fac447555dc94a935b78198479cce645c837b89.zip
art-6fac447555dc94a935b78198479cce645c837b89.tar.gz
art-6fac447555dc94a935b78198479cce645c837b89.tar.bz2
Make allocations report usable size.
Work-in-progress to allow arrays to fill usable size. Bug: 13028925. Use C++11's override keyword on GCC >= 4.7 to ensure that we override GC and allocator methods. Move initial mirror::Class set up into a Functor so that all allocated objects have non-zero sizes. Use this property to assert that all objects are never larger than their usable size. Other bits of GC related clean-up, missing initialization, missing use of const, hot methods in .cc files, "unimplemented" functions that fail at runtime in header files, reducing header file includes, move valgrind's space into its own files, reduce number of array allocation routines. Change-Id: Id5760041a2d7f94dcaf17ec760f6095ec75dadaa
Diffstat (limited to 'runtime/gc/collector/mark_sweep.h')
-rw-r--r--  runtime/gc/collector/mark_sweep.h | 23
1 file changed, 13 insertions, 10 deletions
diff --git a/runtime/gc/collector/mark_sweep.h b/runtime/gc/collector/mark_sweep.h
index 29fafd6..c55b2b2 100644
--- a/runtime/gc/collector/mark_sweep.h
+++ b/runtime/gc/collector/mark_sweep.h
@@ -64,16 +64,18 @@ class MarkSweep : public GarbageCollector {
~MarkSweep() {}
- virtual void InitializePhase();
- virtual bool IsConcurrent() const;
- virtual bool HandleDirtyObjectsPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
- virtual void MarkingPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- virtual void ReclaimPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- virtual void FinishPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ virtual void InitializePhase() OVERRIDE;
+ virtual void MarkingPhase() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ virtual bool HandleDirtyObjectsPhase() OVERRIDE EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
+ virtual void ReclaimPhase() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ virtual void FinishPhase() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
virtual void MarkReachableObjects()
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
- virtual GcType GetGcType() const {
+
+ virtual bool IsConcurrent() const OVERRIDE;
+
+ virtual GcType GetGcType() const OVERRIDE {
return kGcTypeFull;
}
@@ -131,7 +133,7 @@ class MarkSweep : public GarbageCollector {
void ProcessReferences(Thread* self)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- // Update and mark references from immune spaces.
+ // Update and mark references from immune spaces. Virtual as overridden by StickyMarkSweep.
virtual void UpdateAndMarkModUnion()
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -140,7 +142,8 @@ class MarkSweep : public GarbageCollector {
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- // Sweeps unmarked objects to complete the garbage collection.
+ // Sweeps unmarked objects to complete the garbage collection. Virtual as by default it sweeps
+ // all allocation spaces. Partial and sticky GCs want to just sweep a subset of the heap.
virtual void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
// Sweeps unmarked objects to complete the garbage collection.
@@ -232,7 +235,7 @@ class MarkSweep : public GarbageCollector {
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
// Mark the vm thread roots.
- virtual void MarkThreadRoots(Thread* self)
+ void MarkThreadRoots(Thread* self)
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);