summaryrefslogtreecommitdiffstats
path: root/runtime/gc
diff options
context:
space:
mode:
authorMathieu Chartier <mathieuc@google.com>2014-03-20 15:12:30 -0700
committerMathieu Chartier <mathieuc@google.com>2014-03-20 15:34:18 -0700
commit601276abdb746b03675ff945745aa936694d3439 (patch)
tree94a789af17b042b079251cd212787724c2b820d6 /runtime/gc
parentafb21d16bd2267f7f34e63a5b5105f7572455d22 (diff)
downloadart-601276abdb746b03675ff945745aa936694d3439.zip
art-601276abdb746b03675ff945745aa936694d3439.tar.gz
art-601276abdb746b03675ff945745aa936694d3439.tar.bz2
Fix RecordFree to take signed parameters.
RecordFree can get negative bytes allocated when background compaction foreground transitions occur. This caused a DCHECK to fail on debug builds. Also did some refactoring in PreProcessReferences. Bug: 13568814 Change-Id: I57543f1c78544a94f1d241459698b736dba8cfa8
Diffstat (limited to 'runtime/gc')
-rw-r--r--runtime/gc/collector/mark_sweep.cc18
-rw-r--r--runtime/gc/collector/mark_sweep.h2
-rw-r--r--runtime/gc/heap.cc8
-rw-r--r--runtime/gc/heap.h2
4 files changed, 17 insertions, 13 deletions
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc
index 8372734..76a7410 100644
--- a/runtime/gc/collector/mark_sweep.cc
+++ b/runtime/gc/collector/mark_sweep.cc
@@ -145,10 +145,14 @@ void MarkSweep::ProcessReferences(Thread* self) {
&MarkObjectCallback, &ProcessMarkStackPausedCallback, this);
}
-void MarkSweep::PreProcessReferences(Thread* self) {
- timings_.NewSplit("PreProcessReferences");
- GetHeap()->ProcessSoftReferences(timings_, clear_soft_references_, &IsMarkedCallback,
- &MarkObjectCallback, &ProcessMarkStackPausedCallback, this);
+void MarkSweep::PreProcessReferences() {
+ if (IsConcurrent()) {
+ // No reason to do this for non-concurrent GC since pre processing soft references only helps
+ // pauses.
+ timings_.NewSplit("PreProcessReferences");
+ GetHeap()->ProcessSoftReferences(timings_, clear_soft_references_, &IsMarkedCallback,
+ &MarkObjectCallback, &ProcessMarkStackPausedCallback, this);
+ }
}
bool MarkSweep::HandleDirtyObjectsPhase() {
@@ -261,11 +265,7 @@ void MarkSweep::MarkingPhase() {
MarkReachableObjects();
// Pre-clean dirtied cards to reduce pauses.
PreCleanCards();
- if (IsConcurrent()) {
- // No reason to do this for non-concurrent GC since pre processing soft references only helps
- // pauses.
- PreProcessReferences(self);
- }
+ PreProcessReferences();
}
void MarkSweep::UpdateAndMarkModUnion() {
diff --git a/runtime/gc/collector/mark_sweep.h b/runtime/gc/collector/mark_sweep.h
index b117b20..65583d9 100644
--- a/runtime/gc/collector/mark_sweep.h
+++ b/runtime/gc/collector/mark_sweep.h
@@ -126,7 +126,7 @@ class MarkSweep : public GarbageCollector {
void ProcessReferences(Thread* self)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void PreProcessReferences(Thread* self)
+ void PreProcessReferences()
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index a763e37..79417a6 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -1101,8 +1101,12 @@ void Heap::VerifyHeap() {
GetLiveBitmap()->Walk(Heap::VerificationCallback, this);
}
-void Heap::RecordFree(size_t freed_objects, size_t freed_bytes) {
- DCHECK_LE(freed_bytes, num_bytes_allocated_.Load());
+void Heap::RecordFree(ssize_t freed_objects, ssize_t freed_bytes) {
+  // Use signed comparison since freed bytes can be negative when background compaction foreground
+  // transitions occur. This is caused by moving objects from a bump pointer space to a
+  // free list backed space, which typically increases memory footprint due to padding and binning.
+ DCHECK_LE(freed_bytes, static_cast<ssize_t>(num_bytes_allocated_.Load()));
+ DCHECK_GE(freed_objects, 0);
num_bytes_allocated_.FetchAndSub(freed_bytes);
if (Runtime::Current()->HasStatsEnabled()) {
RuntimeStats* thread_stats = Thread::Current()->GetStats();
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index eb53ba9..3dfd065 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -353,7 +353,7 @@ class Heap {
// Freed bytes can be negative in cases where we copy objects from a compacted space to a
// free-list backed space.
- void RecordFree(size_t freed_objects, size_t freed_bytes);
+ void RecordFree(ssize_t freed_objects, ssize_t freed_bytes);
// Must be called if a field of an Object in the heap changes, and before any GC safe-point.
// The call is not needed if NULL is stored in the field.