summary refs log tree commit diff stats
diff options
context:
space:
mode:
authorMathieu Chartier <mathieuc@google.com>2013-07-30 20:26:16 +0000
committerAndroid (Google) Code Review <android-gerrit@google.com>2013-07-30 20:26:16 +0000
commit1a8080d4c2e1772cfcc5e9d6587bc63bba3945d9 (patch)
tree2f5ba28c3ecdcbcd5c530405a7d80538fbdcdb3c
parent32e8e0421c34cfbeffdbb35e57751e9c4d8d1fa2 (diff)
parentf082d3ce16c520b2d039869e8eb3055eda04e591 (diff)
downloadart-1a8080d4c2e1772cfcc5e9d6587bc63bba3945d9.zip
art-1a8080d4c2e1772cfcc5e9d6587bc63bba3945d9.tar.gz
art-1a8080d4c2e1772cfcc5e9d6587bc63bba3945d9.tar.bz2
Merge "Remove sorted variable in allocation stacks." into dalvik-dev
-rw-r--r--runtime/gc/accounting/atomic_stack.h41
-rw-r--r--runtime/gc/heap.cc43
2 files changed, 51 insertions(+), 33 deletions(-)
diff --git a/runtime/gc/accounting/atomic_stack.h b/runtime/gc/accounting/atomic_stack.h
index 92d9ea2..a732566 100644
--- a/runtime/gc/accounting/atomic_stack.h
+++ b/runtime/gc/accounting/atomic_stack.h
@@ -47,7 +47,7 @@ class AtomicStack {
DCHECK(begin_ != NULL);
front_index_ = 0;
back_index_ = 0;
- is_sorted_ = true;
+ debug_is_sorted_ = true;
int result = madvise(begin_, sizeof(T) * capacity_, MADV_DONTNEED);
if (result == -1) {
PLOG(WARNING) << "madvise failed";
@@ -58,8 +58,10 @@ class AtomicStack {
// Returns false if we overflowed the stack.
bool AtomicPushBack(const T& value) {
+ if (kIsDebugBuild) {
+ debug_is_sorted_ = false;
+ }
int32_t index;
- is_sorted_ = false;
do {
index = back_index_;
if (UNLIKELY(static_cast<size_t>(index) >= capacity_)) {
@@ -72,7 +74,9 @@ class AtomicStack {
}
void PushBack(const T& value) {
- is_sorted_ = false;
+ if (kIsDebugBuild) {
+ debug_is_sorted_ = false;
+ }
int32_t index = back_index_;
DCHECK_LT(static_cast<size_t>(index), capacity_);
back_index_ = index + 1;
@@ -122,22 +126,23 @@ class AtomicStack {
}
void Sort() {
- if (!is_sorted_) {
- int32_t start_back_index = back_index_.load();
- int32_t start_front_index = front_index_.load();
- is_sorted_ = true;
- std::sort(Begin(), End());
- CHECK_EQ(start_back_index, back_index_.load());
- CHECK_EQ(start_front_index, front_index_.load());
+ int32_t start_back_index = back_index_.load();
+ int32_t start_front_index = front_index_.load();
+ std::sort(Begin(), End());
+ CHECK_EQ(start_back_index, back_index_.load());
+ CHECK_EQ(start_front_index, front_index_.load());
+ if (kIsDebugBuild) {
+ debug_is_sorted_ = true;
}
}
+ bool ContainsSorted(const T& value) const {
+ DCHECK(debug_is_sorted_);
+ return std::binary_search(Begin(), End(), value);
+ }
+
bool Contains(const T& value) const {
- if (is_sorted_) {
- return std::binary_search(Begin(), End(), value);
- } else {
- return std::find(Begin(), End(), value) != End();
- }
+ return std::find(Begin(), End(), value) != End();
}
private:
@@ -147,7 +152,7 @@ class AtomicStack {
front_index_(0),
begin_(NULL),
capacity_(capacity),
- is_sorted_(true) {
+ debug_is_sorted_(true) {
}
// Size in number of elements.
@@ -156,6 +161,7 @@ class AtomicStack {
CHECK(mem_map_.get() != NULL) << "couldn't allocate mark stack";
byte* addr = mem_map_->Begin();
CHECK(addr != NULL);
+ debug_is_sorted_ = true;
begin_ = reinterpret_cast<T*>(addr);
Reset();
}
@@ -178,7 +184,8 @@ class AtomicStack {
// Maximum number of elements.
size_t capacity_;
- bool is_sorted_;
+ // Whether or not the stack is sorted, only updated in debug mode to avoid performance overhead.
+ bool debug_is_sorted_;
DISALLOW_COPY_AND_ASSIGN(AtomicStack);
};
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 292fd29..00f7e5b 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -524,25 +524,24 @@ bool Heap::IsHeapAddress(const mirror::Object* obj) {
bool Heap::IsLiveObjectLocked(const mirror::Object* obj) {
// Locks::heap_bitmap_lock_->AssertReaderHeld(Thread::Current());
- if (obj == NULL) {
+ if (obj == NULL || UNLIKELY(!IsAligned<kObjectAlignment>(obj))) {
return false;
}
- if (UNLIKELY(!IsAligned<kObjectAlignment>(obj))) {
- return false;
- }
- space::ContinuousSpace* cont_space = FindContinuousSpaceFromObject(obj, true);
- if (cont_space != NULL) {
- if (cont_space->GetLiveBitmap()->Test(obj)) {
+ space::ContinuousSpace* c_space = FindContinuousSpaceFromObject(obj, true);
+ space::DiscontinuousSpace* d_space = NULL;
+ if (c_space != NULL) {
+ if (c_space->GetLiveBitmap()->Test(obj)) {
return true;
}
} else {
- space::DiscontinuousSpace* disc_space = FindDiscontinuousSpaceFromObject(obj, true);
- if (disc_space != NULL) {
- if (disc_space->GetLiveObjects()->Test(obj)) {
+ d_space = FindDiscontinuousSpaceFromObject(obj, true);
+ if (d_space != NULL) {
+ if (d_space->GetLiveObjects()->Test(obj)) {
return true;
}
}
}
+ // This is covering the allocation/live stack swapping that is done without mutators suspended.
for (size_t i = 0; i < 5; ++i) {
if (allocation_stack_->Contains(const_cast<mirror::Object*>(obj)) ||
live_stack_->Contains(const_cast<mirror::Object*>(obj))) {
@@ -550,6 +549,18 @@ bool Heap::IsLiveObjectLocked(const mirror::Object* obj) {
}
NanoSleep(MsToNs(10));
}
+ // We need to check the bitmaps again since there is a race where we mark something as live and
+ // then clear the stack containing it.
+ if (c_space != NULL) {
+ if (c_space->GetLiveBitmap()->Test(obj)) {
+ return true;
+ }
+ } else {
+ d_space = FindDiscontinuousSpaceFromObject(obj, true);
+ if (d_space != NULL && d_space->GetLiveObjects()->Test(obj)) {
+ return true;
+ }
+ }
return false;
}
@@ -1229,10 +1240,10 @@ class VerifyReferenceVisitor {
if (bitmap != NULL && bitmap->Test(obj)) {
LOG(ERROR) << "Object " << obj << " found in live bitmap";
}
- if (alloc_stack->Contains(const_cast<mirror::Object*>(obj))) {
+ if (alloc_stack->ContainsSorted(const_cast<mirror::Object*>(obj))) {
LOG(ERROR) << "Object " << obj << " found in allocation stack";
}
- if (live_stack->Contains(const_cast<mirror::Object*>(obj))) {
+ if (live_stack->ContainsSorted(const_cast<mirror::Object*>(obj))) {
LOG(ERROR) << "Object " << obj << " found in live stack";
}
// Attempt to see if the card table missed the reference.
@@ -1252,10 +1263,10 @@ class VerifyReferenceVisitor {
} else {
LOG(ERROR) << "Root references dead object " << ref << "\nRef type " << PrettyTypeOf(ref);
}
- if (alloc_stack->Contains(const_cast<mirror::Object*>(ref))) {
+ if (alloc_stack->ContainsSorted(const_cast<mirror::Object*>(ref))) {
LOG(ERROR) << "Reference " << ref << " found in allocation stack!";
}
- if (live_stack->Contains(const_cast<mirror::Object*>(ref))) {
+ if (live_stack->ContainsSorted(const_cast<mirror::Object*>(ref))) {
LOG(ERROR) << "Reference " << ref << " found in live stack!";
}
heap_->image_mod_union_table_->Dump(LOG(ERROR) << "Image mod-union table: ");
@@ -1345,8 +1356,8 @@ class VerifyReferenceCardVisitor {
// Card should be either kCardDirty if it got re-dirtied after we aged it, or
// kCardDirty - 1 if it didnt get touched since we aged it.
accounting::ObjectStack* live_stack = heap_->live_stack_.get();
- if (live_stack->Contains(const_cast<mirror::Object*>(ref))) {
- if (live_stack->Contains(const_cast<mirror::Object*>(obj))) {
+ if (live_stack->ContainsSorted(const_cast<mirror::Object*>(ref))) {
+ if (live_stack->ContainsSorted(const_cast<mirror::Object*>(obj))) {
LOG(ERROR) << "Object " << obj << " found in live stack";
}
if (heap_->GetLiveBitmap()->Test(obj)) {