author     Mathieu Chartier <mathieuc@google.com>    2014-02-13 18:03:10 +0000
committer  Gerrit Code Review <noreply-gerritcodereview@google.com>    2014-02-13 18:03:10 +0000
commit     d2be39a0c106728bc9087ff0c0500b796aadea25 (patch)
tree       4e649ace212d48b16e2ab5249a2b4dcf5fbfa5aa /runtime
parent     13511e8157d637b617a1a701dba3580f1d5a30fc (diff)
parent     8544b462b524a502d2a18efb5a790ef98fa5eb51 (diff)
Merge "Fix race condition in BumpPointerSpace::Walk."
Diffstat (limited to 'runtime')
-rw-r--r--  runtime/gc/space/bump_pointer_space.cc  22
1 file changed, 18 insertions, 4 deletions
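The race this merge addresses comes from bump-pointer allocation making an object visible (by bumping the end of the space) before the object's class word is written, so a concurrent Walk can encounter an object it cannot size. Below is a minimal, self-contained C++ sketch of that hazard; FakeObject, FakeBumpSpace, and Alloc are hypothetical stand-ins for illustration, not ART's mirror::Object or BumpPointerSpace.

#include <atomic>
#include <cstddef>
#include <cstdint>
#include <new>

// Hypothetical object layout: the first word is the class pointer, which the
// allocator fills in only after the allocation is already visible to walkers.
struct FakeObject {
  std::atomic<const void*> klass{nullptr};
  size_t size = 0;  // total size of this object in bytes
};

// Hypothetical bump-pointer region. Alloc() reserves space by bumping an
// atomic cursor first (step 1) and publishes the class word later (step 2);
// a walker that reads the cursor between the two steps sees an object with a
// null class and no reliable size -- the window the commit guards against.
struct FakeBumpSpace {
  alignas(FakeObject) uint8_t buffer[4096];
  std::atomic<size_t> bytes_allocated{0};

  FakeObject* Alloc(const void* klass, size_t bytes) {
    size_t offset = bytes_allocated.fetch_add(bytes);  // step 1: bump is visible now
    FakeObject* obj = new (buffer + offset) FakeObject();
    obj->size = bytes;
    obj->klass.store(klass);                           // step 2: class set later
    return obj;
  }
};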
diff --git a/runtime/gc/space/bump_pointer_space.cc b/runtime/gc/space/bump_pointer_space.cc
index f7bdc4c..f3f594f 100644
--- a/runtime/gc/space/bump_pointer_space.cc
+++ b/runtime/gc/space/bump_pointer_space.cc
@@ -137,6 +137,7 @@ byte* BumpPointerSpace::AllocBlock(size_t bytes) {
 void BumpPointerSpace::Walk(ObjectCallback* callback, void* arg) {
   byte* pos = Begin();
+  byte* end = End();
   byte* main_end = pos;
   {
     MutexLock mu(Thread::Current(), block_lock_);
@@ -145,16 +146,29 @@ void BumpPointerSpace::Walk(ObjectCallback* callback, void* arg) {
     if (num_blocks_ == 0) {
       UpdateMainBlock();
     }
-    main_end += main_block_size_;
+    main_end = Begin() + main_block_size_;
+    if (num_blocks_ == 0) {
+      // We don't have any other blocks, this means someone else may be allocating into the main
+      // block. In this case, we don't want to try and visit the other blocks after the main block
+      // since these could actually be part of the main block.
+      end = main_end;
+    }
   }
   // Walk all of the objects in the main block first.
   while (pos < main_end) {
     mirror::Object* obj = reinterpret_cast<mirror::Object*>(pos);
-    callback(obj, arg);
-    pos = reinterpret_cast<byte*>(GetNextObject(obj));
+    if (obj->GetClass() == nullptr) {
+      // There is a race condition where a thread has just allocated an object but not set the
+      // class. We can't know the size of this object, so we don't visit it and exit the function
+      // since there is guaranteed to be no other blocks.
+      return;
+    } else {
+      callback(obj, arg);
+      pos = reinterpret_cast<byte*>(GetNextObject(obj));
+    }
   }
   // Walk the other blocks (currently only TLABs).
-  while (pos < End()) {
+  while (pos < end) {
     BlockHeader* header = reinterpret_cast<BlockHeader*>(pos);
     size_t block_size = header->size_;
     pos += sizeof(BlockHeader);  // Skip the header so that we know where the objects
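For context, here is a sketch of the walking side after this change, reusing the hypothetical FakeBumpSpace/FakeObject model from the sketch above rather than ART's actual types: the walker snapshots the end of the space once (mirroring the cached end = End() in the diff) and stops at the first object whose class word is still null, since its size cannot be determined and any bytes past it may belong to an in-progress allocation.

// Sketch of a walker over the hypothetical FakeBumpSpace above, not ART's
// real Walk(): snapshot the end once, then bail out at the first object
// whose class word has not been published yet.
void WalkSketch(FakeBumpSpace& space, void (*callback)(FakeObject*, void*), void* arg) {
  uint8_t* pos = space.buffer;
  uint8_t* end = space.buffer + space.bytes_allocated.load();  // snapshot, like caching End()
  while (pos < end) {
    FakeObject* obj = reinterpret_cast<FakeObject*>(pos);
    if (obj->klass.load() == nullptr) {
      return;  // racing allocation: space reserved but class not set yet
    }
    callback(obj, arg);
    pos += obj->size;  // stand-in for GetNextObject(obj)
  }
  // The real Walk then continues over the remaining header-prefixed blocks
  // (TLABs), bounded by the same cached end.
}

The design point the diff makes is that the end is captured once and, when there are no TLAB blocks yet, clamped to the main block's end under block_lock_, so bytes allocated into the main block during the walk are never misread as BlockHeaders.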