author    Mathieu Chartier <mathieuc@google.com>  2014-03-06 15:52:27 -0800
committer Mathieu Chartier <mathieuc@google.com>  2014-03-06 16:00:46 -0800
commit    f517f1a994fab72ba484bbbac6911e315f59f6cd
tree      5dbd368323e6d3ebdf47db4ab7d5bdd7a2a7845f
parent    c7a8981098646b4e255d9ede1f3798023bac6e84
Restore obj after RequestConcurrentGC.
RequestConcurrentGC can cause thread suspension, which means that another thread could transition the heap or trigger a moving GC while this thread is suspended. A moving GC may relocate the freshly allocated object and leave the raw obj pointer stale, so the pointer is preserved in a SirtRef across the call and restored afterwards.

Bug: 12934910
Change-Id: I5c07161e2e849d7acbdf939f1c24e1ba361a1d6a
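Below is a minimal standalone sketch of the hazard and of the fix pattern; it is not ART code. Object, HandleScope, and Relocate are hypothetical stand-ins for mirror::Object, SirtRef, and the safepoint reached inside RequestConcurrentGC. It shows why a raw local must be re-read from a GC-visible handle after any call that can suspend the thread.

    // hazard_sketch.cc: restoring a local after a (mock) moving-GC safepoint.
    // All names are hypothetical; this mimics SirtRef's role, it is not ART.
    #include <cstdio>
    #include <vector>

    struct Object { int payload; };

    // A handle table the mock GC knows about: registered slots are patched
    // when their referents move, unlike raw C++ pointers held in locals.
    struct HandleScope {
      std::vector<Object**> roots_;
      void Register(Object** slot) { roots_.push_back(slot); }
    };

    // Mock moving GC: relocates old_obj and patches only the registered
    // roots, mimicking what a moving collector does at a safepoint.
    Object* Relocate(HandleScope& scope, Object* old_obj) {
      Object* moved = new Object{old_obj->payload};
      for (Object** slot : scope.roots_) {
        if (*slot == old_obj) *slot = moved;  // patch registered roots
      }
      delete old_obj;  // old address is now invalid; raw copies dangle
      return moved;
    }

    int main() {
      HandleScope scope;
      Object* obj = new Object{42};

      // Mirror of the fix: park obj in a GC-visible handle before the
      // safepoint, then reload it, just like "*obj = ref.get()" in the patch.
      Object* handle = obj;
      scope.Register(&handle);
      Relocate(scope, obj);  // safepoint: obj's old address is now stale
      obj = handle;          // restore obj in case it moved

      std::printf("payload = %d\n", obj->payload);  // safe: prints 42
      delete obj;
      return 0;
    }

Had main dereferenced obj directly after Relocate, it would have read freed memory; reloading from the registered handle is exactly the "restore obj" step this commit adds.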
Diffstat (limited to 'runtime/gc')
-rw-r--r--  runtime/gc/heap-inl.h | 8
-rw-r--r--  runtime/gc/heap.h     | 3
2 files changed, 7 insertions(+), 4 deletions(-)
diff --git a/runtime/gc/heap-inl.h b/runtime/gc/heap-inl.h
index 89ded0b..3d2f7ea 100644
--- a/runtime/gc/heap-inl.h
+++ b/runtime/gc/heap-inl.h
@@ -107,7 +107,7 @@ inline mirror::Object* Heap::AllocObjectWithAllocator(Thread* self, mirror::Clas
// optimized out. And for the other allocators, AllocatorMayHaveConcurrentGC is a constant since
// the allocator_type should be constant propagated.
if (AllocatorMayHaveConcurrentGC(allocator) && concurrent_gc_) {
- CheckConcurrentGC(self, new_num_bytes_allocated, obj);
+ CheckConcurrentGC(self, new_num_bytes_allocated, &obj);
}
VerifyObject(obj);
self->VerifyStack();
@@ -280,11 +280,13 @@ inline bool Heap::IsOutOfMemoryOnAllocation(AllocatorType allocator_type, size_t
}
inline void Heap::CheckConcurrentGC(Thread* self, size_t new_num_bytes_allocated,
- mirror::Object* obj) {
+ mirror::Object** obj) {
if (UNLIKELY(new_num_bytes_allocated >= concurrent_start_bytes_)) {
// The SirtRef is necessary since the calls in RequestConcurrentGC are a safepoint.
- SirtRef<mirror::Object> ref(self, obj);
+ SirtRef<mirror::Object> ref(self, *obj);
RequestConcurrentGC(self);
+ // Restore obj in case it moved.
+ *obj = ref.get();
}
}
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index a90af27..92da7e9 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -575,7 +575,8 @@ class Heap {
bool ShouldAllocLargeObject(mirror::Class* c, size_t byte_count) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
ALWAYS_INLINE void CheckConcurrentGC(Thread* self, size_t new_num_bytes_allocated,
- mirror::Object* obj);
+ mirror::Object** obj)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// We don't force this to be inlined since it is a slow path.
template <bool kInstrumented, typename PreFenceVisitor>
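The heap.h hunk also adds SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) to the declaration, since CheckConcurrentGC now handles an object reference across a suspend point. As a hedged aside, the sketch below shows the Clang thread-safety-annotation mechanism such macros are typically built on (compile with clang -Wthread-safety); the Mutex class and all names here are simplified stand-ins, not ART's Locks machinery.

    // annotations_sketch.cc: a simplified analogue of SHARED_LOCKS_REQUIRED.
    struct __attribute__((capability("mutex"))) Mutex {
      void ReaderLock() __attribute__((acquire_shared_capability())) {}
      void ReaderUnlock() __attribute__((release_shared_capability())) {}
    };

    Mutex mutator_lock;

    // Analogue of SHARED_LOCKS_REQUIRED(Locks::mutator_lock_): callers must
    // hold mutator_lock at least in shared (reader) mode.
    void TouchesHeapObjects()
        __attribute__((requires_shared_capability(mutator_lock))) {}

    void Caller() {
      mutator_lock.ReaderLock();
      TouchesHeapObjects();  // ok: shared capability is held here
      mutator_lock.ReaderUnlock();
      // Calling TouchesHeapObjects() here would trigger -Wthread-safety.
    }

With the annotation in place, a call to CheckConcurrentGC from a path that does not hold the mutator lock becomes a compile-time warning rather than a latent runtime bug.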