summaryrefslogtreecommitdiffstats
path: root/runtime/gc/heap-inl.h
diff options
context:
space:
mode:
authorMathieu Chartier <mathieuc@google.com>2014-07-14 10:16:05 -0700
committerMathieu Chartier <mathieuc@google.com>2014-07-14 10:58:09 -0700
commitfd22d5bada15d95b5ea8ab5a4dda39077e1a54ee (patch)
tree3dc5aaa74f1272c357d339c3c61d7e1ed0aececf /runtime/gc/heap-inl.h
parente8b8086388159be5fecb23ae6185e70f3dfb5da6 (diff)
downloadart-fd22d5bada15d95b5ea8ab5a4dda39077e1a54ee.zip
art-fd22d5bada15d95b5ea8ab5a4dda39077e1a54ee.tar.gz
art-fd22d5bada15d95b5ea8ab5a4dda39077e1a54ee.tar.bz2
Fix infinite loop when calling SetStatus after OOM.
There was a problem where we would call SetStatus when we had an OOM error. This results in attempting to find the ExceptionInInitializer class, which, if not loaded, does more allocations, resulting in an infinite loop. Also some cleanup addressing other comments. Bug: 16082350 Change-Id: I5c1e638a03ddf700ab4e9cad9a3077d2b1b26c43
Diffstat (limited to 'runtime/gc/heap-inl.h')
-rw-r--r--runtime/gc/heap-inl.h10
1 file changed, 6 insertions, 4 deletions
diff --git a/runtime/gc/heap-inl.h b/runtime/gc/heap-inl.h
index 419af30..7d3fd2d 100644
--- a/runtime/gc/heap-inl.h
+++ b/runtime/gc/heap-inl.h
@@ -63,6 +63,7 @@ inline mirror::Object* Heap::AllocObjectWithAllocator(Thread* self, mirror::Clas
// If we have a thread local allocation we don't need to update bytes allocated.
if (allocator == kAllocatorTypeTLAB && byte_count <= self->TlabSize()) {
obj = self->AllocTlab(byte_count);
+ DCHECK(obj != nullptr) << "AllocTlab can't fail";
obj->SetClass(klass);
if (kUseBakerOrBrooksReadBarrier) {
if (kUseBrooksReadBarrier) {
@@ -71,7 +72,8 @@ inline mirror::Object* Heap::AllocObjectWithAllocator(Thread* self, mirror::Clas
obj->AssertReadBarrierPointer();
}
bytes_allocated = byte_count;
- pre_fence_visitor(obj, bytes_allocated);
+ usable_size = bytes_allocated;
+ pre_fence_visitor(obj, usable_size);
QuasiAtomic::ThreadFenceForConstructor();
} else {
obj = TryToAllocate<kInstrumented, false>(self, allocator, byte_count, &bytes_allocated,
@@ -111,13 +113,13 @@ inline mirror::Object* Heap::AllocObjectWithAllocator(Thread* self, mirror::Clas
WriteBarrierField(obj, mirror::Object::ClassOffset(), klass);
}
pre_fence_visitor(obj, usable_size);
- if (kIsDebugBuild && Runtime::Current()->IsStarted()) {
- CHECK_LE(obj->SizeOf(), usable_size);
- }
new_num_bytes_allocated =
static_cast<size_t>(num_bytes_allocated_.FetchAndAddSequentiallyConsistent(bytes_allocated))
+ bytes_allocated;
}
+ if (kIsDebugBuild && Runtime::Current()->IsStarted()) {
+ CHECK_LE(obj->SizeOf(), usable_size);
+ }
// TODO: Deprecate.
if (kInstrumented) {
if (Runtime::Current()->HasStatsEnabled()) {