diff options
Diffstat (limited to 'runtime/gc/heap-inl.h')
-rw-r--r-- | runtime/gc/heap-inl.h | 18 |
1 file changed, 12 insertions, 6 deletions
diff --git a/runtime/gc/heap-inl.h b/runtime/gc/heap-inl.h index 25f20d6..a06f272 100644 --- a/runtime/gc/heap-inl.h +++ b/runtime/gc/heap-inl.h @@ -65,7 +65,7 @@ inline mirror::Object* Heap::AllocObjectWithAllocator(Thread* self, mirror::Clas bool after_is_current_allocator = allocator == GetCurrentAllocator(); if (is_current_allocator && !after_is_current_allocator) { // If the allocator changed, we need to restart the allocation. - return AllocObject<kInstrumented>(self, klass, byte_count); + return AllocObject<kInstrumented>(self, klass, byte_count, pre_fence_visitor); } return nullptr; } @@ -111,7 +111,7 @@ inline mirror::Object* Heap::AllocObjectWithAllocator(Thread* self, mirror::Clas DCHECK(!Runtime::Current()->HasStatsEnabled()); } if (AllocatorHasAllocationStack(allocator)) { - PushOnAllocationStack(self, obj); + PushOnAllocationStack(self, &obj); } if (kInstrumented) { if (Dbg::IsAllocTrackingEnabled()) { @@ -135,28 +135,34 @@ inline mirror::Object* Heap::AllocObjectWithAllocator(Thread* self, mirror::Clas // The size of a thread-local allocation stack in the number of references. static constexpr size_t kThreadLocalAllocationStackSize = 128; -inline void Heap::PushOnAllocationStack(Thread* self, mirror::Object* obj) { +inline void Heap::PushOnAllocationStack(Thread* self, mirror::Object** obj) { if (kUseThreadLocalAllocationStack) { - bool success = self->PushOnThreadLocalAllocationStack(obj); + bool success = self->PushOnThreadLocalAllocationStack(*obj); if (UNLIKELY(!success)) { // Slow path. Allocate a new thread-local allocation stack. mirror::Object** start_address; mirror::Object** end_address; while (!allocation_stack_->AtomicBumpBack(kThreadLocalAllocationStackSize, &start_address, &end_address)) { + // Disable verify object in SirtRef as obj isn't on the alloc stack yet. 
+ SirtRefNoVerify<mirror::Object> ref(self, *obj); CollectGarbageInternal(collector::kGcTypeSticky, kGcCauseForAlloc, false); + *obj = ref.get(); } self->SetThreadLocalAllocationStack(start_address, end_address); // Retry on the new thread-local allocation stack. - success = self->PushOnThreadLocalAllocationStack(obj); + success = self->PushOnThreadLocalAllocationStack(*obj); // Must succeed. CHECK(success); } } else { // This is safe to do since the GC will never free objects which are neither in the allocation // stack or the live bitmap. - while (!allocation_stack_->AtomicPushBack(obj)) { + while (!allocation_stack_->AtomicPushBack(*obj)) { + // Disable verify object in SirtRef as obj isn't on the alloc stack yet. + SirtRefNoVerify<mirror::Object> ref(self, *obj); CollectGarbageInternal(collector::kGcTypeSticky, kGcCauseForAlloc, false); + *obj = ref.get(); } } }