author     Mathieu Chartier <mathieuc@google.com>   2014-01-09 11:23:53 -0800
committer  Mathieu Chartier <mathieuc@google.com>   2014-03-05 12:44:22 -0800
commit     661974a5561e5ccdfbac8cb5d8df8b7e6f3483b8
tree       02e428694277d85a98505f8e814aba378fc7651c /runtime/gc
parent     3cd52df3d740f8a656233b199dfcaab165f415ff
Fix valgrind gtests and memory leaks.

All tests pass other than image_test, which passes if some bad reads are disabled (buzbee is working on this).

Change-Id: Ifd6b6e3aed0bc867703b6e818353a9f296609422
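For context, the heap.cc change below keys heap behavior off whether the process is running under Valgrind. A minimal standalone sketch of that detection idiom (a hypothetical program, not ART code):

// RUNNING_ON_VALGRIND expands to 0 when running natively and to a non-zero
// value under Valgrind, so the explicit "> 0" makes the conversion to bool
// intentional rather than implicit.
#include <valgrind/valgrind.h>
#include <cstdio>

int main() {
  const bool running_on_valgrind = RUNNING_ON_VALGRIND > 0;
  std::printf("running under valgrind: %s\n", running_on_valgrind ? "yes" : "no");
  return 0;
}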
Diffstat (limited to 'runtime/gc')
-rw-r--r--   runtime/gc/allocator/rosalloc.cc                6
-rw-r--r--   runtime/gc/allocator/rosalloc.h                 1
-rw-r--r--   runtime/gc/heap.cc                              2
-rw-r--r--   runtime/gc/space/dlmalloc_space.cc             13
-rw-r--r--   runtime/gc/space/malloc_space.h                 2
-rw-r--r--   runtime/gc/space/rosalloc_space-inl.h           2
-rw-r--r--   runtime/gc/space/rosalloc_space.cc             11
-rw-r--r--   runtime/gc/space/rosalloc_space.h               6
-rw-r--r--   runtime/gc/space/valgrind_malloc_space-inl.h   21
-rw-r--r--   runtime/gc/space/valgrind_malloc_space.h        3
10 files changed, 35 insertions, 32 deletions
diff --git a/runtime/gc/allocator/rosalloc.cc b/runtime/gc/allocator/rosalloc.cc
index e86bee6..e13bd71 100644
--- a/runtime/gc/allocator/rosalloc.cc
+++ b/runtime/gc/allocator/rosalloc.cc
@@ -93,6 +93,12 @@ RosAlloc::RosAlloc(void* base, size_t capacity, size_t max_capacity,
}
}
+RosAlloc::~RosAlloc() {
+ for (size_t i = 0; i < kNumOfSizeBrackets; i++) {
+ delete size_bracket_locks_[i];
+ }
+}
+
void* RosAlloc::AllocPages(Thread* self, size_t num_pages, byte page_map_type) {
lock_.AssertHeld(self);
DCHECK(page_map_type == kPageMapRun || page_map_type == kPageMapLargeObject);
diff --git a/runtime/gc/allocator/rosalloc.h b/runtime/gc/allocator/rosalloc.h
index 4b0dd79..738d917 100644
--- a/runtime/gc/allocator/rosalloc.h
+++ b/runtime/gc/allocator/rosalloc.h
@@ -515,6 +515,7 @@ class RosAlloc {
RosAlloc(void* base, size_t capacity, size_t max_capacity,
PageReleaseMode page_release_mode,
size_t page_release_size_threshold = kDefaultPageReleaseSizeThreshold);
+ ~RosAlloc();
void* Alloc(Thread* self, size_t size, size_t* bytes_allocated)
LOCKS_EXCLUDED(lock_);
void Free(Thread* self, void* ptr)
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index b97b9ec..87ee21b 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -152,7 +152,7 @@ Heap::Heap(size_t initial_size, size_t growth_limit, size_t min_free, size_t max
total_allocation_time_(0),
verify_object_mode_(kVerifyObjectModeDisabled),
disable_moving_gc_count_(0),
- running_on_valgrind_(RUNNING_ON_VALGRIND),
+ running_on_valgrind_(RUNNING_ON_VALGRIND > 0),
use_tlab_(use_tlab) {
if (VLOG_IS_ON(heap) || VLOG_IS_ON(startup)) {
LOG(INFO) << "Heap() entering";
diff --git a/runtime/gc/space/dlmalloc_space.cc b/runtime/gc/space/dlmalloc_space.cc
index b591486..0597422 100644
--- a/runtime/gc/space/dlmalloc_space.cc
+++ b/runtime/gc/space/dlmalloc_space.cc
@@ -43,9 +43,8 @@ DlMallocSpace::DlMallocSpace(const std::string& name, MemMap* mem_map, void* msp
}
DlMallocSpace* DlMallocSpace::CreateFromMemMap(MemMap* mem_map, const std::string& name,
- size_t starting_size,
- size_t initial_size, size_t growth_limit,
- size_t capacity) {
+ size_t starting_size, size_t initial_size,
+ size_t growth_limit, size_t capacity) {
DCHECK(mem_map != nullptr);
void* mspace = CreateMspace(mem_map->Begin(), starting_size, initial_size);
if (mspace == nullptr) {
@@ -133,12 +132,12 @@ mirror::Object* DlMallocSpace::AllocWithGrowth(Thread* self, size_t num_bytes,
size_t footprint = mspace_footprint(mspace_);
mspace_set_footprint_limit(mspace_, footprint);
}
- if (result != NULL) {
+ if (result != nullptr) {
// Zero freshly allocated memory, done while not holding the space's lock.
memset(result, 0, num_bytes);
+ // Check that the result is contained in the space.
+ CHECK(!kDebugSpaces || Contains(result));
}
- // Return the new allocation or NULL.
- CHECK(!kDebugSpaces || result == NULL || Contains(result));
return result;
}
@@ -151,7 +150,7 @@ MallocSpace* DlMallocSpace::CreateInstance(const std::string& name, MemMap* mem_
size_t DlMallocSpace::Free(Thread* self, mirror::Object* ptr) {
MutexLock mu(self, lock_);
if (kDebugSpaces) {
- CHECK(ptr != NULL);
+ CHECK(ptr != nullptr);
CHECK(Contains(ptr)) << "Free (" << ptr << ") not in bounds of heap " << *this;
}
const size_t bytes_freed = AllocationSizeNonvirtual(ptr, nullptr);
diff --git a/runtime/gc/space/malloc_space.h b/runtime/gc/space/malloc_space.h
index 30c7c45..fbcee5f 100644
--- a/runtime/gc/space/malloc_space.h
+++ b/runtime/gc/space/malloc_space.h
@@ -139,7 +139,7 @@ class MallocSpace : public ContinuousMemMapAllocSpace {
virtual void* CreateAllocator(void* base, size_t morecore_start, size_t initial_size,
size_t maximum_size, bool low_memory_mode) = 0;
- void RegisterRecentFree(mirror::Object* ptr)
+ virtual void RegisterRecentFree(mirror::Object* ptr)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
EXCLUSIVE_LOCKS_REQUIRED(lock_);
diff --git a/runtime/gc/space/rosalloc_space-inl.h b/runtime/gc/space/rosalloc_space-inl.h
index 2627c85..d270885 100644
--- a/runtime/gc/space/rosalloc_space-inl.h
+++ b/runtime/gc/space/rosalloc_space-inl.h
@@ -50,7 +50,7 @@ inline mirror::Object* RosAllocSpace::AllocCommon(Thread* self, size_t num_bytes
size_t* bytes_allocated, size_t* usable_size) {
size_t rosalloc_size = 0;
mirror::Object* result = reinterpret_cast<mirror::Object*>(
- rosalloc_for_alloc_->Alloc(self, num_bytes, &rosalloc_size));
+ rosalloc_->Alloc(self, num_bytes, &rosalloc_size));
if (LIKELY(result != NULL)) {
if (kDebugSpaces) {
CHECK(Contains(result)) << "Allocation (" << reinterpret_cast<void*>(result)
diff --git a/runtime/gc/space/rosalloc_space.cc b/runtime/gc/space/rosalloc_space.cc
index b13ac3d..c4ce94d 100644
--- a/runtime/gc/space/rosalloc_space.cc
+++ b/runtime/gc/space/rosalloc_space.cc
@@ -39,8 +39,7 @@ template class ValgrindMallocSpace<RosAllocSpace, allocator::RosAlloc*>;
RosAllocSpace::RosAllocSpace(const std::string& name, MemMap* mem_map,
art::gc::allocator::RosAlloc* rosalloc, byte* begin, byte* end,
byte* limit, size_t growth_limit)
- : MallocSpace(name, mem_map, begin, end, limit, growth_limit), rosalloc_(rosalloc),
- rosalloc_for_alloc_(rosalloc) {
+ : MallocSpace(name, mem_map, begin, end, limit, growth_limit), rosalloc_(rosalloc) {
CHECK(rosalloc != NULL);
}
@@ -64,7 +63,9 @@ RosAllocSpace* RosAllocSpace::CreateFromMemMap(MemMap* mem_map, const std::strin
// Everything is set so record in immutable structure and leave
byte* begin = mem_map->Begin();
- if (RUNNING_ON_VALGRIND > 0) {
+ // TODO: Fix RosAllocSpace to support valgrind. There are currently some issues with
+ // AllocationSize caused by redzones. b/12944686
+ if (false && RUNNING_ON_VALGRIND > 0) {
return new ValgrindMallocSpace<RosAllocSpace, allocator::RosAlloc*>(
name, mem_map, rosalloc, begin, end, begin + capacity, growth_limit, initial_size);
} else {
@@ -72,6 +73,10 @@ RosAllocSpace* RosAllocSpace::CreateFromMemMap(MemMap* mem_map, const std::strin
}
}
+RosAllocSpace::~RosAllocSpace() {
+ delete rosalloc_;
+}
+
RosAllocSpace* RosAllocSpace::Create(const std::string& name, size_t initial_size,
size_t growth_limit, size_t capacity, byte* requested_begin,
bool low_memory_mode) {
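The TODO above, and the ValgrindMallocSpace changes further down, concern red zones: when a space is wrapped for Valgrind, each allocation is padded on both sides with inaccessible guard bytes, so AllocationSize and Free must agree on whether those bytes are counted. A minimal sketch of that wrapping pattern, using plain malloc and an illustrative kRedZoneBytes rather than ART's allocators:

#include <cstddef>
#include <cstdlib>
#include <valgrind/memcheck.h>

constexpr size_t kRedZoneBytes = 8;  // Illustrative; ART uses its own constant.

// Allocate num_bytes with a poisoned red zone on each side. Only the payload
// size is reported back through usable_size.
void* AllocWithRedZones(size_t num_bytes, size_t* usable_size) {
  char* with_rdz = static_cast<char*>(malloc(num_bytes + 2 * kRedZoneBytes));
  if (with_rdz == nullptr) {
    return nullptr;
  }
  if (usable_size != nullptr) {
    *usable_size = num_bytes;
  }
  char* result = with_rdz + kRedZoneBytes;
  // Poison both red zones so stray reads or writes are reported by Valgrind.
  VALGRIND_MAKE_MEM_NOACCESS(with_rdz, kRedZoneBytes);
  VALGRIND_MAKE_MEM_NOACCESS(result + num_bytes, kRedZoneBytes);
  return result;
}

// Free an allocation made by AllocWithRedZones; usable_size is the payload
// size reported at allocation time.
void FreeWithRedZones(void* ptr, size_t usable_size) {
  char* with_rdz = static_cast<char*>(ptr) - kRedZoneBytes;
  // Unpoison the whole block before returning it to the underlying allocator.
  VALGRIND_MAKE_MEM_UNDEFINED(with_rdz, usable_size + 2 * kRedZoneBytes);
  free(with_rdz);
}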
diff --git a/runtime/gc/space/rosalloc_space.h b/runtime/gc/space/rosalloc_space.h
index 9f756aa..9b9adf8 100644
--- a/runtime/gc/space/rosalloc_space.h
+++ b/runtime/gc/space/rosalloc_space.h
@@ -105,6 +105,8 @@ class RosAllocSpace : public MallocSpace {
rosalloc_->Verify();
}
+ virtual ~RosAllocSpace();
+
protected:
RosAllocSpace(const std::string& name, MemMap* mem_map, allocator::RosAlloc* rosalloc,
byte* begin, byte* end, byte* limit, size_t growth_limit);
@@ -127,10 +129,6 @@ class RosAllocSpace : public MallocSpace {
// Underlying rosalloc.
allocator::RosAlloc* const rosalloc_;
- // The rosalloc pointer used for allocation. Equal to rosalloc_ or nullptr after
- // InvalidateAllocator() is called.
- allocator::RosAlloc* rosalloc_for_alloc_;
-
friend class collector::MarkSweep;
DISALLOW_COPY_AND_ASSIGN(RosAllocSpace);
diff --git a/runtime/gc/space/valgrind_malloc_space-inl.h b/runtime/gc/space/valgrind_malloc_space-inl.h
index 4b0c8e3..ed97e60 100644
--- a/runtime/gc/space/valgrind_malloc_space-inl.h
+++ b/runtime/gc/space/valgrind_malloc_space-inl.h
@@ -38,9 +38,6 @@ mirror::Object* ValgrindMallocSpace<S, A>::AllocWithGrowth(Thread* self, size_t
if (obj_with_rdz == nullptr) {
return nullptr;
}
- if (usable_size != nullptr) {
- *usable_size -= 2 * kValgrindRedZoneBytes;
- }
mirror::Object* result = reinterpret_cast<mirror::Object*>(
reinterpret_cast<byte*>(obj_with_rdz) + kValgrindRedZoneBytes);
// Make redzones as no access.
@@ -58,9 +55,6 @@ mirror::Object* ValgrindMallocSpace<S, A>::Alloc(Thread* self, size_t num_bytes,
if (obj_with_rdz == nullptr) {
return nullptr;
}
- if (usable_size != nullptr) {
- *usable_size -= 2 * kValgrindRedZoneBytes;
- }
mirror::Object* result = reinterpret_cast<mirror::Object*>(
reinterpret_cast<byte*>(obj_with_rdz) + kValgrindRedZoneBytes);
// Make redzones as no access.
@@ -73,10 +67,7 @@ template <typename S, typename A>
size_t ValgrindMallocSpace<S, A>::AllocationSize(mirror::Object* obj, size_t* usable_size) {
size_t result = S::AllocationSize(reinterpret_cast<mirror::Object*>(
reinterpret_cast<byte*>(obj) - kValgrindRedZoneBytes), usable_size);
- if (usable_size != nullptr) {
- *usable_size -= 2 * kValgrindRedZoneBytes;
- }
- return result - 2 * kValgrindRedZoneBytes;
+ return result;
}
template <typename S, typename A>
@@ -84,11 +75,10 @@ size_t ValgrindMallocSpace<S, A>::Free(Thread* self, mirror::Object* ptr) {
void* obj_after_rdz = reinterpret_cast<void*>(ptr);
void* obj_with_rdz = reinterpret_cast<byte*>(obj_after_rdz) - kValgrindRedZoneBytes;
// Make redzones undefined.
- size_t allocation_size =
- AllocationSize(reinterpret_cast<mirror::Object*>(obj_with_rdz), nullptr);
- VALGRIND_MAKE_MEM_UNDEFINED(obj_with_rdz, allocation_size);
- size_t freed = S::Free(self, reinterpret_cast<mirror::Object*>(obj_with_rdz));
- return freed - 2 * kValgrindRedZoneBytes;
+ size_t usable_size = 0;
+ AllocationSize(ptr, &usable_size);
+ VALGRIND_MAKE_MEM_UNDEFINED(obj_with_rdz, usable_size);
+ return S::Free(self, reinterpret_cast<mirror::Object*>(obj_with_rdz));
}
template <typename S, typename A>
@@ -96,6 +86,7 @@ size_t ValgrindMallocSpace<S, A>::FreeList(Thread* self, size_t num_ptrs, mirror
size_t freed = 0;
for (size_t i = 0; i < num_ptrs; i++) {
freed += Free(self, ptrs[i]);
+ ptrs[i] = nullptr;
}
return freed;
}
diff --git a/runtime/gc/space/valgrind_malloc_space.h b/runtime/gc/space/valgrind_malloc_space.h
index 8d00b30..6b755c4 100644
--- a/runtime/gc/space/valgrind_malloc_space.h
+++ b/runtime/gc/space/valgrind_malloc_space.h
@@ -43,6 +43,9 @@ class ValgrindMallocSpace FINAL : public BaseMallocSpaceType {
size_t FreeList(Thread* self, size_t num_ptrs, mirror::Object** ptrs) OVERRIDE
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ void RegisterRecentFree(mirror::Object* ptr) OVERRIDE {
+ }
+
ValgrindMallocSpace(const std::string& name, MemMap* mem_map, AllocatorType allocator,
byte* begin, byte* end, byte* limit, size_t growth_limit,
size_t initial_size);