author     Hiroshi Yamauchi <yamauchi@google.com>                     2014-03-11 21:47:18 +0000
committer  Gerrit Code Review <noreply-gerritcodereview@google.com>   2014-03-11 21:47:18 +0000
commit     d46a0b1c6bc5c1058085063f9ada94d5d6708bdc (patch)
tree       4a5b80f095a507f852e4d39b256359e43c3eba7e
parent     f1375cd6367fba8b192b7628769931853c25e942 (diff)
parent     5ccd498d4aa450b0381344724b072a932709a59a (diff)
Merge "Put the post zygote non-moving space next to the malloc space."
-rw-r--r--  runtime/gc/heap-inl.h                1
-rw-r--r--  runtime/gc/heap.cc                  55
-rw-r--r--  runtime/gc/heap.h                    1
-rw-r--r--  runtime/gc/space/rosalloc_space.cc   2
4 files changed, 44 insertions, 15 deletions
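
In the zygote, the merged change reserves the post zygote non-moving space directly behind the malloc space inside a single mapping, so the two spaces are adjacent in the address space. A minimal sketch of that adjacency property, using a hypothetical Region type rather than ART's space classes:

    // Hypothetical illustration only, not ART's space types: after this change
    // the post zygote non-moving space begins exactly where the malloc space's
    // reserved capacity ends.
    #include <cstdint>

    struct Region {
      uint8_t* begin;
      uint8_t* limit;  // one past the last reserved byte
    };

    inline bool AreAdjacent(const Region& malloc_space, const Region& non_moving_space) {
      return non_moving_space.begin == malloc_space.limit;
    }
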
diff --git a/runtime/gc/heap-inl.h b/runtime/gc/heap-inl.h
index b80e72e..533e5df 100644
--- a/runtime/gc/heap-inl.h
+++ b/runtime/gc/heap-inl.h
@@ -218,6 +218,7 @@ inline mirror::Object* Heap::TryToAllocate(Thread* self, AllocatorType allocator
ret = self->AllocTlab(alloc_size);
DCHECK(ret != nullptr);
*bytes_allocated = alloc_size;
+ *usable_size = alloc_size;
break;
}
default: {
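
The heap-inl.h hunk above makes the TLAB fast path report usable_size alongside bytes_allocated. A rough sketch of such a thread-local bump-pointer allocation, with hypothetical types rather than ART's Thread::AllocTlab:

    // Sketch of a thread-local bump-pointer (TLAB-style) allocation that
    // reports both out-parameters, mirroring the heap-inl.h fix above.
    // Hypothetical types; this is not ART's Thread::AllocTlab.
    #include <cstddef>
    #include <cstdint>

    struct Tlab {
      uint8_t* pos;
      uint8_t* end;
    };

    inline void* TlabAllocate(Tlab* tlab, size_t alloc_size,
                              size_t* bytes_allocated, size_t* usable_size) {
      if (static_cast<size_t>(tlab->end - tlab->pos) < alloc_size) {
        return nullptr;  // TLAB exhausted; the caller falls back to a slower path.
      }
      uint8_t* ret = tlab->pos;
      tlab->pos += alloc_size;
      // A bump allocation hands out exactly alloc_size bytes, so the usable
      // size equals the bytes accounted for -- the invariant the patch
      // restores for the TLAB allocator.
      *bytes_allocated = alloc_size;
      *usable_size = alloc_size;
      return ret;
    }
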
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 87b4e60..d962f3c 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -161,9 +161,10 @@ Heap::Heap(size_t initial_size, size_t growth_limit, size_t min_free, size_t max
if (VLOG_IS_ON(heap) || VLOG_IS_ON(startup)) {
LOG(INFO) << "Heap() entering";
}
+ const bool is_zygote = Runtime::Current()->IsZygote();
// If we aren't the zygote, switch to the default non zygote allocator. This may update the
// entrypoints.
- if (!Runtime::Current()->IsZygote()) {
+ if (!is_zygote) {
desired_collector_type_ = post_zygote_collector_type_;
large_object_threshold_ = kDefaultLargeObjectThreshold;
} else {
@@ -192,15 +193,44 @@ Heap::Heap(size_t initial_size, size_t growth_limit, size_t min_free, size_t max
requested_alloc_space_begin = AlignUp(oat_file_end_addr, kPageSize);
}
}
- const char* name = Runtime::Current()->IsZygote() ? "zygote space" : "alloc space";
+ MemMap* malloc_space_mem_map = nullptr;
+ const char* malloc_space_name = is_zygote ? "zygote space" : "alloc space";
+ if (is_zygote) {
+ // Allocate a single mem map that is split into the malloc space
+ // and the post zygote non-moving space to put them adjacent.
+ size_t post_zygote_non_moving_space_size = 64 * MB;
+ size_t non_moving_spaces_size = capacity + post_zygote_non_moving_space_size;
+ std::string error_str;
+ malloc_space_mem_map = MemMap::MapAnonymous(malloc_space_name, requested_alloc_space_begin,
+ non_moving_spaces_size, PROT_READ | PROT_WRITE,
+ true, &error_str);
+ CHECK(malloc_space_mem_map != nullptr) << error_str;
+ post_zygote_non_moving_space_mem_map_.reset(malloc_space_mem_map->RemapAtEnd(
+ malloc_space_mem_map->Begin() + capacity, "post zygote non-moving space",
+ PROT_READ | PROT_WRITE, &error_str));
+ CHECK(post_zygote_non_moving_space_mem_map_.get() != nullptr) << error_str;
+ VLOG(heap) << "malloc space mem map : " << malloc_space_mem_map;
+ VLOG(heap) << "post zygote non-moving space mem map : "
+ << post_zygote_non_moving_space_mem_map_.get();
+ } else {
+ // Allocate a mem map for the malloc space.
+ std::string error_str;
+ malloc_space_mem_map = MemMap::MapAnonymous(malloc_space_name, requested_alloc_space_begin,
+ capacity, PROT_READ | PROT_WRITE, true, &error_str);
+ CHECK(malloc_space_mem_map != nullptr) << error_str;
+ VLOG(heap) << "malloc space mem map : " << malloc_space_mem_map;
+ }
+ CHECK(malloc_space_mem_map != nullptr);
space::MallocSpace* malloc_space;
if (kUseRosAlloc) {
- malloc_space = space::RosAllocSpace::Create(name, initial_size, growth_limit, capacity,
- requested_alloc_space_begin, low_memory_mode_);
+ malloc_space = space::RosAllocSpace::CreateFromMemMap(malloc_space_mem_map, malloc_space_name,
+ kDefaultStartingSize, initial_size,
+ growth_limit, capacity, low_memory_mode_);
CHECK(malloc_space != nullptr) << "Failed to create rosalloc space";
} else {
- malloc_space = space::DlMallocSpace::Create(name, initial_size, growth_limit, capacity,
- requested_alloc_space_begin);
+ malloc_space = space::DlMallocSpace::CreateFromMemMap(malloc_space_mem_map, malloc_space_name,
+ kDefaultStartingSize, initial_size,
+ growth_limit, capacity);
CHECK(malloc_space != nullptr) << "Failed to create dlmalloc space";
}
VLOG(heap) << "malloc_space : " << malloc_space;
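
In the zygote branch above, one anonymous mapping of capacity plus 64 MB is reserved and MemMap::RemapAtEnd then carves the tail off as the post zygote non-moving space, keeping the two regions adjacent. A sketch of the same reserve-then-split idea using plain POSIX mmap (sizes are assumptions; this is not ART's MemMap implementation):

    // Sketch of the reserve-once-then-split technique, with raw POSIX mmap
    // standing in for MemMap::MapAnonymous/RemapAtEnd. Sizes and names are
    // assumptions for illustration only.
    #include <sys/mman.h>
    #include <cassert>
    #include <cstddef>

    constexpr size_t kMB = 1024 * 1024;

    int main() {
      const size_t capacity = 256 * kMB;   // stand-in for the malloc space capacity
      const size_t tail_size = 64 * kMB;   // post zygote non-moving space

      // Single reservation covering both the malloc space and the tail.
      void* raw = mmap(nullptr, capacity + tail_size, PROT_READ | PROT_WRITE,
                       MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
      assert(raw != MAP_FAILED);
      char* base = static_cast<char*>(raw);

      // Re-map the tail in place as its own anonymous mapping, roughly what
      // RemapAtEnd does when it carves off the "post zygote non-moving space".
      void* tail = mmap(base + capacity, tail_size, PROT_READ | PROT_WRITE,
                        MAP_PRIVATE | MAP_ANONYMOUS | MAP_FIXED, -1, 0);
      assert(tail == static_cast<void*>(base + capacity));

      // The two regions stay adjacent but can now be managed independently.
      munmap(base, capacity);
      munmap(tail, tail_size);
      return 0;
    }
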
@@ -240,12 +270,8 @@ Heap::Heap(size_t initial_size, size_t growth_limit, size_t min_free, size_t max
// Relies on the spaces being sorted.
byte* heap_begin = continuous_spaces_.front()->Begin();
byte* heap_end = continuous_spaces_.back()->Limit();
- if (Runtime::Current()->IsZygote()) {
- std::string error_str;
- post_zygote_non_moving_space_mem_map_.reset(
- MemMap::MapAnonymous("post zygote non-moving space", nullptr, 64 * MB,
- PROT_READ | PROT_WRITE, true, &error_str));
- CHECK(post_zygote_non_moving_space_mem_map_.get() != nullptr) << error_str;
+ if (is_zygote) {
+ CHECK(post_zygote_non_moving_space_mem_map_.get() != nullptr);
heap_begin = std::min(post_zygote_non_moving_space_mem_map_->Begin(), heap_begin);
heap_end = std::max(post_zygote_non_moving_space_mem_map_->End(), heap_end);
}
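
Because the non-moving mapping now exists by the time the heap bounds are computed, the bounds are just the min/max over the sorted continuous spaces and that reservation, as in the hunk above. A small sketch with hypothetical types:

    // Sketch of deriving the overall heap bounds once the reserved non-moving
    // mapping exists at construction time. Hypothetical types, not ART's
    // space classes; assumes continuous_spaces is non-empty and sorted by
    // address, as the hunk above relies on.
    #include <algorithm>
    #include <cstdint>
    #include <vector>

    struct Range {
      uint8_t* begin;
      uint8_t* end;
    };

    inline Range HeapBounds(const std::vector<Range>& continuous_spaces,
                            const Range& reserved_non_moving) {
      Range bounds{continuous_spaces.front().begin, continuous_spaces.back().end};
      bounds.begin = std::min(reserved_non_moving.begin, bounds.begin);
      bounds.end = std::max(reserved_non_moving.end, bounds.end);
      return bounds;
    }
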
@@ -1370,17 +1396,18 @@ void Heap::TransitionCollector(CollectorType collector_type) {
// TODO: Use mem-map from temp space?
MemMap* mem_map = allocator_mem_map_.release();
CHECK(mem_map != nullptr);
+ size_t starting_size = kDefaultStartingSize;
size_t initial_size = kDefaultInitialSize;
mprotect(mem_map->Begin(), initial_size, PROT_READ | PROT_WRITE);
CHECK(main_space_ == nullptr);
if (kUseRosAlloc) {
main_space_ =
- space::RosAllocSpace::CreateFromMemMap(mem_map, "alloc space", kPageSize,
+ space::RosAllocSpace::CreateFromMemMap(mem_map, "alloc space", starting_size,
initial_size, mem_map->Size(),
mem_map->Size(), low_memory_mode_);
} else {
main_space_ =
- space::DlMallocSpace::CreateFromMemMap(mem_map, "alloc space", kPageSize,
+ space::DlMallocSpace::CreateFromMemMap(mem_map, "alloc space", starting_size,
initial_size, mem_map->Size(),
mem_map->Size());
}
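
TransitionCollector above rebuilds the main space on top of the released reservation, committing only the initial prefix with mprotect before handing the map to CreateFromMemMap with the shared starting_size. A hedged sketch of committing a prefix of an existing reservation with plain POSIX calls:

    // POSIX-only sketch of reusing an existing reservation when rebuilding the
    // main space: commit just the initial prefix and let the space grow inside
    // the already-reserved mapping. Sizes are assumptions; this is not ART's
    // MemMap API.
    #include <sys/mman.h>
    #include <cassert>
    #include <cstddef>

    int main() {
      const size_t capacity = 64 * 1024 * 1024;     // stand-in for the released mem map's size
      const size_t initial_size = 2 * 1024 * 1024;  // kDefaultInitialSize in heap.h

      // Reserve address space without committing it, standing in for the
      // reservation handed back by allocator_mem_map_.release().
      void* base = mmap(nullptr, capacity, PROT_NONE,
                        MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
      assert(base != MAP_FAILED);

      // Make the initial prefix usable, as TransitionCollector does before
      // handing the map to CreateFromMemMap.
      if (mprotect(base, initial_size, PROT_READ | PROT_WRITE) != 0) {
        munmap(base, capacity);
        return 1;
      }

      munmap(base, capacity);
      return 0;
    }
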
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index 3a8739a..797f44c 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -120,6 +120,7 @@ class Heap {
// Primitive arrays larger than this size are put in the large object space.
static constexpr size_t kDefaultLargeObjectThreshold = 3 * kPageSize;
+ static constexpr size_t kDefaultStartingSize = kPageSize;
static constexpr size_t kDefaultInitialSize = 2 * MB;
static constexpr size_t kDefaultMaximumSize = 32 * MB;
static constexpr size_t kDefaultMaxFree = 2 * MB;
diff --git a/runtime/gc/space/rosalloc_space.cc b/runtime/gc/space/rosalloc_space.cc
index c4ce94d..80c7ca7 100644
--- a/runtime/gc/space/rosalloc_space.cc
+++ b/runtime/gc/space/rosalloc_space.cc
@@ -94,7 +94,7 @@ RosAllocSpace* RosAllocSpace::Create(const std::string& name, size_t initial_siz
// Note: making this value large means that large allocations are unlikely to succeed as rosalloc
// will ask for this memory from sys_alloc which will fail as the footprint (this value plus the
// size of the large allocation) will be greater than the footprint limit.
- size_t starting_size = kPageSize;
+ size_t starting_size = Heap::kDefaultStartingSize;
MemMap* mem_map = CreateMemMap(name, starting_size, &initial_size, &growth_limit, &capacity,
requested_begin);
if (mem_map == NULL) {
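
The comment in this hunk is why starting_size stays at a single page (now spelled Heap::kDefaultStartingSize): a large initial footprint plus a large allocation request could exceed the footprint limit. A minimal sketch of that check, using a hypothetical helper rather than rosalloc's sys_alloc path:

    // Sketch of the footprint check the comment above alludes to: keeping the
    // starting footprint at one page leaves headroom under the footprint limit
    // for a large allocation to be satisfied by growing the space. Hypothetical
    // helper, not rosalloc's sys_alloc path.
    #include <cstddef>

    inline bool CanGrowFor(size_t current_footprint, size_t allocation_size,
                           size_t footprint_limit) {
      // A large starting footprint plus the request can exceed the limit and
      // make the allocation fail even though the space is nearly empty.
      return current_footprint + allocation_size <= footprint_limit;
    }
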