Diffstat (limited to 'runtime/gc/heap.cc')
 runtime/gc/heap.cc | 36 +++++++++++++++++++++++-----------
 1 file changed, 25 insertions(+), 11 deletions(-)
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 07d0455..de7d0b8 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -88,7 +88,8 @@ static constexpr bool kCompactZygote = kMovingCollector;
static constexpr size_t kNonMovingSpaceCapacity = 64 * MB;
Heap::Heap(size_t initial_size, size_t growth_limit, size_t min_free, size_t max_free,
- double target_utilization, size_t capacity, const std::string& image_file_name,
+ double target_utilization, double foreground_heap_growth_multiplier, size_t capacity,
+ const std::string& image_file_name,
CollectorType foreground_collector_type, CollectorType background_collector_type,
size_t parallel_gc_threads, size_t conc_gc_threads, bool low_memory_mode,
size_t long_pause_log_threshold, size_t long_gc_log_threshold,
@@ -154,6 +155,7 @@ Heap::Heap(size_t initial_size, size_t growth_limit, size_t min_free, size_t max
min_free_(min_free),
max_free_(max_free),
target_utilization_(target_utilization),
+ foreground_heap_growth_multiplier_(foreground_heap_growth_multiplier),
total_wait_time_(0),
total_allocation_time_(0),
verify_object_mode_(kVerifyObjectModeDisabled),
@@ -2524,22 +2526,33 @@ collector::GarbageCollector* Heap::FindCollectorByGcType(collector::GcType gc_ty
return nullptr;
}
+double Heap::HeapGrowthMultiplier() const {
+  // Use the default multiplier of 1.0 when we are background (don't care about
+  // pause times) or in low memory mode.
+ if (!CareAboutPauseTimes() || IsLowMemoryMode()) {
+ return 1.0;
+ }
+ return foreground_heap_growth_multiplier_;
+}
+
void Heap::GrowForUtilization(collector::GarbageCollector* collector_ran) {
// We know what our utilization is at this moment.
// This doesn't actually resize any memory. It just lets the heap grow more when necessary.
- const size_t bytes_allocated = GetBytesAllocated();
+ const uint64_t bytes_allocated = GetBytesAllocated();
last_gc_size_ = bytes_allocated;
last_gc_time_ns_ = NanoTime();
- size_t target_size;
+ uint64_t target_size;
collector::GcType gc_type = collector_ran->GetGcType();
if (gc_type != collector::kGcTypeSticky) {
// Grow the heap for non sticky GC.
- target_size = bytes_allocated / GetTargetHeapUtilization();
- if (target_size > bytes_allocated + max_free_) {
- target_size = bytes_allocated + max_free_;
- } else if (target_size < bytes_allocated + min_free_) {
- target_size = bytes_allocated + min_free_;
- }
+ const float multiplier = HeapGrowthMultiplier(); // Use the multiplier to grow more for
+ // foreground.
+ intptr_t delta = bytes_allocated / GetTargetHeapUtilization() - bytes_allocated;
+ CHECK_GE(delta, 0);
+ target_size = bytes_allocated + delta * multiplier;
+    target_size = std::min(target_size,
+                           bytes_allocated + static_cast<uint64_t>(max_free_ * multiplier));
+    target_size = std::max(target_size,
+                           bytes_allocated + static_cast<uint64_t>(min_free_ * multiplier));
native_need_to_run_finalization_ = true;
next_gc_type_ = collector::kGcTypeSticky;
} else {
@@ -2564,7 +2577,7 @@ void Heap::GrowForUtilization(collector::GarbageCollector* collector_ran) {
if (bytes_allocated + max_free_ < max_allowed_footprint_) {
target_size = bytes_allocated + max_free_;
} else {
- target_size = std::max(bytes_allocated, max_allowed_footprint_);
+ target_size = std::max(bytes_allocated, static_cast<uint64_t>(max_allowed_footprint_));
}
}
if (!ignore_max_footprint_) {
@@ -2588,7 +2601,8 @@ void Heap::GrowForUtilization(collector::GarbageCollector* collector_ran) {
// Start a concurrent GC when we get close to the estimated remaining bytes. When the
// allocation rate is very high, remaining_bytes could tell us that we should start a GC
// right away.
- concurrent_start_bytes_ = std::max(max_allowed_footprint_ - remaining_bytes, bytes_allocated);
+      concurrent_start_bytes_ = std::max(max_allowed_footprint_ - remaining_bytes,
+                                         static_cast<size_t>(bytes_allocated));
}
}
}
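
A note on the sizing rule this change introduces: after a non-sticky GC, the heap
target is no longer clamped directly to [bytes_allocated + min_free_,
bytes_allocated + max_free_]. Instead, the headroom above bytes_allocated (the
utilization delta as well as both clamp bounds) is scaled by
HeapGrowthMultiplier(), which returns 1.0 for background or low-memory processes
and foreground_heap_growth_multiplier_ otherwise, so foreground apps grow the
heap more aggressively between GCs. Below is a minimal, self-contained sketch of
that arithmetic; ComputeTargetSize, care_about_pause_times, and low_memory_mode
are hypothetical stand-ins for the Heap fields and accessors in the diff, and
this illustrates the formula rather than ART's actual implementation.

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstdio>

// Sketch of the patched growth math (hypothetical names, not ART code).
uint64_t ComputeTargetSize(uint64_t bytes_allocated, double target_utilization,
                           size_t min_free, size_t max_free,
                           bool care_about_pause_times, bool low_memory_mode,
                           double foreground_multiplier) {
  // Background (or low-memory) processes keep the old behavior: multiplier 1.0.
  const double multiplier =
      (!care_about_pause_times || low_memory_mode) ? 1.0 : foreground_multiplier;
  // Headroom the utilization target alone would add above the live size.
  const int64_t delta = static_cast<int64_t>(bytes_allocated / target_utilization) -
                        static_cast<int64_t>(bytes_allocated);
  uint64_t target_size = bytes_allocated + static_cast<uint64_t>(delta * multiplier);
  // Clamp the headroom to [min_free, max_free], both scaled by the multiplier.
  target_size = std::min(target_size,
                         bytes_allocated + static_cast<uint64_t>(max_free * multiplier));
  target_size = std::max(target_size,
                         bytes_allocated + static_cast<uint64_t>(min_free * multiplier));
  return target_size;
}

int main() {
  const uint64_t kMB = 1024 * 1024;
  // Example: 32 MiB live, 0.5 target utilization, 512 KiB..8 MiB free range.
  printf("background: %llu\n",
         static_cast<unsigned long long>(ComputeTargetSize(
             32 * kMB, 0.5, 512 * 1024, 8 * kMB,
             /*care_about_pause_times=*/false, /*low_memory_mode=*/false, 2.0)));
  printf("foreground: %llu\n",
         static_cast<unsigned long long>(ComputeTargetSize(
             32 * kMB, 0.5, 512 * 1024, 8 * kMB,
             /*care_about_pause_times=*/true, /*low_memory_mode=*/false, 2.0)));
  return 0;
}

With these example numbers, the background target clamps to 40 MiB while a
foreground multiplier of 2.0 allows 48 MiB. Scaling min_free_ and max_free_ by
the multiplier, not just the utilization delta, is what gives foreground apps
proportionally more free space at every step; the new static_cast in the
concurrent_start_bytes_ computation follows from bytes_allocated widening to
uint64_t.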