Diffstat (limited to 'runtime/gc/heap.h')
 runtime/gc/heap.h | 32 ++++++++++++++------------------
 1 file changed, 14 insertions(+), 18 deletions(-)
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index a8989ec..912cf7d 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -150,7 +150,7 @@ class Heap {
explicit Heap(size_t initial_size, size_t growth_limit, size_t min_free,
size_t max_free, double target_utilization, size_t capacity,
const std::string& original_image_file_name,
- CollectorType post_zygote_collector_type, CollectorType background_collector_type,
+ CollectorType foreground_collector_type, CollectorType background_collector_type,
size_t parallel_gc_threads, size_t conc_gc_threads, bool low_memory_mode,
size_t long_pause_threshold, size_t long_gc_threshold,
bool ignore_max_footprint, bool use_tlab, bool verify_pre_gc_heap,
@@ -196,8 +196,6 @@ class Heap {
void VisitObjects(ObjectCallback callback, void* arg)
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
- void SwapSemiSpaces() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
-
void CheckPreconditionsForAllocObject(mirror::Class* c, size_t byte_count)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void ThrowOutOfMemoryError(size_t byte_count, bool large_object_allocation);
@@ -249,10 +247,6 @@ class Heap {
// Returns true if there is any chance that the object (obj) will move.
bool IsMovableObject(const mirror::Object* obj) const;
- // Returns true if an object is in the temp space; if this happens, it's usually indicative of
- // compaction related errors.
- bool IsInTempSpace(const mirror::Object* obj) const;
-
// Enables us to temporarily disable compacting GC until objects are released.
void IncrementDisableMovingGC(Thread* self);
void DecrementDisableMovingGC(Thread* self);
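
The increment/decrement pair above brackets regions where object addresses must stay stable, for example while raw pointers are exposed to native code. A minimal sketch of the intended pairing, using a hypothetical RAII wrapper that is not part of heap.h:

  // ScopedDisableMovingGC is a hypothetical helper, shown only to illustrate the
  // intended pairing; it keeps the disable count balanced on early returns.
  class ScopedDisableMovingGC {
   public:
    ScopedDisableMovingGC(gc::Heap* heap, Thread* self) : heap_(heap), self_(self) {
      heap_->IncrementDisableMovingGC(self_);
    }
    ~ScopedDisableMovingGC() {
      heap_->DecrementDisableMovingGC(self_);
    }
   private:
    gc::Heap* const heap_;
    Thread* const self_;
  };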
@@ -568,7 +562,8 @@ class Heap {
private:
void Compact(space::ContinuousMemMapAllocSpace* target_space,
- space::ContinuousMemMapAllocSpace* source_space);
+ space::ContinuousMemMapAllocSpace* source_space)
+ EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
void FinishGC(Thread* self, collector::GcType gc_type) LOCKS_EXCLUDED(gc_complete_lock_);
@@ -580,7 +575,7 @@ class Heap {
static ALWAYS_INLINE bool AllocatorMayHaveConcurrentGC(AllocatorType allocator_type) {
return AllocatorHasAllocationStack(allocator_type);
}
- static bool IsCompactingGC(CollectorType collector_type) {
+ static bool IsMovingGc(CollectorType collector_type) {
return collector_type == kCollectorTypeSS || collector_type == kCollectorTypeGSS ||
collector_type == kCollectorTypeCC;
}
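
The rename from IsCompactingGC to IsMovingGc matches the "moving GC" terminology used elsewhere in this header; the predicate itself is unchanged. A sketch of the kind of call site it serves, with the helper name PrepareForCollector invented for illustration:

  // Sketch only; IsMovingGc is private, so this is written as if inside Heap.
  void Heap::PrepareForCollector(CollectorType collector_type) {
    if (IsMovingGc(collector_type)) {
      // kCollectorTypeSS, kCollectorTypeGSS and kCollectorTypeCC all relocate
      // objects, so cached object addresses are invalid after the collection.
    }
  }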
@@ -609,6 +604,10 @@ class Heap {
size_t bytes)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ // Need to do this with mutators paused so that somebody doesn't accidentally allocate into the
+ // wrong space.
+ void SwapSemiSpaces() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
+
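
Together with the new annotation on Compact, making SwapSemiSpaces private and requiring the mutator lock exclusively encodes the comment above: the swap only happens while every mutator is suspended, so no thread can allocate into the space that is about to become the from-space. A schematic of the ordering, not the actual collector code:

  // Schematic only; the real sequencing lives in the semi-space collector.
  ThreadList* tl = Runtime::Current()->GetThreadList();
  tl->SuspendAll();                  // mutators paused, mutator_lock_ held exclusively
  Compact(to_space, from_space);     // now annotated EXCLUSIVE_LOCKS_REQUIRED(mutator_lock_)
  SwapSemiSpaces();                  // flip spaces before any thread can allocate again
  tl->ResumeAll();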
// Try to allocate a number of bytes; this function never does any GCs. Needs to be inlined so
// that the switch statement is constant optimized in the entrypoints.
template <const bool kInstrumented, const bool kGrow>
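
The template parameters exist so that kInstrumented and kGrow are compile-time constants in each allocation entrypoint, letting the compiler fold away the branches and the allocator switch. A generic illustration of the pattern (none of these names are ART's):

  // Generic constant-folding illustration, unrelated to ART's real allocator code.
  template <const bool kGrow>
  inline bool WithinFootprint(size_t allocated, size_t bytes, size_t max_footprint) {
    if (allocated + bytes > max_footprint) {
      // When kGrow is false this is a compile-time 'return false', so the
      // entrypoint built from this template contains no growth logic at all.
      return kGrow;
    }
    return true;
  }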
@@ -668,6 +667,10 @@ class Heap {
// Find a collector based on GC type.
collector::GarbageCollector* FindCollectorByGcType(collector::GcType gc_type);
+ // Create the main free list space, typically either a RosAlloc space or DlMalloc space.
+ void CreateMainMallocSpace(MemMap* mem_map, size_t initial_size, size_t growth_limit,
+ size_t capacity);
+
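
The new helper centralizes construction of the main free-list space so heap creation and the space transitions can share one code path. A hedged sketch of the dispatch it implies; kUseRosAlloc, main_space_ and kDefaultStartingSize appear in this header, but the CreateFromMemMap signatures below are paraphrased rather than copied from the space headers:

  // Sketch of the dispatch CreateMainMallocSpace implies; signatures paraphrased.
  void Heap::CreateMainMallocSpace(MemMap* mem_map, size_t initial_size,
                                   size_t growth_limit, size_t capacity) {
    if (kUseRosAlloc) {
      main_space_ = space::RosAllocSpace::CreateFromMemMap(
          mem_map, "main rosalloc space", kDefaultStartingSize, initial_size,
          growth_limit, capacity, low_memory_mode_);
    } else {
      main_space_ = space::DlMallocSpace::CreateFromMemMap(
          mem_map, "main dlmalloc space", kDefaultStartingSize, initial_size,
          growth_limit, capacity);
    }
  }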
// Given the current contents of the alloc space, increase the allowed heap footprint to match
// the target utilization ratio. This should only be called immediately after a full garbage
// collection.
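
The growth rule the comment describes is simple arithmetic: the new footprint is roughly live bytes divided by the target utilization, with the headroom clamped between min_free and max_free. A worked sketch with example numbers, separate from the actual implementation:

  // Worked example; the numbers are illustrative, not ART defaults.
  constexpr size_t MB = 1024 * 1024;
  size_t bytes_allocated = 60 * MB;   // live data after a full collection
  double target_utilization = 0.75;   // aim for a heap that is ~75% full
  size_t min_free = 2 * MB;
  size_t max_free = 8 * MB;
  size_t target = static_cast<size_t>(bytes_allocated / target_utilization);  // 80 MB
  if (target > bytes_allocated + max_free) {
    target = bytes_allocated + max_free;        // clamps to 68 MB in this example
  } else if (target < bytes_allocated + min_free) {
    target = bytes_allocated + min_free;
  }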
@@ -737,17 +740,10 @@ class Heap {
// A remembered set remembers all of the references from its space to the target space.
SafeMap<space::Space*, accounting::RememberedSet*> remembered_sets_;
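
As a concrete example of how the map is keyed, the set covering a given space is looked up by that space's pointer; SafeMap's find() mirrors std::map:

  // Sketch: finding the remembered set that covers a given space. The set records
  // references *from* that space into the target space, so only those need scanning.
  auto it = remembered_sets_.find(space);
  if (it != remembered_sets_.end()) {
    accounting::RememberedSet* rem_set = it->second;
    // scan rem_set when collecting the target space
  }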
- // Keep the free list allocator mem map lying around when we transition to background so that we
- // don't have to worry about virtual address space fragmentation.
- UniquePtr<MemMap> allocator_mem_map_;
-
- // The mem-map which we will use for the non-moving space after the zygote is done forking:
- UniquePtr<MemMap> post_zygote_non_moving_space_mem_map_;
-
// The current collector type.
CollectorType collector_type_;
- // Which collector we will switch to after zygote fork.
- CollectorType post_zygote_collector_type_;
+ // Which collector we use when the app is in the foreground.
+ CollectorType foreground_collector_type_;
// Which collector we will use when the app is notified of a transition to background.
CollectorType background_collector_type_;
// Desired collector type; the heap trimming daemon transitions the heap if it is != collector_type_.
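
With the zygote-specific field gone, these fields form a small state machine: collector_type_ is what currently runs, foreground_collector_type_ and background_collector_type_ are the two targets, and desired_collector_type_ records what the trim daemon should switch to next. A schematic of how a process-state change might drive it; UpdateProcessState exists in this class, but the body below is simplified:

  // Schematic only; the real UpdateProcessState also handles transition requests.
  void Heap::UpdateProcessState(ProcessState process_state) {
    if (process_state == kProcessStateJankPerceptible) {
      desired_collector_type_ = foreground_collector_type_;  // visible: keep pauses short
    } else {
      desired_collector_type_ = background_collector_type_;  // backgrounded: compact
    }
    // The heap trimming daemon later transitions the heap when
    // desired_collector_type_ != collector_type_.
  }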