author     Hiroshi Yamauchi <yamauchi@google.com>   2015-01-30 16:25:12 -0800
committer  Hiroshi Yamauchi <yamauchi@google.com>   2015-01-30 17:58:54 -0800
commit     d25f84250700c35f006d5a1d295231af174c3734
tree       f296729f989ceebc1e201eb6f543644c3e9a5919
parent     763abfd0d803f8169e97d3da944043c2464aac0a
Clean up enums in RegionSpace.

Split enum RegionState into two enums, RegionState and RegionType. Merge
the latter with SubSpaceType. Use RefToRegionUnlocked in
RegionSpace::AddLiveBytes. Turn some CHECKs into DCHECKs.

Improve the Ritz EAAC run time and the GC time by ~20%.

Bug: 12687968
Change-Id: Icdb8ab3e9ec2a1eefc8c9a2e4bb19befcf2562a6
-rw-r--r--  runtime/Android.mk                           |   1
-rw-r--r--  runtime/gc/collector/concurrent_copying.cc   |  26
-rw-r--r--  runtime/gc/space/region_space-inl.h          |  32
-rw-r--r--  runtime/gc/space/region_space.cc             |  26
-rw-r--r--  runtime/gc/space/region_space.h              | 178
5 files changed, 129 insertions, 134 deletions
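
For orientation before the per-file diffs: the patch replaces the single combined Region enum with an orthogonal (state, type) pair. The sketch below restates the two enums introduced in region_space.h further down; the mapping comment at the end is illustrative and not part of the patch.

    enum class RegionState : uint8_t {
      kRegionStateFree,       // Free region.
      kRegionStateAllocated,  // Allocated region.
      kRegionStateLarge,      // Large allocation (larger than the region size).
      kRegionStateLargeTail,  // Non-first region of a large allocation.
    };

    enum class RegionType : uint8_t {
      kRegionTypeAll,              // All types (used only as a template argument).
      kRegionTypeFromSpace,        // From-space. To be evacuated.
      kRegionTypeUnevacFromSpace,  // Unevacuated from-space. Not to be evacuated.
      kRegionTypeToSpace,          // To-space.
      kRegionTypeNone,             // None (free region, or address outside the region space).
    };

    // For example, the old combined value kRegionLargeUnevacFromSpace is now
    // represented as the pair:
    //   state_ == RegionState::kRegionStateLarge &&
    //   type_  == RegionType::kRegionTypeUnevacFromSpace
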
diff --git a/runtime/Android.mk b/runtime/Android.mk
index 8a4b8c0..78c692d 100644
--- a/runtime/Android.mk
+++ b/runtime/Android.mk
@@ -300,6 +300,7 @@ LIBART_ENUM_OPERATOR_OUT_HEADER_FILES := \
gc/collector/gc_type.h \
gc/allocator_type.h \
gc/collector_type.h \
+ gc/space/region_space.h \
gc/space/space.h \
gc/heap.h \
instrumentation.h \
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index 754e217..2ad8e9c 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -810,10 +810,10 @@ class ConcurrentCopyingClearBlackPtrsVisitor {
void operator()(mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
DCHECK(obj != nullptr);
- CHECK(collector_->heap_->GetMarkBitmap()->Test(obj)) << obj;
- CHECK_EQ(obj->GetReadBarrierPointer(), ReadBarrier::BlackPtr()) << obj;
+ DCHECK(collector_->heap_->GetMarkBitmap()->Test(obj)) << obj;
+ DCHECK_EQ(obj->GetReadBarrierPointer(), ReadBarrier::BlackPtr()) << obj;
obj->SetReadBarrierPointer(ReadBarrier::WhitePtr());
- CHECK_EQ(obj->GetReadBarrierPointer(), ReadBarrier::WhitePtr()) << obj;
+ DCHECK_EQ(obj->GetReadBarrierPointer(), ReadBarrier::WhitePtr()) << obj;
}
private:
@@ -955,10 +955,10 @@ class ConcurrentCopyingComputeUnevacFromSpaceLiveRatioVisitor {
void operator()(mirror::Object* ref) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
DCHECK(ref != nullptr);
- CHECK(collector_->region_space_bitmap_->Test(ref)) << ref;
- CHECK(collector_->region_space_->IsInUnevacFromSpace(ref)) << ref;
+ DCHECK(collector_->region_space_bitmap_->Test(ref)) << ref;
+ DCHECK(collector_->region_space_->IsInUnevacFromSpace(ref)) << ref;
if (kUseBakerReadBarrier) {
- CHECK(ref->GetReadBarrierPointer() == ReadBarrier::BlackPtr()) << ref;
+ DCHECK_EQ(ref->GetReadBarrierPointer(), ReadBarrier::BlackPtr()) << ref;
// Clear the black ptr.
ref->SetReadBarrierPointer(ReadBarrier::WhitePtr());
}
@@ -1380,17 +1380,18 @@ mirror::Object* ConcurrentCopying::Copy(mirror::Object* from_ref) {
mirror::Object* ConcurrentCopying::IsMarked(mirror::Object* from_ref) {
DCHECK(from_ref != nullptr);
- if (region_space_->IsInToSpace(from_ref)) {
+ space::RegionSpace::RegionType rtype = region_space_->GetRegionType(from_ref);
+ if (rtype == space::RegionSpace::RegionType::kRegionTypeToSpace) {
// It's already marked.
return from_ref;
}
mirror::Object* to_ref;
- if (region_space_->IsInFromSpace(from_ref)) {
+ if (rtype == space::RegionSpace::RegionType::kRegionTypeFromSpace) {
to_ref = GetFwdPtr(from_ref);
DCHECK(to_ref == nullptr || region_space_->IsInToSpace(to_ref) ||
heap_->non_moving_space_->HasAddress(to_ref))
<< "from_ref=" << from_ref << " to_ref=" << to_ref;
- } else if (region_space_->IsInUnevacFromSpace(from_ref)) {
+ } else if (rtype == space::RegionSpace::RegionType::kRegionTypeUnevacFromSpace) {
if (region_space_bitmap_->Test(from_ref)) {
to_ref = from_ref;
} else {
@@ -1455,12 +1456,13 @@ mirror::Object* ConcurrentCopying::Mark(mirror::Object* from_ref) {
}
DCHECK(from_ref != nullptr);
DCHECK(heap_->collector_type_ == kCollectorTypeCC);
- if (region_space_->IsInToSpace(from_ref)) {
+ space::RegionSpace::RegionType rtype = region_space_->GetRegionType(from_ref);
+ if (rtype == space::RegionSpace::RegionType::kRegionTypeToSpace) {
// It's already marked.
return from_ref;
}
mirror::Object* to_ref;
- if (region_space_->IsInFromSpace(from_ref)) {
+ if (rtype == space::RegionSpace::RegionType::kRegionTypeFromSpace) {
to_ref = GetFwdPtr(from_ref);
if (kUseBakerReadBarrier) {
DCHECK(to_ref != ReadBarrier::GrayPtr()) << "from_ref=" << from_ref << " to_ref=" << to_ref;
@@ -1471,7 +1473,7 @@ mirror::Object* ConcurrentCopying::Mark(mirror::Object* from_ref) {
}
DCHECK(region_space_->IsInToSpace(to_ref) || heap_->non_moving_space_->HasAddress(to_ref))
<< "from_ref=" << from_ref << " to_ref=" << to_ref;
- } else if (region_space_->IsInUnevacFromSpace(from_ref)) {
+ } else if (rtype == space::RegionSpace::RegionType::kRegionTypeUnevacFromSpace) {
// This may or may not succeed, which is ok.
if (kUseBakerReadBarrier) {
from_ref->AtomicSetReadBarrierPointer(ReadBarrier::WhitePtr(), ReadBarrier::GrayPtr());
diff --git a/runtime/gc/space/region_space-inl.h b/runtime/gc/space/region_space-inl.h
index fd00739..a4ed718 100644
--- a/runtime/gc/space/region_space-inl.h
+++ b/runtime/gc/space/region_space-inl.h
@@ -104,7 +104,7 @@ inline mirror::Object* RegionSpace::AllocNonvirtual(size_t num_bytes, size_t* by
inline mirror::Object* RegionSpace::Region::Alloc(size_t num_bytes, size_t* bytes_allocated,
size_t* usable_size) {
- DCHECK_EQ(state_, static_cast<uint8_t>(kRegionToSpace));
+ DCHECK(IsAllocated() && IsInToSpace());
DCHECK(IsAligned<kAlignment>(num_bytes));
Atomic<uint8_t*>* atomic_top = reinterpret_cast<Atomic<uint8_t*>*>(&top_);
uint8_t* old_top;
@@ -132,7 +132,7 @@ inline size_t RegionSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t*
size_t num_bytes = obj->SizeOf();
if (usable_size != nullptr) {
if (LIKELY(num_bytes <= kRegionSize)) {
- DCHECK(RefToRegion(obj)->IsNormal());
+ DCHECK(RefToRegion(obj)->IsAllocated());
*usable_size = RoundUp(num_bytes, kAlignment);
} else {
DCHECK(RefToRegion(obj)->IsLarge());
@@ -142,7 +142,7 @@ inline size_t RegionSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t*
return num_bytes;
}
-template<RegionSpace::SubSpaceType kSubSpaceType>
+template<RegionSpace::RegionType kRegionType>
uint64_t RegionSpace::GetBytesAllocatedInternal() {
uint64_t bytes = 0;
MutexLock mu(Thread::Current(), region_lock_);
@@ -151,33 +151,33 @@ uint64_t RegionSpace::GetBytesAllocatedInternal() {
if (r->IsFree()) {
continue;
}
- switch (kSubSpaceType) {
- case kAllSpaces:
+ switch (kRegionType) {
+ case RegionType::kRegionTypeAll:
bytes += r->BytesAllocated();
break;
- case kFromSpace:
+ case RegionType::kRegionTypeFromSpace:
if (r->IsInFromSpace()) {
bytes += r->BytesAllocated();
}
break;
- case kUnevacFromSpace:
+ case RegionType::kRegionTypeUnevacFromSpace:
if (r->IsInUnevacFromSpace()) {
bytes += r->BytesAllocated();
}
break;
- case kToSpace:
+ case RegionType::kRegionTypeToSpace:
if (r->IsInToSpace()) {
bytes += r->BytesAllocated();
}
break;
default:
- LOG(FATAL) << "Unexpected space type : " << static_cast<int>(kSubSpaceType);
+ LOG(FATAL) << "Unexpected space type : " << kRegionType;
}
}
return bytes;
}
-template<RegionSpace::SubSpaceType kSubSpaceType>
+template<RegionSpace::RegionType kRegionType>
uint64_t RegionSpace::GetObjectsAllocatedInternal() {
uint64_t bytes = 0;
MutexLock mu(Thread::Current(), region_lock_);
@@ -186,27 +186,27 @@ uint64_t RegionSpace::GetObjectsAllocatedInternal() {
if (r->IsFree()) {
continue;
}
- switch (kSubSpaceType) {
- case kAllSpaces:
+ switch (kRegionType) {
+ case RegionType::kRegionTypeAll:
bytes += r->ObjectsAllocated();
break;
- case kFromSpace:
+ case RegionType::kRegionTypeFromSpace:
if (r->IsInFromSpace()) {
bytes += r->ObjectsAllocated();
}
break;
- case kUnevacFromSpace:
+ case RegionType::kRegionTypeUnevacFromSpace:
if (r->IsInUnevacFromSpace()) {
bytes += r->ObjectsAllocated();
}
break;
- case kToSpace:
+ case RegionType::kRegionTypeToSpace:
if (r->IsInToSpace()) {
bytes += r->ObjectsAllocated();
}
break;
default:
- LOG(FATAL) << "Unexpected space type : " << static_cast<int>(kSubSpaceType);
+ LOG(FATAL) << "Unexpected space type : " << kRegionType;
}
}
return bytes;
diff --git a/runtime/gc/space/region_space.cc b/runtime/gc/space/region_space.cc
index 2ecb79e..2c556d9 100644
--- a/runtime/gc/space/region_space.cc
+++ b/runtime/gc/space/region_space.cc
@@ -71,7 +71,7 @@ RegionSpace::RegionSpace(const std::string& name, MemMap* mem_map)
}
full_region_ = Region();
DCHECK(!full_region_.IsFree());
- DCHECK(full_region_.IsNormal());
+ DCHECK(full_region_.IsAllocated());
current_region_ = &full_region_;
evac_region_ = nullptr;
size_t ignored;
@@ -115,7 +115,7 @@ size_t RegionSpace::ToSpaceSize() {
}
inline bool RegionSpace::Region::ShouldBeEvacuated() {
- DCHECK(state_ == kRegionToSpace || state_ == kRegionLargeToSpace);
+ DCHECK((IsAllocated() || IsLarge()) && IsInToSpace());
// if the region was allocated after the start of the
// previous GC or the live ratio is below threshold, evacuate
// it.
@@ -126,13 +126,13 @@ inline bool RegionSpace::Region::ShouldBeEvacuated() {
bool is_live_percent_valid = live_bytes_ != static_cast<size_t>(-1);
if (is_live_percent_valid) {
uint live_percent = GetLivePercent();
- if (state_ == kRegionToSpace) {
+ if (IsAllocated()) {
// Side node: live_percent == 0 does not necessarily mean
// there's no live objects due to rounding (there may be a
// few).
result = live_percent < kEvaculateLivePercentThreshold;
} else {
- DCHECK(state_ == kRegionLargeToSpace);
+ DCHECK(IsLarge());
result = live_percent == 0U;
}
} else {
@@ -155,11 +155,14 @@ void RegionSpace::SetFromSpace(accounting::ReadBarrierTable* rb_table, bool forc
bool prev_large_evacuated = false;
for (size_t i = 0; i < num_regions_; ++i) {
Region* r = &regions_[i];
- RegionState state = static_cast<RegionState>(r->state_);
+ RegionState state = r->State();
+ RegionType type = r->Type();
if (!r->IsFree()) {
DCHECK(r->IsInToSpace());
if (LIKELY(num_expected_large_tails == 0U)) {
- DCHECK(state == kRegionToSpace || state == kRegionLargeToSpace);
+ DCHECK((state == RegionState::kRegionStateAllocated ||
+ state == RegionState::kRegionStateLarge) &&
+ type == RegionType::kRegionTypeToSpace);
bool should_evacuate = force_evacuate_all || r->ShouldBeEvacuated();
if (should_evacuate) {
r->SetAsFromSpace();
@@ -168,13 +171,15 @@ void RegionSpace::SetFromSpace(accounting::ReadBarrierTable* rb_table, bool forc
r->SetAsUnevacFromSpace();
DCHECK(r->IsInUnevacFromSpace());
}
- if (UNLIKELY(state == kRegionLargeToSpace)) {
+ if (UNLIKELY(state == RegionState::kRegionStateLarge &&
+ type == RegionType::kRegionTypeToSpace)) {
prev_large_evacuated = should_evacuate;
num_expected_large_tails = RoundUp(r->BytesAllocated(), kRegionSize) / kRegionSize - 1;
DCHECK_GT(num_expected_large_tails, 0U);
}
} else {
- DCHECK(state == kRegionLargeTailToSpace);
+ DCHECK(state == RegionState::kRegionStateLargeTail &&
+ type == RegionType::kRegionTypeToSpace);
if (prev_large_evacuated) {
r->SetAsFromSpace();
DCHECK(r->IsInFromSpace());
@@ -361,7 +366,7 @@ void RegionSpace::RevokeThreadLocalBuffersLocked(Thread* thread) {
if (tlab_start != nullptr) {
DCHECK(IsAligned<kRegionSize>(tlab_start));
Region* r = RefToRegionLocked(reinterpret_cast<mirror::Object*>(tlab_start));
- DCHECK(r->IsNormal());
+ DCHECK(r->IsAllocated());
DCHECK_EQ(thread->GetThreadLocalBytesAllocated(), kRegionSize);
r->RecordThreadLocalAllocations(thread->GetThreadLocalObjectsAllocated(),
thread->GetThreadLocalBytesAllocated());
@@ -402,7 +407,8 @@ void RegionSpace::AssertAllThreadLocalBuffersAreRevoked() {
void RegionSpace::Region::Dump(std::ostream& os) const {
os << "Region[" << idx_ << "]=" << reinterpret_cast<void*>(begin_) << "-" << reinterpret_cast<void*>(top_)
<< "-" << reinterpret_cast<void*>(end_)
- << " state=" << static_cast<uint>(state_) << " objects_allocated=" << objects_allocated_
+ << " state=" << static_cast<uint>(state_) << " type=" << static_cast<uint>(type_)
+ << " objects_allocated=" << objects_allocated_
<< " alloc_time=" << alloc_time_ << " live_bytes=" << live_bytes_
<< " is_newly_allocated=" << is_newly_allocated_ << " is_a_tlab=" << is_a_tlab_ << " thread=" << thread_ << "\n";
}
diff --git a/runtime/gc/space/region_space.h b/runtime/gc/space/region_space.h
index b4a043f..4160547 100644
--- a/runtime/gc/space/region_space.h
+++ b/runtime/gc/space/region_space.h
@@ -17,9 +17,10 @@
#ifndef ART_RUNTIME_GC_SPACE_REGION_SPACE_H_
#define ART_RUNTIME_GC_SPACE_REGION_SPACE_H_
+#include "gc/accounting/read_barrier_table.h"
#include "object_callbacks.h"
#include "space.h"
-#include "gc/accounting/read_barrier_table.h"
+#include "thread.h"
namespace art {
namespace gc {
@@ -94,32 +95,40 @@ class RegionSpace FINAL : public ContinuousMemMapAllocSpace {
void AssertAllThreadLocalBuffersAreRevoked() LOCKS_EXCLUDED(Locks::runtime_shutdown_lock_,
Locks::thread_list_lock_);
- enum SubSpaceType {
- kAllSpaces, // All spaces.
- kFromSpace, // From-space. To be evacuated.
- kUnevacFromSpace, // Unevacuated from-space. Not to be evacuated.
- kToSpace, // To-space.
+ enum class RegionType : uint8_t {
+ kRegionTypeAll, // All types.
+ kRegionTypeFromSpace, // From-space. To be evacuated.
+ kRegionTypeUnevacFromSpace, // Unevacuated from-space. Not to be evacuated.
+ kRegionTypeToSpace, // To-space.
+ kRegionTypeNone, // None.
+ };
+
+ enum class RegionState : uint8_t {
+ kRegionStateFree, // Free region.
+ kRegionStateAllocated, // Allocated region.
+ kRegionStateLarge, // Large allocated (allocation larger than the region size).
+ kRegionStateLargeTail, // Large tail (non-first regions of a large allocation).
};
- template<SubSpaceType kSubSpaceType> uint64_t GetBytesAllocatedInternal();
- template<SubSpaceType kSubSpaceType> uint64_t GetObjectsAllocatedInternal();
+ template<RegionType kRegionType> uint64_t GetBytesAllocatedInternal();
+ template<RegionType kRegionType> uint64_t GetObjectsAllocatedInternal();
uint64_t GetBytesAllocated() {
- return GetBytesAllocatedInternal<kAllSpaces>();
+ return GetBytesAllocatedInternal<RegionType::kRegionTypeAll>();
}
uint64_t GetObjectsAllocated() {
- return GetObjectsAllocatedInternal<kAllSpaces>();
+ return GetObjectsAllocatedInternal<RegionType::kRegionTypeAll>();
}
uint64_t GetBytesAllocatedInFromSpace() {
- return GetBytesAllocatedInternal<kFromSpace>();
+ return GetBytesAllocatedInternal<RegionType::kRegionTypeFromSpace>();
}
uint64_t GetObjectsAllocatedInFromSpace() {
- return GetObjectsAllocatedInternal<kFromSpace>();
+ return GetObjectsAllocatedInternal<RegionType::kRegionTypeFromSpace>();
}
uint64_t GetBytesAllocatedInUnevacFromSpace() {
- return GetBytesAllocatedInternal<kUnevacFromSpace>();
+ return GetBytesAllocatedInternal<RegionType::kRegionTypeUnevacFromSpace>();
}
uint64_t GetObjectsAllocatedInUnevacFromSpace() {
- return GetObjectsAllocatedInternal<kUnevacFromSpace>();
+ return GetObjectsAllocatedInternal<RegionType::kRegionTypeUnevacFromSpace>();
}
bool CanMoveObjects() const OVERRIDE {
@@ -181,6 +190,14 @@ class RegionSpace FINAL : public ContinuousMemMapAllocSpace {
return false;
}
+ RegionType GetRegionType(mirror::Object* ref) {
+ if (HasAddress(ref)) {
+ Region* r = RefToRegionUnlocked(ref);
+ return r->Type();
+ }
+ return RegionType::kRegionTypeNone;
+ }
+
void SetFromSpace(accounting::ReadBarrierTable* rb_table, bool force_evacuate_all)
LOCKS_EXCLUDED(region_lock_);
@@ -190,7 +207,7 @@ class RegionSpace FINAL : public ContinuousMemMapAllocSpace {
void ClearFromSpace();
void AddLiveBytes(mirror::Object* ref, size_t alloc_size) {
- Region* reg = RefToRegion(ref);
+ Region* reg = RefToRegionUnlocked(ref);
reg->AddLiveBytes(alloc_size);
}
@@ -209,38 +226,36 @@ class RegionSpace FINAL : public ContinuousMemMapAllocSpace {
template<bool kToSpaceOnly>
void WalkInternal(ObjectCallback* callback, void* arg) NO_THREAD_SAFETY_ANALYSIS;
- enum RegionState {
- kRegionFree, // Free region.
- kRegionToSpace, // To-space region.
- kRegionFromSpace, // From-space region. To be evacuated.
- kRegionUnevacFromSpace, // Unevacuated from-space region. Not to be evacuated.
- kRegionLargeToSpace, // Large (allocation larger than the region size) to-space.
- kRegionLargeFromSpace, // Large from-space. To be evacuated.
- kRegionLargeUnevacFromSpace, // Large unevacuated from-space.
- kRegionLargeTailToSpace, // Large tail (non-first regions of a large allocation).
- kRegionLargeTailFromSpace, // Large tail from-space.
- kRegionLargeTailUnevacFromSpace, // Large tail unevacuated from-space.
- };
-
class Region {
public:
Region()
: idx_(static_cast<size_t>(-1)),
- begin_(nullptr), top_(nullptr), end_(nullptr), state_(kRegionToSpace),
+ begin_(nullptr), top_(nullptr), end_(nullptr),
+ state_(RegionState::kRegionStateAllocated), type_(RegionType::kRegionTypeToSpace),
objects_allocated_(0), alloc_time_(0), live_bytes_(static_cast<size_t>(-1)),
is_newly_allocated_(false), is_a_tlab_(false), thread_(nullptr) {}
Region(size_t idx, uint8_t* begin, uint8_t* end)
- : idx_(idx), begin_(begin), top_(begin), end_(end), state_(kRegionFree),
+ : idx_(idx), begin_(begin), top_(begin), end_(end),
+ state_(RegionState::kRegionStateFree), type_(RegionType::kRegionTypeNone),
objects_allocated_(0), alloc_time_(0), live_bytes_(static_cast<size_t>(-1)),
is_newly_allocated_(false), is_a_tlab_(false), thread_(nullptr) {
DCHECK_LT(begin, end);
DCHECK_EQ(static_cast<size_t>(end - begin), kRegionSize);
}
+ RegionState State() const {
+ return state_;
+ }
+
+ RegionType Type() const {
+ return type_;
+ }
+
void Clear() {
top_ = begin_;
- state_ = kRegionFree;
+ state_ = RegionState::kRegionStateFree;
+ type_ = RegionType::kRegionTypeNone;
objects_allocated_ = 0;
alloc_time_ = 0;
live_bytes_ = static_cast<size_t>(-1);
@@ -257,8 +272,9 @@ class RegionSpace FINAL : public ContinuousMemMapAllocSpace {
size_t* usable_size);
bool IsFree() const {
- bool is_free = state_ == kRegionFree;
+ bool is_free = state_ == RegionState::kRegionStateFree;
if (is_free) {
+ DCHECK(IsInNoSpace());
DCHECK_EQ(begin_, top_);
DCHECK_EQ(objects_allocated_, 0U);
}
@@ -268,19 +284,22 @@ class RegionSpace FINAL : public ContinuousMemMapAllocSpace {
// Given a free region, declare it non-free (allocated).
void Unfree(uint32_t alloc_time) {
DCHECK(IsFree());
- state_ = kRegionToSpace;
+ state_ = RegionState::kRegionStateAllocated;
+ type_ = RegionType::kRegionTypeToSpace;
alloc_time_ = alloc_time;
}
void UnfreeLarge(uint32_t alloc_time) {
DCHECK(IsFree());
- state_ = kRegionLargeToSpace;
+ state_ = RegionState::kRegionStateLarge;
+ type_ = RegionType::kRegionTypeToSpace;
alloc_time_ = alloc_time;
}
void UnfreeLargeTail(uint32_t alloc_time) {
DCHECK(IsFree());
- state_ = kRegionLargeTailToSpace;
+ state_ = RegionState::kRegionStateLargeTail;
+ type_ = RegionType::kRegionTypeToSpace;
alloc_time_ = alloc_time;
}
@@ -288,25 +307,23 @@ class RegionSpace FINAL : public ContinuousMemMapAllocSpace {
is_newly_allocated_ = true;
}
- // Non-large, non-large-tail.
- bool IsNormal() const {
- return state_ == kRegionToSpace || state_ == kRegionFromSpace ||
- state_ == kRegionUnevacFromSpace;
+ // Non-large, non-large-tail allocated.
+ bool IsAllocated() const {
+ return state_ == RegionState::kRegionStateAllocated;
}
+ // Large allocated.
bool IsLarge() const {
- bool is_large = state_ == kRegionLargeToSpace || state_ == kRegionLargeFromSpace ||
- state_ == kRegionLargeUnevacFromSpace;
+ bool is_large = state_ == RegionState::kRegionStateLarge;
if (is_large) {
DCHECK_LT(begin_ + 1 * MB, top_);
}
return is_large;
}
+ // Large-tail allocated.
bool IsLargeTail() const {
- bool is_large_tail = state_ == kRegionLargeTailToSpace ||
- state_ == kRegionLargeTailFromSpace ||
- state_ == kRegionLargeTailUnevacFromSpace;
+ bool is_large_tail = state_ == RegionState::kRegionStateLargeTail;
if (is_large_tail) {
DCHECK_EQ(begin_, top_);
}
@@ -318,71 +335,36 @@ class RegionSpace FINAL : public ContinuousMemMapAllocSpace {
}
bool IsInFromSpace() const {
- return state_ == kRegionFromSpace || state_ == kRegionLargeFromSpace ||
- state_ == kRegionLargeTailFromSpace;
+ return type_ == RegionType::kRegionTypeFromSpace;
}
bool IsInToSpace() const {
- return state_ == kRegionToSpace || state_ == kRegionLargeToSpace ||
- state_ == kRegionLargeTailToSpace;
+ return type_ == RegionType::kRegionTypeToSpace;
}
bool IsInUnevacFromSpace() const {
- return state_ == kRegionUnevacFromSpace || state_ == kRegionLargeUnevacFromSpace ||
- state_ == kRegionLargeTailUnevacFromSpace;
+ return type_ == RegionType::kRegionTypeUnevacFromSpace;
+ }
+
+ bool IsInNoSpace() const {
+ return type_ == RegionType::kRegionTypeNone;
}
void SetAsFromSpace() {
- switch (state_) {
- case kRegionToSpace:
- state_ = kRegionFromSpace;
- break;
- case kRegionLargeToSpace:
- state_ = kRegionLargeFromSpace;
- break;
- case kRegionLargeTailToSpace:
- state_ = kRegionLargeTailFromSpace;
- break;
- default:
- LOG(FATAL) << "Unexpected region state : " << static_cast<uint>(state_)
- << " idx=" << idx_;
- }
+ DCHECK(!IsFree() && IsInToSpace());
+ type_ = RegionType::kRegionTypeFromSpace;
live_bytes_ = static_cast<size_t>(-1);
}
void SetAsUnevacFromSpace() {
- switch (state_) {
- case kRegionToSpace:
- state_ = kRegionUnevacFromSpace;
- break;
- case kRegionLargeToSpace:
- state_ = kRegionLargeUnevacFromSpace;
- break;
- case kRegionLargeTailToSpace:
- state_ = kRegionLargeTailUnevacFromSpace;
- break;
- default:
- LOG(FATAL) << "Unexpected region state : " << static_cast<uint>(state_)
- << " idx=" << idx_;
- }
+ DCHECK(!IsFree() && IsInToSpace());
+ type_ = RegionType::kRegionTypeUnevacFromSpace;
live_bytes_ = 0U;
}
void SetUnevacFromSpaceAsToSpace() {
- switch (state_) {
- case kRegionUnevacFromSpace:
- state_ = kRegionToSpace;
- break;
- case kRegionLargeUnevacFromSpace:
- state_ = kRegionLargeToSpace;
- break;
- case kRegionLargeTailUnevacFromSpace:
- state_ = kRegionLargeTailToSpace;
- break;
- default:
- LOG(FATAL) << "Unexpected region state : " << static_cast<uint>(state_)
- << " idx=" << idx_;
- }
+ DCHECK(!IsFree() && IsInUnevacFromSpace());
+ type_ = RegionType::kRegionTypeToSpace;
}
ALWAYS_INLINE bool ShouldBeEvacuated();
@@ -419,7 +401,7 @@ class RegionSpace FINAL : public ContinuousMemMapAllocSpace {
DCHECK_EQ(begin_, top_);
return 0;
} else {
- DCHECK(IsNormal()) << static_cast<uint>(state_);
+ DCHECK(IsAllocated()) << static_cast<uint>(state_);
DCHECK_LE(begin_, top_);
size_t bytes = static_cast<size_t>(top_ - begin_);
DCHECK_LE(bytes, kRegionSize);
@@ -437,7 +419,7 @@ class RegionSpace FINAL : public ContinuousMemMapAllocSpace {
DCHECK_EQ(objects_allocated_, 0U);
return 0;
} else {
- DCHECK(IsNormal()) << static_cast<uint>(state_);
+ DCHECK(IsAllocated()) << static_cast<uint>(state_);
return objects_allocated_;
}
}
@@ -465,7 +447,7 @@ class RegionSpace FINAL : public ContinuousMemMapAllocSpace {
void Dump(std::ostream& os) const;
void RecordThreadLocalAllocations(size_t num_objects, size_t num_bytes) {
- DCHECK(IsNormal());
+ DCHECK(IsAllocated());
DCHECK_EQ(objects_allocated_, 0U);
DCHECK_EQ(top_, end_);
objects_allocated_ = num_objects;
@@ -479,7 +461,8 @@ class RegionSpace FINAL : public ContinuousMemMapAllocSpace {
// Can't use Atomic<uint8_t*> as Atomic's copy operator is implicitly deleted.
uint8_t* top_; // The current position of the allocation.
uint8_t* end_; // The end address of the region.
- uint8_t state_; // The region state (see RegionState).
+ RegionState state_; // The region state (see RegionState).
+ RegionType type_; // The region type (see RegionType).
uint64_t objects_allocated_; // The number of objects allocated.
uint32_t alloc_time_; // The allocation time of the region.
size_t live_bytes_; // The live bytes. Used to compute the live percent.
@@ -534,6 +517,9 @@ class RegionSpace FINAL : public ContinuousMemMapAllocSpace {
DISALLOW_COPY_AND_ASSIGN(RegionSpace);
};
+std::ostream& operator<<(std::ostream& os, const RegionSpace::RegionState& value);
+std::ostream& operator<<(std::ostream& os, const RegionSpace::RegionType& value);
+
} // namespace space
} // namespace gc
} // namespace art
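
A note on the Android.mk hunk above: headers listed in LIBART_ENUM_OPERATOR_OUT_HEADER_FILES get ostream output operators generated for their enums, which is what lets the LOG(FATAL) sites in region_space-inl.h stream kRegionType directly and what the two operator<< declarations at the end of region_space.h refer to. A hand-written equivalent would look roughly like the sketch below (the generated code and its exact output strings differ).

    std::ostream& operator<<(std::ostream& os, const RegionSpace::RegionType& rhs) {
      switch (rhs) {
        case RegionSpace::RegionType::kRegionTypeAll:             os << "RegionTypeAll"; break;
        case RegionSpace::RegionType::kRegionTypeFromSpace:       os << "RegionTypeFromSpace"; break;
        case RegionSpace::RegionType::kRegionTypeUnevacFromSpace: os << "RegionTypeUnevacFromSpace"; break;
        case RegionSpace::RegionType::kRegionTypeToSpace:         os << "RegionTypeToSpace"; break;
        case RegionSpace::RegionType::kRegionTypeNone:            os << "RegionTypeNone"; break;
        default: os << "RegionType[" << static_cast<int>(rhs) << "]"; break;
      }
      return os;
    }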