author     Mathieu Chartier <mathieuc@google.com>                    2014-03-13 17:20:47 +0000
committer  Gerrit Code Review <noreply-gerritcodereview@google.com>  2014-03-13 17:20:48 +0000
commit     0d64958d157ff6a30cbbe79df14ee4c723d14754 (patch)
tree       7c64042b391af761e0e3976fe3c1ce93027fa97e /runtime/gc
parent     3314dcf81d116c3142520970286ffaede67ab522 (diff)
parent     8d562103c3a3452fb15ef4b1c64df767b70507a4 (diff)
Merge "Refactor immune region logic into its own file."
Diffstat (limited to 'runtime/gc')
-rw-r--r--  runtime/gc/collector/immune_region.cc       66
-rw-r--r--  runtime/gc/collector/immune_region.h        65
-rw-r--r--  runtime/gc/collector/mark_sweep.cc          62
-rw-r--r--  runtime/gc/collector/mark_sweep.h           21
-rw-r--r--  runtime/gc/collector/partial_mark_sweep.cc   2
-rw-r--r--  runtime/gc/collector/semi_space.cc          58
-rw-r--r--  runtime/gc/collector/semi_space.h           20
-rw-r--r--  runtime/gc/space/space-inl.h                 1
8 files changed, 155 insertions, 140 deletions
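
The change is mechanical: the duplicated immune_begin_/immune_end_ pointer pair and the ImmuneSpace()/IsImmuneSpace()/IsImmune() helpers in MarkSweep and SemiSpace are replaced by a single ImmuneRegion class in runtime/gc/collector/immune_region.{h,cc}. As orientation before the diff, the following sketch mirrors the calling pattern the collectors are converted to (Reset() in InitializePhase, AddContinuousSpace() in BindBitmaps, ContainsObject() as the mark fast path). It is a self-contained, simplified stand-in rather than ART code: sketch::Space, the fake heap array, and all addresses are made up for illustration.

// sketch.cc -- hypothetical stand-in for the new ImmuneRegion contract.
#include <cassert>
#include <cstdint>
#include <vector>

namespace sketch {

struct Space {            // Stand-in for space::ContinuousSpace.
  uint8_t* begin;
  uint8_t* limit;
};

class ImmuneRegion {      // Same begin_/end_ bookkeeping as the new class.
 public:
  void Reset() { begin_ = end_ = nullptr; }

  // Grows the region to cover the space, or returns false if the space is
  // neither entirely before nor entirely after the current region.
  bool AddContinuousSpace(const Space& s) {
    if (begin_ == end_) {               // Empty region: adopt the space bounds.
      begin_ = s.begin;
      end_ = s.limit;
    } else if (s.limit <= begin_) {     // Space lies before the region.
      begin_ = s.begin;
    } else if (s.begin >= end_) {       // Space lies after the region.
      end_ = s.limit;
    } else {
      return false;
    }
    return true;
  }

  // Cheap "assumed marked" test that replaces a mark-bitmap read.
  bool ContainsObject(const uint8_t* obj) const {
    return obj >= begin_ && obj < end_;
  }

 private:
  uint8_t* begin_ = nullptr;
  uint8_t* end_ = nullptr;
};

}  // namespace sketch

int main() {
  static uint8_t heap[0x3000];                            // Fake contiguous heap.
  sketch::Space image  = {heap,          heap + 0x1000};  // never collected
  sketch::Space zygote = {heap + 0x1000, heap + 0x2000};  // never collected
  sketch::Space alloc  = {heap + 0x2000, heap + 0x3000};  // collected, stays outside

  sketch::ImmuneRegion immune;
  immune.Reset();                                         // InitializePhase()
  std::vector<sketch::Space> never_collect = {image, zygote};
  for (const sketch::Space& s : never_collect) {          // BindBitmaps()
    if (!immune.AddContinuousSpace(s)) {
      return 1;  // Never-collected spaces must form one contiguous region.
    }
  }
  // Mark fast path: anything inside the region is treated as already marked.
  assert(immune.ContainsObject(heap + 0x800));
  assert(!immune.ContainsObject(alloc.begin));
  return 0;
}
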
diff --git a/runtime/gc/collector/immune_region.cc b/runtime/gc/collector/immune_region.cc
new file mode 100644
index 0000000..9e65384
--- /dev/null
+++ b/runtime/gc/collector/immune_region.cc
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "immune_region.h"
+
+#include "gc/space/space-inl.h"
+#include "mirror/object.h"
+
+namespace art {
+namespace gc {
+namespace collector {
+
+ImmuneRegion::ImmuneRegion() {
+ Reset();
+}
+
+void ImmuneRegion::Reset() {
+ begin_ = nullptr;
+ end_ = nullptr;
+}
+
+bool ImmuneRegion::AddContinuousSpace(space::ContinuousSpace* space) {
+ // Bind live to mark bitmap if necessary.
+ if (space->GetLiveBitmap() != space->GetMarkBitmap()) {
+ CHECK(space->IsContinuousMemMapAllocSpace());
+ space->AsContinuousMemMapAllocSpace()->BindLiveToMarkBitmap();
+ }
+ mirror::Object* space_begin = reinterpret_cast<mirror::Object*>(space->Begin());
+ mirror::Object* space_limit = reinterpret_cast<mirror::Object*>(space->Limit());
+ if (IsEmpty()) {
+ begin_ = space_begin;
+ end_ = space_limit;
+ } else {
+ if (space_limit <= begin_) { // Space is before the immune region.
+ begin_ = space_begin;
+ } else if (space_begin >= end_) { // Space is after the immune region.
+ end_ = space_limit;
+ } else {
+ return false;
+ }
+ }
+ return true;
+}
+
+bool ImmuneRegion::ContainsSpace(const space::ContinuousSpace* space) const {
+ return
+ begin_ <= reinterpret_cast<mirror::Object*>(space->Begin()) &&
+ end_ >= reinterpret_cast<mirror::Object*>(space->Limit());
+}
+
+} // namespace collector
+} // namespace gc
+} // namespace art
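
A note on AddContinuousSpace() above: it only grows the region at either end, relying on its callers (and on Heap::AddContinuousSpace keeping continuous spaces sorted, as the removed MarkSweep comment below notes) to add spaces so that each new space sits entirely before or entirely after the region built so far; a space that would land in the middle makes it return false, which the collectors turn into a CHECK failure. A short continuation of the earlier sketch exercises those rules (it reuses sketch::ImmuneRegion and sketch::Space from the block after the diffstat; addresses are again fabricated):

// Appended to the sketch above (same file and namespace assumptions).
void ExtensionRulesDemo() {
  static uint8_t heap[0x4000];
  sketch::ImmuneRegion r;
  r.Reset();
  bool ok = r.AddContinuousSpace({heap + 0x1000, heap + 0x2000});   // first space adopted as-is
  ok = ok && r.AddContinuousSpace({heap + 0x2000, heap + 0x3000});  // after the region: end_ grows
  ok = ok && r.AddContinuousSpace({heap, heap + 0x1000});           // before the region: begin_ shrinks
  assert(ok);
  // A space overlapping the middle of [heap, heap + 0x3000) cannot be added.
  assert(!r.AddContinuousSpace({heap + 0x1800, heap + 0x2800}));
}
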
diff --git a/runtime/gc/collector/immune_region.h b/runtime/gc/collector/immune_region.h
new file mode 100644
index 0000000..21d0b43
--- /dev/null
+++ b/runtime/gc/collector/immune_region.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_GC_COLLECTOR_IMMUNE_REGION_H_
+#define ART_RUNTIME_GC_COLLECTOR_IMMUNE_REGION_H_
+
+#include "base/macros.h"
+#include "base/mutex.h"
+#include "gc/space/space-inl.h"
+
+namespace art {
+namespace mirror {
+class Object;
+} // namespace mirror
+namespace gc {
+namespace space {
+class ContinuousSpace;
+} // namespace space
+
+namespace collector {
+
+// An immune region is a continuous region of memory in which all contained objects are assumed to
+// be marked. It is used as an optimization in the GC to avoid testing the mark bitmap of the
+// zygote, image spaces, and sometimes the non-moving spaces, since the ContainsObject check is
+// faster than a bitmap read. Discontinuous spaces are not supported, and care must be taken that
+// the immune region does not contain any large objects.
+class ImmuneRegion {
+ public:
+ ImmuneRegion();
+ void Reset();
+ bool AddContinuousSpace(space::ContinuousSpace* space)
+ EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
+ bool ContainsSpace(const space::ContinuousSpace* space) const;
+ // Returns true if an object is inside of the immune region (assumed to be marked).
+ bool ContainsObject(const mirror::Object* obj) const ALWAYS_INLINE {
+ return obj >= begin_ && obj < end_;
+ }
+
+ private:
+ bool IsEmpty() const {
+ return begin_ == end_;
+ }
+
+ mirror::Object* begin_;
+ mirror::Object* end_;
+};
+
+} // namespace collector
+} // namespace gc
+} // namespace art
+
+#endif // ART_RUNTIME_GC_COLLECTOR_IMMUNE_REGION_H_
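
The header's ContainsObject() is what the rest of the diff threads through the collectors: in mark_sweep.cc and semi_space.cc below, the mark/IsMarked paths first ask the immune region and only fall back to a bitmap or forwarding lookup for objects outside it. The shape of that fast path, again with stand-ins (FakeMarkBitmap is hypothetical, not the real accounting::SpaceBitmap):

// Appended to the sketch above; illustrates the check order only.
#include <set>

struct FakeMarkBitmap {                       // Hypothetical stand-in bitmap.
  std::set<const uint8_t*> marked;
  bool Test(const uint8_t* obj) const { return marked.count(obj) != 0; }
};

bool IsMarkedSketch(const sketch::ImmuneRegion& immune,
                    const FakeMarkBitmap& bitmap,
                    const uint8_t* obj) {
  if (immune.ContainsObject(obj)) {           // Immune objects are assumed marked,
    return true;                              // so no bitmap read is needed.
  }
  return bitmap.Test(obj);
}
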
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc
index 71424bd..8b9f60e 100644
--- a/runtime/gc/collector/mark_sweep.cc
+++ b/runtime/gc/collector/mark_sweep.cc
@@ -88,51 +88,13 @@ static constexpr bool kCountJavaLangRefs = false;
static constexpr bool kCheckLocks = kDebugLocking;
static constexpr bool kVerifyRoots = kIsDebugBuild;
-void MarkSweep::ImmuneSpace(space::ContinuousSpace* space) {
- // Bind live to mark bitmap if necessary.
- if (space->GetLiveBitmap() != space->GetMarkBitmap()) {
- CHECK(space->IsContinuousMemMapAllocSpace());
- space->AsContinuousMemMapAllocSpace()->BindLiveToMarkBitmap();
- }
-
- // Add the space to the immune region.
- // TODO: Use space limits instead of current end_ since the end_ can be changed by dlmalloc
- // callbacks.
- if (immune_begin_ == NULL) {
- DCHECK(immune_end_ == NULL);
- SetImmuneRange(reinterpret_cast<Object*>(space->Begin()),
- reinterpret_cast<Object*>(space->End()));
- } else {
- const space::ContinuousSpace* prev_space = nullptr;
- // Find out if the previous space is immune.
- for (const space::ContinuousSpace* cur_space : GetHeap()->GetContinuousSpaces()) {
- if (cur_space == space) {
- break;
- }
- prev_space = cur_space;
- }
- // If previous space was immune, then extend the immune region. Relies on continuous spaces
- // being sorted by Heap::AddContinuousSpace.
- if (prev_space != nullptr && IsImmuneSpace(prev_space)) {
- immune_begin_ = std::min(reinterpret_cast<Object*>(space->Begin()), immune_begin_);
- immune_end_ = std::max(reinterpret_cast<Object*>(space->End()), immune_end_);
- }
- }
-}
-
-bool MarkSweep::IsImmuneSpace(const space::ContinuousSpace* space) const {
- return
- immune_begin_ <= reinterpret_cast<Object*>(space->Begin()) &&
- immune_end_ >= reinterpret_cast<Object*>(space->End());
-}
-
void MarkSweep::BindBitmaps() {
timings_.StartSplit("BindBitmaps");
WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
// Mark all of the spaces we never collect as immune.
for (const auto& space : GetHeap()->GetContinuousSpaces()) {
if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect) {
- ImmuneSpace(space);
+ CHECK(immune_region_.AddContinuousSpace(space)) << "Failed to add space " << *space;
}
}
timings_.EndSplit();
@@ -144,8 +106,6 @@ MarkSweep::MarkSweep(Heap* heap, bool is_concurrent, const std::string& name_pre
(is_concurrent ? "concurrent mark sweep": "mark sweep")),
current_mark_bitmap_(NULL),
mark_stack_(NULL),
- immune_begin_(NULL),
- immune_end_(NULL),
live_stack_freeze_size_(0),
gc_barrier_(new Barrier(0)),
large_object_lock_("mark sweep large object lock", kMarkSweepLargeObjectLock),
@@ -158,7 +118,7 @@ void MarkSweep::InitializePhase() {
TimingLogger::ScopedSplit split("InitializePhase", &timings_);
mark_stack_ = heap_->mark_stack_.get();
DCHECK(mark_stack_ != nullptr);
- SetImmuneRange(nullptr, nullptr);
+ immune_region_.Reset();
class_count_ = 0;
array_count_ = 0;
other_count_ = 0;
@@ -298,7 +258,7 @@ void MarkSweep::MarkingPhase() {
void MarkSweep::UpdateAndMarkModUnion() {
for (const auto& space : heap_->GetContinuousSpaces()) {
- if (IsImmuneSpace(space)) {
+ if (immune_region_.ContainsSpace(space)) {
const char* name = space->IsZygoteSpace() ? "UpdateAndMarkZygoteModUnionTable" :
"UpdateAndMarkImageModUnionTable";
TimingLogger::ScopedSplit split(name, &timings_);
@@ -385,11 +345,6 @@ void MarkSweep::ReclaimPhase() {
}
}
-void MarkSweep::SetImmuneRange(Object* begin, Object* end) {
- immune_begin_ = begin;
- immune_end_ = end;
-}
-
void MarkSweep::FindDefaultMarkBitmap() {
TimingLogger::ScopedSplit split("FindDefaultMarkBitmap", &timings_);
for (const auto& space : GetHeap()->GetContinuousSpaces()) {
@@ -442,7 +397,7 @@ mirror::Object* MarkSweep::MarkObjectCallback(mirror::Object* obj, void* arg) {
}
inline void MarkSweep::UnMarkObjectNonNull(const Object* obj) {
- DCHECK(!IsImmune(obj));
+ DCHECK(!immune_region_.ContainsObject(obj));
if (kUseBrooksPointer) {
// Verify all the objects have the correct Brooks pointer installed.
@@ -474,7 +429,7 @@ inline void MarkSweep::MarkObjectNonNull(const Object* obj) {
obj->AssertSelfBrooksPointer();
}
- if (IsImmune(obj)) {
+ if (immune_region_.ContainsObject(obj)) {
DCHECK(IsMarked(obj));
return;
}
@@ -541,7 +496,7 @@ inline bool MarkSweep::MarkObjectParallel(const Object* obj) {
obj->AssertSelfBrooksPointer();
}
- if (IsImmune(obj)) {
+ if (immune_region_.ContainsObject(obj)) {
DCHECK(IsMarked(obj));
return false;
}
@@ -1109,7 +1064,8 @@ void MarkSweep::SweepArray(accounting::ObjectStack* allocations, bool swap_bitma
std::vector<space::ContinuousSpace*> sweep_spaces;
space::ContinuousSpace* non_moving_space = nullptr;
for (space::ContinuousSpace* space : heap_->GetContinuousSpaces()) {
- if (space->IsAllocSpace() && !IsImmuneSpace(space) && space->GetLiveBitmap() != nullptr) {
+ if (space->IsAllocSpace() && !immune_region_.ContainsSpace(space) &&
+ space->GetLiveBitmap() != nullptr) {
if (space == heap_->GetNonMovingSpace()) {
non_moving_space = space;
} else {
@@ -1330,7 +1286,7 @@ void MarkSweep::ProcessMarkStack(bool paused) {
inline bool MarkSweep::IsMarked(const Object* object) const
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
- if (IsImmune(object)) {
+ if (immune_region_.ContainsObject(object)) {
return true;
}
DCHECK(current_mark_bitmap_ != NULL);
diff --git a/runtime/gc/collector/mark_sweep.h b/runtime/gc/collector/mark_sweep.h
index 8d40c34..df19f88 100644
--- a/runtime/gc/collector/mark_sweep.h
+++ b/runtime/gc/collector/mark_sweep.h
@@ -22,6 +22,7 @@
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
+#include "immune_region.h"
#include "object_callbacks.h"
#include "offsets.h"
#include "UniquePtr.h"
@@ -108,15 +109,6 @@ class MarkSweep : public GarbageCollector {
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- // Make a space immune, immune spaces have all live objects marked - that is the mark and
- // live bitmaps are bound together.
- void ImmuneSpace(space::ContinuousSpace* space)
- EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- bool IsImmuneSpace(const space::ContinuousSpace* space) const
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
// Bind the live bits to the mark bits of bitmaps for spaces that are never collected, ie
// the image. Mark that portion of the heap as immune.
virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -165,9 +157,6 @@ class MarkSweep : public GarbageCollector {
void ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor)
NO_THREAD_SAFETY_ANALYSIS;
- // Everything inside the immune range is assumed to be marked.
- void SetImmuneRange(mirror::Object* begin, mirror::Object* end);
-
void SweepSystemWeaks()
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
@@ -266,11 +255,6 @@ class MarkSweep : public GarbageCollector {
// whether or not we care about pauses.
size_t GetThreadCount(bool paused) const;
- // Returns true if an object is inside of the immune region (assumed to be marked).
- bool IsImmune(const mirror::Object* obj) const ALWAYS_INLINE {
- return obj >= immune_begin_ && obj < immune_end_;
- }
-
static void VerifyRootCallback(const mirror::Object* root, void* arg, size_t vreg,
const StackVisitor *visitor);
@@ -354,8 +338,7 @@ class MarkSweep : public GarbageCollector {
accounting::ObjectStack* mark_stack_;
// Immune range, every object inside the immune range is assumed to be marked.
- mirror::Object* immune_begin_;
- mirror::Object* immune_end_;
+ ImmuneRegion immune_region_;
// Parallel finger.
AtomicInteger atomic_finger_;
diff --git a/runtime/gc/collector/partial_mark_sweep.cc b/runtime/gc/collector/partial_mark_sweep.cc
index 8ec28f3..15f782a 100644
--- a/runtime/gc/collector/partial_mark_sweep.cc
+++ b/runtime/gc/collector/partial_mark_sweep.cc
@@ -39,7 +39,7 @@ void PartialMarkSweep::BindBitmaps() {
for (const auto& space : GetHeap()->GetContinuousSpaces()) {
if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {
CHECK(space->IsZygoteSpace());
- ImmuneSpace(space);
+ CHECK(immune_region_.AddContinuousSpace(space)) << "Failed to add space " << *space;
}
}
}
diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc
index 2da360f..89694d4 100644
--- a/runtime/gc/collector/semi_space.cc
+++ b/runtime/gc/collector/semi_space.cc
@@ -64,40 +64,6 @@ static constexpr bool kProtectFromSpace = true;
static constexpr bool kClearFromSpace = true;
static constexpr bool kStoreStackTraces = false;
-// TODO: Unduplicate logic.
-void SemiSpace::ImmuneSpace(space::ContinuousSpace* space) {
- // Bind live to mark bitmap if necessary.
- if (space->GetLiveBitmap() != space->GetMarkBitmap()) {
- CHECK(space->IsContinuousMemMapAllocSpace());
- space->AsContinuousMemMapAllocSpace()->BindLiveToMarkBitmap();
- }
- // Add the space to the immune region.
- if (immune_begin_ == nullptr) {
- DCHECK(immune_end_ == nullptr);
- immune_begin_ = reinterpret_cast<Object*>(space->Begin());
- immune_end_ = reinterpret_cast<Object*>(space->End());
- } else {
- const space::ContinuousSpace* prev_space = nullptr;
- // Find out if the previous space is immune.
- for (space::ContinuousSpace* cur_space : GetHeap()->GetContinuousSpaces()) {
- if (cur_space == space) {
- break;
- }
- prev_space = cur_space;
- }
- // If previous space was immune, then extend the immune region. Relies on continuous spaces
- // being sorted by Heap::AddContinuousSpace.
- if (prev_space != nullptr && IsImmuneSpace(prev_space)) {
- immune_begin_ = std::min(reinterpret_cast<Object*>(space->Begin()), immune_begin_);
- // Use Limit() instead of End() because otherwise if the
- // generational mode is enabled, the alloc space might expand
- // due to promotion and the sense of immunity may change in the
- // middle of a GC.
- immune_end_ = std::max(reinterpret_cast<Object*>(space->Limit()), immune_end_);
- }
- }
-}
-
void SemiSpace::BindBitmaps() {
timings_.StartSplit("BindBitmaps");
WriterMutexLock mu(self_, *Locks::heap_bitmap_lock_);
@@ -115,7 +81,7 @@ void SemiSpace::BindBitmaps() {
|| (generational_ && !whole_heap_collection_ &&
(space == GetHeap()->GetNonMovingSpace() ||
space == GetHeap()->GetPrimaryFreeListSpace()))) {
- ImmuneSpace(space);
+ CHECK(immune_region_.AddContinuousSpace(space)) << "Failed to add space " << *space;
}
}
}
@@ -130,8 +96,6 @@ SemiSpace::SemiSpace(Heap* heap, bool generational, const std::string& name_pref
: GarbageCollector(heap,
name_prefix + (name_prefix.empty() ? "" : " ") + "marksweep + semispace"),
mark_stack_(nullptr),
- immune_begin_(nullptr),
- immune_end_(nullptr),
is_large_object_space_immune_(false),
to_space_(nullptr),
to_space_live_bitmap_(nullptr),
@@ -150,8 +114,7 @@ void SemiSpace::InitializePhase() {
TimingLogger::ScopedSplit split("InitializePhase", &timings_);
mark_stack_ = heap_->mark_stack_.get();
DCHECK(mark_stack_ != nullptr);
- immune_begin_ = nullptr;
- immune_end_ = nullptr;
+ immune_region_.Reset();
is_large_object_space_immune_ = false;
saved_bytes_ = 0;
self_ = Thread::Current();
@@ -238,16 +201,10 @@ void SemiSpace::MarkingPhase() {
MarkReachableObjects();
}
-bool SemiSpace::IsImmuneSpace(const space::ContinuousSpace* space) const {
- return
- immune_begin_ <= reinterpret_cast<Object*>(space->Begin()) &&
- immune_end_ >= reinterpret_cast<Object*>(space->End());
-}
-
void SemiSpace::UpdateAndMarkModUnion() {
for (auto& space : heap_->GetContinuousSpaces()) {
// If the space is immune then we need to mark the references to other spaces.
- if (IsImmuneSpace(space)) {
+ if (immune_region_.ContainsSpace(space)) {
accounting::ModUnionTable* table = heap_->FindModUnionTableFromSpace(space);
if (table != nullptr) {
// TODO: Improve naming.
@@ -295,7 +252,8 @@ void SemiSpace::MarkReachableObjects() {
// enabled,) then we need to scan its live bitmap as roots
// (including the objects on the live stack which have just marked
// in the live bitmap above in MarkAllocStackAsLive().)
- if (IsImmuneSpace(space) && heap_->FindModUnionTableFromSpace(space) == nullptr) {
+ if (immune_region_.ContainsSpace(space) &&
+ heap_->FindModUnionTableFromSpace(space) == nullptr) {
DCHECK(generational_ && !whole_heap_collection_ &&
(space == GetHeap()->GetNonMovingSpace() || space == GetHeap()->GetPrimaryFreeListSpace()));
accounting::SpaceBitmap* live_bitmap = space->GetLiveBitmap();
@@ -556,7 +514,7 @@ Object* SemiSpace::MarkObject(Object* obj) {
}
}
Object* forward_address = obj;
- if (obj != nullptr && !IsImmune(obj)) {
+ if (obj != nullptr && !immune_region_.ContainsObject(obj)) {
if (from_space_->HasAddress(obj)) {
forward_address = GetForwardingAddressInFromSpace(obj);
// If the object has already been moved, return the new forward address.
@@ -634,7 +592,7 @@ void SemiSpace::SweepSystemWeaks() {
}
bool SemiSpace::ShouldSweepSpace(space::ContinuousSpace* space) const {
- return space != from_space_ && space != to_space_ && !IsImmuneSpace(space);
+ return space != from_space_ && space != to_space_ && !immune_region_.ContainsSpace(space);
}
void SemiSpace::Sweep(bool swap_bitmaps) {
@@ -744,7 +702,7 @@ void SemiSpace::ProcessMarkStack() {
inline Object* SemiSpace::GetMarkedForwardAddress(mirror::Object* obj) const
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
// All immune objects are assumed marked.
- if (IsImmune(obj)) {
+ if (immune_region_.ContainsObject(obj)) {
return obj;
}
if (from_space_->HasAddress(obj)) {
diff --git a/runtime/gc/collector/semi_space.h b/runtime/gc/collector/semi_space.h
index c164c5f..be7ec05 100644
--- a/runtime/gc/collector/semi_space.h
+++ b/runtime/gc/collector/semi_space.h
@@ -18,10 +18,10 @@
#define ART_RUNTIME_GC_COLLECTOR_SEMI_SPACE_H_
#include "atomic.h"
-#include "barrier.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
+#include "immune_region.h"
#include "object_callbacks.h"
#include "offsets.h"
#include "UniquePtr.h"
@@ -104,12 +104,6 @@ class SemiSpace : public GarbageCollector {
void MarkRoots()
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
- // Make a space immune, immune spaces have all live objects marked - that is the mark and
- // live bitmaps are bound together.
- void ImmuneSpace(space::ContinuousSpace* space)
- EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
// Bind the live bits to the mark bits of bitmaps for spaces that are never collected, ie
// the image. Mark that portion of the heap as immune.
virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -174,13 +168,6 @@ class SemiSpace : public GarbageCollector {
// Returns true if we should sweep the space.
virtual bool ShouldSweepSpace(space::ContinuousSpace* space) const;
- // Returns true if an object is inside of the immune region (assumed to be marked).
- bool IsImmune(const mirror::Object* obj) const ALWAYS_INLINE {
- return obj >= immune_begin_ && obj < immune_end_;
- }
-
- bool IsImmuneSpace(const space::ContinuousSpace* space) const;
-
static void VerifyRootCallback(const mirror::Object* root, void* arg, size_t vreg,
const StackVisitor *visitor);
@@ -257,9 +244,8 @@ class SemiSpace : public GarbageCollector {
// object.
accounting::ObjectStack* mark_stack_;
- // Immune range, every object inside the immune range is assumed to be marked.
- mirror::Object* immune_begin_;
- mirror::Object* immune_end_;
+ // Immune region, every object inside the immune region is assumed to be marked.
+ ImmuneRegion immune_region_;
// If true, the large object space is immune.
bool is_large_object_space_immune_;
diff --git a/runtime/gc/space/space-inl.h b/runtime/gc/space/space-inl.h
index 02a63f6..3a715ab 100644
--- a/runtime/gc/space/space-inl.h
+++ b/runtime/gc/space/space-inl.h
@@ -21,6 +21,7 @@
#include "dlmalloc_space.h"
#include "image_space.h"
+#include "large_object_space.h"
namespace art {
namespace gc {