author     Hiroshi Yamauchi <yamauchi@google.com>   2015-02-09 17:11:42 -0800
committer  Hiroshi Yamauchi <yamauchi@google.com>   2015-03-03 17:33:18 -0800
commit     e15ea086439b41a805d164d2beb07b4ba96aaa97 (patch)
tree       465ee3780acd8b7cb35c8a7f42a1f3c5df3d26ec /runtime/gc/collector
parent     0b25c71ac93fb10c484dbacb9e23db505a8e2353 (diff)
Reserve bits in the lock word for read barriers.

This prepares for the CC collector to use the standard object header
model by storing the read barrier state in the lock word.

Bug: 19355854
Bug: 12687968
Change-Id: Ia7585662dd2cebf0479a3e74f734afe5059fb70f
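For context, here is a minimal, self-contained sketch of the idea the
commit message describes. The bit layout, constants, and helper names
below are hypothetical illustrations, not ART's actual lock_word.h:
the point is simply that a couple of high bits of the 32-bit lock word
can be reserved for read barrier state, next to the existing state
bits, so the concurrent copying (CC) collector can keep per-object GC
state in the standard header word.

    // lock_word_sketch.cc -- hypothetical layout and names, for illustration only.
    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    class LockWord {
     public:
      // Assumed layout: [31:30] state | [29:28] read barrier state | [27:0] payload.
      enum State : uint32_t {
        kThinOrUnlocked = 0,     // Thin lock (owner thread id + count) or unlocked.
        kFatLocked = 1,          // Inflated: payload points to a Monitor.
        kHashCode = 2,           // Identity hash code stored inline.
        kForwardingAddress = 3,  // GC forwarding address (used by copying collectors).
      };

      static constexpr uint32_t kStateShift = 30;
      static constexpr uint32_t kRbStateShift = 28;
      static constexpr uint32_t kRbStateMask = 0x3u << kRbStateShift;
      static constexpr uint32_t kAlignmentShift = 3;  // Objects are 8-byte aligned.

      LockWord() : value_(0) {}

      static LockWord Default() { return LockWord(); }
      static bool IsDefault(LockWord lw) { return lw.value_ == 0; }

      // Object alignment frees the low address bits, so the shifted
      // address fits beneath the two state bits.
      static LockWord FromForwardingAddress(size_t target) {
        assert((target & ((size_t{1} << kAlignmentShift) - 1)) == 0);
        LockWord lw;
        lw.value_ = (static_cast<uint32_t>(kForwardingAddress) << kStateShift) |
                    static_cast<uint32_t>(target >> kAlignmentShift);
        return lw;
      }

      State GetState() const { return static_cast<State>(value_ >> kStateShift); }
      uint32_t ReadBarrierState() const {
        return (value_ & kRbStateMask) >> kRbStateShift;
      }
      size_t ForwardingAddress() const {
        assert(GetState() == kForwardingAddress);
        return static_cast<size_t>(value_ & ~(0x3u << kStateShift)) << kAlignmentShift;
      }

     private:
      uint32_t value_;
    };

    int main() {
      LockWord lw = LockWord::Default();
      assert(LockWord::IsDefault(lw));
      lw = LockWord::FromForwardingAddress(0x1000);
      assert(lw.GetState() == LockWord::kForwardingAddress);
      std::printf("forwarded to 0x%zx\n", lw.ForwardingAddress());
      return 0;
    }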
Diffstat (limited to 'runtime/gc/collector')
 runtime/gc/collector/concurrent_copying.cc | 8 --------
 runtime/gc/collector/concurrent_copying.h  | 2 --
 runtime/gc/collector/mark_compact.cc       | 4 ++--
 3 files changed, 2 insertions(+), 12 deletions(-)
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index 734c935..057eed1 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -484,14 +484,6 @@ inline mirror::Object* ConcurrentCopying::GetFwdPtr(mirror::Object* from_ref) {
}
}
-inline void ConcurrentCopying::SetFwdPtr(mirror::Object* from_ref, mirror::Object* to_ref) {
- DCHECK(region_space_->IsInFromSpace(from_ref));
- DCHECK(region_space_->IsInToSpace(to_ref) || heap_->GetNonMovingSpace()->HasAddress(to_ref));
- LockWord lw = from_ref->GetLockWord(false);
- DCHECK_NE(lw.GetState(), LockWord::kForwardingAddress);
- from_ref->SetLockWord(LockWord::FromForwardingAddress(reinterpret_cast<size_t>(to_ref)), false);
-}
-
// The following visitors are used to verify that there are no
// references to the from-space left after marking.
class ConcurrentCopyingVerifyNoFromSpaceRefsVisitor {
diff --git a/runtime/gc/collector/concurrent_copying.h b/runtime/gc/collector/concurrent_copying.h
index d0e0446..bbb551a 100644
--- a/runtime/gc/collector/concurrent_copying.h
+++ b/runtime/gc/collector/concurrent_copying.h
@@ -230,8 +230,6 @@ class ConcurrentCopying : public GarbageCollector {
bool IsOnAllocStack(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
mirror::Object* GetFwdPtr(mirror::Object* from_ref)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void SetFwdPtr(mirror::Object* from_ref, mirror::Object* to_ref)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void FlipThreadRoots() LOCKS_EXCLUDED(Locks::mutator_lock_);
void SwapStacks(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void RecordLiveStackFreezeSize(Thread* self);
diff --git a/runtime/gc/collector/mark_compact.cc b/runtime/gc/collector/mark_compact.cc
index 234bce5..d1ce0bc 100644
--- a/runtime/gc/collector/mark_compact.cc
+++ b/runtime/gc/collector/mark_compact.cc
@@ -91,7 +91,7 @@ void MarkCompact::ForwardObject(mirror::Object* obj) {
const size_t alloc_size = RoundUp(obj->SizeOf(), space::BumpPointerSpace::kAlignment);
LockWord lock_word = obj->GetLockWord(false);
// If we have a non-empty lock word, store it and restore it later.
- if (lock_word.GetValue() != LockWord().GetValue()) {
+ if (!LockWord::IsDefault(lock_word)) {
// Set the bit in the bitmap so that we know to restore it later.
objects_with_lockword_->Set(obj);
lock_words_to_restore_.push_back(lock_word);
@@ -509,7 +509,7 @@ void MarkCompact::MoveObject(mirror::Object* obj, size_t len) {
// Use memmove since there may be overlap.
memmove(reinterpret_cast<void*>(dest_addr), reinterpret_cast<const void*>(obj), len);
// Restore the saved lock word if needed.
- LockWord lock_word;
+ LockWord lock_word = LockWord::Default();
if (UNLIKELY(objects_with_lockword_->Test(obj))) {
lock_word = lock_words_to_restore_.front();
lock_words_to_restore_.pop_front();
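The two mark_compact.cc hunks replace a raw value comparison and a
default-constructed LockWord with the explicit LockWord::Default() and
LockWord::IsDefault() helpers. A self-contained sketch of the
surrounding save/restore pattern follows; the structure and names are
hypothetical simplifications of MarkCompact::ForwardObject and
MarkCompact::MoveObject, not the real code:

    // mark_compact_sketch.cc -- hypothetical simplification, for illustration only.
    #include <cstdint>
    #include <deque>

    struct LockWord {
      uint32_t value = 0;
      static LockWord Default() { return LockWord(); }
      static bool IsDefault(LockWord lw) { return lw.value == 0; }
    };

    struct Object {
      LockWord lock_word;
      bool lock_word_saved = false;  // Stand-in for the objects_with_lockword_ bitmap.
    };

    static std::deque<LockWord> lock_words_to_restore;

    // Before the move: stash any lock word that carries state (thin
    // lock, hash code, ...); default words need no bookkeeping.
    void SaveLockWord(Object* obj) {
      if (!LockWord::IsDefault(obj->lock_word)) {
        obj->lock_word_saved = true;
        lock_words_to_restore.push_back(obj->lock_word);
      }
    }

    // After the object's bytes have been moved: rewrite the header with
    // either the saved word or an explicit Default(), never a stale value.
    void RestoreLockWord(Object* src, Object* dest) {
      LockWord lock_word = LockWord::Default();
      if (src->lock_word_saved) {
        lock_word = lock_words_to_restore.front();
        lock_words_to_restore.pop_front();
      }
      dest->lock_word = lock_word;
    }

    int main() {
      Object a;
      a.lock_word.value = 0xDEADu;  // Pretend this object has a hash/thin lock.
      Object dest;
      SaveLockWord(&a);
      RestoreLockWord(&a, &dest);
      return dest.lock_word.value == 0xDEADu ? 0 : 1;
    }

Centralizing the notion of a "default" lock word in helpers means call
sites like these keep working even if the default encoding changes,
for instance once some bits of the word are reserved for read barrier
state as this change prepares for.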