author     Ian Rogers <irogers@google.com>   2014-01-06 12:55:46 -0800
committer  Ian Rogers <irogers@google.com>   2014-02-06 23:20:27 -0800
commit     ef7d42fca18c16fbaf103822ad16f23246e2905d
tree       c67eea52a349c2ea7f2c3bdda8e73933c05531a8 /runtime/mirror/object-inl.h
parent     822115a225185d2896607eb08d70ce5c7099adef
Object model changes to support 64-bit.
Modify mirror objects so that references between them use an ObjectReference
value type rather than an Object*, so that functionality to compress larger
references can be captured in the ObjectReference implementation.
ObjectReferences are 32-bit, and all other aspects of object layout remain
as they are currently.
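
As a rough sketch of the value type (illustrative only; the names mirror the
HeapReference/ObjectReference usage visible in the diff below, but the
"compression" shown here is just an identity placeholder, not a claim about
ART's eventual scheme):

    #include <stdint.h>

    namespace art {
    namespace mirror {

    class Object;

    // 32-bit value type holding a reference between mirror objects. Hiding the
    // representation behind this type means a compression scheme (e.g. shifting
    // a heap offset) can later be swapped in without changing object layout.
    template<class MirrorType>
    class ObjectReference {
     public:
      MirrorType* AsMirrorPtr() const {
        // Identity "compression": the 32-bit field holds the pointer bits directly.
        return reinterpret_cast<MirrorType*>(static_cast<uintptr_t>(reference_));
      }

      static ObjectReference FromMirrorPtr(MirrorType* ptr) {
        return ObjectReference(static_cast<uint32_t>(reinterpret_cast<uintptr_t>(ptr)));
      }

      void Assign(MirrorType* ptr) {
        reference_ = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(ptr));
      }

      uint32_t reference_;  // Always 32 bits, even on 64-bit hosts.

     private:
      explicit ObjectReference(uint32_t ref) : reference_(ref) {}
    };

    }  // namespace mirror
    }  // namespace art

Note that on a 64-bit host a real scheme must make pointers fit in 32 bits
(for example via a heap base offset); the sketch simply truncates, which is
only safe when the heap sits in the low 4 GB of the address space.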
Expand fields in objects holding pointers so they can hold 64-bit pointers.
It's expected that the size of these fields will come down by improving where
we hold compiler meta-data.
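
For example (an illustrative helper pair, not ART's actual accessors), a
native pointer can be stored in a uint64_t field so the field width is
identical on 32- and 64-bit builds:

    #include <stdint.h>

    // Widening a native-pointer field to 64 bits keeps object layout identical
    // across 32-bit and 64-bit targets; on 32-bit builds the top half is zero.
    template<typename T>
    static inline uint64_t PointerToField64(T* ptr) {
      return static_cast<uint64_t>(reinterpret_cast<uintptr_t>(ptr));
    }

    template<typename T>
    static inline T* Field64ToPointer(uint64_t field) {
      return reinterpret_cast<T*>(static_cast<uintptr_t>(field));
    }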
Stub out x86_64 architecture specific runtime implementation.
Modify OutputStream so that reads and writes are of unsigned quantities.
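
A hypothetical sketch of the direction (the actual OutputStream signatures
are not part of this diff):

    #include <stddef.h>
    #include <stdint.h>

    // Sketch: byte counts become size_t and buffers become uint8_t*, so sizes
    // are unsigned end to end and cannot go negative on 64-bit hosts.
    class OutputStream {
     public:
      virtual ~OutputStream() {}
      virtual bool WriteFully(const uint8_t* buffer, size_t byte_count) = 0;
    };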
Make the use of portable or quick code more explicit.
Templatize AtomicInteger to support more than just int32_t as a type.
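
A minimal sketch of the templatization (illustrative; ART's real wrapper
lives in its atomic header and carries more operations):

    #include <stdint.h>

    // The wrapper was previously hard-wired to int32_t; a template parameter
    // lets the same class hold wider types on 64-bit hosts. Built on the GCC
    // __sync builtins that the CasField* methods in the diff below also use.
    template<typename T>
    class Atomic {
     public:
      explicit Atomic(T value) : value_(value) {}

      T Load() const { return value_; }

      bool CompareAndSwap(T expected, T desired) {
        return __sync_bool_compare_and_swap(&value_, expected, desired);
      }

      T FetchAndAdd(T delta) { return __sync_fetch_and_add(&value_, delta); }

     private:
      volatile T value_;
    };

    typedef Atomic<int32_t> AtomicInteger;  // Keeps the existing name working.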
Add missing annotalysis information on the mutator lock, and fix issues
relating to it.
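
(Annotalysis is ART's name for the Clang thread-safety analysis; the
SHARED_LOCKS_REQUIRED annotations visible in the diff below expand, roughly,
to something like this sketch, though the exact spelling is an assumption:)

    // Simplified from ART's macros. With the Clang thread-safety analysis
    // enabled, the attribute lets the compiler verify statically that callers
    // hold the mutator lock in shared mode; elsewhere it compiles to nothing.
    #if defined(__clang__)
    #define SHARED_LOCKS_REQUIRED(...) __attribute__((shared_locks_required(__VA_ARGS__)))
    #else
    #define SHARED_LOCKS_REQUIRED(...)
    #endif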
Refactor the array copy implementation so that it is shared between System
and other uses elsewhere in the runtime.
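
The shared primitive-array helper could look roughly like this (a sketch; a
memmove-based copy also handles the overlapping ranges System.arraycopy must
support):

    #include <stddef.h>
    #include <string.h>

    // Illustrative shared copy routine for primitive arrays: one implementation
    // usable by the System.arraycopy entry point and by other runtime callers.
    // memmove is used so overlapping source/destination ranges copy correctly.
    template<typename T>
    static inline void ArrayCopy(T* dst, size_t dst_pos,
                                 const T* src, size_t src_pos, size_t count) {
      memmove(dst + dst_pos, src + src_pos, count * sizeof(T));
    }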
Fix numerous 64-bit build issues.
Change-Id: I1a5694c251a42c9eff71084dfdd4b51fff716822
Diffstat (limited to 'runtime/mirror/object-inl.h')
-rw-r--r--  runtime/mirror/object-inl.h  165
1 file changed, 106 insertions, 59 deletions
diff --git a/runtime/mirror/object-inl.h b/runtime/mirror/object-inl.h
index 9161bc5..afa4112 100644
--- a/runtime/mirror/object-inl.h
+++ b/runtime/mirror/object-inl.h
@@ -32,19 +32,18 @@ namespace art {
 namespace mirror {
 
-inline Class* Object::GetClass() const {
-  return GetFieldObject<Class*>(OFFSET_OF_OBJECT_MEMBER(Object, klass_), false);
+inline Class* Object::GetClass() {
+  return GetFieldObject<Class>(OFFSET_OF_OBJECT_MEMBER(Object, klass_), false);
 }
 
 inline void Object::SetClass(Class* new_klass) {
-  // new_klass may be NULL prior to class linker initialization
-  // We don't mark the card since the class is guaranteed to be referenced from another location.
-  // Proxy classes are held live by the class loader, and other classes are roots of the class
-  // linker.
-  SetFieldPtr(OFFSET_OF_OBJECT_MEMBER(Object, klass_), new_klass, false, false);
+  // new_klass may be NULL prior to class linker initialization.
+  // We don't mark the card as this occurs as part of object allocation. Not all objects have
+  // backing cards, such as large objects.
+  SetFieldObjectWithoutWriteBarrier(OFFSET_OF_OBJECT_MEMBER(Object, klass_), new_klass, false, false);
 }
 
-inline LockWord Object::GetLockWord() const {
+inline LockWord Object::GetLockWord() {
   return LockWord(GetField32(OFFSET_OF_OBJECT_MEMBER(Object, monitor_), true));
 }
@@ -85,19 +84,19 @@ inline void Object::Wait(Thread* self, int64_t ms, int32_t ns) {
   Monitor::Wait(self, this, ms, ns, true, kTimedWaiting);
 }
 
-inline bool Object::VerifierInstanceOf(const Class* klass) const {
+inline bool Object::VerifierInstanceOf(Class* klass) {
   DCHECK(klass != NULL);
   DCHECK(GetClass() != NULL);
   return klass->IsInterface() || InstanceOf(klass);
 }
 
-inline bool Object::InstanceOf(const Class* klass) const {
+inline bool Object::InstanceOf(Class* klass) {
   DCHECK(klass != NULL);
   DCHECK(GetClass() != NULL);
   return klass->IsAssignableFrom(GetClass());
 }
 
-inline bool Object::IsClass() const {
+inline bool Object::IsClass() {
   Class* java_lang_Class = GetClass()->GetClass();
   return GetClass() == java_lang_Class;
 }
@@ -107,12 +106,7 @@ inline Class* Object::AsClass() {
   return down_cast<Class*>(this);
 }
 
-inline const Class* Object::AsClass() const {
-  DCHECK(IsClass());
-  return down_cast<const Class*>(this);
-}
-
-inline bool Object::IsObjectArray() const {
+inline bool Object::IsObjectArray() {
   return IsArrayInstance() && !GetClass()->GetComponentType()->IsPrimitive();
 }
@@ -122,17 +116,11 @@ inline ObjectArray<T>* Object::AsObjectArray() {
   return down_cast<ObjectArray<T>*>(this);
 }
 
-template<class T>
-inline const ObjectArray<T>* Object::AsObjectArray() const {
-  DCHECK(IsObjectArray());
-  return down_cast<const ObjectArray<T>*>(this);
-}
-
-inline bool Object::IsArrayInstance() const {
+inline bool Object::IsArrayInstance() {
   return GetClass()->IsArrayClass();
 }
 
-inline bool Object::IsArtField() const {
+inline bool Object::IsArtField() {
   return GetClass()->IsArtFieldClass();
 }
@@ -141,12 +129,7 @@ inline ArtField* Object::AsArtField() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_
   return down_cast<ArtField*>(this);
 }
 
-inline const ArtField* Object::AsArtField() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-  DCHECK(IsArtField());
-  return down_cast<const ArtField*>(this);
-}
-
-inline bool Object::IsArtMethod() const {
+inline bool Object::IsArtMethod() {
   return GetClass()->IsArtMethodClass();
 }
@@ -155,12 +138,7 @@ inline ArtMethod* Object::AsArtMethod() {
   return down_cast<ArtMethod*>(this);
 }
 
-inline const ArtMethod* Object::AsArtMethod() const {
-  DCHECK(IsArtMethod());
-  return down_cast<const ArtMethod*>(this);
-}
-
-inline bool Object::IsReferenceInstance() const {
+inline bool Object::IsReferenceInstance() {
   return GetClass()->IsReferenceClass();
 }
@@ -169,11 +147,6 @@ inline Array* Object::AsArray() {
   return down_cast<Array*>(this);
 }
 
-inline const Array* Object::AsArray() const {
-  DCHECK(IsArrayInstance());
-  return down_cast<const Array*>(this);
-}
-
 inline BooleanArray* Object::AsBooleanArray() {
   DCHECK(GetClass()->IsArrayClass());
   DCHECK(GetClass()->GetComponentType()->IsPrimitiveBoolean());
@@ -186,6 +159,13 @@ inline ByteArray* Object::AsByteArray() {
   return down_cast<ByteArray*>(this);
 }
 
+inline ByteArray* Object::AsByteSizedArray() {
+  DCHECK(GetClass()->IsArrayClass());
+  DCHECK(GetClass()->GetComponentType()->IsPrimitiveByte() ||
+         GetClass()->GetComponentType()->IsPrimitiveBoolean());
+  return down_cast<ByteArray*>(this);
+}
+
 inline CharArray* Object::AsCharArray() {
   DCHECK(GetClass()->IsArrayClass());
   DCHECK(GetClass()->GetComponentType()->IsPrimitiveChar());
@@ -198,6 +178,13 @@ inline ShortArray* Object::AsShortArray() {
   return down_cast<ShortArray*>(this);
 }
 
+inline ShortArray* Object::AsShortSizedArray() {
+  DCHECK(GetClass()->IsArrayClass());
+  DCHECK(GetClass()->GetComponentType()->IsPrimitiveShort() ||
+         GetClass()->GetComponentType()->IsPrimitiveChar());
+  return down_cast<ShortArray*>(this);
+}
+
 inline IntArray* Object::AsIntArray() {
   DCHECK(GetClass()->IsArrayClass());
   DCHECK(GetClass()->GetComponentType()->IsPrimitiveInt() ||
@@ -222,23 +209,23 @@ inline Throwable* Object::AsThrowable() {
   return down_cast<Throwable*>(this);
 }
 
-inline bool Object::IsWeakReferenceInstance() const {
+inline bool Object::IsWeakReferenceInstance() {
   return GetClass()->IsWeakReferenceClass();
 }
 
-inline bool Object::IsSoftReferenceInstance() const {
+inline bool Object::IsSoftReferenceInstance() {
   return GetClass()->IsSoftReferenceClass();
 }
 
-inline bool Object::IsFinalizerReferenceInstance() const {
+inline bool Object::IsFinalizerReferenceInstance() {
   return GetClass()->IsFinalizerReferenceClass();
 }
 
-inline bool Object::IsPhantomReferenceInstance() const {
+inline bool Object::IsPhantomReferenceInstance() {
   return GetClass()->IsPhantomReferenceClass();
 }
 
-inline size_t Object::SizeOf() const {
+inline size_t Object::SizeOf() {
   size_t result;
   if (IsArrayInstance()) {
     result = AsArray()->SizeOf();
@@ -253,13 +240,13 @@ inline size_t Object::SizeOf() const {
   return result;
 }
 
-inline uint32_t Object::GetField32(MemberOffset field_offset, bool is_volatile) const {
+inline uint32_t Object::GetField32(MemberOffset field_offset, bool is_volatile) {
   VerifyObject(this);
   const byte* raw_addr = reinterpret_cast<const byte*>(this) + field_offset.Int32Value();
   const int32_t* word_addr = reinterpret_cast<const int32_t*>(raw_addr);
   if (UNLIKELY(is_volatile)) {
     int32_t result = *(reinterpret_cast<volatile int32_t*>(const_cast<int32_t*>(word_addr)));
-    QuasiAtomic::MembarLoadLoad();
+    QuasiAtomic::MembarLoadLoad();  // Ensure volatile loads don't re-order.
     return result;
   } else {
     return *word_addr;
@@ -276,7 +263,7 @@ inline void Object::SetField32(MemberOffset field_offset, uint32_t new_value, bo
   if (UNLIKELY(is_volatile)) {
     QuasiAtomic::MembarStoreStore();  // Ensure this store occurs after others in the queue.
     *word_addr = new_value;
-    QuasiAtomic::MembarStoreLoad();  // Ensure this store occurs before any loads.
+    QuasiAtomic::MembarStoreLoad();  // Ensure this store occurs before any volatile loads.
   } else {
     *word_addr = new_value;
   }
@@ -289,28 +276,31 @@ inline bool Object::CasField32(MemberOffset field_offset, uint32_t old_value, ui
   return __sync_bool_compare_and_swap(addr, old_value, new_value);
 }
 
-inline uint64_t Object::GetField64(MemberOffset field_offset, bool is_volatile) const {
+inline uint64_t Object::GetField64(MemberOffset field_offset, bool is_volatile) {
   VerifyObject(this);
   const byte* raw_addr = reinterpret_cast<const byte*>(this) + field_offset.Int32Value();
   const int64_t* addr = reinterpret_cast<const int64_t*>(raw_addr);
   if (UNLIKELY(is_volatile)) {
     uint64_t result = QuasiAtomic::Read64(addr);
-    QuasiAtomic::MembarLoadLoad();
+    QuasiAtomic::MembarLoadLoad();  // Ensure volatile loads don't re-order.
     return result;
   } else {
     return *addr;
   }
 }
 
-inline void Object::SetField64(MemberOffset field_offset, uint64_t new_value, bool is_volatile) {
-  VerifyObject(this);
+inline void Object::SetField64(MemberOffset field_offset, uint64_t new_value, bool is_volatile,
+                               bool this_is_valid) {
+  if (this_is_valid) {
+    VerifyObject(this);
+  }
   byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
   int64_t* addr = reinterpret_cast<int64_t*>(raw_addr);
   if (UNLIKELY(is_volatile)) {
     QuasiAtomic::MembarStoreStore();  // Ensure this store occurs after others in the queue.
     QuasiAtomic::Write64(addr, new_value);
     if (!QuasiAtomic::LongAtomicsUseMutexes()) {
-      QuasiAtomic::MembarStoreLoad();  // Ensure this store occurs before any loads.
+      QuasiAtomic::MembarStoreLoad();  // Ensure this store occurs before any volatile loads.
     } else {
       // Fence from from mutex is enough.
     }
@@ -319,12 +309,69 @@ inline void Object::SetField64(MemberOffset field_offset, uint64_t new_value, bo
   }
 }
 
-inline void Object::WriteBarrierField(const Object* dst, MemberOffset field_offset,
-                                      const Object* new_value) {
-  Runtime::Current()->GetHeap()->WriteBarrierField(dst, field_offset, new_value);
+inline bool Object::CasField64(MemberOffset field_offset, uint64_t old_value, uint64_t new_value) {
+  VerifyObject(this);
+  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
+  volatile uint64_t* addr = reinterpret_cast<volatile uint64_t*>(raw_addr);
+  return __sync_bool_compare_and_swap(addr, old_value, new_value);
+}
+
+template<class T>
+inline T* Object::GetFieldObject(MemberOffset field_offset, bool is_volatile) {
+  VerifyObject(this);
+  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
+  HeapReference<T>* objref_addr = reinterpret_cast<HeapReference<T>*>(raw_addr);
+  HeapReference<T> objref = *objref_addr;
+
+  if (UNLIKELY(is_volatile)) {
+    QuasiAtomic::MembarLoadLoad();  // Ensure loads don't re-order.
+  }
+  T* result = objref.AsMirrorPtr();
+  VerifyObject(result);
+  return result;
+}
+
+inline void Object::SetFieldObjectWithoutWriteBarrier(MemberOffset field_offset, Object* new_value,
+                                                      bool is_volatile, bool this_is_valid) {
+  if (this_is_valid) {
+    VerifyObject(this);
+  }
+  VerifyObject(new_value);
+  HeapReference<Object> objref(HeapReference<Object>::FromMirrorPtr(new_value));
+  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
+  HeapReference<Object>* objref_addr = reinterpret_cast<HeapReference<Object>*>(raw_addr);
+  if (UNLIKELY(is_volatile)) {
+    QuasiAtomic::MembarStoreStore();  // Ensure this store occurs after others in the queue.
+    objref_addr->Assign(new_value);
+    QuasiAtomic::MembarStoreLoad();  // Ensure this store occurs before any loads.
+  } else {
+    objref_addr->Assign(new_value);
+  }
+}
+
+inline void Object::SetFieldObject(MemberOffset field_offset, Object* new_value, bool is_volatile,
+                                   bool this_is_valid) {
+  SetFieldObjectWithoutWriteBarrier(field_offset, new_value, is_volatile, this_is_valid);
+  if (new_value != nullptr) {
+    CheckFieldAssignment(field_offset, new_value);
+    Runtime::Current()->GetHeap()->WriteBarrierField(this, field_offset, new_value);
+  }
+}
+
+inline bool Object::CasFieldObject(MemberOffset field_offset, Object* old_value, Object* new_value) {
+  VerifyObject(this);
+  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
+  volatile uint32_t* addr = reinterpret_cast<volatile uint32_t*>(raw_addr);
+  HeapReference<Object> old_ref(HeapReference<Object>::FromMirrorPtr(old_value));
+  HeapReference<Object> new_ref(HeapReference<Object>::FromMirrorPtr(new_value));
+  bool success = __sync_bool_compare_and_swap(addr, old_ref.reference_, new_ref.reference_);
+  if (success) {
+    Runtime::Current()->GetHeap()->WriteBarrierField(this, field_offset, new_value);
+  }
+  return success;
 }
 
-inline void Object::VerifyObject(const Object* obj) {
+inline void Object::VerifyObject(Object* obj) {
   if (kIsDebugBuild) {
     Runtime::Current()->GetHeap()->VerifyObject(obj);
   }
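
As a usage note (a sketch, assuming the mirror::Object declarations above are
in scope): CasFieldObject only issues the write barrier when the swap
succeeds, so a caller that must win the race typically re-reads the field and
retries:

    // Hypothetical caller-side retry loop for the CasFieldObject added above.
    // Real callers would also check bail-out conditions (e.g. thread suspension).
    static void SetFieldObjectAtomically(art::mirror::Object* obj,
                                         art::MemberOffset offset,
                                         art::mirror::Object* new_value) {
      art::mirror::Object* expected;
      do {
        expected = obj->GetFieldObject<art::mirror::Object>(offset, false);
      } while (!obj->CasFieldObject(offset, expected, new_value));
    }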