author     Mingyao Yang <mingyao@google.com>   2014-05-15 17:02:16 -0700
committer  Ian Rogers <irogers@google.com>     2014-07-11 17:17:10 -0700
commit     98d1cc8033251c93786e2fa8c59a2e555a9493be (patch)
tree       f0a76b8fff060ee484af09028da65a8339d57057 /runtime/mirror
parent     aebf3cda094f34cf846d19a7724bdc8005267c95 (diff)
Improve performance of invokevirtual/invokeinterface with embedded imt/vtable
Add an embedded version of the imt/vtable into the class object. Both tables
start at a fixed offset within the class object, so the method/entry point can
be loaded directly from the class object for invokeinterface/invokevirtual.

Bug: 8142917
Change-Id: I4240d58cfbe9250107c95c0708c036854c455968
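
To make the optimization concrete, here is a minimal standalone C++ sketch contrasting the two dispatch chains. FakeMethod, OldClass, NewClass, the 8-entry demo vtable and the plain pointers are illustrative assumptions, not the real mirror::Class layout; only the idea comes from the commit: the embedded tables sit at a statically known offset from the class pointer, so invokevirtual/invokeinterface can load the target method without first chasing a separate ObjectArray<ArtMethod>.

// Standalone sketch; names and layouts are illustrative assumptions,
// not the real ART object model.
#include <cstddef>
#include <cstdint>
#include <cstdio>

struct FakeMethod {
  const void* quick_entry_point;  // code pointer the caller jumps to
};

// Old layout: the vtable is a separate heap-allocated array object.
struct OldClass {
  struct VTableArray {
    int32_t length;
    FakeMethod** elements;
  };
  VTableArray* vtable;  // one extra dependent load per virtual dispatch
};

// New layout: IMT and vtable entries live at fixed offsets inside the class
// object itself, so the method is reachable straight from the class pointer.
constexpr size_t kImtSize = 64;          // mirrors Class::kImtSize in the patch
struct NewClass {
  FakeMethod* embedded_imt[kImtSize];    // fixed-size interface method table
  FakeMethod* embedded_vtable[8];        // variable-size in ART; fixed for the demo
};

// invokevirtual, old style: class -> vtable array -> entry -> code.
const void* DispatchOld(const OldClass* klass, uint32_t vtable_index) {
  return klass->vtable->elements[vtable_index]->quick_entry_point;
}

// invokevirtual, new style: class + fixed offset -> entry -> code.
const void* DispatchNew(const NewClass* klass, uint32_t vtable_index) {
  return klass->embedded_vtable[vtable_index]->quick_entry_point;
}

int main() {
  FakeMethod m{reinterpret_cast<const void*>(0x1234)};
  FakeMethod* slots[8] = {&m};
  OldClass::VTableArray arr{8, slots};
  OldClass old_klass{&arr};
  NewClass new_klass{};
  new_klass.embedded_vtable[0] = &m;
  std::printf("old chain: %p\n", DispatchOld(&old_klass, 0));
  std::printf("new chain: %p\n", DispatchNew(&new_klass, 0));
  return 0;
}

The shorter load chain is what the diffs below implement: ComputeClassSize() reserves the trailing space, PopulateEmbeddedImtAndVTable() fills it during linking, and GetEmbeddedImTableEntry()/SetEmbeddedImTableEntry()/SetEmbeddedVTableEntry() access it by offset.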
Diffstat (limited to 'runtime/mirror')
-rw-r--r--  runtime/mirror/array-inl.h              5
-rw-r--r--  runtime/mirror/array.h                  3
-rw-r--r--  runtime/mirror/art_field-inl.h          5
-rw-r--r--  runtime/mirror/art_field.h             26
-rw-r--r--  runtime/mirror/art_method-inl.h        23
-rw-r--r--  runtime/mirror/art_method.cc            8
-rw-r--r--  runtime/mirror/art_method.h            49
-rw-r--r--  runtime/mirror/class-inl.h            112
-rw-r--r--  runtime/mirror/class.cc               137
-rw-r--r--  runtime/mirror/class.h                163
-rw-r--r--  runtime/mirror/class_loader.h           6
-rw-r--r--  runtime/mirror/dex_cache-inl.h          5
-rw-r--r--  runtime/mirror/dex_cache.h             20
-rw-r--r--  runtime/mirror/iftable.h                2
-rw-r--r--  runtime/mirror/object-inl.h             6
-rw-r--r--  runtime/mirror/object.h                45
-rw-r--r--  runtime/mirror/object_array.h           7
-rw-r--r--  runtime/mirror/object_test.cc           2
-rw-r--r--  runtime/mirror/proxy.h                 22
-rw-r--r--  runtime/mirror/stack_trace_element.h    2
-rw-r--r--  runtime/mirror/string-inl.h             6
-rw-r--r--  runtime/mirror/string.h                22
22 files changed, 503 insertions(+), 173 deletions(-)
diff --git a/runtime/mirror/array-inl.h b/runtime/mirror/array-inl.h
index 43bdf49..f3c8250 100644
--- a/runtime/mirror/array-inl.h
+++ b/runtime/mirror/array-inl.h
@@ -27,6 +27,11 @@
namespace art {
namespace mirror {
+inline uint32_t Array::ClassSize() {
+ uint32_t vtable_entries = Object::kVTableLength;
+ return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0);
+}
+
template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline size_t Array::SizeOf() {
// This is safe from overflow because the array was already allocated, so we know it's sane.
diff --git a/runtime/mirror/array.h b/runtime/mirror/array.h
index 25a4535..6588b57 100644
--- a/runtime/mirror/array.h
+++ b/runtime/mirror/array.h
@@ -30,6 +30,9 @@ namespace mirror {
class MANAGED Array : public Object {
public:
+ // The size of a java.lang.Class representing an array.
+ static uint32_t ClassSize();
+
// Allocates an array with the given properties, if fill_usable is true the array will be of at
// least component_count size, however, if there's usable space at the end of the allocation the
// array will fill it.
diff --git a/runtime/mirror/art_field-inl.h b/runtime/mirror/art_field-inl.h
index 686fded..90247ed 100644
--- a/runtime/mirror/art_field-inl.h
+++ b/runtime/mirror/art_field-inl.h
@@ -29,6 +29,11 @@
namespace art {
namespace mirror {
+inline uint32_t ArtField::ClassSize() {
+ uint32_t vtable_entries = Object::kVTableLength + 6;
+ return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0);
+}
+
inline Class* ArtField::GetDeclaringClass() {
Class* result = GetFieldObject<Class>(OFFSET_OF_OBJECT_MEMBER(ArtField, declaring_class_));
DCHECK(result != NULL);
diff --git a/runtime/mirror/art_field.h b/runtime/mirror/art_field.h
index 502cec7..741c6eb 100644
--- a/runtime/mirror/art_field.h
+++ b/runtime/mirror/art_field.h
@@ -19,22 +19,33 @@
#include <jni.h>
-#include "class.h"
#include "modifiers.h"
#include "object.h"
#include "object_callbacks.h"
+#include "primitive.h"
#include "read_barrier.h"
namespace art {
struct ArtFieldOffsets;
+class DexFile;
class ScopedObjectAccessAlreadyRunnable;
namespace mirror {
+class DexCache;
+
// C++ mirror of java.lang.reflect.ArtField
-class MANAGED ArtField : public Object {
+class MANAGED ArtField FINAL : public Object {
public:
+ // Size of java.lang.reflect.ArtField.class.
+ static uint32_t ClassSize();
+
+ // Size of an instance of java.lang.reflect.ArtField not including its value array.
+ static constexpr uint32_t InstanceSize() {
+ return sizeof(ArtField);
+ }
+
static ArtField* FromReflectedField(const ScopedObjectAccessAlreadyRunnable& soa,
jobject jlr_field)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -143,11 +154,17 @@ class MANAGED ArtField : public Object {
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
const char* GetName() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
const char* GetTypeDescriptor() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
Primitive::Type GetTypeAsPrimitiveType() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
bool IsPrimitiveType() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
size_t FieldSize() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
mirror::DexCache* GetDexCache() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
const DexFile* GetDexFile() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
private:
@@ -169,11 +186,6 @@ class MANAGED ArtField : public Object {
DISALLOW_IMPLICIT_CONSTRUCTORS(ArtField);
};
-class MANAGED ArtFieldClass : public Class {
- private:
- DISALLOW_IMPLICIT_CONSTRUCTORS(ArtFieldClass);
-};
-
} // namespace mirror
} // namespace art
diff --git a/runtime/mirror/art_method-inl.h b/runtime/mirror/art_method-inl.h
index 8fcacc2..a5b5df6 100644
--- a/runtime/mirror/art_method-inl.h
+++ b/runtime/mirror/art_method-inl.h
@@ -21,14 +21,29 @@
#include "dex_file.h"
#include "entrypoints/entrypoint_utils.h"
+#include "object-inl.h"
#include "object_array.h"
+#include "object_utils.h"
#include "oat.h"
#include "quick/quick_method_frame_info.h"
+#include "read_barrier-inl.h"
#include "runtime-inl.h"
namespace art {
namespace mirror {
+inline uint32_t ArtMethod::ClassSize() {
+ uint32_t vtable_entries = Object::kVTableLength + 8;
+ return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0);
+}
+
+template<ReadBarrierOption kReadBarrierOption>
+inline Class* ArtMethod::GetJavaLangReflectArtMethod() {
+ DCHECK(java_lang_reflect_ArtMethod_ != nullptr);
+ return ReadBarrier::BarrierForRoot<mirror::Class, kReadBarrierOption>(
+ &java_lang_reflect_ArtMethod_);
+}
+
inline Class* ArtMethod::GetDeclaringClass() {
Class* result = GetFieldObject<Class>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, declaring_class_));
DCHECK(result != NULL) << this;
@@ -122,8 +137,8 @@ inline void ArtMethod::AssertPcIsWithinQuickCode(uintptr_t pc) {
return;
}
ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
- if (code == GetQuickResolutionTrampoline(class_linker) ||
- code == GetQuickToInterpreterBridgeTrampoline(class_linker)) {
+ if (code == class_linker->GetQuickResolutionTrampoline() ||
+ code == class_linker->GetQuickToInterpreterBridgeTrampoline()) {
return;
}
DCHECK(IsWithinQuickCode(pc))
@@ -162,7 +177,7 @@ inline const void* ArtMethod::GetQuickOatEntryPoint() {
// On failure, instead of nullptr we get the quick-generic-jni-trampoline for native method
// indicating the generic JNI, or the quick-to-interpreter-bridge (but not the trampoline)
// for non-native methods.
- DCHECK(entry_point != GetQuickToInterpreterBridgeTrampoline(runtime->GetClassLinker()));
+ DCHECK(entry_point != runtime->GetClassLinker()->GetQuickToInterpreterBridgeTrampoline());
if (UNLIKELY(entry_point == GetQuickToInterpreterBridge()) ||
UNLIKELY(entry_point == runtime->GetClassLinker()->GetQuickGenericJniTrampoline())) {
return nullptr;
@@ -289,7 +304,7 @@ inline QuickMethodFrameInfo ArtMethod::GetQuickFrameInfo() {
// On failure, instead of nullptr we get the quick-generic-jni-trampoline for native method
// indicating the generic JNI, or the quick-to-interpreter-bridge (but not the trampoline)
// for non-native methods. And we really shouldn't see a failure for non-native methods here.
- DCHECK(entry_point != GetQuickToInterpreterBridgeTrampoline(runtime->GetClassLinker()));
+ DCHECK(entry_point != runtime->GetClassLinker()->GetQuickToInterpreterBridgeTrampoline());
CHECK(entry_point != GetQuickToInterpreterBridge());
if (UNLIKELY(entry_point == runtime->GetClassLinker()->GetQuickGenericJniTrampoline())) {
diff --git a/runtime/mirror/art_method.cc b/runtime/mirror/art_method.cc
index 4821e29..1fa680d 100644
--- a/runtime/mirror/art_method.cc
+++ b/runtime/mirror/art_method.cc
@@ -355,14 +355,6 @@ void ArtMethod::Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue*
self->PopManagedStackFragment(fragment);
}
-bool ArtMethod::IsRegistered() {
- void* native_method =
- GetFieldPtr<void*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, entry_point_from_jni_));
- CHECK(native_method != nullptr);
- void* jni_stub = GetJniDlsymLookupStub();
- return native_method != jni_stub;
-}
-
void ArtMethod::RegisterNative(Thread* self, const void* native_method, bool is_fast) {
DCHECK(Thread::Current() == self);
CHECK(IsNative()) << PrettyMethod(this);
diff --git a/runtime/mirror/art_method.h b/runtime/mirror/art_method.h
index a55c48b..081bee1 100644
--- a/runtime/mirror/art_method.h
+++ b/runtime/mirror/art_method.h
@@ -17,21 +17,19 @@
#ifndef ART_RUNTIME_MIRROR_ART_METHOD_H_
#define ART_RUNTIME_MIRROR_ART_METHOD_H_
-#include "class.h"
#include "dex_file.h"
#include "invoke_type.h"
#include "modifiers.h"
#include "object.h"
#include "object_callbacks.h"
#include "quick/quick_method_frame_info.h"
-#include "read_barrier.h"
+#include "read_barrier_option.h"
namespace art {
struct ArtMethodOffsets;
struct ConstructorMethodOffsets;
union JValue;
-struct MethodClassOffsets;
class MethodHelper;
class ScopedObjectAccessAlreadyRunnable;
class StringPiece;
@@ -39,14 +37,20 @@ class ShadowFrame;
namespace mirror {
-class StaticStorageBase;
-
typedef void (EntryPointFromInterpreter)(Thread* self, MethodHelper& mh,
const DexFile::CodeItem* code_item, ShadowFrame* shadow_frame, JValue* result);
-// C++ mirror of java.lang.reflect.Method and java.lang.reflect.Constructor
-class MANAGED ArtMethod : public Object {
+// C++ mirror of java.lang.reflect.ArtMethod.
+class MANAGED ArtMethod FINAL : public Object {
public:
+ // Size of java.lang.reflect.ArtMethod.class.
+ static uint32_t ClassSize();
+
+ // Size of an instance of java.lang.reflect.ArtMethod not including its value array.
+ static constexpr uint32_t InstanceSize() {
+ return sizeof(ArtMethod);
+ }
+
static ArtMethod* FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
jobject jlr_method)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -357,8 +361,6 @@ class MANAGED ArtMethod : public Object {
return kPointerSize;
}
- bool IsRegistered() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
void RegisterNative(Thread* self, const void* native_method, bool is_fast)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -411,11 +413,7 @@ class MANAGED ArtMethod : public Object {
static void SetClass(Class* java_lang_reflect_ArtMethod);
template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
- static Class* GetJavaLangReflectArtMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- DCHECK(java_lang_reflect_ArtMethod_ != nullptr);
- return ReadBarrier::BarrierForRoot<mirror::Class, kReadBarrierOption>(
- &java_lang_reflect_ArtMethod_);
- }
+ static Class* GetJavaLangReflectArtMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
static void ResetClass();
@@ -423,27 +421,45 @@ class MANAGED ArtMethod : public Object {
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
const DexFile* GetDexFile() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
const char* GetDeclaringClassDescriptor() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
const char* GetShorty() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
uint32_t unused_length;
return GetShorty(&unused_length);
}
+
const char* GetShorty(uint32_t* out_length) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
const Signature GetSignature() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
const char* GetName() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
const DexFile::CodeItem* GetCodeItem() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
bool IsResolvedTypeIdx(uint16_t type_idx) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
int32_t GetLineNumFromDexPC(uint32_t dex_pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
const DexFile::ProtoId& GetPrototype() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
const DexFile::TypeList* GetParameterTypeList() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
const char* GetDeclaringClassSourceFile() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
uint16_t GetClassDefIndex() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
const DexFile::ClassDef& GetClassDef() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
const char* GetReturnTypeDescriptor() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
const char* GetTypeDescriptorFromTypeIdx(uint16_t type_idx)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
mirror::ClassLoader* GetClassLoader() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
mirror::DexCache* GetDexCache() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
ArtMethod* GetInterfaceMethodIfProxy() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
protected:
@@ -505,11 +521,6 @@ class MANAGED ArtMethod : public Object {
DISALLOW_IMPLICIT_CONSTRUCTORS(ArtMethod);
};
-class MANAGED ArtMethodClass : public Class {
- private:
- DISALLOW_IMPLICIT_CONSTRUCTORS(ArtMethodClass);
-};
-
} // namespace mirror
} // namespace art
diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h
index 2daa6e4..349d4a3 100644
--- a/runtime/mirror/class-inl.h
+++ b/runtime/mirror/class-inl.h
@@ -19,8 +19,8 @@
#include "class.h"
-#include "art_field.h"
-#include "art_method.h"
+#include "art_field-inl.h"
+#include "art_method-inl.h"
#include "class_linker-inl.h"
#include "class_loader.h"
#include "common_throws.h"
@@ -29,6 +29,7 @@
#include "gc/heap-inl.h"
#include "iftable.h"
#include "object_array-inl.h"
+#include "read_barrier-inl.h"
#include "runtime.h"
#include "string.h"
@@ -148,6 +149,23 @@ inline void Class::SetImTable(ObjectArray<ArtMethod>* new_imtable) {
SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(Class, imtable_), new_imtable);
}
+inline ArtMethod* Class::GetEmbeddedImTableEntry(uint32_t i) {
+ uint32_t offset = EmbeddedImTableOffset().Uint32Value() + i * sizeof(ImTableEntry);
+ return GetFieldObject<mirror::ArtMethod>(MemberOffset(offset));
+}
+
+inline void Class::SetEmbeddedImTableEntry(uint32_t i, ArtMethod* method) {
+ uint32_t offset = EmbeddedImTableOffset().Uint32Value() + i * sizeof(ImTableEntry);
+ SetFieldObject<false>(MemberOffset(offset), method);
+ CHECK(method == GetImTable()->Get(i));
+}
+
+inline void Class::SetEmbeddedVTableEntry(uint32_t i, ArtMethod* method) {
+ uint32_t offset = EmbeddedVTableOffset().Uint32Value() + i * sizeof(VTableEntry);
+ SetFieldObject<false>(MemberOffset(offset), method);
+ CHECK(method == GetVTableDuringLinking()->Get(i));
+}
+
inline bool Class::Implements(Class* klass) {
DCHECK(klass != NULL);
DCHECK(klass->IsInterface()) << PrettyClass(this);
@@ -373,7 +391,8 @@ inline ObjectArray<ArtField>* Class::GetSFields() {
inline void Class::SetSFields(ObjectArray<ArtField>* new_sfields)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- DCHECK(NULL == GetFieldObject<ObjectArray<ArtField>>(OFFSET_OF_OBJECT_MEMBER(Class, sfields_)));
+ DCHECK((IsRetired() && new_sfields == nullptr) ||
+ (NULL == GetFieldObject<ObjectArray<ArtField>>(OFFSET_OF_OBJECT_MEMBER(Class, sfields_))));
SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(Class, sfields_), new_sfields);
}
@@ -435,9 +454,9 @@ inline void Class::SetVerifyErrorClass(Class* klass) {
template<VerifyObjectFlags kVerifyFlags>
inline uint32_t Class::GetAccessFlags() {
- // Check class is loaded or this is java.lang.String that has a
+ // Check class is loaded/retired or this is java.lang.String that has a
// circularity issue during loading the names of its members
- DCHECK(IsLoaded<kVerifyFlags>() ||
+ DCHECK(IsIdxLoaded<kVerifyFlags>() || IsRetired<kVerifyFlags>() ||
IsErroneous<static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis)>() ||
this == String::GetJavaLangString() ||
this == ArtField::GetJavaLangReflectArtField() ||
@@ -503,12 +522,63 @@ inline Object* Class::AllocNonMovableObject(Thread* self) {
return Alloc<true>(self, Runtime::Current()->GetHeap()->GetCurrentNonMovingAllocator());
}
+inline uint32_t Class::ComputeClassSize(bool has_embedded_tables,
+ uint32_t num_vtable_entries,
+ uint32_t num_32bit_static_fields,
+ uint32_t num_64bit_static_fields,
+ uint32_t num_ref_static_fields) {
+ // Space used by java.lang.Class and its instance fields.
+ uint32_t size = sizeof(Class);
+ // Space used by embedded tables.
+ if (has_embedded_tables) {
+ uint32_t embedded_imt_size = kImtSize * sizeof(ImTableEntry);
+ uint32_t embedded_vtable_size = num_vtable_entries * sizeof(VTableEntry);
+ size += embedded_imt_size + embedded_vtable_size;
+ }
+ // Space used by reference statics.
+ size += num_ref_static_fields * sizeof(HeapReference<Object>);
+ // Possible pad for alignment.
+ if (((size & 7) != 0) && (num_64bit_static_fields > 0) && (num_32bit_static_fields == 0)) {
+ size += sizeof(uint32_t);
+ }
+ // Space used for primitive static fields.
+ size += (num_32bit_static_fields * sizeof(uint32_t)) +
+ (num_64bit_static_fields * sizeof(uint64_t));
+ return size;
+}
+
template <bool kVisitClass, typename Visitor>
inline void Class::VisitReferences(mirror::Class* klass, const Visitor& visitor) {
// Visit the static fields first so that we don't overwrite the SFields / IFields instance
// fields.
- VisitStaticFieldsReferences<kVisitClass>(this, visitor);
VisitInstanceFieldsReferences<kVisitClass>(klass, visitor);
+ if (!IsTemp()) {
+ // Temp classes don't ever populate imt/vtable or static fields and they are not even
+ // allocated with the right size for those.
+ VisitStaticFieldsReferences<kVisitClass>(this, visitor);
+ if (ShouldHaveEmbeddedImtAndVTable()) {
+ VisitEmbeddedImtAndVTable(visitor);
+ }
+ }
+}
+
+template<typename Visitor>
+inline void Class::VisitEmbeddedImtAndVTable(const Visitor& visitor) {
+ uint32_t pos = sizeof(mirror::Class);
+
+ size_t count = kImtSize;
+ for (size_t i = 0; i < count; ++i) {
+ MemberOffset offset = MemberOffset(pos);
+ visitor(this, offset, true);
+ pos += sizeof(ImTableEntry);
+ }
+
+ count = ((GetVTable() != NULL) ? GetVTable()->GetLength() : 0);
+ for (size_t i = 0; i < count; ++i) {
+ MemberOffset offset = MemberOffset(pos);
+ visitor(this, offset, true);
+ pos += sizeof(VTableEntry);
+ }
}
template<ReadBarrierOption kReadBarrierOption>
@@ -554,6 +624,36 @@ inline void Class::AssertInitializedOrInitializingInThread(Thread* self) {
}
}
+inline ObjectArray<Class>* Class::GetInterfaces() {
+ CHECK(IsProxyClass());
+ // First static field.
+ DCHECK(GetSFields()->Get(0)->IsArtField());
+ DCHECK_STREQ(GetSFields()->Get(0)->GetName(), "interfaces");
+ MemberOffset field_offset = GetSFields()->Get(0)->GetOffset();
+ return GetFieldObject<ObjectArray<Class>>(field_offset);
+}
+
+inline ObjectArray<ObjectArray<Class>>* Class::GetThrows() {
+ CHECK(IsProxyClass());
+ // Second static field.
+ DCHECK(GetSFields()->Get(1)->IsArtField());
+ DCHECK_STREQ(GetSFields()->Get(1)->GetName(), "throws");
+ MemberOffset field_offset = GetSFields()->Get(1)->GetOffset();
+ return GetFieldObject<ObjectArray<ObjectArray<Class>>>(field_offset);
+}
+
+inline void Class::InitializeClassVisitor::operator()(
+ mirror::Object* obj, size_t usable_size) const {
+ DCHECK_LE(class_size_, usable_size);
+ // Avoid AsClass as object is not yet in live bitmap or allocation stack.
+ mirror::Class* klass = down_cast<mirror::Class*>(obj);
+ // DCHECK(klass->IsClass());
+ klass->SetClassSize(class_size_);
+ klass->SetPrimitiveType(Primitive::kPrimNot); // Default to not being primitive.
+ klass->SetDexClassDefIndex(DexFile::kDexNoIndex16); // Default to no valid class def index.
+ klass->SetDexTypeIndex(DexFile::kDexNoIndex16); // Default to no valid type index.
+}
+
} // namespace mirror
} // namespace art
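
The class-inl.h hunk above defines the size formula behind the new per-type ClassSize() helpers (Object, Array, ArtField, ArtMethod, DexCache, String). Below is a standalone sketch of the same arithmetic with placeholder constants; kClassHeaderSize and kHeapRefSize are assumptions standing in for sizeof(mirror::Class) and sizeof(HeapReference<T>), so the printed numbers are only indicative and only the shape of the computation mirrors the patch.

#include <cstdint>
#include <cstdio>

constexpr uint32_t kClassHeaderSize = 112;  // assumed stand-in for sizeof(Class)
constexpr uint32_t kHeapRefSize = 4;        // assumed 32-bit compressed reference
constexpr uint32_t kImtSize = 64;           // matches Class::kImtSize in the patch

uint32_t ComputeClassSizeSketch(bool has_embedded_tables,
                                uint32_t num_vtable_entries,
                                uint32_t num_32bit_static_fields,
                                uint32_t num_64bit_static_fields,
                                uint32_t num_ref_static_fields) {
  uint32_t size = kClassHeaderSize;              // java.lang.Class instance fields
  if (has_embedded_tables) {
    size += kImtSize * kHeapRefSize;             // embedded IMT, one reference per entry
    size += num_vtable_entries * kHeapRefSize;   // embedded vtable
  }
  size += num_ref_static_fields * kHeapRefSize;  // reference statics
  // Pad to 8 bytes only when 64-bit statics follow and no 32-bit static can fill the gap.
  if ((size & 7) != 0 && num_64bit_static_fields > 0 && num_32bit_static_fields == 0) {
    size += static_cast<uint32_t>(sizeof(uint32_t));
  }
  size += static_cast<uint32_t>(num_32bit_static_fields * sizeof(uint32_t) +  // primitive statics
                                num_64bit_static_fields * sizeof(uint64_t));
  return size;
}

int main() {
  // Object::ClassSize() in the patch: embedded tables, 11 vtable entries, no statics.
  std::printf("Object's class: %u bytes\n", ComputeClassSizeSketch(true, 11, 0, 0, 0));
  // String::ClassSize(): 11 + 51 vtable entries, one 32-bit, one 64-bit, two reference statics.
  std::printf("String's class: %u bytes\n", ComputeClassSizeSketch(true, 62, 1, 1, 2));
  return 0;
}

With these assumed sizes, the Object case works out to 112 + 64*4 + 11*4 = 412 bytes; the real values depend on the actual header and reference representation.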
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index c6472c6..371e984 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -63,7 +63,8 @@ void Class::SetStatus(Status new_status, Thread* self) {
ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
bool class_linker_initialized = class_linker != nullptr && class_linker->IsInitialized();
if (LIKELY(class_linker_initialized)) {
- if (UNLIKELY(new_status <= old_status && new_status != kStatusError)) {
+ if (UNLIKELY(new_status <= old_status && new_status != kStatusError &&
+ new_status != kStatusRetired)) {
LOG(FATAL) << "Unexpected change back of class status for " << PrettyClass(this) << " "
<< old_status << " -> " << new_status;
}
@@ -113,11 +114,27 @@ void Class::SetStatus(Status new_status, Thread* self) {
} else {
SetField32Volatile<false>(OFFSET_OF_OBJECT_MEMBER(Class, status_), new_status);
}
- // Classes that are being resolved or initialized need to notify waiters that the class status
- // changed. See ClassLinker::EnsureResolved and ClassLinker::WaitForInitializeClass.
- if ((old_status >= kStatusResolved || new_status >= kStatusResolved) &&
- class_linker_initialized) {
- NotifyAll(self);
+
+ if (!class_linker_initialized) {
+ // When the class linker is being initialized it's single threaded and by definition there can be
+ // no waiters. During initialization classes may appear temporary but won't be retired as their
+ // size was statically computed.
+ } else {
+ // Classes that are being resolved or initialized need to notify waiters that the class status
+ // changed. See ClassLinker::EnsureResolved and ClassLinker::WaitForInitializeClass.
+ if (IsTemp()) {
+ // Class is a temporary one, ensure that waiters for resolution get notified of retirement
+ // so that they can grab the new version of the class from the class linker's table.
+ CHECK_LT(new_status, kStatusResolved) << PrettyDescriptor(this);
+ if (new_status == kStatusRetired || new_status == kStatusError) {
+ NotifyAll(self);
+ }
+ } else {
+ CHECK_NE(new_status, kStatusRetired);
+ if (old_status >= kStatusResolved || new_status >= kStatusResolved) {
+ NotifyAll(self);
+ }
+ }
}
}
@@ -217,35 +234,39 @@ void Class::DumpClass(std::ostream& os, int flags) {
os << StringPrintf(" %2zd: %s (cl=%p)\n", i, PrettyClass(interface).c_str(), cl);
}
}
- // After this point, this may have moved due to GetDirectInterface.
- os << " vtable (" << h_this->NumVirtualMethods() << " entries, "
- << (h_super.Get() != nullptr ? h_super->NumVirtualMethods() : 0) << " in super):\n";
- for (size_t i = 0; i < NumVirtualMethods(); ++i) {
- os << StringPrintf(" %2zd: %s\n", i,
- PrettyMethod(h_this->GetVirtualMethodDuringLinking(i)).c_str());
- }
- os << " direct methods (" << h_this->NumDirectMethods() << " entries):\n";
- for (size_t i = 0; i < h_this->NumDirectMethods(); ++i) {
- os << StringPrintf(" %2zd: %s\n", i, PrettyMethod(h_this->GetDirectMethod(i)).c_str());
- }
- if (h_this->NumStaticFields() > 0) {
- os << " static fields (" << h_this->NumStaticFields() << " entries):\n";
- if (h_this->IsResolved() || h_this->IsErroneous()) {
- for (size_t i = 0; i < h_this->NumStaticFields(); ++i) {
- os << StringPrintf(" %2zd: %s\n", i, PrettyField(h_this->GetStaticField(i)).c_str());
+ if (!IsLoaded()) {
+ os << " class not yet loaded";
+ } else {
+ // After this point, this may have moved due to GetDirectInterface.
+ os << " vtable (" << h_this->NumVirtualMethods() << " entries, "
+ << (h_super.Get() != nullptr ? h_super->NumVirtualMethods() : 0) << " in super):\n";
+ for (size_t i = 0; i < NumVirtualMethods(); ++i) {
+ os << StringPrintf(" %2zd: %s\n", i,
+ PrettyMethod(h_this->GetVirtualMethodDuringLinking(i)).c_str());
+ }
+ os << " direct methods (" << h_this->NumDirectMethods() << " entries):\n";
+ for (size_t i = 0; i < h_this->NumDirectMethods(); ++i) {
+ os << StringPrintf(" %2zd: %s\n", i, PrettyMethod(h_this->GetDirectMethod(i)).c_str());
+ }
+ if (h_this->NumStaticFields() > 0) {
+ os << " static fields (" << h_this->NumStaticFields() << " entries):\n";
+ if (h_this->IsResolved() || h_this->IsErroneous()) {
+ for (size_t i = 0; i < h_this->NumStaticFields(); ++i) {
+ os << StringPrintf(" %2zd: %s\n", i, PrettyField(h_this->GetStaticField(i)).c_str());
+ }
+ } else {
+ os << " <not yet available>";
}
- } else {
- os << " <not yet available>";
}
- }
- if (h_this->NumInstanceFields() > 0) {
- os << " instance fields (" << h_this->NumInstanceFields() << " entries):\n";
- if (h_this->IsResolved() || h_this->IsErroneous()) {
- for (size_t i = 0; i < h_this->NumInstanceFields(); ++i) {
- os << StringPrintf(" %2zd: %s\n", i, PrettyField(h_this->GetInstanceField(i)).c_str());
+ if (h_this->NumInstanceFields() > 0) {
+ os << " instance fields (" << h_this->NumInstanceFields() << " entries):\n";
+ if (h_this->IsResolved() || h_this->IsErroneous()) {
+ for (size_t i = 0; i < h_this->NumInstanceFields(); ++i) {
+ os << StringPrintf(" %2zd: %s\n", i, PrettyField(h_this->GetInstanceField(i)).c_str());
+ }
+ } else {
+ os << " <not yet available>";
}
- } else {
- os << " <not yet available>";
}
}
}
@@ -721,9 +742,7 @@ uint32_t Class::NumDirectInterfaces() {
} else if (IsArrayClass()) {
return 2;
} else if (IsProxyClass()) {
- mirror::SynthesizedProxyClass* proxy_class=
- reinterpret_cast<mirror::SynthesizedProxyClass*>(this);
- mirror::ObjectArray<mirror::Class>* interfaces = proxy_class->GetInterfaces();
+ mirror::ObjectArray<mirror::Class>* interfaces = GetInterfaces();
return interfaces != nullptr ? interfaces->GetLength() : 0;
} else {
const DexFile::TypeList* interfaces = GetInterfaceTypeList();
@@ -753,9 +772,7 @@ mirror::Class* Class::GetDirectInterface(Thread* self, Handle<mirror::Class> kla
return class_linker->FindSystemClass(self, "Ljava/io/Serializable;");
}
} else if (klass->IsProxyClass()) {
- mirror::SynthesizedProxyClass* proxy_class =
- reinterpret_cast<mirror::SynthesizedProxyClass*>(klass.Get());
- mirror::ObjectArray<mirror::Class>* interfaces = proxy_class->GetInterfaces();
+ mirror::ObjectArray<mirror::Class>* interfaces = klass.Get()->GetInterfaces();
DCHECK(interfaces != nullptr);
return interfaces->Get(idx);
} else {
@@ -798,5 +815,49 @@ const DexFile::TypeList* Class::GetInterfaceTypeList() {
return GetDexFile().GetInterfacesList(*class_def);
}
+void Class::PopulateEmbeddedImtAndVTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ ObjectArray<ArtMethod>* table = GetImTable();
+ if (table != nullptr) {
+ for (uint32_t i = 0; i < kImtSize; i++) {
+ SetEmbeddedImTableEntry(i, table->Get(i));
+ }
+ }
+
+ table = GetVTableDuringLinking();
+ CHECK(table != nullptr);
+ for (int32_t i = 0; i < table->GetLength(); i++) {
+ SetEmbeddedVTableEntry(i, table->Get(i));
+ }
+}
+
+Class* Class::CopyOf(Thread* self, int32_t new_length) {
+ DCHECK_GE(new_length, static_cast<int32_t>(sizeof(Class)));
+ // We may get copied by a compacting GC.
+ StackHandleScope<1> hs(self);
+ Handle<mirror::Class> h_this(hs.NewHandle(this));
+ gc::Heap* heap = Runtime::Current()->GetHeap();
+ InitializeClassVisitor visitor(new_length);
+
+ mirror::Object* new_class =
+ kMovingClasses ? heap->AllocObject<true>(self, java_lang_Class_, new_length, visitor)
+ : heap->AllocNonMovableObject<true>(self, java_lang_Class_, new_length, visitor);
+ if (UNLIKELY(new_class == nullptr)) {
+ CHECK(self->IsExceptionPending()); // Expect an OOME.
+ return NULL;
+ }
+
+ mirror::Class* new_class_obj = new_class->AsClass();
+ memcpy(new_class_obj, h_this.Get(), sizeof(Class));
+
+ new_class_obj->SetStatus(kStatusResolving, self);
+ new_class_obj->PopulateEmbeddedImtAndVTable();
+ // Correct some fields.
+ new_class_obj->SetLockWord(LockWord(), false);
+ new_class_obj->SetClassSize(new_length);
+
+ Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(new_class_obj);
+ return new_class_obj;
+}
+
} // namespace mirror
} // namespace art
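
Reading class.cc together with the status changes in class.h, the linking flow appears to be: allocate an undersized placeholder ("temp") class while the vtable length is still unknown, link it, clone it with CopyOf() into an object large enough for the embedded tables, populate those tables, and retire the placeholder so waiters re-fetch the final class from the class linker's table. The sketch below models that flow with simplified stand-in types; FakeClass, AllocTempClass and RetireAndCopy are inventions for illustration, while the real logic lives in ClassLinker and Class::CopyOf above.

#include <cstdint>
#include <cstdio>
#include <cstring>
#include <new>

// Subset of the Status values added/renumbered by the patch.
enum Status {
  kStatusRetired = -2,
  kStatusError = -1,
  kStatusNotReady = 0,
  kStatusLoaded = 2,
  kStatusResolving = 3,
  kStatusResolved = 4,
};

struct FakeClass {
  Status status = kStatusNotReady;
  uint32_t class_size = 0;          // bytes this object was allocated with
  uint32_t num_vtable_entries = 0;  // known only after linking
  // The embedded IMT/vtable would trail the object here in the real layout.
};

// Phase 1: vtable length unknown, so allocate a minimal placeholder class.
FakeClass* AllocTempClass() {
  FakeClass* temp = new FakeClass();
  temp->class_size = sizeof(FakeClass);
  temp->status = kStatusLoaded;
  return temp;
}

// Phase 2: linking has determined the vtable length; clone into a correctly
// sized object, populate the embedded tables, and retire the placeholder.
FakeClass* RetireAndCopy(FakeClass* temp, uint32_t vtable_entries) {
  const uint32_t new_size =
      static_cast<uint32_t>(sizeof(FakeClass) + vtable_entries * sizeof(void*));
  void* storage = ::operator new(new_size);
  FakeClass* real = new (storage) FakeClass();
  std::memcpy(real, temp, sizeof(FakeClass));  // CopyOf copies the header fields
  real->class_size = new_size;
  real->num_vtable_entries = vtable_entries;   // embedded tables populated here
  real->status = kStatusResolving;             // linking then continues on the copy
  temp->status = kStatusRetired;               // SetStatus notifies waiters in ART
  return real;
}

int main() {
  FakeClass* temp = AllocTempClass();
  FakeClass* real = RetireAndCopy(temp, 11);
  std::printf("temp status=%d, real size=%u bytes\n", temp->status, real->class_size);
  delete temp;
  real->~FakeClass();
  ::operator delete(real);
  return 0;
}

The memcpy-then-fix-up shape mirrors CopyOf() above, which copies sizeof(Class) bytes, resets the lock word, sets the new class size and status, and relies on SetStatus() notifying waiters once the old class moves to kStatusRetired.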
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index 7ac53ea..0f42044 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -22,6 +22,7 @@
#include "invoke_type.h"
#include "modifiers.h"
#include "object.h"
+#include "object_array.h"
#include "object_callbacks.h"
#include "primitive.h"
#include "read_barrier.h"
@@ -62,7 +63,6 @@
namespace art {
-struct ClassClassOffsets;
struct ClassOffsets;
class Signature;
class StringPiece;
@@ -70,13 +70,29 @@ class StringPiece;
namespace mirror {
class ArtField;
+class ArtMethod;
class ClassLoader;
class DexCache;
class IfTable;
// C++ mirror of java.lang.Class
-class MANAGED Class : public Object {
+class MANAGED Class FINAL : public Object {
public:
+ // Interface method table size. Increasing this value reduces the chance of two interface methods
+ // colliding in the interface method table but increases the size of classes that implement
+ // (non-marker) interfaces.
+ static constexpr size_t kImtSize = 64;
+
+ // imtable entry embedded in class object.
+ struct MANAGED ImTableEntry {
+ HeapReference<ArtMethod> method;
+ };
+
+ // vtable entry embedded in class object.
+ struct MANAGED VTableEntry {
+ HeapReference<ArtMethod> method;
+ };
+
// Class Status
//
// kStatusNotReady: If a Class cannot be found in the class table by
@@ -95,6 +111,11 @@ class MANAGED Class : public Object {
// using ResolveClass to initialize the super_class_ and ensuring the
// interfaces are resolved.
//
+ // kStatusResolving: Class is just cloned with the right size from a
+ // temporary class that's acting as a placeholder for linking. The old
+ // class will be retired. The new class is set to this status first before
+ // moving on to being resolved.
+ //
// kStatusResolved: Still holding the lock on Class, the ClassLinker
// shows linking is complete and fields of the Class populated by making
// it kStatusResolved. Java allows circularities of the form where a super
@@ -109,18 +130,20 @@ class MANAGED Class : public Object {
//
// TODO: Explain the other states
enum Status {
+ kStatusRetired = -2,
kStatusError = -1,
kStatusNotReady = 0,
kStatusIdx = 1, // Loaded, DEX idx in super_class_type_idx_ and interfaces_type_idx_.
kStatusLoaded = 2, // DEX idx values resolved.
- kStatusResolved = 3, // Part of linking.
- kStatusVerifying = 4, // In the process of being verified.
- kStatusRetryVerificationAtRuntime = 5, // Compile time verification failed, retry at runtime.
- kStatusVerifyingAtRuntime = 6, // Retrying verification at runtime.
- kStatusVerified = 7, // Logically part of linking; done pre-init.
- kStatusInitializing = 8, // Class init in progress.
- kStatusInitialized = 9, // Ready to go.
- kStatusMax = 10,
+ kStatusResolving = 3, // Just cloned from temporary class object.
+ kStatusResolved = 4, // Part of linking.
+ kStatusVerifying = 5, // In the process of being verified.
+ kStatusRetryVerificationAtRuntime = 6, // Compile time verification failed, retry at runtime.
+ kStatusVerifyingAtRuntime = 7, // Retrying verification at runtime.
+ kStatusVerified = 8, // Logically part of linking; done pre-init.
+ kStatusInitializing = 9, // Class init in progress.
+ kStatusInitialized = 10, // Ready to go.
+ kStatusMax = 11,
};
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
@@ -136,6 +159,12 @@ class MANAGED Class : public Object {
return OFFSET_OF_OBJECT_MEMBER(Class, status_);
}
+ // Returns true if the class has been retired.
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ bool IsRetired() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ return GetStatus<kVerifyFlags>() == kStatusRetired;
+ }
+
// Returns true if the class has failed to link.
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
bool IsErroneous() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
@@ -274,6 +303,13 @@ class MANAGED Class : public Object {
}
}
+ // Returns true if this class is the placeholder and should retire and
+ // be replaced with a class with the right size for embedded imt/vtable.
+ bool IsTemp() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ Status s = GetStatus();
+ return s < Status::kStatusResolving && ShouldHaveEmbeddedImtAndVTable();
+ }
+
String* GetName() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); // Returns the cached name.
void SetName(String* name) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); // Sets the cached name.
// Computes the name, then sets the cached value.
@@ -451,6 +487,25 @@ class MANAGED Class : public Object {
void SetClassSize(uint32_t new_class_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ // Compute how many bytes would be used by a class with the given elements.
+ static uint32_t ComputeClassSize(bool has_embedded_tables,
+ uint32_t num_vtable_entries,
+ uint32_t num_32bit_static_fields,
+ uint32_t num_64bit_static_fields,
+ uint32_t num_ref_static_fields);
+
+ // The size of java.lang.Class.class.
+ static uint32_t ClassClassSize() {
+ // The number of vtable entries in java.lang.Class.
+ uint32_t vtable_entries = Object::kVTableLength + 64;
+ return ComputeClassSize(true, vtable_entries, 0, 1, 0);
+ }
+
+ // The size of a java.lang.Class representing a primitive such as int.class.
+ static uint32_t PrimitiveClassSize() {
+ return ComputeClassSize(false, 0, 0, 0, 0);
+ }
+
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
uint32_t GetObjectSize() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -623,8 +678,6 @@ class MANAGED Class : public Object {
return OFFSET_OF_OBJECT_MEMBER(Class, vtable_);
}
- ObjectArray<ArtMethod>* GetImTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
void SetImTable(ObjectArray<ArtMethod>* new_imtable)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -632,6 +685,26 @@ class MANAGED Class : public Object {
return OFFSET_OF_OBJECT_MEMBER(Class, imtable_);
}
+ static MemberOffset EmbeddedImTableOffset() {
+ return MemberOffset(sizeof(Class));
+ }
+
+ static MemberOffset EmbeddedVTableOffset() {
+ return MemberOffset(sizeof(Class) + kImtSize * sizeof(mirror::Class::ImTableEntry));
+ }
+
+ bool ShouldHaveEmbeddedImtAndVTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ return IsInstantiable();
+ }
+
+ ArtMethod* GetEmbeddedImTableEntry(uint32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ void SetEmbeddedImTableEntry(uint32_t i, ArtMethod* method) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ void SetEmbeddedVTableEntry(uint32_t i, ArtMethod* method) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ void PopulateEmbeddedImtAndVTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
// Given a method implemented by this class but potentially from a super class, return the
// specific implementation method for this class.
ArtMethod* FindVirtualMethodForVirtual(ArtMethod* method)
@@ -739,11 +812,6 @@ class MANAGED Class : public Object {
void SetReferenceInstanceOffsets(uint32_t new_reference_offsets)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- // Beginning of static field data
- static MemberOffset FieldsOffset() {
- return OFFSET_OF_OBJECT_MEMBER(Class, fields_);
- }
-
// Returns the number of static fields containing reference types.
uint32_t NumReferenceStaticFields() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
DCHECK(IsResolved() || IsErroneous());
@@ -751,7 +819,7 @@ class MANAGED Class : public Object {
}
uint32_t NumReferenceStaticFieldsDuringLinking() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- DCHECK(IsLoaded() || IsErroneous());
+ DCHECK(IsLoaded() || IsErroneous() || IsRetired());
return GetField32(OFFSET_OF_OBJECT_MEMBER(Class, num_reference_static_fields_));
}
@@ -865,25 +933,65 @@ class MANAGED Class : public Object {
template <bool kVisitClass, typename Visitor>
void VisitReferences(mirror::Class* klass, const Visitor& visitor)
- NO_THREAD_SAFETY_ANALYSIS;
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ // Visit references within the embedded tables of the class.
+ // TODO: remove NO_THREAD_SAFETY_ANALYSIS when annotalysis handles visitors better.
+ template<typename Visitor>
+ void VisitEmbeddedImtAndVTable(const Visitor& visitor) NO_THREAD_SAFETY_ANALYSIS;
std::string GetDescriptor() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
bool DescriptorEquals(const char* match) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
std::string GetArrayDescriptor() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
const DexFile::ClassDef* GetClassDef() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
uint32_t NumDirectInterfaces() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
uint16_t GetDirectInterfaceTypeIdx(uint32_t idx) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
static mirror::Class* GetDirectInterface(Thread* self, Handle<mirror::Class> klass, uint32_t idx)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
const char* GetSourceFile() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
std::string GetLocation() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
const DexFile& GetDexFile() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
const DexFile::TypeList* GetInterfaceTypeList() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Asserts we are initialized or initializing in the given thread.
void AssertInitializedOrInitializingInThread(Thread* self)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ Class* CopyOf(Thread* self, int32_t new_length)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ // For proxy class only.
+ ObjectArray<Class>* GetInterfaces() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ // For proxy class only.
+ ObjectArray<ObjectArray<Class>>* GetThrows() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ // Used to initialize a class in the allocation code path to ensure it is guarded by a StoreStore
+ // fence.
+ class InitializeClassVisitor {
+ public:
+ explicit InitializeClassVisitor(uint32_t class_size) : class_size_(class_size) {
+ }
+
+ void operator()(mirror::Object* obj, size_t usable_size) const
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ private:
+ const uint32_t class_size_;
+
+ DISALLOW_COPY_AND_ASSIGN(InitializeClassVisitor);
+ };
+
private:
void SetVerifyErrorClass(Class* klass) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -902,6 +1010,8 @@ class MANAGED Class : public Object {
void CheckObjectAlloc() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ObjectArray<ArtMethod>* GetImTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
// defining class loader, or NULL for the "bootstrap" system loader
HeapReference<ClassLoader> class_loader_;
@@ -1012,7 +1122,12 @@ class MANAGED Class : public Object {
// values are kept in a table in gDvm.
// InitiatingLoaderList initiating_loader_list_;
- // Location of first static field.
+ // The following data exist in real class objects.
+ // Embedded Imtable, for class object that's not an interface, fixed size.
+ ImTableEntry embedded_imtable_[0];
+ // Embedded Vtable, for class object that's not an interface, variable size.
+ VTableEntry embedded_vtable_[0];
+ // Static fields, variable size.
uint32_t fields_[0];
// java.lang.Class
@@ -1024,14 +1139,6 @@ class MANAGED Class : public Object {
std::ostream& operator<<(std::ostream& os, const Class::Status& rhs);
-class MANAGED ClassClass : public Class {
- private:
- int32_t pad_;
- int64_t serialVersionUID_;
- friend struct art::ClassClassOffsets; // for verifying offset information
- DISALLOW_IMPLICIT_CONSTRUCTORS(ClassClass);
-};
-
} // namespace mirror
} // namespace art
diff --git a/runtime/mirror/class_loader.h b/runtime/mirror/class_loader.h
index 74dae38..f3594e4 100644
--- a/runtime/mirror/class_loader.h
+++ b/runtime/mirror/class_loader.h
@@ -27,6 +27,12 @@ namespace mirror {
// C++ mirror of java.lang.ClassLoader
class MANAGED ClassLoader : public Object {
+ public:
+ // Size of an instance of java.lang.ClassLoader.
+ static constexpr uint32_t InstanceSize() {
+ return sizeof(ClassLoader);
+ }
+
private:
// Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
HeapReference<Object> packages_;
diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h
index 7e40f64..08cff99 100644
--- a/runtime/mirror/dex_cache-inl.h
+++ b/runtime/mirror/dex_cache-inl.h
@@ -24,6 +24,11 @@
namespace art {
namespace mirror {
+inline uint32_t DexCache::ClassSize() {
+ uint32_t vtable_entries = Object::kVTableLength + 1;
+ return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0);
+}
+
inline ArtMethod* DexCache::GetResolvedMethod(uint32_t method_idx)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
ArtMethod* method = GetResolvedMethods()->Get(method_idx);
diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h
index 65a5026..bfd603a 100644
--- a/runtime/mirror/dex_cache.h
+++ b/runtime/mirror/dex_cache.h
@@ -18,10 +18,8 @@
#define ART_RUNTIME_MIRROR_DEX_CACHE_H_
#include "art_method.h"
-#include "class.h"
#include "object.h"
#include "object_array.h"
-#include "string.h"
namespace art {
@@ -33,15 +31,21 @@ union JValue;
namespace mirror {
class ArtField;
+class ArtMethod;
class Class;
+class String;
-class MANAGED DexCacheClass : public Class {
- private:
- DISALLOW_IMPLICIT_CONSTRUCTORS(DexCacheClass);
-};
-
-class MANAGED DexCache : public Object {
+// C++ mirror of java.lang.DexCache.
+class MANAGED DexCache FINAL : public Object {
public:
+ // Size of java.lang.DexCache.class.
+ static uint32_t ClassSize();
+
+ // Size of an instance of java.lang.DexCache not including referenced values.
+ static constexpr uint32_t InstanceSize() {
+ return sizeof(DexCache);
+ }
+
void Init(const DexFile* dex_file,
String* location,
ObjectArray<String>* strings,
diff --git a/runtime/mirror/iftable.h b/runtime/mirror/iftable.h
index ad312ed..5feb602 100644
--- a/runtime/mirror/iftable.h
+++ b/runtime/mirror/iftable.h
@@ -23,7 +23,7 @@
namespace art {
namespace mirror {
-class MANAGED IfTable : public ObjectArray<Object> {
+class MANAGED IfTable FINAL : public ObjectArray<Object> {
public:
Class* GetInterface(int32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Class* interface = Get((i * kMax) + kInterface)->AsClass();
diff --git a/runtime/mirror/object-inl.h b/runtime/mirror/object-inl.h
index 089ef57..d9f442c 100644
--- a/runtime/mirror/object-inl.h
+++ b/runtime/mirror/object-inl.h
@@ -35,6 +35,11 @@
namespace art {
namespace mirror {
+inline uint32_t Object::ClassSize() {
+ uint32_t vtable_entries = kVTableLength;
+ return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0);
+}
+
template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline Class* Object::GetClass() {
return GetFieldObject<Class, kVerifyFlags, kReadBarrierOption>(
@@ -687,6 +692,7 @@ inline void Object::VisitInstanceFieldsReferences(mirror::Class* klass, const Vi
template<bool kVisitClass, typename Visitor>
inline void Object::VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor) {
+ DCHECK(!klass->IsTemp());
klass->VisitFieldsReferences<kVisitClass, true>(
klass->GetReferenceStaticOffsets<kVerifyNone>(), visitor);
}
diff --git a/runtime/mirror/object.h b/runtime/mirror/object.h
index d29011a..11998cc 100644
--- a/runtime/mirror/object.h
+++ b/runtime/mirror/object.h
@@ -63,13 +63,24 @@ static constexpr bool kCheckFieldAssignments = false;
// C++ mirror of java.lang.Object
class MANAGED LOCKABLE Object {
public:
+ // The number of vtable entries in java.lang.Object.
+ static constexpr size_t kVTableLength = 11;
+
+ // The size of the java.lang.Class representing a java.lang.Object.
+ static uint32_t ClassSize();
+
+ // Size of an instance of java.lang.Object.
+ static constexpr uint32_t InstanceSize() {
+ return sizeof(Object);
+ }
+
static MemberOffset ClassOffset() {
return OFFSET_OF_OBJECT_MEMBER(Object, klass_);
}
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
- Class* GetClass() ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE Class* GetClass() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
void SetClass(Class* new_klass) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -202,27 +213,27 @@ class MANAGED LOCKABLE Object {
// Accessor for Java type fields.
template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
ReadBarrierOption kReadBarrierOption = kWithReadBarrier, bool kIsVolatile = false>
- T* GetFieldObject(MemberOffset field_offset) ALWAYS_INLINE
+ ALWAYS_INLINE T* GetFieldObject(MemberOffset field_offset)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
- T* GetFieldObjectVolatile(MemberOffset field_offset) ALWAYS_INLINE
+ ALWAYS_INLINE T* GetFieldObjectVolatile(MemberOffset field_offset)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<bool kTransactionActive, bool kCheckTransaction = true,
VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
- void SetFieldObjectWithoutWriteBarrier(MemberOffset field_offset, Object* new_value)
- ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE void SetFieldObjectWithoutWriteBarrier(MemberOffset field_offset, Object* new_value)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<bool kTransactionActive, bool kCheckTransaction = true,
VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
- void SetFieldObject(MemberOffset field_offset, Object* new_value) ALWAYS_INLINE
+ ALWAYS_INLINE void SetFieldObject(MemberOffset field_offset, Object* new_value)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<bool kTransactionActive, bool kCheckTransaction = true,
VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- void SetFieldObjectVolatile(MemberOffset field_offset, Object* new_value) ALWAYS_INLINE
+ ALWAYS_INLINE void SetFieldObjectVolatile(MemberOffset field_offset, Object* new_value)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<bool kTransactionActive, bool kCheckTransaction = true,
@@ -235,45 +246,45 @@ class MANAGED LOCKABLE Object {
HeapReference<Object>* GetFieldObjectReferenceAddr(MemberOffset field_offset);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
- int32_t GetField32(MemberOffset field_offset) ALWAYS_INLINE
+ ALWAYS_INLINE int32_t GetField32(MemberOffset field_offset)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- int32_t GetField32Volatile(MemberOffset field_offset) ALWAYS_INLINE
+ ALWAYS_INLINE int32_t GetField32Volatile(MemberOffset field_offset)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<bool kTransactionActive, bool kCheckTransaction = true,
VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
- void SetField32(MemberOffset field_offset, int32_t new_value) ALWAYS_INLINE
+ ALWAYS_INLINE void SetField32(MemberOffset field_offset, int32_t new_value)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<bool kTransactionActive, bool kCheckTransaction = true,
VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- void SetField32Volatile(MemberOffset field_offset, int32_t new_value) ALWAYS_INLINE
+ ALWAYS_INLINE void SetField32Volatile(MemberOffset field_offset, int32_t new_value)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<bool kTransactionActive, bool kCheckTransaction = true,
VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- bool CasFieldWeakSequentiallyConsistent32(MemberOffset field_offset, int32_t old_value,
- int32_t new_value) ALWAYS_INLINE
+ ALWAYS_INLINE bool CasFieldWeakSequentiallyConsistent32(MemberOffset field_offset,
+ int32_t old_value, int32_t new_value)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
- int64_t GetField64(MemberOffset field_offset) ALWAYS_INLINE
+ ALWAYS_INLINE int64_t GetField64(MemberOffset field_offset)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- int64_t GetField64Volatile(MemberOffset field_offset) ALWAYS_INLINE
+ ALWAYS_INLINE int64_t GetField64Volatile(MemberOffset field_offset)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<bool kTransactionActive, bool kCheckTransaction = true,
VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
- void SetField64(MemberOffset field_offset, int64_t new_value) ALWAYS_INLINE
+ ALWAYS_INLINE void SetField64(MemberOffset field_offset, int64_t new_value)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<bool kTransactionActive, bool kCheckTransaction = true,
VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- void SetField64Volatile(MemberOffset field_offset, int64_t new_value) ALWAYS_INLINE
+ ALWAYS_INLINE void SetField64Volatile(MemberOffset field_offset, int64_t new_value)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<bool kTransactionActive, bool kCheckTransaction = true,
diff --git a/runtime/mirror/object_array.h b/runtime/mirror/object_array.h
index 54d1240..7012b19 100644
--- a/runtime/mirror/object_array.h
+++ b/runtime/mirror/object_array.h
@@ -23,8 +23,13 @@ namespace art {
namespace mirror {
template<class T>
-class MANAGED ObjectArray : public Array {
+class MANAGED ObjectArray: public Array {
public:
+ // The size of Object[].class.
+ static uint32_t ClassSize() {
+ return Array::ClassSize();
+ }
+
static ObjectArray<T>* Alloc(Thread* self, Class* object_array_class, int32_t length,
gc::AllocatorType allocator_type)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
diff --git a/runtime/mirror/object_test.cc b/runtime/mirror/object_test.cc
index f85fb27..7e1de5d 100644
--- a/runtime/mirror/object_test.cc
+++ b/runtime/mirror/object_test.cc
@@ -28,7 +28,7 @@
#include "class_linker-inl.h"
#include "common_runtime_test.h"
#include "dex_file.h"
-#include "entrypoints/entrypoint_utils.h"
+#include "entrypoints/entrypoint_utils-inl.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/heap.h"
#include "iftable-inl.h"
diff --git a/runtime/mirror/proxy.h b/runtime/mirror/proxy.h
index 6e4947e..db511d6 100644
--- a/runtime/mirror/proxy.h
+++ b/runtime/mirror/proxy.h
@@ -25,28 +25,8 @@ struct ProxyOffsets;
namespace mirror {
-// All proxy objects have a class which is a synthesized proxy class. The synthesized proxy class
-// has the static fields used to implement reflection on proxy objects.
-class MANAGED SynthesizedProxyClass : public Class {
- public:
- ObjectArray<Class>* GetInterfaces() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return GetFieldObject<ObjectArray<Class>>(OFFSET_OF_OBJECT_MEMBER(SynthesizedProxyClass,
- interfaces_));
- }
-
- ObjectArray<ObjectArray<Class>>* GetThrows() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return GetFieldObject<ObjectArray<ObjectArray<Class>>>(OFFSET_OF_OBJECT_MEMBER(SynthesizedProxyClass,
- throws_));
- }
-
- private:
- HeapReference<ObjectArray<Class>> interfaces_;
- HeapReference<ObjectArray<ObjectArray<Class>>> throws_;
- DISALLOW_IMPLICIT_CONSTRUCTORS(SynthesizedProxyClass);
-};
-
// C++ mirror of java.lang.reflect.Proxy.
-class MANAGED Proxy : public Object {
+class MANAGED Proxy FINAL : public Object {
private:
HeapReference<Object> h_;
diff --git a/runtime/mirror/stack_trace_element.h b/runtime/mirror/stack_trace_element.h
index abecbc5..52b0927 100644
--- a/runtime/mirror/stack_trace_element.h
+++ b/runtime/mirror/stack_trace_element.h
@@ -29,7 +29,7 @@ struct StackTraceElementOffsets;
namespace mirror {
// C++ mirror of java.lang.StackTraceElement
-class MANAGED StackTraceElement : public Object {
+class MANAGED StackTraceElement FINAL : public Object {
public:
String* GetDeclaringClass() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
return GetFieldObject<String>(OFFSET_OF_OBJECT_MEMBER(StackTraceElement, declaring_class_));
diff --git a/runtime/mirror/string-inl.h b/runtime/mirror/string-inl.h
index 315f7b1..6736497 100644
--- a/runtime/mirror/string-inl.h
+++ b/runtime/mirror/string-inl.h
@@ -18,6 +18,7 @@
#define ART_RUNTIME_MIRROR_STRING_INL_H_
#include "array.h"
+#include "class.h"
#include "intern_table.h"
#include "runtime.h"
#include "string.h"
@@ -26,6 +27,11 @@
namespace art {
namespace mirror {
+inline uint32_t String::ClassSize() {
+ uint32_t vtable_entries = Object::kVTableLength + 51;
+ return Class::ComputeClassSize(true, vtable_entries, 1, 1, 2);
+}
+
inline CharArray* String::GetCharArray() {
return GetFieldObject<CharArray>(ValueOffset());
}
diff --git a/runtime/mirror/string.h b/runtime/mirror/string.h
index b8acede..8ab4db9 100644
--- a/runtime/mirror/string.h
+++ b/runtime/mirror/string.h
@@ -19,22 +19,28 @@
#include <gtest/gtest.h>
-#include "class.h"
#include "object_callbacks.h"
#include "read_barrier.h"
namespace art {
template<class T> class Handle;
-struct StringClassOffsets;
struct StringOffsets;
class StringPiece;
namespace mirror {
// C++ mirror of java.lang.String
-class MANAGED String : public Object {
+class MANAGED String FINAL : public Object {
public:
+ // Size of java.lang.String.class.
+ static uint32_t ClassSize();
+
+ // Size of an instance of java.lang.String not including its value array.
+ static constexpr uint32_t InstanceSize() {
+ return sizeof(String);
+ }
+
static MemberOffset CountOffset() {
return OFFSET_OF_OBJECT_MEMBER(String, count_);
}
@@ -160,16 +166,6 @@ class MANAGED String : public Object {
DISALLOW_IMPLICIT_CONSTRUCTORS(String);
};
-class MANAGED StringClass : public Class {
- private:
- HeapReference<CharArray> ASCII_;
- HeapReference<Object> CASE_INSENSITIVE_ORDER_;
- uint32_t REPLACEMENT_CHAR_;
- int64_t serialVersionUID_;
- friend struct art::StringClassOffsets; // for verifying offset information
- DISALLOW_IMPLICIT_CONSTRUCTORS(StringClass);
-};
-
} // namespace mirror
} // namespace art