summaryrefslogtreecommitdiffstats
path: root/runtime
diff options
context:
space:
mode:
authorMathieu Chartier <mathieuc@google.com>2014-10-23 16:48:06 -0700
committerMathieu Chartier <mathieuc@google.com>2014-10-27 16:49:43 -0700
commit2d2621a1463d2f3f03fa73503fa42e43657cdcfc (patch)
tree1f7cf393693d72db5c186f99b04ac511958c6418 /runtime
parentac293277b69882105810fabd6c53f55de58823fe (diff)
downloadart-2d2621a1463d2f3f03fa73503fa42e43657cdcfc.zip
art-2d2621a1463d2f3f03fa73503fa42e43657cdcfc.tar.gz
art-2d2621a1463d2f3f03fa73503fa42e43657cdcfc.tar.bz2
Optimize method linking
Added more inlining, removed imt array allocation and replaced it with a handle scope. Removed some un-necessary handle scopes. Added logic to base interface method tables from the superclass so that we dont need to reconstruct for every interface (large win). Facebook launch Dalvik KK MR2: TotalTime: 3165 TotalTime: 3652 TotalTime: 3143 TotalTime: 3298 TotalTime: 3212 TotalTime: 3211 Facebook launch TOT before: WaitTime: 3702 WaitTime: 3616 WaitTime: 3616 WaitTime: 3687 WaitTime: 3742 WaitTime: 3767 After optimizations: WaitTime: 2903 WaitTime: 2953 WaitTime: 2918 WaitTime: 2940 WaitTime: 2879 WaitTime: 2792 LinkInterfaceMethods no longer one of the hottest methods, new list: 4.73% art::ClassLinker::LinkVirtualMethods(art::Thread*, art::Handle<art::mirror::Class>) 3.07% art::DexFile::FindClassDef(char const*) const 2.94% art::mirror::Class::FindDeclaredStaticField(art::mirror::DexCache const*, unsigned int) 2.90% art::DexFile::FindStringId(char const*) const Bug: 18054905 Bug: 16828525 (cherry picked from commit 1fb463e42cf1d67595cff66d19c0f99e3046f4c4) Change-Id: I27cc70178fd3655fbe5a3178887fcba189d21321
Diffstat (limited to 'runtime')
-rw-r--r--runtime/class_linker.cc337
-rw-r--r--runtime/class_linker.h9
-rw-r--r--runtime/entrypoints/entrypoint_utils-inl.h9
-rw-r--r--runtime/gc/accounting/card_table.h5
-rw-r--r--runtime/gc/heap.h10
-rw-r--r--runtime/gc/space/image_space.cc3
-rw-r--r--runtime/handle_scope-inl.h4
-rw-r--r--runtime/handle_scope.h22
-rw-r--r--runtime/image.cc2
-rw-r--r--runtime/image.h1
-rw-r--r--runtime/method_helper-inl.h17
-rw-r--r--runtime/method_helper.h13
-rw-r--r--runtime/mirror/array.h4
-rw-r--r--runtime/mirror/art_method-inl.h24
-rw-r--r--runtime/mirror/art_method.cc15
-rw-r--r--runtime/mirror/art_method.h18
-rw-r--r--runtime/mirror/class-inl.h33
-rw-r--r--runtime/mirror/class.cc52
-rw-r--r--runtime/mirror/class.h64
-rw-r--r--runtime/mirror/iftable-inl.h2
-rw-r--r--runtime/mirror/iftable.h7
-rw-r--r--runtime/mirror/object_array.h10
-rw-r--r--runtime/runtime-inl.h5
-rw-r--r--runtime/runtime.cc3
-rw-r--r--runtime/runtime.h7
25 files changed, 390 insertions, 286 deletions
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 854effd..0af8bd2 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -435,6 +435,7 @@ void ClassLinker::InitWithoutImage(const std::vector<const DexFile*>& boot_class
Runtime* runtime = Runtime::Current();
runtime->SetResolutionMethod(runtime->CreateResolutionMethod());
runtime->SetImtConflictMethod(runtime->CreateImtConflictMethod());
+ runtime->SetImtUnimplementedMethod(runtime->CreateImtConflictMethod());
runtime->SetDefaultImt(runtime->CreateDefaultImt(this));
// Set up GenericJNI entrypoint. That is mainly a hack for common_compiler_test.h so that
@@ -3211,7 +3212,11 @@ mirror::Class* ClassLinker::CreateArrayClass(Thread* self, const char* descripto
new_class->SetPrimitiveType(Primitive::kPrimNot);
new_class->SetClassLoader(component_type->GetClassLoader());
new_class->SetStatus(mirror::Class::kStatusLoaded, self);
- new_class->PopulateEmbeddedImtAndVTable();
+ {
+ StackHandleScope<mirror::Class::kImtSize> hs(self,
+ Runtime::Current()->GetImtUnimplementedMethod());
+ new_class->PopulateEmbeddedImtAndVTable(&hs);
+ }
new_class->SetStatus(mirror::Class::kStatusInitialized, self);
// don't need to set new_class->SetObjectSize(..)
// because Object::SizeOf delegates to Array::SizeOf
@@ -4423,7 +4428,9 @@ bool ClassLinker::LinkClass(Thread* self, const char* descriptor, Handle<mirror:
if (!LinkSuperClass(klass)) {
return false;
}
- if (!LinkMethods(self, klass, interfaces)) {
+ StackHandleScope<mirror::Class::kImtSize> imt_handle_scope(
+ self, Runtime::Current()->GetImtUnimplementedMethod());
+ if (!LinkMethods(self, klass, interfaces, &imt_handle_scope)) {
return false;
}
if (!LinkInstanceFields(self, klass)) {
@@ -4442,7 +4449,7 @@ bool ClassLinker::LinkClass(Thread* self, const char* descriptor, Handle<mirror:
CHECK_EQ(klass->GetClassSize(), class_size) << PrettyDescriptor(klass.Get());
if (klass->ShouldHaveEmbeddedImtAndVTable()) {
- klass->PopulateEmbeddedImtAndVTable();
+ klass->PopulateEmbeddedImtAndVTable(&imt_handle_scope);
}
// This will notify waiters on klass that saw the not yet resolved
@@ -4452,7 +4459,7 @@ bool ClassLinker::LinkClass(Thread* self, const char* descriptor, Handle<mirror:
} else {
CHECK(!klass->IsResolved());
// Retire the temporary class and create the correctly sized resolved class.
- *new_class = klass->CopyOf(self, class_size);
+ *new_class = klass->CopyOf(self, class_size, &imt_handle_scope);
if (UNLIKELY(*new_class == nullptr)) {
CHECK(self->IsExceptionPending()); // Expect an OOME.
klass->SetStatus(mirror::Class::kStatusError, self);
@@ -4586,7 +4593,8 @@ bool ClassLinker::LinkSuperClass(Handle<mirror::Class> klass) {
// Populate the class vtable and itable. Compute return type indices.
bool ClassLinker::LinkMethods(Thread* self, Handle<mirror::Class> klass,
- Handle<mirror::ObjectArray<mirror::Class>> interfaces) {
+ Handle<mirror::ObjectArray<mirror::Class>> interfaces,
+ StackHandleScope<mirror::Class::kImtSize>* out_imt) {
self->AllowThreadSuspension();
if (klass->IsInterface()) {
// No vtable.
@@ -4598,22 +4606,19 @@ bool ClassLinker::LinkMethods(Thread* self, Handle<mirror::Class> klass,
for (size_t i = 0; i < count; ++i) {
klass->GetVirtualMethodDuringLinking(i)->SetMethodIndex(i);
}
- // Link interface method tables
- return LinkInterfaceMethods(klass, interfaces);
- } else {
- // Link virtual and interface method tables
- return LinkVirtualMethods(self, klass) && LinkInterfaceMethods(klass, interfaces);
+ } else if (!LinkVirtualMethods(self, klass)) { // Link virtual methods first.
+ return false;
}
- return true;
+ return LinkInterfaceMethods(self, klass, interfaces, out_imt); // Link interface methods last.
}
bool ClassLinker::LinkVirtualMethods(Thread* self, Handle<mirror::Class> klass) {
+ const size_t num_virtual_methods = klass->NumVirtualMethods();
if (klass->HasSuperClass()) {
- uint32_t max_count = klass->NumVirtualMethods() +
- klass->GetSuperClass()->GetVTableLength();
- size_t actual_count = klass->GetSuperClass()->GetVTableLength();
- CHECK_LE(actual_count, max_count);
- StackHandleScope<4> hs(self);
+ const size_t super_vtable_length = klass->GetSuperClass()->GetVTableLength();
+ const size_t max_count = num_virtual_methods + super_vtable_length;
+ size_t actual_count = super_vtable_length;
+ StackHandleScope<2> hs(self);
Handle<mirror::Class> super_class(hs.NewHandle(klass->GetSuperClass()));
MutableHandle<mirror::ObjectArray<mirror::ArtMethod>> vtable;
if (super_class->ShouldHaveEmbeddedImtAndVTable()) {
@@ -4622,9 +4627,8 @@ bool ClassLinker::LinkVirtualMethods(Thread* self, Handle<mirror::Class> klass)
CHECK(self->IsExceptionPending()); // OOME.
return false;
}
- int len = super_class->GetVTableLength();
- for (int i = 0; i < len; i++) {
- vtable->Set<false>(i, super_class->GetVTableEntry(i));
+ for (size_t i = 0; i < super_vtable_length; i++) {
+ vtable->SetWithoutChecks<false>(i, super_class->GetEmbeddedVTableEntry(i));
}
} else {
CHECK(super_class->GetVTable() != nullptr) << PrettyClass(super_class.Get());
@@ -4636,16 +4640,14 @@ bool ClassLinker::LinkVirtualMethods(Thread* self, Handle<mirror::Class> klass)
}
// See if any of our virtual methods override the superclass.
- MutableMethodHelper local_mh(hs.NewHandle<mirror::ArtMethod>(nullptr));
- MutableMethodHelper super_mh(hs.NewHandle<mirror::ArtMethod>(nullptr));
- for (size_t i = 0; i < klass->NumVirtualMethods(); ++i) {
+ for (size_t i = 0; i < num_virtual_methods; ++i) {
mirror::ArtMethod* local_method = klass->GetVirtualMethodDuringLinking(i);
- local_mh.ChangeMethod(local_method);
+ MethodProtoHelper local_helper(local_method);
size_t j = 0;
for (; j < actual_count; ++j) {
- mirror::ArtMethod* super_method = vtable->Get(j);
- super_mh.ChangeMethod(super_method);
- if (local_mh.HasSameNameAndSignature(&super_mh)) {
+ mirror::ArtMethod* super_method = vtable->GetWithoutChecks(j);
+ MethodProtoHelper super_helper(super_method);
+ if (local_helper.HasSameNameAndSignature(super_helper)) {
if (klass->CanAccessMember(super_method->GetDeclaringClass(),
super_method->GetAccessFlags())) {
if (super_method->IsFinal()) {
@@ -4654,21 +4656,20 @@ bool ClassLinker::LinkVirtualMethods(Thread* self, Handle<mirror::Class> klass)
super_method->GetDeclaringClassDescriptor());
return false;
}
- vtable->Set<false>(j, local_method);
+ vtable->SetWithoutChecks<false>(j, local_method);
local_method->SetMethodIndex(j);
break;
- } else {
- LOG(WARNING) << "Before Android 4.1, method " << PrettyMethod(local_method)
- << " would have incorrectly overridden the package-private method in "
- << PrettyDescriptor(super_method->GetDeclaringClassDescriptor());
}
+ LOG(WARNING) << "Before Android 4.1, method " << PrettyMethod(local_method)
+ << " would have incorrectly overridden the package-private method in "
+ << PrettyDescriptor(super_method->GetDeclaringClassDescriptor());
}
}
if (j == actual_count) {
// Not overriding, append.
- vtable->Set<false>(actual_count, local_method);
+ vtable->SetWithoutChecks<false>(actual_count, local_method);
local_method->SetMethodIndex(actual_count);
- actual_count += 1;
+ ++actual_count;
}
}
if (!IsUint(16, actual_count)) {
@@ -4687,72 +4688,76 @@ bool ClassLinker::LinkVirtualMethods(Thread* self, Handle<mirror::Class> klass)
klass->SetVTable(vtable.Get());
} else {
CHECK_EQ(klass.Get(), GetClassRoot(kJavaLangObject));
- uint32_t num_virtual_methods = klass->NumVirtualMethods();
if (!IsUint(16, num_virtual_methods)) {
- ThrowClassFormatError(klass.Get(), "Too many methods: %d", num_virtual_methods);
+ ThrowClassFormatError(klass.Get(), "Too many methods: %d",
+ static_cast<int>(num_virtual_methods));
return false;
}
- StackHandleScope<1> hs(self);
- Handle<mirror::ObjectArray<mirror::ArtMethod>>
- vtable(hs.NewHandle(AllocArtMethodArray(self, num_virtual_methods)));
- if (UNLIKELY(vtable.Get() == nullptr)) {
+ mirror::ObjectArray<mirror::ArtMethod>* vtable = AllocArtMethodArray(self, num_virtual_methods);
+ if (UNLIKELY(vtable == nullptr)) {
CHECK(self->IsExceptionPending()); // OOME.
return false;
}
for (size_t i = 0; i < num_virtual_methods; ++i) {
mirror::ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i);
- vtable->Set<false>(i, virtual_method);
+ vtable->SetWithoutChecks<false>(i, virtual_method);
virtual_method->SetMethodIndex(i & 0xFFFF);
}
- klass->SetVTable(vtable.Get());
+ klass->SetVTable(vtable);
}
return true;
}
-bool ClassLinker::LinkInterfaceMethods(Handle<mirror::Class> klass,
- Handle<mirror::ObjectArray<mirror::Class>> interfaces) {
- Thread* const self = Thread::Current();
+bool ClassLinker::LinkInterfaceMethods(Thread* self, Handle<mirror::Class> klass,
+ Handle<mirror::ObjectArray<mirror::Class>> interfaces,
+ StackHandleScope<mirror::Class::kImtSize>* out_imt) {
+ static constexpr size_t kInterfaceCacheSize = 8;
+ StackHandleScope<3 + kInterfaceCacheSize> hs(self);
Runtime* const runtime = Runtime::Current();
- // Set the imt table to be all conflicts by default.
- klass->SetImTable(runtime->GetDefaultImt());
- size_t super_ifcount;
- if (klass->HasSuperClass()) {
- super_ifcount = klass->GetSuperClass()->GetIfTableCount();
- } else {
- super_ifcount = 0;
- }
- uint32_t num_interfaces =
- interfaces.Get() == nullptr ? klass->NumDirectInterfaces() : interfaces->GetLength();
- size_t ifcount = super_ifcount + num_interfaces;
- for (size_t i = 0; i < num_interfaces; i++) {
- mirror::Class* interface =
- interfaces.Get() == nullptr ? mirror::Class::GetDirectInterface(self, klass, i) :
- interfaces->Get(i);
- ifcount += interface->GetIfTableCount();
- }
- if (ifcount == 0) {
- // Class implements no interfaces.
- DCHECK_EQ(klass->GetIfTableCount(), 0);
- DCHECK(klass->GetIfTable() == nullptr);
- return true;
- }
- if (ifcount == super_ifcount) {
+ const bool has_superclass = klass->HasSuperClass();
+ const size_t super_ifcount = has_superclass ? klass->GetSuperClass()->GetIfTableCount() : 0U;
+ const bool have_interfaces = interfaces.Get() != nullptr;
+ const size_t num_interfaces =
+ have_interfaces ? interfaces->GetLength() : klass->NumDirectInterfaces();
+ if (num_interfaces == 0) {
+ if (super_ifcount == 0) {
+ // Class implements no interfaces.
+ DCHECK_EQ(klass->GetIfTableCount(), 0);
+ DCHECK(klass->GetIfTable() == nullptr);
+ return true;
+ }
// Class implements same interfaces as parent, are any of these not marker interfaces?
bool has_non_marker_interface = false;
mirror::IfTable* super_iftable = klass->GetSuperClass()->GetIfTable();
- for (size_t i = 0; i < ifcount; ++i) {
+ for (size_t i = 0; i < super_ifcount; ++i) {
if (super_iftable->GetMethodArrayCount(i) > 0) {
has_non_marker_interface = true;
break;
}
}
+ // Class just inherits marker interfaces from parent so recycle parent's iftable.
if (!has_non_marker_interface) {
- // Class just inherits marker interfaces from parent so recycle parent's iftable.
klass->SetIfTable(super_iftable);
return true;
}
}
- StackHandleScope<5> hs(self);
+ size_t ifcount = super_ifcount + num_interfaces;
+ for (size_t i = 0; i < num_interfaces; i++) {
+ mirror::Class* interface = have_interfaces ?
+ interfaces->GetWithoutChecks(i) : mirror::Class::GetDirectInterface(self, klass, i);
+ DCHECK(interface != nullptr);
+ if (i < kInterfaceCacheSize) {
+ hs.NewHandle(interface);
+ }
+ if (UNLIKELY(!interface->IsInterface())) {
+ std::string temp;
+ ThrowIncompatibleClassChangeError(klass.Get(), "Class %s implements non-interface class %s",
+ PrettyDescriptor(klass.Get()).c_str(),
+ PrettyDescriptor(interface->GetDescriptor(&temp)).c_str());
+ return false;
+ }
+ ifcount += interface->GetIfTableCount();
+ }
MutableHandle<mirror::IfTable> iftable(hs.NewHandle(AllocIfTable(self, ifcount)));
if (UNLIKELY(iftable.Get() == nullptr)) {
CHECK(self->IsExceptionPending()); // OOME.
@@ -4769,16 +4774,12 @@ bool ClassLinker::LinkInterfaceMethods(Handle<mirror::Class> klass,
// Flatten the interface inheritance hierarchy.
size_t idx = super_ifcount;
for (size_t i = 0; i < num_interfaces; i++) {
- mirror::Class* interface =
- interfaces.Get() == nullptr ? mirror::Class::GetDirectInterface(self, klass, i) :
- interfaces->Get(i);
- DCHECK(interface != nullptr);
- if (!interface->IsInterface()) {
- std::string temp;
- ThrowIncompatibleClassChangeError(klass.Get(), "Class %s implements non-interface class %s",
- PrettyDescriptor(klass.Get()).c_str(),
- PrettyDescriptor(interface->GetDescriptor(&temp)).c_str());
- return false;
+ mirror::Class* interface;
+ if (i < kInterfaceCacheSize) {
+ interface = hs.GetReference(i)->AsClass();
+ } else {
+ interface = have_interfaces ? interfaces->Get(i) :
+ mirror::Class::GetDirectInterface(self, klass, i);
}
// Check if interface is already in iftable
bool duplicate = false;
@@ -4812,6 +4813,7 @@ bool ClassLinker::LinkInterfaceMethods(Handle<mirror::Class> klass,
self->AllowThreadSuspension();
// Shrink iftable in case duplicates were found
if (idx < ifcount) {
+ DCHECK_NE(num_interfaces, 0U);
iftable.Assign(down_cast<mirror::IfTable*>(iftable->CopyOf(self, idx * mirror::IfTable::kMax)));
if (UNLIKELY(iftable.Get() == nullptr)) {
CHECK(self->IsExceptionPending()); // OOME.
@@ -4819,48 +4821,100 @@ bool ClassLinker::LinkInterfaceMethods(Handle<mirror::Class> klass,
}
ifcount = idx;
} else {
- CHECK_EQ(idx, ifcount);
+ DCHECK_EQ(idx, ifcount);
}
klass->SetIfTable(iftable.Get());
-
// If we're an interface, we don't need the vtable pointers, so we're done.
if (klass->IsInterface()) {
return true;
}
- self->AllowThreadSuspension();
- // Allocate imtable
- bool imtable_changed = false;
- Handle<mirror::ObjectArray<mirror::ArtMethod>> imtable(
- hs.NewHandle(AllocArtMethodArray(self, mirror::Class::kImtSize)));
- if (UNLIKELY(imtable.Get() == nullptr)) {
- CHECK(self->IsExceptionPending()); // OOME.
- return false;
- }
- MutableMethodHelper interface_mh(hs.NewHandle<mirror::ArtMethod>(nullptr));
- MutableMethodHelper vtable_mh(hs.NewHandle<mirror::ArtMethod>(nullptr));
+ size_t miranda_list_size = 0;
size_t max_miranda_methods = 0; // The max size of miranda_list.
for (size_t i = 0; i < ifcount; ++i) {
max_miranda_methods += iftable->GetInterface(i)->NumVirtualMethods();
}
- Handle<mirror::ObjectArray<mirror::ArtMethod>>
+ MutableHandle<mirror::ObjectArray<mirror::ArtMethod>>
miranda_list(hs.NewHandle(AllocArtMethodArray(self, max_miranda_methods)));
- size_t miranda_list_size = 0; // The current size of miranda_list.
+ MutableHandle<mirror::ObjectArray<mirror::ArtMethod>> vtable(
+ hs.NewHandle(klass->GetVTableDuringLinking()));
+ // Copy the IMT from the super class if possible.
+ bool extend_super_iftable = false;
+ if (has_superclass) {
+ mirror::Class* super_class = klass->GetSuperClass();
+ extend_super_iftable = true;
+ if (super_class->ShouldHaveEmbeddedImtAndVTable()) {
+ for (size_t i = 0; i < mirror::Class::kImtSize; ++i) {
+ out_imt->SetReference(i, super_class->GetEmbeddedImTableEntry(i));
+ }
+ } else {
+ // No imt in the super class, need to reconstruct from the iftable.
+ mirror::IfTable* if_table = super_class->GetIfTable();
+ mirror::ArtMethod* conflict_method = runtime->GetImtConflictMethod();
+ const size_t length = super_class->GetIfTableCount();
+ for (size_t i = 0; i < length; ++i) {
+ mirror::Class* interface = iftable->GetInterface(i);
+ const size_t num_virtuals = interface->NumVirtualMethods();
+ const size_t method_array_count = if_table->GetMethodArrayCount(i);
+ DCHECK_EQ(num_virtuals, method_array_count);
+ if (method_array_count == 0) {
+ continue;
+ }
+ mirror::ObjectArray<mirror::ArtMethod>* method_array = if_table->GetMethodArray(i);
+ for (size_t j = 0; j < num_virtuals; ++j) {
+ mirror::ArtMethod* method = method_array->GetWithoutChecks(j);
+ if (method->IsMiranda()) {
+ continue;
+ }
+ mirror::ArtMethod* interface_method = interface->GetVirtualMethod(j);
+ uint32_t imt_index = interface_method->GetDexMethodIndex() % mirror::Class::kImtSize;
+ mirror::ArtMethod* imt_ref = out_imt->GetReference(imt_index)->AsArtMethod();
+ if (imt_ref == runtime->GetImtUnimplementedMethod()) {
+ out_imt->SetReference(imt_index, method);
+ } else if (imt_ref != conflict_method) {
+ out_imt->SetReference(imt_index, conflict_method);
+ }
+ }
+ }
+ }
+ }
for (size_t i = 0; i < ifcount; ++i) {
self->AllowThreadSuspension();
size_t num_methods = iftable->GetInterface(i)->NumVirtualMethods();
if (num_methods > 0) {
StackHandleScope<2> hs(self);
- Handle<mirror::ObjectArray<mirror::ArtMethod>>
- method_array(hs.NewHandle(AllocArtMethodArray(self, num_methods)));
+ const bool is_super = i < super_ifcount;
+ const bool super_interface = is_super && extend_super_iftable;
+ Handle<mirror::ObjectArray<mirror::ArtMethod>> method_array;
+ Handle<mirror::ObjectArray<mirror::ArtMethod>> input_array;
+ if (super_interface) {
+ mirror::IfTable* if_table = klass->GetSuperClass()->GetIfTable();
+ DCHECK(if_table != nullptr);
+ DCHECK(if_table->GetMethodArray(i) != nullptr);
+ // If we are working on a super interface, try extending the existing method array.
+ method_array = hs.NewHandle(if_table->GetMethodArray(i)->Clone(self)->
+ AsObjectArray<mirror::ArtMethod>());
+ // We are overwriting a super class interface, try to use only the virtual methods instead
+ // of the whole vtable.
+ input_array = hs.NewHandle(klass->GetVirtualMethods());
+ } else {
+ method_array = hs.NewHandle(AllocArtMethodArray(self, num_methods));
+ // A new interface, we need the whole vtable in case a new interface method is implemented
+ // anywhere in the superclass.
+ input_array = vtable;
+ }
if (UNLIKELY(method_array.Get() == nullptr)) {
CHECK(self->IsExceptionPending()); // OOME.
return false;
}
iftable->SetMethodArray(i, method_array.Get());
- Handle<mirror::ObjectArray<mirror::ArtMethod>> vtable(
- hs.NewHandle(klass->GetVTableDuringLinking()));
+ if (input_array.Get() == nullptr) {
+ // If the added virtual method array is empty, do nothing.
+ DCHECK(super_interface);
+ continue;
+ }
for (size_t j = 0; j < num_methods; ++j) {
- interface_mh.ChangeMethod(iftable->GetInterface(i)->GetVirtualMethod(j));
+ mirror::ArtMethod* interface_method = iftable->GetInterface(i)->GetVirtualMethod(j);
+ MethodProtoHelper interface_helper(interface_method);
int32_t k;
// For each method listed in the interface's method list, find the
// matching method in our class's method list. We want to favor the
@@ -4870,66 +4924,62 @@ bool ClassLinker::LinkInterfaceMethods(Handle<mirror::Class> klass,
// it -- otherwise it would use the same vtable slot. In .dex files
// those don't end up in the virtual method table, so it shouldn't
// matter which direction we go. We walk it backward anyway.)
- for (k = vtable->GetLength() - 1; k >= 0; --k) {
- vtable_mh.ChangeMethod(vtable->Get(k));
- if (interface_mh.HasSameNameAndSignature(&vtable_mh)) {
- if (!vtable_mh.Get()->IsAbstract() && !vtable_mh.Get()->IsPublic()) {
+ for (k = input_array->GetLength() - 1; k >= 0; --k) {
+ mirror::ArtMethod* vtable_method = input_array->GetWithoutChecks(k);
+ MethodProtoHelper vtable_helper(vtable_method);
+ if (interface_helper.HasSameNameAndSignature(vtable_helper)) {
+ if (!vtable_method->IsAbstract() && !vtable_method->IsPublic()) {
ThrowIllegalAccessError(
klass.Get(),
"Method '%s' implementing interface method '%s' is not public",
- PrettyMethod(vtable_mh.Get()).c_str(),
- PrettyMethod(interface_mh.Get()).c_str());
+ PrettyMethod(vtable_method).c_str(),
+ PrettyMethod(interface_method).c_str());
return false;
}
- method_array->Set<false>(j, vtable_mh.Get());
+ method_array->SetWithoutChecks<false>(j, vtable_method);
// Place method in imt if entry is empty, place conflict otherwise.
- uint32_t imt_index = interface_mh.Get()->GetDexMethodIndex() % mirror::Class::kImtSize;
- if (imtable->Get(imt_index) == nullptr) {
- imtable->Set<false>(imt_index, vtable_mh.Get());
- imtable_changed = true;
- } else {
- imtable->Set<false>(imt_index, runtime->GetImtConflictMethod());
+ uint32_t imt_index = interface_method->GetDexMethodIndex() % mirror::Class::kImtSize;
+ mirror::ArtMethod* imt_ref = out_imt->GetReference(imt_index)->AsArtMethod();
+ mirror::ArtMethod* conflict_method = runtime->GetImtConflictMethod();
+ if (imt_ref == runtime->GetImtUnimplementedMethod()) {
+ out_imt->SetReference(imt_index, vtable_method);
+ } else if (imt_ref != conflict_method) {
+ // If we are not a conflict and we have the same signature and name as the imt entry,
+ // it must be that we overwrote a superclass vtable entry.
+ if (MethodProtoHelper(imt_ref).HasSameNameAndSignature(vtable_helper)) {
+ out_imt->SetReference(imt_index, vtable_method);
+ } else {
+ out_imt->SetReference(imt_index, conflict_method);
+ }
}
break;
}
}
- if (k < 0) {
- StackHandleScope<1> hs(self);
- auto miranda_method = hs.NewHandle<mirror::ArtMethod>(nullptr);
+ if (k < 0 && !super_interface) {
+ mirror::ArtMethod* miranda_method = nullptr;
for (size_t l = 0; l < miranda_list_size; ++l) {
mirror::ArtMethod* mir_method = miranda_list->Get(l);
- DCHECK(mir_method != nullptr);
- vtable_mh.ChangeMethod(mir_method);
- if (interface_mh.HasSameNameAndSignature(&vtable_mh)) {
- miranda_method.Assign(mir_method);
+ MethodProtoHelper vtable_helper(mir_method);
+ if (interface_helper.HasSameNameAndSignature(vtable_helper)) {
+ miranda_method = mir_method;
break;
}
}
- if (miranda_method.Get() == nullptr) {
+ if (miranda_method == nullptr) {
// Point the interface table at a phantom slot.
- miranda_method.Assign(down_cast<mirror::ArtMethod*>(interface_mh.Get()->Clone(self)));
- if (UNLIKELY(miranda_method.Get() == nullptr)) {
+ miranda_method = interface_method->Clone(self)->AsArtMethod();
+ if (UNLIKELY(miranda_method == nullptr)) {
CHECK(self->IsExceptionPending()); // OOME.
return false;
}
DCHECK_LT(miranda_list_size, max_miranda_methods);
- miranda_list->Set<false>(miranda_list_size++, miranda_method.Get());
+ miranda_list->Set<false>(miranda_list_size++, miranda_method);
}
- method_array->Set<false>(j, miranda_method.Get());
+ method_array->SetWithoutChecks<false>(j, miranda_method);
}
}
}
}
- if (imtable_changed) {
- // Fill in empty entries in interface method table with conflict.
- mirror::ArtMethod* imt_conflict_method = runtime->GetImtConflictMethod();
- for (size_t i = 0; i < mirror::Class::kImtSize; i++) {
- if (imtable->Get(i) == nullptr) {
- imtable->Set<false>(i, imt_conflict_method);
- }
- }
- klass->SetImTable(imtable.Get());
- }
if (miranda_list_size > 0) {
int old_method_count = klass->NumVirtualMethods();
int new_method_count = old_method_count + miranda_list_size;
@@ -4945,10 +4995,6 @@ bool ClassLinker::LinkInterfaceMethods(Handle<mirror::Class> klass,
}
klass->SetVirtualMethods(virtuals);
- StackHandleScope<1> hs(self);
- MutableHandle<mirror::ObjectArray<mirror::ArtMethod>> vtable(
- hs.NewHandle(klass->GetVTableDuringLinking()));
- CHECK(vtable.Get() != nullptr);
int old_vtable_count = vtable->GetLength();
int new_vtable_count = old_vtable_count + miranda_list_size;
vtable.Assign(vtable->CopyOf(self, new_vtable_count));
@@ -4962,19 +5008,20 @@ bool ClassLinker::LinkInterfaceMethods(Handle<mirror::Class> klass,
method->SetAccessFlags(method->GetAccessFlags() | kAccMiranda);
method->SetMethodIndex(0xFFFF & (old_vtable_count + i));
klass->SetVirtualMethod(old_method_count + i, method);
- vtable->Set<false>(old_vtable_count + i, method);
+ vtable->SetWithoutChecks<false>(old_vtable_count + i, method);
}
// TODO: do not assign to the vtable field until it is fully constructed.
klass->SetVTable(vtable.Get());
}
- mirror::ObjectArray<mirror::ArtMethod>* vtable = klass->GetVTableDuringLinking();
- for (int i = 0; i < vtable->GetLength(); ++i) {
- CHECK(vtable->Get(i) != nullptr);
+ if (kIsDebugBuild) {
+ mirror::ObjectArray<mirror::ArtMethod>* vtable = klass->GetVTableDuringLinking();
+ for (int i = 0; i < vtable->GetLength(); ++i) {
+ CHECK(vtable->GetWithoutChecks(i) != nullptr);
+ }
}
self->AllowThreadSuspension();
-
return true;
}
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index 1847926..51cd730 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -52,6 +52,7 @@ class InternTable;
template<class T> class ObjectLock;
class ScopedObjectAccessAlreadyRunnable;
template<class T> class Handle;
+template<size_t kNumReferences> class PACKED(4) StackHandleScope;
typedef bool (ClassVisitor)(mirror::Class* c, void* arg);
@@ -561,14 +562,16 @@ class ClassLinker {
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
bool LinkMethods(Thread* self, Handle<mirror::Class> klass,
- Handle<mirror::ObjectArray<mirror::Class>> interfaces)
+ Handle<mirror::ObjectArray<mirror::Class>> interfaces,
+ StackHandleScope<mirror::Class::kImtSize>* out_imt)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
bool LinkVirtualMethods(Thread* self, Handle<mirror::Class> klass)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- bool LinkInterfaceMethods(Handle<mirror::Class> klass,
- Handle<mirror::ObjectArray<mirror::Class>> interfaces)
+ bool LinkInterfaceMethods(Thread* const self, Handle<mirror::Class> klass,
+ Handle<mirror::ObjectArray<mirror::Class>> interfaces,
+ StackHandleScope<mirror::Class::kImtSize>* out_imt)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
bool LinkStaticFields(Thread* self, Handle<mirror::Class> klass, size_t* class_size)
diff --git a/runtime/entrypoints/entrypoint_utils-inl.h b/runtime/entrypoints/entrypoint_utils-inl.h
index ccc5d83..670bf2a 100644
--- a/runtime/entrypoints/entrypoint_utils-inl.h
+++ b/runtime/entrypoints/entrypoint_utils-inl.h
@@ -437,7 +437,14 @@ static inline mirror::ArtMethod* FindMethodFromCode(uint32_t method_idx,
case kInterface: {
uint32_t imt_index = resolved_method->GetDexMethodIndex() % mirror::Class::kImtSize;
mirror::ArtMethod* imt_method = (*this_object)->GetClass()->GetEmbeddedImTableEntry(imt_index);
- if (!imt_method->IsImtConflictMethod()) {
+ if (!imt_method->IsImtConflictMethod() && !imt_method->IsImtUnimplementedMethod()) {
+ if (kIsDebugBuild) {
+ mirror::Class* klass = (*this_object)->GetClass();
+ mirror::ArtMethod* method = klass->FindVirtualMethodForInterface(resolved_method);
+ CHECK_EQ(imt_method, method) << PrettyMethod(resolved_method) << " / " <<
+ PrettyMethod(imt_method) << " / " << PrettyMethod(method) << " / " <<
+ PrettyClass(klass);
+ }
return imt_method;
} else {
mirror::ArtMethod* interface_method =
diff --git a/runtime/gc/accounting/card_table.h b/runtime/gc/accounting/card_table.h
index e1343c8..9bd3fba 100644
--- a/runtime/gc/accounting/card_table.h
+++ b/runtime/gc/accounting/card_table.h
@@ -54,9 +54,8 @@ class CardTable {
static CardTable* Create(const uint8_t* heap_begin, size_t heap_capacity);
// Set the card associated with the given address to GC_CARD_DIRTY.
- void MarkCard(const void *addr) {
- uint8_t* card_addr = CardFromAddr(addr);
- *card_addr = kCardDirty;
+ ALWAYS_INLINE void MarkCard(const void *addr) {
+ *CardFromAddr(addr) = kCardDirty;
}
// Is the object on a dirty card?
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index ff1e38b..7b891a6 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -382,18 +382,18 @@ class Heap {
// Must be called if a field of an Object in the heap changes, and before any GC safe-point.
// The call is not needed if NULL is stored in the field.
- void WriteBarrierField(const mirror::Object* dst, MemberOffset /*offset*/,
- const mirror::Object* /*new_value*/) {
+ ALWAYS_INLINE void WriteBarrierField(const mirror::Object* dst, MemberOffset /*offset*/,
+ const mirror::Object* /*new_value*/) {
card_table_->MarkCard(dst);
}
// Write barrier for array operations that update many field positions
- void WriteBarrierArray(const mirror::Object* dst, int /*start_offset*/,
- size_t /*length TODO: element_count or byte_count?*/) {
+ ALWAYS_INLINE void WriteBarrierArray(const mirror::Object* dst, int /*start_offset*/,
+ size_t /*length TODO: element_count or byte_count?*/) {
card_table_->MarkCard(dst);
}
- void WriteBarrierEveryFieldOf(const mirror::Object* obj) {
+ ALWAYS_INLINE void WriteBarrierEveryFieldOf(const mirror::Object* obj) {
card_table_->MarkCard(obj);
}
diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc
index c020240..414c5ea 100644
--- a/runtime/gc/space/image_space.cc
+++ b/runtime/gc/space/image_space.cc
@@ -630,6 +630,9 @@ ImageSpace* ImageSpace::Init(const char* image_filename, const char* image_locat
runtime->SetResolutionMethod(down_cast<mirror::ArtMethod*>(resolution_method));
mirror::Object* imt_conflict_method = image_header.GetImageRoot(ImageHeader::kImtConflictMethod);
runtime->SetImtConflictMethod(down_cast<mirror::ArtMethod*>(imt_conflict_method));
+ mirror::Object* imt_unimplemented_method =
+ image_header.GetImageRoot(ImageHeader::kImtUnimplementedMethod);
+ runtime->SetImtUnimplementedMethod(down_cast<mirror::ArtMethod*>(imt_unimplemented_method));
mirror::Object* default_imt = image_header.GetImageRoot(ImageHeader::kDefaultImt);
runtime->SetDefaultImt(down_cast<mirror::ObjectArray<mirror::ArtMethod>*>(default_imt));
diff --git a/runtime/handle_scope-inl.h b/runtime/handle_scope-inl.h
index 2717180..b0aadec 100644
--- a/runtime/handle_scope-inl.h
+++ b/runtime/handle_scope-inl.h
@@ -25,13 +25,13 @@
namespace art {
template<size_t kNumReferences>
-inline StackHandleScope<kNumReferences>::StackHandleScope(Thread* self)
+inline StackHandleScope<kNumReferences>::StackHandleScope(Thread* self, mirror::Object* fill_value)
: HandleScope(self->GetTopHandleScope(), kNumReferences), self_(self), pos_(0) {
COMPILE_ASSERT(kNumReferences >= 1, stack_handle_scope_must_contain_at_least_1_reference);
// TODO: Figure out how to use a compile assert.
CHECK_EQ(&storage_[0], GetReferences());
for (size_t i = 0; i < kNumReferences; ++i) {
- SetReference(i, nullptr);
+ SetReference(i, fill_value);
}
self_->PushHandleScope(this);
}
diff --git a/runtime/handle_scope.h b/runtime/handle_scope.h
index 13c939f..beb7ee0 100644
--- a/runtime/handle_scope.h
+++ b/runtime/handle_scope.h
@@ -166,11 +166,11 @@ class HandleWrapper : public MutableHandle<T> {
template<size_t kNumReferences>
class PACKED(4) StackHandleScope FINAL : public HandleScope {
public:
- explicit StackHandleScope(Thread* self);
- ~StackHandleScope();
+ explicit ALWAYS_INLINE StackHandleScope(Thread* self, mirror::Object* fill_value = nullptr);
+ ALWAYS_INLINE ~StackHandleScope();
template<class T>
- MutableHandle<T> NewHandle(T* object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ ALWAYS_INLINE MutableHandle<T> NewHandle(T* object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
SetReference(pos_, object);
MutableHandle<T> h(GetHandle<T>(pos_));
pos_++;
@@ -178,25 +178,25 @@ class PACKED(4) StackHandleScope FINAL : public HandleScope {
}
template<class T>
- HandleWrapper<T> NewHandleWrapper(T** object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ ALWAYS_INLINE HandleWrapper<T> NewHandleWrapper(T** object)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
SetReference(pos_, *object);
MutableHandle<T> h(GetHandle<T>(pos_));
pos_++;
return HandleWrapper<T>(object, h);
}
- private:
- template<class T>
- ALWAYS_INLINE MutableHandle<T> GetHandle(size_t i)
+ ALWAYS_INLINE void SetReference(size_t i, mirror::Object* object)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
DCHECK_LT(i, kNumReferences);
- return MutableHandle<T>(&GetReferences()[i]);
+ GetReferences()[i].Assign(object);
}
- ALWAYS_INLINE void SetReference(size_t i, mirror::Object* object)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ private:
+ template<class T>
+ ALWAYS_INLINE MutableHandle<T> GetHandle(size_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
DCHECK_LT(i, kNumReferences);
- GetReferences()[i].Assign(object);
+ return MutableHandle<T>(&GetReferences()[i]);
}
// Reference storage needs to be first as expected by the HandleScope layout.
diff --git a/runtime/image.cc b/runtime/image.cc
index c065d8e..2ee3fa4 100644
--- a/runtime/image.cc
+++ b/runtime/image.cc
@@ -24,7 +24,7 @@
namespace art {
const uint8_t ImageHeader::kImageMagic[] = { 'a', 'r', 't', '\n' };
-const uint8_t ImageHeader::kImageVersion[] = { '0', '1', '0', '\0' };
+const uint8_t ImageHeader::kImageVersion[] = { '0', '1', '1', '\0' };
ImageHeader::ImageHeader(uint32_t image_begin,
uint32_t image_size,
diff --git a/runtime/image.h b/runtime/image.h
index ec95d01..03de509 100644
--- a/runtime/image.h
+++ b/runtime/image.h
@@ -105,6 +105,7 @@ class PACKED(4) ImageHeader {
enum ImageRoot {
kResolutionMethod,
kImtConflictMethod,
+ kImtUnimplementedMethod,
kDefaultImt,
kCalleeSaveMethod,
kRefsOnlySaveMethod,
diff --git a/runtime/method_helper-inl.h b/runtime/method_helper-inl.h
index 143f4bc..21cc67c 100644
--- a/runtime/method_helper-inl.h
+++ b/runtime/method_helper-inl.h
@@ -80,6 +80,23 @@ inline mirror::String* MethodHelperT<HandleKind>::ResolveString(uint32_t string_
return s;
}
+inline MethodProtoHelper::MethodProtoHelper(mirror::ArtMethod* method) {
+ method = method->GetInterfaceMethodIfProxy();
+ dex_file_ = method->GetDexFile();
+ mid_ = &dex_file_->GetMethodId(method->GetDexMethodIndex());
+ name_ = dex_file_->StringDataAndUtf16LengthByIdx(mid_->name_idx_, &name_len_);
+}
+
+inline bool MethodProtoHelper::HasSameNameAndSignature(const MethodProtoHelper& other) const {
+ if (name_len_ != other.name_len_ || strcmp(name_, other.name_) != 0) {
+ return false;
+ }
+ if (dex_file_ == other.dex_file_) {
+ return mid_->name_idx_ == other.mid_->name_idx_ && mid_->proto_idx_ == other.mid_->proto_idx_;
+ }
+ return dex_file_->GetMethodSignature(*mid_) == other.dex_file_->GetMethodSignature(*other.mid_);
+}
+
} // namespace art
#endif // ART_RUNTIME_METHOD_HELPER_INL_H_
diff --git a/runtime/method_helper.h b/runtime/method_helper.h
index fe364d3..913b41b 100644
--- a/runtime/method_helper.h
+++ b/runtime/method_helper.h
@@ -24,6 +24,19 @@
namespace art {
+class MethodProtoHelper {
+ public:
+ ALWAYS_INLINE MethodProtoHelper(mirror::ArtMethod* method)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE bool HasSameNameAndSignature(const MethodProtoHelper& other) const;
+
+ private:
+ const DexFile* dex_file_;
+ const DexFile::MethodId* mid_;
+ const char* name_;
+ uint32_t name_len_;
+};
+
template <template <class T> class HandleKind>
class MethodHelperT {
public:
diff --git a/runtime/mirror/array.h b/runtime/mirror/array.h
index 12bec89..83e3688 100644
--- a/runtime/mirror/array.h
+++ b/runtime/mirror/array.h
@@ -49,7 +49,7 @@ class MANAGED Array : public Object {
ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
size_t SizeOf() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- int32_t GetLength() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ ALWAYS_INLINE int32_t GetLength() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
return GetField32<kVerifyFlags>(OFFSET_OF_OBJECT_MEMBER(Array, length_));
}
@@ -82,7 +82,7 @@ class MANAGED Array : public Object {
// Returns true if the index is valid. If not, throws an ArrayIndexOutOfBoundsException and
// returns false.
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- bool CheckIsValidIndex(int32_t index) ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE bool CheckIsValidIndex(int32_t index) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
protected:
void ThrowArrayStoreException(Object* object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
diff --git a/runtime/mirror/art_method-inl.h b/runtime/mirror/art_method-inl.h
index d262fd5..cb6ac4f 100644
--- a/runtime/mirror/art_method-inl.h
+++ b/runtime/mirror/art_method-inl.h
@@ -291,6 +291,13 @@ inline bool ArtMethod::IsImtConflictMethod() {
return result;
}
+inline bool ArtMethod::IsImtUnimplementedMethod() {
+ bool result = this == Runtime::Current()->GetImtUnimplementedMethod();
+ // Sanity check: if this does appear to be the phony method, it should also look like the IMT unimplemented method (i.e. be a runtime method).
+ DCHECK(!result || IsRuntimeMethod());
+ return result;
+}
+
inline uintptr_t ArtMethod::NativeQuickPcOffset(const uintptr_t pc) {
const void* code = Runtime::Current()->GetInstrumentation()->GetQuickCodeFor(this);
return pc - reinterpret_cast<uintptr_t>(code);
@@ -309,7 +316,7 @@ inline QuickMethodFrameInfo ArtMethod::GetQuickFrameInfo(const void* code_pointe
}
inline const DexFile* ArtMethod::GetDexFile() {
- return GetInterfaceMethodIfProxy()->GetDeclaringClass()->GetDexCache()->GetDexFile();
+ return GetDexCache()->GetDexFile();
}
inline const char* ArtMethod::GetDeclaringClassDescriptor() {
@@ -441,6 +448,21 @@ inline ArtMethod* ArtMethod::GetInterfaceMethodIfProxy() {
return interface_method;
}
+inline void ArtMethod::SetDexCacheStrings(ObjectArray<String>* new_dex_cache_strings) {
+ SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_strings_),
+ new_dex_cache_strings);
+}
+
+inline void ArtMethod::SetDexCacheResolvedMethods(ObjectArray<ArtMethod>* new_dex_cache_methods) {
+ SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_methods_),
+ new_dex_cache_methods);
+}
+
+inline void ArtMethod::SetDexCacheResolvedTypes(ObjectArray<Class>* new_dex_cache_classes) {
+ SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_types_),
+ new_dex_cache_classes);
+}
+
} // namespace mirror
} // namespace art
diff --git a/runtime/mirror/art_method.cc b/runtime/mirror/art_method.cc
index b219004..014d1a9 100644
--- a/runtime/mirror/art_method.cc
+++ b/runtime/mirror/art_method.cc
@@ -91,21 +91,6 @@ void ArtMethod::ResetClass() {
java_lang_reflect_ArtMethod_ = GcRoot<Class>(nullptr);
}
-void ArtMethod::SetDexCacheStrings(ObjectArray<String>* new_dex_cache_strings) {
- SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_strings_),
- new_dex_cache_strings);
-}
-
-void ArtMethod::SetDexCacheResolvedMethods(ObjectArray<ArtMethod>* new_dex_cache_methods) {
- SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_methods_),
- new_dex_cache_methods);
-}
-
-void ArtMethod::SetDexCacheResolvedTypes(ObjectArray<Class>* new_dex_cache_classes) {
- SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_types_),
- new_dex_cache_classes);
-}
-
size_t ArtMethod::NumArgRegisters(const StringPiece& shorty) {
CHECK_LE(1U, shorty.length());
uint32_t num_registers = 0;
diff --git a/runtime/mirror/art_method.h b/runtime/mirror/art_method.h
index 3b92012..92b2c30 100644
--- a/runtime/mirror/art_method.h
+++ b/runtime/mirror/art_method.h
@@ -65,7 +65,7 @@ class MANAGED ArtMethod FINAL : public Object {
return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
}
- uint32_t GetAccessFlags() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE uint32_t GetAccessFlags() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void SetAccessFlags(uint32_t new_access_flags) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
// Not called within a transaction.
@@ -203,7 +203,7 @@ class MANAGED ArtMethod FINAL : public Object {
// Number of 32bit registers that would be required to hold all the arguments
static size_t NumArgRegisters(const StringPiece& shorty);
- uint32_t GetDexMethodIndex() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE uint32_t GetDexMethodIndex() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void SetDexMethodIndex(uint32_t new_idx) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
// Not called within a transaction.
@@ -226,11 +226,11 @@ class MANAGED ArtMethod FINAL : public Object {
return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_types_);
}
- ArtMethod* GetDexCacheResolvedMethod(uint16_t method_idx)
+ ALWAYS_INLINE ArtMethod* GetDexCacheResolvedMethod(uint16_t method_idx)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void SetDexCacheResolvedMethod(uint16_t method_idx, ArtMethod* new_method)
+ ALWAYS_INLINE void SetDexCacheResolvedMethod(uint16_t method_idx, ArtMethod* new_method)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void SetDexCacheResolvedMethods(ObjectArray<ArtMethod>* new_dex_cache_methods)
+ ALWAYS_INLINE void SetDexCacheResolvedMethods(ObjectArray<ArtMethod>* new_dex_cache_methods)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
bool HasDexCacheResolvedMethods() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
bool HasSameDexCacheResolvedMethods(ArtMethod* other) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -432,6 +432,8 @@ class MANAGED ArtMethod FINAL : public Object {
bool IsImtConflictMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ bool IsImtUnimplementedMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
uintptr_t NativeQuickPcOffset(const uintptr_t pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
#ifdef NDEBUG
uintptr_t NativeQuickPcOffset(const uintptr_t pc, const void* quick_entry_point)
@@ -564,9 +566,11 @@ class MANAGED ArtMethod FINAL : public Object {
static GcRoot<Class> java_lang_reflect_ArtMethod_;
private:
- ObjectArray<ArtMethod>* GetDexCacheResolvedMethods() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE ObjectArray<ArtMethod>* GetDexCacheResolvedMethods()
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ObjectArray<Class>* GetDexCacheResolvedTypes() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE ObjectArray<Class>* GetDexCacheResolvedTypes()
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
friend struct art::ArtMethodOffsets; // for verifying offset information
DISALLOW_IMPLICIT_CONSTRUCTORS(ArtMethod);
diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h
index e3295ef..892bf44 100644
--- a/runtime/mirror/class-inl.h
+++ b/runtime/mirror/class-inl.h
@@ -114,19 +114,19 @@ template<VerifyObjectFlags kVerifyFlags>
inline ArtMethod* Class::GetVirtualMethod(uint32_t i) {
DCHECK(IsResolved<kVerifyFlags>() || IsErroneous<kVerifyFlags>())
<< PrettyClass(this) << " status=" << GetStatus();
- return GetVirtualMethods()->Get(i);
+ return GetVirtualMethods()->GetWithoutChecks(i);
}
inline ArtMethod* Class::GetVirtualMethodDuringLinking(uint32_t i) {
DCHECK(IsLoaded() || IsErroneous());
- return GetVirtualMethods()->Get(i);
+ return GetVirtualMethods()->GetWithoutChecks(i);
}
inline void Class::SetVirtualMethod(uint32_t i, ArtMethod* f) // TODO: uint16_t
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
ObjectArray<ArtMethod>* virtual_methods =
GetFieldObject<ObjectArray<ArtMethod>>(OFFSET_OF_OBJECT_MEMBER(Class, virtual_methods_));
- virtual_methods->Set<false>(i, f);
+ virtual_methods->SetWithoutChecks<false>(i, f);
}
inline ObjectArray<ArtMethod>* Class::GetVTable() {
@@ -143,14 +143,6 @@ inline void Class::SetVTable(ObjectArray<ArtMethod>* new_vtable) {
SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(Class, vtable_), new_vtable);
}
-inline ObjectArray<ArtMethod>* Class::GetImTable() {
- return GetFieldObject<ObjectArray<ArtMethod>>(OFFSET_OF_OBJECT_MEMBER(Class, imtable_));
-}
-
-inline void Class::SetImTable(ObjectArray<ArtMethod>* new_imtable) {
- SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(Class, imtable_), new_imtable);
-}
-
inline ArtMethod* Class::GetEmbeddedImTableEntry(uint32_t i) {
uint32_t offset = EmbeddedImTableOffset().Uint32Value() + i * sizeof(ImTableEntry);
return GetFieldObject<mirror::ArtMethod>(MemberOffset(offset));
@@ -159,7 +151,6 @@ inline ArtMethod* Class::GetEmbeddedImTableEntry(uint32_t i) {
inline void Class::SetEmbeddedImTableEntry(uint32_t i, ArtMethod* method) {
uint32_t offset = EmbeddedImTableOffset().Uint32Value() + i * sizeof(ImTableEntry);
SetFieldObject<false>(MemberOffset(offset), method);
- CHECK(method == GetImTable()->Get(i));
}
inline bool Class::HasVTable() {
@@ -761,6 +752,24 @@ inline void Class::SetAccessFlags(uint32_t new_access_flags) {
}
}
+inline uint32_t Class::NumDirectInterfaces() {
+ if (IsPrimitive()) {
+ return 0;
+ } else if (IsArrayClass()) {
+ return 2;
+ } else if (IsProxyClass()) {
+ mirror::ObjectArray<mirror::Class>* interfaces = GetInterfaces();
+ return interfaces != nullptr ? interfaces->GetLength() : 0;
+ } else {
+ const DexFile::TypeList* interfaces = GetInterfaceTypeList();
+ if (interfaces == nullptr) {
+ return 0;
+ } else {
+ return interfaces->Size();
+ }
+ }
+}
+
} // namespace mirror
} // namespace art
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index 6df7204..8eafd6f 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -738,24 +738,6 @@ const DexFile::ClassDef* Class::GetClassDef() {
return &GetDexFile().GetClassDef(class_def_idx);
}
-uint32_t Class::NumDirectInterfaces() {
- if (IsPrimitive()) {
- return 0;
- } else if (IsArrayClass()) {
- return 2;
- } else if (IsProxyClass()) {
- mirror::ObjectArray<mirror::Class>* interfaces = GetInterfaces();
- return interfaces != nullptr ? interfaces->GetLength() : 0;
- } else {
- const DexFile::TypeList* interfaces = GetInterfaceTypeList();
- if (interfaces == nullptr) {
- return 0;
- } else {
- return interfaces->Size();
- }
- }
-}
-
uint16_t Class::GetDirectInterfaceTypeIdx(uint32_t idx) {
DCHECK(!IsPrimitive());
DCHECK(!IsArrayClass());
@@ -817,22 +799,21 @@ const DexFile::TypeList* Class::GetInterfaceTypeList() {
return GetDexFile().GetInterfacesList(*class_def);
}
-void Class::PopulateEmbeddedImtAndVTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- ObjectArray<ArtMethod>* table = GetImTable();
- if (table != nullptr) {
- for (uint32_t i = 0; i < kImtSize; i++) {
- SetEmbeddedImTableEntry(i, table->Get(i));
- }
+void Class::PopulateEmbeddedImtAndVTable(StackHandleScope<kImtSize>* imt_handle_scope) {
+ for (uint32_t i = 0; i < kImtSize; i++) {
+ // Entries are expected to be non-null at this point; any nulls should already have been replaced (e.g. with the conflict method) by the caller.
+ mirror::Object* obj = imt_handle_scope->GetReference(i);
+ DCHECK(obj != nullptr);
+ SetEmbeddedImTableEntry(i, obj->AsArtMethod());
}
- table = GetVTableDuringLinking();
+ ObjectArray<ArtMethod>* table = GetVTableDuringLinking();
CHECK(table != nullptr) << PrettyClass(this);
SetEmbeddedVTableLength(table->GetLength());
for (int32_t i = 0; i < table->GetLength(); i++) {
- SetEmbeddedVTableEntry(i, table->Get(i));
+ SetEmbeddedVTableEntry(i, table->GetWithoutChecks(i));
}
- SetImTable(nullptr);
// Keep java.lang.Object class's vtable around for since it's easier
// to be reused by array classes during their linking.
if (!IsObjectClass()) {
@@ -844,9 +825,10 @@ void Class::PopulateEmbeddedImtAndVTable() SHARED_LOCKS_REQUIRED(Locks::mutator_
class CopyClassVisitor {
public:
explicit CopyClassVisitor(Thread* self, Handle<mirror::Class>* orig,
- size_t new_length, size_t copy_bytes)
+ size_t new_length, size_t copy_bytes,
+ StackHandleScope<mirror::Class::kImtSize>* imt_handle_scope)
: self_(self), orig_(orig), new_length_(new_length),
- copy_bytes_(copy_bytes) {
+ copy_bytes_(copy_bytes), imt_handle_scope_(imt_handle_scope) {
}
void operator()(Object* obj, size_t usable_size) const
@@ -855,7 +837,7 @@ class CopyClassVisitor {
mirror::Class* new_class_obj = obj->AsClass();
mirror::Object::CopyObject(self_, new_class_obj, orig_->Get(), copy_bytes_);
new_class_obj->SetStatus(Class::kStatusResolving, self_);
- new_class_obj->PopulateEmbeddedImtAndVTable();
+ new_class_obj->PopulateEmbeddedImtAndVTable(imt_handle_scope_);
new_class_obj->SetClassSize(new_length_);
}
@@ -864,10 +846,12 @@ class CopyClassVisitor {
Handle<mirror::Class>* const orig_;
const size_t new_length_;
const size_t copy_bytes_;
+ StackHandleScope<mirror::Class::kImtSize>* const imt_handle_scope_;
DISALLOW_COPY_AND_ASSIGN(CopyClassVisitor);
};
-Class* Class::CopyOf(Thread* self, int32_t new_length) {
+Class* Class::CopyOf(Thread* self, int32_t new_length,
+ StackHandleScope<kImtSize>* imt_handle_scope) {
DCHECK_GE(new_length, static_cast<int32_t>(sizeof(Class)));
// We may get copied by a compacting GC.
StackHandleScope<1> hs(self);
@@ -875,17 +859,15 @@ Class* Class::CopyOf(Thread* self, int32_t new_length) {
gc::Heap* heap = Runtime::Current()->GetHeap();
// The num_bytes (3rd param) is sizeof(Class) as opposed to SizeOf()
// to skip copying the tail part that we will overwrite here.
- CopyClassVisitor visitor(self, &h_this, new_length, sizeof(Class));
-
+ CopyClassVisitor visitor(self, &h_this, new_length, sizeof(Class), imt_handle_scope);
mirror::Object* new_class =
kMovingClasses
? heap->AllocObject<true>(self, java_lang_Class_.Read(), new_length, visitor)
: heap->AllocNonMovableObject<true>(self, java_lang_Class_.Read(), new_length, visitor);
if (UNLIKELY(new_class == nullptr)) {
CHECK(self->IsExceptionPending()); // Expect an OOME.
- return NULL;
+ return nullptr;
}
-
return new_class->AsClass();
}
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index c06071b..6a7faaa 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -36,6 +36,7 @@ template<class T> class Handle;
template<class T> class Handle;
class Signature;
class StringPiece;
+template<size_t kNumReferences> class PACKED(4) StackHandleScope;
namespace mirror {
@@ -196,46 +197,46 @@ class MANAGED Class FINAL : public Object {
}
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- uint32_t GetAccessFlags() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE uint32_t GetAccessFlags() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void SetAccessFlags(uint32_t new_access_flags) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Returns true if the class is an interface.
- bool IsInterface() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ ALWAYS_INLINE bool IsInterface() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
return (GetAccessFlags() & kAccInterface) != 0;
}
// Returns true if the class is declared public.
- bool IsPublic() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ ALWAYS_INLINE bool IsPublic() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
return (GetAccessFlags() & kAccPublic) != 0;
}
// Returns true if the class is declared final.
- bool IsFinal() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ ALWAYS_INLINE bool IsFinal() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
return (GetAccessFlags() & kAccFinal) != 0;
}
- bool IsFinalizable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ ALWAYS_INLINE bool IsFinalizable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
return (GetAccessFlags() & kAccClassIsFinalizable) != 0;
}
- void SetFinalizable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ ALWAYS_INLINE void SetFinalizable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
uint32_t flags = GetField32(OFFSET_OF_OBJECT_MEMBER(Class, access_flags_));
SetAccessFlags(flags | kAccClassIsFinalizable);
}
// Returns true if the class is abstract.
- bool IsAbstract() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ ALWAYS_INLINE bool IsAbstract() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
return (GetAccessFlags() & kAccAbstract) != 0;
}
// Returns true if the class is an annotation.
- bool IsAnnotation() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ ALWAYS_INLINE bool IsAnnotation() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
return (GetAccessFlags() & kAccAnnotation) != 0;
}
// Returns true if the class is synthetic.
- bool IsSynthetic() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ ALWAYS_INLINE bool IsSynthetic() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
return (GetAccessFlags() & kAccSynthetic) != 0;
}
@@ -592,7 +593,7 @@ class MANAGED Class FINAL : public Object {
// downcast would be necessary. Similarly for interfaces, a class that implements (or an interface
// that extends) another can be assigned to its parent, but not vice-versa. All Classes may assign
// to themselves. Classes for primitive types may not assign to each other.
- inline bool IsAssignableFrom(Class* src) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ ALWAYS_INLINE bool IsAssignableFrom(Class* src) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
DCHECK(src != NULL);
if (this == src) {
// Can always assign to things of the same type.
@@ -609,7 +610,7 @@ class MANAGED Class FINAL : public Object {
}
}
- Class* GetSuperClass() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE Class* GetSuperClass() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void SetSuperClass(Class *new_super_class) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
// Super class is assigned once, except during class linker initialization.
@@ -648,12 +649,13 @@ class MANAGED Class FINAL : public Object {
void SetDexCache(DexCache* new_dex_cache) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ObjectArray<ArtMethod>* GetDirectMethods() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE ObjectArray<ArtMethod>* GetDirectMethods()
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void SetDirectMethods(ObjectArray<ArtMethod>* new_direct_methods)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* GetDirectMethod(int32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE ArtMethod* GetDirectMethod(int32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void SetDirectMethod(uint32_t i, ArtMethod* f) // TODO: uint16_t
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -662,13 +664,14 @@ class MANAGED Class FINAL : public Object {
uint32_t NumDirectMethods() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- ObjectArray<ArtMethod>* GetVirtualMethods() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE ObjectArray<ArtMethod>* GetVirtualMethods()
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void SetVirtualMethods(ObjectArray<ArtMethod>* new_virtual_methods)
+ ALWAYS_INLINE void SetVirtualMethods(ObjectArray<ArtMethod>* new_virtual_methods)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Returns the number of non-inherited virtual methods.
- uint32_t NumVirtualMethods() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE uint32_t NumVirtualMethods() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
ArtMethod* GetVirtualMethod(uint32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -678,9 +681,10 @@ class MANAGED Class FINAL : public Object {
void SetVirtualMethod(uint32_t i, ArtMethod* f) // TODO: uint16_t
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ObjectArray<ArtMethod>* GetVTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE ObjectArray<ArtMethod>* GetVTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ObjectArray<ArtMethod>* GetVTableDuringLinking() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE ObjectArray<ArtMethod>* GetVTableDuringLinking()
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void SetVTable(ObjectArray<ArtMethod>* new_vtable)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -689,13 +693,6 @@ class MANAGED Class FINAL : public Object {
return OFFSET_OF_OBJECT_MEMBER(Class, vtable_);
}
- void SetImTable(ObjectArray<ArtMethod>* new_imtable)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- static MemberOffset ImTableOffset() {
- return OFFSET_OF_OBJECT_MEMBER(Class, imtable_);
- }
-
static MemberOffset EmbeddedImTableOffset() {
return MemberOffset(sizeof(Class));
}
@@ -705,7 +702,7 @@ class MANAGED Class FINAL : public Object {
}
static MemberOffset EmbeddedVTableOffset() {
- return MemberOffset(sizeof(Class) + kImtSize * sizeof(mirror::Class::ImTableEntry) + sizeof(int32_t));
+ return MemberOffset(sizeof(Class) + kImtSize * sizeof(ImTableEntry) + sizeof(int32_t));
}
bool ShouldHaveEmbeddedImtAndVTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
@@ -730,7 +727,8 @@ class MANAGED Class FINAL : public Object {
void SetEmbeddedVTableEntry(uint32_t i, ArtMethod* method) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void PopulateEmbeddedImtAndVTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ void PopulateEmbeddedImtAndVTable(StackHandleScope<kImtSize>* imt_handle_scope)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Given a method implemented by this class but potentially from a super class, return the
// specific implementation method for this class.
@@ -798,11 +796,11 @@ class MANAGED Class FINAL : public Object {
ArtMethod* FindClassInitializer() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- int32_t GetIfTableCount() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE int32_t GetIfTableCount() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- IfTable* GetIfTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE IfTable* GetIfTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void SetIfTable(IfTable* new_iftable) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE void SetIfTable(IfTable* new_iftable) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Get instance fields of the class (See also GetSFields).
ObjectArray<ArtField>* GetIFields() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -970,7 +968,7 @@ class MANAGED Class FINAL : public Object {
const DexFile::ClassDef* GetClassDef() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- uint32_t NumDirectInterfaces() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE uint32_t NumDirectInterfaces() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
uint16_t GetDirectInterfaceTypeIdx(uint32_t idx) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -990,7 +988,7 @@ class MANAGED Class FINAL : public Object {
void AssertInitializedOrInitializingInThread(Thread* self)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- Class* CopyOf(Thread* self, int32_t new_length)
+ Class* CopyOf(Thread* self, int32_t new_length, StackHandleScope<kImtSize>* imt_handle_scope)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// For proxy class only.
@@ -1039,8 +1037,6 @@ class MANAGED Class FINAL : public Object {
void CheckObjectAlloc() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ObjectArray<ArtMethod>* GetImTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
// defining class loader, or NULL for the "bootstrap" system loader
HeapReference<ClassLoader> class_loader_;
diff --git a/runtime/mirror/iftable-inl.h b/runtime/mirror/iftable-inl.h
index 3f20bf4..d1309d2 100644
--- a/runtime/mirror/iftable-inl.h
+++ b/runtime/mirror/iftable-inl.h
@@ -27,7 +27,7 @@ inline void IfTable::SetInterface(int32_t i, Class* interface) {
DCHECK(interface->IsInterface());
const size_t idx = i * kMax + kInterface;
DCHECK_EQ(Get(idx), static_cast<Object*>(nullptr));
- Set<false>(idx, interface);
+ SetWithoutChecks<false>(idx, interface);
}
} // namespace mirror
diff --git a/runtime/mirror/iftable.h b/runtime/mirror/iftable.h
index 5feb602..4d899d2 100644
--- a/runtime/mirror/iftable.h
+++ b/runtime/mirror/iftable.h
@@ -25,13 +25,14 @@ namespace mirror {
class MANAGED IfTable FINAL : public ObjectArray<Object> {
public:
- Class* GetInterface(int32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- Class* interface = Get((i * kMax) + kInterface)->AsClass();
+ ALWAYS_INLINE Class* GetInterface(int32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ Class* interface = GetWithoutChecks((i * kMax) + kInterface)->AsClass();
DCHECK(interface != NULL);
return interface;
}
- void SetInterface(int32_t i, Class* interface) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE void SetInterface(int32_t i, Class* interface)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
ObjectArray<ArtMethod>* GetMethodArray(int32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
ObjectArray<ArtMethod>* method_array =
diff --git a/runtime/mirror/object_array.h b/runtime/mirror/object_array.h
index 7012b19..6404faf 100644
--- a/runtime/mirror/object_array.h
+++ b/runtime/mirror/object_array.h
@@ -45,11 +45,11 @@ class MANAGED ObjectArray: public Array {
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
bool CheckAssignable(T* object) NO_THREAD_SAFETY_ANALYSIS;
- void Set(int32_t i, T* object) ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE void Set(int32_t i, T* object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// TODO fix thread safety analysis: should be SHARED_LOCKS_REQUIRED(Locks::mutator_lock_).
template<bool kTransactionActive, bool kCheckTransaction = true,
VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- void Set(int32_t i, T* object) ALWAYS_INLINE NO_THREAD_SAFETY_ANALYSIS;
+ ALWAYS_INLINE void Set(int32_t i, T* object) NO_THREAD_SAFETY_ANALYSIS;
// Set element without bound and element type checks, to be used in limited
// circumstances, such as during boot image writing.
@@ -57,15 +57,15 @@ class MANAGED ObjectArray: public Array {
// SHARED_LOCKS_REQUIRED(Locks::mutator_lock_).
template<bool kTransactionActive, bool kCheckTransaction = true,
VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- void SetWithoutChecks(int32_t i, T* object) ALWAYS_INLINE NO_THREAD_SAFETY_ANALYSIS;
+ ALWAYS_INLINE void SetWithoutChecks(int32_t i, T* object) NO_THREAD_SAFETY_ANALYSIS;
// TODO fix thread safety analysis broken by the use of template. This should be
// SHARED_LOCKS_REQUIRED(Locks::mutator_lock_).
template<bool kTransactionActive, bool kCheckTransaction = true,
VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- void SetWithoutChecksAndWriteBarrier(int32_t i, T* object) ALWAYS_INLINE
+ ALWAYS_INLINE void SetWithoutChecksAndWriteBarrier(int32_t i, T* object)
NO_THREAD_SAFETY_ANALYSIS;
- T* GetWithoutChecks(int32_t i) ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE T* GetWithoutChecks(int32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Copy src into this array (dealing with overlaps as memmove does) without assignability checks.
void AssignableMemmove(int32_t dst_pos, ObjectArray<T>* src, int32_t src_pos,
diff --git a/runtime/runtime-inl.h b/runtime/runtime-inl.h
index fe05073..cdf8d54 100644
--- a/runtime/runtime-inl.h
+++ b/runtime/runtime-inl.h
@@ -59,6 +59,11 @@ inline mirror::ArtMethod* Runtime::GetImtConflictMethod() {
return imt_conflict_method_.Read();
}
+inline mirror::ArtMethod* Runtime::GetImtUnimplementedMethod() {
+ CHECK(!imt_unimplemented_method_.IsNull());
+ return imt_unimplemented_method_.Read();
+}
+
inline mirror::ObjectArray<mirror::ArtMethod>* Runtime::GetDefaultImt()
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
CHECK(HasDefaultImt());
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index 8ba098f..db7936c 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -1169,6 +1169,9 @@ void Runtime::VisitNonThreadRoots(RootCallback* callback, void* arg) {
if (HasImtConflictMethod()) {
imt_conflict_method_.VisitRoot(callback, arg, 0, kRootVMInternal);
}
+ if (!imt_unimplemented_method_.IsNull()) {
+ imt_unimplemented_method_.VisitRoot(callback, arg, 0, kRootVMInternal);
+ }
if (HasDefaultImt()) {
default_imt_.VisitRoot(callback, arg, 0, kRootVMInternal);
}
diff --git a/runtime/runtime.h b/runtime/runtime.h
index bfa7d72..11db613 100644
--- a/runtime/runtime.h
+++ b/runtime/runtime.h
@@ -312,6 +312,7 @@ class Runtime {
// Returns a special method that calls into a trampoline for runtime imt conflicts.
mirror::ArtMethod* GetImtConflictMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ mirror::ArtMethod* GetImtUnimplementedMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
bool HasImtConflictMethod() const {
return !imt_conflict_method_.IsNull();
@@ -320,6 +321,9 @@ class Runtime {
void SetImtConflictMethod(mirror::ArtMethod* method) {
imt_conflict_method_ = GcRoot<mirror::ArtMethod>(method);
}
+ void SetImtUnimplementedMethod(mirror::ArtMethod* method) {
+ imt_unimplemented_method_ = GcRoot<mirror::ArtMethod>(method);
+ }
mirror::ArtMethod* CreateImtConflictMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -511,6 +515,9 @@ class Runtime {
GcRoot<mirror::Throwable> pre_allocated_NoClassDefFoundError_;
GcRoot<mirror::ArtMethod> resolution_method_;
GcRoot<mirror::ArtMethod> imt_conflict_method_;
+  // Unresolved method has the same behavior as the conflict method; it is used by the class linker
+  // to differentiate between unfilled IMT slots and conflict slots in superclasses.
+ GcRoot<mirror::ArtMethod> imt_unimplemented_method_;
GcRoot<mirror::ObjectArray<mirror::ArtMethod>> default_imt_;
// Special sentinel object used to invalid conditions in JNI (cleared weak references) and