summaryrefslogtreecommitdiffstats
path: root/runtime/mirror
diff options
context:
space:
mode:
Diffstat (limited to 'runtime/mirror')
-rw-r--r--runtime/mirror/abstract_method.cc16
-rw-r--r--runtime/mirror/abstract_method.h12
-rw-r--r--runtime/mirror/array-inl.h32
-rw-r--r--runtime/mirror/array.cc20
-rw-r--r--runtime/mirror/array.h16
-rw-r--r--runtime/mirror/art_method-inl.h488
-rw-r--r--runtime/mirror/art_method.cc574
-rw-r--r--runtime/mirror/art_method.h650
-rw-r--r--runtime/mirror/class-inl.h315
-rw-r--r--runtime/mirror/class.cc250
-rw-r--r--runtime/mirror/class.h282
-rw-r--r--runtime/mirror/dex_cache-inl.h52
-rw-r--r--runtime/mirror/dex_cache.cc28
-rw-r--r--runtime/mirror/dex_cache.h44
-rw-r--r--runtime/mirror/field-inl.h12
-rw-r--r--runtime/mirror/field.cc1
-rw-r--r--runtime/mirror/iftable.h23
-rw-r--r--runtime/mirror/method.cc8
-rw-r--r--runtime/mirror/method.h4
-rw-r--r--runtime/mirror/object-inl.h66
-rw-r--r--runtime/mirror/object.cc3
-rw-r--r--runtime/mirror/object.h26
-rw-r--r--runtime/mirror/object_array.h4
-rw-r--r--runtime/mirror/object_test.cc24
-rw-r--r--runtime/mirror/reference-inl.h4
-rw-r--r--runtime/mirror/reference.cc2
-rw-r--r--runtime/mirror/reference.h2
-rw-r--r--runtime/mirror/string-inl.h4
-rw-r--r--runtime/mirror/string.h2
-rw-r--r--runtime/mirror/throwable.cc34
30 files changed, 753 insertions, 2245 deletions
diff --git a/runtime/mirror/abstract_method.cc b/runtime/mirror/abstract_method.cc
index 81c656b..91a9870 100644
--- a/runtime/mirror/abstract_method.cc
+++ b/runtime/mirror/abstract_method.cc
@@ -16,14 +16,14 @@
#include "abstract_method.h"
-#include "mirror/art_method-inl.h"
+#include "art_method-inl.h"
namespace art {
namespace mirror {
-bool AbstractMethod::CreateFromArtMethod(mirror::ArtMethod* method) {
- auto* interface_method = method->GetInterfaceMethodIfProxy();
- SetFieldObject<false>(ArtMethodOffset(), method);
+bool AbstractMethod::CreateFromArtMethod(ArtMethod* method) {
+ auto* interface_method = method->GetInterfaceMethodIfProxy(sizeof(void*));
+ SetArtMethod(method);
SetFieldObject<false>(DeclaringClassOffset(), method->GetDeclaringClass());
SetFieldObject<false>(
DeclaringClassOfOverriddenMethodOffset(), interface_method->GetDeclaringClass());
@@ -32,8 +32,12 @@ bool AbstractMethod::CreateFromArtMethod(mirror::ArtMethod* method) {
return true;
}
-mirror::ArtMethod* AbstractMethod::GetArtMethod() {
- return GetFieldObject<mirror::ArtMethod>(ArtMethodOffset());
+ArtMethod* AbstractMethod::GetArtMethod() {
+ return reinterpret_cast<ArtMethod*>(GetField64(ArtMethodOffset()));
+}
+
+void AbstractMethod::SetArtMethod(ArtMethod* method) {
+ SetField64<false>(ArtMethodOffset(), reinterpret_cast<uint64_t>(method));
}
mirror::Class* AbstractMethod::GetDeclaringClass() {
diff --git a/runtime/mirror/abstract_method.h b/runtime/mirror/abstract_method.h
index ef51d7f..99d697a 100644
--- a/runtime/mirror/abstract_method.h
+++ b/runtime/mirror/abstract_method.h
@@ -26,18 +26,19 @@
namespace art {
struct AbstractMethodOffsets;
+class ArtMethod;
namespace mirror {
-class ArtMethod;
-
// C++ mirror of java.lang.reflect.AbstractMethod.
class MANAGED AbstractMethod : public AccessibleObject {
public:
// Called from Constructor::CreateFromArtMethod, Method::CreateFromArtMethod.
- bool CreateFromArtMethod(mirror::ArtMethod* method) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ bool CreateFromArtMethod(ArtMethod* method) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- mirror::ArtMethod* GetArtMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ArtMethod* GetArtMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ // Only used by the image writer.
+ void SetArtMethod(ArtMethod* method) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
mirror::Class* GetDeclaringClass() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
private:
@@ -57,9 +58,10 @@ class MANAGED AbstractMethod : public AccessibleObject {
return MemberOffset(OFFSETOF_MEMBER(AbstractMethod, dex_method_index_));
}
- HeapReference<mirror::ArtMethod> art_method_;
HeapReference<mirror::Class> declaring_class_;
HeapReference<mirror::Class> declaring_class_of_overridden_method_;
+ uint32_t padding_;
+ uint64_t art_method_;
uint32_t access_flags_;
uint32_t dex_method_index_;
diff --git a/runtime/mirror/array-inl.h b/runtime/mirror/array-inl.h
index e93717e..d343292 100644
--- a/runtime/mirror/array-inl.h
+++ b/runtime/mirror/array-inl.h
@@ -20,19 +20,19 @@
#include "array.h"
#include "base/bit_utils.h"
+#include "base/casts.h"
#include "base/logging.h"
#include "base/stringprintf.h"
-#include "base/casts.h"
-#include "class.h"
+#include "class-inl.h"
#include "gc/heap-inl.h"
#include "thread.h"
namespace art {
namespace mirror {
-inline uint32_t Array::ClassSize() {
+inline uint32_t Array::ClassSize(size_t pointer_size) {
uint32_t vtable_entries = Object::kVTableLength;
- return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0);
+ return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, pointer_size);
}
template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
@@ -371,6 +371,30 @@ inline void PrimitiveArray<T>::Memcpy(int32_t dst_pos, PrimitiveArray<T>* src, i
}
}
+template<typename T>
+inline T PointerArray::GetElementPtrSize(uint32_t idx, size_t ptr_size) {
+ // C style casts here since we sometimes have T be a pointer, or sometimes an integer
+ // (for stack traces).
+ if (ptr_size == 8) {
+ return (T)static_cast<uintptr_t>(AsLongArray()->GetWithoutChecks(idx));
+ }
+ DCHECK_EQ(ptr_size, 4u);
+ return (T)static_cast<uintptr_t>(AsIntArray()->GetWithoutChecks(idx));
+}
+
+template<bool kTransactionActive, bool kUnchecked, typename T>
+inline void PointerArray::SetElementPtrSize(uint32_t idx, T element, size_t ptr_size) {
+ if (ptr_size == 8) {
+ (kUnchecked ? down_cast<LongArray*>(static_cast<Object*>(this)) : AsLongArray())->
+ SetWithoutChecks<kTransactionActive>(idx, (uint64_t)(element));
+ } else {
+ DCHECK_EQ(ptr_size, 4u);
+ DCHECK_LE((uintptr_t)element, 0xFFFFFFFFu);
+ (kUnchecked ? down_cast<IntArray*>(static_cast<Object*>(this)) : AsIntArray())
+ ->SetWithoutChecks<kTransactionActive>(idx, static_cast<uint32_t>((uintptr_t)element));
+ }
+}
+
} // namespace mirror
} // namespace art
diff --git a/runtime/mirror/array.cc b/runtime/mirror/array.cc
index b92f017..d72c03f 100644
--- a/runtime/mirror/array.cc
+++ b/runtime/mirror/array.cc
@@ -125,6 +125,26 @@ void Array::ThrowArrayStoreException(Object* object) {
art::ThrowArrayStoreException(object->GetClass(), this->GetClass());
}
+Array* Array::CopyOf(Thread* self, int32_t new_length) {
+ CHECK(GetClass()->GetComponentType()->IsPrimitive()) << "Will miss write barriers";
+ DCHECK_GE(new_length, 0);
+ // We may get copied by a compacting GC.
+ StackHandleScope<1> hs(self);
+ auto h_this(hs.NewHandle(this));
+ auto* heap = Runtime::Current()->GetHeap();
+ gc::AllocatorType allocator_type = heap->IsMovableObject(this) ? heap->GetCurrentAllocator() :
+ heap->GetCurrentNonMovingAllocator();
+ const auto component_size = GetClass()->GetComponentSize();
+ const auto component_shift = GetClass()->GetComponentSizeShift();
+ Array* new_array = Alloc<true>(self, GetClass(), new_length, component_shift, allocator_type);
+ if (LIKELY(new_array != nullptr)) {
+ memcpy(new_array->GetRawData(component_size, 0), h_this->GetRawData(component_size, 0),
+ std::min(h_this->GetLength(), new_length) << component_shift);
+ }
+ return new_array;
+}
+
+
template <typename T> GcRoot<Class> PrimitiveArray<T>::array_class_;
// Explicitly instantiate all the primitive array types.
diff --git a/runtime/mirror/array.h b/runtime/mirror/array.h
index 832ad68..c4f6c84 100644
--- a/runtime/mirror/array.h
+++ b/runtime/mirror/array.h
@@ -31,7 +31,7 @@ namespace mirror {
class MANAGED Array : public Object {
public:
// The size of a java.lang.Class representing an array.
- static uint32_t ClassSize();
+ static uint32_t ClassSize(size_t pointer_size);
// Allocates an array with the given properties, if kFillUsable is true the array will be of at
// least component_count size, however, if there's usable space at the end of the allocation the
@@ -84,6 +84,8 @@ class MANAGED Array : public Object {
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
ALWAYS_INLINE bool CheckIsValidIndex(int32_t index) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ Array* CopyOf(Thread* self, int32_t new_length) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
protected:
void ThrowArrayStoreException(Object* object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -174,6 +176,18 @@ class MANAGED PrimitiveArray : public Array {
DISALLOW_IMPLICIT_CONSTRUCTORS(PrimitiveArray);
};
+// Either an IntArray or a LongArray.
+class PointerArray : public Array {
+ public:
+ template<typename T>
+ T GetElementPtrSize(uint32_t idx, size_t ptr_size)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ template<bool kTransactionActive = false, bool kUnchecked = false, typename T>
+ void SetElementPtrSize(uint32_t idx, T element, size_t ptr_size)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+};
+
} // namespace mirror
} // namespace art
diff --git a/runtime/mirror/art_method-inl.h b/runtime/mirror/art_method-inl.h
deleted file mode 100644
index 7c8067a..0000000
--- a/runtime/mirror/art_method-inl.h
+++ /dev/null
@@ -1,488 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ART_RUNTIME_MIRROR_ART_METHOD_INL_H_
-#define ART_RUNTIME_MIRROR_ART_METHOD_INL_H_
-
-#include "art_method.h"
-
-#include "art_field.h"
-#include "class.h"
-#include "class_linker-inl.h"
-#include "dex_cache.h"
-#include "dex_file.h"
-#include "dex_file-inl.h"
-#include "object-inl.h"
-#include "object_array.h"
-#include "oat.h"
-#include "quick/quick_method_frame_info.h"
-#include "read_barrier-inl.h"
-#include "runtime-inl.h"
-#include "utils.h"
-
-namespace art {
-namespace mirror {
-
-inline uint32_t ArtMethod::ClassSize() {
- uint32_t vtable_entries = Object::kVTableLength;
- return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0);
-}
-
-template<ReadBarrierOption kReadBarrierOption>
-inline Class* ArtMethod::GetJavaLangReflectArtMethod() {
- DCHECK(!java_lang_reflect_ArtMethod_.IsNull());
- return java_lang_reflect_ArtMethod_.Read<kReadBarrierOption>();
-}
-
-inline Class* ArtMethod::GetDeclaringClass() {
- Class* result = GetFieldObject<Class>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, declaring_class_));
- DCHECK(result != nullptr) << this;
- DCHECK(result->IsIdxLoaded() || result->IsErroneous()) << this;
- return result;
-}
-
-inline void ArtMethod::SetDeclaringClass(Class *new_declaring_class) {
- SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, declaring_class_),
- new_declaring_class);
-}
-
-inline uint32_t ArtMethod::GetAccessFlags() {
- DCHECK(GetDeclaringClass()->IsIdxLoaded() || GetDeclaringClass()->IsErroneous());
- return GetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, access_flags_));
-}
-
-inline uint16_t ArtMethod::GetMethodIndex() {
- DCHECK(GetDeclaringClass()->IsResolved() || GetDeclaringClass()->IsErroneous());
- return GetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_));
-}
-
-inline uint16_t ArtMethod::GetMethodIndexDuringLinking() {
- return GetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_));
-}
-
-inline uint32_t ArtMethod::GetDexMethodIndex() {
- DCHECK(GetDeclaringClass()->IsLoaded() || GetDeclaringClass()->IsErroneous());
- return GetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_method_index_));
-}
-
-inline ObjectArray<ArtMethod>* ArtMethod::GetDexCacheResolvedMethods() {
- return GetFieldObject<ObjectArray<ArtMethod>>(
- OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_methods_));
-}
-
-inline ArtMethod* ArtMethod::GetDexCacheResolvedMethod(uint16_t method_index) {
- ArtMethod* method = GetDexCacheResolvedMethods()->Get(method_index);
- if (method != nullptr && !method->GetDeclaringClass()->IsErroneous()) {
- return method;
- } else {
- return nullptr;
- }
-}
-
-inline void ArtMethod::SetDexCacheResolvedMethod(uint16_t method_idx, ArtMethod* new_method) {
- GetDexCacheResolvedMethods()->Set<false>(method_idx, new_method);
-}
-
-inline bool ArtMethod::HasDexCacheResolvedMethods() {
- return GetDexCacheResolvedMethods() != nullptr;
-}
-
-inline bool ArtMethod::HasSameDexCacheResolvedMethods(ObjectArray<ArtMethod>* other_cache) {
- return GetDexCacheResolvedMethods() == other_cache;
-}
-
-inline bool ArtMethod::HasSameDexCacheResolvedMethods(ArtMethod* other) {
- return GetDexCacheResolvedMethods() == other->GetDexCacheResolvedMethods();
-}
-
-
-inline ObjectArray<Class>* ArtMethod::GetDexCacheResolvedTypes() {
- return GetFieldObject<ObjectArray<Class>>(
- OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_types_));
-}
-
-template <bool kWithCheck>
-inline Class* ArtMethod::GetDexCacheResolvedType(uint32_t type_index) {
- Class* klass;
- if (kWithCheck) {
- klass = GetDexCacheResolvedTypes()->Get(type_index);
- } else {
- klass = GetDexCacheResolvedTypes()->GetWithoutChecks(type_index);
- }
- return (klass != nullptr && !klass->IsErroneous()) ? klass : nullptr;
-}
-
-inline bool ArtMethod::HasDexCacheResolvedTypes() {
- return GetDexCacheResolvedTypes() != nullptr;
-}
-
-inline bool ArtMethod::HasSameDexCacheResolvedTypes(ObjectArray<Class>* other_cache) {
- return GetDexCacheResolvedTypes() == other_cache;
-}
-
-inline bool ArtMethod::HasSameDexCacheResolvedTypes(ArtMethod* other) {
- return GetDexCacheResolvedTypes() == other->GetDexCacheResolvedTypes();
-}
-
-inline mirror::Class* ArtMethod::GetClassFromTypeIndex(uint16_t type_idx, bool resolve) {
- mirror::Class* type = GetDexCacheResolvedType(type_idx);
- if (type == nullptr && resolve) {
- type = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, this);
- CHECK(type != nullptr || Thread::Current()->IsExceptionPending());
- }
- return type;
-}
-
-inline uint32_t ArtMethod::GetCodeSize() {
- DCHECK(!IsRuntimeMethod() && !IsProxyMethod()) << PrettyMethod(this);
- return GetCodeSize(EntryPointToCodePointer(GetEntryPointFromQuickCompiledCode()));
-}
-
-inline uint32_t ArtMethod::GetCodeSize(const void* code) {
- if (code == nullptr) {
- return 0u;
- }
- return reinterpret_cast<const OatQuickMethodHeader*>(code)[-1].code_size_;
-}
-
-inline bool ArtMethod::CheckIncompatibleClassChange(InvokeType type) {
- switch (type) {
- case kStatic:
- return !IsStatic();
- case kDirect:
- return !IsDirect() || IsStatic();
- case kVirtual: {
- Class* methods_class = GetDeclaringClass();
- return IsDirect() || (methods_class->IsInterface() && !IsMiranda());
- }
- case kSuper:
- // Constructors and static methods are called with invoke-direct.
- // Interface methods cannot be invoked with invoke-super.
- return IsConstructor() || IsStatic() || GetDeclaringClass()->IsInterface();
- case kInterface: {
- Class* methods_class = GetDeclaringClass();
- return IsDirect() || !(methods_class->IsInterface() || methods_class->IsObjectClass());
- }
- default:
- LOG(FATAL) << "Unreachable - invocation type: " << type;
- UNREACHABLE();
- }
-}
-
-inline uint32_t ArtMethod::GetQuickOatCodeOffset() {
- DCHECK(!Runtime::Current()->IsStarted());
- return PointerToLowMemUInt32(GetEntryPointFromQuickCompiledCode());
-}
-
-inline void ArtMethod::SetQuickOatCodeOffset(uint32_t code_offset) {
- DCHECK(!Runtime::Current()->IsStarted());
- SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(code_offset));
-}
-
-inline const uint8_t* ArtMethod::GetMappingTable(size_t pointer_size) {
- const void* code_pointer = GetQuickOatCodePointer(pointer_size);
- if (code_pointer == nullptr) {
- return nullptr;
- }
- return GetMappingTable(code_pointer, pointer_size);
-}
-
-inline const uint8_t* ArtMethod::GetMappingTable(const void* code_pointer, size_t pointer_size) {
- DCHECK(code_pointer != nullptr);
- DCHECK_EQ(code_pointer, GetQuickOatCodePointer(pointer_size));
- uint32_t offset =
- reinterpret_cast<const OatQuickMethodHeader*>(code_pointer)[-1].mapping_table_offset_;
- if (UNLIKELY(offset == 0u)) {
- return nullptr;
- }
- return reinterpret_cast<const uint8_t*>(code_pointer) - offset;
-}
-
-inline const uint8_t* ArtMethod::GetVmapTable(size_t pointer_size) {
- const void* code_pointer = GetQuickOatCodePointer(pointer_size);
- if (code_pointer == nullptr) {
- return nullptr;
- }
- return GetVmapTable(code_pointer, pointer_size);
-}
-
-inline const uint8_t* ArtMethod::GetVmapTable(const void* code_pointer, size_t pointer_size) {
- CHECK(!IsOptimized(pointer_size)) << "Unimplemented vmap table for optimized compiler";
- DCHECK(code_pointer != nullptr);
- DCHECK_EQ(code_pointer, GetQuickOatCodePointer(pointer_size));
- uint32_t offset =
- reinterpret_cast<const OatQuickMethodHeader*>(code_pointer)[-1].vmap_table_offset_;
- if (UNLIKELY(offset == 0u)) {
- return nullptr;
- }
- return reinterpret_cast<const uint8_t*>(code_pointer) - offset;
-}
-
-inline CodeInfo ArtMethod::GetOptimizedCodeInfo() {
- DCHECK(IsOptimized(sizeof(void*)));
- const void* code_pointer = GetQuickOatCodePointer(sizeof(void*));
- DCHECK(code_pointer != nullptr);
- uint32_t offset =
- reinterpret_cast<const OatQuickMethodHeader*>(code_pointer)[-1].vmap_table_offset_;
- const void* data = reinterpret_cast<const void*>(reinterpret_cast<const uint8_t*>(code_pointer) - offset);
- return CodeInfo(data);
-}
-
-inline const uint8_t* ArtMethod::GetNativeGcMap(size_t pointer_size) {
- const void* code_pointer = GetQuickOatCodePointer(pointer_size);
- if (code_pointer == nullptr) {
- return nullptr;
- }
- return GetNativeGcMap(code_pointer, pointer_size);
-}
-
-inline const uint8_t* ArtMethod::GetNativeGcMap(const void* code_pointer, size_t pointer_size) {
- DCHECK(code_pointer != nullptr);
- DCHECK_EQ(code_pointer, GetQuickOatCodePointer(pointer_size));
- uint32_t offset =
- reinterpret_cast<const OatQuickMethodHeader*>(code_pointer)[-1].gc_map_offset_;
- if (UNLIKELY(offset == 0u)) {
- return nullptr;
- }
- return reinterpret_cast<const uint8_t*>(code_pointer) - offset;
-}
-
-inline bool ArtMethod::IsRuntimeMethod() {
- return GetDexMethodIndex() == DexFile::kDexNoIndex;
-}
-
-inline bool ArtMethod::IsCalleeSaveMethod() {
- if (!IsRuntimeMethod()) {
- return false;
- }
- Runtime* runtime = Runtime::Current();
- bool result = false;
- for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
- if (this == runtime->GetCalleeSaveMethod(Runtime::CalleeSaveType(i))) {
- result = true;
- break;
- }
- }
- return result;
-}
-
-inline bool ArtMethod::IsResolutionMethod() {
- bool result = this == Runtime::Current()->GetResolutionMethod();
- // Check that if we do think it is phony it looks like the resolution method.
- DCHECK(!result || IsRuntimeMethod());
- return result;
-}
-
-inline bool ArtMethod::IsImtConflictMethod() {
- bool result = this == Runtime::Current()->GetImtConflictMethod();
- // Check that if we do think it is phony it looks like the imt conflict method.
- DCHECK(!result || IsRuntimeMethod());
- return result;
-}
-
-inline bool ArtMethod::IsImtUnimplementedMethod() {
- bool result = this == Runtime::Current()->GetImtUnimplementedMethod();
- // Check that if we do think it is phony it looks like the imt unimplemented method.
- DCHECK(!result || IsRuntimeMethod());
- return result;
-}
-
-inline uintptr_t ArtMethod::NativeQuickPcOffset(const uintptr_t pc) {
- const void* code = Runtime::Current()->GetInstrumentation()->GetQuickCodeFor(
- this, sizeof(void*));
- return pc - reinterpret_cast<uintptr_t>(code);
-}
-
-inline QuickMethodFrameInfo ArtMethod::GetQuickFrameInfo(const void* code_pointer) {
- DCHECK(code_pointer != nullptr);
- DCHECK_EQ(code_pointer, GetQuickOatCodePointer(sizeof(void*)));
- return reinterpret_cast<const OatQuickMethodHeader*>(code_pointer)[-1].frame_info_;
-}
-
-inline const DexFile* ArtMethod::GetDexFile() {
- return GetDexCache()->GetDexFile();
-}
-
-inline const char* ArtMethod::GetDeclaringClassDescriptor() {
- mirror::ArtMethod* method = GetInterfaceMethodIfProxy();
- uint32_t dex_method_idx = method->GetDexMethodIndex();
- if (UNLIKELY(dex_method_idx == DexFile::kDexNoIndex)) {
- return "<runtime method>";
- }
- const DexFile* dex_file = method->GetDexFile();
- return dex_file->GetMethodDeclaringClassDescriptor(dex_file->GetMethodId(dex_method_idx));
-}
-
-inline const char* ArtMethod::GetShorty(uint32_t* out_length) {
- mirror::ArtMethod* method = GetInterfaceMethodIfProxy();
- const DexFile* dex_file = method->GetDexFile();
- return dex_file->GetMethodShorty(dex_file->GetMethodId(method->GetDexMethodIndex()), out_length);
-}
-
-inline const Signature ArtMethod::GetSignature() {
- mirror::ArtMethod* method = GetInterfaceMethodIfProxy();
- uint32_t dex_method_idx = method->GetDexMethodIndex();
- if (dex_method_idx != DexFile::kDexNoIndex) {
- const DexFile* dex_file = method->GetDexFile();
- return dex_file->GetMethodSignature(dex_file->GetMethodId(dex_method_idx));
- }
- return Signature::NoSignature();
-}
-
-inline const char* ArtMethod::GetName() {
- mirror::ArtMethod* method = GetInterfaceMethodIfProxy();
- uint32_t dex_method_idx = method->GetDexMethodIndex();
- if (LIKELY(dex_method_idx != DexFile::kDexNoIndex)) {
- const DexFile* dex_file = method->GetDexFile();
- return dex_file->GetMethodName(dex_file->GetMethodId(dex_method_idx));
- }
- Runtime* runtime = Runtime::Current();
- if (method == runtime->GetResolutionMethod()) {
- return "<runtime internal resolution method>";
- } else if (method == runtime->GetImtConflictMethod()) {
- return "<runtime internal imt conflict method>";
- } else if (method == runtime->GetCalleeSaveMethod(Runtime::kSaveAll)) {
- return "<runtime internal callee-save all registers method>";
- } else if (method == runtime->GetCalleeSaveMethod(Runtime::kRefsOnly)) {
- return "<runtime internal callee-save reference registers method>";
- } else if (method == runtime->GetCalleeSaveMethod(Runtime::kRefsAndArgs)) {
- return "<runtime internal callee-save reference and argument registers method>";
- } else {
- return "<unknown runtime internal method>";
- }
-}
-
-inline const DexFile::CodeItem* ArtMethod::GetCodeItem() {
- return GetDeclaringClass()->GetDexFile().GetCodeItem(GetCodeItemOffset());
-}
-
-inline bool ArtMethod::IsResolvedTypeIdx(uint16_t type_idx) {
- mirror::ArtMethod* method = GetInterfaceMethodIfProxy();
- return method->GetDexCacheResolvedType(type_idx) != nullptr;
-}
-
-inline int32_t ArtMethod::GetLineNumFromDexPC(uint32_t dex_pc) {
- mirror::ArtMethod* method = GetInterfaceMethodIfProxy();
- if (dex_pc == DexFile::kDexNoIndex) {
- return method->IsNative() ? -2 : -1;
- }
- return method->GetDexFile()->GetLineNumFromPC(method, dex_pc);
-}
-
-inline const DexFile::ProtoId& ArtMethod::GetPrototype() {
- mirror::ArtMethod* method = GetInterfaceMethodIfProxy();
- const DexFile* dex_file = method->GetDexFile();
- return dex_file->GetMethodPrototype(dex_file->GetMethodId(method->GetDexMethodIndex()));
-}
-
-inline const DexFile::TypeList* ArtMethod::GetParameterTypeList() {
- mirror::ArtMethod* method = GetInterfaceMethodIfProxy();
- const DexFile* dex_file = method->GetDexFile();
- const DexFile::ProtoId& proto = dex_file->GetMethodPrototype(
- dex_file->GetMethodId(method->GetDexMethodIndex()));
- return dex_file->GetProtoParameters(proto);
-}
-
-inline const char* ArtMethod::GetDeclaringClassSourceFile() {
- return GetInterfaceMethodIfProxy()->GetDeclaringClass()->GetSourceFile();
-}
-
-inline uint16_t ArtMethod::GetClassDefIndex() {
- return GetInterfaceMethodIfProxy()->GetDeclaringClass()->GetDexClassDefIndex();
-}
-
-inline const DexFile::ClassDef& ArtMethod::GetClassDef() {
- mirror::ArtMethod* method = GetInterfaceMethodIfProxy();
- return method->GetDexFile()->GetClassDef(GetClassDefIndex());
-}
-
-inline const char* ArtMethod::GetReturnTypeDescriptor() {
- mirror::ArtMethod* method = GetInterfaceMethodIfProxy();
- const DexFile* dex_file = method->GetDexFile();
- const DexFile::MethodId& method_id = dex_file->GetMethodId(method->GetDexMethodIndex());
- const DexFile::ProtoId& proto_id = dex_file->GetMethodPrototype(method_id);
- uint16_t return_type_idx = proto_id.return_type_idx_;
- return dex_file->GetTypeDescriptor(dex_file->GetTypeId(return_type_idx));
-}
-
-inline const char* ArtMethod::GetTypeDescriptorFromTypeIdx(uint16_t type_idx) {
- mirror::ArtMethod* method = GetInterfaceMethodIfProxy();
- const DexFile* dex_file = method->GetDexFile();
- return dex_file->GetTypeDescriptor(dex_file->GetTypeId(type_idx));
-}
-
-inline mirror::ClassLoader* ArtMethod::GetClassLoader() {
- return GetInterfaceMethodIfProxy()->GetDeclaringClass()->GetClassLoader();
-}
-
-inline mirror::DexCache* ArtMethod::GetDexCache() {
- return GetInterfaceMethodIfProxy()->GetDeclaringClass()->GetDexCache();
-}
-
-inline bool ArtMethod::IsProxyMethod() {
- return GetDeclaringClass()->IsProxyClass();
-}
-
-inline ArtMethod* ArtMethod::GetInterfaceMethodIfProxy() {
- if (LIKELY(!IsProxyMethod())) {
- return this;
- }
- mirror::Class* klass = GetDeclaringClass();
- mirror::ArtMethod* interface_method = GetDexCacheResolvedMethods()->Get(GetDexMethodIndex());
- DCHECK(interface_method != nullptr);
- DCHECK_EQ(interface_method,
- Runtime::Current()->GetClassLinker()->FindMethodForProxy(klass, this));
- return interface_method;
-}
-
-inline void ArtMethod::SetDexCacheResolvedMethods(ObjectArray<ArtMethod>* new_dex_cache_methods) {
- SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_methods_),
- new_dex_cache_methods);
-}
-
-inline void ArtMethod::SetDexCacheResolvedTypes(ObjectArray<Class>* new_dex_cache_classes) {
- SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_types_),
- new_dex_cache_classes);
-}
-
-inline mirror::Class* ArtMethod::GetReturnType(bool resolve) {
- DCHECK(!IsProxyMethod());
- const DexFile* dex_file = GetDexFile();
- const DexFile::MethodId& method_id = dex_file->GetMethodId(GetDexMethodIndex());
- const DexFile::ProtoId& proto_id = dex_file->GetMethodPrototype(method_id);
- uint16_t return_type_idx = proto_id.return_type_idx_;
- mirror::Class* type = GetDexCacheResolvedType(return_type_idx);
- if (type == nullptr && resolve) {
- type = Runtime::Current()->GetClassLinker()->ResolveType(return_type_idx, this);
- CHECK(type != nullptr || Thread::Current()->IsExceptionPending());
- }
- return type;
-}
-
-inline void ArtMethod::CheckObjectSizeEqualsMirrorSize() {
- // Using the default, check the class object size to make sure it matches the size of the
- // object.
- size_t this_size = sizeof(*this);
-#ifdef ART_METHOD_HAS_PADDING_FIELD_ON_64_BIT
- this_size += sizeof(void*) - sizeof(uint32_t);
-#endif
- DCHECK_EQ(GetClass()->GetObjectSize(), this_size);
-}
-
-} // namespace mirror
-} // namespace art
-
-#endif // ART_RUNTIME_MIRROR_ART_METHOD_INL_H_
diff --git a/runtime/mirror/art_method.cc b/runtime/mirror/art_method.cc
deleted file mode 100644
index 9518c9d..0000000
--- a/runtime/mirror/art_method.cc
+++ /dev/null
@@ -1,574 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "art_method.h"
-
-#include "abstract_method.h"
-#include "arch/context.h"
-#include "art_field-inl.h"
-#include "art_method-inl.h"
-#include "base/stringpiece.h"
-#include "class-inl.h"
-#include "dex_file-inl.h"
-#include "dex_instruction.h"
-#include "entrypoints/entrypoint_utils.h"
-#include "entrypoints/runtime_asm_entrypoints.h"
-#include "gc/accounting/card_table-inl.h"
-#include "interpreter/interpreter.h"
-#include "jit/jit.h"
-#include "jit/jit_code_cache.h"
-#include "jni_internal.h"
-#include "mapping_table.h"
-#include "object_array-inl.h"
-#include "object_array.h"
-#include "object-inl.h"
-#include "scoped_thread_state_change.h"
-#include "string.h"
-#include "well_known_classes.h"
-
-namespace art {
-namespace mirror {
-
-extern "C" void art_quick_invoke_stub(ArtMethod*, uint32_t*, uint32_t, Thread*, JValue*,
- const char*);
-#if defined(__LP64__) || defined(__arm__) || defined(__i386__)
-extern "C" void art_quick_invoke_static_stub(ArtMethod*, uint32_t*, uint32_t, Thread*, JValue*,
- const char*);
-#endif
-
-// TODO: get global references for these
-GcRoot<Class> ArtMethod::java_lang_reflect_ArtMethod_;
-
-ArtMethod* ArtMethod::FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
- jobject jlr_method) {
- auto* abstract_method = soa.Decode<mirror::AbstractMethod*>(jlr_method);
- DCHECK(abstract_method != nullptr);
- return abstract_method->GetArtMethod();
-}
-
-void ArtMethod::VisitRoots(RootVisitor* visitor) {
- java_lang_reflect_ArtMethod_.VisitRootIfNonNull(visitor, RootInfo(kRootStickyClass));
-}
-
-mirror::String* ArtMethod::GetNameAsString(Thread* self) {
- mirror::ArtMethod* method = GetInterfaceMethodIfProxy();
- const DexFile* dex_file = method->GetDexFile();
- uint32_t dex_method_idx = method->GetDexMethodIndex();
- const DexFile::MethodId& method_id = dex_file->GetMethodId(dex_method_idx);
- StackHandleScope<1> hs(self);
- Handle<mirror::DexCache> dex_cache(hs.NewHandle(method->GetDexCache()));
- return Runtime::Current()->GetClassLinker()->ResolveString(*dex_file, method_id.name_idx_,
- dex_cache);
-}
-
-InvokeType ArtMethod::GetInvokeType() {
- // TODO: kSuper?
- if (GetDeclaringClass()->IsInterface()) {
- return kInterface;
- } else if (IsStatic()) {
- return kStatic;
- } else if (IsDirect()) {
- return kDirect;
- } else {
- return kVirtual;
- }
-}
-
-void ArtMethod::SetClass(Class* java_lang_reflect_ArtMethod) {
- CHECK(java_lang_reflect_ArtMethod_.IsNull());
- CHECK(java_lang_reflect_ArtMethod != nullptr);
- java_lang_reflect_ArtMethod_ = GcRoot<Class>(java_lang_reflect_ArtMethod);
-}
-
-void ArtMethod::ResetClass() {
- CHECK(!java_lang_reflect_ArtMethod_.IsNull());
- java_lang_reflect_ArtMethod_ = GcRoot<Class>(nullptr);
-}
-
-size_t ArtMethod::NumArgRegisters(const StringPiece& shorty) {
- CHECK_LE(1U, shorty.length());
- uint32_t num_registers = 0;
- for (size_t i = 1; i < shorty.length(); ++i) {
- char ch = shorty[i];
- if (ch == 'D' || ch == 'J') {
- num_registers += 2;
- } else {
- num_registers += 1;
- }
- }
- return num_registers;
-}
-
-static bool HasSameNameAndSignature(ArtMethod* method1, ArtMethod* method2)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- ScopedAssertNoThreadSuspension ants(Thread::Current(), "HasSameNameAndSignature");
- const DexFile* dex_file = method1->GetDexFile();
- const DexFile::MethodId& mid = dex_file->GetMethodId(method1->GetDexMethodIndex());
- if (method1->GetDexCache() == method2->GetDexCache()) {
- const DexFile::MethodId& mid2 = dex_file->GetMethodId(method2->GetDexMethodIndex());
- return mid.name_idx_ == mid2.name_idx_ && mid.proto_idx_ == mid2.proto_idx_;
- }
- const DexFile* dex_file2 = method2->GetDexFile();
- const DexFile::MethodId& mid2 = dex_file2->GetMethodId(method2->GetDexMethodIndex());
- if (!DexFileStringEquals(dex_file, mid.name_idx_, dex_file2, mid2.name_idx_)) {
- return false; // Name mismatch.
- }
- return dex_file->GetMethodSignature(mid) == dex_file2->GetMethodSignature(mid2);
-}
-
-ArtMethod* ArtMethod::FindOverriddenMethod() {
- if (IsStatic()) {
- return nullptr;
- }
- Class* declaring_class = GetDeclaringClass();
- Class* super_class = declaring_class->GetSuperClass();
- uint16_t method_index = GetMethodIndex();
- ArtMethod* result = nullptr;
- // Did this method override a super class method? If so load the result from the super class'
- // vtable
- if (super_class->HasVTable() && method_index < super_class->GetVTableLength()) {
- result = super_class->GetVTableEntry(method_index);
- } else {
- // Method didn't override superclass method so search interfaces
- if (IsProxyMethod()) {
- result = GetDexCacheResolvedMethods()->Get(GetDexMethodIndex());
- CHECK_EQ(result,
- Runtime::Current()->GetClassLinker()->FindMethodForProxy(GetDeclaringClass(), this));
- } else {
- IfTable* iftable = GetDeclaringClass()->GetIfTable();
- for (size_t i = 0; i < iftable->Count() && result == nullptr; i++) {
- Class* interface = iftable->GetInterface(i);
- for (size_t j = 0; j < interface->NumVirtualMethods(); ++j) {
- mirror::ArtMethod* interface_method = interface->GetVirtualMethod(j);
- if (HasSameNameAndSignature(this, interface_method)) {
- result = interface_method;
- break;
- }
- }
- }
- }
- }
- if (kIsDebugBuild) {
- DCHECK(result == nullptr || HasSameNameAndSignature(this, result));
- }
- return result;
-}
-
-uint32_t ArtMethod::FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
- uint32_t name_and_signature_idx) {
- const DexFile* dexfile = GetDexFile();
- const uint32_t dex_method_idx = GetDexMethodIndex();
- const DexFile::MethodId& mid = dexfile->GetMethodId(dex_method_idx);
- const DexFile::MethodId& name_and_sig_mid = other_dexfile.GetMethodId(name_and_signature_idx);
- DCHECK_STREQ(dexfile->GetMethodName(mid), other_dexfile.GetMethodName(name_and_sig_mid));
- DCHECK_EQ(dexfile->GetMethodSignature(mid), other_dexfile.GetMethodSignature(name_and_sig_mid));
- if (dexfile == &other_dexfile) {
- return dex_method_idx;
- }
- const char* mid_declaring_class_descriptor = dexfile->StringByTypeIdx(mid.class_idx_);
- const DexFile::StringId* other_descriptor =
- other_dexfile.FindStringId(mid_declaring_class_descriptor);
- if (other_descriptor != nullptr) {
- const DexFile::TypeId* other_type_id =
- other_dexfile.FindTypeId(other_dexfile.GetIndexForStringId(*other_descriptor));
- if (other_type_id != nullptr) {
- const DexFile::MethodId* other_mid = other_dexfile.FindMethodId(
- *other_type_id, other_dexfile.GetStringId(name_and_sig_mid.name_idx_),
- other_dexfile.GetProtoId(name_and_sig_mid.proto_idx_));
- if (other_mid != nullptr) {
- return other_dexfile.GetIndexForMethodId(*other_mid);
- }
- }
- }
- return DexFile::kDexNoIndex;
-}
-
-uint32_t ArtMethod::ToDexPc(const uintptr_t pc, bool abort_on_failure) {
- const void* entry_point = GetQuickOatEntryPoint(sizeof(void*));
- uint32_t sought_offset = pc - reinterpret_cast<uintptr_t>(entry_point);
- if (IsOptimized(sizeof(void*))) {
- CodeInfo code_info = GetOptimizedCodeInfo();
- return code_info.GetStackMapForNativePcOffset(sought_offset).GetDexPc(code_info);
- }
-
- MappingTable table(entry_point != nullptr ?
- GetMappingTable(EntryPointToCodePointer(entry_point), sizeof(void*)) : nullptr);
- if (table.TotalSize() == 0) {
- // NOTE: Special methods (see Mir2Lir::GenSpecialCase()) have an empty mapping
- // but they have no suspend checks and, consequently, we never call ToDexPc() for them.
- DCHECK(IsNative() || IsCalleeSaveMethod() || IsProxyMethod()) << PrettyMethod(this);
- return DexFile::kDexNoIndex; // Special no mapping case
- }
- // Assume the caller wants a pc-to-dex mapping so check here first.
- typedef MappingTable::PcToDexIterator It;
- for (It cur = table.PcToDexBegin(), end = table.PcToDexEnd(); cur != end; ++cur) {
- if (cur.NativePcOffset() == sought_offset) {
- return cur.DexPc();
- }
- }
- // Now check dex-to-pc mappings.
- typedef MappingTable::DexToPcIterator It2;
- for (It2 cur = table.DexToPcBegin(), end = table.DexToPcEnd(); cur != end; ++cur) {
- if (cur.NativePcOffset() == sought_offset) {
- return cur.DexPc();
- }
- }
- if (abort_on_failure) {
- LOG(FATAL) << "Failed to find Dex offset for PC offset " << reinterpret_cast<void*>(sought_offset)
- << "(PC " << reinterpret_cast<void*>(pc) << ", entry_point=" << entry_point
- << " current entry_point=" << GetQuickOatEntryPoint(sizeof(void*))
- << ") in " << PrettyMethod(this);
- }
- return DexFile::kDexNoIndex;
-}
-
-uintptr_t ArtMethod::ToNativeQuickPc(const uint32_t dex_pc, bool abort_on_failure) {
- const void* entry_point = GetQuickOatEntryPoint(sizeof(void*));
- MappingTable table(entry_point != nullptr ?
- GetMappingTable(EntryPointToCodePointer(entry_point), sizeof(void*)) : nullptr);
- if (table.TotalSize() == 0) {
- DCHECK_EQ(dex_pc, 0U);
- return 0; // Special no mapping/pc == 0 case
- }
- // Assume the caller wants a dex-to-pc mapping so check here first.
- typedef MappingTable::DexToPcIterator It;
- for (It cur = table.DexToPcBegin(), end = table.DexToPcEnd(); cur != end; ++cur) {
- if (cur.DexPc() == dex_pc) {
- return reinterpret_cast<uintptr_t>(entry_point) + cur.NativePcOffset();
- }
- }
- // Now check pc-to-dex mappings.
- typedef MappingTable::PcToDexIterator It2;
- for (It2 cur = table.PcToDexBegin(), end = table.PcToDexEnd(); cur != end; ++cur) {
- if (cur.DexPc() == dex_pc) {
- return reinterpret_cast<uintptr_t>(entry_point) + cur.NativePcOffset();
- }
- }
- if (abort_on_failure) {
- LOG(FATAL) << "Failed to find native offset for dex pc 0x" << std::hex << dex_pc
- << " in " << PrettyMethod(this);
- }
- return UINTPTR_MAX;
-}
-
-uint32_t ArtMethod::FindCatchBlock(Handle<ArtMethod> h_this, Handle<Class> exception_type,
- uint32_t dex_pc, bool* has_no_move_exception) {
- const DexFile::CodeItem* code_item = h_this->GetCodeItem();
- // Set aside the exception while we resolve its type.
- Thread* self = Thread::Current();
- StackHandleScope<1> hs(self);
- Handle<mirror::Throwable> exception(hs.NewHandle(self->GetException()));
- self->ClearException();
- // Default to handler not found.
- uint32_t found_dex_pc = DexFile::kDexNoIndex;
- // Iterate over the catch handlers associated with dex_pc.
- for (CatchHandlerIterator it(*code_item, dex_pc); it.HasNext(); it.Next()) {
- uint16_t iter_type_idx = it.GetHandlerTypeIndex();
- // Catch all case
- if (iter_type_idx == DexFile::kDexNoIndex16) {
- found_dex_pc = it.GetHandlerAddress();
- break;
- }
- // Does this catch exception type apply?
- Class* iter_exception_type = h_this->GetClassFromTypeIndex(iter_type_idx, true);
- if (UNLIKELY(iter_exception_type == nullptr)) {
- // Now have a NoClassDefFoundError as exception. Ignore in case the exception class was
- // removed by a pro-guard like tool.
- // Note: this is not RI behavior. RI would have failed when loading the class.
- self->ClearException();
- // Delete any long jump context as this routine is called during a stack walk which will
- // release its in use context at the end.
- delete self->GetLongJumpContext();
- LOG(WARNING) << "Unresolved exception class when finding catch block: "
- << DescriptorToDot(h_this->GetTypeDescriptorFromTypeIdx(iter_type_idx));
- } else if (iter_exception_type->IsAssignableFrom(exception_type.Get())) {
- found_dex_pc = it.GetHandlerAddress();
- break;
- }
- }
- if (found_dex_pc != DexFile::kDexNoIndex) {
- const Instruction* first_catch_instr =
- Instruction::At(&code_item->insns_[found_dex_pc]);
- *has_no_move_exception = (first_catch_instr->Opcode() != Instruction::MOVE_EXCEPTION);
- }
- // Put the exception back.
- if (exception.Get() != nullptr) {
- self->SetException(exception.Get());
- }
- return found_dex_pc;
-}
-
-void ArtMethod::AssertPcIsWithinQuickCode(uintptr_t pc) {
- if (IsNative() || IsRuntimeMethod() || IsProxyMethod()) {
- return;
- }
- if (pc == reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc())) {
- return;
- }
- const void* code = GetEntryPointFromQuickCompiledCode();
- if (code == GetQuickInstrumentationEntryPoint()) {
- return;
- }
- ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
- if (class_linker->IsQuickToInterpreterBridge(code) ||
- class_linker->IsQuickResolutionStub(code)) {
- return;
- }
- // If we are the JIT then we may have just compiled the method after the
- // IsQuickToInterpreterBridge check.
- jit::Jit* const jit = Runtime::Current()->GetJit();
- if (jit != nullptr &&
- jit->GetCodeCache()->ContainsCodePtr(reinterpret_cast<const void*>(code))) {
- return;
- }
- /*
- * During a stack walk, a return PC may point past-the-end of the code
- * in the case that the last instruction is a call that isn't expected to
- * return. Thus, we check <= code + GetCodeSize().
- *
- * NOTE: For Thumb both pc and code are offset by 1 indicating the Thumb state.
- */
- CHECK(PcIsWithinQuickCode(reinterpret_cast<uintptr_t>(code), pc))
- << PrettyMethod(this)
- << " pc=" << std::hex << pc
- << " code=" << code
- << " size=" << GetCodeSize(
- EntryPointToCodePointer(reinterpret_cast<const void*>(code)));
-}
-
-bool ArtMethod::IsEntrypointInterpreter() {
- ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
- const void* oat_quick_code = class_linker->GetOatMethodQuickCodeFor(this);
- return oat_quick_code == nullptr || oat_quick_code != GetEntryPointFromQuickCompiledCode();
-}
-
-const void* ArtMethod::GetQuickOatEntryPoint(size_t pointer_size) {
- if (IsAbstract() || IsRuntimeMethod() || IsProxyMethod()) {
- return nullptr;
- }
- Runtime* runtime = Runtime::Current();
- ClassLinker* class_linker = runtime->GetClassLinker();
- const void* code = runtime->GetInstrumentation()->GetQuickCodeFor(this, pointer_size);
- // On failure, instead of null we get the quick-generic-jni-trampoline for native method
- // indicating the generic JNI, or the quick-to-interpreter-bridge (but not the trampoline)
- // for non-native methods.
- if (class_linker->IsQuickToInterpreterBridge(code) ||
- class_linker->IsQuickGenericJniStub(code)) {
- return nullptr;
- }
- return code;
-}
-
-#ifndef NDEBUG
-uintptr_t ArtMethod::NativeQuickPcOffset(const uintptr_t pc, const void* quick_entry_point) {
- CHECK_NE(quick_entry_point, GetQuickToInterpreterBridge());
- CHECK_EQ(quick_entry_point, Runtime::Current()->GetInstrumentation()->GetQuickCodeFor(this, sizeof(void*)));
- return pc - reinterpret_cast<uintptr_t>(quick_entry_point);
-}
-#endif
-
-void ArtMethod::Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result,
- const char* shorty) {
- if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEnd())) {
- ThrowStackOverflowError(self);
- return;
- }
-
- if (kIsDebugBuild) {
- self->AssertThreadSuspensionIsAllowable();
- CHECK_EQ(kRunnable, self->GetState());
- CHECK_STREQ(GetShorty(), shorty);
- }
-
- // Push a transition back into managed code onto the linked list in thread.
- ManagedStack fragment;
- self->PushManagedStackFragment(&fragment);
-
- Runtime* runtime = Runtime::Current();
- // Call the invoke stub, passing everything as arguments.
- // If the runtime is not yet started or it is required by the debugger, then perform the
- // Invocation by the interpreter.
- if (UNLIKELY(!runtime->IsStarted() || Dbg::IsForcedInterpreterNeededForCalling(self, this))) {
- if (IsStatic()) {
- art::interpreter::EnterInterpreterFromInvoke(self, this, nullptr, args, result);
- } else {
- Object* receiver = reinterpret_cast<StackReference<Object>*>(&args[0])->AsMirrorPtr();
- art::interpreter::EnterInterpreterFromInvoke(self, this, receiver, args + 1, result);
- }
- } else {
- const bool kLogInvocationStartAndReturn = false;
- bool have_quick_code = GetEntryPointFromQuickCompiledCode() != nullptr;
- if (LIKELY(have_quick_code)) {
- if (kLogInvocationStartAndReturn) {
- LOG(INFO) << StringPrintf("Invoking '%s' quick code=%p", PrettyMethod(this).c_str(),
- GetEntryPointFromQuickCompiledCode());
- }
-
- // Ensure that we won't be accidentally calling quick compiled code when -Xint.
- if (kIsDebugBuild && runtime->GetInstrumentation()->IsForcedInterpretOnly()) {
- DCHECK(!runtime->UseJit());
- CHECK(IsEntrypointInterpreter())
- << "Don't call compiled code when -Xint " << PrettyMethod(this);
- }
-
-#if defined(__LP64__) || defined(__arm__) || defined(__i386__)
- if (!IsStatic()) {
- (*art_quick_invoke_stub)(this, args, args_size, self, result, shorty);
- } else {
- (*art_quick_invoke_static_stub)(this, args, args_size, self, result, shorty);
- }
-#else
- (*art_quick_invoke_stub)(this, args, args_size, self, result, shorty);
-#endif
- if (UNLIKELY(self->GetException() == Thread::GetDeoptimizationException())) {
- // Unusual case where we were running generated code and an
- // exception was thrown to force the activations to be removed from the
- // stack. Continue execution in the interpreter.
- self->ClearException();
- ShadowFrame* shadow_frame = self->GetAndClearDeoptimizationShadowFrame(result);
- self->SetTopOfStack(nullptr);
- self->SetTopOfShadowStack(shadow_frame);
- interpreter::EnterInterpreterFromDeoptimize(self, shadow_frame, result);
- }
- if (kLogInvocationStartAndReturn) {
- LOG(INFO) << StringPrintf("Returned '%s' quick code=%p", PrettyMethod(this).c_str(),
- GetEntryPointFromQuickCompiledCode());
- }
- } else {
- LOG(INFO) << "Not invoking '" << PrettyMethod(this) << "' code=null";
- if (result != nullptr) {
- result->SetJ(0);
- }
- }
- }
-
- // Pop transition.
- self->PopManagedStackFragment(fragment);
-}
-
-// Counts the number of references in the parameter list of the corresponding method.
-// Note: Thus does _not_ include "this" for non-static methods.
-static uint32_t GetNumberOfReferenceArgsWithoutReceiver(ArtMethod* method)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- uint32_t shorty_len;
- const char* shorty = method->GetShorty(&shorty_len);
- uint32_t refs = 0;
- for (uint32_t i = 1; i < shorty_len ; ++i) {
- if (shorty[i] == 'L') {
- refs++;
- }
- }
- return refs;
-}
-
-QuickMethodFrameInfo ArtMethod::GetQuickFrameInfo() {
- Runtime* runtime = Runtime::Current();
-
- if (UNLIKELY(IsAbstract())) {
- return runtime->GetCalleeSaveMethodFrameInfo(Runtime::kRefsAndArgs);
- }
-
- // For Proxy method we add special handling for the direct method case (there is only one
- // direct method - constructor). Direct method is cloned from original
- // java.lang.reflect.Proxy class together with code and as a result it is executed as usual
- // quick compiled method without any stubs. So the frame info should be returned as it is a
- // quick method not a stub. However, if instrumentation stubs are installed, the
- // instrumentation->GetQuickCodeFor() returns the artQuickProxyInvokeHandler instead of an
- // oat code pointer, thus we have to add a special case here.
- if (UNLIKELY(IsProxyMethod())) {
- if (IsDirect()) {
- CHECK(IsConstructor());
- return GetQuickFrameInfo(EntryPointToCodePointer(GetEntryPointFromQuickCompiledCode()));
- } else {
- return runtime->GetCalleeSaveMethodFrameInfo(Runtime::kRefsAndArgs);
- }
- }
-
- if (UNLIKELY(IsRuntimeMethod())) {
- return runtime->GetRuntimeMethodFrameInfo(this);
- }
-
- const void* entry_point = runtime->GetInstrumentation()->GetQuickCodeFor(this, sizeof(void*));
- ClassLinker* class_linker = runtime->GetClassLinker();
- // On failure, instead of null we get the quick-generic-jni-trampoline for native method
- // indicating the generic JNI, or the quick-to-interpreter-bridge (but not the trampoline)
- // for non-native methods. And we really shouldn't see a failure for non-native methods here.
- DCHECK(!class_linker->IsQuickToInterpreterBridge(entry_point));
-
- if (class_linker->IsQuickGenericJniStub(entry_point)) {
- // Generic JNI frame.
- DCHECK(IsNative());
- uint32_t handle_refs = GetNumberOfReferenceArgsWithoutReceiver(this) + 1;
- size_t scope_size = HandleScope::SizeOf(handle_refs);
- QuickMethodFrameInfo callee_info = runtime->GetCalleeSaveMethodFrameInfo(Runtime::kRefsAndArgs);
-
- // Callee saves + handle scope + method ref + alignment
- size_t frame_size = RoundUp(callee_info.FrameSizeInBytes() + scope_size
- - sizeof(void*) // callee-save frame stores a whole method pointer
- + sizeof(StackReference<mirror::ArtMethod>),
- kStackAlignment);
-
- return QuickMethodFrameInfo(frame_size, callee_info.CoreSpillMask(), callee_info.FpSpillMask());
- }
-
- const void* code_pointer = EntryPointToCodePointer(entry_point);
- return GetQuickFrameInfo(code_pointer);
-}
-
-void ArtMethod::RegisterNative(const void* native_method, bool is_fast) {
- CHECK(IsNative()) << PrettyMethod(this);
- CHECK(!IsFastNative()) << PrettyMethod(this);
- CHECK(native_method != nullptr) << PrettyMethod(this);
- if (is_fast) {
- SetAccessFlags(GetAccessFlags() | kAccFastNative);
- }
- SetEntryPointFromJni(native_method);
-}
-
-void ArtMethod::UnregisterNative() {
- CHECK(IsNative() && !IsFastNative()) << PrettyMethod(this);
- // restore stub to lookup native pointer via dlsym
- RegisterNative(GetJniDlsymLookupStub(), false);
-}
-
-bool ArtMethod::EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params) {
- auto* dex_cache = GetDexCache();
- auto* dex_file = dex_cache->GetDexFile();
- const auto& method_id = dex_file->GetMethodId(GetDexMethodIndex());
- const auto& proto_id = dex_file->GetMethodPrototype(method_id);
- const DexFile::TypeList* proto_params = dex_file->GetProtoParameters(proto_id);
- auto count = proto_params != nullptr ? proto_params->Size() : 0u;
- auto param_len = params.Get() != nullptr ? params->GetLength() : 0u;
- if (param_len != count) {
- return false;
- }
- auto* cl = Runtime::Current()->GetClassLinker();
- for (size_t i = 0; i < count; ++i) {
- auto type_idx = proto_params->GetTypeItem(i).type_idx_;
- auto* type = cl->ResolveType(type_idx, this);
- if (type == nullptr) {
- Thread::Current()->AssertPendingException();
- return false;
- }
- if (type != params->GetWithoutChecks(i)) {
- return false;
- }
- }
- return true;
-}
-
-} // namespace mirror
-} // namespace art
diff --git a/runtime/mirror/art_method.h b/runtime/mirror/art_method.h
deleted file mode 100644
index 0da5925..0000000
--- a/runtime/mirror/art_method.h
+++ /dev/null
@@ -1,650 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ART_RUNTIME_MIRROR_ART_METHOD_H_
-#define ART_RUNTIME_MIRROR_ART_METHOD_H_
-
-#include "dex_file.h"
-#include "gc_root.h"
-#include "invoke_type.h"
-#include "method_reference.h"
-#include "modifiers.h"
-#include "object.h"
-#include "object_callbacks.h"
-#include "quick/quick_method_frame_info.h"
-#include "read_barrier_option.h"
-#include "stack.h"
-#include "stack_map.h"
-
-namespace art {
-
-struct ArtMethodOffsets;
-struct ConstructorMethodOffsets;
-union JValue;
-class ScopedObjectAccessAlreadyRunnable;
-class StringPiece;
-class ShadowFrame;
-
-namespace mirror {
-
-typedef void (EntryPointFromInterpreter)(Thread* self, const DexFile::CodeItem* code_item,
- ShadowFrame* shadow_frame, JValue* result);
-
-#define ART_METHOD_HAS_PADDING_FIELD_ON_64_BIT
-
-// C++ mirror of java.lang.reflect.ArtMethod.
-class MANAGED ArtMethod FINAL : public Object {
- public:
- // Size of java.lang.reflect.ArtMethod.class.
- static uint32_t ClassSize();
-
- static ArtMethod* FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
- jobject jlr_method)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- Class* GetDeclaringClass() ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- void SetDeclaringClass(Class *new_declaring_class) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- static MemberOffset DeclaringClassOffset() {
- return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
- }
-
- ALWAYS_INLINE uint32_t GetAccessFlags() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- void SetAccessFlags(uint32_t new_access_flags) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- // Not called within a transaction.
- SetField32<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, access_flags_), new_access_flags);
- }
-
- // Approximate what kind of method call would be used for this method.
- InvokeType GetInvokeType() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- // Returns true if the method is declared public.
- bool IsPublic() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return (GetAccessFlags() & kAccPublic) != 0;
- }
-
- // Returns true if the method is declared private.
- bool IsPrivate() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return (GetAccessFlags() & kAccPrivate) != 0;
- }
-
- // Returns true if the method is declared static.
- bool IsStatic() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return (GetAccessFlags() & kAccStatic) != 0;
- }
-
- // Returns true if the method is a constructor.
- bool IsConstructor() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return (GetAccessFlags() & kAccConstructor) != 0;
- }
-
- // Returns true if the method is a class initializer.
- bool IsClassInitializer() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return IsConstructor() && IsStatic();
- }
-
- // Returns true if the method is static, private, or a constructor.
- bool IsDirect() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return IsDirect(GetAccessFlags());
- }
-
- static bool IsDirect(uint32_t access_flags) {
- return (access_flags & (kAccStatic | kAccPrivate | kAccConstructor)) != 0;
- }
-
- // Returns true if the method is declared synchronized.
- bool IsSynchronized() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- uint32_t synchonized = kAccSynchronized | kAccDeclaredSynchronized;
- return (GetAccessFlags() & synchonized) != 0;
- }
-
- bool IsFinal() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return (GetAccessFlags() & kAccFinal) != 0;
- }
-
- bool IsMiranda() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return (GetAccessFlags() & kAccMiranda) != 0;
- }
-
- bool IsNative() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return (GetAccessFlags() & kAccNative) != 0;
- }
-
- bool ShouldNotInline() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return (GetAccessFlags() & kAccDontInline) != 0;
- }
-
- void SetShouldNotInline() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- SetAccessFlags(GetAccessFlags() | kAccDontInline);
- }
-
- bool IsFastNative() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- uint32_t mask = kAccFastNative | kAccNative;
- return (GetAccessFlags() & mask) == mask;
- }
-
- bool IsAbstract() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return (GetAccessFlags() & kAccAbstract) != 0;
- }
-
- bool IsSynthetic() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return (GetAccessFlags() & kAccSynthetic) != 0;
- }
-
- bool IsProxyMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- bool IsPreverified() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return (GetAccessFlags() & kAccPreverified) != 0;
- }
-
- void SetPreverified() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- DCHECK(!IsPreverified());
- SetAccessFlags(GetAccessFlags() | kAccPreverified);
- }
-
- bool IsOptimized(size_t pointer_size) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- // Temporary solution for detecting if a method has been optimized: the compiler
- // does not create a GC map. Instead, the vmap table contains the stack map
- // (as in stack_map.h).
- return !IsNative()
- && GetEntryPointFromQuickCompiledCodePtrSize(pointer_size) != nullptr
- && GetQuickOatCodePointer(pointer_size) != nullptr
- && GetNativeGcMap(pointer_size) == nullptr;
- }
-
- bool CheckIncompatibleClassChange(InvokeType type) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- uint16_t GetMethodIndex() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- // Doesn't do erroneous / unresolved class checks.
- uint16_t GetMethodIndexDuringLinking() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- size_t GetVtableIndex() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return GetMethodIndex();
- }
-
- void SetMethodIndex(uint16_t new_method_index) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- // Not called within a transaction.
- SetField32<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_), new_method_index);
- }
-
- static MemberOffset DexMethodIndexOffset() {
- return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_method_index_);
- }
-
- static MemberOffset MethodIndexOffset() {
- return OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_);
- }
-
- uint32_t GetCodeItemOffset() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return GetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_code_item_offset_));
- }
-
- void SetCodeItemOffset(uint32_t new_code_off) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- // Not called within a transaction.
- SetField32<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_code_item_offset_), new_code_off);
- }
-
- // Number of 32bit registers that would be required to hold all the arguments
- static size_t NumArgRegisters(const StringPiece& shorty);
-
- ALWAYS_INLINE uint32_t GetDexMethodIndex() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- void SetDexMethodIndex(uint32_t new_idx) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- // Not called within a transaction.
- SetField32<false>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_method_index_), new_idx);
- }
-
- static MemberOffset DexCacheResolvedMethodsOffset() {
- return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_methods_);
- }
-
- static MemberOffset DexCacheResolvedTypesOffset() {
- return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_types_);
- }
-
- ALWAYS_INLINE ObjectArray<ArtMethod>* GetDexCacheResolvedMethods()
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ALWAYS_INLINE ArtMethod* GetDexCacheResolvedMethod(uint16_t method_idx)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ALWAYS_INLINE void SetDexCacheResolvedMethod(uint16_t method_idx, ArtMethod* new_method)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ALWAYS_INLINE void SetDexCacheResolvedMethods(ObjectArray<ArtMethod>* new_dex_cache_methods)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- bool HasDexCacheResolvedMethods() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- bool HasSameDexCacheResolvedMethods(ArtMethod* other) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- bool HasSameDexCacheResolvedMethods(ObjectArray<ArtMethod>* other_cache)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- template <bool kWithCheck = true>
- Class* GetDexCacheResolvedType(uint32_t type_idx) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void SetDexCacheResolvedTypes(ObjectArray<Class>* new_dex_cache_types)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- bool HasDexCacheResolvedTypes() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- bool HasSameDexCacheResolvedTypes(ArtMethod* other) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- bool HasSameDexCacheResolvedTypes(ObjectArray<Class>* other_cache)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- // Get the Class* from the type index into this method's dex cache.
- mirror::Class* GetClassFromTypeIndex(uint16_t type_idx, bool resolve)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- // Find the method that this method overrides.
- ArtMethod* FindOverriddenMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- // Find the method index for this method within other_dexfile. If this method isn't present then
- // return DexFile::kDexNoIndex. The name_and_signature_idx MUST refer to a MethodId with the same
- // name and signature in the other_dexfile, such as the method index used to resolve this method
- // in the other_dexfile.
- uint32_t FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
- uint32_t name_and_signature_idx)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, const char* shorty)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- EntryPointFromInterpreter* GetEntryPointFromInterpreter()
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- CheckObjectSizeEqualsMirrorSize();
- return GetEntryPointFromInterpreterPtrSize(sizeof(void*));
- }
- template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- EntryPointFromInterpreter* GetEntryPointFromInterpreterPtrSize(size_t pointer_size)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return GetFieldPtrWithSize<EntryPointFromInterpreter*, kVerifyFlags>(
- EntryPointFromInterpreterOffset(pointer_size), pointer_size);
- }
-
- template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- void SetEntryPointFromInterpreter(EntryPointFromInterpreter* entry_point_from_interpreter)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- CheckObjectSizeEqualsMirrorSize();
- SetEntryPointFromInterpreterPtrSize(entry_point_from_interpreter, sizeof(void*));
- }
- template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- void SetEntryPointFromInterpreterPtrSize(EntryPointFromInterpreter* entry_point_from_interpreter,
- size_t pointer_size)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- SetFieldPtrWithSize<false, true, kVerifyFlags>(
- EntryPointFromInterpreterOffset(pointer_size), entry_point_from_interpreter, pointer_size);
- }
-
- template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- const void* GetEntryPointFromQuickCompiledCode() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- CheckObjectSizeEqualsMirrorSize();
- return GetEntryPointFromQuickCompiledCodePtrSize(sizeof(void*));
- }
- template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- ALWAYS_INLINE const void* GetEntryPointFromQuickCompiledCodePtrSize(size_t pointer_size)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return GetFieldPtrWithSize<const void*, kVerifyFlags>(
- EntryPointFromQuickCompiledCodeOffset(pointer_size), pointer_size);
- }
-
- template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- void SetEntryPointFromQuickCompiledCode(const void* entry_point_from_quick_compiled_code)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- CheckObjectSizeEqualsMirrorSize();
- SetEntryPointFromQuickCompiledCodePtrSize(entry_point_from_quick_compiled_code,
- sizeof(void*));
- }
- template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(
- const void* entry_point_from_quick_compiled_code, size_t pointer_size)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- SetFieldPtrWithSize<false, true, kVerifyFlags>(
- EntryPointFromQuickCompiledCodeOffset(pointer_size), entry_point_from_quick_compiled_code,
- pointer_size);
- }
-
- uint32_t GetCodeSize() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- // Check whether the given PC is within the quick compiled code associated with this method's
- // quick entrypoint. This code isn't robust for instrumentation, etc. and is only used for
- // debug purposes.
- bool PcIsWithinQuickCode(uintptr_t pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return PcIsWithinQuickCode(
- reinterpret_cast<uintptr_t>(GetEntryPointFromQuickCompiledCode()), pc);
- }
-
- void AssertPcIsWithinQuickCode(uintptr_t pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- // Returns true if the entrypoint points to the interpreter, as
- // opposed to the compiled code, that is, this method will be
- // interpretered on invocation.
- bool IsEntrypointInterpreter() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- uint32_t GetQuickOatCodeOffset() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void SetQuickOatCodeOffset(uint32_t code_offset) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- ALWAYS_INLINE static const void* EntryPointToCodePointer(const void* entry_point) {
- uintptr_t code = reinterpret_cast<uintptr_t>(entry_point);
- // TODO: Make this Thumb2 specific. It is benign on other architectures as code is always at
- // least 2 byte aligned.
- code &= ~0x1;
- return reinterpret_cast<const void*>(code);
- }
-
- // Actual entry point pointer to compiled oat code or null.
- const void* GetQuickOatEntryPoint(size_t pointer_size)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- // Actual pointer to compiled oat code or null.
- const void* GetQuickOatCodePointer(size_t pointer_size)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return EntryPointToCodePointer(GetQuickOatEntryPoint(pointer_size));
- }
-
- // Callers should wrap the uint8_t* in a MappingTable instance for convenient access.
- const uint8_t* GetMappingTable(size_t pointer_size)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- const uint8_t* GetMappingTable(const void* code_pointer, size_t pointer_size)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- // Callers should wrap the uint8_t* in a VmapTable instance for convenient access.
- const uint8_t* GetVmapTable(size_t pointer_size)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- const uint8_t* GetVmapTable(const void* code_pointer, size_t pointer_size)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- CodeInfo GetOptimizedCodeInfo() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- // Callers should wrap the uint8_t* in a GcMap instance for convenient access.
- const uint8_t* GetNativeGcMap(size_t pointer_size)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- const uint8_t* GetNativeGcMap(const void* code_pointer, size_t pointer_size)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- template <bool kCheckFrameSize = true>
- uint32_t GetFrameSizeInBytes() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- uint32_t result = GetQuickFrameInfo().FrameSizeInBytes();
- if (kCheckFrameSize) {
- DCHECK_LE(static_cast<size_t>(kStackAlignment), result);
- }
- return result;
- }
-
- QuickMethodFrameInfo GetQuickFrameInfo() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- QuickMethodFrameInfo GetQuickFrameInfo(const void* code_pointer)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- FrameOffset GetReturnPcOffset() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return GetReturnPcOffset(GetFrameSizeInBytes());
- }
-
- FrameOffset GetReturnPcOffset(uint32_t frame_size_in_bytes)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- DCHECK_EQ(frame_size_in_bytes, GetFrameSizeInBytes());
- return FrameOffset(frame_size_in_bytes - sizeof(void*));
- }
-
- FrameOffset GetHandleScopeOffset() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- constexpr size_t handle_scope_offset = sizeof(StackReference<mirror::ArtMethod>);
- DCHECK_LT(handle_scope_offset, GetFrameSizeInBytes());
- return FrameOffset(handle_scope_offset);
- }
-
- void RegisterNative(const void* native_method, bool is_fast)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- void UnregisterNative() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- static MemberOffset EntryPointFromInterpreterOffset(size_t pointer_size) {
- return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
- PtrSizedFields, entry_point_from_interpreter_) / sizeof(void*) * pointer_size);
- }
-
- static MemberOffset EntryPointFromJniOffset(size_t pointer_size) {
- return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
- PtrSizedFields, entry_point_from_jni_) / sizeof(void*) * pointer_size);
- }
-
- static MemberOffset EntryPointFromQuickCompiledCodeOffset(size_t pointer_size) {
- return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
- PtrSizedFields, entry_point_from_quick_compiled_code_) / sizeof(void*) * pointer_size);
- }
-
- void* GetEntryPointFromJni() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- CheckObjectSizeEqualsMirrorSize();
- return GetEntryPointFromJniPtrSize(sizeof(void*));
- }
- ALWAYS_INLINE void* GetEntryPointFromJniPtrSize(size_t pointer_size)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return GetFieldPtrWithSize<void*>(EntryPointFromJniOffset(pointer_size), pointer_size);
- }
-
- template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- void SetEntryPointFromJni(const void* entrypoint) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- CheckObjectSizeEqualsMirrorSize();
- SetEntryPointFromJniPtrSize<kVerifyFlags>(entrypoint, sizeof(void*));
- }
- template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- ALWAYS_INLINE void SetEntryPointFromJniPtrSize(const void* entrypoint, size_t pointer_size)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- SetFieldPtrWithSize<false, true, kVerifyFlags>(
- EntryPointFromJniOffset(pointer_size), entrypoint, pointer_size);
- }
-
- // Is this a CalleSaveMethod or ResolutionMethod and therefore doesn't adhere to normal
- // conventions for a method of managed code. Returns false for Proxy methods.
- bool IsRuntimeMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- // Is this a hand crafted method used for something like describing callee saves?
- bool IsCalleeSaveMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- bool IsResolutionMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- bool IsImtConflictMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- bool IsImtUnimplementedMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- uintptr_t NativeQuickPcOffset(const uintptr_t pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-#ifdef NDEBUG
- uintptr_t NativeQuickPcOffset(const uintptr_t pc, const void* quick_entry_point)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return pc - reinterpret_cast<uintptr_t>(quick_entry_point);
- }
-#else
- uintptr_t NativeQuickPcOffset(const uintptr_t pc, const void* quick_entry_point)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-#endif
-
- // Converts a native PC to a dex PC.
- uint32_t ToDexPc(const uintptr_t pc, bool abort_on_failure = true)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- // Converts a dex PC to a native PC.
- uintptr_t ToNativeQuickPc(const uint32_t dex_pc, bool abort_on_failure = true)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- MethodReference ToMethodReference() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return MethodReference(GetDexFile(), GetDexMethodIndex());
- }
-
- // Find the catch block for the given exception type and dex_pc. When a catch block is found,
- // indicates whether the found catch block is responsible for clearing the exception or whether
- // a move-exception instruction is present.
- static uint32_t FindCatchBlock(Handle<ArtMethod> h_this, Handle<Class> exception_type,
- uint32_t dex_pc, bool* has_no_move_exception)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- static void SetClass(Class* java_lang_reflect_ArtMethod);
-
- template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
- static Class* GetJavaLangReflectArtMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- static void ResetClass();
-
- static void VisitRoots(RootVisitor* visitor)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- const DexFile* GetDexFile() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- const char* GetDeclaringClassDescriptor() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- const char* GetShorty() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- uint32_t unused_length;
- return GetShorty(&unused_length);
- }
-
- const char* GetShorty(uint32_t* out_length) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- const Signature GetSignature() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- ALWAYS_INLINE const char* GetName() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- mirror::String* GetNameAsString(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- const DexFile::CodeItem* GetCodeItem() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- bool IsResolvedTypeIdx(uint16_t type_idx) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- int32_t GetLineNumFromDexPC(uint32_t dex_pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- const DexFile::ProtoId& GetPrototype() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- const DexFile::TypeList* GetParameterTypeList() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- const char* GetDeclaringClassSourceFile() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- uint16_t GetClassDefIndex() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- const DexFile::ClassDef& GetClassDef() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- const char* GetReturnTypeDescriptor() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- const char* GetTypeDescriptorFromTypeIdx(uint16_t type_idx)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- // May cause thread suspension due to GetClassFromTypeIdx calling ResolveType this caused a large
- // number of bugs at call sites.
- mirror::Class* GetReturnType(bool resolve = true) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- mirror::ClassLoader* GetClassLoader() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- mirror::DexCache* GetDexCache() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- ALWAYS_INLINE ArtMethod* GetInterfaceMethodIfProxy() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- // May cause thread suspension due to class resolution.
- bool EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- static size_t SizeWithoutPointerFields(size_t pointer_size) {
- size_t total = sizeof(ArtMethod) - sizeof(PtrSizedFields);
-#ifdef ART_METHOD_HAS_PADDING_FIELD_ON_64_BIT
- // Add 4 bytes if 64 bit, otherwise 0.
- total += pointer_size - sizeof(uint32_t);
-#endif
- return total;
- }
-
- // Size of an instance of java.lang.reflect.ArtMethod not including its value array.
- static size_t InstanceSize(size_t pointer_size) {
- return SizeWithoutPointerFields(pointer_size) +
- (sizeof(PtrSizedFields) / sizeof(void*)) * pointer_size;
- }
-
- protected:
- // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
- // The class we are a part of.
- HeapReference<Class> declaring_class_;
-
- // Short cuts to declaring_class_->dex_cache_ member for fast compiled code access.
- HeapReference<ObjectArray<ArtMethod>> dex_cache_resolved_methods_;
-
- // Short cuts to declaring_class_->dex_cache_ member for fast compiled code access.
- HeapReference<ObjectArray<Class>> dex_cache_resolved_types_;
-
- // Access flags; low 16 bits are defined by spec.
- uint32_t access_flags_;
-
- /* Dex file fields. The defining dex file is available via declaring_class_->dex_cache_ */
-
- // Offset to the CodeItem.
- uint32_t dex_code_item_offset_;
-
- // Index into method_ids of the dex file associated with this method.
- uint32_t dex_method_index_;
-
- /* End of dex file fields. */
-
- // Entry within a dispatch table for this method. For static/direct methods the index is into
- // the declaringClass.directMethods, for virtual methods the vtable and for interface methods the
- // ifTable.
- uint32_t method_index_;
-
- // Fake padding field gets inserted here.
-
- // Must be the last fields in the method.
- struct PACKED(4) PtrSizedFields {
- // Method dispatch from the interpreter invokes this pointer which may cause a bridge into
- // compiled code.
- void* entry_point_from_interpreter_;
-
- // Pointer to JNI function registered to this method, or a function to resolve the JNI function.
- void* entry_point_from_jni_;
-
- // Method dispatch from quick compiled code invokes this pointer which may cause bridging into
- // the interpreter.
- void* entry_point_from_quick_compiled_code_;
- } ptr_sized_fields_;
-
- static GcRoot<Class> java_lang_reflect_ArtMethod_;
-
- private:
- ALWAYS_INLINE void CheckObjectSizeEqualsMirrorSize() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- ALWAYS_INLINE ObjectArray<Class>* GetDexCacheResolvedTypes()
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- static size_t PtrSizedFieldsOffset(size_t pointer_size) {
- size_t offset = OFFSETOF_MEMBER(ArtMethod, ptr_sized_fields_);
-#ifdef ART_METHOD_HAS_PADDING_FIELD_ON_64_BIT
- // Add 4 bytes if 64 bit, otherwise 0.
- offset += pointer_size - sizeof(uint32_t);
-#endif
- return offset;
- }
-
- // Code points to the start of the quick code.
- static uint32_t GetCodeSize(const void* code);
-
- static bool PcIsWithinQuickCode(uintptr_t code, uintptr_t pc) {
- if (code == 0) {
- return pc == 0;
- }
- /*
- * During a stack walk, a return PC may point past-the-end of the code
- * in the case that the last instruction is a call that isn't expected to
- * return. Thus, we check <= code + GetCodeSize().
- *
- * NOTE: For Thumb both pc and code are offset by 1 indicating the Thumb state.
- */
- return code <= pc && pc <= code + GetCodeSize(
- EntryPointToCodePointer(reinterpret_cast<const void*>(code)));
- }
-
- friend struct art::ArtMethodOffsets; // for verifying offset information
- DISALLOW_IMPLICIT_CONSTRUCTORS(ArtMethod);
-};
-
-} // namespace mirror
-} // namespace art
-
-#endif // ART_RUNTIME_MIRROR_ART_METHOD_H_
diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h
index 5752a15..835b94a 100644
--- a/runtime/mirror/class-inl.h
+++ b/runtime/mirror/class-inl.h
@@ -20,6 +20,7 @@
#include "class.h"
#include "art_field-inl.h"
+#include "art_method.h"
#include "art_method-inl.h"
#include "class_loader.h"
#include "common_throws.h"
@@ -60,130 +61,157 @@ inline DexCache* Class::GetDexCache() {
return GetFieldObject<DexCache, kVerifyFlags>(OFFSET_OF_OBJECT_MEMBER(Class, dex_cache_));
}
-inline ObjectArray<ArtMethod>* Class::GetDirectMethods() {
+inline ArtMethod* Class::GetDirectMethodsPtr() {
DCHECK(IsLoaded() || IsErroneous());
- return GetFieldObject<ObjectArray<ArtMethod>>(OFFSET_OF_OBJECT_MEMBER(Class, direct_methods_));
+ return GetDirectMethodsPtrUnchecked();
}
-inline void Class::SetDirectMethods(ObjectArray<ArtMethod>* new_direct_methods)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- DCHECK(nullptr == GetFieldObject<ObjectArray<ArtMethod>>(
- OFFSET_OF_OBJECT_MEMBER(Class, direct_methods_)));
- DCHECK_NE(0, new_direct_methods->GetLength());
- SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(Class, direct_methods_), new_direct_methods);
+inline ArtMethod* Class::GetDirectMethodsPtrUnchecked() {
+ return reinterpret_cast<ArtMethod*>(GetField64(OFFSET_OF_OBJECT_MEMBER(Class, direct_methods_)));
}
-inline ArtMethod* Class::GetDirectMethod(int32_t i) {
- return GetDirectMethods()->Get(i);
+inline ArtMethod* Class::GetVirtualMethodsPtrUnchecked() {
+ return reinterpret_cast<ArtMethod*>(GetField64(OFFSET_OF_OBJECT_MEMBER(Class, virtual_methods_)));
}
-inline void Class::SetDirectMethod(uint32_t i, ArtMethod* f) // TODO: uint16_t
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- ObjectArray<ArtMethod>* direct_methods =
- GetFieldObject<ObjectArray<ArtMethod>>(OFFSET_OF_OBJECT_MEMBER(Class, direct_methods_));
- direct_methods->Set<false>(i, f);
+inline void Class::SetDirectMethodsPtr(ArtMethod* new_direct_methods) {
+ DCHECK(GetDirectMethodsPtrUnchecked() == nullptr);
+ SetDirectMethodsPtrUnchecked(new_direct_methods);
}
-// Returns the number of static, private, and constructor methods.
-inline uint32_t Class::NumDirectMethods() {
- return (GetDirectMethods() != nullptr) ? GetDirectMethods()->GetLength() : 0;
+inline void Class::SetDirectMethodsPtrUnchecked(ArtMethod* new_direct_methods) {
+ SetField64<false>(OFFSET_OF_OBJECT_MEMBER(Class, direct_methods_),
+ reinterpret_cast<uint64_t>(new_direct_methods));
+}
+
+inline ArtMethod* Class::GetDirectMethodUnchecked(size_t i, size_t pointer_size) {
+ CheckPointerSize(pointer_size);
+ auto* methods = GetDirectMethodsPtrUnchecked();
+ DCHECK(methods != nullptr);
+ return reinterpret_cast<ArtMethod*>(reinterpret_cast<uintptr_t>(methods) +
+ ArtMethod::ObjectSize(pointer_size) * i);
+}
+
+inline ArtMethod* Class::GetDirectMethod(size_t i, size_t pointer_size) {
+ CheckPointerSize(pointer_size);
+ auto* methods = GetDirectMethodsPtr();
+ DCHECK(methods != nullptr);
+ return reinterpret_cast<ArtMethod*>(reinterpret_cast<uintptr_t>(methods) +
+ ArtMethod::ObjectSize(pointer_size) * i);
}
template<VerifyObjectFlags kVerifyFlags>
-inline ObjectArray<ArtMethod>* Class::GetVirtualMethods() {
- DCHECK(IsLoaded() || IsErroneous());
- return GetFieldObject<ObjectArray<ArtMethod>>(OFFSET_OF_OBJECT_MEMBER(Class, virtual_methods_));
+inline ArtMethod* Class::GetVirtualMethodsPtr() {
+ DCHECK(IsLoaded<kVerifyFlags>() || IsErroneous<kVerifyFlags>());
+ return GetVirtualMethodsPtrUnchecked();
}
-inline void Class::SetVirtualMethods(ObjectArray<ArtMethod>* new_virtual_methods) {
+inline void Class::SetVirtualMethodsPtr(ArtMethod* new_virtual_methods) {
// TODO: we reassign virtual methods to grow the table for miranda
// methods.. they should really just be assigned once.
- DCHECK_NE(0, new_virtual_methods->GetLength());
- SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(Class, virtual_methods_), new_virtual_methods);
-}
-
-inline uint32_t Class::NumVirtualMethods() {
- return (GetVirtualMethods() != nullptr) ? GetVirtualMethods()->GetLength() : 0;
+ SetField64<false>(OFFSET_OF_OBJECT_MEMBER(Class, virtual_methods_),
+ reinterpret_cast<uint64_t>(new_virtual_methods));
}
template<VerifyObjectFlags kVerifyFlags>
-inline ArtMethod* Class::GetVirtualMethod(uint32_t i) {
+inline ArtMethod* Class::GetVirtualMethod(size_t i, size_t pointer_size) {
+ CheckPointerSize(pointer_size);
DCHECK(IsResolved<kVerifyFlags>() || IsErroneous<kVerifyFlags>())
<< PrettyClass(this) << " status=" << GetStatus();
- return GetVirtualMethods()->GetWithoutChecks(i);
+ return GetVirtualMethodUnchecked(i, pointer_size);
}
-inline ArtMethod* Class::GetVirtualMethodDuringLinking(uint32_t i) {
+inline ArtMethod* Class::GetVirtualMethodDuringLinking(size_t i, size_t pointer_size) {
+ CheckPointerSize(pointer_size);
DCHECK(IsLoaded() || IsErroneous());
- return GetVirtualMethods()->GetWithoutChecks(i);
+ return GetVirtualMethodUnchecked(i, pointer_size);
}
-inline void Class::SetVirtualMethod(uint32_t i, ArtMethod* f) // TODO: uint16_t
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- ObjectArray<ArtMethod>* virtual_methods =
- GetFieldObject<ObjectArray<ArtMethod>>(OFFSET_OF_OBJECT_MEMBER(Class, virtual_methods_));
- virtual_methods->SetWithoutChecks<false>(i, f);
+inline ArtMethod* Class::GetVirtualMethodUnchecked(size_t i, size_t pointer_size) {
+ CheckPointerSize(pointer_size);
+ auto* methods = GetVirtualMethodsPtrUnchecked();
+ DCHECK(methods != nullptr);
+ return reinterpret_cast<ArtMethod*>(reinterpret_cast<uintptr_t>(methods) +
+ ArtMethod::ObjectSize(pointer_size) * i);
}
-inline ObjectArray<ArtMethod>* Class::GetVTable() {
+inline PointerArray* Class::GetVTable() {
DCHECK(IsResolved() || IsErroneous());
- return GetFieldObject<ObjectArray<ArtMethod>>(OFFSET_OF_OBJECT_MEMBER(Class, vtable_));
+ return GetFieldObject<PointerArray>(OFFSET_OF_OBJECT_MEMBER(Class, vtable_));
}
-inline ObjectArray<ArtMethod>* Class::GetVTableDuringLinking() {
+inline PointerArray* Class::GetVTableDuringLinking() {
DCHECK(IsLoaded() || IsErroneous());
- return GetFieldObject<ObjectArray<ArtMethod>>(OFFSET_OF_OBJECT_MEMBER(Class, vtable_));
+ return GetFieldObject<PointerArray>(OFFSET_OF_OBJECT_MEMBER(Class, vtable_));
}
-inline void Class::SetVTable(ObjectArray<ArtMethod>* new_vtable) {
+inline void Class::SetVTable(PointerArray* new_vtable) {
SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(Class, vtable_), new_vtable);
}
-inline ArtMethod* Class::GetEmbeddedImTableEntry(uint32_t i) {
- uint32_t offset = EmbeddedImTableOffset().Uint32Value() + i * sizeof(ImTableEntry);
- return GetFieldObject<mirror::ArtMethod>(MemberOffset(offset));
+inline MemberOffset Class::EmbeddedImTableEntryOffset(uint32_t i, size_t pointer_size) {
+ DCHECK_LT(i, kImtSize);
+ return MemberOffset(
+ EmbeddedImTableOffset(pointer_size).Uint32Value() + i * ImTableEntrySize(pointer_size));
+}
+
+inline ArtMethod* Class::GetEmbeddedImTableEntry(uint32_t i, size_t pointer_size) {
+ DCHECK(ShouldHaveEmbeddedImtAndVTable());
+ return GetFieldPtrWithSize<ArtMethod*>(
+ EmbeddedImTableEntryOffset(i, pointer_size), pointer_size);
}
-inline void Class::SetEmbeddedImTableEntry(uint32_t i, ArtMethod* method) {
- uint32_t offset = EmbeddedImTableOffset().Uint32Value() + i * sizeof(ImTableEntry);
- SetFieldObject<false>(MemberOffset(offset), method);
+inline void Class::SetEmbeddedImTableEntry(uint32_t i, ArtMethod* method, size_t pointer_size) {
+ DCHECK(ShouldHaveEmbeddedImtAndVTable());
+ SetFieldPtrWithSize<false>(EmbeddedImTableEntryOffset(i, pointer_size), method, pointer_size);
}
inline bool Class::HasVTable() {
- return (GetVTable() != nullptr) || ShouldHaveEmbeddedImtAndVTable();
+ return GetVTable() != nullptr || ShouldHaveEmbeddedImtAndVTable();
}
inline int32_t Class::GetVTableLength() {
if (ShouldHaveEmbeddedImtAndVTable()) {
return GetEmbeddedVTableLength();
}
- return (GetVTable() != nullptr) ? GetVTable()->GetLength() : 0;
+ return GetVTable() != nullptr ? GetVTable()->GetLength() : 0;
}
-inline ArtMethod* Class::GetVTableEntry(uint32_t i) {
+inline ArtMethod* Class::GetVTableEntry(uint32_t i, size_t pointer_size) {
if (ShouldHaveEmbeddedImtAndVTable()) {
- return GetEmbeddedVTableEntry(i);
+ return GetEmbeddedVTableEntry(i, pointer_size);
}
- return (GetVTable() != nullptr) ? GetVTable()->Get(i) : nullptr;
+ auto* vtable = GetVTable();
+ DCHECK(vtable != nullptr);
+ return vtable->GetElementPtrSize<ArtMethod*>(i, pointer_size);
}
inline int32_t Class::GetEmbeddedVTableLength() {
- return GetField32(EmbeddedVTableLengthOffset());
+ return GetField32(MemberOffset(EmbeddedVTableLengthOffset()));
}
inline void Class::SetEmbeddedVTableLength(int32_t len) {
- SetField32<false>(EmbeddedVTableLengthOffset(), len);
+ SetField32<false>(MemberOffset(EmbeddedVTableLengthOffset()), len);
}
-inline ArtMethod* Class::GetEmbeddedVTableEntry(uint32_t i) {
- uint32_t offset = EmbeddedVTableOffset().Uint32Value() + i * sizeof(VTableEntry);
- return GetFieldObject<mirror::ArtMethod>(MemberOffset(offset));
+inline MemberOffset Class::EmbeddedVTableEntryOffset(uint32_t i, size_t pointer_size) {
+ return MemberOffset(
+ EmbeddedVTableOffset(pointer_size).Uint32Value() + i * VTableEntrySize(pointer_size));
}
-inline void Class::SetEmbeddedVTableEntry(uint32_t i, ArtMethod* method) {
- uint32_t offset = EmbeddedVTableOffset().Uint32Value() + i * sizeof(VTableEntry);
- SetFieldObject<false>(MemberOffset(offset), method);
- CHECK(method == GetVTableDuringLinking()->Get(i));
+inline ArtMethod* Class::GetEmbeddedVTableEntry(uint32_t i, size_t pointer_size) {
+ return GetFieldPtrWithSize<ArtMethod*>(EmbeddedVTableEntryOffset(i, pointer_size), pointer_size);
+}
+
+inline void Class::SetEmbeddedVTableEntryUnchecked(
+ uint32_t i, ArtMethod* method, size_t pointer_size) {
+ SetFieldPtrWithSize<false>(EmbeddedVTableEntryOffset(i, pointer_size), method, pointer_size);
+}
+
+inline void Class::SetEmbeddedVTableEntry(uint32_t i, ArtMethod* method, size_t pointer_size) {
+ auto* vtable = GetVTableDuringLinking();
+ CHECK_EQ(method, vtable->GetElementPtrSize<ArtMethod*>(i, pointer_size));
+ SetEmbeddedVTableEntryUnchecked(i, method, pointer_size);
}
inline bool Class::Implements(Class* klass) {
@@ -340,41 +368,43 @@ inline bool Class::IsSubClass(Class* klass) {
return false;
}
-inline ArtMethod* Class::FindVirtualMethodForInterface(ArtMethod* method) {
+inline ArtMethod* Class::FindVirtualMethodForInterface(ArtMethod* method, size_t pointer_size) {
Class* declaring_class = method->GetDeclaringClass();
DCHECK(declaring_class != nullptr) << PrettyClass(this);
DCHECK(declaring_class->IsInterface()) << PrettyMethod(method);
// TODO cache to improve lookup speed
- int32_t iftable_count = GetIfTableCount();
+ const int32_t iftable_count = GetIfTableCount();
IfTable* iftable = GetIfTable();
for (int32_t i = 0; i < iftable_count; i++) {
if (iftable->GetInterface(i) == declaring_class) {
- return iftable->GetMethodArray(i)->Get(method->GetMethodIndex());
+ return iftable->GetMethodArray(i)->GetElementPtrSize<ArtMethod*>(
+ method->GetMethodIndex(), pointer_size);
}
}
return nullptr;
}
-inline ArtMethod* Class::FindVirtualMethodForVirtual(ArtMethod* method) {
+inline ArtMethod* Class::FindVirtualMethodForVirtual(ArtMethod* method, size_t pointer_size) {
DCHECK(!method->GetDeclaringClass()->IsInterface() || method->IsMiranda());
// The argument method may from a super class.
// Use the index to a potentially overridden one for this instance's class.
- return GetVTableEntry(method->GetMethodIndex());
+ return GetVTableEntry(method->GetMethodIndex(), pointer_size);
}
-inline ArtMethod* Class::FindVirtualMethodForSuper(ArtMethod* method) {
+inline ArtMethod* Class::FindVirtualMethodForSuper(ArtMethod* method, size_t pointer_size) {
DCHECK(!method->GetDeclaringClass()->IsInterface());
- return GetSuperClass()->GetVTableEntry(method->GetMethodIndex());
+ return GetSuperClass()->GetVTableEntry(method->GetMethodIndex(), pointer_size);
}
-inline ArtMethod* Class::FindVirtualMethodForVirtualOrInterface(ArtMethod* method) {
+inline ArtMethod* Class::FindVirtualMethodForVirtualOrInterface(ArtMethod* method,
+ size_t pointer_size) {
if (method->IsDirect()) {
return method;
}
if (method->GetDeclaringClass()->IsInterface() && !method->IsMiranda()) {
- return FindVirtualMethodForInterface(method);
+ return FindVirtualMethodForInterface(method, pointer_size);
}
- return FindVirtualMethodForVirtual(method);
+ return FindVirtualMethodForVirtual(method, pointer_size);
}
inline IfTable* Class::GetIfTable() {
@@ -406,24 +436,24 @@ inline MemberOffset Class::GetFirstReferenceInstanceFieldOffset() {
: ClassOffset();
}
-inline MemberOffset Class::GetFirstReferenceStaticFieldOffset() {
+inline MemberOffset Class::GetFirstReferenceStaticFieldOffset(size_t pointer_size) {
DCHECK(IsResolved());
uint32_t base = sizeof(mirror::Class); // Static fields come after the class.
if (ShouldHaveEmbeddedImtAndVTable()) {
// Static fields come after the embedded tables.
- base = mirror::Class::ComputeClassSize(true, GetEmbeddedVTableLength(),
- 0, 0, 0, 0, 0);
+ base = mirror::Class::ComputeClassSize(
+ true, GetEmbeddedVTableLength(), 0, 0, 0, 0, 0, pointer_size);
}
return MemberOffset(base);
}
-inline MemberOffset Class::GetFirstReferenceStaticFieldOffsetDuringLinking() {
+inline MemberOffset Class::GetFirstReferenceStaticFieldOffsetDuringLinking(size_t pointer_size) {
DCHECK(IsLoaded());
uint32_t base = sizeof(mirror::Class); // Static fields come after the class.
if (ShouldHaveEmbeddedImtAndVTable()) {
// Static fields come after the embedded tables.
base = mirror::Class::ComputeClassSize(true, GetVTableDuringLinking()->GetLength(),
- 0, 0, 0, 0, 0);
+ 0, 0, 0, 0, 0, pointer_size);
}
return MemberOffset(base);
}
@@ -499,14 +529,12 @@ inline uint32_t Class::GetAccessFlags() {
// circularity issue during loading the names of its members
DCHECK(IsIdxLoaded<kVerifyFlags>() || IsRetired<kVerifyFlags>() ||
IsErroneous<static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis)>() ||
- this == String::GetJavaLangString() ||
- this == ArtMethod::GetJavaLangReflectArtMethod())
+ this == String::GetJavaLangString())
<< "IsIdxLoaded=" << IsIdxLoaded<kVerifyFlags>()
<< " IsRetired=" << IsRetired<kVerifyFlags>()
<< " IsErroneous=" <<
IsErroneous<static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis)>()
<< " IsString=" << (this == String::GetJavaLangString())
- << " IsArtMethod=" << (this == ArtMethod::GetJavaLangReflectArtMethod())
<< " descriptor=" << PrettyDescriptor(this);
return GetField32<kVerifyFlags>(AccessFlagsOffset());
}
@@ -594,20 +622,20 @@ inline uint32_t Class::ComputeClassSize(bool has_embedded_tables,
uint32_t num_16bit_static_fields,
uint32_t num_32bit_static_fields,
uint32_t num_64bit_static_fields,
- uint32_t num_ref_static_fields) {
+ uint32_t num_ref_static_fields,
+ size_t pointer_size) {
// Space used by java.lang.Class and its instance fields.
uint32_t size = sizeof(Class);
// Space used by embedded tables.
if (has_embedded_tables) {
- uint32_t embedded_imt_size = kImtSize * sizeof(ImTableEntry);
- uint32_t embedded_vtable_size = num_vtable_entries * sizeof(VTableEntry);
- size += embedded_imt_size +
- sizeof(int32_t) /* vtable len */ +
- embedded_vtable_size;
+ const uint32_t embedded_imt_size = kImtSize * ImTableEntrySize(pointer_size);
+ const uint32_t embedded_vtable_size = num_vtable_entries * VTableEntrySize(pointer_size);
+ size = RoundUp(size + sizeof(uint32_t) /* embedded vtable len */, pointer_size) +
+ embedded_imt_size + embedded_vtable_size;
}
// Space used by reference statics.
- size += num_ref_static_fields * sizeof(HeapReference<Object>);
+ size += num_ref_static_fields * sizeof(HeapReference<Object>);
if (!IsAligned<8>(size) && num_64bit_static_fields > 0) {
uint32_t gap = 8 - (size & 0x7);
size += gap; // will be padded
@@ -629,10 +657,8 @@ inline uint32_t Class::ComputeClassSize(bool has_embedded_tables,
}
// Guaranteed to be at least 4 byte aligned. No need for further alignments.
// Space used for primitive static fields.
- size += (num_8bit_static_fields * sizeof(uint8_t)) +
- (num_16bit_static_fields * sizeof(uint16_t)) +
- (num_32bit_static_fields * sizeof(uint32_t)) +
- (num_64bit_static_fields * sizeof(uint64_t));
+ size += num_8bit_static_fields * sizeof(uint8_t) + num_16bit_static_fields * sizeof(uint16_t) +
+ num_32bit_static_fields * sizeof(uint32_t) + num_64bit_static_fields * sizeof(uint64_t);
return size;
}
@@ -651,40 +677,10 @@ inline void Class::VisitReferences(mirror::Class* klass, const Visitor& visitor)
// allocated with the right size for those. Also, unresolved classes don't have fields
// linked yet.
VisitStaticFieldsReferences<kVisitClass>(this, visitor);
- if (ShouldHaveEmbeddedImtAndVTable()) {
- VisitEmbeddedImtAndVTable(visitor);
- }
- }
-}
-
-template<typename Visitor>
-inline void Class::VisitEmbeddedImtAndVTable(const Visitor& visitor) {
- uint32_t pos = sizeof(mirror::Class);
-
- size_t count = kImtSize;
- for (size_t i = 0; i < count; ++i) {
- MemberOffset offset = MemberOffset(pos);
- visitor(this, offset, true);
- pos += sizeof(ImTableEntry);
- }
-
- // Skip vtable length.
- pos += sizeof(int32_t);
-
- count = GetEmbeddedVTableLength();
- for (size_t i = 0; i < count; ++i) {
- MemberOffset offset = MemberOffset(pos);
- visitor(this, offset, true);
- pos += sizeof(VTableEntry);
}
}
template<ReadBarrierOption kReadBarrierOption>
-inline bool Class::IsArtMethodClass() const {
- return this == ArtMethod::GetJavaLangReflectArtMethod<kReadBarrierOption>();
-}
-
-template<ReadBarrierOption kReadBarrierOption>
inline bool Class::IsReferenceClass() const {
return this == Reference::GetJavaLangRefReference<kReadBarrierOption>();
}
@@ -812,27 +808,92 @@ inline ObjectArray<String>* Class::GetDexCacheStrings() {
}
template<class Visitor>
-void mirror::Class::VisitFieldRoots(Visitor& visitor) {
+void mirror::Class::VisitNativeRoots(Visitor& visitor, size_t pointer_size) {
ArtField* const sfields = GetSFieldsUnchecked();
// Since we visit class roots while we may be writing these fields, check against null.
- // TODO: Is this safe for concurrent compaction?
if (sfields != nullptr) {
for (size_t i = 0, count = NumStaticFields(); i < count; ++i) {
+ auto* f = &sfields[i];
if (kIsDebugBuild && IsResolved()) {
- CHECK_EQ(sfields[i].GetDeclaringClass(), this) << GetStatus();
+ CHECK_EQ(f->GetDeclaringClass(), this) << GetStatus();
}
- visitor.VisitRoot(sfields[i].DeclaringClassRoot().AddressWithoutBarrier());
+ f->VisitRoots(visitor);
}
}
ArtField* const ifields = GetIFieldsUnchecked();
if (ifields != nullptr) {
for (size_t i = 0, count = NumInstanceFields(); i < count; ++i) {
+ auto* f = &ifields[i];
if (kIsDebugBuild && IsResolved()) {
- CHECK_EQ(ifields[i].GetDeclaringClass(), this) << GetStatus();
+ CHECK_EQ(f->GetDeclaringClass(), this) << GetStatus();
}
- visitor.VisitRoot(ifields[i].DeclaringClassRoot().AddressWithoutBarrier());
+ f->VisitRoots(visitor);
}
}
+ for (auto& m : GetDirectMethods(pointer_size)) {
+ m.VisitRoots(visitor);
+ }
+ for (auto& m : GetVirtualMethods(pointer_size)) {
+ m.VisitRoots(visitor);
+ }
+}
+
+inline StrideIterator<ArtMethod> Class::DirectMethodsBegin(size_t pointer_size) {
+ CheckPointerSize(pointer_size);
+ auto* methods = GetDirectMethodsPtrUnchecked();
+ auto stride = ArtMethod::ObjectSize(pointer_size);
+ return StrideIterator<ArtMethod>(reinterpret_cast<uintptr_t>(methods), stride);
+}
+
+inline StrideIterator<ArtMethod> Class::DirectMethodsEnd(size_t pointer_size) {
+ CheckPointerSize(pointer_size);
+ auto* methods = GetDirectMethodsPtrUnchecked();
+ auto stride = ArtMethod::ObjectSize(pointer_size);
+ auto count = NumDirectMethods();
+ return StrideIterator<ArtMethod>(reinterpret_cast<uintptr_t>(methods) + stride * count, stride);
+}
+
+inline IterationRange<StrideIterator<ArtMethod>> Class::GetDirectMethods(size_t pointer_size) {
+ CheckPointerSize(pointer_size);
+ return MakeIterationRange(DirectMethodsBegin(pointer_size), DirectMethodsEnd(pointer_size));
+}
+
+inline StrideIterator<ArtMethod> Class::VirtualMethodsBegin(size_t pointer_size) {
+ CheckPointerSize(pointer_size);
+ auto* methods = GetVirtualMethodsPtrUnchecked();
+ auto stride = ArtMethod::ObjectSize(pointer_size);
+ return StrideIterator<ArtMethod>(reinterpret_cast<uintptr_t>(methods), stride);
+}
+
+inline StrideIterator<ArtMethod> Class::VirtualMethodsEnd(size_t pointer_size) {
+ CheckPointerSize(pointer_size);
+ auto* methods = GetVirtualMethodsPtrUnchecked();
+ auto stride = ArtMethod::ObjectSize(pointer_size);
+ auto count = NumVirtualMethods();
+ return StrideIterator<ArtMethod>(reinterpret_cast<uintptr_t>(methods) + stride * count, stride);
+}
+
+inline IterationRange<StrideIterator<ArtMethod>> Class::GetVirtualMethods(size_t pointer_size) {
+ return MakeIterationRange(VirtualMethodsBegin(pointer_size), VirtualMethodsEnd(pointer_size));
+}
+
+inline MemberOffset Class::EmbeddedImTableOffset(size_t pointer_size) {
+ CheckPointerSize(pointer_size);
+ // Round up since we want the embedded imt and vtable to be pointer size aligned in case 64 bits.
+ // Add 32 bits for embedded vtable length.
+ return MemberOffset(
+ RoundUp(EmbeddedVTableLengthOffset().Uint32Value() + sizeof(uint32_t), pointer_size));
+}
+
+inline MemberOffset Class::EmbeddedVTableOffset(size_t pointer_size) {
+ CheckPointerSize(pointer_size);
+ return MemberOffset(EmbeddedImTableOffset(pointer_size).Uint32Value() +
+ kImtSize * ImTableEntrySize(pointer_size));
+}
+
+inline void Class::CheckPointerSize(size_t pointer_size) {
+ DCHECK(ValidPointerSize(pointer_size)) << pointer_size;
+ DCHECK_EQ(pointer_size, Runtime::Current()->GetClassLinker()->GetImagePointerSize());
}
} // namespace mirror
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index 56c586a..f0b7bfd 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -145,9 +145,10 @@ void Class::SetDexCache(DexCache* new_dex_cache) {
}
void Class::SetClassSize(uint32_t new_class_size) {
- if (kIsDebugBuild && (new_class_size < GetClassSize())) {
- DumpClass(LOG(ERROR), kDumpClassFullDetail);
- CHECK_GE(new_class_size, GetClassSize()) << " class=" << PrettyTypeOf(this);
+ if (kIsDebugBuild && new_class_size < GetClassSize()) {
+ DumpClass(LOG(INTERNAL_FATAL), kDumpClassFullDetail);
+ LOG(INTERNAL_FATAL) << new_class_size << " vs " << GetClassSize();
+ LOG(FATAL) << " class=" << PrettyTypeOf(this);
}
// Not called within a transaction.
SetField32<false>(OFFSET_OF_OBJECT_MEMBER(Class, class_size_), new_class_size);
@@ -205,10 +206,11 @@ void Class::DumpClass(std::ostream& os, int flags) {
return;
}
- Thread* self = Thread::Current();
+ Thread* const self = Thread::Current();
StackHandleScope<2> hs(self);
Handle<mirror::Class> h_this(hs.NewHandle(this));
Handle<mirror::Class> h_super(hs.NewHandle(GetSuperClass()));
+ auto image_pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
std::string temp;
os << "----- " << (IsInterface() ? "interface" : "class") << " "
@@ -244,12 +246,13 @@ void Class::DumpClass(std::ostream& os, int flags) {
os << " vtable (" << h_this->NumVirtualMethods() << " entries, "
<< (h_super.Get() != nullptr ? h_super->NumVirtualMethods() : 0) << " in super):\n";
for (size_t i = 0; i < NumVirtualMethods(); ++i) {
- os << StringPrintf(" %2zd: %s\n", i,
- PrettyMethod(h_this->GetVirtualMethodDuringLinking(i)).c_str());
+ os << StringPrintf(" %2zd: %s\n", i, PrettyMethod(
+ h_this->GetVirtualMethodDuringLinking(i, image_pointer_size)).c_str());
}
os << " direct methods (" << h_this->NumDirectMethods() << " entries):\n";
for (size_t i = 0; i < h_this->NumDirectMethods(); ++i) {
- os << StringPrintf(" %2zd: %s\n", i, PrettyMethod(h_this->GetDirectMethod(i)).c_str());
+ os << StringPrintf(" %2zd: %s\n", i, PrettyMethod(
+ h_this->GetDirectMethod(i, image_pointer_size)).c_str());
}
if (h_this->NumStaticFields() > 0) {
os << " static fields (" << h_this->NumStaticFields() << " entries):\n";
@@ -275,7 +278,7 @@ void Class::DumpClass(std::ostream& os, int flags) {
}
void Class::SetReferenceInstanceOffsets(uint32_t new_reference_offsets) {
- if (kIsDebugBuild && (new_reference_offsets != kClassWalkSuper)) {
+ if (kIsDebugBuild && new_reference_offsets != kClassWalkSuper) {
// Sanity check that the number of bits set in the reference offset bitmap
// agrees with the number of references
uint32_t count = 0;
@@ -342,9 +345,10 @@ void Class::SetClassLoader(ClassLoader* new_class_loader) {
}
}
-ArtMethod* Class::FindInterfaceMethod(const StringPiece& name, const StringPiece& signature) {
+ArtMethod* Class::FindInterfaceMethod(const StringPiece& name, const StringPiece& signature,
+ size_t pointer_size) {
// Check the current class before checking the interfaces.
- ArtMethod* method = FindDeclaredVirtualMethod(name, signature);
+ ArtMethod* method = FindDeclaredVirtualMethod(name, signature, pointer_size);
if (method != nullptr) {
return method;
}
@@ -352,7 +356,7 @@ ArtMethod* Class::FindInterfaceMethod(const StringPiece& name, const StringPiece
int32_t iftable_count = GetIfTableCount();
IfTable* iftable = GetIfTable();
for (int32_t i = 0; i < iftable_count; ++i) {
- method = iftable->GetInterface(i)->FindDeclaredVirtualMethod(name, signature);
+ method = iftable->GetInterface(i)->FindDeclaredVirtualMethod(name, signature, pointer_size);
if (method != nullptr) {
return method;
}
@@ -360,9 +364,10 @@ ArtMethod* Class::FindInterfaceMethod(const StringPiece& name, const StringPiece
return nullptr;
}
-ArtMethod* Class::FindInterfaceMethod(const StringPiece& name, const Signature& signature) {
+ArtMethod* Class::FindInterfaceMethod(const StringPiece& name, const Signature& signature,
+ size_t pointer_size) {
// Check the current class before checking the interfaces.
- ArtMethod* method = FindDeclaredVirtualMethod(name, signature);
+ ArtMethod* method = FindDeclaredVirtualMethod(name, signature, pointer_size);
if (method != nullptr) {
return method;
}
@@ -370,7 +375,7 @@ ArtMethod* Class::FindInterfaceMethod(const StringPiece& name, const Signature&
int32_t iftable_count = GetIfTableCount();
IfTable* iftable = GetIfTable();
for (int32_t i = 0; i < iftable_count; ++i) {
- method = iftable->GetInterface(i)->FindDeclaredVirtualMethod(name, signature);
+ method = iftable->GetInterface(i)->FindDeclaredVirtualMethod(name, signature, pointer_size);
if (method != nullptr) {
return method;
}
@@ -378,9 +383,10 @@ ArtMethod* Class::FindInterfaceMethod(const StringPiece& name, const Signature&
return nullptr;
}
-ArtMethod* Class::FindInterfaceMethod(const DexCache* dex_cache, uint32_t dex_method_idx) {
+ArtMethod* Class::FindInterfaceMethod(const DexCache* dex_cache, uint32_t dex_method_idx,
+ size_t pointer_size) {
// Check the current class before checking the interfaces.
- ArtMethod* method = FindDeclaredVirtualMethod(dex_cache, dex_method_idx);
+ ArtMethod* method = FindDeclaredVirtualMethod(dex_cache, dex_method_idx, pointer_size);
if (method != nullptr) {
return method;
}
@@ -388,7 +394,8 @@ ArtMethod* Class::FindInterfaceMethod(const DexCache* dex_cache, uint32_t dex_me
int32_t iftable_count = GetIfTableCount();
IfTable* iftable = GetIfTable();
for (int32_t i = 0; i < iftable_count; ++i) {
- method = iftable->GetInterface(i)->FindDeclaredVirtualMethod(dex_cache, dex_method_idx);
+ method = iftable->GetInterface(i)->FindDeclaredVirtualMethod(
+ dex_cache, dex_method_idx, pointer_size);
if (method != nullptr) {
return method;
}
@@ -396,41 +403,42 @@ ArtMethod* Class::FindInterfaceMethod(const DexCache* dex_cache, uint32_t dex_me
return nullptr;
}
-ArtMethod* Class::FindDeclaredDirectMethod(const StringPiece& name, const StringPiece& signature) {
- for (size_t i = 0; i < NumDirectMethods(); ++i) {
- ArtMethod* method = GetDirectMethod(i);
- if (name == method->GetName() && method->GetSignature() == signature) {
- return method;
+ArtMethod* Class::FindDeclaredDirectMethod(const StringPiece& name, const StringPiece& signature,
+ size_t pointer_size) {
+ for (auto& method : GetDirectMethods(pointer_size)) {
+ if (name == method.GetName() && method.GetSignature() == signature) {
+ return &method;
}
}
return nullptr;
}
-ArtMethod* Class::FindDeclaredDirectMethod(const StringPiece& name, const Signature& signature) {
- for (size_t i = 0; i < NumDirectMethods(); ++i) {
- ArtMethod* method = GetDirectMethod(i);
- if (name == method->GetName() && signature == method->GetSignature()) {
- return method;
+ArtMethod* Class::FindDeclaredDirectMethod(const StringPiece& name, const Signature& signature,
+ size_t pointer_size) {
+ for (auto& method : GetDirectMethods(pointer_size)) {
+ if (name == method.GetName() && signature == method.GetSignature()) {
+ return &method;
}
}
return nullptr;
}
-ArtMethod* Class::FindDeclaredDirectMethod(const DexCache* dex_cache, uint32_t dex_method_idx) {
+ArtMethod* Class::FindDeclaredDirectMethod(const DexCache* dex_cache, uint32_t dex_method_idx,
+ size_t pointer_size) {
if (GetDexCache() == dex_cache) {
- for (size_t i = 0; i < NumDirectMethods(); ++i) {
- ArtMethod* method = GetDirectMethod(i);
- if (method->GetDexMethodIndex() == dex_method_idx) {
- return method;
+ for (auto& method : GetDirectMethods(pointer_size)) {
+ if (method.GetDexMethodIndex() == dex_method_idx) {
+ return &method;
}
}
}
return nullptr;
}
-ArtMethod* Class::FindDirectMethod(const StringPiece& name, const StringPiece& signature) {
+ArtMethod* Class::FindDirectMethod(const StringPiece& name, const StringPiece& signature,
+ size_t pointer_size) {
for (Class* klass = this; klass != nullptr; klass = klass->GetSuperClass()) {
- ArtMethod* method = klass->FindDeclaredDirectMethod(name, signature);
+ ArtMethod* method = klass->FindDeclaredDirectMethod(name, signature, pointer_size);
if (method != nullptr) {
return method;
}
@@ -438,9 +446,10 @@ ArtMethod* Class::FindDirectMethod(const StringPiece& name, const StringPiece& s
return nullptr;
}
-ArtMethod* Class::FindDirectMethod(const StringPiece& name, const Signature& signature) {
+ArtMethod* Class::FindDirectMethod(const StringPiece& name, const Signature& signature,
+ size_t pointer_size) {
for (Class* klass = this; klass != nullptr; klass = klass->GetSuperClass()) {
- ArtMethod* method = klass->FindDeclaredDirectMethod(name, signature);
+ ArtMethod* method = klass->FindDeclaredDirectMethod(name, signature, pointer_size);
if (method != nullptr) {
return method;
}
@@ -448,9 +457,10 @@ ArtMethod* Class::FindDirectMethod(const StringPiece& name, const Signature& sig
return nullptr;
}
-ArtMethod* Class::FindDirectMethod(const DexCache* dex_cache, uint32_t dex_method_idx) {
+ArtMethod* Class::FindDirectMethod(
+ const DexCache* dex_cache, uint32_t dex_method_idx, size_t pointer_size) {
for (Class* klass = this; klass != nullptr; klass = klass->GetSuperClass()) {
- ArtMethod* method = klass->FindDeclaredDirectMethod(dex_cache, dex_method_idx);
+ ArtMethod* method = klass->FindDeclaredDirectMethod(dex_cache, dex_method_idx, pointer_size);
if (method != nullptr) {
return method;
}
@@ -458,44 +468,44 @@ ArtMethod* Class::FindDirectMethod(const DexCache* dex_cache, uint32_t dex_metho
return nullptr;
}
-ArtMethod* Class::FindDeclaredVirtualMethod(const StringPiece& name, const StringPiece& signature) {
- for (size_t i = 0; i < NumVirtualMethods(); ++i) {
- ArtMethod* method = GetVirtualMethod(i);
- if (name == method->GetName() && method->GetSignature() == signature) {
- return method;
+ArtMethod* Class::FindDeclaredVirtualMethod(const StringPiece& name, const StringPiece& signature,
+ size_t pointer_size) {
+ for (auto& method : GetVirtualMethods(pointer_size)) {
+ if (name == method.GetName() && method.GetSignature() == signature) {
+ return &method;
}
}
return nullptr;
}
-ArtMethod* Class::FindDeclaredVirtualMethod(const StringPiece& name, const Signature& signature) {
- for (size_t i = 0; i < NumVirtualMethods(); ++i) {
- ArtMethod* method = GetVirtualMethod(i);
- if (name == method->GetName() && signature == method->GetSignature()) {
- return method;
+ArtMethod* Class::FindDeclaredVirtualMethod(const StringPiece& name, const Signature& signature,
+ size_t pointer_size) {
+ for (auto& method : GetVirtualMethods(pointer_size)) {
+ if (name == method.GetName() && signature == method.GetSignature()) {
+ return &method;
}
}
return nullptr;
}
-ArtMethod* Class::FindDeclaredVirtualMethod(const DexCache* dex_cache, uint32_t dex_method_idx) {
+ArtMethod* Class::FindDeclaredVirtualMethod(const DexCache* dex_cache, uint32_t dex_method_idx,
+ size_t pointer_size) {
if (GetDexCache() == dex_cache) {
- for (size_t i = 0; i < NumVirtualMethods(); ++i) {
- ArtMethod* method = GetVirtualMethod(i);
- if (method->GetDexMethodIndex() == dex_method_idx &&
- // A miranda method may have a different DexCache and is always created by linking,
- // never *declared* in the class.
- !method->IsMiranda()) {
- return method;
+ for (auto& method : GetVirtualMethods(pointer_size)) {
+ // A miranda method may have a different DexCache and is always created by linking,
+ // never *declared* in the class.
+ if (method.GetDexMethodIndex() == dex_method_idx && !method.IsMiranda()) {
+ return &method;
}
}
}
return nullptr;
}
-ArtMethod* Class::FindVirtualMethod(const StringPiece& name, const StringPiece& signature) {
+ArtMethod* Class::FindVirtualMethod(
+ const StringPiece& name, const StringPiece& signature, size_t pointer_size) {
for (Class* klass = this; klass != nullptr; klass = klass->GetSuperClass()) {
- ArtMethod* method = klass->FindDeclaredVirtualMethod(name, signature);
+ ArtMethod* method = klass->FindDeclaredVirtualMethod(name, signature, pointer_size);
if (method != nullptr) {
return method;
}
@@ -503,9 +513,10 @@ ArtMethod* Class::FindVirtualMethod(const StringPiece& name, const StringPiece&
return nullptr;
}
-ArtMethod* Class::FindVirtualMethod(const StringPiece& name, const Signature& signature) {
+ArtMethod* Class::FindVirtualMethod(
+ const StringPiece& name, const Signature& signature, size_t pointer_size) {
for (Class* klass = this; klass != nullptr; klass = klass->GetSuperClass()) {
- ArtMethod* method = klass->FindDeclaredVirtualMethod(name, signature);
+ ArtMethod* method = klass->FindDeclaredVirtualMethod(name, signature, pointer_size);
if (method != nullptr) {
return method;
}
@@ -513,9 +524,10 @@ ArtMethod* Class::FindVirtualMethod(const StringPiece& name, const Signature& si
return nullptr;
}
-ArtMethod* Class::FindVirtualMethod(const DexCache* dex_cache, uint32_t dex_method_idx) {
+ArtMethod* Class::FindVirtualMethod(
+ const DexCache* dex_cache, uint32_t dex_method_idx, size_t pointer_size) {
for (Class* klass = this; klass != nullptr; klass = klass->GetSuperClass()) {
- ArtMethod* method = klass->FindDeclaredVirtualMethod(dex_cache, dex_method_idx);
+ ArtMethod* method = klass->FindDeclaredVirtualMethod(dex_cache, dex_method_idx, pointer_size);
if (method != nullptr) {
return method;
}
@@ -523,13 +535,12 @@ ArtMethod* Class::FindVirtualMethod(const DexCache* dex_cache, uint32_t dex_meth
return nullptr;
}
-ArtMethod* Class::FindClassInitializer() {
- for (size_t i = 0; i < NumDirectMethods(); ++i) {
- ArtMethod* method = GetDirectMethod(i);
- if (method->IsClassInitializer()) {
- DCHECK_STREQ(method->GetName(), "<clinit>");
- DCHECK_STREQ(method->GetSignature().ToString().c_str(), "()V");
- return method;
+ArtMethod* Class::FindClassInitializer(size_t pointer_size) {
+ for (ArtMethod& method : GetDirectMethods(pointer_size)) {
+ if (method.IsClassInitializer()) {
+ DCHECK_STREQ(method.GetName(), "<clinit>");
+ DCHECK_STREQ(method.GetSignature().ToString().c_str(), "()V");
+ return &method;
}
}
return nullptr;
@@ -684,23 +695,18 @@ ArtField* Class::FindField(Thread* self, Handle<Class> klass, const StringPiece&
return nullptr;
}
-static void SetPreverifiedFlagOnMethods(mirror::ObjectArray<mirror::ArtMethod>* methods)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- if (methods != nullptr) {
- for (int32_t index = 0, end = methods->GetLength(); index < end; ++index) {
- mirror::ArtMethod* method = methods->GetWithoutChecks(index);
- DCHECK(method != nullptr);
- if (!method->IsNative() && !method->IsAbstract()) {
- method->SetPreverified();
- }
+void Class::SetPreverifiedFlagOnAllMethods(size_t pointer_size) {
+ DCHECK(IsVerified());
+ for (auto& m : GetDirectMethods(pointer_size)) {
+ if (!m.IsNative() && !m.IsAbstract()) {
+ m.SetPreverified();
+ }
+ }
+ for (auto& m : GetVirtualMethods(pointer_size)) {
+ if (!m.IsNative() && !m.IsAbstract()) {
+ m.SetPreverified();
}
}
-}
-
-void Class::SetPreverifiedFlagOnAllMethods() {
- DCHECK(IsVerified());
- SetPreverifiedFlagOnMethods(GetDirectMethods());
- SetPreverifiedFlagOnMethods(GetVirtualMethods());
}
const char* Class::GetDescriptor(std::string* storage) {
@@ -795,21 +801,20 @@ const DexFile::TypeList* Class::GetInterfaceTypeList() {
return GetDexFile().GetInterfacesList(*class_def);
}
-void Class::PopulateEmbeddedImtAndVTable(StackHandleScope<kImtSize>* imt_handle_scope) {
- for (uint32_t i = 0; i < kImtSize; i++) {
- // Replace null with conflict.
- mirror::Object* obj = imt_handle_scope->GetReference(i);
- DCHECK(obj != nullptr);
- SetEmbeddedImTableEntry(i, obj->AsArtMethod());
+void Class::PopulateEmbeddedImtAndVTable(ArtMethod* const (&methods)[kImtSize],
+ size_t pointer_size) {
+ for (size_t i = 0; i < kImtSize; i++) {
+ auto method = methods[i];
+ DCHECK(method != nullptr);
+ SetEmbeddedImTableEntry(i, method, pointer_size);
}
-
- ObjectArray<ArtMethod>* table = GetVTableDuringLinking();
+ PointerArray* table = GetVTableDuringLinking();
CHECK(table != nullptr) << PrettyClass(this);
- SetEmbeddedVTableLength(table->GetLength());
- for (int32_t i = 0; i < table->GetLength(); i++) {
- SetEmbeddedVTableEntry(i, table->GetWithoutChecks(i));
+ const size_t table_length = table->GetLength();
+ SetEmbeddedVTableLength(table_length);
+ for (size_t i = 0; i < table_length; i++) {
+ SetEmbeddedVTableEntry(i, table->GetElementPtrSize<ArtMethod*>(i, pointer_size), pointer_size);
}
-
// Keep java.lang.Object class's vtable around for since it's easier
// to be reused by array classes during their linking.
if (!IsObjectClass()) {
@@ -820,21 +825,20 @@ void Class::PopulateEmbeddedImtAndVTable(StackHandleScope<kImtSize>* imt_handle_
// The pre-fence visitor for Class::CopyOf().
class CopyClassVisitor {
public:
- explicit CopyClassVisitor(Thread* self, Handle<mirror::Class>* orig,
- size_t new_length, size_t copy_bytes,
- StackHandleScope<mirror::Class::kImtSize>* imt_handle_scope)
+ explicit CopyClassVisitor(Thread* self, Handle<mirror::Class>* orig, size_t new_length,
+ size_t copy_bytes, ArtMethod* const (&imt)[mirror::Class::kImtSize],
+ size_t pointer_size)
: self_(self), orig_(orig), new_length_(new_length),
- copy_bytes_(copy_bytes), imt_handle_scope_(imt_handle_scope) {
+ copy_bytes_(copy_bytes), imt_(imt), pointer_size_(pointer_size) {
}
- void operator()(Object* obj, size_t usable_size) const
+ void operator()(mirror::Object* obj, size_t usable_size ATTRIBUTE_UNUSED) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- UNUSED(usable_size);
StackHandleScope<1> hs(self_);
Handle<mirror::Class> h_new_class_obj(hs.NewHandle(obj->AsClass()));
mirror::Object::CopyObject(self_, h_new_class_obj.Get(), orig_->Get(), copy_bytes_);
mirror::Class::SetStatus(h_new_class_obj, Class::kStatusResolving, self_);
- h_new_class_obj->PopulateEmbeddedImtAndVTable(imt_handle_scope_);
+ h_new_class_obj->PopulateEmbeddedImtAndVTable(imt_, pointer_size_);
h_new_class_obj->SetClassSize(new_length_);
}
@@ -843,12 +847,13 @@ class CopyClassVisitor {
Handle<mirror::Class>* const orig_;
const size_t new_length_;
const size_t copy_bytes_;
- StackHandleScope<mirror::Class::kImtSize>* const imt_handle_scope_;
+ ArtMethod* const (&imt_)[mirror::Class::kImtSize];
+ const size_t pointer_size_;
DISALLOW_COPY_AND_ASSIGN(CopyClassVisitor);
};
Class* Class::CopyOf(Thread* self, int32_t new_length,
- StackHandleScope<kImtSize>* imt_handle_scope) {
+ ArtMethod* const (&imt)[mirror::Class::kImtSize], size_t pointer_size) {
DCHECK_GE(new_length, static_cast<int32_t>(sizeof(Class)));
// We may get copied by a compacting GC.
StackHandleScope<1> hs(self);
@@ -856,13 +861,12 @@ Class* Class::CopyOf(Thread* self, int32_t new_length,
gc::Heap* heap = Runtime::Current()->GetHeap();
// The num_bytes (3rd param) is sizeof(Class) as opposed to SizeOf()
// to skip copying the tail part that we will overwrite here.
- CopyClassVisitor visitor(self, &h_this, new_length, sizeof(Class), imt_handle_scope);
- mirror::Object* new_class =
- kMovingClasses
- ? heap->AllocObject<true>(self, java_lang_Class_.Read(), new_length, visitor)
- : heap->AllocNonMovableObject<true>(self, java_lang_Class_.Read(), new_length, visitor);
+ CopyClassVisitor visitor(self, &h_this, new_length, sizeof(Class), imt, pointer_size);
+ mirror::Object* new_class = kMovingClasses ?
+ heap->AllocObject<true>(self, java_lang_Class_.Read(), new_length, visitor) :
+ heap->AllocNonMovableObject<true>(self, java_lang_Class_.Read(), new_length, visitor);
if (UNLIKELY(new_class == nullptr)) {
- CHECK(self->IsExceptionPending()); // Expect an OOME.
+ self->AssertPendingOOMException();
return nullptr;
}
return new_class->AsClass();
@@ -873,26 +877,32 @@ bool Class::ProxyDescriptorEquals(const char* match) {
return Runtime::Current()->GetClassLinker()->GetDescriptorForProxy(this) == match;
}
-mirror::ArtMethod* Class::GetDeclaredConstructor(
+// TODO: Move this to java_lang_Class.cc?
+ArtMethod* Class::GetDeclaredConstructor(
Thread* self, Handle<mirror::ObjectArray<mirror::Class>> args) {
- auto* direct_methods = GetDirectMethods();
- size_t count = direct_methods != nullptr ? direct_methods->GetLength() : 0u;
- for (size_t i = 0; i < count; ++i) {
- auto* m = direct_methods->GetWithoutChecks(i);
+ for (auto& m : GetDirectMethods(sizeof(void*))) {
// Skip <clinit> which is a static constructor, as well as non constructors.
- if (m->IsStatic() || !m->IsConstructor()) {
+ if (m.IsStatic() || !m.IsConstructor()) {
continue;
}
// May cause thread suspension and exceptions.
- if (m->EqualParameters(args)) {
- return m;
+ if (m.GetInterfaceMethodIfProxy(sizeof(void*))->EqualParameters(args)) {
+ return &m;
}
- if (self->IsExceptionPending()) {
+ if (UNLIKELY(self->IsExceptionPending())) {
return nullptr;
}
}
return nullptr;
}
+uint32_t Class::Depth() {
+ uint32_t depth = 0;
+ for (Class* klass = this; klass->GetSuperClass() != nullptr; klass = klass->GetSuperClass()) {
+ depth++;
+ }
+ return depth;
+}
+
} // namespace mirror
} // namespace art
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index b99fc68..ba8a693 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -17,6 +17,7 @@
#ifndef ART_RUNTIME_MIRROR_CLASS_H_
#define ART_RUNTIME_MIRROR_CLASS_H_
+#include "base/iteration_range.h"
#include "dex_file.h"
#include "gc_root.h"
#include "gc/allocator_type.h"
@@ -27,6 +28,8 @@
#include "object_callbacks.h"
#include "primitive.h"
#include "read_barrier_option.h"
+#include "stride_iterator.h"
+#include "utils.h"
#ifndef IMT_SIZE
#error IMT_SIZE not defined
@@ -35,6 +38,7 @@
namespace art {
class ArtField;
+class ArtMethod;
struct ClassOffsets;
template<class T> class Handle;
template<class T> class Handle;
@@ -44,7 +48,6 @@ template<size_t kNumReferences> class PACKED(4) StackHandleScope;
namespace mirror {
-class ArtMethod;
class ClassLoader;
class Constructor;
class DexCache;
@@ -64,16 +67,6 @@ class MANAGED Class FINAL : public Object {
// (non-marker) interfaces.
static constexpr size_t kImtSize = IMT_SIZE;
- // imtable entry embedded in class object.
- struct MANAGED ImTableEntry {
- HeapReference<ArtMethod> method;
- };
-
- // vtable entry embedded in class object.
- struct MANAGED VTableEntry {
- HeapReference<ArtMethod> method;
- };
-
// Class Status
//
// kStatusRetired: Class that's temporarily used till class linking time
@@ -406,13 +399,7 @@ class MANAGED Class FINAL : public Object {
}
// Depth of class from java.lang.Object
- uint32_t Depth() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- uint32_t depth = 0;
- for (Class* klass = this; klass->GetSuperClass() != nullptr; klass = klass->GetSuperClass()) {
- depth++;
- }
- return depth;
- }
+ uint32_t Depth() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
@@ -427,9 +414,6 @@ class MANAGED Class FINAL : public Object {
bool IsThrowableClass() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
- bool IsArtMethodClass() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
bool IsReferenceClass() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
static MemberOffset ComponentTypeOffset() {
@@ -469,12 +453,27 @@ class MANAGED Class FINAL : public Object {
bool IsInstantiable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
return (!IsPrimitive() && !IsInterface() && !IsAbstract()) ||
- ((IsAbstract()) && IsArrayClass());
+ (IsAbstract() && IsArrayClass());
}
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
bool IsObjectArrayClass() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return GetComponentType<kVerifyFlags>() != nullptr && !GetComponentType<kVerifyFlags>()->IsPrimitive();
+ return GetComponentType<kVerifyFlags>() != nullptr &&
+ !GetComponentType<kVerifyFlags>()->IsPrimitive();
+ }
+
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ bool IsIntArrayClass() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
+ auto* component_type = GetComponentType<kVerifyFlags>();
+ return component_type != nullptr && component_type->template IsPrimitiveInt<kNewFlags>();
+ }
+
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ bool IsLongArrayClass() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
+ auto* component_type = GetComponentType<kVerifyFlags>();
+ return component_type != nullptr && component_type->template IsPrimitiveLong<kNewFlags>();
}
// Creates a raw object instance but does not invoke the default constructor.
@@ -517,18 +516,19 @@ class MANAGED Class FINAL : public Object {
uint32_t num_16bit_static_fields,
uint32_t num_32bit_static_fields,
uint32_t num_64bit_static_fields,
- uint32_t num_ref_static_fields);
+ uint32_t num_ref_static_fields,
+ size_t pointer_size);
// The size of java.lang.Class.class.
- static uint32_t ClassClassSize() {
+ static uint32_t ClassClassSize(size_t pointer_size) {
// The number of vtable entries in java.lang.Class.
- uint32_t vtable_entries = Object::kVTableLength + 66;
- return ComputeClassSize(true, vtable_entries, 0, 0, 0, 1, 0);
+ uint32_t vtable_entries = Object::kVTableLength + 65;
+ return ComputeClassSize(true, vtable_entries, 0, 0, 0, 1, 0, pointer_size);
}
// The size of a java.lang.Class representing a primitive such as int.class.
- static uint32_t PrimitiveClassSize() {
- return ComputeClassSize(false, 0, 0, 0, 0, 0, 0);
+ static uint32_t PrimitiveClassSize(size_t pointer_size) {
+ return ComputeClassSize(false, 0, 0, 0, 0, 0, 0, pointer_size);
}
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
@@ -673,60 +673,82 @@ class MANAGED Class FINAL : public Object {
// Also updates the dex_cache_strings_ variable from new_dex_cache.
void SetDexCache(DexCache* new_dex_cache) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ALWAYS_INLINE ObjectArray<ArtMethod>* GetDirectMethods()
+ ALWAYS_INLINE StrideIterator<ArtMethod> DirectMethodsBegin(size_t pointer_size)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ ALWAYS_INLINE StrideIterator<ArtMethod> DirectMethodsEnd(size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void SetDirectMethods(ObjectArray<ArtMethod>* new_direct_methods)
+ ALWAYS_INLINE IterationRange<StrideIterator<ArtMethod>> GetDirectMethods(size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ALWAYS_INLINE ArtMethod* GetDirectMethod(int32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+  ArtMethod* GetDirectMethodsPtr() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ void SetDirectMethodsPtr(ArtMethod* new_direct_methods)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ // Used by image writer.
+ void SetDirectMethodsPtrUnchecked(ArtMethod* new_direct_methods)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void SetDirectMethod(uint32_t i, ArtMethod* f) // TODO: uint16_t
+ ALWAYS_INLINE ArtMethod* GetDirectMethod(size_t i, size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+  // Use only when we are allocating and populating the method arrays.
+ ALWAYS_INLINE ArtMethod* GetDirectMethodUnchecked(size_t i, size_t pointer_size)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE ArtMethod* GetVirtualMethodUnchecked(size_t i, size_t pointer_size)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
// Returns the number of static, private, and constructor methods.
- uint32_t NumDirectMethods() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE uint32_t NumDirectMethods() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ return GetField32(OFFSET_OF_OBJECT_MEMBER(Class, num_direct_methods_));
+ }
+ void SetNumDirectMethods(uint32_t num) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ return SetField32<false>(OFFSET_OF_OBJECT_MEMBER(Class, num_direct_methods_), num);
+ }
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- ALWAYS_INLINE ObjectArray<ArtMethod>* GetVirtualMethods()
+ ALWAYS_INLINE ArtMethod* GetVirtualMethodsPtr() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ ALWAYS_INLINE StrideIterator<ArtMethod> VirtualMethodsBegin(size_t pointer_size)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ ALWAYS_INLINE StrideIterator<ArtMethod> VirtualMethodsEnd(size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ALWAYS_INLINE void SetVirtualMethods(ObjectArray<ArtMethod>* new_virtual_methods)
+ ALWAYS_INLINE IterationRange<StrideIterator<ArtMethod>> GetVirtualMethods(size_t pointer_size)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ void SetVirtualMethodsPtr(ArtMethod* new_virtual_methods)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Returns the number of non-inherited virtual methods.
- ALWAYS_INLINE uint32_t NumVirtualMethods() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE uint32_t NumVirtualMethods() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ return GetField32(OFFSET_OF_OBJECT_MEMBER(Class, num_virtual_methods_));
+ }
+ void SetNumVirtualMethods(uint32_t num) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ return SetField32<false>(OFFSET_OF_OBJECT_MEMBER(Class, num_virtual_methods_), num);
+ }
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- ArtMethod* GetVirtualMethod(uint32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- ArtMethod* GetVirtualMethodDuringLinking(uint32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ArtMethod* GetVirtualMethod(size_t i, size_t pointer_size)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void SetVirtualMethod(uint32_t i, ArtMethod* f) // TODO: uint16_t
+ ArtMethod* GetVirtualMethodDuringLinking(size_t i, size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ALWAYS_INLINE ObjectArray<ArtMethod>* GetVTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE PointerArray* GetVTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ALWAYS_INLINE ObjectArray<ArtMethod>* GetVTableDuringLinking()
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE PointerArray* GetVTableDuringLinking() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void SetVTable(ObjectArray<ArtMethod>* new_vtable)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ void SetVTable(PointerArray* new_vtable) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
static MemberOffset VTableOffset() {
return OFFSET_OF_OBJECT_MEMBER(Class, vtable_);
}
- static MemberOffset EmbeddedImTableOffset() {
- return MemberOffset(sizeof(Class));
- }
-
static MemberOffset EmbeddedVTableLengthOffset() {
- return MemberOffset(sizeof(Class) + kImtSize * sizeof(mirror::Class::ImTableEntry));
- }
-
- static MemberOffset EmbeddedVTableOffset() {
- return MemberOffset(sizeof(Class) + kImtSize * sizeof(ImTableEntry) + sizeof(int32_t));
+ return MemberOffset(sizeof(Class));
}
bool ShouldHaveEmbeddedImtAndVTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
@@ -735,90 +757,117 @@ class MANAGED Class FINAL : public Object {
bool HasVTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* GetEmbeddedImTableEntry(uint32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ static MemberOffset EmbeddedImTableEntryOffset(uint32_t i, size_t pointer_size);
- void SetEmbeddedImTableEntry(uint32_t i, ArtMethod* method) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ static MemberOffset EmbeddedVTableEntryOffset(uint32_t i, size_t pointer_size);
+
+ ArtMethod* GetEmbeddedImTableEntry(uint32_t i, size_t pointer_size)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ void SetEmbeddedImTableEntry(uint32_t i, ArtMethod* method, size_t pointer_size)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
int32_t GetVTableLength() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* GetVTableEntry(uint32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ArtMethod* GetVTableEntry(uint32_t i, size_t pointer_size)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
int32_t GetEmbeddedVTableLength() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void SetEmbeddedVTableLength(int32_t len) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* GetEmbeddedVTableEntry(uint32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ArtMethod* GetEmbeddedVTableEntry(uint32_t i, size_t pointer_size)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void SetEmbeddedVTableEntry(uint32_t i, ArtMethod* method) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ void SetEmbeddedVTableEntry(uint32_t i, ArtMethod* method, size_t pointer_size)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void PopulateEmbeddedImtAndVTable(StackHandleScope<kImtSize>* imt_handle_scope)
+ inline void SetEmbeddedVTableEntryUnchecked(uint32_t i, ArtMethod* method, size_t pointer_size)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ void PopulateEmbeddedImtAndVTable(ArtMethod* const (&methods)[kImtSize], size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Given a method implemented by this class but potentially from a super class, return the
// specific implementation method for this class.
- ArtMethod* FindVirtualMethodForVirtual(ArtMethod* method)
+ ArtMethod* FindVirtualMethodForVirtual(ArtMethod* method, size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Given a method implemented by this class' super class, return the specific implementation
// method for this class.
- ArtMethod* FindVirtualMethodForSuper(ArtMethod* method)
+ ArtMethod* FindVirtualMethodForSuper(ArtMethod* method, size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Given a method implemented by this class, but potentially from a
// super class or interface, return the specific implementation
// method for this class.
- ArtMethod* FindVirtualMethodForInterface(ArtMethod* method)
+ ArtMethod* FindVirtualMethodForInterface(ArtMethod* method, size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE;
- ArtMethod* FindVirtualMethodForVirtualOrInterface(ArtMethod* method)
+ ArtMethod* FindVirtualMethodForVirtualOrInterface(ArtMethod* method, size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* FindInterfaceMethod(const StringPiece& name, const StringPiece& signature)
+ ArtMethod* FindInterfaceMethod(const StringPiece& name, const StringPiece& signature,
+ size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* FindInterfaceMethod(const StringPiece& name, const Signature& signature)
+ ArtMethod* FindInterfaceMethod(const StringPiece& name, const Signature& signature,
+ size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* FindInterfaceMethod(const DexCache* dex_cache, uint32_t dex_method_idx)
+ ArtMethod* FindInterfaceMethod(const DexCache* dex_cache, uint32_t dex_method_idx,
+ size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* FindDeclaredDirectMethod(const StringPiece& name, const StringPiece& signature)
+ ArtMethod* FindDeclaredDirectMethod(const StringPiece& name, const StringPiece& signature,
+ size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* FindDeclaredDirectMethod(const StringPiece& name, const Signature& signature)
+ ArtMethod* FindDeclaredDirectMethod(const StringPiece& name, const Signature& signature,
+ size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* FindDeclaredDirectMethod(const DexCache* dex_cache, uint32_t dex_method_idx)
+ ArtMethod* FindDeclaredDirectMethod(const DexCache* dex_cache, uint32_t dex_method_idx,
+ size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* FindDirectMethod(const StringPiece& name, const StringPiece& signature)
+ ArtMethod* FindDirectMethod(const StringPiece& name, const StringPiece& signature,
+ size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* FindDirectMethod(const StringPiece& name, const Signature& signature)
+ ArtMethod* FindDirectMethod(const StringPiece& name, const Signature& signature,
+ size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* FindDirectMethod(const DexCache* dex_cache, uint32_t dex_method_idx)
+ ArtMethod* FindDirectMethod(const DexCache* dex_cache, uint32_t dex_method_idx,
+ size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* FindDeclaredVirtualMethod(const StringPiece& name, const StringPiece& signature)
+ ArtMethod* FindDeclaredVirtualMethod(const StringPiece& name, const StringPiece& signature,
+ size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* FindDeclaredVirtualMethod(const StringPiece& name, const Signature& signature)
+ ArtMethod* FindDeclaredVirtualMethod(const StringPiece& name, const Signature& signature,
+ size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* FindDeclaredVirtualMethod(const DexCache* dex_cache, uint32_t dex_method_idx)
+ ArtMethod* FindDeclaredVirtualMethod(const DexCache* dex_cache, uint32_t dex_method_idx,
+ size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* FindVirtualMethod(const StringPiece& name, const StringPiece& signature)
+ ArtMethod* FindVirtualMethod(const StringPiece& name, const StringPiece& signature,
+ size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* FindVirtualMethod(const StringPiece& name, const Signature& signature)
+ ArtMethod* FindVirtualMethod(const StringPiece& name, const Signature& signature,
+ size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* FindVirtualMethod(const DexCache* dex_cache, uint32_t dex_method_idx)
+ ArtMethod* FindVirtualMethod(const DexCache* dex_cache, uint32_t dex_method_idx,
+ size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* FindClassInitializer() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ArtMethod* FindClassInitializer(size_t pointer_size) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
ALWAYS_INLINE int32_t GetIfTableCount() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -867,7 +916,8 @@ class MANAGED Class FINAL : public Object {
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Get the offset of the first reference instance field. Other reference instance fields follow.
- MemberOffset GetFirstReferenceInstanceFieldOffset() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ MemberOffset GetFirstReferenceInstanceFieldOffset()
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Returns the number of static fields containing reference types.
uint32_t NumReferenceStaticFields() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
@@ -886,10 +936,11 @@ class MANAGED Class FINAL : public Object {
}
// Get the offset of the first reference static field. Other reference static fields follow.
- MemberOffset GetFirstReferenceStaticFieldOffset() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ MemberOffset GetFirstReferenceStaticFieldOffset(size_t pointer_size)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Get the offset of the first reference static field. Other reference static fields follow.
- MemberOffset GetFirstReferenceStaticFieldOffsetDuringLinking()
+ MemberOffset GetFirstReferenceStaticFieldOffsetDuringLinking(size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Gets the static fields of the class.
@@ -989,22 +1040,20 @@ class MANAGED Class FINAL : public Object {
static void VisitRoots(RootVisitor* visitor)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ // Visit native roots visits roots which are keyed off the native pointers such as ArtFields and
+ // ArtMethods.
template<class Visitor>
- // Visit field roots.
- void VisitFieldRoots(Visitor& visitor) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ void VisitNativeRoots(Visitor& visitor, size_t pointer_size)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// When class is verified, set the kAccPreverified flag on each method.
- void SetPreverifiedFlagOnAllMethods() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ void SetPreverifiedFlagOnAllMethods(size_t pointer_size)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template <bool kVisitClass, typename Visitor>
void VisitReferences(mirror::Class* klass, const Visitor& visitor)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- // Visit references within the embedded tables of the class.
- // TODO: remove NO_THREAD_SAFETY_ANALYSIS when annotalysis handles visitors better.
- template<typename Visitor>
- void VisitEmbeddedImtAndVTable(const Visitor& visitor) NO_THREAD_SAFETY_ANALYSIS;
-
// Get the descriptor of the class. In a few cases a std::string is required, rather than
// always create one the storage argument is populated and its internal c_str() returned. We do
// this to avoid memory allocation in the common case.
@@ -1014,7 +1063,6 @@ class MANAGED Class FINAL : public Object {
bool DescriptorEquals(const char* match) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
const DexFile::ClassDef* GetClassDef() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
ALWAYS_INLINE uint32_t NumDirectInterfaces() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -1037,8 +1085,8 @@ class MANAGED Class FINAL : public Object {
void AssertInitializedOrInitializingInThread(Thread* self)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- Class* CopyOf(Thread* self, int32_t new_length, StackHandleScope<kImtSize>* imt_handle_scope)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ Class* CopyOf(Thread* self, int32_t new_length, ArtMethod* const (&imt)[mirror::Class::kImtSize],
+ size_t pointer_size) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// For proxy class only.
ObjectArray<Class>* GetInterfaces() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -1060,7 +1108,7 @@ class MANAGED Class FINAL : public Object {
}
// May cause thread suspension due to EqualParameters.
- mirror::ArtMethod* GetDeclaredConstructor(
+ ArtMethod* GetDeclaredConstructor(
Thread* self, Handle<mirror::ObjectArray<mirror::Class>> args)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -1085,6 +1133,20 @@ class MANAGED Class FINAL : public Object {
return GetClassLoader() == nullptr;
}
+ static size_t ImTableEntrySize(size_t pointer_size) {
+ return pointer_size;
+ }
+
+ static size_t VTableEntrySize(size_t pointer_size) {
+ return pointer_size;
+ }
+
+ ALWAYS_INLINE ArtMethod* GetDirectMethodsPtrUnchecked()
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ ALWAYS_INLINE ArtMethod* GetVirtualMethodsPtrUnchecked()
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
private:
void SetVerifyErrorClass(Class* klass) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -1109,6 +1171,12 @@ class MANAGED Class FINAL : public Object {
bool ProxyDescriptorEquals(const char* match) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+  // Check that the pointer size matches the one in the class linker.
+ ALWAYS_INLINE static void CheckPointerSize(size_t pointer_size);
+
+ static MemberOffset EmbeddedImTableOffset(size_t pointer_size);
+ static MemberOffset EmbeddedVTableOffset(size_t pointer_size);
+
// Defining class loader, or null for the "bootstrap" system loader.
HeapReference<ClassLoader> class_loader_;
@@ -1123,9 +1191,6 @@ class MANAGED Class FINAL : public Object {
// Short cuts to dex_cache_ member for fast compiled code access.
HeapReference<ObjectArray<String>> dex_cache_strings_;
- // static, private, and <init> methods
- HeapReference<ObjectArray<ArtMethod>> direct_methods_;
-
// The interface table (iftable_) contains pairs of a interface class and an array of the
// interface methods. There is one pair per interface supported by this class. That means one
// pair for each interface we support directly, indirectly via superclass, or indirectly via a
@@ -1148,19 +1213,19 @@ class MANAGED Class FINAL : public Object {
// If class verify fails, we must return same error on subsequent tries.
HeapReference<Class> verify_error_class_;
- // Virtual methods defined in this class; invoked through vtable.
- HeapReference<ObjectArray<ArtMethod>> virtual_methods_;
-
// Virtual method table (vtable), for use by "invoke-virtual". The vtable from the superclass is
// copied in, and virtual methods from our class either replace those from the super or are
// appended. For abstract classes, methods may be created in the vtable that aren't in
// virtual_ methods_ for miranda methods.
- HeapReference<ObjectArray<ArtMethod>> vtable_;
+ HeapReference<PointerArray> vtable_;
// Access flags; low 16 bits are defined by VM spec.
// Note: Shuffled back.
uint32_t access_flags_;
+ // static, private, and <init> methods. Pointer to an ArtMethod array.
+ uint64_t direct_methods_;
+
// instance fields
//
// These describe the layout of the contents of an Object.
@@ -1174,6 +1239,9 @@ class MANAGED Class FINAL : public Object {
// Static fields
uint64_t sfields_;
+ // Virtual methods defined in this class; invoked through vtable. Pointer to an ArtMethod array.
+ uint64_t virtual_methods_;
+
// Total size of the Class instance; used when allocating storage on gc heap.
// See also object_size_.
uint32_t class_size_;
@@ -1189,7 +1257,10 @@ class MANAGED Class FINAL : public Object {
// TODO: really 16bits
int32_t dex_type_idx_;
- // Number of static fields.
+  // Number of direct methods.
+ uint32_t num_direct_methods_;
+
+ // Number of instance fields.
uint32_t num_instance_fields_;
// Number of instance fields that are object refs.
@@ -1201,6 +1272,9 @@ class MANAGED Class FINAL : public Object {
// Number of static fields.
uint32_t num_static_fields_;
+ // Number of virtual methods.
+ uint32_t num_virtual_methods_;
+
// Total object size; used when allocating storage on gc heap.
// (For interfaces and abstract classes this will be zero.)
// See also class_size_.
diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h
index 1cb437e..4b5063a 100644
--- a/runtime/mirror/dex_cache-inl.h
+++ b/runtime/mirror/dex_cache-inl.h
@@ -20,6 +20,7 @@
#include "dex_cache.h"
#include "art_field-inl.h"
+#include "art_method-inl.h"
#include "base/logging.h"
#include "mirror/class.h"
#include "runtime.h"
@@ -27,20 +28,9 @@
namespace art {
namespace mirror {
-inline uint32_t DexCache::ClassSize() {
+inline uint32_t DexCache::ClassSize(size_t pointer_size) {
uint32_t vtable_entries = Object::kVTableLength + 5;
- return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0);
-}
-
-inline ArtMethod* DexCache::GetResolvedMethod(uint32_t method_idx)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- ArtMethod* method = GetResolvedMethods()->Get(method_idx);
- // Hide resolution trampoline methods from the caller
- if (method != nullptr && method->IsRuntimeMethod()) {
- DCHECK_EQ(method, Runtime::Current()->GetResolutionMethod());
- return nullptr;
- }
- return method;
+ return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, pointer_size);
}
inline void DexCache::SetResolvedType(uint32_t type_idx, Class* resolved) {
@@ -50,15 +40,8 @@ inline void DexCache::SetResolvedType(uint32_t type_idx, Class* resolved) {
}
inline ArtField* DexCache::GetResolvedField(uint32_t idx, size_t ptr_size) {
- ArtField* field = nullptr;
- if (ptr_size == 8) {
- field = reinterpret_cast<ArtField*>(
- static_cast<uintptr_t>(GetResolvedFields()->AsLongArray()->GetWithoutChecks(idx)));
- } else {
- DCHECK_EQ(ptr_size, 4u);
- field = reinterpret_cast<ArtField*>(
- static_cast<uintptr_t>(GetResolvedFields()->AsIntArray()->GetWithoutChecks(idx)));
- }
+ DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
+ auto* field = GetResolvedFields()->GetElementPtrSize<ArtField*>(idx, ptr_size);
if (field == nullptr || field->GetDeclaringClass()->IsErroneous()) {
return nullptr;
}
@@ -66,15 +49,24 @@ inline ArtField* DexCache::GetResolvedField(uint32_t idx, size_t ptr_size) {
}
inline void DexCache::SetResolvedField(uint32_t idx, ArtField* field, size_t ptr_size) {
- if (ptr_size == 8) {
- GetResolvedFields()->AsLongArray()->Set(
- idx, static_cast<uint64_t>(reinterpret_cast<uintptr_t>(field)));
- } else {
- DCHECK_EQ(ptr_size, 4u);
- CHECK_LE(reinterpret_cast<uintptr_t>(field), 0xFFFFFFFF);
- GetResolvedFields()->AsIntArray()->Set(
- idx, static_cast<uint32_t>(reinterpret_cast<uintptr_t>(field)));
+ DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
+ GetResolvedFields()->SetElementPtrSize(idx, field, ptr_size);
+}
+
+inline ArtMethod* DexCache::GetResolvedMethod(uint32_t method_idx, size_t ptr_size) {
+ DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
+ auto* method = GetResolvedMethods()->GetElementPtrSize<ArtMethod*>(method_idx, ptr_size);
+ // Hide resolution trampoline methods from the caller
+ if (method != nullptr && method->IsRuntimeMethod()) {
+ DCHECK_EQ(method, Runtime::Current()->GetResolutionMethod());
+ return nullptr;
}
+ return method;
+}
+
+inline void DexCache::SetResolvedMethod(uint32_t idx, ArtMethod* method, size_t ptr_size) {
+ DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
+ GetResolvedMethods()->SetElementPtrSize(idx, method, ptr_size);
}
} // namespace mirror
diff --git a/runtime/mirror/dex_cache.cc b/runtime/mirror/dex_cache.cc
index ade8bd2..630faee 100644
--- a/runtime/mirror/dex_cache.cc
+++ b/runtime/mirror/dex_cache.cc
@@ -31,12 +31,9 @@
namespace art {
namespace mirror {
-void DexCache::Init(const DexFile* dex_file,
- String* location,
- ObjectArray<String>* strings,
- ObjectArray<Class>* resolved_types,
- ObjectArray<ArtMethod>* resolved_methods,
- Array* resolved_fields) {
+void DexCache::Init(const DexFile* dex_file, String* location, ObjectArray<String>* strings,
+ ObjectArray<Class>* resolved_types, PointerArray* resolved_methods,
+ PointerArray* resolved_fields, size_t pointer_size) {
CHECK(dex_file != nullptr);
CHECK(location != nullptr);
CHECK(strings != nullptr);
@@ -51,24 +48,21 @@ void DexCache::Init(const DexFile* dex_file,
SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_types_), resolved_types);
SetFieldObject<false>(ResolvedMethodsOffset(), resolved_methods);
- Runtime* runtime = Runtime::Current();
+ Runtime* const runtime = Runtime::Current();
if (runtime->HasResolutionMethod()) {
// Initialize the resolve methods array to contain trampolines for resolution.
- ArtMethod* trampoline = runtime->GetResolutionMethod();
- for (size_t i = 0, length = resolved_methods->GetLength(); i < length; i++) {
- resolved_methods->SetWithoutChecks<false>(i, trampoline);
- }
+ Fixup(runtime->GetResolutionMethod(), pointer_size);
}
}
-void DexCache::Fixup(ArtMethod* trampoline) {
+void DexCache::Fixup(ArtMethod* trampoline, size_t pointer_size) {
// Fixup the resolve methods array to contain trampoline for resolution.
CHECK(trampoline != nullptr);
- ObjectArray<ArtMethod>* resolved_methods = GetResolvedMethods();
- size_t length = resolved_methods->GetLength();
- for (size_t i = 0; i < length; i++) {
- if (resolved_methods->GetWithoutChecks(i) == nullptr) {
- resolved_methods->SetWithoutChecks<false>(i, trampoline);
+ CHECK(trampoline->IsRuntimeMethod());
+ auto* resolved_methods = GetResolvedMethods();
+ for (size_t i = 0, length = resolved_methods->GetLength(); i < length; i++) {
+ if (resolved_methods->GetElementPtrSize<ArtMethod*>(i, pointer_size) == nullptr) {
+ resolved_methods->SetElementPtrSize(i, trampoline, pointer_size);
}
}
}
diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h
index 7e30b89..0ce83ec 100644
--- a/runtime/mirror/dex_cache.h
+++ b/runtime/mirror/dex_cache.h
@@ -17,6 +17,7 @@
#ifndef ART_RUNTIME_MIRROR_DEX_CACHE_H_
#define ART_RUNTIME_MIRROR_DEX_CACHE_H_
+#include "array.h"
#include "art_field.h"
#include "art_method.h"
#include "class.h"
@@ -38,22 +39,19 @@ class String;
class MANAGED DexCache FINAL : public Object {
public:
// Size of java.lang.DexCache.class.
- static uint32_t ClassSize();
+ static uint32_t ClassSize(size_t pointer_size);
// Size of an instance of java.lang.DexCache not including referenced values.
static constexpr uint32_t InstanceSize() {
return sizeof(DexCache);
}
- void Init(const DexFile* dex_file,
- String* location,
- ObjectArray<String>* strings,
- ObjectArray<Class>* types,
- ObjectArray<ArtMethod>* methods,
- Array* fields)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ void Init(const DexFile* dex_file, String* location, ObjectArray<String>* strings,
+ ObjectArray<Class>* types, PointerArray* methods, PointerArray* fields,
+ size_t pointer_size) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void Fixup(ArtMethod* trampoline) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ void Fixup(ArtMethod* trampoline, size_t pointer_size)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
String* GetLocation() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
return GetFieldObject<String>(OFFSET_OF_OBJECT_MEMBER(DexCache, location_));
@@ -109,19 +107,18 @@ class MANAGED DexCache FINAL : public Object {
void SetResolvedType(uint32_t type_idx, Class* resolved)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ArtMethod* GetResolvedMethod(uint32_t method_idx) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ ALWAYS_INLINE ArtMethod* GetResolvedMethod(uint32_t method_idx, size_t ptr_size)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void SetResolvedMethod(uint32_t method_idx, ArtMethod* resolved) ALWAYS_INLINE
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- GetResolvedMethods()->Set(method_idx, resolved);
- }
+ ALWAYS_INLINE void SetResolvedMethod(uint32_t method_idx, ArtMethod* resolved, size_t ptr_size)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Pointer sized variant, used for patching.
- ArtField* GetResolvedField(uint32_t idx, size_t ptr_size)
+ ALWAYS_INLINE ArtField* GetResolvedField(uint32_t idx, size_t ptr_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Pointer sized variant, used for patching.
- void SetResolvedField(uint32_t idx, ArtField* field, size_t ptr_size)
+ ALWAYS_INLINE void SetResolvedField(uint32_t idx, ArtField* field, size_t ptr_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
ObjectArray<String>* GetStrings() ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
@@ -133,13 +130,12 @@ class MANAGED DexCache FINAL : public Object {
OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_types_));
}
- ObjectArray<ArtMethod>* GetResolvedMethods() ALWAYS_INLINE
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return GetFieldObject< ObjectArray<ArtMethod>>(ResolvedMethodsOffset());
+ PointerArray* GetResolvedMethods() ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ return GetFieldObject<PointerArray>(ResolvedMethodsOffset());
}
- Array* GetResolvedFields() ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return GetFieldObject<Array>(ResolvedFieldsOffset());
+ PointerArray* GetResolvedFields() ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ return GetFieldObject<PointerArray>(ResolvedFieldsOffset());
}
const DexFile* GetDexFile() ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
@@ -154,9 +150,9 @@ class MANAGED DexCache FINAL : public Object {
private:
HeapReference<Object> dex_;
HeapReference<String> location_;
- // Either an int array or long array (64 bit).
- HeapReference<Object> resolved_fields_;
- HeapReference<ObjectArray<ArtMethod>> resolved_methods_;
+ // Either an int array or long array based on runtime ISA since these arrays hold pointers.
+ HeapReference<PointerArray> resolved_fields_;
+ HeapReference<PointerArray> resolved_methods_;
HeapReference<ObjectArray<Class>> resolved_types_;
HeapReference<ObjectArray<String>> strings_;
uint64_t dex_file_;
diff --git a/runtime/mirror/field-inl.h b/runtime/mirror/field-inl.h
index 388921b..8a0daec 100644
--- a/runtime/mirror/field-inl.h
+++ b/runtime/mirror/field-inl.h
@@ -50,14 +50,14 @@ inline mirror::Field* Field::CreateFromArtField(Thread* self, ArtField* field,
}
}
auto ret = hs.NewHandle(static_cast<Field*>(StaticClass()->AllocObject(self)));
- if (ret.Get() == nullptr) {
- if (kIsDebugBuild) {
- self->AssertPendingException();
- }
+ if (UNLIKELY(ret.Get() == nullptr)) {
+ self->AssertPendingOOMException();
return nullptr;
}
+ const auto pointer_size = kTransactionActive ?
+ Runtime::Current()->GetClassLinker()->GetImagePointerSize() : sizeof(void*);
auto dex_field_index = field->GetDexFieldIndex();
- auto* resolved_field = field->GetDexCache()->GetResolvedField(dex_field_index, sizeof(void*));
+ auto* resolved_field = field->GetDexCache()->GetResolvedField(dex_field_index, pointer_size);
if (field->GetDeclaringClass()->IsProxyClass()) {
DCHECK(field->IsStatic());
DCHECK_LT(dex_field_index, 2U);
@@ -70,7 +70,7 @@ inline mirror::Field* Field::CreateFromArtField(Thread* self, ArtField* field,
} else {
// We rely on the field being resolved so that we can back to the ArtField
// (i.e. FromReflectedMethod).
- field->GetDexCache()->SetResolvedField(dex_field_index, field, sizeof(void*));
+ field->GetDexCache()->SetResolvedField(dex_field_index, field, pointer_size);
}
}
ret->SetType<kTransactionActive>(type.Get());
diff --git a/runtime/mirror/field.cc b/runtime/mirror/field.cc
index ac56129..02e4484 100644
--- a/runtime/mirror/field.cc
+++ b/runtime/mirror/field.cc
@@ -16,6 +16,7 @@
#include "field-inl.h"
+#include "class-inl.h"
#include "dex_cache-inl.h"
#include "object_array-inl.h"
#include "object-inl.h"
diff --git a/runtime/mirror/iftable.h b/runtime/mirror/iftable.h
index 1c1c7b3..1ea5bee 100644
--- a/runtime/mirror/iftable.h
+++ b/runtime/mirror/iftable.h
@@ -34,27 +34,22 @@ class MANAGED IfTable FINAL : public ObjectArray<Object> {
ALWAYS_INLINE void SetInterface(int32_t i, Class* interface)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- ObjectArray<ArtMethod>* GetMethodArray(int32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- ObjectArray<ArtMethod>* method_array =
- down_cast<ObjectArray<ArtMethod>*>(Get((i * kMax) + kMethodArray));
+ PointerArray* GetMethodArray(int32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ auto* method_array = down_cast<PointerArray*>(Get((i * kMax) + kMethodArray));
DCHECK(method_array != nullptr);
return method_array;
}
size_t GetMethodArrayCount(int32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- ObjectArray<ArtMethod>* method_array =
- down_cast<ObjectArray<ArtMethod>*>(Get((i * kMax) + kMethodArray));
- if (method_array == nullptr) {
- return 0;
- }
- return method_array->GetLength();
+ auto* method_array = down_cast<PointerArray*>(Get((i * kMax) + kMethodArray));
+ return method_array == nullptr ? 0u : method_array->GetLength();
}
- void SetMethodArray(int32_t i, ObjectArray<ArtMethod>* new_ma)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- DCHECK(new_ma != nullptr);
- DCHECK(Get((i * kMax) + kMethodArray) == nullptr);
- Set<false>((i * kMax) + kMethodArray, new_ma);
+ void SetMethodArray(int32_t i, PointerArray* arr) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ DCHECK(arr != nullptr);
+ auto idx = i * kMax + kMethodArray;
+ DCHECK(Get(idx) == nullptr);
+ Set<false>(idx, arr);
}
size_t Count() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
diff --git a/runtime/mirror/method.cc b/runtime/mirror/method.cc
index 81530bb..85c52e9 100644
--- a/runtime/mirror/method.cc
+++ b/runtime/mirror/method.cc
@@ -16,7 +16,9 @@
#include "method.h"
-#include "mirror/art_method.h"
+#include "art_method.h"
+#include "gc_root-inl.h"
+#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
namespace art {
@@ -49,7 +51,7 @@ void Method::ResetArrayClass() {
array_class_ = GcRoot<Class>(nullptr);
}
-Method* Method::CreateFromArtMethod(Thread* self, mirror::ArtMethod* method) {
+Method* Method::CreateFromArtMethod(Thread* self, ArtMethod* method) {
DCHECK(!method->IsConstructor()) << PrettyMethod(method);
auto* ret = down_cast<Method*>(StaticClass()->AllocObject(self));
if (LIKELY(ret != nullptr)) {
@@ -90,7 +92,7 @@ void Constructor::VisitRoots(RootVisitor* visitor) {
array_class_.VisitRootIfNonNull(visitor, RootInfo(kRootStickyClass));
}
-Constructor* Constructor::CreateFromArtMethod(Thread* self, mirror::ArtMethod* method) {
+Constructor* Constructor::CreateFromArtMethod(Thread* self, ArtMethod* method) {
DCHECK(method->IsConstructor()) << PrettyMethod(method);
auto* ret = down_cast<Constructor*>(StaticClass()->AllocObject(self));
if (LIKELY(ret != nullptr)) {
diff --git a/runtime/mirror/method.h b/runtime/mirror/method.h
index 88100f0..42c76c0 100644
--- a/runtime/mirror/method.h
+++ b/runtime/mirror/method.h
@@ -28,7 +28,7 @@ class Class;
// C++ mirror of java.lang.reflect.Method.
class MANAGED Method : public AbstractMethod {
public:
- static Method* CreateFromArtMethod(Thread* self, mirror::ArtMethod* method)
+ static Method* CreateFromArtMethod(Thread* self, ArtMethod* method)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
static mirror::Class* StaticClass() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
@@ -59,7 +59,7 @@ class MANAGED Method : public AbstractMethod {
// C++ mirror of java.lang.reflect.Constructor.
class MANAGED Constructor: public AbstractMethod {
public:
- static Constructor* CreateFromArtMethod(Thread* self, mirror::ArtMethod* method)
+ static Constructor* CreateFromArtMethod(Thread* self, ArtMethod* method)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
static mirror::Class* StaticClass() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
diff --git a/runtime/mirror/object-inl.h b/runtime/mirror/object-inl.h
index 39d0f56..05c44e5 100644
--- a/runtime/mirror/object-inl.h
+++ b/runtime/mirror/object-inl.h
@@ -24,6 +24,7 @@
#include "atomic.h"
#include "array-inl.h"
#include "class.h"
+#include "class_linker.h"
#include "lock_word-inl.h"
#include "monitor.h"
#include "object_array-inl.h"
@@ -36,9 +37,9 @@
namespace art {
namespace mirror {
-inline uint32_t Object::ClassSize() {
+inline uint32_t Object::ClassSize(size_t pointer_size) {
uint32_t vtable_entries = kVTableLength;
- return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0);
+ return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, pointer_size);
}
template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
@@ -253,18 +254,6 @@ inline bool Object::IsArrayInstance() {
template IsArrayClass<kVerifyFlags, kReadBarrierOption>();
}
-template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
-inline bool Object::IsArtMethod() {
- return GetClass<kVerifyFlags, kReadBarrierOption>()->
- template IsArtMethodClass<kReadBarrierOption>();
-}
-
-template<VerifyObjectFlags kVerifyFlags>
-inline ArtMethod* Object::AsArtMethod() {
- DCHECK(IsArtMethod<kVerifyFlags>());
- return down_cast<ArtMethod*>(this);
-}
-
template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsReferenceInstance() {
return GetClass<kVerifyFlags>()->IsTypeOfReferenceClass();
@@ -292,7 +281,7 @@ inline BooleanArray* Object::AsBooleanArray() {
template<VerifyObjectFlags kVerifyFlags>
inline ByteArray* Object::AsByteArray() {
- static const VerifyObjectFlags kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
+ constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveByte());
return down_cast<ByteArray*>(this);
@@ -300,7 +289,7 @@ inline ByteArray* Object::AsByteArray() {
template<VerifyObjectFlags kVerifyFlags>
inline ByteArray* Object::AsByteSizedArray() {
- constexpr VerifyObjectFlags kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
+ constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveByte() ||
GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveBoolean());
@@ -333,25 +322,41 @@ inline ShortArray* Object::AsShortSizedArray() {
}
template<VerifyObjectFlags kVerifyFlags>
-inline IntArray* Object::AsIntArray() {
+inline bool Object::IsIntArray() {
constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
- CHECK(GetClass<kVerifyFlags>()->IsArrayClass());
- CHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveInt() ||
- GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveFloat());
+ auto* component_type = GetClass<kVerifyFlags>()->GetComponentType();
+ return component_type != nullptr && component_type->template IsPrimitiveInt<kNewFlags>();
+}
+
+template<VerifyObjectFlags kVerifyFlags>
+inline IntArray* Object::AsIntArray() {
+ DCHECK(IsIntArray<kVerifyFlags>());
return down_cast<IntArray*>(this);
}
template<VerifyObjectFlags kVerifyFlags>
-inline LongArray* Object::AsLongArray() {
+inline bool Object::IsLongArray() {
constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
- CHECK(GetClass<kVerifyFlags>()->IsArrayClass());
- CHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveLong() ||
- GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveDouble());
+ auto* component_type = GetClass<kVerifyFlags>()->GetComponentType();
+ return component_type != nullptr && component_type->template IsPrimitiveLong<kNewFlags>();
+}
+
+template<VerifyObjectFlags kVerifyFlags>
+inline LongArray* Object::AsLongArray() {
+ DCHECK(IsLongArray<kVerifyFlags>());
return down_cast<LongArray*>(this);
}
template<VerifyObjectFlags kVerifyFlags>
+inline bool Object::IsFloatArray() {
+ constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
+ auto* component_type = GetClass<kVerifyFlags>()->GetComponentType();
+ return component_type != nullptr && component_type->template IsPrimitiveFloat<kNewFlags>();
+}
+
+template<VerifyObjectFlags kVerifyFlags>
inline FloatArray* Object::AsFloatArray() {
+ DCHECK(IsFloatArray<kVerifyFlags>());
constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveFloat());
@@ -359,7 +364,15 @@ inline FloatArray* Object::AsFloatArray() {
}
template<VerifyObjectFlags kVerifyFlags>
+inline bool Object::IsDoubleArray() {
+ constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
+ auto* component_type = GetClass<kVerifyFlags>()->GetComponentType();
+ return component_type != nullptr && component_type->template IsPrimitiveDouble<kNewFlags>();
+}
+
+template<VerifyObjectFlags kVerifyFlags>
inline DoubleArray* Object::AsDoubleArray() {
+ DCHECK(IsDoubleArray<kVerifyFlags>());
constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveDouble());
@@ -950,8 +963,11 @@ inline void Object::VisitFieldsReferences(uint32_t ref_offsets, const Visitor& v
if (num_reference_fields == 0u) {
continue;
}
+ // Presumably GC can happen while we are cross compiling; doing the pointer size
+ // logic here should not cause performance problems.
MemberOffset field_offset = kIsStatic
- ? klass->GetFirstReferenceStaticFieldOffset()
+ ? klass->GetFirstReferenceStaticFieldOffset(
+ Runtime::Current()->GetClassLinker()->GetImagePointerSize())
: klass->GetFirstReferenceInstanceFieldOffset();
for (size_t i = 0; i < num_reference_fields; ++i) {
// TODO: Do a simpler check?
diff --git a/runtime/mirror/object.cc b/runtime/mirror/object.cc
index f9740bb..b177e2f 100644
--- a/runtime/mirror/object.cc
+++ b/runtime/mirror/object.cc
@@ -106,9 +106,8 @@ class CopyObjectVisitor {
: self_(self), orig_(orig), num_bytes_(num_bytes) {
}
- void operator()(Object* obj, size_t usable_size) const
+ void operator()(Object* obj, size_t usable_size ATTRIBUTE_UNUSED) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- UNUSED(usable_size);
Object::CopyObject(self_, obj, orig_->Get(), num_bytes_);
}
diff --git a/runtime/mirror/object.h b/runtime/mirror/object.h
index 5afe99f..60c756a 100644
--- a/runtime/mirror/object.h
+++ b/runtime/mirror/object.h
@@ -25,6 +25,7 @@
namespace art {
class ArtField;
+class ArtMethod;
class ImageWriter;
class LockWord;
class Monitor;
@@ -34,7 +35,6 @@ class VoidFunctor;
namespace mirror {
-class ArtMethod;
class Array;
class Class;
class FinalizerReference;
@@ -71,7 +71,7 @@ class MANAGED LOCKABLE Object {
static constexpr size_t kVTableLength = 11;
// The size of the java.lang.Class representing a java.lang.Object.
- static uint32_t ClassSize();
+ static uint32_t ClassSize(size_t pointer_size);
// Size of an instance of java.lang.Object.
static constexpr uint32_t InstanceSize() {
@@ -176,12 +176,22 @@ class MANAGED LOCKABLE Object {
ShortArray* AsShortSizedArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ bool IsIntArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
IntArray* AsIntArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ bool IsLongArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
LongArray* AsLongArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ bool IsFloatArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
FloatArray* AsFloatArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ bool IsDoubleArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
DoubleArray* AsDoubleArray() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -196,12 +206,6 @@ class MANAGED LOCKABLE Object {
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
Throwable* AsThrowable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
- ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
- bool IsArtMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
- ArtMethod* AsArtMethod() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
bool IsReferenceInstance() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
@@ -429,7 +433,7 @@ class MANAGED LOCKABLE Object {
field_offset, static_cast<int32_t>(ptr));
} else {
SetField64<kTransactionActive, kCheckTransaction, kVerifyFlags>(
- field_offset, static_cast<int64_t>(reinterpret_cast<intptr_t>(new_value)));
+ field_offset, static_cast<int64_t>(reinterpret_cast<uintptr_t>(new_value)));
}
}
// TODO fix thread safety analysis broken by the use of template. This should be
@@ -463,8 +467,8 @@ class MANAGED LOCKABLE Object {
} else {
int64_t v = GetField64<kVerifyFlags, kIsVolatile>(field_offset);
// Check that we dont lose any non 0 bits.
- DCHECK_EQ(reinterpret_cast<int64_t>(reinterpret_cast<T>(v)), v);
- return reinterpret_cast<T>(v);
+ DCHECK_EQ(static_cast<int64_t>(static_cast<uintptr_t>(v)), v);
+ return reinterpret_cast<T>(static_cast<uintptr_t>(v));
}
}
diff --git a/runtime/mirror/object_array.h b/runtime/mirror/object_array.h
index 6404faf..5eddc18 100644
--- a/runtime/mirror/object_array.h
+++ b/runtime/mirror/object_array.h
@@ -26,8 +26,8 @@ template<class T>
class MANAGED ObjectArray: public Array {
public:
// The size of Object[].class.
- static uint32_t ClassSize() {
- return Array::ClassSize();
+ static uint32_t ClassSize(size_t pointer_size) {
+ return Array::ClassSize(pointer_size);
}
static ObjectArray<T>* Alloc(Thread* self, Class* object_array_class, int32_t length,
diff --git a/runtime/mirror/object_test.cc b/runtime/mirror/object_test.cc
index 8e50a7a..85ea28f 100644
--- a/runtime/mirror/object_test.cc
+++ b/runtime/mirror/object_test.cc
@@ -77,9 +77,9 @@ class ObjectTest : public CommonRuntimeTest {
TEST_F(ObjectTest, Constants) {
EXPECT_EQ(kObjectReferenceSize, sizeof(HeapReference<Object>));
EXPECT_EQ(kObjectHeaderSize, sizeof(Object));
- EXPECT_EQ(MIRROR_ART_METHOD_QUICK_CODE_OFFSET_32,
+ EXPECT_EQ(ART_METHOD_QUICK_CODE_OFFSET_32,
ArtMethod::EntryPointFromQuickCompiledCodeOffset(4).Int32Value());
- EXPECT_EQ(MIRROR_ART_METHOD_QUICK_CODE_OFFSET_64,
+ EXPECT_EQ(ART_METHOD_QUICK_CODE_OFFSET_64,
ArtMethod::EntryPointFromQuickCompiledCodeOffset(8).Int32Value());
}
@@ -306,7 +306,7 @@ TEST_F(ObjectTest, CheckAndAllocArrayFromCode) {
// pretend we are trying to call 'new char[3]' from String.toCharArray
ScopedObjectAccess soa(Thread::Current());
Class* java_util_Arrays = class_linker_->FindSystemClass(soa.Self(), "Ljava/util/Arrays;");
- ArtMethod* sort = java_util_Arrays->FindDirectMethod("sort", "([I)V");
+ ArtMethod* sort = java_util_Arrays->FindDirectMethod("sort", "([I)V", sizeof(void*));
const DexFile::StringId* string_id = java_lang_dex_file_->FindStringId("[I");
ASSERT_TRUE(string_id != nullptr);
const DexFile::TypeId* type_id = java_lang_dex_file_->FindTypeId(
@@ -366,7 +366,7 @@ TEST_F(ObjectTest, StaticFieldFromCode) {
StackHandleScope<2> hs(soa.Self());
Handle<mirror::ClassLoader> loader(hs.NewHandle(soa.Decode<ClassLoader*>(class_loader)));
Class* klass = class_linker_->FindClass(soa.Self(), "LStaticsFromCode;", loader);
- ArtMethod* clinit = klass->FindClassInitializer();
+ ArtMethod* clinit = klass->FindClassInitializer(sizeof(void*));
const DexFile::StringId* klass_string_id = dex_file->FindStringId("LStaticsFromCode;");
ASSERT_TRUE(klass_string_id != nullptr);
const DexFile::TypeId* klass_type_id = dex_file->FindTypeId(
@@ -508,22 +508,22 @@ TEST_F(ObjectTest, DescriptorCompare) {
Class* klass2 = linker->FindClass(soa.Self(), "LProtoCompare2;", class_loader_2);
ASSERT_TRUE(klass2 != nullptr);
- ArtMethod* m1_1 = klass1->GetVirtualMethod(0);
+ ArtMethod* m1_1 = klass1->GetVirtualMethod(0, sizeof(void*));
EXPECT_STREQ(m1_1->GetName(), "m1");
- ArtMethod* m2_1 = klass1->GetVirtualMethod(1);
+ ArtMethod* m2_1 = klass1->GetVirtualMethod(1, sizeof(void*));
EXPECT_STREQ(m2_1->GetName(), "m2");
- ArtMethod* m3_1 = klass1->GetVirtualMethod(2);
+ ArtMethod* m3_1 = klass1->GetVirtualMethod(2, sizeof(void*));
EXPECT_STREQ(m3_1->GetName(), "m3");
- ArtMethod* m4_1 = klass1->GetVirtualMethod(3);
+ ArtMethod* m4_1 = klass1->GetVirtualMethod(3, sizeof(void*));
EXPECT_STREQ(m4_1->GetName(), "m4");
- ArtMethod* m1_2 = klass2->GetVirtualMethod(0);
+ ArtMethod* m1_2 = klass2->GetVirtualMethod(0, sizeof(void*));
EXPECT_STREQ(m1_2->GetName(), "m1");
- ArtMethod* m2_2 = klass2->GetVirtualMethod(1);
+ ArtMethod* m2_2 = klass2->GetVirtualMethod(1, sizeof(void*));
EXPECT_STREQ(m2_2->GetName(), "m2");
- ArtMethod* m3_2 = klass2->GetVirtualMethod(2);
+ ArtMethod* m3_2 = klass2->GetVirtualMethod(2, sizeof(void*));
EXPECT_STREQ(m3_2->GetName(), "m3");
- ArtMethod* m4_2 = klass2->GetVirtualMethod(3);
+ ArtMethod* m4_2 = klass2->GetVirtualMethod(3, sizeof(void*));
EXPECT_STREQ(m4_2->GetName(), "m4");
}
diff --git a/runtime/mirror/reference-inl.h b/runtime/mirror/reference-inl.h
index d1d2a3a..01e99b9 100644
--- a/runtime/mirror/reference-inl.h
+++ b/runtime/mirror/reference-inl.h
@@ -22,9 +22,9 @@
namespace art {
namespace mirror {
-inline uint32_t Reference::ClassSize() {
+inline uint32_t Reference::ClassSize(size_t pointer_size) {
uint32_t vtable_entries = Object::kVTableLength + 5;
- return Class::ComputeClassSize(false, vtable_entries, 2, 0, 0, 0, 0);
+ return Class::ComputeClassSize(false, vtable_entries, 2, 0, 0, 0, 0, pointer_size);
}
inline bool Reference::IsEnqueuable() {
diff --git a/runtime/mirror/reference.cc b/runtime/mirror/reference.cc
index 70bcf92..3c7f8c8 100644
--- a/runtime/mirror/reference.cc
+++ b/runtime/mirror/reference.cc
@@ -16,7 +16,7 @@
#include "reference.h"
-#include "mirror/art_method.h"
+#include "art_method.h"
#include "gc_root-inl.h"
namespace art {
diff --git a/runtime/mirror/reference.h b/runtime/mirror/reference.h
index c11d79d..4bbdb99 100644
--- a/runtime/mirror/reference.h
+++ b/runtime/mirror/reference.h
@@ -42,7 +42,7 @@ namespace mirror {
class MANAGED Reference : public Object {
public:
// Size of java.lang.ref.Reference.class.
- static uint32_t ClassSize();
+ static uint32_t ClassSize(size_t pointer_size);
// Size of an instance of java.lang.ref.Reference.
static constexpr uint32_t InstanceSize() {
diff --git a/runtime/mirror/string-inl.h b/runtime/mirror/string-inl.h
index 35b8aef..9f6cd11 100644
--- a/runtime/mirror/string-inl.h
+++ b/runtime/mirror/string-inl.h
@@ -30,9 +30,9 @@
namespace art {
namespace mirror {
-inline uint32_t String::ClassSize() {
+inline uint32_t String::ClassSize(size_t pointer_size) {
uint32_t vtable_entries = Object::kVTableLength + 52;
- return Class::ComputeClassSize(true, vtable_entries, 0, 1, 0, 1, 2);
+ return Class::ComputeClassSize(true, vtable_entries, 0, 1, 0, 1, 2, pointer_size);
}
// Sets string count in the allocation code path to ensure it is guarded by a CAS.
diff --git a/runtime/mirror/string.h b/runtime/mirror/string.h
index fcfe976..a8f16d7 100644
--- a/runtime/mirror/string.h
+++ b/runtime/mirror/string.h
@@ -34,7 +34,7 @@ namespace mirror {
class MANAGED String FINAL : public Object {
public:
// Size of java.lang.String.class.
- static uint32_t ClassSize();
+ static uint32_t ClassSize(size_t pointer_size);
// Size of an instance of java.lang.String not including its value array.
static constexpr uint32_t InstanceSize() {
diff --git a/runtime/mirror/throwable.cc b/runtime/mirror/throwable.cc
index 782b9c0..1c21edb 100644
--- a/runtime/mirror/throwable.cc
+++ b/runtime/mirror/throwable.cc
@@ -71,9 +71,18 @@ bool Throwable::IsCheckedException() {
int32_t Throwable::GetStackDepth() {
Object* stack_state = GetStackState();
- if (stack_state == nullptr || !stack_state->IsObjectArray()) return -1;
- ObjectArray<Object>* method_trace = down_cast<ObjectArray<Object>*>(stack_state);
- return method_trace->GetLength() - 1;
+ if (stack_state == nullptr) {
+ return -1;
+ }
+ if (!stack_state->IsIntArray() && !stack_state->IsLongArray()) {
+ return -1;
+ }
+ mirror::PointerArray* method_trace = down_cast<mirror::PointerArray*>(stack_state->AsArray());
+ int32_t array_len = method_trace->GetLength();
+ // The format is [method pointers][pcs] so the depth is half the length (see method
+ // BuildInternalStackTraceVisitor::Init).
+ CHECK_EQ(array_len % 2, 0);
+ return array_len / 2;
}
std::string Throwable::Dump() {
@@ -86,17 +95,21 @@ std::string Throwable::Dump() {
result += "\n";
Object* stack_state = GetStackState();
// check stack state isn't missing or corrupt
- if (stack_state != nullptr && stack_state->IsObjectArray()) {
+ if (stack_state != nullptr &&
+ (stack_state->IsIntArray() || stack_state->IsLongArray())) {
// Decode the internal stack trace into the depth and method trace
- ObjectArray<Object>* method_trace = down_cast<ObjectArray<Object>*>(stack_state);
- int32_t depth = method_trace->GetLength() - 1;
- IntArray* pc_trace = down_cast<IntArray*>(method_trace->Get(depth));
+ // Format is [method pointers][pcs]
+ auto* method_trace = down_cast<mirror::PointerArray*>(stack_state->AsArray());
+ auto array_len = method_trace->GetLength();
+ CHECK_EQ(array_len % 2, 0);
+ const auto depth = array_len / 2;
if (depth == 0) {
result += "(Throwable with empty stack trace)";
} else {
+ auto ptr_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
for (int32_t i = 0; i < depth; ++i) {
- mirror::ArtMethod* method = down_cast<ArtMethod*>(method_trace->Get(i));
- uint32_t dex_pc = pc_trace->Get(i);
+ ArtMethod* method = method_trace->GetElementPtrSize<ArtMethod*>(i, ptr_size);
+ uintptr_t dex_pc = method_trace->GetElementPtrSize<uintptr_t>(i + depth, ptr_size);
int32_t line_number = method->GetLineNumFromDexPC(dex_pc);
const char* source_file = method->GetDeclaringClassSourceFile();
result += StringPrintf(" at %s (%s:%d)\n", PrettyMethod(method, true).c_str(),
@@ -108,8 +121,7 @@ std::string Throwable::Dump() {
if (stack_trace != nullptr && stack_trace->IsObjectArray()) {
CHECK_EQ(stack_trace->GetClass()->GetComponentType(),
StackTraceElement::GetStackTraceElement());
- ObjectArray<StackTraceElement>* ste_array =
- down_cast<ObjectArray<StackTraceElement>*>(stack_trace);
+ auto* ste_array = down_cast<ObjectArray<StackTraceElement>*>(stack_trace);
if (ste_array->GetLength() == 0) {
result += "(Throwable with empty stack trace)";
} else {