summary refs log tree commit diff stats
path: root/runtime
diff options
context:
space:
mode:
authorAndreas Gampe <agampe@google.com>2014-03-13 22:21:55 +0000
committerGerrit Code Review <noreply-gerritcodereview@google.com>2014-03-13 22:21:55 +0000
commit4dcef7e7e786192d8e1fd5e9d43e98867000c149 (patch)
treea83b77093690009389395f539dab20ca59dcf5ed /runtime
parent135ce2ea28e98df624aa071688bd01e0dadf2f62 (diff)
parent90546836312adda54f28b700f25ff29ec8becdf8 (diff)
downloadart-4dcef7e7e786192d8e1fd5e9d43e98867000c149.zip
art-4dcef7e7e786192d8e1fd5e9d43e98867000c149.tar.gz
art-4dcef7e7e786192d8e1fd5e9d43e98867000c149.tar.bz2
Merge "Fixes for Generic JNI"
Diffstat (limited to 'runtime')
-rw-r--r--runtime/class_linker.cc38
-rw-r--r--runtime/entrypoints/quick/quick_trampoline_entrypoints.cc26
-rw-r--r--runtime/mirror/art_method.h5
3 files changed, 56 insertions, 13 deletions
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index b8d1493..79a5d28 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1767,7 +1767,7 @@ void ClassLinker::FixupStaticTrampolines(mirror::Class* klass) {
}
static void LinkCode(const SirtRef<mirror::ArtMethod>& method, const OatFile::OatClass* oat_class,
- uint32_t method_index)
+ const DexFile& dex_file, uint32_t dex_method_index, uint32_t method_index)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
// Method shouldn't have already been linked.
DCHECK(method->GetEntryPointFromQuickCompiledCode() == nullptr);
@@ -1822,6 +1822,38 @@ static void LinkCode(const SirtRef<mirror::ArtMethod>& method, const OatFile::Oa
if (method->IsNative()) {
// Unregistering restores the dlsym lookup stub.
method->UnregisterNative(Thread::Current());
+
+ if (enter_interpreter) {
+ // We have a native method here without code. Then it should have either the GenericJni
+ // trampoline as entrypoint (non-static), or the Resolution trampoline (static).
+ DCHECK(method->GetEntryPointFromQuickCompiledCode() ==
+ GetQuickResolutionTrampoline(runtime->GetClassLinker())
+ ||
+ method->GetEntryPointFromQuickCompiledCode() == GetQuickGenericJniTrampoline());
+
+ DCHECK_EQ(method->GetFrameSizeInBytes<false>(), 0U);
+
+ // Fix up method metadata if necessary.
+ if (method->GetFrameSizeInBytes<false>() == 0) {
+ uint32_t s_len;
+ const char* shorty = dex_file.GetMethodShorty(dex_file.GetMethodId(dex_method_index), &s_len);
+ uint32_t refs = 1; // Native method always has "this" or class.
+ for (uint32_t i = 1; i < s_len; ++i) {
+ if (shorty[i] == 'L') {
+ refs++;
+ }
+ }
+ size_t sirt_size = StackIndirectReferenceTable::GetAlignedSirtSize(refs);
+
+ // Get the generic spill masks and base frame size.
+ mirror::ArtMethod* callee_save_method =
+ Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs);
+
+ method->SetFrameSizeInBytes(callee_save_method->GetFrameSizeInBytes() + sirt_size);
+ method->SetCoreSpillMask(callee_save_method->GetCoreSpillMask());
+ method->SetFpSpillMask(callee_save_method->GetFpSpillMask());
+ }
+ }
}
// Allow instrumentation its chance to hijack code.
@@ -1934,7 +1966,7 @@ void ClassLinker::LoadClass(const DexFile& dex_file,
}
klass->SetDirectMethod(i, method.get());
if (oat_class.get() != NULL) {
- LinkCode(method, oat_class.get(), class_def_method_index);
+ LinkCode(method, oat_class.get(), dex_file, it.GetMemberIndex(), class_def_method_index);
}
method->SetMethodIndex(class_def_method_index);
class_def_method_index++;
@@ -1948,7 +1980,7 @@ void ClassLinker::LoadClass(const DexFile& dex_file,
klass->SetVirtualMethod(i, method.get());
DCHECK_EQ(class_def_method_index, it.NumDirectMethods() + i);
if (oat_class.get() != NULL) {
- LinkCode(method, oat_class.get(), class_def_method_index);
+ LinkCode(method, oat_class.get(), dex_file, it.GetMemberIndex(), class_def_method_index);
}
class_def_method_index++;
}
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index 31a5728..a4491d4 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -1040,7 +1040,6 @@ template <class T> class BuildGenericJniFrameStateMachine {
return fpr_index_ > 0;
}
- // TODO: please review this bit representation retrieving.
template <typename U, typename V> V convert(U in) {
CHECK_LE(sizeof(U), sizeof(V));
union { U u; V v; } tmp;
@@ -1449,6 +1448,8 @@ void BuildGenericJniFrameVisitor::FinalizeSirt(Thread* self) {
self->PushSirt(sirt_);
}
+extern "C" void* artFindNativeMethod();
+
/*
* Initializes an alloca region assumed to be directly below sp for a native call:
* Create a Sirt and call stack and fill a mini stack with values to be pushed to registers.
@@ -1479,15 +1480,13 @@ extern "C" ssize_t artQuickGenericJniTrampoline(Thread* self, mirror::ArtMethod*
self->VerifyStack();
- // start JNI, save the cookie
+ // Start JNI, save the cookie.
uint32_t cookie;
if (called->IsSynchronized()) {
cookie = JniMethodStartSynchronized(visitor.GetFirstSirtEntry(), self);
if (self->IsExceptionPending()) {
self->PopSirt();
// A negative value denotes an error.
- // TODO: Do we still need to fix the stack pointer? I think so. Then it's necessary to push
- // that value!
return -1;
}
} else {
@@ -1496,18 +1495,27 @@ extern "C" ssize_t artQuickGenericJniTrampoline(Thread* self, mirror::ArtMethod*
uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
*(sp32 - 1) = cookie;
- // retrieve native code
+ // Retrieve the stored native code.
const void* nativeCode = called->GetNativeMethod();
- if (nativeCode == nullptr) {
- // TODO: is this really an error, or do we need to try to find native code?
- LOG(FATAL) << "Finding native code not implemented yet.";
+
+  // Check whether it's the stub to retrieve the native code; if so, we should call that directly.
+ DCHECK(nativeCode != nullptr);
+ if (nativeCode == GetJniDlsymLookupStub()) {
+ nativeCode = artFindNativeMethod();
+
+ if (nativeCode == nullptr) {
+ DCHECK(self->IsExceptionPending()); // There should be an exception pending now.
+ return -1;
+ }
+ // Note that the native code pointer will be automatically set by artFindNativeMethod().
}
+ // Store the native code pointer in the stack at the right location.
uintptr_t* code_pointer = reinterpret_cast<uintptr_t*>(visitor.GetCodeReturn());
- size_t window_size = visitor.GetAllocaUsedSize();
*code_pointer = reinterpret_cast<uintptr_t>(nativeCode);
// 5K reserved, window_size + frame pointer used.
+ size_t window_size = visitor.GetAllocaUsedSize();
return (5 * KB) - window_size - kPointerSize;
}
diff --git a/runtime/mirror/art_method.h b/runtime/mirror/art_method.h
index 84a3eb6..c654933 100644
--- a/runtime/mirror/art_method.h
+++ b/runtime/mirror/art_method.h
@@ -325,10 +325,13 @@ class MANAGED ArtMethod : public Object {
void SetOatNativeGcMapOffset(uint32_t gc_map_offset);
uint32_t GetOatNativeGcMapOffset();
+ template <bool kCheckFrameSize = true>
uint32_t GetFrameSizeInBytes() {
uint32_t result = GetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, quick_frame_size_in_bytes_),
false);
- DCHECK_LE(static_cast<size_t>(kStackAlignment), result);
+ if (kCheckFrameSize) {
+ DCHECK_LE(static_cast<size_t>(kStackAlignment), result);
+ }
return result;
}