-rw-r--r--  runtime/arch/mips/entrypoints_init_mips.cc      |  4
-rw-r--r--  runtime/arch/mips64/entrypoints_init_mips64.cc  |  3
-rw-r--r--  runtime/arch/x86/jni_entrypoints_x86.S          |  1
-rw-r--r--  runtime/arch/x86/quick_entrypoints_x86.S        | 15
-rw-r--r--  runtime/art_method.cc                           | 47
-rw-r--r--  runtime/mirror/class.cc                         |  6
-rw-r--r--  runtime/stack_map.h                             | 10
-rw-r--r--  test/004-JniTest/jni_test.cc                    |  4
-rw-r--r--  test/004-JniTest/src/Main.java                  | 28
-rw-r--r--  test/Android.run-test.mk                        | 14
10 files changed, 86 insertions(+), 46 deletions(-)
diff --git a/runtime/arch/mips/entrypoints_init_mips.cc b/runtime/arch/mips/entrypoints_init_mips.cc
index ff04106..09a018e 100644
--- a/runtime/arch/mips/entrypoints_init_mips.cc
+++ b/runtime/arch/mips/entrypoints_init_mips.cc
@@ -267,6 +267,10 @@ void InitEntryPoints(InterpreterEntryPoints* ipoints, JniEntryPoints* jpoints,
qpoints->pThrowStackOverflow = art_quick_throw_stack_overflow;
static_assert(!IsDirectEntrypoint(kQuickThrowStackOverflow), "Non-direct C stub marked direct.");
+ // Deoptimize
+ qpoints->pDeoptimize = art_quick_deoptimize;
+ static_assert(!IsDirectEntrypoint(kQuickDeoptimize), "Non-direct C stub marked direct.");
+
// Atomic 64-bit load/store
qpoints->pA64Load = QuasiAtomic::Read64;
static_assert(IsDirectEntrypoint(kQuickA64Load), "Non-direct C stub marked direct.");
diff --git a/runtime/arch/mips64/entrypoints_init_mips64.cc b/runtime/arch/mips64/entrypoints_init_mips64.cc
index 321c27b..4904af9 100644
--- a/runtime/arch/mips64/entrypoints_init_mips64.cc
+++ b/runtime/arch/mips64/entrypoints_init_mips64.cc
@@ -176,6 +176,9 @@ void InitEntryPoints(InterpreterEntryPoints* ipoints, JniEntryPoints* jpoints,
qpoints->pThrowNullPointer = art_quick_throw_null_pointer_exception;
qpoints->pThrowStackOverflow = art_quick_throw_stack_overflow;
+ // Deoptimize
+ qpoints->pDeoptimize = art_quick_deoptimize;
+
// TODO - use lld/scd instructions for Mips64
// Atomic 64-bit load/store
qpoints->pA64Load = QuasiAtomic::Read64;
diff --git a/runtime/arch/x86/jni_entrypoints_x86.S b/runtime/arch/x86/jni_entrypoints_x86.S
index 5d27e47..aca5a37 100644
--- a/runtime/arch/x86/jni_entrypoints_x86.S
+++ b/runtime/arch/x86/jni_entrypoints_x86.S
@@ -23,6 +23,7 @@ DEFINE_FUNCTION art_jni_dlsym_lookup_stub
subl LITERAL(8), %esp // align stack
CFI_ADJUST_CFA_OFFSET(8)
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
+ CFI_ADJUST_CFA_OFFSET(4)
call SYMBOL(artFindNativeMethod) // (Thread*)
addl LITERAL(12), %esp // remove argument & padding
CFI_ADJUST_CFA_OFFSET(-12)
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index 870a747..9fb2a6f 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -1107,11 +1107,13 @@ DEFINE_FUNCTION art_quick_check_cast
addl LITERAL(12), %esp // pop arguments
CFI_ADJUST_CFA_OFFSET(-12)
ret
+
+ CFI_ADJUST_CFA_OFFSET(12) // Reset unwind info so following code unwinds.
1:
POP eax // pop arguments
POP ecx
addl LITERAL(4), %esp
- CFI_ADJUST_CFA_OFFSET(-12)
+ CFI_ADJUST_CFA_OFFSET(-4)
SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx // save all registers as basis for long jump context
// Outgoing argument set up
PUSH eax // alignment padding
@@ -1416,6 +1418,7 @@ DEFINE_FUNCTION art_quick_resolution_trampoline
call SYMBOL(artQuickResolutionTrampoline) // (Method* called, receiver, Thread*, SP)
movl %eax, %edi // remember code pointer in EDI
addl LITERAL(16), %esp // pop arguments
+ CFI_ADJUST_CFA_OFFSET(-16)
test %eax, %eax // if code pointer is null goto deliver pending exception
jz 1f
RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME_AND_JUMP
@@ -1546,6 +1549,7 @@ DEFINE_FUNCTION art_quick_instrumentation_entry
PUSH eax // Pass Method*.
call SYMBOL(artInstrumentationMethodEntryFromCode) // (Method*, Object*, Thread*, LR)
addl LITERAL(28), %esp // Pop arguments upto saved Method*.
+ CFI_ADJUST_CFA_OFFSET(-28)
movl 60(%esp), %edi // Restore edi.
movl %eax, 60(%esp) // Place code* over edi, just under return pc.
movl SYMBOL(art_quick_instrumentation_exit)@GOT(%ebx), %ebx
@@ -1565,11 +1569,13 @@ DEFINE_FUNCTION art_quick_instrumentation_entry
movl 52(%esp), %ebp // Restore ebp.
movl 56(%esp), %esi // Restore esi.
addl LITERAL(60), %esp // Wind stack back upto code*.
+ CFI_ADJUST_CFA_OFFSET(-60)
ret // Call method (and pop).
END_FUNCTION art_quick_instrumentation_entry
DEFINE_FUNCTION art_quick_instrumentation_exit
pushl LITERAL(0) // Push a fake return PC as there will be none on the stack.
+ CFI_ADJUST_CFA_OFFSET(4)
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx
mov %esp, %ecx // Remember SP
subl LITERAL(8), %esp // Save float return value.
@@ -1598,6 +1604,7 @@ DEFINE_FUNCTION art_quick_instrumentation_exit
CFI_ADJUST_CFA_OFFSET(-8)
RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
addl LITERAL(4), %esp // Remove fake return pc.
+ CFI_ADJUST_CFA_OFFSET(-4)
jmp *%ecx // Return.
END_FUNCTION art_quick_instrumentation_exit
@@ -1606,7 +1613,7 @@ END_FUNCTION art_quick_instrumentation_exit
* will long jump to the upcall with a special exception of -1.
*/
DEFINE_FUNCTION art_quick_deoptimize
- pushl %ebx // Entry point for a jump. Fake that we were called.
+ PUSH ebx // Entry point for a jump. Fake that we were called.
.globl SYMBOL(art_quick_deoptimize_from_compiled_slow_path) // Entry point for real calls
// from compiled slow paths.
SYMBOL(art_quick_deoptimize_from_compiled_slow_path):
@@ -1669,8 +1676,8 @@ END_FUNCTION art_quick_string_compareto
DEFINE_FUNCTION art_nested_signal_return
SETUP_GOT_NOSAVE ebx // sets %ebx for call into PLT
movl LITERAL(1), %ecx
- pushl %ecx // second arg to longjmp (1)
- pushl %eax // first arg to longjmp (jmp_buf)
+ PUSH ecx // second arg to longjmp (1)
+ PUSH eax // first arg to longjmp (jmp_buf)
call PLT_SYMBOL(longjmp)
int3 // won't get here.
END_FUNCTION art_nested_signal_return
diff --git a/runtime/art_method.cc b/runtime/art_method.cc
index 16c099d..8619503 100644
--- a/runtime/art_method.cc
+++ b/runtime/art_method.cc
@@ -182,29 +182,32 @@ uint32_t ArtMethod::ToDexPc(const uintptr_t pc, bool abort_on_failure) {
uint32_t sought_offset = pc - reinterpret_cast<uintptr_t>(entry_point);
if (IsOptimized(sizeof(void*))) {
CodeInfo code_info = GetOptimizedCodeInfo();
- return code_info.GetStackMapForNativePcOffset(sought_offset).GetDexPc(code_info);
- }
-
- MappingTable table(entry_point != nullptr ?
- GetMappingTable(EntryPointToCodePointer(entry_point), sizeof(void*)) : nullptr);
- if (table.TotalSize() == 0) {
- // NOTE: Special methods (see Mir2Lir::GenSpecialCase()) have an empty mapping
- // but they have no suspend checks and, consequently, we never call ToDexPc() for them.
- DCHECK(IsNative() || IsCalleeSaveMethod() || IsProxyMethod()) << PrettyMethod(this);
- return DexFile::kDexNoIndex; // Special no mapping case
- }
- // Assume the caller wants a pc-to-dex mapping so check here first.
- typedef MappingTable::PcToDexIterator It;
- for (It cur = table.PcToDexBegin(), end = table.PcToDexEnd(); cur != end; ++cur) {
- if (cur.NativePcOffset() == sought_offset) {
- return cur.DexPc();
+ StackMap stack_map = code_info.GetStackMapForNativePcOffset(sought_offset);
+ if (stack_map.IsValid()) {
+ return stack_map.GetDexPc(code_info);
}
- }
- // Now check dex-to-pc mappings.
- typedef MappingTable::DexToPcIterator It2;
- for (It2 cur = table.DexToPcBegin(), end = table.DexToPcEnd(); cur != end; ++cur) {
- if (cur.NativePcOffset() == sought_offset) {
- return cur.DexPc();
+ } else {
+ MappingTable table(entry_point != nullptr ?
+ GetMappingTable(EntryPointToCodePointer(entry_point), sizeof(void*)) : nullptr);
+ if (table.TotalSize() == 0) {
+ // NOTE: Special methods (see Mir2Lir::GenSpecialCase()) have an empty mapping
+ // but they have no suspend checks and, consequently, we never call ToDexPc() for them.
+ DCHECK(IsNative() || IsCalleeSaveMethod() || IsProxyMethod()) << PrettyMethod(this);
+ return DexFile::kDexNoIndex; // Special no mapping case
+ }
+ // Assume the caller wants a pc-to-dex mapping so check here first.
+ typedef MappingTable::PcToDexIterator It;
+ for (It cur = table.PcToDexBegin(), end = table.PcToDexEnd(); cur != end; ++cur) {
+ if (cur.NativePcOffset() == sought_offset) {
+ return cur.DexPc();
+ }
+ }
+ // Now check dex-to-pc mappings.
+ typedef MappingTable::DexToPcIterator It2;
+ for (It2 cur = table.DexToPcBegin(), end = table.DexToPcEnd(); cur != end; ++cur) {
+ if (cur.NativePcOffset() == sought_offset) {
+ return cur.DexPc();
+ }
}
}
if (abort_on_failure) {
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index f0b7bfd..5bd6583 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -471,7 +471,8 @@ ArtMethod* Class::FindDirectMethod(
ArtMethod* Class::FindDeclaredVirtualMethod(const StringPiece& name, const StringPiece& signature,
size_t pointer_size) {
for (auto& method : GetVirtualMethods(pointer_size)) {
- if (name == method.GetName() && method.GetSignature() == signature) {
+ ArtMethod* const np_method = method.GetInterfaceMethodIfProxy(pointer_size);
+ if (name == np_method->GetName() && np_method->GetSignature() == signature) {
return &method;
}
}
@@ -481,7 +482,8 @@ ArtMethod* Class::FindDeclaredVirtualMethod(const StringPiece& name, const Strin
ArtMethod* Class::FindDeclaredVirtualMethod(const StringPiece& name, const Signature& signature,
size_t pointer_size) {
for (auto& method : GetVirtualMethods(pointer_size)) {
- if (name == method.GetName() && signature == method.GetSignature()) {
+ ArtMethod* const np_method = method.GetInterfaceMethodIfProxy(pointer_size);
+ if (name == np_method->GetName() && signature == np_method->GetSignature()) {
return &method;
}
}
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index 6cc1709..71e38ff 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -681,8 +681,12 @@ class DexRegisterMap {
*/
class StackMap {
public:
+ StackMap() {}
+
explicit StackMap(MemoryRegion region) : region_(region) {}
+ bool IsValid() const { return region_.pointer() != nullptr; }
+
uint32_t GetDexPc(const CodeInfo& info) const;
void SetDexPc(const CodeInfo& info, uint32_t dex_pc);
@@ -975,8 +979,7 @@ class CodeInfo {
return stack_map;
}
}
- LOG(FATAL) << "Unreachable";
- UNREACHABLE();
+ return StackMap();
}
StackMap GetStackMapForNativePcOffset(uint32_t native_pc_offset) const {
@@ -987,8 +990,7 @@ class CodeInfo {
return stack_map;
}
}
- LOG(FATAL) << "Unreachable";
- UNREACHABLE();
+ return StackMap();
}
void Dump(std::ostream& os, uint16_t number_of_dex_registers) const;
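(Not part of the diff: a minimal sketch of how callers are expected to use the invalid-StackMap sentinel introduced above, assuming the CodeInfo/StackMap API shown in this hunk; the helper name DexPcForNativeOffset and its parameters are illustrative only.)

    // Hypothetical helper mirroring the pattern now used in ArtMethod::ToDexPc().
    // GetStackMapForNativePcOffset() returns a default-constructed StackMap
    // (null region) instead of aborting, so callers check IsValid() before use.
    uint32_t DexPcForNativeOffset(const CodeInfo& code_info, uint32_t sought_offset) {
      StackMap stack_map = code_info.GetStackMapForNativePcOffset(sought_offset);
      if (stack_map.IsValid()) {
        return stack_map.GetDexPc(code_info);
      }
      return DexFile::kDexNoIndex;  // No mapping found; caller decides how to report it.
    }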
diff --git a/test/004-JniTest/jni_test.cc b/test/004-JniTest/jni_test.cc
index ca256ec..db0dd32 100644
--- a/test/004-JniTest/jni_test.cc
+++ b/test/004-JniTest/jni_test.cc
@@ -626,3 +626,7 @@ extern "C" JNIEXPORT void JNICALL Java_Main_testNewStringObject(JNIEnv* env, jcl
assert(strcmp(test_array, chars6) == 0);
env->ReleaseStringUTFChars(s6, chars6);
}
+
+extern "C" JNIEXPORT jlong JNICALL Java_Main_testGetMethodID(JNIEnv* env, jclass, jclass c) {
+ return reinterpret_cast<jlong>(env->GetMethodID(c, "a", "()V"));
+}
diff --git a/test/004-JniTest/src/Main.java b/test/004-JniTest/src/Main.java
index a81ec6d..decefac 100644
--- a/test/004-JniTest/src/Main.java
+++ b/test/004-JniTest/src/Main.java
@@ -14,7 +14,9 @@
* limitations under the License.
*/
+import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
+import java.lang.reflect.Proxy;
public class Main {
public static void main(String[] args) {
@@ -35,6 +37,7 @@ public class Main {
testCallNonvirtual();
testNewStringObject();
testRemoveLocalObject();
+ testProxyGetMethodID();
}
private static native void testFindClassOnAttachedNativeThread();
@@ -194,6 +197,31 @@ public class Main {
private static native void testCallNonvirtual();
private static native void testNewStringObject();
+
+ private interface SimpleInterface {
+ void a();
+ }
+
+ private static class DummyInvocationHandler implements InvocationHandler {
+ public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
+ return null;
+ }
+ }
+
+ private static void testProxyGetMethodID() {
+ InvocationHandler handler = new DummyInvocationHandler();
+ SimpleInterface proxy =
+ (SimpleInterface) Proxy.newProxyInstance(SimpleInterface.class.getClassLoader(),
+ new Class[] {SimpleInterface.class}, handler);
+ if (testGetMethodID(SimpleInterface.class) == 0) {
+ throw new AssertionError();
+ }
+ if (testGetMethodID(proxy.getClass()) == 0) {
+ throw new AssertionError();
+ }
+ }
+
+ private static native long testGetMethodID(Class<?> c);
}
class JniCallNonvirtualTest {
diff --git a/test/Android.run-test.mk b/test/Android.run-test.mk
index b1edc3b..cf4c610 100644
--- a/test/Android.run-test.mk
+++ b/test/Android.run-test.mk
@@ -453,20 +453,6 @@ endif
TEST_ART_BROKEN_OPTIMIZING_ARM64_RUN_TESTS :=
-# Known broken tests for the MIPS64 optimizing compiler backend in 64-bit mode. b/21555893
-TEST_ART_BROKEN_OPTIMIZING_MIPS64_64BIT_RUN_TESTS := \
- 449-checker-bce
-
-ifeq ($(TARGET_ARCH),mips64)
- ifneq (,$(filter optimizing,$(COMPILER_TYPES)))
- ART_TEST_KNOWN_BROKEN += $(call all-run-test-names,target,$(RUN_TYPES),$(PREBUILD_TYPES), \
- optimizing,$(RELOCATE_TYPES),$(TRACE_TYPES),$(GC_TYPES),$(JNI_TYPES), \
- $(IMAGE_TYPES),$(PICTEST_TYPES),$(DEBUGGABLE_TYPES),$(TEST_ART_BROKEN_OPTIMIZING_MIPS64_64BIT_RUN_TESTS),64)
- endif
-endif
-
-TEST_ART_BROKEN_OPTIMIZING_MIPS64_64BIT_RUN_TESTS :=
-
# Known broken tests for the optimizing compiler.
TEST_ART_BROKEN_OPTIMIZING_RUN_TESTS :=