author     Sebastien Hertz <shertz@google.com>  2013-09-10 11:44:19 +0200
committer  Sebastien Hertz <shertz@google.com>  2013-09-30 08:57:33 +0200
commit     c61124bdeaae94f977ffc36ac69535e792c226f2 (patch)
tree       41ba8619a6dea88a8c607836db21531b05a3787a /runtime/interpreter/interpreter_common.h
parent     7541c745e00b49983f277d6b6b18bc4c174c7e39 (diff)
Cleanup invoke in interpreter.
Some cleanup in invocation stuff:
- Get the number of invoke arguments from the instruction (vA) rather than from its code item. This benefits native invokes since we no longer need to parse the method's shorty. Also pass the low 16 bits of the instruction to avoid fetching it twice when reading vA (illustrated in the first sketch below).
- Remove "is_static" tests by taking advantage of the invoke type template argument rather than testing the method's access flags (see the second sketch, after the diffstat).
- Ensure Instruction::GetArgs is inlined.
- Check for a pending exception when initializing the method's class while transitioning from the interpreter to compiled code (artInterpreterToCompiledCodeBridge).
- Move the UnstartedRuntimeInvoke function to interpreter_common.cc and make it static, as it is only used by the DoInvoke and DoInvokeVirtualQuick functions.
- Avoid duplicating code in ShadowFrame::Create.

Performance remains the same according to benchmarks. Hopefully, this will be addressed in upcoming CLs, especially by improving new shadow frame initialization.

Bug: 10668955
Change-Id: I514b8f098d0ef3e35921ceb770383aac1a9c7902
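For illustration only (not part of the commit): a minimal C++ sketch of what "getting the number of invoke arguments from the instruction (vA)" means for a 35c-format invoke, where vA lives in the top nibble of the first 16-bit code unit (inst_data). The helper name and the sample code unit are hypothetical stand-ins, not the ART API.

#include <cstdint>
#include <cstdio>

// In the 35c instruction format (A|G|op BBBB F|E|D|C), the argument count vA
// occupies bits 12..15 of the first 16-bit code unit.
static inline uint8_t ArgCountFrom35c(uint16_t inst_data) {
  return static_cast<uint8_t>(inst_data >> 12);
}

int main() {
  // Hypothetical first code unit of "invoke-virtual {v0, v1, v2}, ...":
  // opcode 0x6e in the low byte, vA = 3 in the top nibble.
  const uint16_t inst_data = 0x306e;
  std::printf("invoke argument count (vA) = %u\n", ArgCountFrom35c(inst_data));
  return 0;
}

Passing this already-fetched code unit (inst_data) alongside the Instruction pointer is what lets the DoInvoke helpers below read vA-style fields without re-fetching the first code unit from the dex instruction stream.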
Diffstat (limited to 'runtime/interpreter/interpreter_common.h')
-rw-r--r--  runtime/interpreter/interpreter_common.h  79
1 file changed, 65 insertions, 14 deletions
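Also for illustration (hypothetical names, not the ART classes): how an InvokeType template parameter lets the compiler resolve the static/instance distinction at compile time, so DoInvoke-style code needs no runtime "is_static" access-flag test when fetching the receiver.

#include <cstdio>

enum InvokeType { kStatic, kDirect, kVirtual, kSuper, kInterface };

// Stand-in for a shadow frame holding virtual-register references.
struct FrameStub {
  void* regs[8] = {};
  void* GetVRegReference(unsigned vreg) const { return regs[vreg]; }
};

template <InvokeType type>
void* GetReceiver(const FrameStub& frame, unsigned vregC) {
  // "type" is a compile-time constant: for kStatic the branch folds away and
  // no receiver load (or access-flag check) is emitted at all.
  return (type == kStatic) ? nullptr : frame.GetVRegReference(vregC);
}

int main() {
  FrameStub frame;
  int receiver_obj = 42;
  frame.regs[1] = &receiver_obj;
  std::printf("kStatic  receiver: %p\n", GetReceiver<kStatic>(frame, 1));
  std::printf("kVirtual receiver: %p\n", GetReceiver<kVirtual>(frame, 1));
  return 0;
}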
diff --git a/runtime/interpreter/interpreter_common.h b/runtime/interpreter/interpreter_common.h
index 3ad1935..80502b4 100644
--- a/runtime/interpreter/interpreter_common.h
+++ b/runtime/interpreter/interpreter_common.h
@@ -71,7 +71,7 @@ template<bool do_access_check>
extern JValue ExecuteSwitchImpl(Thread* self, MethodHelper& mh,
const DexFile::CodeItem* code_item,
ShadowFrame& shadow_frame, JValue result_register)
- NO_THREAD_SAFETY_ANALYSIS __attribute__((hot));
+ NO_THREAD_SAFETY_ANALYSIS HOT_ATTR;
// TODO: should be SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) which is failing due to template
// specialization.
@@ -79,12 +79,7 @@ template<bool do_access_check>
extern JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh,
const DexFile::CodeItem* code_item,
ShadowFrame& shadow_frame, JValue result_register)
- NO_THREAD_SAFETY_ANALYSIS __attribute__((hot));
-
-void UnstartedRuntimeInvoke(Thread* self, MethodHelper& mh,
- const DexFile::CodeItem* code_item, ShadowFrame* shadow_frame,
- JValue* result, size_t arg_offset)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ NO_THREAD_SAFETY_ANALYSIS HOT_ATTR;
static inline void DoMonitorEnter(Thread* self, Object* ref) NO_THREAD_SAFETY_ANALYSIS {
ref->MonitorEnter(self);
@@ -96,16 +91,72 @@ static inline void DoMonitorExit(Thread* self, Object* ref) NO_THREAD_SAFETY_ANA
// TODO: should be SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) which is failing due to template
// specialization.
+template<bool is_range, bool do_assignability_check>
+bool DoCall(ArtMethod* method, Object* receiver, Thread* self, ShadowFrame& shadow_frame,
+ const Instruction* inst, uint16_t inst_data, JValue* result) NO_THREAD_SAFETY_ANALYSIS;
+
+// TODO: should be SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) which is failing due to template
+// specialization.
template<InvokeType type, bool is_range, bool do_access_check>
-bool DoInvoke(Thread* self, ShadowFrame& shadow_frame,
- const Instruction* inst, JValue* result) NO_THREAD_SAFETY_ANALYSIS;
+static bool DoInvoke(Thread* self, ShadowFrame& shadow_frame, const Instruction* inst,
+ uint16_t inst_data, JValue* result) NO_THREAD_SAFETY_ANALYSIS ALWAYS_INLINE;
+
+template<InvokeType type, bool is_range, bool do_access_check>
+static inline bool DoInvoke(Thread* self, ShadowFrame& shadow_frame, const Instruction* inst,
+ uint16_t inst_data, JValue* result) {
+ const uint32_t method_idx = (is_range) ? inst->VRegB_3rc() : inst->VRegB_35c();
+ const uint32_t vregC = (is_range) ? inst->VRegC_3rc() : inst->VRegC_35c();
+ Object* const receiver = (type == kStatic) ? NULL : shadow_frame.GetVRegReference(vregC);
+ ArtMethod* const method = FindMethodFromCode(method_idx, receiver, shadow_frame.GetMethod(), self,
+ do_access_check, type);
+ if (UNLIKELY(method == NULL)) {
+ CHECK(self->IsExceptionPending());
+ result->SetJ(0);
+ return false;
+ } else if (UNLIKELY(method->IsAbstract())) {
+ ThrowAbstractMethodError(method);
+ result->SetJ(0);
+ return false;
+ } else {
+ return DoCall<is_range, do_access_check>(method, receiver, self, shadow_frame, inst,
+ inst_data, result);
+ }
+}
// TODO: should be SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) which is failing due to template
// specialization.
template<bool is_range>
-bool DoInvokeVirtualQuick(Thread* self, ShadowFrame& shadow_frame,
- const Instruction* inst, JValue* result)
- NO_THREAD_SAFETY_ANALYSIS;
+static bool DoInvokeVirtualQuick(Thread* self, ShadowFrame& shadow_frame, const Instruction* inst,
+ uint16_t inst_data, JValue* result)
+ NO_THREAD_SAFETY_ANALYSIS ALWAYS_INLINE;
+
+template<bool is_range>
+static inline bool DoInvokeVirtualQuick(Thread* self, ShadowFrame& shadow_frame,
+ const Instruction* inst, uint16_t inst_data,
+ JValue* result) {
+ const uint32_t vregC = (is_range) ? inst->VRegC_3rc() : inst->VRegC_35c();
+ Object* const receiver = shadow_frame.GetVRegReference(vregC);
+ if (UNLIKELY(receiver == NULL)) {
+ // We lost the reference to the method index so we cannot get a more
+ // precise exception message.
+ ThrowNullPointerExceptionFromDexPC(shadow_frame.GetCurrentLocationForThrow());
+ return false;
+ }
+ const uint32_t vtable_idx = (is_range) ? inst->VRegB_3rc() : inst->VRegB_35c();
+ ArtMethod* const method = receiver->GetClass()->GetVTable()->GetWithoutChecks(vtable_idx);
+ if (UNLIKELY(method == NULL)) {
+ CHECK(self->IsExceptionPending());
+ result->SetJ(0);
+ return false;
+ } else if (UNLIKELY(method->IsAbstract())) {
+ ThrowAbstractMethodError(method);
+ result->SetJ(0);
+ return false;
+ } else {
+ // No need to check since we've been quickened.
+ return DoCall<is_range, false>(method, receiver, self, shadow_frame, inst, inst_data, result);
+ }
+}
// We use template functions to optimize compiler inlining process. Otherwise,
// some parts of the code (like a switch statement) which depend on a constant
@@ -283,11 +334,11 @@ static inline bool DoFieldPut(Thread* self, const ShadowFrame& shadow_frame,
// TODO: should be SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) which is failing due to template
// specialization.
template<Primitive::Type field_type>
-static bool DoIPutQuick(ShadowFrame& shadow_frame, const Instruction* inst, uint16_t inst_data)
+static bool DoIPutQuick(const ShadowFrame& shadow_frame, const Instruction* inst, uint16_t inst_data)
NO_THREAD_SAFETY_ANALYSIS ALWAYS_INLINE;
template<Primitive::Type field_type>
-static inline bool DoIPutQuick(ShadowFrame& shadow_frame, const Instruction* inst, uint16_t inst_data) {
+static inline bool DoIPutQuick(const ShadowFrame& shadow_frame, const Instruction* inst, uint16_t inst_data) {
Object* obj = shadow_frame.GetVRegReference(inst->VRegB_22c(inst_data));
if (UNLIKELY(obj == NULL)) {
// We lost the reference to the field index so we cannot get a more