path: root/runtime/instrumentation.h
author     Mathieu Chartier <mathieuc@google.com>    2015-04-22 13:56:20 -0700
committer  Mathieu Chartier <mathieuc@google.com>    2015-06-02 09:21:27 -0700
commit     3d21bdf8894e780d349c481e5c9e29fe1556051c (patch)
tree       61a5231f36c0dabd73457fec81df103462a05aff /runtime/instrumentation.h
parent     71f0a8a123fa27bdc857a98afebbaf0ed09dac15 (diff)
Move mirror::ArtMethod to native
Optimizing + quick tests are passing, devices boot.

TODO: Test and fix bugs in mips64.

Saves 16 bytes per most ArtMethod, 7.5MB reduction in system PSS.
Some of the savings are from removal of virtual methods and direct
methods object arrays.

Bug: 19264997

(cherry picked from commit e401d146407d61eeb99f8d6176b2ac13c4df1e33)

Change-Id: I622469a0cfa0e7082a2119f3d6a9491eb61e3f3d

Fix some ArtMethod related bugs

Added root visiting for runtime methods, not currently required since
the GcRoots in these methods are null.

Added missing GetInterfaceMethodIfProxy in GetMethodLine, fixes
--trace run-tests 005, 044.

Fixed optimizing compiler bug where we used a normal stack location
instead of double on ARM64, this fixes the debuggable tests.

TODO: Fix JDWP tests.

Bug: 19264997
Change-Id: I7c55f69c61d1b45351fd0dc7185ffe5efad82bd3

ART: Fix casts for 64-bit pointers on 32-bit compiler.

Bug: 19264997
Change-Id: Ief45cdd4bae5a43fc8bfdfa7cf744e2c57529457

Fix JDWP tests after ArtMethod change

Fixes Throwable::GetStackDepth for exception event detection after
internal stack trace representation change.

Adds missing ArtMethod::GetInterfaceMethodIfProxy call in case of
proxy method.

Bug: 19264997
Change-Id: I363e293796848c3ec491c963813f62d868da44d2

Fix accidental IMT and root marking regression

Was always using the conflict trampoline. Also included fix for
regression in GC time caused by extra roots. Most of the regression
was IMT.

Fixed bug in DumpGcPerformanceInfo where we would get SIGABRT due to
detached thread.

EvaluateAndApplyChanges: From ~2500 -> ~1980
GC time: 8.2s -> 7.2s due to 1s less of MarkConcurrentRoots

Bug: 19264997
Change-Id: I4333e80a8268c2ed1284f87f25b9f113d4f2c7e0

Fix bogus image test assert

Previously we were comparing the size of the non moving space to
size of the image file. Now we properly compare the size of the
image space against the size of the image file.

Bug: 19264997
Change-Id: I7359f1f73ae3df60c5147245935a24431c04808a

[MIPS64] Fix art_quick_invoke_stub argument offsets.

ArtMethod reference's size got bigger, so we need to move other args
and leave enough space for ArtMethod* and 'this' pointer.

This fixes mips64 boot.

Bug: 19264997
Change-Id: I47198d5f39a4caab30b3b77479d5eedaad5006ab
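The header-level effect of the move is visible in the diff below: every mirror::ArtMethod* in the instrumentation API becomes a native ArtMethod*, Instrumentation::VisitRoots is dropped, and the set of debugger-deoptimized methods shrinks from a std::multimap<int32_t, GcRoot<mirror::ArtMethod>> (keyed by a hash and holding GcRoots the GC had to visit) to a plain std::unordered_set<ArtMethod*>. The following is a minimal standalone sketch of that bookkeeping change, not ART code: OpaqueMethod stands in for ArtMethod, and the real class additionally takes deoptimized_methods_lock_ and uses read-barrier-aware accessors that are omitted here.

// Standalone sketch (not ART code): models the deoptimized-method set
// after ArtMethod became a plain native object. "OpaqueMethod" is a
// stand-in for ArtMethod; locking and read barriers are omitted.
#include <cassert>
#include <unordered_set>

struct OpaqueMethod {};  // stand-in for the native ArtMethod

class DeoptimizedMethods {
 public:
  // Returns true if the method was newly inserted (cf. AddDeoptimizedMethod).
  bool Add(OpaqueMethod* method) { return methods_.insert(method).second; }

  // Returns true if the method was present and removed
  // (cf. RemoveDeoptimizedMethod).
  bool Remove(OpaqueMethod* method) { return methods_.erase(method) == 1u; }

  // cf. IsDeoptimizedMethod.
  bool Contains(OpaqueMethod* method) const {
    return methods_.find(method) != methods_.end();
  }

  // cf. IsDeoptimizedMethodsEmpty.
  bool Empty() const { return methods_.empty(); }

 private:
  // Previously a std::multimap<int32_t, GcRoot<mirror::ArtMethod>>; native
  // pointers need neither the hash key nor GC root visiting.
  std::unordered_set<OpaqueMethod*> methods_;
};

int main() {
  OpaqueMethod m;
  DeoptimizedMethods deopt;
  assert(deopt.Add(&m));
  assert(deopt.Contains(&m));
  assert(deopt.Remove(&m));
  assert(deopt.Empty());
  return 0;
}

Because raw native pointers are not GC references, the GcRoot wrappers and the Instrumentation::VisitRoots walk over this container are no longer needed, which is why the diff deletes that method outright. A second sketch after the diff illustrates the listener side of the same signature change.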
Diffstat (limited to 'runtime/instrumentation.h')
-rw-r--r--  runtime/instrumentation.h  85
1 file changed, 39 insertions, 46 deletions
diff --git a/runtime/instrumentation.h b/runtime/instrumentation.h
index 7d70d21..db8e9c2 100644
--- a/runtime/instrumentation.h
+++ b/runtime/instrumentation.h
@@ -19,7 +19,7 @@
#include <stdint.h>
#include <list>
-#include <map>
+#include <unordered_set>
#include "arch/instruction_set.h"
#include "base/macros.h"
@@ -29,12 +29,12 @@
namespace art {
namespace mirror {
- class ArtMethod;
class Class;
class Object;
class Throwable;
} // namespace mirror
class ArtField;
+class ArtMethod;
union JValue;
class Thread;
@@ -62,32 +62,32 @@ struct InstrumentationListener {
// Call-back for when a method is entered.
virtual void MethodEntered(Thread* thread, mirror::Object* this_object,
- mirror::ArtMethod* method,
+ ArtMethod* method,
uint32_t dex_pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0;
// Call-back for when a method is exited.
virtual void MethodExited(Thread* thread, mirror::Object* this_object,
- mirror::ArtMethod* method, uint32_t dex_pc,
+ ArtMethod* method, uint32_t dex_pc,
const JValue& return_value)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0;
// Call-back for when a method is popped due to an exception throw. A method will either cause a
// MethodExited call-back or a MethodUnwind call-back when its activation is removed.
virtual void MethodUnwind(Thread* thread, mirror::Object* this_object,
- mirror::ArtMethod* method, uint32_t dex_pc)
+ ArtMethod* method, uint32_t dex_pc)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0;
// Call-back for when the dex pc moves in a method.
virtual void DexPcMoved(Thread* thread, mirror::Object* this_object,
- mirror::ArtMethod* method, uint32_t new_dex_pc)
+ ArtMethod* method, uint32_t new_dex_pc)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0;
// Call-back for when we read from a field.
- virtual void FieldRead(Thread* thread, mirror::Object* this_object, mirror::ArtMethod* method,
+ virtual void FieldRead(Thread* thread, mirror::Object* this_object, ArtMethod* method,
uint32_t dex_pc, ArtField* field) = 0;
// Call-back for when we write into a field.
- virtual void FieldWritten(Thread* thread, mirror::Object* this_object, mirror::ArtMethod* method,
+ virtual void FieldWritten(Thread* thread, mirror::Object* this_object, ArtMethod* method,
uint32_t dex_pc, ArtField* field, const JValue& field_value) = 0;
// Call-back when an exception is caught.
@@ -95,7 +95,7 @@ struct InstrumentationListener {
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0;
// Call-back for when we get a backward branch.
- virtual void BackwardBranch(Thread* thread, mirror::ArtMethod* method, int32_t dex_pc_offset)
+ virtual void BackwardBranch(Thread* thread, ArtMethod* method, int32_t dex_pc_offset)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0;
};
@@ -162,19 +162,19 @@ class Instrumentation {
// Deoptimize a method by forcing its execution with the interpreter. Nevertheless, a static
// method (except a class initializer) set to the resolution trampoline will be deoptimized only
// once its declaring class is initialized.
- void Deoptimize(mirror::ArtMethod* method)
+ void Deoptimize(ArtMethod* method)
LOCKS_EXCLUDED(Locks::thread_list_lock_, deoptimized_methods_lock_)
EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
// Undeoptimize the method by restoring its entrypoints. Nevertheless, a static method
// (except a class initializer) set to the resolution trampoline will be updated only once its
// declaring class is initialized.
- void Undeoptimize(mirror::ArtMethod* method)
+ void Undeoptimize(ArtMethod* method)
LOCKS_EXCLUDED(Locks::thread_list_lock_, deoptimized_methods_lock_)
EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
// Indicates whether the method has been deoptimized so it is executed with the interpreter.
- bool IsDeoptimized(mirror::ArtMethod* method)
+ bool IsDeoptimized(ArtMethod* method)
LOCKS_EXCLUDED(deoptimized_methods_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -205,13 +205,13 @@ class Instrumentation {
void ResetQuickAllocEntryPoints() EXCLUSIVE_LOCKS_REQUIRED(Locks::runtime_shutdown_lock_);
// Update the code of a method respecting any installed stubs.
- void UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code)
+ void UpdateMethodsCode(ArtMethod* method, const void* quick_code)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Get the quick code for the given method. More efficient than asking the class linker as it
// will short-cut to GetCode if instrumentation and static method resolution stubs aren't
// installed.
- const void* GetQuickCodeFor(mirror::ArtMethod* method, size_t pointer_size) const
+ const void* GetQuickCodeFor(ArtMethod* method, size_t pointer_size) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void ForceInterpretOnly() {
@@ -273,7 +273,7 @@ class Instrumentation {
// Inform listeners that a method has been entered. A dex PC is provided as we may install
// listeners into executing code and get method enter events for methods already on the stack.
void MethodEnterEvent(Thread* thread, mirror::Object* this_object,
- mirror::ArtMethod* method, uint32_t dex_pc) const
+ ArtMethod* method, uint32_t dex_pc) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
if (UNLIKELY(HasMethodEntryListeners())) {
MethodEnterEventImpl(thread, this_object, method, dex_pc);
@@ -282,7 +282,7 @@ class Instrumentation {
// Inform listeners that a method has been exited.
void MethodExitEvent(Thread* thread, mirror::Object* this_object,
- mirror::ArtMethod* method, uint32_t dex_pc,
+ ArtMethod* method, uint32_t dex_pc,
const JValue& return_value) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
if (UNLIKELY(HasMethodExitListeners())) {
@@ -292,12 +292,12 @@ class Instrumentation {
// Inform listeners that a method has been exited due to an exception.
void MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
- mirror::ArtMethod* method, uint32_t dex_pc) const
+ ArtMethod* method, uint32_t dex_pc) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Inform listeners that the dex pc has moved (only supported by the interpreter).
void DexPcMovedEvent(Thread* thread, mirror::Object* this_object,
- mirror::ArtMethod* method, uint32_t dex_pc) const
+ ArtMethod* method, uint32_t dex_pc) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
if (UNLIKELY(HasDexPcListeners())) {
DexPcMovedEventImpl(thread, this_object, method, dex_pc);
@@ -305,7 +305,7 @@ class Instrumentation {
}
// Inform listeners that a backward branch has been taken (only supported by the interpreter).
- void BackwardBranch(Thread* thread, mirror::ArtMethod* method, int32_t offset) const
+ void BackwardBranch(Thread* thread, ArtMethod* method, int32_t offset) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
if (UNLIKELY(HasBackwardBranchListeners())) {
BackwardBranchImpl(thread, method, offset);
@@ -314,7 +314,7 @@ class Instrumentation {
// Inform listeners that we read a field (only supported by the interpreter).
void FieldReadEvent(Thread* thread, mirror::Object* this_object,
- mirror::ArtMethod* method, uint32_t dex_pc,
+ ArtMethod* method, uint32_t dex_pc,
ArtField* field) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
if (UNLIKELY(HasFieldReadListeners())) {
@@ -324,7 +324,7 @@ class Instrumentation {
// Inform listeners that we write a field (only supported by the interpreter).
void FieldWriteEvent(Thread* thread, mirror::Object* this_object,
- mirror::ArtMethod* method, uint32_t dex_pc,
+ ArtMethod* method, uint32_t dex_pc,
ArtField* field, const JValue& field_value) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
if (UNLIKELY(HasFieldWriteListeners())) {
@@ -339,7 +339,7 @@ class Instrumentation {
// Called when an instrumented method is entered. The intended link register (lr) is saved so
// that returning causes a branch to the method exit stub. Generates method enter events.
void PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
- mirror::ArtMethod* method, uintptr_t lr,
+ ArtMethod* method, uintptr_t lr,
bool interpreter_entry)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -356,12 +356,9 @@ class Instrumentation {
// Call back for configure stubs.
void InstallStubsForClass(mirror::Class* klass) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void InstallStubsForMethod(mirror::ArtMethod* method)
+ void InstallStubsForMethod(ArtMethod* method)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void VisitRoots(RootVisitor* visitor) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
- LOCKS_EXCLUDED(deoptimized_methods_lock_);
-
private:
InstrumentationLevel GetCurrentInstrumentationLevel() const;
@@ -384,42 +381,39 @@ class Instrumentation {
void SetEntrypointsInstrumented(bool instrumented) NO_THREAD_SAFETY_ANALYSIS;
void MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
- mirror::ArtMethod* method, uint32_t dex_pc) const
+ ArtMethod* method, uint32_t dex_pc) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
- mirror::ArtMethod* method,
+ ArtMethod* method,
uint32_t dex_pc, const JValue& return_value) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
- mirror::ArtMethod* method, uint32_t dex_pc) const
+ ArtMethod* method, uint32_t dex_pc) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void BackwardBranchImpl(Thread* thread, mirror::ArtMethod* method, int32_t offset) const
+ void BackwardBranchImpl(Thread* thread, ArtMethod* method, int32_t offset) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
- mirror::ArtMethod* method, uint32_t dex_pc,
+ ArtMethod* method, uint32_t dex_pc,
ArtField* field) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
- mirror::ArtMethod* method, uint32_t dex_pc,
+ ArtMethod* method, uint32_t dex_pc,
ArtField* field, const JValue& field_value) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Read barrier-aware utility functions for accessing deoptimized_methods_
- bool AddDeoptimizedMethod(mirror::ArtMethod* method)
+ bool AddDeoptimizedMethod(ArtMethod* method)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
EXCLUSIVE_LOCKS_REQUIRED(deoptimized_methods_lock_);
- bool FindDeoptimizedMethod(mirror::ArtMethod* method)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
- SHARED_LOCKS_REQUIRED(deoptimized_methods_lock_);
- bool RemoveDeoptimizedMethod(mirror::ArtMethod* method)
+ bool IsDeoptimizedMethod(ArtMethod* method)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, deoptimized_methods_lock_);
+ bool RemoveDeoptimizedMethod(ArtMethod* method)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
EXCLUSIVE_LOCKS_REQUIRED(deoptimized_methods_lock_);
- mirror::ArtMethod* BeginDeoptimizedMethod()
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
- SHARED_LOCKS_REQUIRED(deoptimized_methods_lock_);
+ ArtMethod* BeginDeoptimizedMethod()
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, deoptimized_methods_lock_);
bool IsDeoptimizedMethodsEmpty() const
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
- SHARED_LOCKS_REQUIRED(deoptimized_methods_lock_);
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, deoptimized_methods_lock_);
// Have we hijacked ArtMethod::code_ so that it calls instrumentation/interpreter code?
bool instrumentation_stubs_installed_;
@@ -488,8 +482,7 @@ class Instrumentation {
// The set of methods being deoptimized (by the debugger) which must be executed with interpreter
// only.
mutable ReaderWriterMutex deoptimized_methods_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
- std::multimap<int32_t, GcRoot<mirror::ArtMethod>> deoptimized_methods_
- GUARDED_BY(deoptimized_methods_lock_);
+ std::unordered_set<ArtMethod*> deoptimized_methods_ GUARDED_BY(deoptimized_methods_lock_);
bool deoptimization_enabled_;
// Current interpreter handler table. This is updated each time the thread state flags are
@@ -509,7 +502,7 @@ std::ostream& operator<<(std::ostream& os, const Instrumentation::Instrumentatio
// An element in the instrumentation side stack maintained in art::Thread.
struct InstrumentationStackFrame {
- InstrumentationStackFrame(mirror::Object* this_object, mirror::ArtMethod* method,
+ InstrumentationStackFrame(mirror::Object* this_object, ArtMethod* method,
uintptr_t return_pc, size_t frame_id, bool interpreter_entry)
: this_object_(this_object), method_(method), return_pc_(return_pc), frame_id_(frame_id),
interpreter_entry_(interpreter_entry) {
@@ -518,7 +511,7 @@ struct InstrumentationStackFrame {
std::string Dump() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
mirror::Object* this_object_;
- mirror::ArtMethod* method_;
+ ArtMethod* method_;
uintptr_t return_pc_;
size_t frame_id_;
bool interpreter_entry_;
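For illustration of how the new listener signatures are consumed, here is a self-contained sketch against a simplified stand-in of InstrumentationListener, not the real header: only MethodEntered/MethodExited are modeled, and the JValue return value, the remaining callbacks (MethodUnwind, DexPcMoved, FieldRead, FieldWritten, ExceptionCaught, BackwardBranch), and the SHARED_LOCKS_REQUIRED annotations are omitted. Object, ArtMethod, and Thread are opaque stand-in types.

// Standalone sketch (not ART code): a logging listener against a
// simplified stand-in interface mirroring the post-change shape,
// where methods arrive as native ArtMethod* rather than
// mirror::ArtMethod*. Lock annotations and most callbacks omitted.
#include <cstdint>
#include <cstdio>

struct Object {};     // stand-in for mirror::Object
struct ArtMethod {};  // stand-in for the native ArtMethod
class Thread;         // opaque

struct SimpleListener {
  virtual ~SimpleListener() {}
  virtual void MethodEntered(Thread* thread, Object* this_object,
                             ArtMethod* method, uint32_t dex_pc) = 0;
  virtual void MethodExited(Thread* thread, Object* this_object,
                            ArtMethod* method, uint32_t dex_pc) = 0;
};

struct LoggingListener : public SimpleListener {
  void MethodEntered(Thread* /*thread*/, Object* /*this_object*/,
                     ArtMethod* method, uint32_t dex_pc) override {
    std::printf("enter method %p at dex pc %u\n",
                static_cast<void*>(method), static_cast<unsigned>(dex_pc));
  }
  void MethodExited(Thread* /*thread*/, Object* /*this_object*/,
                    ArtMethod* method, uint32_t dex_pc) override {
    std::printf("exit method %p at dex pc %u\n",
                static_cast<void*>(method), static_cast<unsigned>(dex_pc));
  }
};

int main() {
  LoggingListener listener;
  ArtMethod fake_method;
  listener.MethodEntered(nullptr, nullptr, &fake_method, 0u);
  listener.MethodExited(nullptr, nullptr, &fake_method, 3u);
  return 0;
}

The point mirrored from the diff is purely the parameter type: listeners now receive the method as a native ArtMethod* instead of a managed mirror::ArtMethod*, so method identity no longer goes through a GC-managed object.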