path: root/runtime/entrypoints/entrypoint_utils.cc
author    Hiroshi Yamauchi <yamauchi@google.com>  2013-09-12 21:33:12 -0700
committer Hiroshi Yamauchi <yamauchi@google.com>  2013-09-25 20:28:49 -0700
commit    3b4c18933c24b8a33f38573c2ebcdb9aa16efeb5 (patch)
tree      5298ccd9c1f1f6b329c0cb6cefac6a8df43dd633 /runtime/entrypoints/entrypoint_utils.cc
parent    f7e090ebcded6d6693894c018d89c4add79253ff (diff)
Split the allocation path into 'instrumented' and 'uninstrumented' ones.

The instrumented path is equivalent to the existing allocation path, which checks for three instrumentation mechanisms (debugger allocation tracking, runtime allocation stats collection, and valgrind) on every allocation. The uninstrumented path skips these checks. The uninstrumented path is used by default; the instrumented path is enabled only when at least one of the three mechanisms is active. The uninstrumented version of Heap::AllocObject() is inlined.

This change improves the Ritz MemAllocTest by ~4% on Nexus 4 and ~3% on Host/x86.

Bug: 9986565
Change-Id: I3e68dfff6789d77bbdcea98457b694e1b5fcef5f
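The core idea is to keep a check-free allocation fast path as the default and fall back to the checking path only while some instrumentation mechanism is enabled. A minimal sketch of that dispatch follows; the Heap class, the instrumented_ flag, and the method names here are hypothetical stand-ins for illustration, not ART's actual implementation.

// Minimal sketch of the default-uninstrumented dispatch, assuming made-up names.
#include <atomic>
#include <cstddef>
#include <cstdio>

class Heap {
 public:
  void* AllocObject(size_t num_bytes) {
    // Fast path: no instrumentation mechanism is active, so skip the
    // per-allocation checks entirely (this is the path worth inlining).
    if (!instrumented_.load(std::memory_order_relaxed)) {
      return AllocObjectUninstrumented(num_bytes);
    }
    return AllocObjectInstrumented(num_bytes);
  }

  // Toggled when allocation tracking, stats collection, or valgrind
  // support is turned on or off (hypothetical hook).
  void SetInstrumented(bool enabled) {
    instrumented_.store(enabled, std::memory_order_relaxed);
  }

 private:
  void* AllocObjectUninstrumented(size_t num_bytes) {
    return ::operator new(num_bytes);  // plain allocation, no extra checks
  }

  void* AllocObjectInstrumented(size_t num_bytes) {
    void* obj = ::operator new(num_bytes);
    // Stand-in for the tracking/stats/valgrind hooks the real path performs.
    std::printf("tracked alloc of %zu bytes at %p\n", num_bytes, obj);
    return obj;
  }

  std::atomic<bool> instrumented_{false};  // uninstrumented by default
};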
Diffstat (limited to 'runtime/entrypoints/entrypoint_utils.cc')
-rw-r--r--  runtime/entrypoints/entrypoint_utils.cc | 57
1 files changed, 39 insertions, 18 deletions
diff --git a/runtime/entrypoints/entrypoint_utils.cc b/runtime/entrypoints/entrypoint_utils.cc
index 52f8c81..d9c9e31 100644
--- a/runtime/entrypoints/entrypoint_utils.cc
+++ b/runtime/entrypoints/entrypoint_utils.cc
@@ -33,20 +33,20 @@
namespace art {
-// Helper function to allocate array for FILLED_NEW_ARRAY.
-mirror::Array* CheckAndAllocArrayFromCode(uint32_t type_idx, mirror::ArtMethod* referrer,
- int32_t component_count, Thread* self,
- bool access_check) {
+static inline bool CheckFilledNewArrayAlloc(uint32_t type_idx, mirror::ArtMethod* referrer,
+ int32_t component_count, Thread* self,
+ bool access_check, mirror::Class** klass_ptr)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
if (UNLIKELY(component_count < 0)) {
ThrowNegativeArraySizeException(component_count);
- return NULL; // Failure
+ return false; // Failure
}
- mirror::Class* klass = referrer->GetDexCacheResolvedTypes()->Get(type_idx);
+ mirror::Class* klass = referrer->GetDexCacheResolvedTypes()->GetWithoutChecks(type_idx);
if (UNLIKELY(klass == NULL)) { // Not in dex cache so try to resolve
klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, referrer);
if (klass == NULL) { // Error
DCHECK(self->IsExceptionPending());
- return NULL; // Failure
+ return false; // Failure
}
}
if (UNLIKELY(klass->IsPrimitive() && !klass->IsPrimitiveInt())) {
@@ -60,18 +60,40 @@ mirror::Array* CheckAndAllocArrayFromCode(uint32_t type_idx, mirror::ArtMethod*
"Found type %s; filled-new-array not implemented for anything but \'int\'",
PrettyDescriptor(klass).c_str());
}
- return NULL; // Failure
- } else {
- if (access_check) {
- mirror::Class* referrer_klass = referrer->GetDeclaringClass();
- if (UNLIKELY(!referrer_klass->CanAccess(klass))) {
- ThrowIllegalAccessErrorClass(referrer_klass, klass);
- return NULL; // Failure
- }
+ return false; // Failure
+ }
+ if (access_check) {
+ mirror::Class* referrer_klass = referrer->GetDeclaringClass();
+ if (UNLIKELY(!referrer_klass->CanAccess(klass))) {
+ ThrowIllegalAccessErrorClass(referrer_klass, klass);
+ return false; // Failure
}
- DCHECK(klass->IsArrayClass()) << PrettyClass(klass);
- return mirror::Array::Alloc(self, klass, component_count);
}
+ DCHECK(klass->IsArrayClass()) << PrettyClass(klass);
+ *klass_ptr = klass;
+ return true;
+}
+
+// Helper function to allocate array for FILLED_NEW_ARRAY.
+mirror::Array* CheckAndAllocArrayFromCode(uint32_t type_idx, mirror::ArtMethod* referrer,
+ int32_t component_count, Thread* self,
+ bool access_check) {
+ mirror::Class* klass;
+ if (UNLIKELY(!CheckFilledNewArrayAlloc(type_idx, referrer, component_count, self, access_check, &klass))) {
+ return NULL;
+ }
+ return mirror::Array::AllocUninstrumented(self, klass, component_count);
+}
+
+// Helper function to allocate array for FILLED_NEW_ARRAY.
+mirror::Array* CheckAndAllocArrayFromCodeInstrumented(uint32_t type_idx, mirror::ArtMethod* referrer,
+ int32_t component_count, Thread* self,
+ bool access_check) {
+ mirror::Class* klass;
+ if (UNLIKELY(!CheckFilledNewArrayAlloc(type_idx, referrer, component_count, self, access_check, &klass))) {
+ return NULL;
+ }
+ return mirror::Array::AllocInstrumented(self, klass, component_count);
}
mirror::ArtField* FindFieldFromCode(uint32_t field_idx, const mirror::ArtMethod* referrer,
@@ -405,5 +427,4 @@ JValue InvokeProxyInvocationHandler(ScopedObjectAccessUnchecked& soa, const char
return zero;
}
}
-
} // namespace art
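The refactoring pattern in the diff, one shared inline checker plus two thin entrypoints that differ only in the allocator they call, can also be shown in a self-contained form. The sketch below uses made-up Class/Array types and stubbed allocators; it mirrors the structure of CheckFilledNewArrayAlloc and the two CheckAndAllocArrayFromCode* wrappers but is not ART code.

// Self-contained analogue of the patch's structure, with hypothetical types.
#include <cstdint>
#include <cstdio>

struct Class { bool is_int_array; };
struct Array { Class* klass; int32_t length; };

static Array* AllocUninstrumented(Class* klass, int32_t n) {
  return new Array{klass, n};  // no per-allocation bookkeeping
}

static Array* AllocInstrumented(Class* klass, int32_t n) {
  // Stand-in for allocation tracking / stats hooks.
  std::printf("tracked array alloc of %d elements\n", static_cast<int>(n));
  return new Array{klass, n};
}

// Shared validation: returns false on failure, otherwise hands the
// resolved class back through the out-parameter, as in the patch.
static inline bool CheckFilledNewArrayAlloc(Class* resolved, int32_t count,
                                            Class** klass_ptr) {
  if (count < 0) {
    return false;  // would throw NegativeArraySizeException
  }
  if (!resolved->is_int_array) {
    return false;  // filled-new-array only handles int[] here
  }
  *klass_ptr = resolved;
  return true;
}

Array* CheckAndAllocArray(Class* resolved, int32_t count) {
  Class* klass;
  if (!CheckFilledNewArrayAlloc(resolved, count, &klass)) {
    return nullptr;
  }
  return AllocUninstrumented(klass, count);
}

Array* CheckAndAllocArrayInstrumented(Class* resolved, int32_t count) {
  Class* klass;
  if (!CheckFilledNewArrayAlloc(resolved, count, &klass)) {
    return nullptr;
  }
  return AllocInstrumented(klass, count);
}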