summaryrefslogtreecommitdiffstats
path: root/runtime/jit
diff options
context:
space:
mode:
authorMathieu Chartier <mathieuc@google.com>2015-03-23 12:37:35 -0700
committerMathieu Chartier <mathieuc@google.com>2015-03-23 13:00:14 -0700
commitbce416f7f22f1e73250f020be1178a1c7db72330 (patch)
treecb9503e1ca6457468924cf4d50210f894af2b2c3 /runtime/jit
parentfc06816cc25f80cc6c4c5d002e5d3bced242a9ee (diff)
downloadart-bce416f7f22f1e73250f020be1178a1c7db72330.zip
art-bce416f7f22f1e73250f020be1178a1c7db72330.tar.gz
art-bce416f7f22f1e73250f020be1178a1c7db72330.tar.bz2
Add code cache test
Bug: 17950037 Change-Id: I13913667517db5bb9b7224f0639c2b39cf3a1973
Diffstat (limited to 'runtime/jit')
-rw-r--r--runtime/jit/jit_code_cache.h24
-rw-r--r--runtime/jit/jit_code_cache_test.cc103
2 files changed, 126 insertions, 1 deletions
diff --git a/runtime/jit/jit_code_cache.h b/runtime/jit/jit_code_cache.h
index aa8c717..8a20e39 100644
--- a/runtime/jit/jit_code_cache.h
+++ b/runtime/jit/jit_code_cache.h
@@ -47,33 +47,50 @@ class JitCodeCache {
static constexpr size_t kMaxCapacity = 1 * GB;
static constexpr size_t kDefaultCapacity = 2 * MB;
+ // Create the code cache with a code + data capacity equal to "capacity", error message is passed
+ // in the out arg error_msg.
static JitCodeCache* Create(size_t capacity, std::string* error_msg);
const uint8_t* CodeCachePtr() const {
return code_cache_ptr_;
}
+
size_t CodeCacheSize() const {
return code_cache_ptr_ - code_cache_begin_;
}
+
size_t CodeCacheRemain() const {
return code_cache_end_ - code_cache_ptr_;
}
+
+ const uint8_t* DataCachePtr() const {
+ return data_cache_ptr_;
+ }
+
size_t DataCacheSize() const {
return data_cache_ptr_ - data_cache_begin_;
}
+
size_t DataCacheRemain() const {
return data_cache_end_ - data_cache_ptr_;
}
+
size_t NumMethods() const {
return num_methods_;
}
+ // Return true if the code cache contains the code pointer which is the entrypoint of the method.
bool ContainsMethod(mirror::ArtMethod* method) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ // Return true if the code cache contains a code ptr.
bool ContainsCodePtr(const void* ptr) const;
+ // Reserve a region of code of size at least "size". Returns nullptr if there is no more room.
uint8_t* ReserveCode(Thread* self, size_t size) LOCKS_EXCLUDED(lock_);
+ // Add a data array of size (end - begin) with the associated contents, returns nullptr if there
+ // is no more room.
uint8_t* AddDataArray(Thread* self, const uint8_t* begin, const uint8_t* end)
LOCKS_EXCLUDED(lock_);
@@ -81,14 +98,19 @@ class JitCodeCache {
const void* GetCodeFor(mirror::ArtMethod* method)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) LOCKS_EXCLUDED(lock_);
+ // Save the compiled code for a method so that GetCodeFor(method) will return old_code_ptr if the
+ // entrypoint isn't within the cache.
void SaveCompiledCode(mirror::ArtMethod* method, const void* old_code_ptr)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) LOCKS_EXCLUDED(lock_);
private:
// Takes ownership of code_mem_map.
explicit JitCodeCache(MemMap* code_mem_map);
+
+ // Unimplemented, TODO: Determine if it is necessary.
void FlushInstructionCache();
+ // Lock which guards the cache's internal state (e.g. method_code_map_ below).
Mutex lock_;
// Mem map which holds code and data. We do this since we need to have 32 bit offsets from method
// headers in code cache which point to things in the data cache. If the maps are more than 4GB
@@ -106,7 +128,7 @@ class JitCodeCache {
// TODO: This relies on methods not moving.
// This map holds code for methods if they were deoptimized by the instrumentation stubs. This is
// required since we have to implement ClassLinker::GetQuickOatCodeFor for walking stacks.
- SafeMap<mirror::ArtMethod*, const void*> method_code_map_;
+ SafeMap<mirror::ArtMethod*, const void*> method_code_map_ GUARDED_BY(lock_);
DISALLOW_COPY_AND_ASSIGN(JitCodeCache);
};
diff --git a/runtime/jit/jit_code_cache_test.cc b/runtime/jit/jit_code_cache_test.cc
new file mode 100644
index 0000000..2155552
--- /dev/null
+++ b/runtime/jit/jit_code_cache_test.cc
@@ -0,0 +1,103 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "common_runtime_test.h"
+
+#include "class_linker.h"
+#include "jit_code_cache.h"
+#include "mirror/art_method-inl.h"
+#include "scoped_thread_state_change.h"
+#include "thread-inl.h"
+#include "utils.h"
+
+namespace art {
+namespace jit {
+
+class JitCodeCacheTest : public CommonRuntimeTest {
+ public:
+};
+
+TEST_F(JitCodeCacheTest, TestCoverage) {
+ std::string error_msg;
+ constexpr size_t kSize = 1 * MB;
+ std::unique_ptr<JitCodeCache> code_cache(
+ JitCodeCache::Create(kSize, &error_msg));
+ ASSERT_TRUE(code_cache.get() != nullptr) << error_msg;
+ ASSERT_TRUE(code_cache->CodeCachePtr() != nullptr);
+ ASSERT_EQ(code_cache->CodeCacheSize(), 0u);
+ ASSERT_GT(code_cache->CodeCacheRemain(), 0u);
+ ASSERT_TRUE(code_cache->DataCachePtr() != nullptr);
+ ASSERT_EQ(code_cache->DataCacheSize(), 0u);
+ ASSERT_GT(code_cache->DataCacheRemain(), 0u);
+ ASSERT_EQ(code_cache->CodeCacheRemain() + code_cache->DataCacheRemain(), kSize);
+ ASSERT_EQ(code_cache->NumMethods(), 0u);
+ ScopedObjectAccess soa(Thread::Current());
+ StackHandleScope<1> hs(soa.Self());
+ uint8_t* const reserved_code = code_cache->ReserveCode(soa.Self(), 4 * KB);
+ ASSERT_TRUE(reserved_code != nullptr);
+ ASSERT_TRUE(code_cache->ContainsCodePtr(reserved_code));
+ ASSERT_EQ(code_cache->NumMethods(), 1u);
+ ClassLinker* const cl = Runtime::Current()->GetClassLinker();
+ auto h_method = hs.NewHandle(cl->AllocArtMethod(soa.Self()));
+ ASSERT_FALSE(code_cache->ContainsMethod(h_method.Get()));
+ h_method->SetEntryPointFromQuickCompiledCode(reserved_code);
+ ASSERT_TRUE(code_cache->ContainsMethod(h_method.Get()));
+ ASSERT_EQ(code_cache->GetCodeFor(h_method.Get()), reserved_code);
+ // Save the code and then change it.
+ code_cache->SaveCompiledCode(h_method.Get(), reserved_code);
+ h_method->SetEntryPointFromQuickCompiledCode(nullptr);
+ ASSERT_EQ(code_cache->GetCodeFor(h_method.Get()), reserved_code);
+ const uint8_t data_arr[] = {1, 2, 3, 4, 5};
+ uint8_t* data_ptr = code_cache->AddDataArray(soa.Self(), data_arr, data_arr + sizeof(data_arr));
+ ASSERT_TRUE(data_ptr != nullptr);
+ ASSERT_EQ(memcmp(data_ptr, data_arr, sizeof(data_arr)), 0);
+}
+
+TEST_F(JitCodeCacheTest, TestOverflow) {
+ std::string error_msg;
+ constexpr size_t kSize = 1 * MB;
+ std::unique_ptr<JitCodeCache> code_cache(
+ JitCodeCache::Create(kSize, &error_msg));
+ ASSERT_TRUE(code_cache.get() != nullptr) << error_msg;
+ ASSERT_TRUE(code_cache->CodeCachePtr() != nullptr);
+ size_t code_bytes = 0;
+ size_t data_bytes = 0;
+ constexpr size_t kCodeArrSize = 4 * KB;
+ constexpr size_t kDataArrSize = 4 * KB;
+ uint8_t data_arr[kDataArrSize] = {53};
+ // Add code and data until we are full.
+ uint8_t* code_ptr = nullptr;
+ uint8_t* data_ptr = nullptr;
+ do {
+ code_ptr = code_cache->ReserveCode(Thread::Current(), kCodeArrSize);
+ data_ptr = code_cache->AddDataArray(Thread::Current(), data_arr, data_arr + kDataArrSize);
+ if (code_ptr != nullptr) {
+ code_bytes += kCodeArrSize;
+ }
+ if (data_ptr != nullptr) {
+ data_bytes += kDataArrSize;
+ }
+ } while (code_ptr != nullptr || data_ptr != nullptr);
+ // Make sure we added a reasonable amount of code and data.
+ CHECK_GT(code_bytes, 0u);
+ CHECK_LE(code_bytes, kSize);
+ CHECK_GT(data_bytes, 0u);
+ CHECK_LE(data_bytes, kSize);
+ CHECK_GE(code_bytes + data_bytes, kSize * 4 / 5);
+}
+
+} // namespace jit
+} // namespace art