summaryrefslogtreecommitdiffstats
path: root/runtime/handle_scope.h
diff options
context:
space:
mode:
authorMathieu Chartier <mathieuc@google.com>2014-06-03 15:37:03 -0700
committerMathieu Chartier <mathieuc@google.com>2014-06-04 09:58:05 -0700
commitbc56fc3242ea2fad6630abdee1657f444eee8d30 (patch)
tree9a50948145747c557d1da1e0f25192573d81e0f1 /runtime/handle_scope.h
parent32640daf36acda331719766956b25661647e2461 (diff)
downloadart-bc56fc3242ea2fad6630abdee1657f444eee8d30.zip
art-bc56fc3242ea2fad6630abdee1657f444eee8d30.tar.gz
art-bc56fc3242ea2fad6630abdee1657f444eee8d30.tar.bz2
Fix compiler warnings.
Added GetReference, GetHandle to StackHandleScope to prevent the compiler from optimizing away these loads/stores from inline functions. Change-Id: I4db02dd3194665d844292e74e3a7d7c80e730e06
Diffstat (limited to 'runtime/handle_scope.h')
-rw-r--r--runtime/handle_scope.h31
1 file changed, 26 insertions(+), 5 deletions(-)
diff --git a/runtime/handle_scope.h b/runtime/handle_scope.h
index 8ff7086..629e4ec 100644
--- a/runtime/handle_scope.h
+++ b/runtime/handle_scope.h
@@ -31,7 +31,7 @@ class Thread;
// HandleScopes can be allocated within the bridge frame between managed and native code backed by
// stack storage or manually allocated in native.
-class HandleScope {
+class PACKED(4) HandleScope {
public:
~HandleScope() {}
@@ -46,7 +46,7 @@ class HandleScope {
// Returns the size of a HandleScope containing num_references handles.
static size_t SizeOf(uint32_t num_references) {
- size_t header_size = OFFSETOF_MEMBER(HandleScope, references_);
+ size_t header_size = sizeof(HandleScope);
size_t data_size = sizeof(StackReference<mirror::Object>) * num_references;
return header_size + data_size;
}
@@ -98,8 +98,8 @@ class HandleScope {
// jni_compiler should have a jobject/jclass as a native method is
// passed in a this pointer or a class
DCHECK_GT(number_of_references_, 0U);
- return ((&references_[0] <= handle_scope_entry)
- && (handle_scope_entry <= (&references_[number_of_references_ - 1])));
+ return &references_[0] <= handle_scope_entry &&
+ handle_scope_entry <= &references_[number_of_references_ - 1];
}
// Offset of link within HandleScope, used by generated code
@@ -152,11 +152,32 @@ class HandleWrapper : public Handle<T> {
// Scoped handle storage of a fixed size that is usually stack allocated.
template<size_t kNumReferences>
-class StackHandleScope : public HandleScope {
+class PACKED(4) StackHandleScope : public HandleScope {
public:
explicit StackHandleScope(Thread* self);
~StackHandleScope();
+ // Currently unused, using this GetReference instead of the one in HandleScope is preferred to
+ // avoid compiler optimizations incorrectly optimizing out of bound array accesses.
+ // TODO: Remove this when it is un-necessary.
+ mirror::Object* GetReference(size_t i) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+ ALWAYS_INLINE {
+ DCHECK_LT(i, number_of_references_);
+ return references_storage_[i].AsMirrorPtr();
+ }
+
+ Handle<mirror::Object> GetHandle(size_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+ ALWAYS_INLINE {
+ DCHECK_LT(i, number_of_references_);
+ return Handle<mirror::Object>(&references_storage_[i]);
+ }
+
+ void SetReference(size_t i, mirror::Object* object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+ ALWAYS_INLINE {
+ DCHECK_LT(i, number_of_references_);
+ references_storage_[i].Assign(object);
+ }
+
template<class T>
Handle<T> NewHandle(T* object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
SetReference(pos_, object);