summary | refs | log | tree | commit | diff | stats
diff options
context:
space:
mode:
-rw-r--r--  runtime/handle.h        24
-rw-r--r--  runtime/handle_scope.h  40
2 files changed, 28 insertions(+), 36 deletions(-)
diff --git a/runtime/handle.h b/runtime/handle.h
index addb663..6af3220 100644
--- a/runtime/handle.h
+++ b/runtime/handle.h
@@ -20,6 +20,7 @@
#include "base/casts.h"
#include "base/logging.h"
#include "base/macros.h"
+#include "base/value_object.h"
#include "stack.h"
namespace art {
@@ -33,7 +34,7 @@ template<class T> class Handle;
// a wrap pointer. Handles are generally allocated within HandleScopes. Handle is a super-class
// of MutableHandle and doesn't support assignment operations.
template<class T>
-class Handle {
+class Handle : public ValueObject {
public:
Handle() : reference_(nullptr) {
}
@@ -58,7 +59,7 @@ class Handle {
}
ALWAYS_INLINE T* Get() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return reference_->AsMirrorPtr();
+ return down_cast<T*>(reference_->AsMirrorPtr());
}
ALWAYS_INLINE jobject ToJObject() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
@@ -70,25 +71,25 @@ class Handle {
}
protected:
- StackReference<T>* reference_;
-
template<typename S>
explicit Handle(StackReference<S>* reference)
- : reference_(reinterpret_cast<StackReference<T>*>(reference)) {
+ : reference_(reference) {
}
template<typename S>
explicit Handle(const Handle<S>& handle)
- : reference_(reinterpret_cast<StackReference<T>*>(handle.reference_)) {
+ : reference_(handle.reference_) {
}
- StackReference<T>* GetReference() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE {
+ StackReference<mirror::Object>* GetReference() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE {
return reference_;
}
- ALWAYS_INLINE const StackReference<T>* GetReference() const
+ ALWAYS_INLINE const StackReference<mirror::Object>* GetReference() const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
return reference_;
}
+ StackReference<mirror::Object>* reference_;
+
private:
friend class BuildGenericJniFrameVisitor;
template<class S> friend class Handle;
@@ -121,8 +122,8 @@ class MutableHandle : public Handle<T> {
}
ALWAYS_INLINE T* Assign(T* reference) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- StackReference<T>* ref = Handle<T>::GetReference();
- T* const old = ref->AsMirrorPtr();
+ StackReference<mirror::Object>* ref = Handle<T>::GetReference();
+ T* old = down_cast<T*>(ref->AsMirrorPtr());
ref->Assign(reference);
return old;
}
@@ -132,7 +133,6 @@ class MutableHandle : public Handle<T> {
: Handle<T>(handle) {
}
- protected:
template<typename S>
explicit MutableHandle(StackReference<S>* reference) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
: Handle<T>(reference) {
@@ -153,7 +153,7 @@ class NullHandle : public Handle<T> {
}
private:
- StackReference<T> null_ref_;
+ StackReference<mirror::Object> null_ref_;
};
} // namespace art
diff --git a/runtime/handle_scope.h b/runtime/handle_scope.h
index f795e38..13c939f 100644
--- a/runtime/handle_scope.h
+++ b/runtime/handle_scope.h
@@ -159,7 +159,7 @@ class HandleWrapper : public MutableHandle<T> {
}
private:
- T** obj_;
+ T** const obj_;
};
// Scoped handle storage of a fixed size that is usually stack allocated.
@@ -169,31 +169,10 @@ class PACKED(4) StackHandleScope FINAL : public HandleScope {
explicit StackHandleScope(Thread* self);
~StackHandleScope();
- // Currently unused, using this GetReference instead of the one in HandleScope is preferred to
- // avoid compiler optimizations incorrectly optimizing out of bound array accesses.
- // TODO: Remove this when it is un-necessary.
- ALWAYS_INLINE mirror::Object* GetReference(size_t i) const
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- DCHECK_LT(i, kNumReferences);
- return GetReferences()[i].AsMirrorPtr();
- }
-
- ALWAYS_INLINE MutableHandle<mirror::Object> GetHandle(size_t i)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- DCHECK_LT(i, kNumReferences);
- return MutableHandle<mirror::Object>(&GetReferences()[i]);
- }
-
- ALWAYS_INLINE void SetReference(size_t i, mirror::Object* object)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- DCHECK_LT(i, kNumReferences);
- GetReferences()[i].Assign(object);
- }
-
template<class T>
MutableHandle<T> NewHandle(T* object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
SetReference(pos_, object);
- MutableHandle<T> h(GetHandle(pos_));
+ MutableHandle<T> h(GetHandle<T>(pos_));
pos_++;
return h;
}
@@ -201,12 +180,25 @@ class PACKED(4) StackHandleScope FINAL : public HandleScope {
template<class T>
HandleWrapper<T> NewHandleWrapper(T** object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
SetReference(pos_, *object);
- MutableHandle<T> h(GetHandle(pos_));
+ MutableHandle<T> h(GetHandle<T>(pos_));
pos_++;
return HandleWrapper<T>(object, h);
}
private:
+ template<class T>
+ ALWAYS_INLINE MutableHandle<T> GetHandle(size_t i)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ DCHECK_LT(i, kNumReferences);
+ return MutableHandle<T>(&GetReferences()[i]);
+ }
+
+ ALWAYS_INLINE void SetReference(size_t i, mirror::Object* object)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ DCHECK_LT(i, kNumReferences);
+ GetReferences()[i].Assign(object);
+ }
+
// Reference storage needs to be first as expected by the HandleScope layout.
StackReference<mirror::Object> storage_[kNumReferences];