author     Ian Rogers <irogers@google.com>    2014-03-13 09:02:55 -0700
committer  Ian Rogers <irogers@google.com>    2014-03-13 09:12:20 -0700
commit     9758f79a6c1ef7f662caca9c1df39de1934166b8 (patch)
tree       fdffb18605692c990665f3e99a17dcb0efb09893 /runtime/entrypoints/quick
parent     c1020433660737d466b0d726bbeb86d9a279a44a (diff)
Implement FINAL/OVERRIDE for clang.
Separate declaration from definition in certain places to work around issues with clang. Remove a bogus lock annotation at a definition in compilers.cc that is already present at the declaration. Remove a duplicate definition of ClassReference.

Change-Id: I5368057bb36319a259110b2198610d9d2b2e5041
Diffstat (limited to 'runtime/entrypoints/quick')
-rw-r--r--  runtime/entrypoints/quick/quick_trampoline_entrypoints.cc  310
1 file changed, 163 insertions(+), 147 deletions(-)
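
For readers skimming the diff below: the pattern applied throughout is to mark the visitor classes FINAL, annotate their virtual methods with OVERRIDE, and move the method bodies out of the class so that clang accepts these specifiers alongside the lock annotations. The following is a minimal, self-contained sketch of that pattern; the class and method names are illustrative rather than the ART types, and FINAL/OVERRIDE are assumed to be macros that expand to the C++11 keywords on compilers that support them and to nothing otherwise.

// Minimal sketch of the declaration/definition split (illustrative names,
// not the ART classes).
#include <cstdio>

// Assumption: FINAL/OVERRIDE map to the C++11 keywords when available.
#if __cplusplus >= 201103L
#define OVERRIDE override
#define FINAL final
#else
#define OVERRIDE
#define FINAL
#endif

class ArgumentVisitor {
 public:
  virtual ~ArgumentVisitor() {}
  virtual void Visit() = 0;
};

// The derived class is FINAL and only *declares* Visit(). Defining the method
// inside the class body together with OVERRIDE (and, in ART, the thread-safety
// annotations) is what this change avoids; the body is moved out of line.
class PrintingArgumentVisitor FINAL : public ArgumentVisitor {
 public:
  void Visit() OVERRIDE;  // declaration only
};

// Out-of-line definition: OVERRIDE and any attributes are not repeated here.
void PrintingArgumentVisitor::Visit() {
  std::printf("visiting one argument\n");
}

int main() {
  PrintingArgumentVisitor visitor;
  visitor.Visit();
  return 0;
}

With the definition split out this way, the compiler sees a plain declaration carrying OVERRIDE and the lock annotation, which is what the hunks below do for BuildQuickShadowFrameVisitor, BuildQuickArgumentVisitor, RememberForGcArgumentVisitor, and BuildGenericJniFrameVisitor.
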
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index 9489d9b..31a5728 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -434,38 +434,7 @@ class BuildQuickShadowFrameVisitor FINAL : public QuickArgumentVisitor {
uint32_t shorty_len, ShadowFrame* sf, size_t first_arg_reg) :
QuickArgumentVisitor(sp, is_static, shorty, shorty_len), sf_(sf), cur_reg_(first_arg_reg) {}
- void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE {
- Primitive::Type type = GetParamPrimitiveType();
- switch (type) {
- case Primitive::kPrimLong: // Fall-through.
- case Primitive::kPrimDouble:
- if (IsSplitLongOrDouble()) {
- sf_->SetVRegLong(cur_reg_, ReadSplitLongParam());
- } else {
- sf_->SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
- }
- ++cur_reg_;
- break;
- case Primitive::kPrimNot: {
- StackReference<mirror::Object>* stack_ref =
- reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
- sf_->SetVRegReference(cur_reg_, stack_ref->AsMirrorPtr());
- }
- break;
- case Primitive::kPrimBoolean: // Fall-through.
- case Primitive::kPrimByte: // Fall-through.
- case Primitive::kPrimChar: // Fall-through.
- case Primitive::kPrimShort: // Fall-through.
- case Primitive::kPrimInt: // Fall-through.
- case Primitive::kPrimFloat:
- sf_->SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
- break;
- case Primitive::kPrimVoid:
- LOG(FATAL) << "UNREACHABLE";
- break;
- }
- ++cur_reg_;
- }
+ void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;
private:
ShadowFrame* const sf_;
@@ -474,6 +443,39 @@ class BuildQuickShadowFrameVisitor FINAL : public QuickArgumentVisitor {
DISALLOW_COPY_AND_ASSIGN(BuildQuickShadowFrameVisitor);
};
+void BuildQuickShadowFrameVisitor::Visit() {
+ Primitive::Type type = GetParamPrimitiveType();
+ switch (type) {
+ case Primitive::kPrimLong: // Fall-through.
+ case Primitive::kPrimDouble:
+ if (IsSplitLongOrDouble()) {
+ sf_->SetVRegLong(cur_reg_, ReadSplitLongParam());
+ } else {
+ sf_->SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
+ }
+ ++cur_reg_;
+ break;
+ case Primitive::kPrimNot: {
+ StackReference<mirror::Object>* stack_ref =
+ reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
+ sf_->SetVRegReference(cur_reg_, stack_ref->AsMirrorPtr());
+ }
+ break;
+ case Primitive::kPrimBoolean: // Fall-through.
+ case Primitive::kPrimByte: // Fall-through.
+ case Primitive::kPrimChar: // Fall-through.
+ case Primitive::kPrimShort: // Fall-through.
+ case Primitive::kPrimInt: // Fall-through.
+ case Primitive::kPrimFloat:
+ sf_->SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
+ break;
+ case Primitive::kPrimVoid:
+ LOG(FATAL) << "UNREACHABLE";
+ break;
+ }
+ ++cur_reg_;
+}
+
extern "C" uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
mirror::ArtMethod** sp)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
@@ -532,56 +534,61 @@ class BuildQuickArgumentVisitor FINAL : public QuickArgumentVisitor {
std::vector<jvalue>* args) :
QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa), args_(args) {}
- void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE {
- jvalue val;
- Primitive::Type type = GetParamPrimitiveType();
- switch (type) {
- case Primitive::kPrimNot: {
- StackReference<mirror::Object>* stack_ref =
- reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
- val.l = soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
- references_.push_back(std::make_pair(val.l, stack_ref));
- break;
- }
- case Primitive::kPrimLong: // Fall-through.
- case Primitive::kPrimDouble:
- if (IsSplitLongOrDouble()) {
- val.j = ReadSplitLongParam();
- } else {
- val.j = *reinterpret_cast<jlong*>(GetParamAddress());
- }
- break;
- case Primitive::kPrimBoolean: // Fall-through.
- case Primitive::kPrimByte: // Fall-through.
- case Primitive::kPrimChar: // Fall-through.
- case Primitive::kPrimShort: // Fall-through.
- case Primitive::kPrimInt: // Fall-through.
- case Primitive::kPrimFloat:
- val.i = *reinterpret_cast<jint*>(GetParamAddress());
- break;
- case Primitive::kPrimVoid:
- LOG(FATAL) << "UNREACHABLE";
- val.j = 0;
- break;
- }
- args_->push_back(val);
- }
+ void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;
- void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- // Fixup any references which may have changed.
- for (const auto& pair : references_) {
- pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
- }
- }
+ void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
private:
- ScopedObjectAccessUnchecked* soa_;
- std::vector<jvalue>* args_;
+ ScopedObjectAccessUnchecked* const soa_;
+ std::vector<jvalue>* const args_;
// References which we must update when exiting in case the GC moved the objects.
std::vector<std::pair<jobject, StackReference<mirror::Object>*> > references_;
+
DISALLOW_COPY_AND_ASSIGN(BuildQuickArgumentVisitor);
};
+void BuildQuickArgumentVisitor::Visit() {
+ jvalue val;
+ Primitive::Type type = GetParamPrimitiveType();
+ switch (type) {
+ case Primitive::kPrimNot: {
+ StackReference<mirror::Object>* stack_ref =
+ reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
+ val.l = soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
+ references_.push_back(std::make_pair(val.l, stack_ref));
+ break;
+ }
+ case Primitive::kPrimLong: // Fall-through.
+ case Primitive::kPrimDouble:
+ if (IsSplitLongOrDouble()) {
+ val.j = ReadSplitLongParam();
+ } else {
+ val.j = *reinterpret_cast<jlong*>(GetParamAddress());
+ }
+ break;
+ case Primitive::kPrimBoolean: // Fall-through.
+ case Primitive::kPrimByte: // Fall-through.
+ case Primitive::kPrimChar: // Fall-through.
+ case Primitive::kPrimShort: // Fall-through.
+ case Primitive::kPrimInt: // Fall-through.
+ case Primitive::kPrimFloat:
+ val.i = *reinterpret_cast<jint*>(GetParamAddress());
+ break;
+ case Primitive::kPrimVoid:
+ LOG(FATAL) << "UNREACHABLE";
+ val.j = 0;
+ break;
+ }
+ args_->push_back(val);
+}
+
+void BuildQuickArgumentVisitor::FixupReferences() {
+ // Fixup any references which may have changed.
+ for (const auto& pair : references_) {
+ pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
+ }
+}
+
// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
// which is responsible for recording callee save registers. We explicitly place into jobjects the
// incoming reference arguments (so they survive GC). We invoke the invocation handler, which is a
@@ -644,30 +651,35 @@ class RememberForGcArgumentVisitor FINAL : public QuickArgumentVisitor {
uint32_t shorty_len, ScopedObjectAccessUnchecked* soa) :
QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa) {}
- void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE {
- if (IsParamAReference()) {
- StackReference<mirror::Object>* stack_ref =
- reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
- jobject reference =
- soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
- references_.push_back(std::make_pair(reference, stack_ref));
- }
- }
+ void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;
- void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- // Fixup any references which may have changed.
- for (const auto& pair : references_) {
- pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
- }
- }
+ void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
private:
- ScopedObjectAccessUnchecked* soa_;
+ ScopedObjectAccessUnchecked* const soa_;
// References which we must update when exiting in case the GC moved the objects.
std::vector<std::pair<jobject, StackReference<mirror::Object>*> > references_;
DISALLOW_COPY_AND_ASSIGN(RememberForGcArgumentVisitor);
};
+void RememberForGcArgumentVisitor::Visit() {
+ if (IsParamAReference()) {
+ StackReference<mirror::Object>* stack_ref =
+ reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
+ jobject reference =
+ soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
+ references_.push_back(std::make_pair(reference, stack_ref));
+ }
+}
+
+void RememberForGcArgumentVisitor::FixupReferences() {
+ // Fixup any references which may have changed.
+ for (const auto& pair : references_) {
+ pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
+ }
+}
+
+
// Lazily resolve a method for quick. Called by stub code.
extern "C" const void* artQuickResolutionTrampoline(mirror::ArtMethod* called,
mirror::Object* receiver,
@@ -1309,64 +1321,9 @@ class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
}
}
- void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE {
- Primitive::Type type = GetParamPrimitiveType();
- switch (type) {
- case Primitive::kPrimLong: {
- jlong long_arg;
- if (IsSplitLongOrDouble()) {
- long_arg = ReadSplitLongParam();
- } else {
- long_arg = *reinterpret_cast<jlong*>(GetParamAddress());
- }
- sm_.AdvanceLong(long_arg);
- break;
- }
- case Primitive::kPrimDouble: {
- uint64_t double_arg;
- if (IsSplitLongOrDouble()) {
- // Read into union so that we don't cast to a double.
- double_arg = ReadSplitLongParam();
- } else {
- double_arg = *reinterpret_cast<uint64_t*>(GetParamAddress());
- }
- sm_.AdvanceDouble(double_arg);
- break;
- }
- case Primitive::kPrimNot: {
- StackReference<mirror::Object>* stack_ref =
- reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
- sm_.AdvanceSirt(stack_ref->AsMirrorPtr());
- break;
- }
- case Primitive::kPrimFloat:
- sm_.AdvanceFloat(*reinterpret_cast<float*>(GetParamAddress()));
- break;
- case Primitive::kPrimBoolean: // Fall-through.
- case Primitive::kPrimByte: // Fall-through.
- case Primitive::kPrimChar: // Fall-through.
- case Primitive::kPrimShort: // Fall-through.
- case Primitive::kPrimInt: // Fall-through.
- sm_.AdvanceInt(*reinterpret_cast<jint*>(GetParamAddress()));
- break;
- case Primitive::kPrimVoid:
- LOG(FATAL) << "UNREACHABLE";
- break;
- }
- }
+ void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;
- void FinalizeSirt(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- // Initialize padding entries.
- while (sirt_number_of_references_ < sirt_expected_refs_) {
- *cur_sirt_entry_ = StackReference<mirror::Object>();
- cur_sirt_entry_++;
- sirt_number_of_references_++;
- }
- sirt_->SetNumberOfReferences(sirt_expected_refs_);
- DCHECK_NE(sirt_expected_refs_, 0U);
- // Install Sirt.
- self->PushSirt(sirt_);
- }
+ void FinalizeSirt(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
jobject GetFirstSirtEntry() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
return reinterpret_cast<jobject>(sirt_->GetStackReference(0));
@@ -1433,6 +1390,65 @@ class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
DISALLOW_COPY_AND_ASSIGN(BuildGenericJniFrameVisitor);
};
+void BuildGenericJniFrameVisitor::Visit() {
+ Primitive::Type type = GetParamPrimitiveType();
+ switch (type) {
+ case Primitive::kPrimLong: {
+ jlong long_arg;
+ if (IsSplitLongOrDouble()) {
+ long_arg = ReadSplitLongParam();
+ } else {
+ long_arg = *reinterpret_cast<jlong*>(GetParamAddress());
+ }
+ sm_.AdvanceLong(long_arg);
+ break;
+ }
+ case Primitive::kPrimDouble: {
+ uint64_t double_arg;
+ if (IsSplitLongOrDouble()) {
+ // Read into union so that we don't cast to a double.
+ double_arg = ReadSplitLongParam();
+ } else {
+ double_arg = *reinterpret_cast<uint64_t*>(GetParamAddress());
+ }
+ sm_.AdvanceDouble(double_arg);
+ break;
+ }
+ case Primitive::kPrimNot: {
+ StackReference<mirror::Object>* stack_ref =
+ reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
+ sm_.AdvanceSirt(stack_ref->AsMirrorPtr());
+ break;
+ }
+ case Primitive::kPrimFloat:
+ sm_.AdvanceFloat(*reinterpret_cast<float*>(GetParamAddress()));
+ break;
+ case Primitive::kPrimBoolean: // Fall-through.
+ case Primitive::kPrimByte: // Fall-through.
+ case Primitive::kPrimChar: // Fall-through.
+ case Primitive::kPrimShort: // Fall-through.
+ case Primitive::kPrimInt: // Fall-through.
+ sm_.AdvanceInt(*reinterpret_cast<jint*>(GetParamAddress()));
+ break;
+ case Primitive::kPrimVoid:
+ LOG(FATAL) << "UNREACHABLE";
+ break;
+ }
+}
+
+void BuildGenericJniFrameVisitor::FinalizeSirt(Thread* self) {
+ // Initialize padding entries.
+ while (sirt_number_of_references_ < sirt_expected_refs_) {
+ *cur_sirt_entry_ = StackReference<mirror::Object>();
+ cur_sirt_entry_++;
+ sirt_number_of_references_++;
+ }
+ sirt_->SetNumberOfReferences(sirt_expected_refs_);
+ DCHECK_NE(sirt_expected_refs_, 0U);
+ // Install Sirt.
+ self->PushSirt(sirt_);
+}
+
/*
* Initializes an alloca region assumed to be directly below sp for a native call:
* Create a Sirt and call stack and fill a mini stack with values to be pushed to registers.