summaryrefslogtreecommitdiffstats
path: root/compiler/dex/quick
diff options
context:
space:
mode:
authorVladimir Marko <vmarko@google.com>2014-11-14 10:51:40 +0000
committerGerrit Code Review <noreply-gerritcodereview@google.com>2014-11-14 10:51:40 +0000
commit4594ad627a48e249ee1680e954558dea15f0d133 (patch)
tree2ab812f944c7a75b2cecbed3aef260c0762f19e9 /compiler/dex/quick
parent244087cfc7718f42db73a8b9e7f82879ced779d1 (diff)
parente08785bd2601d2d62567aacd51e99165120a9c53 (diff)
downloadart-4594ad627a48e249ee1680e954558dea15f0d133.zip
art-4594ad627a48e249ee1680e954558dea15f0d133.tar.gz
art-4594ad627a48e249ee1680e954558dea15f0d133.tar.bz2
Merge "Quick: Fix arm64 AGET/APUT to use 32-bit index."
Diffstat (limited to 'compiler/dex/quick')
-rw-r--r--compiler/dex/quick/arm64/int_arm64.cc81
1 file changed, 24 insertions, 57 deletions
diff --git a/compiler/dex/quick/arm64/int_arm64.cc b/compiler/dex/quick/arm64/int_arm64.cc
index e57f99c..8a5a58c 100644
--- a/compiler/dex/quick/arm64/int_arm64.cc
+++ b/compiler/dex/quick/arm64/int_arm64.cc
@@ -1147,11 +1147,6 @@ void Arm64Mir2Lir::GenArrayGet(int opt_flags, OpSize size, RegLocation rl_array,
data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Int32Value();
}
- // If index is constant, just fold it into the data offset
- if (constant_index) {
- data_offset += mir_graph_->ConstantValue(rl_index) << scale;
- }
-
/* null object? */
GenNullCheck(rl_array.reg, opt_flags);
@@ -1165,42 +1160,22 @@ void Arm64Mir2Lir::GenArrayGet(int opt_flags, OpSize size, RegLocation rl_array,
} else {
ForceImplicitNullCheck(rl_array.reg, opt_flags);
}
- if (rl_dest.wide || rl_dest.fp || constant_index) {
- RegStorage reg_ptr;
- if (constant_index) {
- reg_ptr = rl_array.reg; // NOTE: must not alter reg_ptr in constant case.
- } else {
- // No special indexed operation, lea + load w/ displacement
- reg_ptr = AllocTempRef();
- OpRegRegRegShift(kOpAdd, reg_ptr, rl_array.reg, As64BitReg(rl_index.reg),
- EncodeShift(kA64Lsl, scale));
- FreeTemp(rl_index.reg);
- }
+ if (constant_index) {
rl_result = EvalLoc(rl_dest, reg_class, true);
if (needs_range_check) {
- if (constant_index) {
- GenArrayBoundsCheck(mir_graph_->ConstantValue(rl_index), reg_len);
- } else {
- GenArrayBoundsCheck(rl_index.reg, reg_len);
- }
+ GenArrayBoundsCheck(mir_graph_->ConstantValue(rl_index), reg_len);
FreeTemp(reg_len);
}
+ // Fold the constant index into the data offset.
+ data_offset += mir_graph_->ConstantValue(rl_index) << scale;
if (rl_result.ref) {
- LoadRefDisp(reg_ptr, data_offset, rl_result.reg, kNotVolatile);
- } else {
- LoadBaseDisp(reg_ptr, data_offset, rl_result.reg, size, kNotVolatile);
- }
- if (!constant_index) {
- FreeTemp(reg_ptr);
- }
- if (rl_dest.wide) {
- StoreValueWide(rl_dest, rl_result);
+ LoadRefDisp(rl_array.reg, data_offset, rl_result.reg, kNotVolatile);
} else {
- StoreValue(rl_dest, rl_result);
+ LoadBaseDisp(rl_array.reg, data_offset, rl_result.reg, size, kNotVolatile);
}
} else {
- // Offset base, then use indexed load
+ // Offset base, then use indexed load.
RegStorage reg_ptr = AllocTempRef();
OpRegRegImm(kOpAdd, reg_ptr, rl_array.reg, data_offset);
FreeTemp(rl_array.reg);
@@ -1211,11 +1186,15 @@ void Arm64Mir2Lir::GenArrayGet(int opt_flags, OpSize size, RegLocation rl_array,
FreeTemp(reg_len);
}
if (rl_result.ref) {
- LoadRefIndexed(reg_ptr, As64BitReg(rl_index.reg), rl_result.reg, scale);
+ LoadRefIndexed(reg_ptr, rl_index.reg, rl_result.reg, scale);
} else {
- LoadBaseIndexed(reg_ptr, As64BitReg(rl_index.reg), rl_result.reg, scale, size);
+ LoadBaseIndexed(reg_ptr, rl_index.reg, rl_result.reg, scale, size);
}
FreeTemp(reg_ptr);
+ }
+ if (rl_dest.wide) {
+ StoreValueWide(rl_dest, rl_result);
+ } else {
StoreValue(rl_dest, rl_result);
}
}
@@ -1237,11 +1216,6 @@ void Arm64Mir2Lir::GenArrayPut(int opt_flags, OpSize size, RegLocation rl_array,
data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Int32Value();
}
- // If index is constant, just fold it into the data offset.
- if (constant_index) {
- data_offset += mir_graph_->ConstantValue(rl_index) << scale;
- }
-
rl_array = LoadValue(rl_array, kRefReg);
if (!constant_index) {
rl_index = LoadValue(rl_index, kCoreReg);
@@ -1274,24 +1248,18 @@ void Arm64Mir2Lir::GenArrayPut(int opt_flags, OpSize size, RegLocation rl_array,
ForceImplicitNullCheck(rl_array.reg, opt_flags);
}
/* at this point, reg_ptr points to array, 2 live temps */
- if (rl_src.wide || rl_src.fp || constant_index) {
- if (rl_src.wide) {
- rl_src = LoadValueWide(rl_src, reg_class);
- } else {
- rl_src = LoadValue(rl_src, reg_class);
- }
- if (!constant_index) {
- OpRegRegRegShift(kOpAdd, reg_ptr, rl_array.reg, As64BitReg(rl_index.reg),
- EncodeShift(kA64Lsl, scale));
- }
+ if (rl_src.wide) {
+ rl_src = LoadValueWide(rl_src, reg_class);
+ } else {
+ rl_src = LoadValue(rl_src, reg_class);
+ }
+ if (constant_index) {
if (needs_range_check) {
- if (constant_index) {
- GenArrayBoundsCheck(mir_graph_->ConstantValue(rl_index), reg_len);
- } else {
- GenArrayBoundsCheck(rl_index.reg, reg_len);
- }
+ GenArrayBoundsCheck(mir_graph_->ConstantValue(rl_index), reg_len);
FreeTemp(reg_len);
}
+ // Fold the constant index into the data offset.
+ data_offset += mir_graph_->ConstantValue(rl_index) << scale;
if (rl_src.ref) {
StoreRefDisp(reg_ptr, data_offset, rl_src.reg, kNotVolatile);
} else {
@@ -1300,15 +1268,14 @@ void Arm64Mir2Lir::GenArrayPut(int opt_flags, OpSize size, RegLocation rl_array,
} else {
/* reg_ptr -> array data */
OpRegRegImm(kOpAdd, reg_ptr, rl_array.reg, data_offset);
- rl_src = LoadValue(rl_src, reg_class);
if (needs_range_check) {
GenArrayBoundsCheck(rl_index.reg, reg_len);
FreeTemp(reg_len);
}
if (rl_src.ref) {
- StoreRefIndexed(reg_ptr, As64BitReg(rl_index.reg), rl_src.reg, scale);
+ StoreRefIndexed(reg_ptr, rl_index.reg, rl_src.reg, scale);
} else {
- StoreBaseIndexed(reg_ptr, As64BitReg(rl_index.reg), rl_src.reg, scale, size);
+ StoreBaseIndexed(reg_ptr, rl_index.reg, rl_src.reg, scale, size);
}
}
if (allocated_reg_ptr_temp) {