Diffstat (limited to 'compiler/dex/quick/arm64/target_arm64.cc')
-rw-r--r--  compiler/dex/quick/arm64/target_arm64.cc  29
1 file changed, 10 insertions(+), 19 deletions(-)
diff --git a/compiler/dex/quick/arm64/target_arm64.cc b/compiler/dex/quick/arm64/target_arm64.cc
index dfaa483..06e1cda 100644
--- a/compiler/dex/quick/arm64/target_arm64.cc
+++ b/compiler/dex/quick/arm64/target_arm64.cc
@@ -789,7 +789,7 @@ RegStorage Arm64Mir2Lir::LoadHelper(ThreadOffset<4> offset) {
RegStorage Arm64Mir2Lir::LoadHelper(ThreadOffset<8> offset) {
// TODO(Arm64): use LoadWordDisp instead.
// e.g. LoadWordDisp(rs_rA64_SELF, offset.Int32Value(), rs_rA64_LR);
- LoadBaseDisp(rs_rA64_SELF, offset.Int32Value(), rs_rA64_LR, k64, kNotVolatile);
+ LoadBaseDisp(rs_rA64_SELF, offset.Int32Value(), rs_rA64_LR, k64);
return rs_rA64_LR;
}
@@ -949,7 +949,7 @@ void Arm64Mir2Lir::FlushIns(RegLocation* ArgLocs, RegLocation rl_method) {
StoreValue(rl_method, rl_src);
// If Method* has been promoted, explicitly flush
if (rl_method.location == kLocPhysReg) {
- StoreRefDisp(TargetReg(kSp), 0, TargetReg(kArg0), kNotVolatile);
+ StoreRefDisp(TargetReg(kSp), 0, TargetReg(kArg0));
}
if (cu_->num_ins == 0) {
@@ -971,7 +971,7 @@ void Arm64Mir2Lir::FlushIns(RegLocation* ArgLocs, RegLocation rl_method) {
} else if ((v_map->fp_location == kLocPhysReg) && t_loc->fp) {
OpRegCopy(RegStorage::Solo32(v_map->FpReg), reg);
} else {
- StoreBaseDisp(TargetReg(kSp), SRegOffset(start_vreg + i), reg, op_size, kNotVolatile);
+ StoreBaseDisp(TargetReg(kSp), SRegOffset(start_vreg + i), reg, op_size);
if (reg.Is64Bit()) {
if (SRegOffset(start_vreg + i) + 4 != SRegOffset(start_vreg + i + 1)) {
LOG(FATAL) << "64 bit value stored in non-consecutive 4 bytes slots";
@@ -1057,14 +1057,14 @@ int Arm64Mir2Lir::GenDalvikArgsRange(CallInfo* info, int call_state,
loc = UpdateLocWide(loc);
if (loc.location == kLocPhysReg) {
ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
- StoreBaseDisp(TargetReg(kSp), SRegOffset(loc.s_reg_low), loc.reg, k64, kNotVolatile);
+ StoreBaseDisp(TargetReg(kSp), SRegOffset(loc.s_reg_low), loc.reg, k64);
}
next_arg += 2;
} else {
loc = UpdateLoc(loc);
if (loc.location == kLocPhysReg) {
ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
- StoreBaseDisp(TargetReg(kSp), SRegOffset(loc.s_reg_low), loc.reg, k32, kNotVolatile);
+ StoreBaseDisp(TargetReg(kSp), SRegOffset(loc.s_reg_low), loc.reg, k32);
}
next_arg++;
}
@@ -1122,27 +1122,18 @@ int Arm64Mir2Lir::GenDalvikArgsRange(CallInfo* info, int call_state,
ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
if (rl_arg.wide) {
if (rl_arg.location == kLocPhysReg) {
- StoreBaseDisp(TargetReg(kSp), out_offset, rl_arg.reg, k64, kNotVolatile);
+ StoreBaseDisp(TargetReg(kSp), out_offset, rl_arg.reg, k64);
} else {
LoadValueDirectWideFixed(rl_arg, regWide);
- StoreBaseDisp(TargetReg(kSp), out_offset, regWide, k64, kNotVolatile);
+ StoreBaseDisp(TargetReg(kSp), out_offset, regWide, k64);
}
i++;
} else {
if (rl_arg.location == kLocPhysReg) {
- if (rl_arg.ref) {
- StoreRefDisp(TargetReg(kSp), out_offset, rl_arg.reg, kNotVolatile);
- } else {
- StoreBaseDisp(TargetReg(kSp), out_offset, rl_arg.reg, k32, kNotVolatile);
- }
+ StoreBaseDisp(TargetReg(kSp), out_offset, rl_arg.reg, k32);
} else {
- if (rl_arg.ref) {
- LoadValueDirectFixed(rl_arg, regSingle);
- StoreRefDisp(TargetReg(kSp), out_offset, regSingle, kNotVolatile);
- } else {
- LoadValueDirectFixed(rl_arg, As32BitReg(regSingle));
- StoreBaseDisp(TargetReg(kSp), out_offset, As32BitReg(regSingle), k32, kNotVolatile);
- }
+ LoadValueDirectFixed(rl_arg, regSingle);
+ StoreBaseDisp(TargetReg(kSp), out_offset, regSingle, k32);
}
}
}
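
The hunks above switch the store/load helper call sites from overloads that take an explicit VolatileKind argument back to overloads without it. The following is a minimal, self-contained toy sketch of that signature difference; the types and function bodies are hypothetical stand-ins for illustration only, not the real ART Mir2Lir declarations.

#include <cstdio>

// Hypothetical stand-ins for the ART types (RegStorage, OpSize, VolatileKind).
enum OpSize { k32, k64 };
enum VolatileKind { kNotVolatile, kVolatile };
struct RegStorage { int id; };

// Five-argument form: the call sites removed by this diff pass an explicit VolatileKind.
void StoreBaseDisp(RegStorage base, int disp, RegStorage src, OpSize size, VolatileKind vol) {
  std::printf("store r%d -> [r%d + %d] size=%d volatile=%d\n", src.id, base.id, disp, size, vol);
}

// Four-argument form: the call sites added by this diff omit the VolatileKind argument.
void StoreBaseDisp(RegStorage base, int disp, RegStorage src, OpSize size) {
  std::printf("store r%d -> [r%d + %d] size=%d\n", src.id, base.id, disp, size);
}

int main() {
  RegStorage sp{31}, arg0{0};
  StoreBaseDisp(sp, 0, arg0, k64, kNotVolatile);  // call form on the removed (-) lines
  StoreBaseDisp(sp, 0, arg0, k64);                // call form on the added (+) lines
  return 0;
}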