 compiler/dex/quick/arm/call_arm.cc       | 7 +++----
 runtime/gc/collector/garbage_collector.h | 2 +-
 2 files changed, 4 insertions(+), 5 deletions(-)
diff --git a/compiler/dex/quick/arm/call_arm.cc b/compiler/dex/quick/arm/call_arm.cc
index 2e37877..8c9f8ea 100644
--- a/compiler/dex/quick/arm/call_arm.cc
+++ b/compiler/dex/quick/arm/call_arm.cc
@@ -365,8 +365,8 @@ void ArmMir2Lir::GenEntrySequence(RegLocation* ArgLocs, RegLocation rl_method) {
     NewLIR1(kThumb2VPushCS, num_fp_spills_);
   }
 
-  // TODO: 64 bit will be different code.
-  const int frame_size_without_spills = frame_size_ - spill_count * 4;
+  const int spill_size = spill_count * 4;
+  const int frame_size_without_spills = frame_size_ - spill_size;
   if (!skip_overflow_check) {
     if (Runtime::Current()->ExplicitStackOverflowChecks()) {
       class StackOverflowSlowPath : public LIRSlowPath {
@@ -398,8 +398,7 @@ void ArmMir2Lir::GenEntrySequence(RegLocation* ArgLocs, RegLocation rl_method) {
         OpRegRegImm(kOpSub, rs_rARM_LR, rs_rARM_SP, frame_size_without_spills);
         LIR* branch = OpCmpBranch(kCondUlt, rs_rARM_LR, rs_r12, nullptr);
         // Need to restore LR since we used it as a temp.
-        AddSlowPath(new(arena_)StackOverflowSlowPath(this, branch, true,
-                                                     frame_size_without_spills));
+        AddSlowPath(new(arena_)StackOverflowSlowPath(this, branch, true, spill_size));
         OpRegCopy(rs_rARM_SP, rs_rARM_LR);  // Establish stack
       } else {
         // If the frame is small enough we are guaranteed to have enough space that remains to
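
The hunk above fixes the displacement argument passed to StackOverflowSlowPath. At the point where the overflow-check branch can be taken, SP has only been moved down by the spill pushes; the rest of the frame is not allocated until SP is copied from LR after the branch. The slow path therefore has to unwind SP by spill_size, not by frame_size_without_spills. A minimal, self-contained sketch of that arithmetic (kWordBytes and the sample values are illustrative, not taken from the compiler):

#include <cassert>
#include <cstddef>

// Sketch of the ARM entry-sequence frame math. Assumes 4-byte spill slots,
// matching the "spill_count * 4" in the hunk above; names are illustrative.
constexpr size_t kWordBytes = 4;

size_t SpillSize(size_t spill_count) { return spill_count * kWordBytes; }

size_t FrameSizeWithoutSpills(size_t frame_size, size_t spill_count) {
  return frame_size - SpillSize(spill_count);
}

int main() {
  const size_t frame_size = 64;  // hypothetical total frame size in bytes
  const size_t spill_count = 5;  // hypothetical number of spilled registers

  // At the overflow-check branch only the spills have been pushed, so a
  // throwing slow path must pop SpillSize() bytes; the remaining
  // FrameSizeWithoutSpills() bytes have not been allocated yet.
  assert(SpillSize(spill_count) == 20);
  assert(FrameSizeWithoutSpills(frame_size, spill_count) == 44);
  return 0;
}
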
diff --git a/runtime/gc/collector/garbage_collector.h b/runtime/gc/collector/garbage_collector.h
index ccfa9cf..5b7b8a2 100644
--- a/runtime/gc/collector/garbage_collector.h
+++ b/runtime/gc/collector/garbage_collector.h
@@ -128,7 +128,7 @@ class GarbageCollector {
   // Mark all reachable objects, done concurrently.
   virtual void MarkingPhase() = 0;
-  // Only called for concurrent GCs.
+  // Phase of the GC which is run with mutator lock exclusively held.
   virtual void PausePhase();
   // Called with mutators running.
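
The comment fix above records that PausePhase() is not specific to concurrent collectors; it is the phase that runs while the mutator lock is held exclusively, i.e. with all mutator threads suspended. A schematic driver showing where that phase sits relative to the concurrent ones; SuspendAllMutators()/ResumeAllMutators() are hypothetical stand-ins for ART's thread-suspension machinery, and this is not GarbageCollector's actual run loop:

#include <iostream>

// Schematic collector: PausePhase() runs between suspend and resume, so it
// alone may assume exclusive ownership of the mutator lock.
class SketchCollector {
 public:
  void Run() {
    MarkingPhase();        // concurrent: mutators keep running
    SuspendAllMutators();  // from here the mutator lock is exclusively held
    PausePhase();          // e.g. re-mark objects dirtied during marking
    ResumeAllMutators();   // mutators run again
    ReclaimPhase();        // concurrent sweep/reclaim
  }

 private:
  // Stand-ins for a real suspend-all/resume-all mechanism.
  void SuspendAllMutators() { std::cout << "mutators suspended\n"; }
  void ResumeAllMutators()  { std::cout << "mutators resumed\n"; }

  void MarkingPhase() { std::cout << "marking (concurrent)\n"; }
  void PausePhase()   { std::cout << "pause phase (exclusive)\n"; }
  void ReclaimPhase() { std::cout << "reclaiming (concurrent)\n"; }
};

int main() {
  SketchCollector().Run();
  return 0;
}
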