summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorNicolas Geoffray <ngeoffray@google.com>2014-09-16 14:11:14 +0100
committerNicolas Geoffray <ngeoffray@google.com>2014-09-16 14:15:22 +0100
commitaac0f39a3501a7f7dd04b2342c2a16961969f139 (patch)
treeef71b73a7d95de726d36883e6c88f7c8cbcfaaf6
parent56369897d662ea63ea5ed57ae36af0ae0fa1452d (diff)
downloadart-aac0f39a3501a7f7dd04b2342c2a16961969f139.zip
art-aac0f39a3501a7f7dd04b2342c2a16961969f139.tar.gz
art-aac0f39a3501a7f7dd04b2342c2a16961969f139.tar.bz2
Fix a bug in the register allocator.
We need to take the live interval that starts first to know until when a register is free, instead of using the live interval that is last in the inactive list. Change-Id: I2c9f87481ff1b4fc7b9948db7559b8d3b11d84ce
-rw-r--r--compiler/optimizing/register_allocator.cc3
-rw-r--r--compiler/optimizing/register_allocator.h4
-rw-r--r--compiler/optimizing/register_allocator_test.cc55
3 files changed, 61 insertions, 1 deletions
diff --git a/compiler/optimizing/register_allocator.cc b/compiler/optimizing/register_allocator.cc
index b451ef4..7862611 100644
--- a/compiler/optimizing/register_allocator.cc
+++ b/compiler/optimizing/register_allocator.cc
@@ -440,7 +440,8 @@ bool RegisterAllocator::TryAllocateFreeReg(LiveInterval* current) {
DCHECK(inactive->HasRegister());
size_t next_intersection = inactive->FirstIntersectionWith(current);
if (next_intersection != kNoLifetime) {
- free_until[inactive->GetRegister()] = next_intersection;
+ free_until[inactive->GetRegister()] =
+ std::min(free_until[inactive->GetRegister()], next_intersection);
}
}
diff --git a/compiler/optimizing/register_allocator.h b/compiler/optimizing/register_allocator.h
index f737491..7d397e3 100644
--- a/compiler/optimizing/register_allocator.h
+++ b/compiler/optimizing/register_allocator.h
@@ -21,6 +21,8 @@
#include "primitive.h"
#include "utils/growable_array.h"
+#include "gtest/gtest.h"
+
namespace art {
class CodeGenerator;
@@ -177,6 +179,8 @@ class RegisterAllocator {
// Slots reserved for out arguments.
size_t reserved_out_slots_;
+ FRIEND_TEST(RegisterAllocatorTest, FreeUntil);
+
DISALLOW_COPY_AND_ASSIGN(RegisterAllocator);
};
diff --git a/compiler/optimizing/register_allocator_test.cc b/compiler/optimizing/register_allocator_test.cc
index dcae46b..3e3b6b1 100644
--- a/compiler/optimizing/register_allocator_test.cc
+++ b/compiler/optimizing/register_allocator_test.cc
@@ -392,4 +392,59 @@ TEST(RegisterAllocatorTest, DeadPhi) {
ASSERT_TRUE(register_allocator.Validate(false));
}
+/**
+ * Test that the TryAllocateFreeReg method works in the presence of inactive intervals
+ * that share the same register. It should split the interval it is currently
+ * allocating for at the minimum lifetime position between the two inactive intervals.
+ */
+TEST(RegisterAllocatorTest, FreeUntil) {
+ const uint16_t data[] = TWO_REGISTERS_CODE_ITEM(
+ Instruction::CONST_4 | 0 | 0,
+ Instruction::RETURN);
+
+ ArenaPool pool;
+ ArenaAllocator allocator(&pool);
+ HGraph* graph = BuildSSAGraph(data, &allocator);
+ SsaDeadPhiElimination(graph).Run();
+ x86::CodeGeneratorX86 codegen(graph);
+ SsaLivenessAnalysis liveness(*graph, &codegen);
+ liveness.Analyze();
+ RegisterAllocator register_allocator(&allocator, &codegen, liveness);
+
+ // Add an artificial range to cover the temps that will be put in the unhandled list.
+ LiveInterval* unhandled = graph->GetEntryBlock()->GetFirstInstruction()->GetLiveInterval();
+ unhandled->AddLoopRange(0, 60);
+
+ // Add three temps holding the same register, and starting at different positions.
+ // Put the one that should be picked in the middle of the inactive list to ensure
+ // we do not depend on an order.
+ LiveInterval* interval = LiveInterval::MakeTempInterval(&allocator, nullptr, Primitive::kPrimInt);
+ interval->SetRegister(0);
+ interval->AddRange(40, 50);
+ register_allocator.inactive_.Add(interval);
+
+ interval = LiveInterval::MakeTempInterval(&allocator, nullptr, Primitive::kPrimInt);
+ interval->SetRegister(0);
+ interval->AddRange(20, 30);
+ register_allocator.inactive_.Add(interval);
+
+ interval = LiveInterval::MakeTempInterval(&allocator, nullptr, Primitive::kPrimInt);
+ interval->SetRegister(0);
+ interval->AddRange(60, 70);
+ register_allocator.inactive_.Add(interval);
+
+ register_allocator.number_of_registers_ = 1;
+ register_allocator.registers_array_ = allocator.AllocArray<size_t>(1);
+ register_allocator.processing_core_registers_ = true;
+ register_allocator.unhandled_ = &register_allocator.unhandled_core_intervals_;
+
+ register_allocator.TryAllocateFreeReg(unhandled);
+
+ // Check that we have split the interval.
+ ASSERT_EQ(1u, register_allocator.unhandled_->Size());
+ // Check that we now need to find a new register where the next interval
+ // that uses the register starts.
+ ASSERT_EQ(20u, register_allocator.unhandled_->Get(0)->GetStart());
+}
+
} // namespace art