author     David Brazdil <dbrazdil@google.com>  2015-06-26 16:58:14 +0100
committer  David Brazdil <dbrazdil@google.com>  2015-06-26 16:58:14 +0100
commit     1ff04abecc7318826248c24850c1116df5b68b33 (patch)
tree       fb2130a79f1dec76bf0c7de0b7fd51287811d872 /compiler
parent     5597b422882a5ab9dc5eaaedd644e30bc2fd7c05 (diff)
ART: Bail out immediately on try/catch
Optimizing builds the graph even if it knows it will delegate to Quick. This
patch moves the decision to the beginning of the TryCompile function to save
a little bit of compile time.

Bug: 22115561
Change-Id: I18b5a686e10592080ea3c49d53a0dd81aa3b3010
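The early bail-out this patch introduces can be illustrated with a small,
self-contained sketch. This is only a simplified stand-in, not ART code: the
CodeItem struct, its has_try_catch flag, and the string return value are
invented for illustration; only the names run_optimizations_, CanOptimize,
should_use_baseline, and TryCompile mirror the diff below.

// Simplified sketch (not ART code): bail out of TryCompile before building
// the graph when neither the optimizing nor the baseline backend will be used.
#include <iostream>

struct CodeItem {
  bool has_try_catch;  // hypothetical stand-in for what CanOptimize rejects
};

class Compiler {
 public:
  explicit Compiler(bool run_optimizations) : run_optimizations_(run_optimizations) {}

  // Returns a non-null result only if this compiler handles the method;
  // a nullptr result means the caller falls back to Quick.
  const char* TryCompile(const CodeItem& code_item) const {
    // Decide up front, before any graph construction, whether we can compile.
    bool should_use_baseline = !run_optimizations_;
    bool can_optimize = CanOptimize(code_item);
    if (!can_optimize && !should_use_baseline) {
      // We know we will not compile this method; bail out before doing any work.
      return nullptr;
    }
    // ... graph building and code generation would happen here ...
    return can_optimize ? "optimized" : "baseline";
  }

 private:
  static bool CanOptimize(const CodeItem& code_item) {
    return !code_item.has_try_catch;  // Optimizing cannot handle try/catch yet
  }

  bool run_optimizations_;
};

int main() {
  Compiler optimizing(/*run_optimizations=*/true);
  CodeItem item;
  item.has_try_catch = true;
  // Prints "delegate to Quick": the early check rejects the try/catch method
  // before any further work is done.
  std::cout << (optimizing.TryCompile(item) ? "compiled" : "delegate to Quick") << "\n";
  return 0;
}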
Diffstat (limited to 'compiler')
-rw-r--r--  compiler/optimizing/optimizing_compiler.cc  |  17
1 file changed, 11 insertions(+), 6 deletions(-)
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 5864741..d0d63a4 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -491,6 +491,16 @@ CompiledMethod* OptimizingCompiler::TryCompile(const DexFile::CodeItem* code_ite
instruction_set = kThumb2;
}
+ // `run_optimizations_` is set explicitly (either through a compiler filter
+ // or the debuggable flag). If it is set, we can run baseline. Otherwise, we
+ // fall back to Quick.
+ bool should_use_baseline = !run_optimizations_;
+ bool can_optimize = CanOptimize(*code_item);
+ if (!can_optimize && !should_use_baseline) {
+ // We know we will not compile this method. Bail out before doing any work.
+ return nullptr;
+ }
+
// Do not attempt to compile on architectures we do not support.
if (!IsInstructionSetSupported(instruction_set)) {
MaybeRecordStat(MethodCompilationStat::kNotCompiledUnsupportedIsa);
@@ -564,13 +574,8 @@ CompiledMethod* OptimizingCompiler::TryCompile(const DexFile::CodeItem* code_ite
}
}
- bool can_optimize = CanOptimize(*code_item);
bool can_allocate_registers = RegisterAllocator::CanAllocateRegistersFor(*graph, instruction_set);
- // `run_optimizations_` is set explicitly (either through a compiler filter
- // or the debuggable flag). If it is set, we can run baseline. Otherwise, we fall back
- // to Quick.
- bool can_use_baseline = !run_optimizations_;
if (run_optimizations_ && can_optimize && can_allocate_registers) {
VLOG(compiler) << "Optimizing " << method_name;
@@ -593,7 +598,7 @@ CompiledMethod* OptimizingCompiler::TryCompile(const DexFile::CodeItem* code_ite
} else if (shouldOptimize && can_allocate_registers) {
LOG(FATAL) << "Could not allocate registers in optimizing compiler";
UNREACHABLE();
- } else if (can_use_baseline) {
+ } else if (should_use_baseline) {
VLOG(compiler) << "Compile baseline " << method_name;
if (!run_optimizations_) {