author     Vladimir Marko <vmarko@google.com>    2017-11-24 15:02:40 +0000
committer  Vladimir Marko <vmarko@google.com>    2017-11-27 19:02:34 +0000
commit     cd09e1f4f9902b82fa62cb2da984ea499e3b2d70 (patch)
tree       535f7f75849af30b67c560804125ead95909d72b /compiler/optimizing/optimizing_compiler.cc
parent     72a3f1da3a300b486626b066e33280108b5ce994 (diff)
Fix stats reporting over 100% methods compiled.
Add statistics for intrinsic and native stub compilation and
JIT failing to allocate memory for committing the code.
Clean up recording of compilation statistics.

New statistics when building aosp_taimen-userdebug boot image
with --dump-stats:
  Attempted compilation of 94304 methods: 99.99% (94295) compiled.
  OptStat#AttemptBytecodeCompilation: 89487
  OptStat#AttemptIntrinsicCompilation: 160
  OptStat#CompiledNativeStub: 4733
  OptStat#CompiledIntrinsic: 84
  OptStat#CompiledBytecode: 89478
  ...
where 94304=89487+4733+84 and 94295=89478+4733+84.

Test: testrunner.py -b --host --optimizing
Test: Manually inspect output of building boot image with --dump-stats.
Bug: 69627511
Change-Id: I15eb2b062a96f09a7721948bcc77b83ee4f18efd
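The arithmetic quoted above can be reproduced with a minimal standalone C++ sketch. This is illustrative only, not ART code: the variable names and the main() harness are made up, and it assumes that a failed intrinsic attempt falls back to bytecode compilation (which is why only the 84 compiled intrinsics, not the 160 attempts, enter the totals).

    #include <cstdint>
    #include <iomanip>
    #include <iostream>

    int main() {
      // Counters taken from the --dump-stats output quoted in the commit message.
      const uint64_t attempted_bytecode = 89487;  // OptStat#AttemptBytecodeCompilation
      const uint64_t compiled_bytecode  = 89478;  // OptStat#CompiledBytecode
      const uint64_t compiled_native    = 4733;   // OptStat#CompiledNativeStub
      const uint64_t compiled_intrinsic = 84;     // OptStat#CompiledIntrinsic

      // Native stubs and intrinsics contribute equally to both sides: a failed
      // intrinsic attempt is assumed to be retried (and counted) as a bytecode
      // compilation, so only successes appear in these two buckets.
      const uint64_t attempted = attempted_bytecode + compiled_native + compiled_intrinsic;
      const uint64_t compiled  = compiled_bytecode + compiled_native + compiled_intrinsic;

      std::cout << "Attempted compilation of " << attempted << " methods: "
                << std::fixed << std::setprecision(2)
                << 100.0 * static_cast<double>(compiled) / static_cast<double>(attempted)
                << "% (" << compiled << ") compiled." << std::endl;
      return 0;
    }

Running this prints the same "Attempted compilation of 94304 methods: 99.99% (94295) compiled." line as the summary above, confirming that the compiled total can no longer exceed the attempted total.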
Diffstat (limited to 'compiler/optimizing/optimizing_compiler.cc')
-rw-r--r--  compiler/optimizing/optimizing_compiler.cc  34
1 file changed, 16 insertions, 18 deletions
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 53f9ec413b..095ca6372e 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -738,7 +738,7 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator,
ArtMethod* method,
bool osr,
VariableSizedHandleScope* handles) const {
- MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kAttemptCompilation);
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kAttemptBytecodeCompilation);
CompilerDriver* compiler_driver = GetCompilerDriver();
InstructionSet instruction_set = compiler_driver->GetInstructionSet();
const DexFile& dex_file = *dex_compilation_unit.GetDexFile();
@@ -757,8 +757,7 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator,
}
if (Compiler::IsPathologicalCase(*code_item, method_idx, dex_file)) {
- MaybeRecordStat(compilation_stats_.get(),
- MethodCompilationStat::kNotCompiledPathological);
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kNotCompiledPathological);
return nullptr;
}
@@ -768,8 +767,7 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator,
const CompilerOptions& compiler_options = compiler_driver->GetCompilerOptions();
if ((compiler_options.GetCompilerFilter() == CompilerFilter::kSpace)
&& (code_item->insns_size_in_code_units_ > kSpaceFilterOptimizingThreshold)) {
- MaybeRecordStat(compilation_stats_.get(),
- MethodCompilationStat::kNotCompiledSpaceFilter);
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kNotCompiledSpaceFilter);
return nullptr;
}
@@ -800,8 +798,7 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator,
compiler_driver->GetCompilerOptions(),
compilation_stats_.get()));
if (codegen.get() == nullptr) {
- MaybeRecordStat(compilation_stats_.get(),
- MethodCompilationStat::kNotCompiledNoCodegen);
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kNotCompiledNoCodegen);
return nullptr;
}
codegen->GetAssembler()->cfi().SetEnabled(
@@ -873,6 +870,7 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator,
codegen->Compile(code_allocator);
pass_observer.DumpDisassembly();
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiledBytecode);
return codegen.release();
}
@@ -883,6 +881,7 @@ CodeGenerator* OptimizingCompiler::TryCompileIntrinsic(
const DexCompilationUnit& dex_compilation_unit,
ArtMethod* method,
VariableSizedHandleScope* handles) const {
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kAttemptIntrinsicCompilation);
CompilerDriver* compiler_driver = GetCompilerDriver();
InstructionSet instruction_set = compiler_driver->GetInstructionSet();
const DexFile& dex_file = *dex_compilation_unit.GetDexFile();
@@ -894,8 +893,6 @@ CodeGenerator* OptimizingCompiler::TryCompileIntrinsic(
// Do not attempt to compile on architectures we do not support.
if (!IsInstructionSetSupported(instruction_set)) {
- MaybeRecordStat(compilation_stats_.get(),
- MethodCompilationStat::kNotCompiledUnsupportedIsa);
return nullptr;
}
@@ -920,8 +917,6 @@ CodeGenerator* OptimizingCompiler::TryCompileIntrinsic(
compiler_driver->GetCompilerOptions(),
compilation_stats_.get()));
if (codegen.get() == nullptr) {
- MaybeRecordStat(compilation_stats_.get(),
- MethodCompilationStat::kNotCompiledNoCodegen);
return nullptr;
}
codegen->GetAssembler()->cfi().SetEnabled(
@@ -979,6 +974,7 @@ CodeGenerator* OptimizingCompiler::TryCompileIntrinsic(
VLOG(compiler) << "Compiled intrinsic: " << method->GetIntrinsic()
<< " " << graph->PrettyMethod();
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiledIntrinsic);
return codegen.release();
}
@@ -1046,8 +1042,6 @@ CompiledMethod* OptimizingCompiler::Compile(const DexFile::CodeItem* code_item,
}
}
if (codegen.get() != nullptr) {
- MaybeRecordStat(compilation_stats_.get(),
- MethodCompilationStat::kCompiled);
compiled_method = Emit(&allocator,
&code_allocator,
codegen.get(),
@@ -1139,10 +1133,12 @@ CompiledMethod* OptimizingCompiler::JniCompile(uint32_t access_flags,
}
}
- return ArtQuickJniCompileMethod(GetCompilerDriver(),
- access_flags,
- method_idx,
- dex_file);
+ CompiledMethod* compiled_method = ArtQuickJniCompileMethod(GetCompilerDriver(),
+ access_flags,
+ method_idx,
+ dex_file);
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiledNativeStub);
+ return compiled_method;
}
Compiler* CreateOptimizingCompiler(CompilerDriver* driver) {
@@ -1237,6 +1233,7 @@ bool OptimizingCompiler::JitCompile(Thread* self,
self, class_linker->GetClassRoot(ClassLinker::kObjectArrayClass), number_of_roots)));
if (roots == nullptr) {
// Out of memory, just clear the exception to avoid any Java exception uncaught problems.
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kJitOutOfMemoryForCommit);
DCHECK(self->IsExceptionPending());
self->ClearException();
return false;
@@ -1253,9 +1250,9 @@ bool OptimizingCompiler::JitCompile(Thread* self,
&method_info_data,
&roots_data);
if (stack_map_data == nullptr || roots_data == nullptr) {
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kJitOutOfMemoryForCommit);
return false;
}
- MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiled);
codegen->BuildStackMaps(MemoryRegion(stack_map_data, stack_map_size),
MemoryRegion(method_info_data, method_info_size),
code_item);
@@ -1279,6 +1276,7 @@ bool OptimizingCompiler::JitCompile(Thread* self,
codegen->GetGraph()->GetCHASingleImplementationList());
if (code == nullptr) {
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kJitOutOfMemoryForCommit);
code_cache->ClearData(self, stack_map_data, roots_data);
return false;
}
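Every hunk above funnels its bookkeeping through the same two-argument call, MaybeRecordStat(stats, stat), so each method increments exactly one "attempted" counter and at most one "compiled" counter. A rough sketch of how such a guarded counter helper can be structured follows; the enum values are the ones visible in the diff, but the class name, storage layout, and everything else here are assumptions, and the real ART statistics code differs in detail.

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    // Subset of the statistics referenced in the diff above.
    enum class MethodCompilationStat : size_t {
      kAttemptBytecodeCompilation = 0,
      kAttemptIntrinsicCompilation,
      kCompiledNativeStub,
      kCompiledIntrinsic,
      kCompiledBytecode,
      kJitOutOfMemoryForCommit,
      kLastStat
    };

    // Hypothetical stand-in for the compiler's stats object.
    class CompilationStats {
     public:
      void RecordStat(MethodCompilationStat stat, uint32_t count = 1) {
        // Relaxed ordering is enough for independent event counters.
        counters_[static_cast<size_t>(stat)].fetch_add(count, std::memory_order_relaxed);
      }

     private:
      std::atomic<uint32_t> counters_[static_cast<size_t>(MethodCompilationStat::kLastStat)] = {};
    };

    // Recording is optional: when stats collection is disabled the pointer is null,
    // which matches the compilation_stats_.get() call sites in the diff.
    inline void MaybeRecordStat(CompilationStats* stats, MethodCompilationStat stat) {
      if (stats != nullptr) {
        stats->RecordStat(stat);
      }
    }

With one such call per outcome, the "compiled" buckets stay bounded by the "attempted" buckets that the --dump-stats summary divides by, which is what fixes the over-100% reporting.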