| author | Igor Murashkin <iam@google.com> | 2017-08-09 13:20:34 -0700 |
|---|---|---|
| committer | Igor Murashkin <iam@google.com> | 2017-08-11 10:23:07 -0700 |
| commit | 1e065a54845da12541572f4f149e6ab0dcd20180 (patch) | |
| tree | 061d28c8905c7bc8ac50c8c86f4073034afb5ba2 /compiler/optimizing/optimizing_compiler.cc | |
| parent | f573972a087b798f74bf5404e271355a2805e100 (diff) | |
optimizing: Refactor statistics to use OptimizingCompilerStats helper
Remove all copies of 'MaybeRecordStat', replacing them with a single
OptimizingCompilerStats::MaybeRecordStat helper.
Change-Id: I83b96b41439dccece3eee2e159b18c95336ea933
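The refactor can be summarized with a small sketch. The sketch below is illustrative only: the real declarations live in optimizing_compiler_stats.h, which is not part of this diff, so the exact signature, whether the helper is a static member or a free function, and the `kLastStat` sentinel and counter storage are assumptions inferred from the call sites shown in the diff.

```cpp
// Illustrative sketch only -- not the actual ART declarations.
// The helper signature and the enum values are inferred from the call sites
// in this diff; kLastStat and the counter array are made-up stand-ins.
#include <atomic>
#include <cstddef>

enum class MethodCompilationStat : size_t {
  kAttemptCompilation,
  kCompiled,
  kNotCompiledUnsupportedIsa,
  kLastStat  // hypothetical sentinel used only to size the counter array
};

class OptimizingCompilerStats {
 public:
  void RecordStat(MethodCompilationStat stat) {
    counters_[static_cast<size_t>(stat)].fetch_add(1, std::memory_order_relaxed);
  }

 private:
  std::atomic<size_t> counters_[static_cast<size_t>(MethodCompilationStat::kLastStat)] = {};
};

// The single shared helper: callers pass their (possibly null) stats object
// explicitly, so the null check lives in exactly one place instead of being
// duplicated as a private MaybeRecordStat member in every client class.
inline void MaybeRecordStat(OptimizingCompilerStats* stats, MethodCompilationStat stat) {
  if (stats != nullptr) {
    stats->RecordStat(stat);
  }
}
```

With this shape, the call sites in the diff below read as plain forwarding calls, e.g. `MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kAttemptCompilation);`, and passes such as LoadStoreElimination can record statistics simply by taking a `stats` pointer in their constructors.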
Diffstat (limited to 'compiler/optimizing/optimizing_compiler.cc')
-rw-r--r-- | compiler/optimizing/optimizing_compiler.cc | 66 |
1 file changed, 40 insertions, 26 deletions
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 76a243f793..70bbc382b4 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -329,12 +329,6 @@ class OptimizingCompiler FINAL : public Compiler {
 
   void UnInit() const OVERRIDE;
 
-  void MaybeRecordStat(MethodCompilationStat compilation_stat) const {
-    if (compilation_stats_.get() != nullptr) {
-      compilation_stats_->RecordStat(compilation_stat);
-    }
-  }
-
   bool JitCompile(Thread* self,
                   jit::JitCodeCache* code_cache,
                   ArtMethod* method,
@@ -512,7 +506,8 @@ static HOptimization* BuildOptimization(
   } else if (opt_name == LoadStoreElimination::kLoadStoreEliminationPassName) {
     CHECK(most_recent_side_effects != nullptr);
     CHECK(most_recent_lsa != nullptr);
-    return new (arena) LoadStoreElimination(graph, *most_recent_side_effects, *most_recent_lsa);
+    return
+        new (arena) LoadStoreElimination(graph, *most_recent_side_effects, *most_recent_lsa, stats);
   } else if (opt_name == SideEffectsAnalysis::kSideEffectsAnalysisPassName) {
     return new (arena) SideEffectsAnalysis(graph);
   } else if (opt_name == HLoopOptimization::kLoopOptimizationPassName) {
@@ -778,7 +773,7 @@ void OptimizingCompiler::RunOptimizations(HGraph* graph,
   BoundsCheckElimination* bce = new (arena) BoundsCheckElimination(graph, *side_effects1, induction);
   HLoopOptimization* loop = new (arena) HLoopOptimization(graph, driver, induction);
   LoadStoreAnalysis* lsa = new (arena) LoadStoreAnalysis(graph);
-  LoadStoreElimination* lse = new (arena) LoadStoreElimination(graph, *side_effects2, *lsa);
+  LoadStoreElimination* lse = new (arena) LoadStoreElimination(graph, *side_effects2, *lsa, stats);
   HSharpening* sharpening = new (arena) HSharpening(
       graph, codegen, dex_compilation_unit, driver, handles);
   InstructionSimplifier* simplify2 = new (arena) InstructionSimplifier(
@@ -894,7 +889,8 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* arena,
                                               ArtMethod* method,
                                               bool osr,
                                               VariableSizedHandleScope* handles) const {
-  MaybeRecordStat(MethodCompilationStat::kAttemptCompilation);
+  MaybeRecordStat(compilation_stats_.get(),
+                  MethodCompilationStat::kAttemptCompilation);
   CompilerDriver* compiler_driver = GetCompilerDriver();
   InstructionSet instruction_set = compiler_driver->GetInstructionSet();
 
@@ -904,12 +900,14 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* arena,
 
   // Do not attempt to compile on architectures we do not support.
   if (!IsInstructionSetSupported(instruction_set)) {
-    MaybeRecordStat(MethodCompilationStat::kNotCompiledUnsupportedIsa);
+    MaybeRecordStat(compilation_stats_.get(),
+                    MethodCompilationStat::kNotCompiledUnsupportedIsa);
     return nullptr;
   }
 
   if (Compiler::IsPathologicalCase(*code_item, method_idx, dex_file)) {
-    MaybeRecordStat(MethodCompilationStat::kNotCompiledPathological);
+    MaybeRecordStat(compilation_stats_.get(),
+                    MethodCompilationStat::kNotCompiledPathological);
     return nullptr;
   }
 
@@ -919,7 +917,8 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* arena,
   const CompilerOptions& compiler_options = compiler_driver->GetCompilerOptions();
   if ((compiler_options.GetCompilerFilter() == CompilerFilter::kSpace) &&
       (code_item->insns_size_in_code_units_ > kSpaceFilterOptimizingThreshold)) {
-    MaybeRecordStat(MethodCompilationStat::kNotCompiledSpaceFilter);
+    MaybeRecordStat(compilation_stats_.get(),
+                    MethodCompilationStat::kNotCompiledSpaceFilter);
     return nullptr;
   }
 
@@ -966,7 +965,8 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* arena,
                             compiler_driver->GetCompilerOptions(),
                             compilation_stats_.get()));
   if (codegen.get() == nullptr) {
-    MaybeRecordStat(MethodCompilationStat::kNotCompiledNoCodegen);
+    MaybeRecordStat(compilation_stats_.get(),
+                    MethodCompilationStat::kNotCompiledNoCodegen);
     return nullptr;
   }
   codegen->GetAssembler()->cfi().SetEnabled(
@@ -995,17 +995,25 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* arena,
     GraphAnalysisResult result = builder.BuildGraph();
     if (result != kAnalysisSuccess) {
       switch (result) {
-        case kAnalysisSkipped:
-          MaybeRecordStat(MethodCompilationStat::kNotCompiledSkipped);
+        case kAnalysisSkipped: {
+          MaybeRecordStat(compilation_stats_.get(),
+                          MethodCompilationStat::kNotCompiledSkipped);
+        }
           break;
-        case kAnalysisInvalidBytecode:
-          MaybeRecordStat(MethodCompilationStat::kNotCompiledInvalidBytecode);
+        case kAnalysisInvalidBytecode: {
+          MaybeRecordStat(compilation_stats_.get(),
+                          MethodCompilationStat::kNotCompiledInvalidBytecode);
+        }
           break;
-        case kAnalysisFailThrowCatchLoop:
-          MaybeRecordStat(MethodCompilationStat::kNotCompiledThrowCatchLoop);
+        case kAnalysisFailThrowCatchLoop: {
+          MaybeRecordStat(compilation_stats_.get(),
+                          MethodCompilationStat::kNotCompiledThrowCatchLoop);
+        }
           break;
-        case kAnalysisFailAmbiguousArrayOp:
-          MaybeRecordStat(MethodCompilationStat::kNotCompiledAmbiguousArrayOp);
+        case kAnalysisFailAmbiguousArrayOp: {
+          MaybeRecordStat(compilation_stats_.get(),
+                          MethodCompilationStat::kNotCompiledAmbiguousArrayOp);
+        }
           break;
         case kAnalysisSuccess:
           UNREACHABLE();
@@ -1024,7 +1032,10 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* arena,
 
     RegisterAllocator::Strategy regalloc_strategy =
       compiler_options.GetRegisterAllocationStrategy();
-    AllocateRegisters(graph, codegen.get(), &pass_observer, regalloc_strategy);
+    AllocateRegisters(graph,
+                      codegen.get(),
+                      &pass_observer,
+                      regalloc_strategy);
 
     codegen->Compile(code_allocator);
     pass_observer.DumpDisassembly();
@@ -1072,7 +1083,8 @@ CompiledMethod* OptimizingCompiler::Compile(const DexFile::CodeItem* code_item,
                      &handles));
     }
     if (codegen.get() != nullptr) {
-      MaybeRecordStat(MethodCompilationStat::kCompiled);
+      MaybeRecordStat(compilation_stats_.get(),
+                      MethodCompilationStat::kCompiled);
       method = Emit(&arena, &code_allocator, codegen.get(), compiler_driver, code_item);
 
       if (kArenaAllocatorCountAllocations) {
@@ -1083,11 +1095,13 @@ CompiledMethod* OptimizingCompiler::Compile(const DexFile::CodeItem* code_item,
      }
    }
  } else {
+    MethodCompilationStat method_stat;
     if (compiler_driver->GetCompilerOptions().VerifyAtRuntime()) {
-      MaybeRecordStat(MethodCompilationStat::kNotCompiledVerifyAtRuntime);
+      method_stat = MethodCompilationStat::kNotCompiledVerifyAtRuntime;
     } else {
-      MaybeRecordStat(MethodCompilationStat::kNotCompiledVerificationError);
+      method_stat = MethodCompilationStat::kNotCompiledVerificationError;
     }
+    MaybeRecordStat(compilation_stats_.get(), method_stat);
   }
 
   if (kIsDebugBuild &&
@@ -1221,7 +1235,7 @@ bool OptimizingCompiler::JitCompile(Thread* self,
   if (stack_map_data == nullptr || roots_data == nullptr) {
     return false;
   }
-  MaybeRecordStat(MethodCompilationStat::kCompiled);
+  MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiled);
   codegen->BuildStackMaps(MemoryRegion(stack_map_data, stack_map_size),
                           MemoryRegion(method_info_data, method_info_size),
                           *code_item);