| author | Vladimir Marko <vmarko@google.com> | 2015-09-28 12:17:40 +0100 |
|---|---|---|
| committer | Vladimir Marko <vmarko@google.com> | 2015-09-29 10:49:35 +0100 |
| commit | 225b6464a58ebe11c156144653f11a1c6607f4eb (patch) | |
| tree | 3f1c6067c3841c892edaa1a60a61af9c559cb4e4 /compiler/optimizing/code_generator.h | |
| parent | 6a9984e62c08bcd78c8e49dd40b1f0f9d53513b7 (diff) | |
Optimizing: Tag arena allocations in code generators.
And completely remove the deprecated GrowableArray.
Replace GrowableArray with ArenaVector in code generators
and related classes and tag arena allocations.
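For readers unfamiliar with the pattern, the sketch below shows the general shape of the change: a std::vector parameterized with an arena-backed allocator adapter that records a tag per allocation, so memory use can be attributed to a subsystem such as the code generator. This is a simplified, self-contained illustration, not ART's actual ArenaVector/ArenaAllocator code; the Arena and ArenaAdapter types and the tag bookkeeping here are invented for the example.

```cpp
// Simplified sketch only: a tagged bump-pointer arena plus a std::allocator-
// compatible adapter, mirroring the GrowableArray -> ArenaVector change.
// Arena/ArenaAdapter are invented names, not ART's ArenaAllocator classes.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <vector>

enum ArenaAllocKind { kArenaAllocCodeGenerator, kArenaAllocOther, kNumArenaAllocKinds };

class Arena {
 public:
  // Bump-pointer allocation from a single block; bytes are attributed to `kind`.
  void* Alloc(size_t bytes, ArenaAllocKind kind) {
    used_ = (used_ + alignof(std::max_align_t) - 1) & ~(alignof(std::max_align_t) - 1);
    assert(used_ + bytes <= sizeof(storage_));
    void* result = &storage_[used_];
    used_ += bytes;
    bytes_per_kind_[kind] += bytes;
    return result;
  }
  size_t BytesAllocated(ArenaAllocKind kind) const { return bytes_per_kind_[kind]; }

 private:
  alignas(std::max_align_t) uint8_t storage_[1 << 16];
  size_t used_ = 0;
  size_t bytes_per_kind_[kNumArenaAllocKinds] = {};
};

// Allocator adapter carrying the arena and the allocation tag.
template <typename T>
class ArenaAdapter {
 public:
  using value_type = T;

  ArenaAdapter(Arena* arena, ArenaAllocKind kind) : arena_(arena), kind_(kind) {}
  template <typename U>
  ArenaAdapter(const ArenaAdapter<U>& other) : arena_(other.arena_), kind_(other.kind_) {}

  T* allocate(size_t n) { return static_cast<T*>(arena_->Alloc(n * sizeof(T), kind_)); }
  void deallocate(T*, size_t) {}  // Arena memory is released all at once, not per object.

  template <typename U>
  bool operator==(const ArenaAdapter<U>& other) const { return arena_ == other.arena_; }
  template <typename U>
  bool operator!=(const ArenaAdapter<U>& other) const { return !(*this == other); }

  Arena* arena_;
  ArenaAllocKind kind_;
};

template <typename T>
using ArenaVector = std::vector<T, ArenaAdapter<T>>;

int main() {
  Arena arena;
  // Roughly analogous to: slow_paths_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator))
  ArenaVector<int> slow_paths(ArenaAdapter<int>(&arena, kArenaAllocCodeGenerator));
  slow_paths.reserve(8);  // The patch keeps the old initial capacity of 8 via reserve().
  slow_paths.push_back(42);
  std::cout << arena.BytesAllocated(kArenaAllocCodeGenerator) << " bytes tagged\n";
}
```

As in the patch below, the explicit reserve(8) preserves the initial capacity that the old GrowableArray constructor received as an argument.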
Label arrays use direct allocations from ArenaAllocator
because Label is non-copyable and non-movable and as such
cannot really be held in a container. The GrowableArray
never actually constructed them, instead relying on the
zero-initialized storage from the arena allocator to be
correct. We now actually construct the labels.
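The placement-new construction described above can be sketched as follows. This is a minimal stand-alone illustration, not the ART code itself; AllocArrayFromArena() is a hypothetical stand-in for the arena's AllocArray<T>() used by CommonInitializeLabels() in the patch.

```cpp
// Minimal sketch: a non-copyable, non-movable Label stand-in is constructed
// with placement new in raw storage, because such a type cannot be stored in
// a std::vector. AllocArrayFromArena() is a hypothetical helper, not ART API.
#include <cstddef>
#include <new>

class Label {
 public:
  Label() = default;
  Label(const Label&) = delete;             // non-copyable
  Label& operator=(const Label&) = delete;
  Label(Label&&) = delete;                  // non-movable

  bool IsBound() const { return position_ != -1; }

 private:
  int position_ = -1;  // e.g. code offset once the label is bound
};

// Returns raw, uninitialized storage, as an arena allocator would.
template <typename T>
T* AllocArrayFromArena(size_t count) {
  return static_cast<T*>(::operator new(count * sizeof(T)));
}

Label* InitializeLabels(size_t num_blocks) {
  Label* labels = AllocArrayFromArena<Label>(num_blocks);
  // GrowableArray never constructed the labels and relied on zero-initialized
  // arena memory; here each Label is explicitly constructed in place.
  for (size_t i = 0; i != num_blocks; ++i) {
    new (labels + i) Label();
  }
  return labels;
}

int main() {
  Label* labels = InitializeLabels(4);  // e.g. one label per basic block
  (void)labels;  // With a real arena, the storage is reclaimed with the arena.
}
```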
Also, avoid passing null references to
StackMapStream::ComputeDexRegisterMapSize(), even though they are unused.
Change-Id: I26a46fdd406b23a3969300a67739d55528df8bf4
Diffstat (limited to 'compiler/optimizing/code_generator.h')
-rw-r--r-- | compiler/optimizing/code_generator.h | 24 |
1 file changed, 19 insertions, 5 deletions
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index a1c6db0a2c..b58a3ff7f2 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -261,7 +261,7 @@ class CodeGenerator {
   bool IsImplicitNullCheckAllowed(HNullCheck* null_check) const;

   void AddSlowPath(SlowPathCode* slow_path) {
-    slow_paths_.Add(slow_path);
+    slow_paths_.push_back(slow_path);
   }

   void SetSrcMap(DefaultSrcMap* src_map) { src_map_ = src_map; }
@@ -441,10 +441,12 @@ class CodeGenerator {
         graph_(graph),
         compiler_options_(compiler_options),
         src_map_(nullptr),
-        slow_paths_(graph->GetArena(), 8),
+        slow_paths_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
         current_block_index_(0),
         is_leaf_(true),
-        requires_current_method_(false) {}
+        requires_current_method_(false) {
+    slow_paths_.reserve(8);
+  }

   // Register allocation logic.
   void AllocateRegistersLocally(HInstruction* instruction) const;
@@ -485,8 +487,20 @@ class CodeGenerator {
     return instruction_set == kX86 || instruction_set == kX86_64;
   }

-  // Arm64 has its own type for a label, so we need to templatize this method
+  // Arm64 has its own type for a label, so we need to templatize these methods
   // to share the logic.
+
+  template <typename LabelType>
+  LabelType* CommonInitializeLabels() {
+    size_t size = GetGraph()->GetBlocks().size();
+    LabelType* labels = GetGraph()->GetArena()->AllocArray<LabelType>(size,
+                                                                      kArenaAllocCodeGenerator);
+    for (size_t i = 0; i != size; ++i) {
+      new(labels + i) LabelType();
+    }
+    return labels;
+  }
+
   template <typename LabelType>
   LabelType* CommonGetLabelOf(LabelType* raw_pointer_to_labels_array, HBasicBlock* block) const {
     block = FirstNonEmptyBlock(block);
@@ -539,7 +553,7 @@ class CodeGenerator {
   // Native to dex_pc map used for native debugging/profiling tools.
   DefaultSrcMap* src_map_;

-  GrowableArray<SlowPathCode*> slow_paths_;
+  ArenaVector<SlowPathCode*> slow_paths_;

   // The current block index in `block_order_` of the block
   // we are generating code for.