path: root/compiler/optimizing/optimizing_unit_test.h
author	Vladimir Marko <vmarko@google.com>	2017-10-05 14:35:55 +0100
committer	Vladimir Marko <vmarko@google.com>	2017-10-09 10:39:22 +0100
commit	e764d2e50c544c2cb98ee61a15d613161ac6bd17 (patch)
tree	112aa7ca459d2edb4f800897060a2407fcc622c7 /compiler/optimizing/optimizing_unit_test.h
parent	ca6fff898afcb62491458ae8bcd428bfb3043da1 (diff)
Use ScopedArenaAllocator for register allocation.
Memory needed to compile the two most expensive methods for
aosp_angler-userdebug boot image:
  BatteryStats.dumpCheckinLocked(): 25.1MiB -> 21.1MiB
  BatteryStats.dumpLocked(): 49.6MiB -> 42.0MiB

This is because all the memory previously used by the Scheduler is
reused by the register allocator; the register allocator has a
higher peak usage of the ArenaStack.

And continue the "arena" -> "allocator" renaming.

Test: m test-art-host-gtest
Test: testrunner.py --host
Bug: 64312607
Change-Id: Idfd79a9901552b5147ec0bf591cb38120de86b01
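The reuse the message describes is the stack discipline of ScopedArenaAllocator: each compiler pass opens a scope on the shared ArenaStack and releases it when the scope is destroyed, so a later pass's allocations land in the bytes an earlier pass just gave back, and only the peak scope matters for memory. A minimal, self-contained sketch of that pattern follows; the ArenaStackSketch and ScopedArenaAllocatorSketch types are hypothetical stand-ins, not the real ART classes.

// Hypothetical stand-ins illustrating the scoped-arena reuse pattern;
// not the real ART ArenaStack/ScopedArenaAllocator.
#include <cstddef>
#include <cstdio>
#include <vector>

class ArenaStackSketch {
 public:
  explicit ArenaStackSketch(size_t capacity) : buffer_(capacity) {}

  void* Alloc(size_t bytes) {
    bytes = (bytes + 7u) & ~size_t{7u};       // 8-byte alignment.
    if (top_ + bytes > buffer_.size()) return nullptr;
    void* result = &buffer_[top_];
    top_ += bytes;
    peak_ = (top_ > peak_) ? top_ : peak_;    // Track the high-water mark.
    return result;
  }

  size_t Top() const { return top_; }
  size_t Peak() const { return peak_; }
  void Rewind(size_t mark) { top_ = mark; }   // Pop everything above 'mark'.

 private:
  std::vector<unsigned char> buffer_;
  size_t top_ = 0u;
  size_t peak_ = 0u;
};

// RAII scope: all allocations made through it are released together when
// the scope dies, so the next pass reuses the same underlying bytes.
class ScopedArenaAllocatorSketch {
 public:
  explicit ScopedArenaAllocatorSketch(ArenaStackSketch* stack)
      : stack_(stack), mark_(stack->Top()) {}
  ~ScopedArenaAllocatorSketch() { stack_->Rewind(mark_); }

  void* Alloc(size_t bytes) { return stack_->Alloc(bytes); }

 private:
  ArenaStackSketch* const stack_;
  const size_t mark_;
};

int main() {
  ArenaStackSketch stack(1u << 20);           // 1 MiB backing buffer.
  {
    ScopedArenaAllocatorSketch scheduler(&stack);
    scheduler.Alloc(4096);                    // Scheduler's temporaries...
  }                                           // ...released here.
  {
    ScopedArenaAllocatorSketch regalloc(&stack);
    regalloc.Alloc(8192);                     // Reuses the same bytes.
  }
  std::printf("peak: %zu bytes\n", stack.Peak());  // 8192, not 12288.
  return 0;
}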
Diffstat (limited to 'compiler/optimizing/optimizing_unit_test.h')
-rw-r--r--	compiler/optimizing/optimizing_unit_test.h	| 8
1 file changed, 6 insertions, 2 deletions
diff --git a/compiler/optimizing/optimizing_unit_test.h b/compiler/optimizing/optimizing_unit_test.h
index f31ad828eb..5632f9a453 100644
--- a/compiler/optimizing/optimizing_unit_test.h
+++ b/compiler/optimizing/optimizing_unit_test.h
@@ -48,7 +48,7 @@ namespace art {
 LiveInterval* BuildInterval(const size_t ranges[][2],
                             size_t number_of_ranges,
-                            ArenaAllocator* allocator,
+                            ScopedArenaAllocator* allocator,
                             int reg = -1,
                             HInstruction* defined_by = nullptr) {
   LiveInterval* interval =
@@ -81,15 +81,18 @@ void RemoveSuspendChecks(HGraph* graph) {
 class ArenaPoolAndAllocator {
  public:
-  ArenaPoolAndAllocator() : pool_(), allocator_(&pool_), arena_stack_(&pool_) { }
+  ArenaPoolAndAllocator()
+      : pool_(), allocator_(&pool_), arena_stack_(&pool_), scoped_allocator_(&arena_stack_) { }
 
   ArenaAllocator* GetAllocator() { return &allocator_; }
   ArenaStack* GetArenaStack() { return &arena_stack_; }
+  ScopedArenaAllocator* GetScopedAllocator() { return &scoped_allocator_; }
 
  private:
   ArenaPool pool_;
   ArenaAllocator allocator_;
   ArenaStack arena_stack_;
+  ScopedArenaAllocator scoped_allocator_;
 };
 
 inline HGraph* CreateGraph(ArenaPoolAndAllocator* pool_and_allocator) {
@@ -107,6 +110,7 @@ class OptimizingUnitTest : public CommonCompilerTest {
   ArenaAllocator* GetAllocator() { return pool_and_allocator_->GetAllocator(); }
   ArenaStack* GetArenaStack() { return pool_and_allocator_->GetArenaStack(); }
+  ScopedArenaAllocator* GetScopedAllocator() { return pool_and_allocator_->GetScopedAllocator(); }
 
   void ResetPoolAndAllocator() {
     pool_and_allocator_.reset(new ArenaPoolAndAllocator());
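With the fixture change above, a test obtains the scoped allocator from OptimizingUnitTest instead of constructing its own. A hedged sketch of a caller; the test name is hypothetical and it uses only BuildInterval and GetScopedAllocator() as shown in this diff.

TEST_F(OptimizingUnitTest, BuildsIntervalFromScopedAllocator) {
  // {start, end} pairs for the live ranges; BuildInterval now takes the
  // ScopedArenaAllocator exposed by the fixture.
  static constexpr size_t ranges[][2] = {{0, 42}};
  LiveInterval* interval =
      BuildInterval(ranges, /* number_of_ranges= */ 1u, GetScopedAllocator(), /* reg= */ 0);
  ASSERT_TRUE(interval != nullptr);
}

Memory allocated this way is released when the scope unwinds, e.g. on ResetPoolAndAllocator() or fixture teardown, rather than per-object.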