diff options
author | Vladimir Marko <vmarko@google.com> | 2020-06-08 10:26:06 +0100 |
---|---|---|
committer | Vladimir Marko <vmarko@google.com> | 2020-06-08 12:41:04 +0000 |
commit | ef898425c975f150caaed077ca204fa86b951e7f (patch) | |
tree | 1ad038b90bb860fe1b9a20872b990c7918fcd1e1 /compiler/optimizing/load_store_analysis_test.cc | |
parent | f7290cac4af6a981d98122af1a6d48b0e80da574 (diff) |
Run LSA as a part of the LSE pass.
Make LSA a helper class, not an optimization pass. Move all
its allocations to ScopedArenaAllocator to reduce the peak
memory usage a little bit.
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Change-Id: I7fc634abe732d22c99005921ffecac5207bcf05f
Diffstat (limited to 'compiler/optimizing/load_store_analysis_test.cc')
-rw-r--r-- | compiler/optimizing/load_store_analysis_test.cc | 18 |
1 file changed, 12 insertions(+), 6 deletions(-)
diff --git a/compiler/optimizing/load_store_analysis_test.cc b/compiler/optimizing/load_store_analysis_test.cc index d725aba9c8..c518f03fbe 100644 --- a/compiler/optimizing/load_store_analysis_test.cc +++ b/compiler/optimizing/load_store_analysis_test.cc @@ -66,7 +66,8 @@ TEST_F(LoadStoreAnalysisTest, ArrayHeapLocations) { // Test HeapLocationCollector initialization. // Should be no heap locations, no operations on the heap. - HeapLocationCollector heap_location_collector(graph_); + ScopedArenaAllocator allocator(graph_->GetArenaStack()); + HeapLocationCollector heap_location_collector(graph_, &allocator); ASSERT_EQ(heap_location_collector.GetNumberOfHeapLocations(), 0U); ASSERT_FALSE(heap_location_collector.HasHeapStores()); @@ -162,7 +163,8 @@ TEST_F(LoadStoreAnalysisTest, FieldHeapLocations) { // Test HeapLocationCollector initialization. // Should be no heap locations, no operations on the heap. - HeapLocationCollector heap_location_collector(graph_); + ScopedArenaAllocator allocator(graph_->GetArenaStack()); + HeapLocationCollector heap_location_collector(graph_, &allocator); ASSERT_EQ(heap_location_collector.GetNumberOfHeapLocations(), 0U); ASSERT_FALSE(heap_location_collector.HasHeapStores()); @@ -241,7 +243,8 @@ TEST_F(LoadStoreAnalysisTest, ArrayIndexAliasingTest) { entry->AddInstruction(arr_set7); // array[1-i] = c0 entry->AddInstruction(arr_set8); // array[i-(-1)] = c0 - LoadStoreAnalysis lsa(graph_); + ScopedArenaAllocator allocator(graph_->GetArenaStack()); + LoadStoreAnalysis lsa(graph_, &allocator); lsa.Run(); const HeapLocationCollector& heap_location_collector = lsa.GetHeapLocationCollector(); @@ -407,7 +410,8 @@ TEST_F(LoadStoreAnalysisTest, ArrayAliasingTest) { entry->AddInstruction(vstore_i_add8); entry->AddInstruction(vstore_i_add6_vlen2); - LoadStoreAnalysis lsa(graph_); + ScopedArenaAllocator allocator(graph_->GetArenaStack()); + LoadStoreAnalysis lsa(graph_, &allocator); lsa.Run(); const HeapLocationCollector& heap_location_collector = 
lsa.GetHeapLocationCollector(); @@ -565,7 +569,8 @@ TEST_F(LoadStoreAnalysisTest, ArrayIndexCalculationOverflowTest) { entry->AddInstruction(arr_set_7); entry->AddInstruction(arr_set_8); - LoadStoreAnalysis lsa(graph_); + ScopedArenaAllocator allocator(graph_->GetArenaStack()); + LoadStoreAnalysis lsa(graph_, &allocator); lsa.Run(); const HeapLocationCollector& heap_location_collector = lsa.GetHeapLocationCollector(); @@ -654,7 +659,8 @@ TEST_F(LoadStoreAnalysisTest, TestHuntOriginalRef) { entry->AddInstruction(inter_addr); entry->AddInstruction(array_get4); - HeapLocationCollector heap_location_collector(graph_); + ScopedArenaAllocator allocator(graph_->GetArenaStack()); + HeapLocationCollector heap_location_collector(graph_, &allocator); heap_location_collector.VisitBasicBlock(entry); // Test that the HeapLocationCollector should be able to tell |