author     Alex Light <allight@google.com>   2020-11-02 08:48:33 -0800
committer  Alex Light <allight@google.com>   2021-01-21 17:58:10 +0000
commit     b8686ce4c93eba7192ed7ef89e7ffd9f3aa6cd07 (patch)
tree       1721ee940f978736a2212d693271ee698897cb0b /compiler/optimizing/load_store_analysis.h
parent     625048049558d394d50b6e98885b8c210e481bf1 (diff)
Partial Load Store Elimination

Add partial load-store elimination to the LSE pass. Partial LSE will move
object allocations which only escape along certain execution paths closer to
the escape point and allow more values to be eliminated. It does this by
creating new predicated load and store instructions that are used when an
object has only escaped some of the time. In cases where the object has not
escaped, a default value will be used.

Test: ./test.py --host
Test: ./test.py --target
Bug: 67037140
Change-Id: Idde67eb59ec90de79747cde17b552eec05b58497
Diffstat (limited to 'compiler/optimizing/load_store_analysis.h')
-rw-r--r--  compiler/optimizing/load_store_analysis.h  64
1 file changed, 44 insertions, 20 deletions
diff --git a/compiler/optimizing/load_store_analysis.h b/compiler/optimizing/load_store_analysis.h
index 5d2d841b37..e81572743e 100644
--- a/compiler/optimizing/load_store_analysis.h
+++ b/compiler/optimizing/load_store_analysis.h
@@ -30,6 +30,12 @@
namespace art {
+enum class LoadStoreAnalysisType {
+ kBasic,
+ kNoPredicatedInstructions,
+ kFull,
+};
+
// A ReferenceInfo contains additional info about a reference such as
// whether it's a singleton, returned, etc.
class ReferenceInfo : public DeletableArenaObject<kArenaAllocLSA> {
@@ -37,22 +43,23 @@ class ReferenceInfo : public DeletableArenaObject<kArenaAllocLSA> {
ReferenceInfo(HInstruction* reference,
ScopedArenaAllocator* allocator,
size_t pos,
- bool for_partial_elimination)
+ LoadStoreAnalysisType elimination_type)
: reference_(reference),
position_(pos),
is_singleton_(true),
is_singleton_and_not_returned_(true),
is_singleton_and_not_deopt_visible_(true),
allocator_(allocator),
- subgraph_(reference->GetBlock()->GetGraph(), for_partial_elimination, allocator_) {
+ subgraph_(reference->GetBlock()->GetGraph(),
+ elimination_type != LoadStoreAnalysisType::kBasic,
+ allocator_) {
// TODO We can do this in one pass.
// TODO NewArray is possible but will need to get a handle on how to deal with the dynamic loads
// for now just ignore it.
- bool can_be_partial =
- for_partial_elimination && (/* reference_->IsNewArray() || */ reference_->IsNewInstance());
- LambdaEscapeVisitor func([&](HInstruction* inst) { return HandleEscape(inst); });
+ bool can_be_partial = elimination_type != LoadStoreAnalysisType::kBasic &&
+ (/* reference_->IsNewArray() || */ reference_->IsNewInstance());
if (can_be_partial) {
- VisitEscapes(reference_, func);
+ CollectPartialEscapes(reference_->GetBlock()->GetGraph());
}
CalculateEscape(reference_,
nullptr,
@@ -60,10 +67,12 @@ class ReferenceInfo : public DeletableArenaObject<kArenaAllocLSA> {
&is_singleton_and_not_returned_,
&is_singleton_and_not_deopt_visible_);
if (can_be_partial) {
- // This is to mark writes to partially escaped values as also part of the escaped subset.
- // TODO We can avoid this if we have a 'ConditionalWrite' instruction. Will require testing
- // to see if the additional branches are worth it.
- PrunePartialEscapeWrites();
+ if (elimination_type == LoadStoreAnalysisType::kNoPredicatedInstructions) {
+ // This is to mark writes to partially escaped values as also part of the escaped subset.
+ // TODO We can avoid this if we have a 'ConditionalWrite' instruction. Will require testing
+ // to see if the additional branches are worth it.
+ PrunePartialEscapeWrites();
+ }
subgraph_.Finalize();
} else {
subgraph_.Invalidate();
@@ -112,9 +121,12 @@ class ReferenceInfo : public DeletableArenaObject<kArenaAllocLSA> {
}
private:
- bool HandleEscape(HInstruction* escape) {
- subgraph_.RemoveBlock(escape->GetBlock());
- return true;
+ void CollectPartialEscapes(HGraph* graph);
+ void HandleEscape(HBasicBlock* escape) {
+ subgraph_.RemoveBlock(escape);
+ }
+ void HandleEscape(HInstruction* escape) {
+ HandleEscape(escape->GetBlock());
}
// Make sure we mark any writes/potential writes to heap-locations within partially
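The HandleEscape() overloads above feed the ExecutionSubgraph: blocks in which the reference escapes are dropped from the set of blocks where the object is still known to be un-escaped. A minimal standalone sketch of that bookkeeping (hypothetical MiniSubgraph type, not the real ExecutionSubgraph API, which also tracks reachability):

    // Hypothetical sketch of the RemoveBlock() side of escape bookkeeping.
    #include <cstdio>
    #include <set>

    struct MiniSubgraph {
      std::set<int> valid_block_ids;  // block ids where the object has not escaped

      void RemoveBlock(int block_id) { valid_block_ids.erase(block_id); }
      bool ContainsBlock(int block_id) const {
        return valid_block_ids.count(block_id) != 0;
      }
    };

    int main() {
      MiniSubgraph subgraph{{0, 1, 2, 3}};
      subgraph.RemoveBlock(2);  // the reference escapes in block 2
      std::printf("block 1 un-escaped: %d, block 2 un-escaped: %d\n",
                  subgraph.ContainsBlock(1), subgraph.ContainsBlock(2));
      return 0;
    }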
@@ -229,7 +241,7 @@ class HeapLocationCollector : public HGraphVisitor {
HeapLocationCollector(HGraph* graph,
ScopedArenaAllocator* allocator,
- bool for_partial_elimination)
+ LoadStoreAnalysisType lse_type)
: HGraphVisitor(graph),
allocator_(allocator),
ref_info_array_(allocator->Adapter(kArenaAllocLSA)),
@@ -238,7 +250,7 @@ class HeapLocationCollector : public HGraphVisitor {
has_heap_stores_(false),
has_volatile_(false),
has_monitor_operations_(false),
- for_partial_elimination_(for_partial_elimination) {
+ lse_type_(lse_type) {
aliasing_matrix_.ClearAllBits();
}
@@ -252,6 +264,10 @@ class HeapLocationCollector : public HGraphVisitor {
ref_info_array_.clear();
}
+ size_t GetNumberOfReferenceInfos() const {
+ return ref_info_array_.size();
+ }
+
size_t GetNumberOfHeapLocations() const {
return heap_locations_.size();
}
@@ -260,6 +276,11 @@ class HeapLocationCollector : public HGraphVisitor {
return heap_locations_[index];
}
+ size_t GetHeapLocationIndex(const HeapLocation* hl) const {
+ auto res = std::find(heap_locations_.cbegin(), heap_locations_.cend(), hl);
+ return std::distance(heap_locations_.cbegin(), res);
+ }
+
HInstruction* HuntForOriginalReference(HInstruction* ref) const {
// An original reference can be transformed by instructions like:
// i0 NewArray
@@ -480,8 +501,7 @@ class HeapLocationCollector : public HGraphVisitor {
ReferenceInfo* ref_info = FindReferenceInfoOf(instruction);
if (ref_info == nullptr) {
size_t pos = ref_info_array_.size();
- ref_info =
- new (allocator_) ReferenceInfo(instruction, allocator_, pos, for_partial_elimination_);
+ ref_info = new (allocator_) ReferenceInfo(instruction, allocator_, pos, lse_type_);
ref_info_array_.push_back(ref_info);
}
return ref_info;
@@ -539,6 +559,10 @@ class HeapLocationCollector : public HGraphVisitor {
HeapLocation::kDeclaringClassDefIndexForArrays);
}
+ void VisitPredicatedInstanceFieldGet(HPredicatedInstanceFieldGet* instruction) override {
+ VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
+ CreateReferenceInfoForReferenceType(instruction);
+ }
void VisitInstanceFieldGet(HInstanceFieldGet* instruction) override {
VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
CreateReferenceInfoForReferenceType(instruction);
@@ -618,7 +642,7 @@ class HeapLocationCollector : public HGraphVisitor {
// alias analysis and won't be as effective.
bool has_volatile_; // If there are volatile field accesses.
bool has_monitor_operations_; // If there are monitor operations.
- bool for_partial_elimination_;
+ LoadStoreAnalysisType lse_type_;
DISALLOW_COPY_AND_ASSIGN(HeapLocationCollector);
};
@@ -630,13 +654,13 @@ class LoadStoreAnalysis {
explicit LoadStoreAnalysis(HGraph* graph,
OptimizingCompilerStats* stats,
ScopedArenaAllocator* local_allocator,
- bool for_elimination = true)
+ LoadStoreAnalysisType lse_type)
: graph_(graph),
stats_(stats),
heap_location_collector_(
graph,
local_allocator,
- /*for_partial_elimination=*/for_elimination && ExecutionSubgraph::CanAnalyse(graph_)) {}
+ ExecutionSubgraph::CanAnalyse(graph_) ? lse_type : LoadStoreAnalysisType::kBasic) {}
const HeapLocationCollector& GetHeapLocationCollector() const {
return heap_location_collector_;
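With this change a caller selects the analysis depth explicitly instead of passing a boolean. A hedged sketch of a call site follows; the include, Run(), and the surrounding setup are assumptions based on the rest of the ART tree and are not shown in this excerpt:

    // Hypothetical call site; only the constructor signature and
    // GetHeapLocationCollector() come from this header.  Builds only inside
    // the ART source tree.
    #include "load_store_analysis.h"

    namespace art {

    void AnalyzeForPartialLse(HGraph* graph,
                              OptimizingCompilerStats* stats,
                              ScopedArenaAllocator* allocator) {
      // Request full partial-LSE analysis; the constructor itself degrades to
      // kBasic when ExecutionSubgraph::CanAnalyse() rejects the graph.
      LoadStoreAnalysis lsa(graph, stats, allocator, LoadStoreAnalysisType::kFull);
      lsa.Run();  // assumed entry point, as used by the existing LSE pass
      const HeapLocationCollector& heap_locations = lsa.GetHeapLocationCollector();
      (void)heap_locations;  // e.g. feed into load-store elimination
    }

    }  // namespace art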