path: root/compiler/optimizing/load_store_analysis.h
author    Alex Light <allight@google.com>    2021-01-25 14:11:05 +0000
committer Alex Light <allight@google.com>    2021-01-25 07:01:32 -0800
commit    3a73ffb70151dbc99fa41f300a237f8c29783e0e (patch)
tree      8debef1b372e5bea27d5265a9019446229955e3d /compiler/optimizing/load_store_analysis.h
parent    e585964df42e9fd2fab6f209810cb03e1b261ab1 (diff)
Revert^4 "Partial Load Store Elimination"
This reverts commit 791df7a161ecfa28eb69862a4bc285282463b960.
This unreverts commit fc1ce4e8be0d977e3d41699f5ec746d68f63c024.
This unreverts commit b8686ce4c93eba7192ed7ef89e7ffd9f3aa6cd07.

We incorrectly failed to include PredicatedInstanceFieldGet in a few
conditions, including a DCHECK. This caused tests to fail under the
read-barrier-table-lookup configuration.

Reason for revert: Fixed 2 incorrect checks

Bug: 67037140
Test: ./art/test/testrunner/run_build_test_target.py -j70 art-gtest-read-barrier-table-lookup
Change-Id: I32b01b29fb32077fb5074e7c77a0226bd1fcaab4
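For context, the bug class the message describes can be sketched as follows. This is an illustrative fragment, not code from this commit: LooksLikeFieldGet is a hypothetical helper, while HInstanceFieldGet and HPredicatedInstanceFieldGet are real ART instruction kinds, with the usual generated IsFoo() predicates on HInstruction assumed.

// Illustrative only: a condition written before partial LSE that
// enumerates field gets but misses the predicated variant introduced by
// "Partial Load Store Elimination". Any DCHECK built on the old form
// fires once loads are rewritten into predicated gets.
bool LooksLikeFieldGet(HInstruction* instruction) {
  // Before the fix (incorrect): return instruction->IsInstanceFieldGet();
  return instruction->IsInstanceFieldGet() ||
         instruction->IsPredicatedInstanceFieldGet();
}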
Diffstat (limited to 'compiler/optimizing/load_store_analysis.h')
-rw-r--r--  compiler/optimizing/load_store_analysis.h | 64
1 file changed, 44 insertions(+), 20 deletions(-)
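The heart of the change, visible in the first hunk below, is a three-valued LoadStoreAnalysisType replacing the old for_partial_elimination bool. The enum here is verbatim from the patch; the comments summarize the semantics as they fall out of the diff and are editorial annotations, not part of the commit.

// kBasic: no escape subgraph is kept; ReferenceInfo invalidates subgraph_.
// kNoPredicatedInstructions: partial analysis without predicated loads and
//   stores, so writes into partially escaped values are pruned via
//   PrunePartialEscapeWrites().
// kFull: partial analysis with predicated instructions; the pruning step
//   is skipped and the subgraph is finalized as-is.
enum class LoadStoreAnalysisType {
  kBasic,
  kNoPredicatedInstructions,
  kFull,
};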
diff --git a/compiler/optimizing/load_store_analysis.h b/compiler/optimizing/load_store_analysis.h
index 5d2d841b37..e81572743e 100644
--- a/compiler/optimizing/load_store_analysis.h
+++ b/compiler/optimizing/load_store_analysis.h
@@ -30,6 +30,12 @@
namespace art {
+enum class LoadStoreAnalysisType {
+ kBasic,
+ kNoPredicatedInstructions,
+ kFull,
+};
+
// A ReferenceInfo contains additional info about a reference such as
// whether it's a singleton, returned, etc.
class ReferenceInfo : public DeletableArenaObject<kArenaAllocLSA> {
@@ -37,22 +43,23 @@ class ReferenceInfo : public DeletableArenaObject<kArenaAllocLSA> {
ReferenceInfo(HInstruction* reference,
ScopedArenaAllocator* allocator,
size_t pos,
- bool for_partial_elimination)
+ LoadStoreAnalysisType elimination_type)
: reference_(reference),
position_(pos),
is_singleton_(true),
is_singleton_and_not_returned_(true),
is_singleton_and_not_deopt_visible_(true),
allocator_(allocator),
- subgraph_(reference->GetBlock()->GetGraph(), for_partial_elimination, allocator_) {
+ subgraph_(reference->GetBlock()->GetGraph(),
+ elimination_type != LoadStoreAnalysisType::kBasic,
+ allocator_) {
// TODO We can do this in one pass.
// TODO NewArray is possible but will need to get a handle on how to deal with the dynamic loads
// for now just ignore it.
- bool can_be_partial =
- for_partial_elimination && (/* reference_->IsNewArray() || */ reference_->IsNewInstance());
- LambdaEscapeVisitor func([&](HInstruction* inst) { return HandleEscape(inst); });
+ bool can_be_partial = elimination_type != LoadStoreAnalysisType::kBasic &&
+ (/* reference_->IsNewArray() || */ reference_->IsNewInstance());
if (can_be_partial) {
- VisitEscapes(reference_, func);
+ CollectPartialEscapes(reference_->GetBlock()->GetGraph());
}
CalculateEscape(reference_,
nullptr,
@@ -60,10 +67,12 @@ class ReferenceInfo : public DeletableArenaObject<kArenaAllocLSA> {
&is_singleton_and_not_returned_,
&is_singleton_and_not_deopt_visible_);
if (can_be_partial) {
- // This is to mark writes to partially escaped values as also part of the escaped subset.
- // TODO We can avoid this if we have a 'ConditionalWrite' instruction. Will require testing
- // to see if the additional branches are worth it.
- PrunePartialEscapeWrites();
+ if (elimination_type == LoadStoreAnalysisType::kNoPredicatedInstructions) {
+ // This is to mark writes to partially escaped values as also part of the escaped subset.
+ // TODO We can avoid this if we have a 'ConditionalWrite' instruction. Will require testing
+ // to see if the additional branches are worth it.
+ PrunePartialEscapeWrites();
+ }
subgraph_.Finalize();
} else {
subgraph_.Invalidate();
@@ -112,9 +121,12 @@ class ReferenceInfo : public DeletableArenaObject<kArenaAllocLSA> {
}
private:
- bool HandleEscape(HInstruction* escape) {
- subgraph_.RemoveBlock(escape->GetBlock());
- return true;
+ void CollectPartialEscapes(HGraph* graph);
+ void HandleEscape(HBasicBlock* escape) {
+ subgraph_.RemoveBlock(escape);
+ }
+ void HandleEscape(HInstruction* escape) {
+ HandleEscape(escape->GetBlock());
}
// Make sure we mark any writes/potential writes to heap-locations within partially
@@ -229,7 +241,7 @@ class HeapLocationCollector : public HGraphVisitor {
HeapLocationCollector(HGraph* graph,
ScopedArenaAllocator* allocator,
- bool for_partial_elimination)
+ LoadStoreAnalysisType lse_type)
: HGraphVisitor(graph),
allocator_(allocator),
ref_info_array_(allocator->Adapter(kArenaAllocLSA)),
@@ -238,7 +250,7 @@ class HeapLocationCollector : public HGraphVisitor {
has_heap_stores_(false),
has_volatile_(false),
has_monitor_operations_(false),
- for_partial_elimination_(for_partial_elimination) {
+ lse_type_(lse_type) {
aliasing_matrix_.ClearAllBits();
}
@@ -252,6 +264,10 @@ class HeapLocationCollector : public HGraphVisitor {
ref_info_array_.clear();
}
+ size_t GetNumberOfReferenceInfos() const {
+ return ref_info_array_.size();
+ }
+
size_t GetNumberOfHeapLocations() const {
return heap_locations_.size();
}
@@ -260,6 +276,11 @@ class HeapLocationCollector : public HGraphVisitor {
return heap_locations_[index];
}
+ size_t GetHeapLocationIndex(const HeapLocation* hl) const {
+ auto res = std::find(heap_locations_.cbegin(), heap_locations_.cend(), hl);
+ return std::distance(heap_locations_.cbegin(), res);
+ }
+
HInstruction* HuntForOriginalReference(HInstruction* ref) const {
// An original reference can be transformed by instructions like:
// i0 NewArray
@@ -480,8 +501,7 @@ class HeapLocationCollector : public HGraphVisitor {
ReferenceInfo* ref_info = FindReferenceInfoOf(instruction);
if (ref_info == nullptr) {
size_t pos = ref_info_array_.size();
- ref_info =
- new (allocator_) ReferenceInfo(instruction, allocator_, pos, for_partial_elimination_);
+ ref_info = new (allocator_) ReferenceInfo(instruction, allocator_, pos, lse_type_);
ref_info_array_.push_back(ref_info);
}
return ref_info;
@@ -539,6 +559,10 @@ class HeapLocationCollector : public HGraphVisitor {
HeapLocation::kDeclaringClassDefIndexForArrays);
}
+ void VisitPredicatedInstanceFieldGet(HPredicatedInstanceFieldGet* instruction) override {
+ VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
+ CreateReferenceInfoForReferenceType(instruction);
+ }
void VisitInstanceFieldGet(HInstanceFieldGet* instruction) override {
VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
CreateReferenceInfoForReferenceType(instruction);
@@ -618,7 +642,7 @@ class HeapLocationCollector : public HGraphVisitor {
// alias analysis and won't be as effective.
bool has_volatile_; // If there are volatile field accesses.
bool has_monitor_operations_; // If there are monitor operations.
- bool for_partial_elimination_;
+ LoadStoreAnalysisType lse_type_;
DISALLOW_COPY_AND_ASSIGN(HeapLocationCollector);
};
@@ -630,13 +654,13 @@ class LoadStoreAnalysis {
explicit LoadStoreAnalysis(HGraph* graph,
OptimizingCompilerStats* stats,
ScopedArenaAllocator* local_allocator,
- bool for_elimination = true)
+ LoadStoreAnalysisType lse_type)
: graph_(graph),
stats_(stats),
heap_location_collector_(
graph,
local_allocator,
- /*for_partial_elimination=*/for_elimination && ExecutionSubgraph::CanAnalyse(graph_)) {}
+ ExecutionSubgraph::CanAnalyse(graph_) ? lse_type : LoadStoreAnalysisType::kBasic) {}
const HeapLocationCollector& GetHeapLocationCollector() const {
return heap_location_collector_;
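Taken together, a call site migrates from the defaulted bool to an explicit analysis type. A minimal sketch, assuming the enclosing pass owns graph, stats, and a ScopedArenaAllocator, and assuming the analysis is driven by a Run() method that this header excerpt does not show:

// Old: LoadStoreAnalysis lsa(graph, stats, &allocator);  // bool defaulted to true
// New: the caller states the depth; ExecutionSubgraph::CanAnalyse() may
// still downgrade the request to kBasic inside the constructor.
LoadStoreAnalysis lsa(graph, stats, &allocator, LoadStoreAnalysisType::kFull);
lsa.Run();  // assumed entry point, not shown in this excerpt
const HeapLocationCollector& hlc = lsa.GetHeapLocationCollector();
size_t locations = hlc.GetNumberOfHeapLocations();
size_t refs = hlc.GetNumberOfReferenceInfos();  // accessor added in this change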