Diffstat (limited to 'compiler/optimizing/load_store_analysis.h')
-rw-r--r--  compiler/optimizing/load_store_analysis.h  29
1 file changed, 18 insertions(+), 11 deletions(-)
diff --git a/compiler/optimizing/load_store_analysis.h b/compiler/optimizing/load_store_analysis.h
index 7e5b071483..4975bae2a2 100644
--- a/compiler/optimizing/load_store_analysis.h
+++ b/compiler/optimizing/load_store_analysis.h
@@ -50,15 +50,15 @@ class ReferenceInfo : public DeletableArenaObject<kArenaAllocLSA> {
is_singleton_and_not_returned_(true),
is_singleton_and_not_deopt_visible_(true),
allocator_(allocator),
- subgraph_(reference->GetBlock()->GetGraph(),
- elimination_type != LoadStoreAnalysisType::kBasic,
- allocator_) {
+ subgraph_(nullptr) {
// TODO: We can do this in one pass.
// TODO: NewArray is possible, but we will need to work out how to handle the
// dynamic loads; for now just ignore it.
bool can_be_partial = elimination_type != LoadStoreAnalysisType::kBasic &&
(/* reference_->IsNewArray() || */ reference_->IsNewInstance());
if (can_be_partial) {
+ subgraph_.reset(
+ new (allocator) ExecutionSubgraph(reference->GetBlock()->GetGraph(), allocator));
CollectPartialEscapes(reference_->GetBlock()->GetGraph());
}
CalculateEscape(reference_,
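
This first hunk makes the no-escape subgraph lazily allocated: a ReferenceInfo now starts with a null subgraph_ and only constructs one when the reference is a candidate for partial elimination, rather than building a subgraph for every reference and invalidating the unused ones. A minimal sketch of the pattern, using a hypothetical Subgraph/RefInfo pair rather than the real ART types (std::make_unique stands in for the arena allocation to keep the sketch self-contained):

    #include <memory>

    // Hypothetical stand-in for ExecutionSubgraph; only the pieces this
    // sketch needs.
    struct Subgraph {
      bool IsValid() const { return finalized_; }
      void Finalize() { finalized_ = true; }
      bool finalized_ = false;
    };

    class RefInfo {
     public:
      explicit RefInfo(bool can_be_partial) : subgraph_(nullptr) {
        if (can_be_partial) {
          // Construct the subgraph only on the path that uses it; the old
          // code built one unconditionally and invalidated it otherwise.
          subgraph_ = std::make_unique<Subgraph>();
          subgraph_->Finalize();
        }
      }

      // The null check now doubles as the old Invalidate()/IsValid() sentinel.
      bool IsPartialSingleton() const {
        return subgraph_ != nullptr && subgraph_->IsValid();
      }

     private:
      std::unique_ptr<Subgraph> subgraph_;
    };
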
@@ -73,14 +73,16 @@ class ReferenceInfo : public DeletableArenaObject<kArenaAllocLSA> {
// to see if the additional branches are worth it.
PrunePartialEscapeWrites();
}
- subgraph_.Finalize();
+ DCHECK(subgraph_ != nullptr);
+ subgraph_->Finalize();
} else {
- subgraph_.Invalidate();
+ DCHECK(subgraph_ == nullptr);
}
}
const ExecutionSubgraph* GetNoEscapeSubgraph() const {
- return &subgraph_;
+ DCHECK(IsPartialSingleton());
+ return subgraph_.get();
}
HInstruction* GetReference() const {
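
Note the tightened accessor contract above: GetNoEscapeSubgraph() used to return a pointer to the member unconditionally, even when the subgraph had been invalidated; it now DCHECKs IsPartialSingleton() and returns the owned pointer. Callers are presumably expected to guard the accessor along these lines (hypothetical call site, not part of this patch):

    // Hypothetical caller, assuming the ReferenceInfo and ExecutionSubgraph
    // declarations from this header.
    void VisitNoEscapeBlocks(ReferenceInfo* ref_info) {
      if (ref_info->IsPartialSingleton()) {
        const ExecutionSubgraph* subgraph = ref_info->GetNoEscapeSubgraph();
        // ... query subgraph for blocks where the reference does not escape ...
      }
    }
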
@@ -103,7 +105,9 @@ class ReferenceInfo : public DeletableArenaObject<kArenaAllocLSA> {
auto ref = GetReference();
// TODO: NewArray is possible, but we will need to work out how to handle the
// dynamic loads; for now just ignore it.
- return (/* ref->IsNewArray() || */ ref->IsNewInstance()) && GetNoEscapeSubgraph()->IsValid();
+ return (/* ref->IsNewArray() || */ ref->IsNewInstance()) &&
+ subgraph_ != nullptr &&
+ subgraph_->IsValid();
}
// Returns true if reference_ is a singleton and not returned to the caller or
@@ -123,7 +127,8 @@ class ReferenceInfo : public DeletableArenaObject<kArenaAllocLSA> {
private:
void CollectPartialEscapes(HGraph* graph);
void HandleEscape(HBasicBlock* escape) {
- subgraph_.RemoveBlock(escape);
+ DCHECK(subgraph_ != nullptr);
+ subgraph_->RemoveBlock(escape);
}
void HandleEscape(HInstruction* escape) {
HandleEscape(escape->GetBlock());
@@ -145,7 +150,7 @@ class ReferenceInfo : public DeletableArenaObject<kArenaAllocLSA> {
ScopedArenaAllocator* allocator_;
- ExecutionSubgraph subgraph_;
+ std::unique_ptr<ExecutionSubgraph> subgraph_;
DISALLOW_COPY_AND_ASSIGN(ReferenceInfo);
};
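
Switching the member from a by-value ExecutionSubgraph to std::unique_ptr is safe despite the arena placement-new above, presumably because ExecutionSubgraph, like ReferenceInfo itself (see the class declaration in the hunk headers), derives from DeletableArenaObject, whose class-level operator delete releases no memory: the unique_ptr's default deleter runs the destructor while the arena keeps ownership of the storage. A compact, self-contained illustration of that interaction (toy Arena type, not ART's ScopedArenaAllocator):

    #include <cstddef>
    #include <memory>

    struct Arena {
      // Toy arena: real arenas hand out chunks and release them in bulk.
      void* Alloc(size_t n) { return ::operator new(n); }
    };

    struct Deletable {
      static void* operator new(size_t n, Arena* arena) { return arena->Alloc(n); }
      static void operator delete(void*) {}  // No-op: the arena owns the memory.
    };

    int main() {
      Arena arena;
      std::unique_ptr<Deletable> p(new (&arena) Deletable());
      p.reset();  // Runs ~Deletable(), then the no-op operator delete.
    }
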
@@ -264,8 +269,10 @@ class HeapLocationCollector : public HGraphVisitor {
ref_info_array_.clear();
}
- size_t GetNumberOfReferenceInfos() const {
- return ref_info_array_.size();
+ size_t CountPartialSingletons() const {
+ return std::count_if(ref_info_array_.begin(),
+ ref_info_array_.end(),
+ [](ReferenceInfo* ri) { return ri->IsPartialSingleton(); });
}
size_t GetNumberOfHeapLocations() const {
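
Finally, the renamed helper changes what is reported: GetNumberOfReferenceInfos() returned the size of the whole array, while CountPartialSingletons() counts only the references eligible for partial elimination. std::count_if needs <algorithm>, presumably already pulled in by this header's includes. The same counting idiom over a plain container, as a standalone sketch:

    #include <algorithm>
    #include <cstddef>
    #include <vector>

    // Stand-in for ReferenceInfo with just the predicate this sketch needs.
    struct Info {
      bool partial_singleton = false;
      bool IsPartialSingleton() const { return partial_singleton; }
    };

    size_t CountPartialSingletons(const std::vector<Info*>& infos) {
      // std::count_if returns ptrdiff_t; the implicit conversion to size_t
      // mirrors what the patch itself does.
      return std::count_if(infos.begin(), infos.end(),
                           [](const Info* info) { return info->IsPartialSingleton(); });
    }
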