author    Alex Light <allight@google.com>  2020-11-02 08:48:33 -0800
committer Alex Light <allight@google.com>  2021-01-21 17:58:10 +0000
commit    b8686ce4c93eba7192ed7ef89e7ffd9f3aa6cd07 (patch)
tree      1721ee940f978736a2212d693271ee698897cb0b /compiler/optimizing/execution_subgraph.h
parent    625048049558d394d50b6e98885b8c210e481bf1 (diff)
Partial Load Store Elimination
Add partial load-store elimination to the LSE pass. Partial LSE will
move object allocations which only escape along certain execution paths
closer to the escape point, allowing more values to be eliminated. It
does this by creating new predicated load and store instructions that
are used when an object has escaped only some of the time. In cases
where the object has not escaped, a default value is used.

Test: ./test.py --host
Test: ./test.py --target
Bug: 67037140
Change-Id: Idde67eb59ec90de79747cde17b552eec05b58497
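For orientation, a hedged sketch of the code shape this optimization targets (written as C++ purely for illustration; the pass actually operates on ART's HIR, and every name below is invented):

struct Obj { int field; };
void Escape(Obj* o);  // hypothetical call through which the object escapes

int Example(bool rare_path) {
  Obj* o = new Obj();  // allocation that escapes on only one path
  o->field = 42;       // store that partial LSE can now eliminate
  if (rare_path) {
    Escape(o);         // the sole escape point; the allocation sinks here
  }
  // After partial LSE the load below is predicated: it reads o->field only
  // if 'o' actually escaped, and otherwise uses the known value 42.
  return o->field;
}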
Diffstat (limited to 'compiler/optimizing/execution_subgraph.h')
-rw-r--r--  compiler/optimizing/execution_subgraph.h | 44
1 file changed, 44 insertions, 0 deletions
diff --git a/compiler/optimizing/execution_subgraph.h b/compiler/optimizing/execution_subgraph.h
index dac938ed62..7fabbaead1 100644
--- a/compiler/optimizing/execution_subgraph.h
+++ b/compiler/optimizing/execution_subgraph.h
@@ -27,6 +27,7 @@
#include "base/bit_vector-inl.h"
#include "base/globals.h"
#include "base/iteration_range.h"
+#include "base/mutex.h"
#include "base/scoped_arena_allocator.h"
#include "base/scoped_arena_containers.h"
#include "base/stl_util.h"
@@ -35,6 +36,18 @@
namespace art {
+// Helper for transforming blocks to block_ids.
+class BlockToBlockIdTransformer {
+ public:
+ BlockToBlockIdTransformer(BlockToBlockIdTransformer&&) = default;
+ BlockToBlockIdTransformer(const BlockToBlockIdTransformer&) = default;
+ BlockToBlockIdTransformer() {}
+
+ inline uint32_t operator()(const HBasicBlock* b) const {
+ return b->GetBlockId();
+ }
+};
+
// Helper for transforming block ids to blocks.
class BlockIdToBlockTransformer {
public:
@@ -61,6 +74,20 @@ class BlockIdToBlockTransformer {
const HGraph* const graph_;
};
+class BlockIdFilterThunk {
+ public:
+ explicit BlockIdFilterThunk(const BitVector& i) : inner_(i) {}
+ BlockIdFilterThunk(BlockIdFilterThunk&& other) noexcept = default;
+ BlockIdFilterThunk(const BlockIdFilterThunk&) = default;
+
+ bool operator()(const HBasicBlock* b) const {
+ return inner_.IsBitSet(b->GetBlockId());
+ }
+
+ private:
+ const BitVector& inner_;
+};
+
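As an aside, a minimal sketch of how these two functors compose with ART's iterator adapters, assuming the MakeTransformRange helper from base/transform_iterator.h and the Filter/MakeIterationRange helpers used later in this file (UseId and UseBlock are hypothetical consumers):

void VisitExample(const HGraph* graph, const BitVector& interesting_ids) {
  // Blocks -> block ids, lazily, via TransformIterator.
  for (uint32_t id : MakeTransformRange(graph->GetReversePostOrder(),
                                        BlockToBlockIdTransformer())) {
    UseId(id);
  }
  // Only the blocks whose ids are set in the bit vector, still in
  // reverse post order, via FilterIterator.
  for (const HBasicBlock* blk :
       Filter(MakeIterationRange(graph->GetReversePostOrder()),
              BlockIdFilterThunk(interesting_ids))) {
    UseBlock(blk);
  }
}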
// A representation of a particular section of the graph. The graph is split
// into excluded and included areas, and the subgraph is used to track escapes.
//
@@ -80,10 +107,18 @@ class BlockIdToBlockTransformer {
// cohort-exit block to reach any cohort-entry block. This means we can use the
// boundary between the cohort and the rest of the graph to insert
// materialization blocks for partial LSE.
+//
+// TODO: We really should expand this to take into account where the object
+// allocation takes place directly. Currently we always act as though it were
+// allocated in the entry block. This is a massively simplifying assumption,
+// but it means we can't partially remove objects that are repeatedly
+// allocated in a loop.
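A hedged illustration of the cohort idea described above, with an invented block shape:

       B1 ----> B2 ----> B5
         \             /
          B3 ----> B4

Here {B3, B4} form a cohort: B3 is a cohort-entry block (its predecessor B1 lies outside the cohort) and B4 is a cohort-exit block (its successor B5 lies outside). Because no path leads from B4 back to B3, materialization code can be inserted on the B1->B3 boundary and state-restoring PHIs at B5.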
class ExecutionSubgraph : public ArenaObject<kArenaAllocLSA> {
public:
using BitVecBlockRange =
IterationRange<TransformIterator<BitVector::IndexIterator, BlockIdToBlockTransformer>>;
+ using FilteredBitVecBlockRange = IterationRange<
+ FilterIterator<ArenaVector<HBasicBlock*>::const_iterator, BlockIdFilterThunk>>;
// A set of connected blocks which are removed from the ExecutionSubgraph.
// See the above comment for an explanation.
@@ -110,6 +145,15 @@ class ExecutionSubgraph : public ArenaObject<kArenaAllocLSA> {
return BlockIterRange(entry_blocks_);
}
+ FilteredBitVecBlockRange EntryBlocksReversePostOrder() const {
+ return Filter(MakeIterationRange(graph_->GetReversePostOrder()),
+ BlockIdFilterThunk(entry_blocks_));
+ }
+
+ bool IsEntryBlock(const HBasicBlock* blk) const {
+ return entry_blocks_.IsBitSet(blk->GetBlockId());
+ }
+
// Blocks that have successors outside of the cohort. The successors of
// these blocks will need PHIs to restore state.
BitVecBlockRange ExitBlocks() const {
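Finally, a sketch of an assumed caller (not part of the patch) exercising the two new accessors:

// Walk a subgraph's entry blocks in reverse post order; 'esg' is any
// ExecutionSubgraph, and the loop body is a placeholder.
void HandleEntries(const ExecutionSubgraph* esg) {
  for (const HBasicBlock* blk : esg->EntryBlocksReversePostOrder()) {
    DCHECK(esg->IsEntryBlock(blk));
    // ... insert materialization code for partial LSE here ...
  }
}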