author     David Srbecky <dsrbecky@google.com>  2018-06-23 22:05:49 +0100
committer  David Srbecky <dsrbecky@google.com>  2018-06-26 16:51:15 +0100
commit     f6ba5b316b51d0fb9f91cb51a42e51dfeee62ee4 (patch)
tree       ac3b776ae3c20fc957949d06dd878ef3ffa6ffb5 /compiler/optimizing
parent     cca7cb9ffa56d8ab8fd0c5997c8bfd965d7426c1 (diff)
Add method frame info to CodeInfo.
The stored information will be used in follow-up CLs.
This temporarily increases .oat file size by 0.7%.
Test: test-art-host-gtest
Change-Id: Ie7d898b06398ae44287bb1e8153861ab112a216c
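In short, this CL moves the per-method facts (frame size, spill masks, vreg count) out of every BeginStackMapEntry call and into a new BeginMethod/EndMethod bracket. A condensed sketch of the new call sequence, assembled from the test updates below — the numeric values are the placeholders the tests use, and the fragment assumes the tests' allocator and ISA setup:

```cpp
StackMapStream stream(&allocator, kRuntimeISA);
// Frame info is now stated once per method instead of per stack map entry.
stream.BeginMethod(/* frame_size_in_bytes */ 32,
                   /* core_spill_mask */ 0,
                   /* fp_spill_mask */ 0,
                   /* num_dex_registers */ 2);

ArenaBitVector sp_mask(&allocator, 0, false);
// The num_dex_registers and inlining_depth arguments are gone: the register
// count comes from BeginMethod, the depth from the recorded inline infos.
stream.BeginStackMapEntry(/* dex_pc */ 0, /* native_pc_offset */ 64 * kPcAlign,
                          /* register_mask */ 0x3, &sp_mask);
stream.AddDexRegisterEntry(Kind::kInStack, 0);
stream.AddDexRegisterEntry(Kind::kConstant, -2);
stream.EndStackMapEntry();

stream.EndMethod();  // Must close the bracket before PrepareForFillIn().
size_t size = stream.PrepareForFillIn();
```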
Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/code_generator.cc   | 14
-rw-r--r--  compiler/optimizing/stack_map_stream.cc | 59
-rw-r--r--  compiler/optimizing/stack_map_stream.h  | 24
-rw-r--r--  compiler/optimizing/stack_map_test.cc   | 60
4 files changed, 105 insertions, 52 deletions
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 2589869859..21b2226dd6 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -393,6 +393,11 @@ void CodeGenerator::Compile(CodeAllocator* allocator) {
   HGraphVisitor* instruction_visitor = GetInstructionVisitor();
   DCHECK_EQ(current_block_index_, 0u);
 
+  GetStackMapStream()->BeginMethod(HasEmptyFrame() ? 0 : frame_size_,
+                                   core_spill_mask_,
+                                   fpu_spill_mask_,
+                                   GetGraph()->GetNumberOfVRegs());
+
   size_t frame_start = GetAssembler()->CodeSize();
   GenerateFrameEntry();
   DCHECK_EQ(GetAssembler()->cfi().GetCurrentCFAOffset(), static_cast<int>(frame_size_));
@@ -435,6 +440,8 @@ void CodeGenerator::Compile(CodeAllocator* allocator) {
 
   // Finalize instructions in assember;
   Finalize(allocator);
+
+  GetStackMapStream()->EndMethod();
 }
 
 void CodeGenerator::Finalize(CodeAllocator* allocator) {
@@ -1087,7 +1094,7 @@ void CodeGenerator::RecordPcInfo(HInstruction* instruction,
   if (instruction == nullptr) {
     // For stack overflow checks and native-debug-info entries without dex register
     // mapping (i.e. start of basic block or start of slow path).
-    stack_map_stream->BeginStackMapEntry(dex_pc, native_pc, 0, 0, 0, 0);
+    stack_map_stream->BeginStackMapEntry(dex_pc, native_pc);
    stack_map_stream->EndStackMapEntry();
     return;
   }
@@ -1135,8 +1142,6 @@ void CodeGenerator::RecordPcInfo(HInstruction* instruction,
                                        native_pc,
                                        register_mask,
                                        locations->GetStackMask(),
-                                       outer_environment_size,
-                                       inlining_depth,
                                        kind);
   EmitEnvironment(environment, slow_path);
   // Record invoke info, the common case for the trampoline is super and static invokes. Only
@@ -1204,15 +1209,12 @@ void CodeGenerator::RecordCatchBlockInfo() {
     uint32_t dex_pc = block->GetDexPc();
     uint32_t num_vregs = graph_->GetNumberOfVRegs();
-    uint32_t inlining_depth = 0;  // Inlining of catch blocks is not supported at the moment.
     uint32_t native_pc = GetAddressOf(block);
 
     stack_map_stream->BeginStackMapEntry(dex_pc,
                                          native_pc,
                                          /* register_mask */ 0,
                                          /* stack_mask */ nullptr,
-                                         num_vregs,
-                                         inlining_depth,
                                          StackMap::Kind::Catch);
 
     HInstruction* current_phi = block->GetFirstPhi();
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index 57f47af777..cb988050ff 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -39,22 +39,33 @@ void StackMapStream::SetStackMapNativePcOffset(size_t i, uint32_t native_pc_offs
       StackMap::PackNativePc(native_pc_offset, instruction_set_);
 }
 
+void StackMapStream::BeginMethod(size_t frame_size_in_bytes,
+                                 size_t core_spill_mask,
+                                 size_t fp_spill_mask,
+                                 uint32_t num_dex_registers) {
+  DCHECK(!in_method_) << "Mismatched Begin/End calls";
+  in_method_ = true;
+  DCHECK_EQ(frame_size_in_bytes_, 0u) << "BeginMethod was already called";
+
+  frame_size_in_bytes_ = frame_size_in_bytes;
+  core_spill_mask_ = core_spill_mask;
+  fp_spill_mask_ = fp_spill_mask;
+  num_dex_registers_ = num_dex_registers;
+}
+
+void StackMapStream::EndMethod() {
+  DCHECK(in_method_) << "Mismatched Begin/End calls";
+  in_method_ = false;
+}
+
 void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
                                         uint32_t native_pc_offset,
                                         uint32_t register_mask,
                                         BitVector* stack_mask,
-                                        uint32_t num_dex_registers,
-                                        uint8_t inlining_depth,
                                         StackMap::Kind kind) {
+  DCHECK(in_method_) << "Call BeginMethod first";
   DCHECK(!in_stack_map_) << "Mismatched Begin/End calls";
   in_stack_map_ = true;
-  // num_dex_registers_ is the constant per-method number of registers.
-  // However we initially don't know what the value is, so lazily initialize it.
-  if (num_dex_registers_ == 0) {
-    num_dex_registers_ = num_dex_registers;
-  } else if (num_dex_registers > 0) {
-    DCHECK_EQ(num_dex_registers_, num_dex_registers) << "Inconsistent register count";
-  }
 
   current_stack_map_ = BitTableBuilder<StackMap>::Entry();
   current_stack_map_[StackMap::kKind] = static_cast<uint32_t>(kind);
@@ -84,7 +95,7 @@ void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
   lazy_stack_masks_.push_back(stack_mask);
   current_inline_infos_.clear();
   current_dex_registers_.clear();
-  expected_num_dex_registers_ = num_dex_registers;
+  expected_num_dex_registers_ = num_dex_registers_;
 
   if (kVerifyStackMaps) {
     size_t stack_map_index = stack_maps_.size();
@@ -109,8 +120,6 @@ void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
       for (size_t b = 0; b < seen_stack_mask.size_in_bits(); b++) {
         CHECK_EQ(seen_stack_mask.LoadBit(b), stack_mask != nullptr && stack_mask->IsBitSet(b));
       }
-      CHECK_EQ(stack_map.HasInlineInfo(), (inlining_depth != 0));
-      CHECK_EQ(code_info.GetInlineDepthOf(stack_map), inlining_depth);
     });
   }
 }
@@ -118,9 +127,9 @@ void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
 void StackMapStream::EndStackMapEntry() {
   DCHECK(in_stack_map_) << "Mismatched Begin/End calls";
   in_stack_map_ = false;
-  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
 
   // Generate index into the InlineInfo table.
+  size_t inlining_depth = current_inline_infos_.size();
   if (!current_inline_infos_.empty()) {
     current_inline_infos_.back()[InlineInfo::kIsLast] = InlineInfo::kLast;
     current_stack_map_[StackMap::kInlineInfoIndex] =
@@ -128,9 +137,23 @@ void StackMapStream::EndStackMapEntry() {
   }
 
   // Generate delta-compressed dex register map.
-  CreateDexRegisterMap();
+  size_t num_dex_registers = current_dex_registers_.size();
+  if (!current_dex_registers_.empty()) {
+    DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
+    CreateDexRegisterMap();
+  }
 
   stack_maps_.Add(current_stack_map_);
+
+  if (kVerifyStackMaps) {
+    size_t stack_map_index = stack_maps_.size() - 1;
+    dchecks_.emplace_back([=](const CodeInfo& code_info) {
+      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
+      CHECK_EQ(stack_map.HasDexRegisterMap(), (num_dex_registers != 0));
+      CHECK_EQ(stack_map.HasInlineInfo(), (inlining_depth != 0));
+      CHECK_EQ(code_info.GetInlineDepthOf(stack_map), inlining_depth);
+    });
+  }
 }
 
 void StackMapStream::AddInvoke(InvokeType invoke_type, uint32_t dex_method_index) {
@@ -157,6 +180,7 @@ void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
                                           uint32_t dex_pc,
                                           uint32_t num_dex_registers,
                                           const DexFile* outer_dex_file) {
+  DCHECK(in_stack_map_) << "Call BeginStackMapEntry first";
   DCHECK(!in_inline_info_) << "Mismatched Begin/End calls";
   in_inline_info_ = true;
   DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
@@ -301,7 +325,11 @@ size_t StackMapStream::PrepareForFillIn() {
     }
   }
 
-  BitMemoryWriter<ScopedArenaVector<uint8_t>> out(&out_);
+  EncodeUnsignedLeb128(&out_, frame_size_in_bytes_);
+  EncodeUnsignedLeb128(&out_, core_spill_mask_);
+  EncodeUnsignedLeb128(&out_, fp_spill_mask_);
+  EncodeUnsignedLeb128(&out_, num_dex_registers_);
+  BitMemoryWriter<ScopedArenaVector<uint8_t>> out(&out_, out_.size() * kBitsPerByte);
   stack_maps_.Encode(out);
   register_masks_.Encode(out);
   stack_masks_.Encode(out);
@@ -310,7 +338,6 @@ size_t StackMapStream::PrepareForFillIn() {
   dex_register_masks_.Encode(out);
   dex_register_maps_.Encode(out);
   dex_register_catalog_.Encode(out);
-  EncodeVarintBits(out, num_dex_registers_);
 
   return UnsignedLeb128Size(out_.size()) + out_.size();
 }
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index 7d1820d67f..ed865b12f7 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -50,8 +50,6 @@ class StackMapStream : public ValueObject {
         out_(allocator->Adapter(kArenaAllocStackMapStream)),
         method_infos_(allocator),
         lazy_stack_masks_(allocator->Adapter(kArenaAllocStackMapStream)),
-        in_stack_map_(false),
-        in_inline_info_(false),
         current_stack_map_(),
         current_inline_infos_(allocator->Adapter(kArenaAllocStackMapStream)),
         current_dex_registers_(allocator->Adapter(kArenaAllocStackMapStream)),
@@ -61,12 +59,16 @@ class StackMapStream : public ValueObject {
         temp_dex_register_map_(allocator->Adapter(kArenaAllocStackMapStream)) {
   }
 
+  void BeginMethod(size_t frame_size_in_bytes,
+                   size_t core_spill_mask,
+                   size_t fp_spill_mask,
+                   uint32_t num_dex_registers);
+  void EndMethod();
+
   void BeginStackMapEntry(uint32_t dex_pc,
                           uint32_t native_pc_offset,
-                          uint32_t register_mask,
-                          BitVector* sp_mask,
-                          uint32_t num_dex_registers,
-                          uint8_t inlining_depth,
+                          uint32_t register_mask = 0,
+                          BitVector* sp_mask = nullptr,
                           StackMap::Kind kind = StackMap::Kind::Default);
   void EndStackMapEntry();
 
@@ -103,6 +105,10 @@ class StackMapStream : public ValueObject {
   void CreateDexRegisterMap();
 
   const InstructionSet instruction_set_;
+  uint32_t frame_size_in_bytes_ = 0;
+  uint32_t core_spill_mask_ = 0;
+  uint32_t fp_spill_mask_ = 0;
+  uint32_t num_dex_registers_ = 0;
   BitTableBuilder<StackMap> stack_maps_;
   BitTableBuilder<RegisterMask> register_masks_;
   BitmapTableBuilder stack_masks_;
@@ -111,7 +117,6 @@ class StackMapStream : public ValueObject {
   BitmapTableBuilder dex_register_masks_;
   BitTableBuilder<MaskInfo> dex_register_maps_;
   BitTableBuilder<DexRegisterInfo> dex_register_catalog_;
-  uint32_t num_dex_registers_ = 0;  // TODO: Make this const and get the value in constructor.
 
   ScopedArenaVector<uint8_t> out_;
   BitTableBuilderBase<1> method_infos_;
@@ -119,8 +124,9 @@ class StackMapStream : public ValueObject {
   ScopedArenaVector<BitVector*> lazy_stack_masks_;
 
   // Variables which track the current state between Begin/End calls;
-  bool in_stack_map_;
-  bool in_inline_info_;
+  bool in_method_ = false;
+  bool in_stack_map_ = false;
+  bool in_inline_info_ = false;
   BitTableBuilder<StackMap>::Entry current_stack_map_;
   ScopedArenaVector<BitTableBuilder<InlineInfo>::Entry> current_inline_infos_;
   ScopedArenaVector<DexRegisterLocation> current_dex_registers_;
diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc
index fd856671ba..6241e0c25a 100644
--- a/compiler/optimizing/stack_map_test.cc
+++ b/compiler/optimizing/stack_map_test.cc
@@ -52,14 +52,16 @@ TEST(StackMapTest, Test1) {
   ArenaStack arena_stack(&pool);
   ScopedArenaAllocator allocator(&arena_stack);
   StackMapStream stream(&allocator, kRuntimeISA);
+  stream.BeginMethod(32, 0, 0, 2);
 
   ArenaBitVector sp_mask(&allocator, 0, false);
   size_t number_of_dex_registers = 2;
-  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
+  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask);
   stream.AddDexRegisterEntry(Kind::kInStack, 0);         // Short location.
   stream.AddDexRegisterEntry(Kind::kConstant, -2);       // Short location.
   stream.EndStackMapEntry();
 
+  stream.EndMethod();
   size_t size = stream.PrepareForFillIn();
   void* memory = allocator.Alloc(size, kArenaAllocMisc);
   MemoryRegion region(memory, size);
@@ -107,6 +109,7 @@ TEST(StackMapTest, Test2) {
   ArenaStack arena_stack(&pool);
   ScopedArenaAllocator allocator(&arena_stack);
   StackMapStream stream(&allocator, kRuntimeISA);
+  stream.BeginMethod(32, 0, 0, 2);
   ArtMethod art_method;
 
   ArenaBitVector sp_mask1(&allocator, 0, true);
@@ -114,7 +117,7 @@ TEST(StackMapTest, Test2) {
   sp_mask1.SetBit(4);
   size_t number_of_dex_registers = 2;
   size_t number_of_dex_registers_in_inline_info = 0;
-  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask1, number_of_dex_registers, 2);
+  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask1);
   stream.AddDexRegisterEntry(Kind::kInStack, 0);         // Short location.
   stream.AddDexRegisterEntry(Kind::kConstant, -2);       // Large location.
   stream.BeginInlineInfoEntry(&art_method, 3, number_of_dex_registers_in_inline_info);
@@ -126,7 +129,7 @@ TEST(StackMapTest, Test2) {
   ArenaBitVector sp_mask2(&allocator, 0, true);
   sp_mask2.SetBit(3);
   sp_mask2.SetBit(8);
-  stream.BeginStackMapEntry(1, 128 * kPcAlign, 0xFF, &sp_mask2, number_of_dex_registers, 0);
+  stream.BeginStackMapEntry(1, 128 * kPcAlign, 0xFF, &sp_mask2);
   stream.AddDexRegisterEntry(Kind::kInRegister, 18);     // Short location.
   stream.AddDexRegisterEntry(Kind::kInFpuRegister, 3);   // Short location.
   stream.EndStackMapEntry();
@@ -134,7 +137,7 @@ TEST(StackMapTest, Test2) {
   ArenaBitVector sp_mask3(&allocator, 0, true);
   sp_mask3.SetBit(1);
   sp_mask3.SetBit(5);
-  stream.BeginStackMapEntry(2, 192 * kPcAlign, 0xAB, &sp_mask3, number_of_dex_registers, 0);
+  stream.BeginStackMapEntry(2, 192 * kPcAlign, 0xAB, &sp_mask3);
   stream.AddDexRegisterEntry(Kind::kInRegister, 6);       // Short location.
   stream.AddDexRegisterEntry(Kind::kInRegisterHigh, 8);   // Short location.
   stream.EndStackMapEntry();
@@ -142,11 +145,12 @@ TEST(StackMapTest, Test2) {
   ArenaBitVector sp_mask4(&allocator, 0, true);
   sp_mask4.SetBit(6);
   sp_mask4.SetBit(7);
-  stream.BeginStackMapEntry(3, 256 * kPcAlign, 0xCD, &sp_mask4, number_of_dex_registers, 0);
+  stream.BeginStackMapEntry(3, 256 * kPcAlign, 0xCD, &sp_mask4);
   stream.AddDexRegisterEntry(Kind::kInFpuRegister, 3);      // Short location, same in stack map 2.
   stream.AddDexRegisterEntry(Kind::kInFpuRegisterHigh, 1);  // Short location.
   stream.EndStackMapEntry();
 
+  stream.EndMethod();
   size_t size = stream.PrepareForFillIn();
   void* memory = allocator.Alloc(size, kArenaAllocMisc);
   MemoryRegion region(memory, size);
@@ -303,6 +307,7 @@ TEST(StackMapTest, TestDeduplicateInlineInfoDexRegisterMap) {
   ArenaStack arena_stack(&pool);
   ScopedArenaAllocator allocator(&arena_stack);
   StackMapStream stream(&allocator, kRuntimeISA);
+  stream.BeginMethod(32, 0, 0, 2);
   ArtMethod art_method;
 
   ArenaBitVector sp_mask1(&allocator, 0, true);
@@ -310,7 +315,7 @@ TEST(StackMapTest, TestDeduplicateInlineInfoDexRegisterMap) {
   sp_mask1.SetBit(4);
   const size_t number_of_dex_registers = 2;
   const size_t number_of_dex_registers_in_inline_info = 2;
-  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask1, number_of_dex_registers, 1);
+  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask1);
   stream.AddDexRegisterEntry(Kind::kInStack, 0);         // Short location.
   stream.AddDexRegisterEntry(Kind::kConstant, -2);       // Large location.
   stream.BeginInlineInfoEntry(&art_method, 3, number_of_dex_registers_in_inline_info);
@@ -319,6 +324,7 @@ TEST(StackMapTest, TestDeduplicateInlineInfoDexRegisterMap) {
   stream.EndInlineInfoEntry();
   stream.EndStackMapEntry();
 
+  stream.EndMethod();
   size_t size = stream.PrepareForFillIn();
   void* memory = allocator.Alloc(size, kArenaAllocMisc);
   MemoryRegion region(memory, size);
@@ -367,14 +373,16 @@ TEST(StackMapTest, TestNonLiveDexRegisters) {
   ArenaStack arena_stack(&pool);
   ScopedArenaAllocator allocator(&arena_stack);
   StackMapStream stream(&allocator, kRuntimeISA);
+  stream.BeginMethod(32, 0, 0, 2);
 
   ArenaBitVector sp_mask(&allocator, 0, false);
   uint32_t number_of_dex_registers = 2;
-  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
+  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask);
   stream.AddDexRegisterEntry(Kind::kNone, 0);            // No location.
   stream.AddDexRegisterEntry(Kind::kConstant, -2);       // Large location.
   stream.EndStackMapEntry();
 
+  stream.EndMethod();
   size_t size = stream.PrepareForFillIn();
   void* memory = allocator.Alloc(size, kArenaAllocMisc);
   MemoryRegion region(memory, size);
@@ -416,25 +424,27 @@ TEST(StackMapTest, TestShareDexRegisterMap) {
   ArenaStack arena_stack(&pool);
   ScopedArenaAllocator allocator(&arena_stack);
   StackMapStream stream(&allocator, kRuntimeISA);
+  stream.BeginMethod(32, 0, 0, 2);
 
   ArenaBitVector sp_mask(&allocator, 0, false);
   uint32_t number_of_dex_registers = 2;
   // First stack map.
-  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
+  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask);
   stream.AddDexRegisterEntry(Kind::kInRegister, 0);  // Short location.
   stream.AddDexRegisterEntry(Kind::kConstant, -2);   // Large location.
   stream.EndStackMapEntry();
   // Second stack map, which should share the same dex register map.
-  stream.BeginStackMapEntry(0, 65 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
+  stream.BeginStackMapEntry(0, 65 * kPcAlign, 0x3, &sp_mask);
   stream.AddDexRegisterEntry(Kind::kInRegister, 0);  // Short location.
   stream.AddDexRegisterEntry(Kind::kConstant, -2);   // Large location.
   stream.EndStackMapEntry();
   // Third stack map (doesn't share the dex register map).
-  stream.BeginStackMapEntry(0, 66 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
+  stream.BeginStackMapEntry(0, 66 * kPcAlign, 0x3, &sp_mask);
   stream.AddDexRegisterEntry(Kind::kInRegister, 2);  // Short location.
   stream.AddDexRegisterEntry(Kind::kConstant, -2);   // Large location.
   stream.EndStackMapEntry();
 
+  stream.EndMethod();
   size_t size = stream.PrepareForFillIn();
   void* memory = allocator.Alloc(size, kArenaAllocMisc);
   MemoryRegion region(memory, size);
@@ -473,17 +483,19 @@ TEST(StackMapTest, TestNoDexRegisterMap) {
   ArenaStack arena_stack(&pool);
   ScopedArenaAllocator allocator(&arena_stack);
   StackMapStream stream(&allocator, kRuntimeISA);
+  stream.BeginMethod(32, 0, 0, 1);
 
   ArenaBitVector sp_mask(&allocator, 0, false);
   uint32_t number_of_dex_registers = 0;
-  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
+  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask);
   stream.EndStackMapEntry();
 
   number_of_dex_registers = 1;
-  stream.BeginStackMapEntry(1, 68 * kPcAlign, 0x4, &sp_mask, number_of_dex_registers, 0);
+  stream.BeginStackMapEntry(1, 68 * kPcAlign, 0x4, &sp_mask);
   stream.AddDexRegisterEntry(Kind::kNone, 0);
   stream.EndStackMapEntry();
 
+  stream.EndMethod();
   size_t size = stream.PrepareForFillIn();
   void* memory = allocator.Alloc(size, kArenaAllocMisc);
   MemoryRegion region(memory, size);
@@ -521,6 +533,7 @@ TEST(StackMapTest, InlineTest) {
   ArenaStack arena_stack(&pool);
   ScopedArenaAllocator allocator(&arena_stack);
   StackMapStream stream(&allocator, kRuntimeISA);
+  stream.BeginMethod(32, 0, 0, 2);
   ArtMethod art_method;
 
   ArenaBitVector sp_mask1(&allocator, 0, true);
@@ -528,7 +541,7 @@ TEST(StackMapTest, InlineTest) {
   sp_mask1.SetBit(4);
 
   // First stack map.
-  stream.BeginStackMapEntry(0, 10 * kPcAlign, 0x3, &sp_mask1, 2, 2);
+  stream.BeginStackMapEntry(0, 10 * kPcAlign, 0x3, &sp_mask1);
   stream.AddDexRegisterEntry(Kind::kInStack, 0);
   stream.AddDexRegisterEntry(Kind::kConstant, 4);
 
@@ -544,7 +557,7 @@ TEST(StackMapTest, InlineTest) {
   stream.EndStackMapEntry();
 
   // Second stack map.
-  stream.BeginStackMapEntry(2, 22 * kPcAlign, 0x3, &sp_mask1, 2, 3);
+  stream.BeginStackMapEntry(2, 22 * kPcAlign, 0x3, &sp_mask1);
   stream.AddDexRegisterEntry(Kind::kInStack, 56);
   stream.AddDexRegisterEntry(Kind::kConstant, 0);
 
@@ -562,13 +575,13 @@ TEST(StackMapTest, InlineTest) {
   stream.EndStackMapEntry();
 
   // Third stack map.
-  stream.BeginStackMapEntry(4, 56 * kPcAlign, 0x3, &sp_mask1, 2, 0);
+  stream.BeginStackMapEntry(4, 56 * kPcAlign, 0x3, &sp_mask1);
   stream.AddDexRegisterEntry(Kind::kNone, 0);
   stream.AddDexRegisterEntry(Kind::kConstant, 4);
   stream.EndStackMapEntry();
 
   // Fourth stack map.
-  stream.BeginStackMapEntry(6, 78 * kPcAlign, 0x3, &sp_mask1, 2, 3);
+  stream.BeginStackMapEntry(6, 78 * kPcAlign, 0x3, &sp_mask1);
   stream.AddDexRegisterEntry(Kind::kInStack, 56);
   stream.AddDexRegisterEntry(Kind::kConstant, 0);
 
@@ -584,6 +597,7 @@ TEST(StackMapTest, InlineTest) {
   stream.EndStackMapEntry();
 
+  stream.EndMethod();
   size_t size = stream.PrepareForFillIn();
   void* memory = allocator.Alloc(size, kArenaAllocMisc);
   MemoryRegion region(memory, size);
@@ -725,15 +739,17 @@ TEST(StackMapTest, TestDeduplicateStackMask) {
   ArenaStack arena_stack(&pool);
   ScopedArenaAllocator allocator(&arena_stack);
   StackMapStream stream(&allocator, kRuntimeISA);
+  stream.BeginMethod(32, 0, 0, 0);
 
   ArenaBitVector sp_mask(&allocator, 0, true);
   sp_mask.SetBit(1);
   sp_mask.SetBit(4);
-  stream.BeginStackMapEntry(0, 4 * kPcAlign, 0x3, &sp_mask, 0, 0);
+  stream.BeginStackMapEntry(0, 4 * kPcAlign, 0x3, &sp_mask);
   stream.EndStackMapEntry();
-  stream.BeginStackMapEntry(0, 8 * kPcAlign, 0x3, &sp_mask, 0, 0);
+  stream.BeginStackMapEntry(0, 8 * kPcAlign, 0x3, &sp_mask);
   stream.EndStackMapEntry();
 
+  stream.EndMethod();
   size_t size = stream.PrepareForFillIn();
   void* memory = allocator.Alloc(size, kArenaAllocMisc);
   MemoryRegion region(memory, size);
@@ -753,19 +769,21 @@ TEST(StackMapTest, TestInvokeInfo) {
   ArenaStack arena_stack(&pool);
   ScopedArenaAllocator allocator(&arena_stack);
   StackMapStream stream(&allocator, kRuntimeISA);
+  stream.BeginMethod(32, 0, 0, 0);
 
   ArenaBitVector sp_mask(&allocator, 0, true);
   sp_mask.SetBit(1);
-  stream.BeginStackMapEntry(0, 4 * kPcAlign, 0x3, &sp_mask, 0, 0);
+  stream.BeginStackMapEntry(0, 4 * kPcAlign, 0x3, &sp_mask);
   stream.AddInvoke(kSuper, 1);
   stream.EndStackMapEntry();
-  stream.BeginStackMapEntry(0, 8 * kPcAlign, 0x3, &sp_mask, 0, 0);
+  stream.BeginStackMapEntry(0, 8 * kPcAlign, 0x3, &sp_mask);
   stream.AddInvoke(kStatic, 3);
   stream.EndStackMapEntry();
-  stream.BeginStackMapEntry(0, 16 * kPcAlign, 0x3, &sp_mask, 0, 0);
+  stream.BeginStackMapEntry(0, 16 * kPcAlign, 0x3, &sp_mask);
   stream.AddInvoke(kDirect, 65535);
   stream.EndStackMapEntry();
 
+  stream.EndMethod();
   const size_t code_info_size = stream.PrepareForFillIn();
   MemoryRegion code_info_region(allocator.Alloc(code_info_size, kArenaAllocMisc), code_info_size);
   stream.FillInCodeInfo(code_info_region);
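On the wire, PrepareForFillIn now prepends four ULEB128-encoded values (frame size, core spill mask, FP spill mask, dex register count) ahead of the bit-packed tables, which is where the 0.7% .oat growth comes from. Below is a self-contained sketch of that header layout; the DecodeUleb128 helper here is a standalone re-implementation of the standard ULEB128 format used by ART's EncodeUnsignedLeb128, not ART code itself:

```cpp
#include <cstdint>
#include <cstdio>

// Standalone ULEB128 decoder matching the encoding PrepareForFillIn uses for
// the new CodeInfo header fields. Illustrative sketch only, not ART code.
static uint32_t DecodeUleb128(const uint8_t** data) {
  uint32_t result = 0;
  int shift = 0;
  uint8_t byte;
  do {
    byte = *(*data)++;
    result |= static_cast<uint32_t>(byte & 0x7f) << shift;
    shift += 7;
  } while ((byte & 0x80) != 0);
  return result;
}

int main() {
  // Header bytes as emitted for BeginMethod(32, 0, 0, 2); each value is
  // small enough to fit in a single ULEB128 byte.
  const uint8_t header[] = {32, 0, 0, 2};
  const uint8_t* p = header;
  printf("frame_size_in_bytes = %u\n", DecodeUleb128(&p));  // 32
  printf("core_spill_mask     = %u\n", DecodeUleb128(&p));  // 0
  printf("fp_spill_mask       = %u\n", DecodeUleb128(&p));  // 0
  printf("num_dex_registers   = %u\n", DecodeUleb128(&p));  // 2
  return 0;
  // The bit-packed tables (stack maps, register masks, ...) follow this
  // header, which is why the BitMemoryWriter in the diff above now starts
  // at bit position out_.size() * kBitsPerByte.
}
```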