| author | Nicolas Geoffray <ngeoffray@google.com> | 2016-11-14 12:41:12 +0000 |
|---|---|---|
| committer | Gerrit Code Review <noreply-gerritcodereview@google.com> | 2016-11-14 12:41:12 +0000 |
| commit | 81cae78d1853893ff9c3ecea4b5100002a538eb7 (patch) | |
| tree | d32c8f7e57449381511ffb206f4a335c157ae597 /compiler/optimizing | |
| parent | e8fc2cedb85ce4a6747cddbbf4cf33288e0ba5b9 (diff) | |
| parent | 3395fbc20bcd20948bec8958db91b304c17cacd8 (diff) | |
Merge "Revert "Revert "Revert "JIT root tables.""""
Diffstat (limited to 'compiler/optimizing')
| Mode | Path | Lines changed |
|---|---|---|
| -rw-r--r-- | compiler/optimizing/code_generator.cc | 26 |
| -rw-r--r-- | compiler/optimizing/code_generator.h | 45 |
| -rw-r--r-- | compiler/optimizing/code_generator_arm.cc | 44 |
| -rw-r--r-- | compiler/optimizing/code_generator_arm.h | 15 |
| -rw-r--r-- | compiler/optimizing/code_generator_arm64.cc | 51 |
| -rw-r--r-- | compiler/optimizing/code_generator_arm64.h | 15 |
| -rw-r--r-- | compiler/optimizing/code_generator_mips.cc | 5 |
| -rw-r--r-- | compiler/optimizing/code_generator_x86.cc | 42 |
| -rw-r--r-- | compiler/optimizing/code_generator_x86.h | 6 |
| -rw-r--r-- | compiler/optimizing/code_generator_x86_64.cc | 43 |
| -rw-r--r-- | compiler/optimizing/code_generator_x86_64.h | 6 |
| -rw-r--r-- | compiler/optimizing/nodes.h | 11 |
| -rw-r--r-- | compiler/optimizing/optimizing_compiler.cc | 30 |
| -rw-r--r-- | compiler/optimizing/sharpening.cc | 16 |
14 files changed, 49 insertions, 306 deletions
```diff
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index a5f248dd20..8b450e11dc 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -1375,30 +1375,4 @@ uint32_t CodeGenerator::GetReferenceDisableFlagOffset() const {
   return klass->GetDisableIntrinsicFlagOffset().Uint32Value();
 }
 
-void CodeGenerator::EmitJitRoots(uint8_t* code,
-                                 Handle<mirror::ObjectArray<mirror::Object>> roots,
-                                 const uint8_t* roots_data,
-                                 Handle<mirror::DexCache> outer_dex_cache) {
-  DCHECK_EQ(static_cast<size_t>(roots->GetLength()), GetNumberOfJitRoots());
-  StackHandleScope<1> hs(Thread::Current());
-  MutableHandle<mirror::DexCache> h_dex_cache(hs.NewHandle<mirror::DexCache>(nullptr));
-  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
-  size_t index = 0;
-  for (auto& entry : jit_string_roots_) {
-    const DexFile& entry_dex_file = *entry.first.dex_file;
-    // Avoid the expensive FindDexCache call by checking if the string is
-    // in the compiled method's dex file.
-    h_dex_cache.Assign(IsSameDexFile(*outer_dex_cache->GetDexFile(), entry_dex_file)
-        ? outer_dex_cache.Get()
-        : class_linker->FindDexCache(hs.Self(), entry_dex_file));
-    mirror::String* string = class_linker->LookupString(
-        entry_dex_file, entry.first.string_index, h_dex_cache);
-    DCHECK(string != nullptr) << "JIT roots require strings to have been loaded";
-    roots->Set(index, string);
-    entry.second = index;
-    ++index;
-  }
-  EmitJitRootPatches(code, roots_data);
-}
-
 }  // namespace art
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index a5d19abe92..bf246ad309 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -32,7 +31,6 @@
 #include "optimizing_compiler_stats.h"
 #include "read_barrier_option.h"
 #include "stack_map_stream.h"
-#include "string_reference.h"
 #include "utils/label.h"
 
 namespace art {
@@ -336,17 +335,6 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
   void BuildStackMaps(MemoryRegion region, const DexFile::CodeItem& code_item);
   size_t ComputeStackMapsSize();
 
-  size_t GetNumberOfJitRoots() const {
-    return jit_string_roots_.size();
-  }
-
-  // Fills the `literals` array with literals collected during code generation.
-  // Also emits literal patches.
-  void EmitJitRoots(uint8_t* code,
-                    Handle<mirror::ObjectArray<mirror::Object>> roots,
-                    const uint8_t* roots_data,
-                    Handle<mirror::DexCache> outer_dex_cache)
-      REQUIRES_SHARED(Locks::mutator_lock_);
 
   bool IsLeafMethod() const {
     return is_leaf_;
@@ -527,26 +515,6 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
   virtual HLoadClass::LoadKind GetSupportedLoadClassKind(
       HLoadClass::LoadKind desired_class_load_kind) = 0;
 
-  static LocationSummary::CallKind GetLoadStringCallKind(HLoadString* load) {
-    switch (load->GetLoadKind()) {
-      case HLoadString::LoadKind::kBssEntry:
-        DCHECK(load->NeedsEnvironment());
-        return LocationSummary::kCallOnSlowPath;
-      case HLoadString::LoadKind::kDexCacheViaMethod:
-        DCHECK(load->NeedsEnvironment());
-        return LocationSummary::kCallOnMainOnly;
-      case HLoadString::LoadKind::kJitTableAddress:
-        DCHECK(!load->NeedsEnvironment());
-        return kEmitCompilerReadBarrier
-            ? LocationSummary::kCallOnSlowPath
-            : LocationSummary::kNoCall;
-        break;
-      default:
-        DCHECK(!load->NeedsEnvironment());
-        return LocationSummary::kNoCall;
-    }
-  }
-
   // Check if the desired_dispatch_info is supported. If it is, return it,
   // otherwise return a fall-back info that should be used instead.
   virtual HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
@@ -603,8 +571,6 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
         fpu_callee_save_mask_(fpu_callee_save_mask),
         stack_map_stream_(graph->GetArena()),
         block_order_(nullptr),
-        jit_string_roots_(StringReferenceValueComparator(),
-                          graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
         disasm_info_(nullptr),
         stats_(stats),
         graph_(graph),
@@ -671,12 +637,6 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
     return current_slow_path_;
   }
 
-  // Emit the patches assocatied with JIT roots. Only applies to JIT compiled code.
-  virtual void EmitJitRootPatches(uint8_t* code ATTRIBUTE_UNUSED,
-                                  const uint8_t* roots_data ATTRIBUTE_UNUSED) {
-    DCHECK_EQ(jit_string_roots_.size(), 0u);
-  }
-
   // Frame size required for this method.
   uint32_t frame_size_;
   uint32_t core_spill_mask_;
@@ -702,11 +662,6 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
   // The order to use for code generation.
   const ArenaVector<HBasicBlock*>* block_order_;
 
-  // Maps a StringReference (dex_file, string_index) to the index in the literal table.
-  // Entries are intially added with a 0 index, and `EmitJitRoots` will compute all the
-  // indices.
-  ArenaSafeMap<StringReference, size_t, StringReferenceValueComparator> jit_string_roots_;
-
   DisassemblyInformation* disasm_info_;
 
  private:
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 035c02e37a..7c72d00389 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -1222,9 +1222,7 @@ CodeGeneratorARM::CodeGeneratorARM(HGraph* graph,
                        graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
       pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
       boot_image_address_patches_(std::less<uint32_t>(),
-                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
-      jit_string_patches_(StringReferenceValueComparator(),
-                          graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
+                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
   // Always save the LR register to mimic Quick.
   AddAllocatedRegister(Location::RegisterLocation(LR));
 }
@@ -5903,9 +5901,6 @@ HLoadString::LoadKind CodeGeneratorARM::GetSupportedLoadStringKind(
     case HLoadString::LoadKind::kBssEntry:
       DCHECK(!Runtime::Current()->UseJitCompilation());
       break;
-    case HLoadString::LoadKind::kJitTableAddress:
-      DCHECK(Runtime::Current()->UseJitCompilation());
-      break;
     case HLoadString::LoadKind::kDexCacheViaMethod:
       break;
   }
@@ -5913,8 +5908,13 @@ HLoadString::LoadKind CodeGeneratorARM::GetSupportedLoadStringKind(
 }
 
 void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
-  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
+  LocationSummary::CallKind call_kind = load->NeedsEnvironment()
+      ? ((load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod)
+          ? LocationSummary::kCallOnMainOnly
+          : LocationSummary::kCallOnSlowPath)
+      : LocationSummary::kNoCall;
   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
+
   HLoadString::LoadKind load_kind = load->GetLoadKind();
   if (load_kind == HLoadString::LoadKind::kDexCacheViaMethod) {
     locations->SetOut(Location::RegisterLocation(R0));
@@ -5987,13 +5987,6 @@ void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
       __ Bind(slow_path->GetExitLabel());
       return;
     }
-    case HLoadString::LoadKind::kJitTableAddress: {
-      __ LoadLiteral(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
-                                                                load->GetStringIndex()));
-      // /* GcRoot<mirror::String> */ out = *out
-      GenerateGcRootFieldLoad(load, out_loc, out, /* offset */ 0, kCompilerReadBarrierOption);
-      return;
-    }
     default:
       break;
   }
@@ -7350,14 +7343,6 @@ Literal* CodeGeneratorARM::DeduplicateDexCacheAddressLiteral(uint32_t address) {
   return DeduplicateUint32Literal(address, &uint32_literals_);
 }
 
-Literal* CodeGeneratorARM::DeduplicateJitStringLiteral(const DexFile& dex_file,
-                                                       uint32_t string_index) {
-  jit_string_roots_.Overwrite(StringReference(&dex_file, string_index), /* placeholder */ 0u);
-  return jit_string_patches_.GetOrCreate(
-      StringReference(&dex_file, string_index),
-      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
-}
-
 template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
 inline void CodeGeneratorARM::EmitPcRelativeLinkerPatches(
     const ArenaDeque<PcRelativePatchInfo>& infos,
@@ -7674,21 +7659,6 @@ void InstructionCodeGeneratorARM::VisitClassTableGet(HClassTableGet* instruction
   }
 }
 
-void CodeGeneratorARM::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
-  for (const auto& entry : jit_string_patches_) {
-    const auto& it = jit_string_roots_.find(entry.first);
-    DCHECK(it != jit_string_roots_.end());
-    size_t index_in_table = it->second;
-    Literal* literal = entry.second;
-    DCHECK(literal->GetLabel()->IsBound());
-    uint32_t literal_offset = literal->GetLabel()->Position();
-    uintptr_t address =
-        reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
-    uint8_t* data = code + literal_offset;
-    reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
-  }
-}
-
 #undef __
 #undef QUICK_ENTRY_POINT
diff --git a/compiler/optimizing/code_generator_arm.h b/compiler/optimizing/code_generator_arm.h
index 8ace3dac08..f95dd573cb 100644
--- a/compiler/optimizing/code_generator_arm.h
+++ b/compiler/optimizing/code_generator_arm.h
@@ -488,12 +488,9 @@ class CodeGeneratorARM : public CodeGenerator {
   Literal* DeduplicateBootImageTypeLiteral(const DexFile& dex_file, uint32_t type_index);
   Literal* DeduplicateBootImageAddressLiteral(uint32_t address);
   Literal* DeduplicateDexCacheAddressLiteral(uint32_t address);
-  Literal* DeduplicateJitStringLiteral(const DexFile& dex_file, uint32_t string_index);
 
   void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;
 
-  void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) OVERRIDE;
-
   // Fast path implementation of ReadBarrier::Barrier for a heap
   // reference field load when Baker's read barriers are used.
   void GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
@@ -594,9 +591,9 @@ class CodeGeneratorARM : public CodeGenerator {
 
   using Uint32ToLiteralMap = ArenaSafeMap<uint32_t, Literal*>;
   using MethodToLiteralMap = ArenaSafeMap<MethodReference, Literal*, MethodReferenceComparator>;
-  using StringToLiteralMap = ArenaSafeMap<StringReference,
-                                          Literal*,
-                                          StringReferenceValueComparator>;
+  using BootStringToLiteralMap = ArenaSafeMap<StringReference,
+                                              Literal*,
+                                              StringReferenceValueComparator>;
   using BootTypeToLiteralMap = ArenaSafeMap<TypeReference,
                                             Literal*,
                                             TypeReferenceValueComparator>;
@@ -608,6 +605,7 @@ class CodeGeneratorARM : public CodeGenerator {
   PcRelativePatchInfo* NewPcRelativePatch(const DexFile& dex_file,
                                           uint32_t offset_or_index,
                                           ArenaDeque<PcRelativePatchInfo>* patches);
+
   template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
   static void EmitPcRelativeLinkerPatches(const ArenaDeque<PcRelativePatchInfo>& infos,
                                           ArenaVector<LinkerPatch>* linker_patches);
@@ -632,7 +630,7 @@ class CodeGeneratorARM : public CodeGenerator {
   // PC-relative patch info for each HArmDexCacheArraysBase.
   ArenaDeque<PcRelativePatchInfo> pc_relative_dex_cache_patches_;
   // Deduplication map for boot string literals for kBootImageLinkTimeAddress.
-  StringToLiteralMap boot_image_string_patches_;
+  BootStringToLiteralMap boot_image_string_patches_;
   // PC-relative String patch info; type depends on configuration (app .bss or boot image PIC).
   ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
   // Deduplication map for boot type literals for kBootImageLinkTimeAddress.
@@ -642,9 +640,6 @@ class CodeGeneratorARM : public CodeGenerator {
   // Deduplication map for patchable boot image addresses.
   Uint32ToLiteralMap boot_image_address_patches_;
 
-  // Patches for string literals in JIT compiled code.
-  StringToLiteralMap jit_string_patches_;
-
   DISALLOW_COPY_AND_ASSIGN(CodeGeneratorARM);
 };
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index ea579907df..35b16051e5 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -1166,9 +1166,7 @@ CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                        graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
       pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
       boot_image_address_patches_(std::less<uint32_t>(),
-                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
-      jit_string_patches_(StringReferenceValueComparator(),
-                          graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
+                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
   // Save the link register (containing the return address) to mimic Quick.
   AddAllocatedRegister(LocationFrom(lr));
 }
@@ -4140,14 +4138,6 @@ vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateDexCacheAddress
   return DeduplicateUint64Literal(address);
 }
 
-vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitStringLiteral(
-    const DexFile& dex_file, uint32_t string_index) {
-  jit_string_roots_.Overwrite(StringReference(&dex_file, string_index), /* placeholder */ 0u);
-  return jit_string_patches_.GetOrCreate(
-      StringReference(&dex_file, string_index),
-      [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
-}
-
 void CodeGeneratorARM64::EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label,
                                              vixl::aarch64::Register reg) {
   DCHECK(reg.IsX());
@@ -4533,15 +4523,16 @@ HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
       break;
     case HLoadString::LoadKind::kDexCacheViaMethod:
       break;
-    case HLoadString::LoadKind::kJitTableAddress:
-      DCHECK(Runtime::Current()->UseJitCompilation());
-      break;
   }
   return desired_string_load_kind;
 }
 
 void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
-  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
+  LocationSummary::CallKind call_kind = load->NeedsEnvironment()
+      ? ((load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod)
+          ? LocationSummary::kCallOnMainOnly
+          : LocationSummary::kCallOnSlowPath)
+      : LocationSummary::kNoCall;
   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
   if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
     InvokeRuntimeCallingConvention calling_convention;
@@ -4567,7 +4558,6 @@ void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
 
 void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
   Register out = OutputRegister(load);
-  Location out_loc = load->GetLocations()->Out();
 
   switch (load->GetLoadKind()) {
     case HLoadString::LoadKind::kBootImageLinkTimeAddress:
@@ -4604,9 +4594,9 @@ void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
       // Add LDR with its PC-relative String patch.
       vixl::aarch64::Label* ldr_label =
          codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
-      // /* GcRoot<mirror::String> */ out = *(base_address + offset) /* PC-relative */
+      // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
       GenerateGcRootFieldLoad(load,
-                              out_loc,
+                              load->GetLocations()->Out(),
                               temp,
                              /* offset placeholder */ 0u,
                              ldr_label,
@@ -4618,17 +4608,6 @@ void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
       __ Bind(slow_path->GetExitLabel());
       return;
     }
-    case HLoadString::LoadKind::kJitTableAddress: {
-      __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
-                                                        load->GetStringIndex()));
-      GenerateGcRootFieldLoad(load,
-                              out_loc,
-                              out.X(),
-                              /* offset */ 0,
-                              /* fixup_label */ nullptr,
-                              kCompilerReadBarrierOption);
-      return;
-    }
     default:
       break;
   }
@@ -5748,19 +5727,7 @@ void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instructi
   }
 }
 
-void CodeGeneratorARM64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
-  for (const auto& entry : jit_string_patches_) {
-    const auto& it = jit_string_roots_.find(entry.first);
-    DCHECK(it != jit_string_roots_.end());
-    size_t index_in_table = it->second;
-    vixl::aarch64::Literal<uint32_t>* literal = entry.second;
-    uint32_t literal_offset = literal->GetOffset();
-    uintptr_t address =
-        reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
-    uint8_t* data = code + literal_offset;
-    reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
-  }
-}
+
 
 #undef __
 #undef QUICK_ENTRY_POINT
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index a2ab60709f..0e8d4fd549 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -565,8 +565,6 @@ class CodeGeneratorARM64 : public CodeGenerator {
                                                               uint32_t type_index);
   vixl::aarch64::Literal<uint32_t>* DeduplicateBootImageAddressLiteral(uint64_t address);
   vixl::aarch64::Literal<uint64_t>* DeduplicateDexCacheAddressLiteral(uint64_t address);
-  vixl::aarch64::Literal<uint32_t>* DeduplicateJitStringLiteral(const DexFile& dex_file,
-                                                                uint32_t string_index);
 
   void EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label, vixl::aarch64::Register reg);
   void EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
@@ -578,8 +576,6 @@ class CodeGeneratorARM64 : public CodeGenerator {
 
   void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;
 
-  void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) OVERRIDE;
-
   // Fast path implementation of ReadBarrier::Barrier for a heap
   // reference field load when Baker's read barriers are used.
   void GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
@@ -677,9 +673,9 @@ class CodeGeneratorARM64 : public CodeGenerator {
   using MethodToLiteralMap = ArenaSafeMap<MethodReference,
                                           vixl::aarch64::Literal<uint64_t>*,
                                           MethodReferenceComparator>;
-  using StringToLiteralMap = ArenaSafeMap<StringReference,
-                                          vixl::aarch64::Literal<uint32_t>*,
-                                          StringReferenceValueComparator>;
+  using BootStringToLiteralMap = ArenaSafeMap<StringReference,
+                                              vixl::aarch64::Literal<uint32_t>*,
+                                              StringReferenceValueComparator>;
   using BootTypeToLiteralMap = ArenaSafeMap<TypeReference,
                                             vixl::aarch64::Literal<uint32_t>*,
                                             TypeReferenceValueComparator>;
@@ -743,7 +739,7 @@ class CodeGeneratorARM64 : public CodeGenerator {
   // PC-relative DexCache access info.
   ArenaDeque<PcRelativePatchInfo> pc_relative_dex_cache_patches_;
   // Deduplication map for boot string literals for kBootImageLinkTimeAddress.
-  StringToLiteralMap boot_image_string_patches_;
+  BootStringToLiteralMap boot_image_string_patches_;
   // PC-relative String patch info; type depends on configuration (app .bss or boot image PIC).
   ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
   // Deduplication map for boot type literals for kBootImageLinkTimeAddress.
@@ -753,9 +749,6 @@ class CodeGeneratorARM64 : public CodeGenerator {
   // Deduplication map for patchable boot image addresses.
   Uint32ToLiteralMap boot_image_address_patches_;
 
-  // Patches for string literals in JIT compiled code.
-  StringToLiteralMap jit_string_patches_;
-
   DISALLOW_COPY_AND_ASSIGN(CodeGeneratorARM64);
 };
diff --git a/compiler/optimizing/code_generator_mips.cc b/compiler/optimizing/code_generator_mips.cc
index 0936ac0c55..f169eb00f3 100644
--- a/compiler/optimizing/code_generator_mips.cc
+++ b/compiler/optimizing/code_generator_mips.cc
@@ -5215,11 +5215,6 @@ HLoadString::LoadKind CodeGeneratorMIPS::GetSupportedLoadStringKind(
     case HLoadString::LoadKind::kDexCacheViaMethod:
       fallback_load = false;
      break;
-    case HLoadString::LoadKind::kJitTableAddress:
-      DCHECK(Runtime::Current()->UseJitCompilation());
-      // TODO: implement.
-      fallback_load = true;
-      break;
   }
   if (fallback_load) {
     desired_string_load_kind = HLoadString::LoadKind::kDexCacheViaMethod;
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index c7addcef31..2a9e21d1e1 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -1020,7 +1020,6 @@ CodeGeneratorX86::CodeGeneratorX86(HGraph* graph,
       simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
       string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
       type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
-      jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
       constant_area_start_(-1),
       fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
       method_address_offset_(-1) {
@@ -6227,15 +6226,16 @@ HLoadString::LoadKind CodeGeneratorX86::GetSupportedLoadStringKind(
       break;
     case HLoadString::LoadKind::kDexCacheViaMethod:
       break;
-    case HLoadString::LoadKind::kJitTableAddress:
-      DCHECK(Runtime::Current()->UseJitCompilation());
-      break;
   }
   return desired_string_load_kind;
 }
 
 void LocationsBuilderX86::VisitLoadString(HLoadString* load) {
-  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
+  LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
+      ? ((load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod)
+          ? LocationSummary::kCallOnMainOnly
+          : LocationSummary::kCallOnSlowPath)
+      : LocationSummary::kNoCall;
   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
   HLoadString::LoadKind load_kind = load->GetLoadKind();
   if (load_kind == HLoadString::LoadKind::kBootImageLinkTimePcRelative ||
@@ -6260,14 +6260,6 @@ void LocationsBuilderX86::VisitLoadString(HLoadString* load) {
   }
 }
 
-Label* CodeGeneratorX86::NewJitRootStringPatch(const DexFile& dex_file, uint32_t dex_index) {
-  jit_string_roots_.Overwrite(StringReference(&dex_file, dex_index), /* placeholder */ 0u);
-  // Add a patch entry and return the label.
-  jit_string_patches_.emplace_back(dex_file, dex_index);
-  PatchInfo<Label>* info = &jit_string_patches_.back();
-  return &info->label;
-}
-
 void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) {
   LocationSummary* locations = load->GetLocations();
   Location out_loc = locations->Out();
@@ -6296,7 +6288,7 @@ void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) {
       Register method_address = locations->InAt(0).AsRegister<Register>();
       Address address = Address(method_address, CodeGeneratorX86::kDummy32BitOffset);
       Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
-      // /* GcRoot<mirror::String> */ out = *address /* PC-relative */
+      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
       GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
       SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86(load);
       codegen_->AddSlowPath(slow_path);
@@ -6305,14 +6297,6 @@ void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) {
       __ Bind(slow_path->GetExitLabel());
       return;
     }
-    case HLoadString::LoadKind::kJitTableAddress: {
-      Address address = Address::Absolute(CodeGeneratorX86::kDummy32BitOffset);
-      Label* fixup_label = codegen_->NewJitRootStringPatch(
-          load->GetDexFile(), load->GetStringIndex());
-      // /* GcRoot<mirror::String> */ out = *address
-      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
-      return;
-    }
     default:
       break;
   }
@@ -7738,20 +7722,6 @@ void CodeGeneratorX86::MoveFromReturnRegister(Location target, Primitive::Type t
   }
 }
 
-void CodeGeneratorX86::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
-  for (const PatchInfo<Label>& info : jit_string_patches_) {
-    const auto& it = jit_string_roots_.find(StringReference(&info.dex_file, info.index));
-    DCHECK(it != jit_string_roots_.end());
-    size_t index_in_table = it->second;
-    uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
-    uintptr_t address =
-        reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
-    typedef __attribute__((__aligned__(1))) uint32_t unaligned_uint32_t;
-    reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
-        dchecked_integral_cast<uint32_t>(address);
-  }
-}
-
 #undef __
 
 }  // namespace x86
diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h
index 16ea6b55d6..164231b4e5 100644
--- a/compiler/optimizing/code_generator_x86.h
+++ b/compiler/optimizing/code_generator_x86.h
@@ -414,15 +414,12 @@ class CodeGeneratorX86 : public CodeGenerator {
   void RecordTypePatch(HLoadClass* load_class);
   Label* NewStringBssEntryPatch(HLoadString* load_string);
   Label* NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file, uint32_t element_offset);
-  Label* NewJitRootStringPatch(const DexFile& dex_file, uint32_t dex_index);
 
   void MoveFromReturnRegister(Location trg, Primitive::Type type) OVERRIDE;
 
   // Emit linker patches.
   void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;
 
-  void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) OVERRIDE;
-
   // Emit a write barrier.
   void MarkGCCard(Register temp,
                   Register card,
@@ -619,9 +616,6 @@ class CodeGeneratorX86 : public CodeGenerator {
   // Type patch locations.
   ArenaDeque<PatchInfo<Label>> type_patches_;
 
-  // Patches for string root accesses in JIT compiled code.
-  ArenaDeque<PatchInfo<Label>> jit_string_patches_;
-
   // Offset to the start of the constant area in the assembled code.
   // Used for fixups to the constant area.
   int32_t constant_area_start_;
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 7be887f365..cb89e50dbb 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -1266,8 +1266,7 @@ CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
       simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
       string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
       type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
-      fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
-      jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
+      fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
   AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
 }
@@ -5639,15 +5638,16 @@ HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
       break;
     case HLoadString::LoadKind::kDexCacheViaMethod:
       break;
-    case HLoadString::LoadKind::kJitTableAddress:
-      DCHECK(Runtime::Current()->UseJitCompilation());
-      break;
   }
   return desired_string_load_kind;
 }
 
 void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
-  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
+  LocationSummary::CallKind call_kind = load->NeedsEnvironment()
+      ? ((load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod)
+          ? LocationSummary::kCallOnMainOnly
+          : LocationSummary::kCallOnSlowPath)
+      : LocationSummary::kNoCall;
   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
   if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
     locations->SetOut(Location::RegisterLocation(RAX));
@@ -5667,14 +5667,6 @@ void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
   }
 }
 
-Label* CodeGeneratorX86_64::NewJitRootStringPatch(const DexFile& dex_file, uint32_t dex_index) {
-  jit_string_roots_.Overwrite(StringReference(&dex_file, dex_index), /* placeholder */ 0u);
-  // Add a patch entry and return the label.
-  jit_string_patches_.emplace_back(dex_file, dex_index);
-  PatchInfo<Label>* info = &jit_string_patches_.back();
-  return &info->label;
-}
-
 void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
   LocationSummary* locations = load->GetLocations();
   Location out_loc = locations->Out();
@@ -5706,15 +5698,6 @@ void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
       __ Bind(slow_path->GetExitLabel());
       return;
     }
-    case HLoadString::LoadKind::kJitTableAddress: {
-      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
-                                          /* no_rip */ true);
-      Label* fixup_label =
-          codegen_->NewJitRootStringPatch(load->GetDexFile(), load->GetStringIndex());
-      // /* GcRoot<mirror::String> */ out = *address
-      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
-      return;
-    }
     default:
       break;
   }
@@ -7093,20 +7076,6 @@ void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
   }
 }
 
-void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
-  for (const PatchInfo<Label>& info : jit_string_patches_) {
-    const auto& it = jit_string_roots_.find(StringReference(&info.dex_file, info.index));
-    DCHECK(it != jit_string_roots_.end());
-    size_t index_in_table = it->second;
-    uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
-    uintptr_t address =
-        reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
-    typedef __attribute__((__aligned__(1))) uint32_t unaligned_uint32_t;
-    reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
-        dchecked_integral_cast<uint32_t>(address);
-  }
-}
-
 #undef __
 
 }  // namespace x86_64
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index 0f70b15787..e5a4152517 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -412,14 +412,11 @@ class CodeGeneratorX86_64 : public CodeGenerator {
   void RecordTypePatch(HLoadClass* load_class);
   Label* NewStringBssEntryPatch(HLoadString* load_string);
   Label* NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file, uint32_t element_offset);
-  Label* NewJitRootStringPatch(const DexFile& dex_file, uint32_t dex_index);
 
   void MoveFromReturnRegister(Location trg, Primitive::Type type) OVERRIDE;
 
   void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;
 
-  void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) OVERRIDE;
-
   const X86_64InstructionSetFeatures& GetInstructionSetFeatures() const {
     return isa_features_;
   }
@@ -605,9 +602,6 @@ class CodeGeneratorX86_64 : public CodeGenerator {
   // Fixups for jump tables need to be handled specially.
   ArenaVector<JumpTableRIPFixup*> fixups_to_jump_tables_;
 
-  // Patches for string literals in JIT compiled code.
-  ArenaDeque<PatchInfo<Label>> jit_string_patches_;
-
   DISALLOW_COPY_AND_ASSIGN(CodeGeneratorX86_64);
 };
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 215ed54a4a..e0c582a76d 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -5690,10 +5690,7 @@ class HLoadString FINAL : public HInstruction {
     // all other types are unavailable.
     kDexCacheViaMethod,
 
-    // Load from the root table associated with the JIT compiled method.
-    kJitTableAddress,
-
-    kLast = kJitTableAddress,
+    kLast = kDexCacheViaMethod
   };
 
   HLoadString(HCurrentMethod* current_method,
@@ -5751,8 +5748,7 @@ class HLoadString FINAL : public HInstruction {
     LoadKind load_kind = GetLoadKind();
     if (load_kind == LoadKind::kBootImageLinkTimeAddress ||
         load_kind == LoadKind::kBootImageLinkTimePcRelative ||
-        load_kind == LoadKind::kBootImageAddress ||
-        load_kind == LoadKind::kJitTableAddress) {
+        load_kind == LoadKind::kBootImageAddress) {
       return false;
     }
     return !IsInDexCache();
@@ -5805,8 +5801,7 @@ class HLoadString FINAL : public HInstruction {
     return load_kind == LoadKind::kBootImageLinkTimeAddress ||
         load_kind == LoadKind::kBootImageLinkTimePcRelative ||
         load_kind == LoadKind::kBssEntry ||
-        load_kind == LoadKind::kDexCacheViaMethod ||
-        load_kind == LoadKind::kJitTableAddress;
+        load_kind == LoadKind::kDexCacheViaMethod;
   }
 
   static bool HasAddress(LoadKind load_kind) {
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 830f834591..7a930cce71 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -117,7 +117,6 @@ class CodeVectorAllocator FINAL : public CodeAllocator {
 
   size_t GetSize() const { return size_; }
   const ArenaVector<uint8_t>& GetMemory() const { return memory_; }
-  uint8_t* GetData() { return memory_.data(); }
 
  private:
   ArenaVector<uint8_t> memory_;
@@ -1126,7 +1125,7 @@ bool OptimizingCompiler::JitCompile(Thread* self,
                                     jit::JitCodeCache* code_cache,
                                     ArtMethod* method,
                                     bool osr) {
-  StackHandleScope<3> hs(self);
+  StackHandleScope<2> hs(self);
   Handle<mirror::ClassLoader> class_loader(hs.NewHandle(
       method->GetDeclaringClass()->GetClassLoader()));
   Handle<mirror::DexCache> dex_cache(hs.NewHandle(method->GetDexCache()));
@@ -1172,43 +1171,22 @@ bool OptimizingCompiler::JitCompile(Thread* self,
   }
 
   size_t stack_map_size = codegen->ComputeStackMapsSize();
-  size_t number_of_roots = codegen->GetNumberOfJitRoots();
-  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
-  // We allocate an object array to ensure the JIT roots that we will collect in EmitJitRoots
-  // will be visible by the GC between EmitLiterals and CommitCode. Once CommitCode is
-  // executed, this array is not needed.
-  Handle<mirror::ObjectArray<mirror::Object>> roots(
-      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(
-          self, class_linker->GetClassRoot(ClassLinker::kObjectArrayClass), number_of_roots)));
-  if (roots.Get() == nullptr) {
-    // Out of memory, just clear the exception to avoid any Java exception uncaught problems.
-    DCHECK(self->IsExceptionPending());
-    self->ClearException();
-    return false;
-  }
-  uint8_t* stack_map_data = nullptr;
-  uint8_t* roots_data = nullptr;
-  code_cache->ReserveData(
-      self, stack_map_size, number_of_roots, method, &stack_map_data, &roots_data);
-  if (stack_map_data == nullptr || roots_data == nullptr) {
+  uint8_t* stack_map_data = code_cache->ReserveData(self, stack_map_size, method);
+  if (stack_map_data == nullptr) {
     return false;
   }
   MaybeRecordStat(MethodCompilationStat::kCompiled);
   codegen->BuildStackMaps(MemoryRegion(stack_map_data, stack_map_size), *code_item);
-  codegen->EmitJitRoots(code_allocator.GetData(), roots, roots_data, dex_cache);
-
   const void* code = code_cache->CommitCode(
       self,
       method,
       stack_map_data,
-      roots_data,
       codegen->HasEmptyFrame() ? 0 : codegen->GetFrameSize(),
       codegen->GetCoreSpillMask(),
       codegen->GetFpuSpillMask(),
       code_allocator.GetMemory().data(),
       code_allocator.GetSize(),
-      osr,
-      roots);
+      osr);
 
   if (code == nullptr) {
     code_cache->ClearData(self, stack_map_data);
diff --git a/compiler/optimizing/sharpening.cc b/compiler/optimizing/sharpening.cc
index 15254edcab..63e4ca674e 100644
--- a/compiler/optimizing/sharpening.cc
+++ b/compiler/optimizing/sharpening.cc
@@ -281,8 +281,7 @@ void HSharpening::ProcessLoadString(HLoadString* load_string) {
         : hs.NewHandle(class_linker->FindDexCache(soa.Self(), dex_file));
 
     if (codegen_->GetCompilerOptions().IsBootImage()) {
-      // Compiling boot image. Resolve the string and allocate it if needed, to ensure
-      // the string will be added to the boot image.
+      // Compiling boot image. Resolve the string and allocate it if needed.
       DCHECK(!runtime->UseJitCompilation());
       mirror::String* string = class_linker->ResolveString(dex_file, string_index, dex_cache);
       CHECK(string != nullptr);
@@ -298,14 +297,10 @@ void HSharpening::ProcessLoadString(HLoadString* load_string) {
     } else if (runtime->UseJitCompilation()) {
       // TODO: Make sure we don't set the "compile PIC" flag for JIT as that's bogus.
       // DCHECK(!codegen_->GetCompilerOptions().GetCompilePic());
-      mirror::String* string = class_linker->LookupString(dex_file, string_index, dex_cache);
-      if (string != nullptr) {
-        if (runtime->GetHeap()->ObjectIsInBootImageSpace(string)) {
-          desired_load_kind = HLoadString::LoadKind::kBootImageAddress;
-          address = reinterpret_cast64<uint64_t>(string);
-        } else {
-          desired_load_kind = HLoadString::LoadKind::kJitTableAddress;
-        }
+      mirror::String* string = dex_cache->GetResolvedString(string_index);
+      if (string != nullptr && runtime->GetHeap()->ObjectIsInBootImageSpace(string)) {
+        desired_load_kind = HLoadString::LoadKind::kBootImageAddress;
+        address = reinterpret_cast64<uint64_t>(string);
       }
     } else {
       // AOT app compilation. Try to lookup the string without allocating if not found.
@@ -327,7 +322,6 @@ void HSharpening::ProcessLoadString(HLoadString* load_string) {
     case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
     case HLoadString::LoadKind::kBssEntry:
     case HLoadString::LoadKind::kDexCacheViaMethod:
-    case HLoadString::LoadKind::kJitTableAddress:
      load_string->SetLoadKindWithStringReference(load_kind, dex_file, string_index);
      break;
    case HLoadString::LoadKind::kBootImageAddress:
```