author     Nicolas Geoffray <ngeoffray@google.com>                       2014-10-31 14:24:05 +0000
committer  Gerrit Code Review <noreply-gerritcodereview@google.com>     2014-10-31 14:24:06 +0000
commit     a9014f977ae90373f5bad4cf812c2bda810b10f8 (patch)
tree       40f4bc76cb5e7cf9c95fced70b1ad5c2c2d39f4e /compiler/optimizing
parent     8b557af85871e5086589afd2b3a17089d0f67df8 (diff)
parent     b5f62b3dc5ac2731ba8ad53cdf3d9bdb14fbf86b (diff)
Merge "Support for CONST_STRING in optimizing compiler."
Diffstat (limited to 'compiler/optimizing')

-rw-r--r--  compiler/optimizing/builder.cc                12
-rw-r--r--  compiler/optimizing/code_generator_arm.cc     51
-rw-r--r--  compiler/optimizing/code_generator_arm64.cc    1
-rw-r--r--  compiler/optimizing/code_generator_x86.cc     48
-rw-r--r--  compiler/optimizing/code_generator_x86_64.cc  49
-rw-r--r--  compiler/optimizing/nodes.h                   34
-rw-r--r--  compiler/optimizing/optimizing_compiler.cc     4
-rw-r--r--  compiler/optimizing/register_allocator.cc     17

8 files changed, 210 insertions, 6 deletions
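A note on the builder change below: CONST_STRING uses dex instruction format 21c (a 16-bit string index) while CONST_STRING_JUMBO uses format 31c (a 32-bit index), and both lower to the same HLoadString node. The hypothetical decoder fragment here shows why one node class suffices; the structs are illustrative, not dex or ART types, and only the field widths are taken from the dex format definitions.

#include <cstdint>

// Illustrative operand layouts for the two opcodes handled in builder.cc.
struct Format21c { uint8_t vreg_a; uint16_t string_index; };  // const-string
struct Format31c { uint8_t vreg_a; uint32_t string_index; };  // const-string/jumbo

// Both indices widen to the uint32_t that HLoadString stores, which is why
// the two switch cases can build the exact same node.
uint32_t StringIndexOf(const Format21c& insn) { return insn.string_index; }
uint32_t StringIndexOf(const Format31c& insn) { return insn.string_index; }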
diff --git a/compiler/optimizing/builder.cc b/compiler/optimizing/builder.cc
index e4ccd9651b..434d9efbcf 100644
--- a/compiler/optimizing/builder.cc
+++ b/compiler/optimizing/builder.cc
@@ -1173,6 +1173,18 @@ bool HGraphBuilder::AnalyzeDexInstruction(const Instruction& instruction, uint32
       break;
     }
 
+    case Instruction::CONST_STRING: {
+      current_block_->AddInstruction(new (arena_) HLoadString(instruction.VRegB_21c(), dex_offset));
+      UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
+      break;
+    }
+
+    case Instruction::CONST_STRING_JUMBO: {
+      current_block_->AddInstruction(new (arena_) HLoadString(instruction.VRegB_31c(), dex_offset));
+      UpdateLocal(instruction.VRegA_31c(), current_block_->GetLastInstruction());
+      break;
+    }
+
     default:
       return false;
   }
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 9ed19695a3..c812f6b416 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -196,6 +196,37 @@ class ClinitCheckSlowPathARM : public SlowPathCodeARM {
   DISALLOW_COPY_AND_ASSIGN(ClinitCheckSlowPathARM);
 };
 
+class LoadStringSlowPathARM : public SlowPathCodeARM {
+ public:
+  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}
+
+  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
+    LocationSummary* locations = instruction_->GetLocations();
+    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
+
+    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
+    __ Bind(GetEntryLabel());
+    codegen->SaveLiveRegisters(locations);
+
+    InvokeRuntimeCallingConvention calling_convention;
+    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(0));
+    __ LoadImmediate(calling_convention.GetRegisterAt(1), instruction_->GetStringIndex());
+    arm_codegen->InvokeRuntime(
+        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
+    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
+
+    codegen->RestoreLiveRegisters(locations);
+    __ b(GetExitLabel());
+  }
+
+ private:
+  HLoadString* const instruction_;
+
+  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
+};
+
+#undef __
+
 #undef __
 #define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
@@ -2270,5 +2301,25 @@ void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instructi
   }
 }
 
+void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
+  LocationSummary* locations =
+      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
+  locations->SetOut(Location::RequiresRegister());
+}
+
+void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
+  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
+  codegen_->AddSlowPath(slow_path);
+
+  Register out = load->GetLocations()->Out().As<Register>();
+  codegen_->LoadCurrentMethod(out);
+  __ LoadFromOffset(
+      kLoadWord, out, out, mirror::ArtMethod::DexCacheStringsOffset().Int32Value());
+  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
+  __ cmp(out, ShifterOperand(0));
+  __ b(slow_path->GetEntryLabel(), EQ);
+  __ Bind(slow_path->GetExitLabel());
+}
+
 }  // namespace arm
 }  // namespace art
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 5d504c697a..ec9af73a71 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -539,6 +539,7 @@ InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
   M(Div) \
   M(FloatConstant) \
   M(LoadClass) \
+  M(LoadString) \
   M(Neg) \
   M(NewArray) \
   M(ParallelMove) \
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 2d6d14fbc4..d41d5a00a8 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -183,6 +183,35 @@ class ClinitCheckSlowPathX86 : public SlowPathCodeX86 {
   DISALLOW_COPY_AND_ASSIGN(ClinitCheckSlowPathX86);
 };
 
+class LoadStringSlowPathX86 : public SlowPathCodeX86 {
+ public:
+  explicit LoadStringSlowPathX86(HLoadString* instruction) : instruction_(instruction) {}
+
+  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
+    LocationSummary* locations = instruction_->GetLocations();
+    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
+
+    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
+    __ Bind(GetEntryLabel());
+    codegen->SaveLiveRegisters(locations);
+
+    InvokeRuntimeCallingConvention calling_convention;
+    x86_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(0));
+    __ movl(calling_convention.GetRegisterAt(1), Immediate(instruction_->GetStringIndex()));
+    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pResolveString)));
+    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
+    x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
+    codegen->RestoreLiveRegisters(locations);
+
+    __ jmp(GetExitLabel());
+  }
+
+ private:
+  HLoadString* const instruction_;
+
+  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86);
+};
+
 #undef __
 #define __ reinterpret_cast<X86Assembler*>(GetAssembler())->
@@ -2321,5 +2350,24 @@ void InstructionCodeGeneratorX86::VisitStaticFieldSet(HStaticFieldSet* instructi
   }
 }
 
+void LocationsBuilderX86::VisitLoadString(HLoadString* load) {
+  LocationSummary* locations =
+      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
+  locations->SetOut(Location::RequiresRegister());
+}
+
+void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) {
+  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86(load);
+  codegen_->AddSlowPath(slow_path);
+
+  Register out = load->GetLocations()->Out().As<Register>();
+  codegen_->LoadCurrentMethod(out);
+  __ movl(out, Address(out, mirror::ArtMethod::DexCacheStringsOffset().Int32Value()));
+  __ movl(out, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
+  __ testl(out, out);
+  __ j(kEqual, slow_path->GetEntryLabel());
+  __ Bind(slow_path->GetExitLabel());
+}
+
 }  // namespace x86
 }  // namespace art
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index e8d34e3888..bda3520708 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -196,6 +196,36 @@ class ClinitCheckSlowPathX86_64 : public SlowPathCodeX86_64 {
   DISALLOW_COPY_AND_ASSIGN(ClinitCheckSlowPathX86_64);
 };
 
+class LoadStringSlowPathX86_64 : public SlowPathCodeX86_64 {
+ public:
+  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : instruction_(instruction) {}
+
+  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
+    LocationSummary* locations = instruction_->GetLocations();
+    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
+
+    CodeGeneratorX86_64* x64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
+    __ Bind(GetEntryLabel());
+    codegen->SaveLiveRegisters(locations);
+
+    InvokeRuntimeCallingConvention calling_convention;
+    x64_codegen->LoadCurrentMethod(CpuRegister(calling_convention.GetRegisterAt(0)));
+    __ movl(CpuRegister(calling_convention.GetRegisterAt(1)),
+            Immediate(instruction_->GetStringIndex()));
+    __ gs()->call(Address::Absolute(
+        QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pResolveString), true));
+    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
+    x64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
+    codegen->RestoreLiveRegisters(locations);
+    __ jmp(GetExitLabel());
+  }
+
+ private:
+  HLoadString* const instruction_;
+
+  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
+};
+
 #undef __
 #define __ reinterpret_cast<X86_64Assembler*>(GetAssembler())->
@@ -2270,5 +2300,24 @@ void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instru
   }
 }
 
+void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
+  LocationSummary* locations =
+      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
+  locations->SetOut(Location::RequiresRegister());
+}
+
+void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
+  SlowPathCodeX86_64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
+  codegen_->AddSlowPath(slow_path);
+
+  CpuRegister out = load->GetLocations()->Out().As<CpuRegister>();
+  codegen_->LoadCurrentMethod(CpuRegister(out));
+  __ movl(out, Address(out, mirror::ArtMethod::DexCacheStringsOffset().Int32Value()));
+  __ movl(out, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
+  __ testl(out, out);
+  __ j(kEqual, slow_path->GetEntryLabel());
+  __ Bind(slow_path->GetExitLabel());
+}
+
 }  // namespace x86_64
 }  // namespace art
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 86c36b8313..33bfe19081 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -489,10 +489,11 @@ class HBasicBlock : public ArenaObject {
   M(IntConstant, Constant) \
   M(InvokeStatic, Invoke) \
   M(InvokeVirtual, Invoke) \
-  M(LoadClass, Instruction) \
   M(LessThan, Condition) \
   M(LessThanOrEqual, Condition) \
+  M(LoadClass, Instruction) \
   M(LoadLocal, Instruction) \
+  M(LoadString, Instruction) \
   M(Local, Instruction) \
   M(LongConstant, Constant) \
   M(Mul, BinaryOperation) \
@@ -2022,7 +2023,8 @@ class HSuspendCheck : public HTemplateInstruction<0> {
 };
 
 // TODO: Make this class handle the case the load is null (dex cache
-// is null).
+// is null). This will be required when using it for other things than
+// initialization check.
 /**
  * Instruction to load a Class object.
  */
@@ -2064,6 +2066,34 @@ class HLoadClass : public HExpression<0> {
   DISALLOW_COPY_AND_ASSIGN(HLoadClass);
 };
 
+class HLoadString : public HExpression<0> {
+ public:
+  HLoadString(uint32_t string_index, uint32_t dex_pc)
+      : HExpression(Primitive::kPrimNot, SideEffects::None()),
+        string_index_(string_index),
+        dex_pc_(dex_pc) {}
+
+  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
+    return other->AsLoadString()->string_index_ == string_index_;
+  }
+
+  size_t ComputeHashCode() const OVERRIDE { return string_index_; }
+
+  uint32_t GetDexPc() const { return dex_pc_; }
+  uint32_t GetStringIndex() const { return string_index_; }
+
+  // TODO: Can we deopt or debug when we resolve a string?
+  bool NeedsEnvironment() const OVERRIDE { return false; }
+
+  DECLARE_INSTRUCTION(LoadString);
+
+ private:
+  const uint32_t string_index_;
+  const uint32_t dex_pc_;
+
+  DISALLOW_COPY_AND_ASSIGN(HLoadString);
+};
+
 // TODO: Pass this check to HInvokeStatic nodes.
 /**
  * Performs an initialization check on its Class object input.
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 5350dcb7b6..d3fe1c4afc 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -226,6 +226,10 @@ CompiledMethod* OptimizingCompiler::TryCompile(const DexFile::CodeItem* code_ite
     return nullptr;
   }
 
+  if (Compiler::IsPathologicalCase(*code_item, method_idx, dex_file)) {
+    return nullptr;
+  }
+
   DexCompilationUnit dex_compilation_unit(
       nullptr, class_loader, art::Runtime::Current()->GetClassLinker(), dex_file, code_item,
       class_def_idx, method_idx, access_flags,
diff --git a/compiler/optimizing/register_allocator.cc b/compiler/optimizing/register_allocator.cc
index f95c4a47e3..497e9b9c94 100644
--- a/compiler/optimizing/register_allocator.cc
+++ b/compiler/optimizing/register_allocator.cc
@@ -266,15 +266,17 @@ void RegisterAllocator::ProcessInstruction(HInstruction* instruction) {
     size_t first_register_use = current->FirstRegisterUse();
     if (first_register_use != kNoLifetime) {
       LiveInterval* split = Split(current, first_register_use - 1);
-      // Don't add direclty to `unhandled`, it needs to be sorted and the start
+      // Don't add directly to `unhandled`, it needs to be sorted and the start
       // of this new interval might be after intervals already in the list.
       AddSorted(&unhandled, split);
     } else {
      // Nothing to do, we won't allocate a register for this value.
    }
  } else {
-    DCHECK(unhandled.IsEmpty() || current->StartsBeforeOrAt(unhandled.Peek()));
-    unhandled.Add(current);
+    // Don't add directly to `unhandled`, temp or safepoint intervals
+    // for this instruction may have been added, and those can be
+    // processed first.
+    AddSorted(&unhandled, current);
   }
 }
 
@@ -973,7 +975,14 @@ void RegisterAllocator::ConnectSiblings(LiveInterval* interval) {
     HInstruction* safepoint = safepoints_.Get(i);
     size_t position = safepoint->GetLifetimePosition();
     LocationSummary* locations = safepoint->GetLocations();
-    if (!current->Covers(position)) continue;
+    if (!current->Covers(position)) {
+      continue;
+    }
+    if (interval->GetStart() == position) {
+      // The safepoint is for this instruction, so the location of the instruction
+      // does not need to be saved.
+      continue;
+    }
 
     if ((current->GetType() == Primitive::kPrimNot) && current->GetParent()->HasSpillSlot()) {
       locations->SetStackBit(current->GetParent()->GetSpillSlot() / kVRegSize);
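One last observation: the InstructionDataEquals and ComputeHashCode overrides on HLoadString in nodes.h exist so that a hash-based value-numbering pass can fold duplicate loads of the same string index. The sketch below is a minimal model of that idea, assuming a set keyed by the same hash and equality the node defines; the LoadString struct and the main driver are illustrative, not ART code.

#include <cstdint>
#include <unordered_set>

// Two loads of the same string index are interchangeable: they compare equal
// and hash alike, mirroring InstructionDataEquals and ComputeHashCode.
struct LoadString {
  uint32_t string_index;
  bool operator==(const LoadString& other) const {
    return string_index == other.string_index;
  }
};

struct LoadStringHash {
  size_t operator()(const LoadString& load) const { return load.string_index; }
};

int main() {
  std::unordered_set<LoadString, LoadStringHash> seen;
  seen.insert({42});
  // A second HLoadString with index 42 hits the set, so a value-numbering
  // pass could reuse the first node and drop the redundant dex-cache load.
  return seen.count({42}) == 1 ? 0 : 1;
}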