Diffstat (limited to 'compiler/optimizing/code_generator_x86.cc')
-rw-r--r--  compiler/optimizing/code_generator_x86.cc  425
1 file changed, 400 insertions, 25 deletions
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 6bf045885d6..82d1fda8789 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -51,6 +51,9 @@ static constexpr int kC2ConditionMask = 0x400;
 
 static constexpr int kFakeReturnRegister = Register(8);
 
+static constexpr int64_t kDoubleNaN = INT64_C(0x7FF8000000000000);
+static constexpr int32_t kFloatNaN = INT32_C(0x7FC00000);
+
 // NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
 #define __ down_cast<X86Assembler*>(codegen->GetAssembler())->  // NOLINT
 #define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86PointerSize, x).Int32Value()
@@ -3802,6 +3805,301 @@ void InstructionCodeGeneratorX86::VisitRem(HRem* rem) {
   }
 }
 
+static void CreateMinMaxLocations(ArenaAllocator* allocator, HBinaryOperation* minmax) {
+  LocationSummary* locations = new (allocator) LocationSummary(minmax);
+  switch (minmax->GetResultType()) {
+    case DataType::Type::kInt32:
+      locations->SetInAt(0, Location::RequiresRegister());
+      locations->SetInAt(1, Location::RequiresRegister());
+      locations->SetOut(Location::SameAsFirstInput());
+      break;
+    case DataType::Type::kInt64:
+      locations->SetInAt(0, Location::RequiresRegister());
+      locations->SetInAt(1, Location::RequiresRegister());
+      locations->SetOut(Location::SameAsFirstInput());
+      // Register to use to perform a long subtract to set cc.
+      locations->AddTemp(Location::RequiresRegister());
+      break;
+    case DataType::Type::kFloat32:
+      locations->SetInAt(0, Location::RequiresFpuRegister());
+      locations->SetInAt(1, Location::RequiresFpuRegister());
+      locations->SetOut(Location::SameAsFirstInput());
+      locations->AddTemp(Location::RequiresRegister());
+      break;
+    case DataType::Type::kFloat64:
+      locations->SetInAt(0, Location::RequiresFpuRegister());
+      locations->SetInAt(1, Location::RequiresFpuRegister());
+      locations->SetOut(Location::SameAsFirstInput());
+      break;
+    default:
+      LOG(FATAL) << "Unexpected type for HMinMax " << minmax->GetResultType();
+  }
+}
+
+void InstructionCodeGeneratorX86::GenerateMinMaxInt(LocationSummary* locations,
+                                                    bool is_min,
+                                                    DataType::Type type) {
+  Location op1_loc = locations->InAt(0);
+  Location op2_loc = locations->InAt(1);
+
+  // Shortcut for same input locations.
+  if (op1_loc.Equals(op2_loc)) {
+    // Can return immediately, as op1_loc == out_loc.
+    // Note: if we ever support separate registers, e.g., output into memory, we need to check for
+    //       a copy here.
+    DCHECK(locations->Out().Equals(op1_loc));
+    return;
+  }
+
+  if (type == DataType::Type::kInt64) {
+    // Need to perform a subtract to get the sign right.
+    // op1 is already in the same location as the output.
+    Location output = locations->Out();
+    Register output_lo = output.AsRegisterPairLow<Register>();
+    Register output_hi = output.AsRegisterPairHigh<Register>();
+
+    Register op2_lo = op2_loc.AsRegisterPairLow<Register>();
+    Register op2_hi = op2_loc.AsRegisterPairHigh<Register>();
+
+    // The comparison is performed by subtracting the second operand from
+    // the first operand and then setting the status flags in the same
+    // manner as the SUB instruction."
+    __ cmpl(output_lo, op2_lo);
+
+    // Now use a temp and the borrow to finish the subtraction of op2_hi.
+    Register temp = locations->GetTemp(0).AsRegister<Register>();
+    __ movl(temp, output_hi);
+    __ sbbl(temp, op2_hi);
+
+    // Now the condition code is correct.
+    Condition cond = is_min ? Condition::kGreaterEqual : Condition::kLess;
+    __ cmovl(cond, output_lo, op2_lo);
+    __ cmovl(cond, output_hi, op2_hi);
+  } else {
+    DCHECK_EQ(type, DataType::Type::kInt32);
+    Register out = locations->Out().AsRegister<Register>();
+    Register op2 = op2_loc.AsRegister<Register>();
+
+    //  (out := op1)
+    //  out <=? op2
+    //  if out is min jmp done
+    //  out := op2
+    // done:
+
+    __ cmpl(out, op2);
+    Condition cond = is_min ? Condition::kGreater : Condition::kLess;
+    __ cmovl(cond, out, op2);
+  }
+}
+
+void InstructionCodeGeneratorX86::GenerateMinMaxFP(LocationSummary* locations,
+                                                   bool is_min,
+                                                   DataType::Type type) {
+  Location op1_loc = locations->InAt(0);
+  Location op2_loc = locations->InAt(1);
+  Location out_loc = locations->Out();
+  XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
+
+  // Shortcut for same input locations.
+  if (op1_loc.Equals(op2_loc)) {
+    DCHECK(out_loc.Equals(op1_loc));
+    return;
+  }
+
+  //  (out := op1)
+  //  out <=? op2
+  //  if Nan jmp Nan_label
+  //  if out is min jmp done
+  //  if op2 is min jmp op2_label
+  //  handle -0/+0
+  //  jmp done
+  // Nan_label:
+  //  out := NaN
+  // op2_label:
+  //  out := op2
+  // done:
+  //
+  // This removes one jmp, but needs to copy one input (op1) to out.
+  //
+  // TODO: This is straight from Quick (except literal pool). Make NaN an out-of-line slowpath?
+
+  XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();
+
+  NearLabel nan, done, op2_label;
+  if (type == DataType::Type::kFloat64) {
+    __ ucomisd(out, op2);
+  } else {
+    DCHECK_EQ(type, DataType::Type::kFloat32);
+    __ ucomiss(out, op2);
+  }
+
+  __ j(Condition::kParityEven, &nan);
+
+  __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
+  __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);
+
+  // Handle 0.0/-0.0.
+  if (is_min) {
+    if (type == DataType::Type::kFloat64) {
+      __ orpd(out, op2);
+    } else {
+      __ orps(out, op2);
+    }
+  } else {
+    if (type == DataType::Type::kFloat64) {
+      __ andpd(out, op2);
+    } else {
+      __ andps(out, op2);
+    }
+  }
+  __ jmp(&done);
+
+  // NaN handling.
+  __ Bind(&nan);
+  if (type == DataType::Type::kFloat64) {
+    // TODO: Use a constant from the constant table (requires extra input).
+    __ LoadLongConstant(out, kDoubleNaN);
+  } else {
+    Register constant = locations->GetTemp(0).AsRegister<Register>();
+    __ movl(constant, Immediate(kFloatNaN));
+    __ movd(out, constant);
+  }
+  __ jmp(&done);
+
+  // out := op2;
+  __ Bind(&op2_label);
+  if (type == DataType::Type::kFloat64) {
+    __ movsd(out, op2);
+  } else {
+    __ movss(out, op2);
+  }
+
+  // Done.
+  __ Bind(&done);
+}
+
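For reference, the following stand-alone C++ sketch spells out the semantics that GenerateMinMaxInt and GenerateMinMaxFP above are reproducing: plain min/max for integers (done branchlessly with cmpl/cmovl, plus a cmpl + sbbl borrow chain to compare int64 register pairs on x86-32), and Java-style min/max for floats, where NaN wins and -0.0 is treated as smaller than +0.0. The function names and the std::memcpy bit-copies below are illustrative only and are not part of the ART sources.

    #include <cmath>
    #include <cstdint>
    #include <cstring>

    // Integer min: the flags set by cmpl drive a single cmovl.
    int32_t MinInt32(int32_t a, int32_t b) { return (a <= b) ? a : b; }

    // Float min with Java Math.min semantics; max swaps the comparisons and uses
    // AND instead of OR for the +/-0.0 case (andps/andpd in the code above).
    float MinFloat(float a, float b) {
      if (std::isnan(a) || std::isnan(b)) {
        return std::nanf("");                  // kParityEven path: load kFloatNaN.
      }
      if (a != b) {
        return (a < b) ? a : b;                // kAbove/kBelow paths.
      }
      // a == b: only -0.0f vs +0.0f remains. OR-ing the raw bits keeps the sign
      // bit set if either operand is -0.0f, which is what orps achieves.
      uint32_t ua;
      uint32_t ub;
      std::memcpy(&ua, &a, sizeof(ua));
      std::memcpy(&ub, &b, sizeof(ub));
      uint32_t ur = ua | ub;
      float r;
      std::memcpy(&r, &ur, sizeof(r));
      return r;
    }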
+void InstructionCodeGeneratorX86::GenerateMinMax(HBinaryOperation* minmax, bool is_min) {
+  DataType::Type type = minmax->GetResultType();
+  switch (type) {
+    case DataType::Type::kInt32:
+    case DataType::Type::kInt64:
+      GenerateMinMaxInt(minmax->GetLocations(), is_min, type);
+      break;
+    case DataType::Type::kFloat32:
+    case DataType::Type::kFloat64:
+      GenerateMinMaxFP(minmax->GetLocations(), is_min, type);
+      break;
+    default:
+      LOG(FATAL) << "Unexpected type for HMinMax " << type;
+  }
+}
+
+void LocationsBuilderX86::VisitMin(HMin* min) {
+  CreateMinMaxLocations(GetGraph()->GetAllocator(), min);
+}
+
+void InstructionCodeGeneratorX86::VisitMin(HMin* min) {
+  GenerateMinMax(min, /*is_min*/ true);
+}
+
+void LocationsBuilderX86::VisitMax(HMax* max) {
+  CreateMinMaxLocations(GetGraph()->GetAllocator(), max);
+}
+
+void InstructionCodeGeneratorX86::VisitMax(HMax* max) {
+  GenerateMinMax(max, /*is_min*/ false);
+}
+
+void LocationsBuilderX86::VisitAbs(HAbs* abs) {
+  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
+  switch (abs->GetResultType()) {
+    case DataType::Type::kInt32:
+      locations->SetInAt(0, Location::RegisterLocation(EAX));
+      locations->SetOut(Location::SameAsFirstInput());
+      locations->AddTemp(Location::RegisterLocation(EDX));
+      break;
+    case DataType::Type::kInt64:
+      locations->SetInAt(0, Location::RequiresRegister());
+      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
+      locations->AddTemp(Location::RequiresRegister());
+      break;
+    case DataType::Type::kFloat32:
+      locations->SetInAt(0, Location::RequiresFpuRegister());
+      locations->SetOut(Location::SameAsFirstInput());
+      locations->AddTemp(Location::RequiresFpuRegister());
+      locations->AddTemp(Location::RequiresRegister());
+      break;
+    case DataType::Type::kFloat64:
+      locations->SetInAt(0, Location::RequiresFpuRegister());
+      locations->SetOut(Location::SameAsFirstInput());
+      locations->AddTemp(Location::RequiresFpuRegister());
+      break;
+    default:
+      LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
+  }
+}
+
+void InstructionCodeGeneratorX86::VisitAbs(HAbs* abs) {
+  LocationSummary* locations = abs->GetLocations();
+  switch (abs->GetResultType()) {
+    case DataType::Type::kInt32: {
+      Register out = locations->Out().AsRegister<Register>();
+      DCHECK_EQ(out, EAX);
+      Register temp = locations->GetTemp(0).AsRegister<Register>();
+      DCHECK_EQ(temp, EDX);
+      // Sign extend EAX into EDX.
+      __ cdq();
+      // XOR EAX with sign.
+      __ xorl(EAX, EDX);
+      // Subtract out sign to correct.
+      __ subl(EAX, EDX);
+      // The result is in EAX.
+      break;
+    }
+    case DataType::Type::kInt64: {
+      Location input = locations->InAt(0);
+      Register input_lo = input.AsRegisterPairLow<Register>();
+      Register input_hi = input.AsRegisterPairHigh<Register>();
+      Location output = locations->Out();
+      Register output_lo = output.AsRegisterPairLow<Register>();
+      Register output_hi = output.AsRegisterPairHigh<Register>();
+      Register temp = locations->GetTemp(0).AsRegister<Register>();
+      // Compute the sign into the temporary.
+      __ movl(temp, input_hi);
+      __ sarl(temp, Immediate(31));
+      // Store the sign into the output.
+      __ movl(output_lo, temp);
+      __ movl(output_hi, temp);
+      // XOR the input to the output.
+      __ xorl(output_lo, input_lo);
+      __ xorl(output_hi, input_hi);
+      // Subtract the sign.
+      __ subl(output_lo, temp);
+      __ sbbl(output_hi, temp);
+      break;
+    }
+    case DataType::Type::kFloat32: {
+      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
+      XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
+      Register constant = locations->GetTemp(1).AsRegister<Register>();
+      __ movl(constant, Immediate(INT32_C(0x7FFFFFFF)));
+      __ movd(temp, constant);
+      __ andps(out, temp);
+      break;
+    }
+    case DataType::Type::kFloat64: {
+      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
+      XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
+      // TODO: Use a constant from the constant table (requires extra input).
+      __ LoadLongConstant(temp, INT64_C(0x7FFFFFFFFFFFFFFF));
+      __ andpd(out, temp);
+      break;
+    }
+    default:
+      LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
+  }
+}
+
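The integer paths of VisitAbs above use the classic branchless absolute-value sequence. A minimal C++ sketch of the same computation (illustrative only, assuming the usual arithmetic right shift on signed values; these helpers do not exist in ART):

    #include <cstdint>

    int32_t AbsInt32(int32_t x) {
      int32_t sign = x >> 31;      // cdq: EDX = 0 for x >= 0, 0xFFFFFFFF for x < 0.
      return (x ^ sign) - sign;    // xorl EAX, EDX; subl EAX, EDX.
    }

    int64_t AbsInt64(int64_t x) {
      int64_t sign = x >> 63;      // movl temp, input_hi; sarl temp, 31.
      return (x ^ sign) - sign;    // xorl on both halves, then subl/sbbl.
    }

As with the generated code (and Java's Math.abs), AbsInt32 of the most negative value wraps around to itself; the float paths instead just clear the sign bit by AND-ing with 0x7FFFFFFF / 0x7FFFFFFFFFFFFFFF.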
 void LocationsBuilderX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
   LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
   switch (instruction->GetType()) {
@@ -4534,6 +4832,15 @@ void CodeGeneratorX86::GenerateStaticOrDirectCall(
     case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
       __ movl(temp.AsRegister<Register>(), Immediate(invoke->GetMethodAddress()));
       break;
+    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
+      Register base_reg = GetInvokeStaticOrDirectExtraParameter(invoke,
+                                                                temp.AsRegister<Register>());
+      __ movl(temp.AsRegister<Register>(), Address(base_reg, kDummy32BitOffset));
+      RecordBootImageRelRoPatch(
+          invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress(),
+          GetBootImageOffset(invoke));
+      break;
+    }
     case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
       Register base_reg = GetInvokeStaticOrDirectExtraParameter(invoke,
                                                                 temp.AsRegister<Register>());
@@ -4595,6 +4902,13 @@ void CodeGeneratorX86::GenerateVirtualCall(
   RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
 }
 
+void CodeGeneratorX86::RecordBootImageRelRoPatch(HX86ComputeBaseMethodAddress* method_address,
+                                                 uint32_t boot_image_offset) {
+  boot_image_method_patches_.emplace_back(
+      method_address, /* target_dex_file */ nullptr, boot_image_offset);
+  __ Bind(&boot_image_method_patches_.back().label);
+}
+
 void CodeGeneratorX86::RecordBootImageMethodPatch(HInvokeStaticOrDirect* invoke) {
   DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
   HX86ComputeBaseMethodAddress* method_address =
@@ -4664,6 +4978,14 @@ inline void CodeGeneratorX86::EmitPcRelativeLinkerPatches(
   }
 }
 
+linker::LinkerPatch DataBimgRelRoPatchAdapter(size_t literal_offset,
+                                              const DexFile* target_dex_file,
+                                              uint32_t pc_insn_offset,
+                                              uint32_t boot_image_offset) {
+  DCHECK(target_dex_file == nullptr);  // Unused for DataBimgRelRoPatch(), should be null.
+  return linker::LinkerPatch::DataBimgRelRoPatch(literal_offset, pc_insn_offset, boot_image_offset);
+}
+
 void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
   DCHECK(linker_patches->empty());
   size_t size =
@@ -4682,11 +5004,10 @@ void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linke
     EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
         boot_image_string_patches_, linker_patches);
   } else {
-    DCHECK(boot_image_method_patches_.empty());
-    EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeClassTablePatch>(
-        boot_image_type_patches_, linker_patches);
-    EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringInternTablePatch>(
-        boot_image_string_patches_, linker_patches);
+    EmitPcRelativeLinkerPatches<DataBimgRelRoPatchAdapter>(
+        boot_image_method_patches_, linker_patches);
+    DCHECK(boot_image_type_patches_.empty());
+    DCHECK(boot_image_string_patches_.empty());
   }
   EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
       method_bss_entry_patches_, linker_patches);
@@ -6055,7 +6376,7 @@ HLoadClass::LoadKind CodeGeneratorX86::GetSupportedLoadClassKind(
     case HLoadClass::LoadKind::kReferrersClass:
       break;
     case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
-    case HLoadClass::LoadKind::kBootImageClassTable:
+    case HLoadClass::LoadKind::kBootImageRelRo:
     case HLoadClass::LoadKind::kBssEntry:
       DCHECK(!Runtime::Current()->UseJitCompilation());
       break;
@@ -6093,7 +6414,7 @@ void LocationsBuilderX86::VisitLoadClass(HLoadClass* cls) {
 
   if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
       load_kind == HLoadClass::LoadKind::kBootImageLinkTimePcRelative ||
-      load_kind == HLoadClass::LoadKind::kBootImageClassTable ||
+      load_kind == HLoadClass::LoadKind::kBootImageRelRo ||
       load_kind == HLoadClass::LoadKind::kBssEntry) {
     locations->SetInAt(0, Location::RequiresRegister());
   }
@@ -6169,17 +6490,12 @@ void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFE
       __ movl(out, Immediate(address));
       break;
     }
-    case HLoadClass::LoadKind::kBootImageClassTable: {
+    case HLoadClass::LoadKind::kBootImageRelRo: {
       DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
       Register method_address = locations->InAt(0).AsRegister<Register>();
       __ movl(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
-      codegen_->RecordBootImageTypePatch(cls);
-      // Extract the reference from the slot data, i.e. clear the hash bits.
-      int32_t masked_hash = ClassTable::TableSlot::MaskHash(
-          ComputeModifiedUtf8Hash(cls->GetDexFile().StringByTypeIdx(cls->GetTypeIndex())));
-      if (masked_hash != 0) {
-        __ subl(out, Immediate(masked_hash));
-      }
+      codegen_->RecordBootImageRelRoPatch(cls->InputAt(0)->AsX86ComputeBaseMethodAddress(),
+                                          codegen_->GetBootImageOffset(cls));
       break;
     }
     case HLoadClass::LoadKind::kBssEntry: {
@@ -6255,11 +6571,31 @@ void InstructionCodeGeneratorX86::GenerateClassInitializationCheck(
   // No need for memory fence, thanks to the X86 memory model.
 }
 
+void InstructionCodeGeneratorX86::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
+                                                                    Register temp) {
+  uint32_t path_to_root = check->GetBitstringPathToRoot();
+  uint32_t mask = check->GetBitstringMask();
+  DCHECK(IsPowerOfTwo(mask + 1));
+  size_t mask_bits = WhichPowerOf2(mask + 1);
+
+  if (mask_bits == 16u) {
+    // Compare the bitstring in memory.
+    __ cmpw(Address(temp, mirror::Class::StatusOffset()), Immediate(path_to_root));
+  } else {
+    // /* uint32_t */ temp = temp->status_
+    __ movl(temp, Address(temp, mirror::Class::StatusOffset()));
+    // Compare the bitstring bits using SUB.
+    __ subl(temp, Immediate(path_to_root));
+    // Shift out bits that do not contribute to the comparison.
+    __ shll(temp, Immediate(32u - mask_bits));
+  }
+}
+
 HLoadString::LoadKind CodeGeneratorX86::GetSupportedLoadStringKind(
     HLoadString::LoadKind desired_string_load_kind) {
   switch (desired_string_load_kind) {
     case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
-    case HLoadString::LoadKind::kBootImageInternTable:
+    case HLoadString::LoadKind::kBootImageRelRo:
     case HLoadString::LoadKind::kBssEntry:
       DCHECK(!Runtime::Current()->UseJitCompilation());
       break;
@@ -6278,7 +6614,7 @@ void LocationsBuilderX86::VisitLoadString(HLoadString* load) {
   LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
   HLoadString::LoadKind load_kind = load->GetLoadKind();
   if (load_kind == HLoadString::LoadKind::kBootImageLinkTimePcRelative ||
-      load_kind == HLoadString::LoadKind::kBootImageInternTable ||
+      load_kind == HLoadString::LoadKind::kBootImageRelRo ||
       load_kind == HLoadString::LoadKind::kBssEntry) {
     locations->SetInAt(0, Location::RequiresRegister());
   }
@@ -6332,11 +6668,12 @@ void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) NO_THREAD_S
       __ movl(out, Immediate(address));
       return;
     }
-    case HLoadString::LoadKind::kBootImageInternTable: {
+    case HLoadString::LoadKind::kBootImageRelRo: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
       Register method_address = locations->InAt(0).AsRegister<Register>();
       __ movl(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
-      codegen_->RecordBootImageStringPatch(load);
+      codegen_->RecordBootImageRelRoPatch(load->InputAt(0)->AsX86ComputeBaseMethodAddress(),
+                                          codegen_->GetBootImageOffset(load));
       return;
     }
     case HLoadString::LoadKind::kBssEntry: {
@@ -6418,8 +6755,8 @@ static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
   return 0;
 }
 
-// Interface case has 3 temps, one for holding the number of interfaces, one for the current
-// interface pointer, one for loading the current interface.
+// Interface case has 2 temps, one for holding the number of interfaces, one for the current
+// interface pointer, the current interface is compared in memory.
 // The other checks have one temp for loading the object's class.
 static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
   if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
@@ -6447,6 +6784,8 @@ void LocationsBuilderX86::VisitInstanceOf(HInstanceOf* instruction) {
     case TypeCheckKind::kInterfaceCheck:
       call_kind = LocationSummary::kCallOnSlowPath;
       break;
+    case TypeCheckKind::kBitstringCheck:
+      break;
   }
 
   LocationSummary* locations =
@@ -6455,7 +6794,13 @@ void LocationsBuilderX86::VisitInstanceOf(HInstanceOf* instruction) {
     locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
   }
   locations->SetInAt(0, Location::RequiresRegister());
-  locations->SetInAt(1, Location::Any());
+  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
+    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
+    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
+    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
+  } else {
+    locations->SetInAt(1, Location::Any());
+  }
   // Note that TypeCheckSlowPathX86 uses this "out" register too.
   locations->SetOut(Location::RequiresRegister());
   // When read barriers are enabled, we need a temporary register for some cases.
@@ -6676,6 +7021,21 @@ void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
       }
       break;
     }
+
+    case TypeCheckKind::kBitstringCheck: {
+      // /* HeapReference<Class> */ temp = obj->klass_
+      GenerateReferenceLoadTwoRegisters(instruction,
+                                        out_loc,
+                                        obj_loc,
+                                        class_offset,
+                                        kWithoutReadBarrier);
+
+      GenerateBitstringTypeCheckCompare(instruction, out);
+      __ j(kNotEqual, &zero);
+      __ movl(out, Immediate(1));
+      __ jmp(&done);
+      break;
+    }
   }
 
   if (zero.IsLinked()) {
@@ -6702,12 +7062,14 @@ void LocationsBuilderX86::VisitCheckCast(HCheckCast* instruction) {
     // Require a register for the interface check since there is a loop that compares the class to
     // a memory address.
     locations->SetInAt(1, Location::RequiresRegister());
+  } else if (type_check_kind == TypeCheckKind::kBitstringCheck) {
+    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
+    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
+    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
   } else {
     locations->SetInAt(1, Location::Any());
   }
-  // Note that TypeCheckSlowPathX86 uses this "temp" register too.
-  locations->AddTemp(Location::RequiresRegister());
-  // When read barriers are enabled, we need an additional temporary register for some cases.
+  // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathX86.
   locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
 }
 
@@ -6921,6 +7283,19 @@ void InstructionCodeGeneratorX86::VisitCheckCast(HCheckCast* instruction) {
       __ MaybeUnpoisonHeapReference(cls.AsRegister<Register>());
       break;
     }
+
+    case TypeCheckKind::kBitstringCheck: {
+      // /* HeapReference<Class> */ temp = obj->klass_
+      GenerateReferenceLoadTwoRegisters(instruction,
+                                        temp_loc,
+                                        obj_loc,
+                                        class_offset,
+                                        kWithoutReadBarrier);
+
+      GenerateBitstringTypeCheckCompare(instruction, temp);
+      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
+      break;
+    }
   }
 
   __ Bind(&done);
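For context on the new kBitstringCheck paths above: GenerateBitstringTypeCheckCompare reduces a subtype test to comparing a few bits of the class's status word against a compile-time constant. A rough C++ sketch of the predicate being evaluated, with simplified, illustrative names (not the actual runtime declarations):

    #include <cstdint>

    // A class is a subtype of the target iff the bits of its bitstring selected
    // by `mask` equal the target's `path_to_root`.
    bool BitstringIsSubtypeOf(uint32_t status, uint32_t path_to_root, uint32_t mask) {
      return (status & mask) == path_to_root;
    }

The emitted code avoids a separate AND: when the mask covers exactly 16 bits it compares the bitstring in memory with a single cmpw, otherwise it subtracts path_to_root and shifts the result left by 32 - mask_bits so that the zero flag ends up set exactly when the masked bits matched; VisitInstanceOf and VisitCheckCast then simply branch on kNotEqual.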
