
Searched refs:GetVIXLAssembler (Results 1 – 21 of 21) sorted by relevance

/art/compiler/optimizing/
jit_patches_arm64.cc:30 return GetVIXLAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(value); in DeduplicateUint32Literal()
39 return GetVIXLAssembler()->CreateLiteralDestroyedWithPool<uint64_t>(value); in DeduplicateUint64Literal()
86 return GetVIXLAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ 0u); in DeduplicateJitStringLiteral()
99 return GetVIXLAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ 0u); in DeduplicateJitClassLiteral()
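The four jit_patches_arm64.cc hits above share one pattern: every 32- or 64-bit constant the JIT needs is backed by a pool literal created through the VIXL macro assembler, and repeated requests are deduplicated against a cache. A minimal sketch of that pattern, with a hypothetical LiteralCache class standing in for the real per-type tables:

```cpp
#include <cstdint>
#include <map>

#include "aarch64/macro-assembler-aarch64.h"  // VIXL

using vixl::aarch64::Literal;
using vixl::aarch64::MacroAssembler;

// Hypothetical cache mapping a constant to its pool literal so repeated
// uses share one pool entry; the VIXL literal pool owns the literal and
// destroys it when the pool is emitted (hence "DestroyedWithPool").
class LiteralCache {
 public:
  explicit LiteralCache(MacroAssembler* masm) : masm_(masm) {}

  Literal<uint32_t>* DeduplicateUint32(uint32_t value) {
    auto it = cache_.find(value);
    if (it != cache_.end()) return it->second;
    Literal<uint32_t>* literal =
        masm_->CreateLiteralDestroyedWithPool<uint32_t>(value);
    cache_.emplace(value, literal);
    return literal;
  }

 private:
  MacroAssembler* masm_;
  std::map<uint32_t, Literal<uint32_t>*> cache_;
};
```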
jit_patches_arm64.h:89 vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler() function
intrinsics_arm64.cc:78 MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() { in GetVIXLAssembler() function in art::arm64::IntrinsicCodeGeneratorARM64
79 return codegen_->GetVIXLAssembler(); in GetVIXLAssembler()
90 #define __ codegen->GetVIXLAssembler()->
210 MoveFPToInt(invoke->GetLocations(), /* is64bit= */ true, GetVIXLAssembler()); in VisitDoubleDoubleToRawLongBits()
213 MoveIntToFP(invoke->GetLocations(), /* is64bit= */ true, GetVIXLAssembler()); in VisitDoubleLongBitsToDouble()
224 MoveFPToInt(invoke->GetLocations(), /* is64bit= */ false, GetVIXLAssembler()); in VisitFloatFloatToRawIntBits()
227 MoveIntToFP(invoke->GetLocations(), /* is64bit= */ false, GetVIXLAssembler()); in VisitFloatIntBitsToFloat()
299 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetVIXLAssembler()); in VisitIntegerReverseBytes()
307 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt64, GetVIXLAssembler()); in VisitLongReverseBytes()
315 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetVIXLAssembler()); in VisitShortReverseBytes()
[all …]
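The `#define __ codegen->GetVIXLAssembler()->` at intrinsics_arm64.cc:90 is ART's standard shorthand: emission code is written as `__ Insn(...)`, which expands to a call through the VIXL macro assembler. A minimal sketch with a hypothetical Codegen holder in place of the real code generator classes:

```cpp
#include "aarch64/macro-assembler-aarch64.h"  // VIXL

// Hypothetical stand-in for ART's codegen classes, which all expose
// GetVIXLAssembler() as seen throughout the results above.
struct Codegen {
  vixl::aarch64::MacroAssembler* masm;
  vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return masm; }
};

// With the macro, emission code reads like an assembly listing.
#define __ codegen->GetVIXLAssembler()->

static void EmitReverseBytes(Codegen* codegen,
                             vixl::aarch64::Register out,
                             vixl::aarch64::Register in) {
  __ Rev(out, in);  // expands to codegen->GetVIXLAssembler()->Rev(out, in);
}

#undef __
```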
code_generator_arm_vixl.cc:110 #define __ down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()-> // NOLINT
209 UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()); in SaveContiguousSRegisterList()
257 UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()); in RestoreContiguousSRegisterList()
986 UseScratchRegisterScope temps(arm_codegen->GetVIXLAssembler()); in EmitNativeCode()
1231 UseScratchRegisterScope temps(codegen->GetVIXLAssembler()); in GenerateLongDataProc()
1385 UseScratchRegisterScope temps(codegen->GetVIXLAssembler()); in GenerateLongTestConstant()
1437 ExactAssemblyScope guard(codegen->GetVIXLAssembler(), in GenerateLongTestConstant()
1513 ExactAssemblyScope guard(codegen->GetVIXLAssembler(), in GenerateLongTest()
1539 UseScratchRegisterScope temps(codegen->GetVIXLAssembler()); in GenerateLongTest()
1592 ExactAssemblyScope guard(codegen->GetVIXLAssembler(), in GenerateConditionGeneric()
[all …]
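The code_generator_arm_vixl.cc hits revolve around two VIXL scopes: UseScratchRegisterScope borrows a temporary register and releases it automatically, while ExactAssemblyScope pins down exactly how many bytes are emitted so the macro assembler cannot interleave pool data or extra instructions. A minimal sketch of both, assuming a plain vixl::aarch32::MacroAssembler:

```cpp
#include "aarch32/macro-assembler-aarch32.h"  // VIXL

using vixl::CodeBufferCheckScope;
using vixl::ExactAssemblyScope;
using namespace vixl::aarch32;

void IncrementWord(MacroAssembler* masm, Register base) {
  {
    // Borrow a scratch register; it is returned when the scope dies.
    UseScratchRegisterScope temps(masm);
    Register temp = temps.Acquire();
    masm->Ldr(temp, MemOperand(base));
    masm->Add(temp, temp, 1);
    masm->Str(temp, MemOperand(base));
  }
  {
    // Exactly one 16-bit Thumb instruction may be emitted here, so any PC
    // recorded for this point maps to that instruction and nothing else.
    ExactAssemblyScope guard(masm, k16BitT32InstructionSizeInBytes,
                             CodeBufferCheckScope::kExactSize);
    masm->nop();  // low-level (assembler) instructions only inside the scope
  }
}
```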
intrinsics_arm64.h:78 vixl::aarch64::MacroAssembler* GetVIXLAssembler();
code_generator_arm64.cc:176 #define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()-> // NOLINT
560 EmissionCheckScope scope(codegen->GetVIXLAssembler(), in EmitTable()
860 UseScratchRegisterScope temps(arm64_codegen->GetVIXLAssembler()); in EmitNativeCode()
1050 #define __ GetVIXLAssembler()->
1154 vixl_temps_.Open(GetVIXLAssembler()); in PrepareForEmitNativeCode()
1212 MacroAssembler* masm = GetVIXLAssembler(); in GenerateMethodEntryExitHook()
1294 MacroAssembler* masm = GetVIXLAssembler(); in MaybeIncrementHotness()
1333 MacroAssembler* masm = GetVIXLAssembler(); in GenerateFrameEntry()
1393 ExactAssemblyScope eas(GetVIXLAssembler(), in GenerateFrameEntry()
1528 UseScratchRegisterScope temps(GetVIXLAssembler()); in MarkGCCard()
[all …]
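On arm64 the same ideas appear with typed acquire helpers and EmissionCheckScope, which blocks literal/veneer pool emission for its duration so a short sequence stays contiguous. A minimal sketch, loosely modeled on the MarkGCCard hit at line 1528 (the shift amount and card-table layout here are hypothetical):

```cpp
#include "aarch64/macro-assembler-aarch64.h"  // VIXL

using vixl::EmissionCheckScope;
using namespace vixl::aarch64;

void MarkCardSketch(MacroAssembler* masm, Register object, Register card_base) {
  UseScratchRegisterScope temps(masm);
  Register temp = temps.AcquireX();   // 64-bit scratch
  Register wtemp = temps.AcquireW();  // 32-bit scratch

  // Pools are blocked while the scope is alive, so these instructions
  // stay contiguous (and within the declared maximum size).
  EmissionCheckScope scope(masm, 3 * kInstructionSize);
  masm->Lsr(temp, object, 10);  // hypothetical card shift
  masm->Mov(wtemp, 1);
  masm->Strb(wtemp, MemOperand(card_base, temp));
}
```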
intrinsics_arm_vixl.cc:42 #define __ assembler->GetVIXLAssembler()->
405 ExactAssemblyScope it_scope(assembler->GetVIXLAssembler(), in VisitMathRoundFloat()
614 ExactAssemblyScope aas(assembler->GetVIXLAssembler(), in VisitStringCompareTo()
636 ExactAssemblyScope aas(assembler->GetVIXLAssembler(), in VisitStringCompareTo()
687 UseScratchRegisterScope temps(assembler->GetVIXLAssembler()); in GenerateStringCompareToLoop()
785 ExactAssemblyScope aas(assembler->GetVIXLAssembler(), in GenerateStringCompareToLoop()
822 ExactAssemblyScope aas(assembler->GetVIXLAssembler(), in GenerateStringCompareToLoop()
977 UseScratchRegisterScope scratch_scope(assembler->GetVIXLAssembler()); in VisitStringEquals()
1004 ExactAssemblyScope aas(assembler->GetVIXLAssembler(), in VisitStringEquals()
1012 UseScratchRegisterScope scratch_scope(assembler->GetVIXLAssembler()); in VisitStringEquals()
[all …]
code_generator_arm64.h:334 vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler() function
600 vixl::aarch64::MacroAssembler* GetVIXLAssembler() const { in GetVIXLAssembler() function
601 return GetAssembler()->GetVIXLAssembler(); in GetVIXLAssembler()
661 vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler() function
1034 CHECK_EQ(GetVIXLAssembler()->ArePoolsBlocked(), true) in MaybeRecordImplicitNullCheck()
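The CHECK_EQ at code_generator_arm64.h:1034 encodes an invariant: MaybeRecordImplicitNullCheck only records a meaningful PC while pools are blocked, since an interleaved constant pool would make the recorded offset point at pool data rather than at the faulting load or store. A minimal sketch of the same check, with the stack-map recording elided:

```cpp
#include <cassert>
#include <cstddef>

#include "aarch64/macro-assembler-aarch64.h"  // VIXL

// Hypothetical recorder: capture the current code offset for an implicit
// null check, but only while the macro assembler cannot emit a pool.
void RecordNullCheckPc(vixl::aarch64::MacroAssembler* masm) {
  assert(masm->ArePoolsBlocked());
  ptrdiff_t pc_offset = masm->GetCursorOffset();
  // ... store pc_offset into the null-check/stack-map table (elided) ...
  (void)pc_offset;
}
```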
optimizing_cfi_test.cc:174 ->GetAssembler())->GetVIXLAssembler()-> in TEST_ISA()
code_generator_arm_vixl.h:423 ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler() function
575 ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler() function
899 CHECK_EQ(GetVIXLAssembler()->ArePoolsBlocked(), true) in MaybeRecordImplicitNullCheck()
code_generator_vector_arm64_sve.cc:39 #define __ GetVIXLAssembler()->
1042 UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecDotProd()
1099 UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecLoad()
1141 UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecStore()
1372 UseScratchRegisterScope temps(GetVIXLAssembler()); in MoveToSIMDStackSlot()
1373 if (GetVIXLAssembler()->GetScratchVRegisterList()->IsEmpty()) { in MoveToSIMDStackSlot()
1403 MacroAssembler* masm = codegen->GetVIXLAssembler(); in SaveRestoreLiveRegistersHelperSveImpl()
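Lines 1372-1373 show a fallback in MoveToSIMDStackSlot: if every scratch vector register is taken, the 128-bit copy goes through core registers instead. A minimal sketch of that shape (the offsets and the 16-byte slot size are assumptions):

```cpp
#include "aarch64/macro-assembler-aarch64.h"  // VIXL

using namespace vixl::aarch64;

void CopySimdStackSlot(MacroAssembler* masm, int src_offset, int dst_offset) {
  UseScratchRegisterScope temps(masm);
  if (masm->GetScratchVRegisterList()->IsEmpty()) {
    // No scratch V register available: move 64 bits at a time through X.
    Register temp = temps.AcquireX();
    for (int i = 0; i < 16; i += 8) {
      masm->Ldr(temp, MemOperand(sp, src_offset + i));
      masm->Str(temp, MemOperand(sp, dst_offset + i));
    }
  } else {
    VRegister temp = temps.AcquireVRegisterOfSize(kQRegSize);
    masm->Ldr(temp, MemOperand(sp, src_offset));
    masm->Str(temp, MemOperand(sp, dst_offset));
  }
}
```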
code_generator_vector_arm_vixl.cc:34 #define __ GetVIXLAssembler()->
839 UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecSADAccumulate()
958 UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecLoad()
1010 UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecStore()
code_generator_vector_arm64_neon.cc:40 #define __ GetVIXLAssembler()->
1430 UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecLoad()
1490 UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecStore()
1597 UseScratchRegisterScope temps(GetVIXLAssembler()); in MoveToSIMDStackSlot()
1598 if (GetVIXLAssembler()->GetScratchVRegisterList()->IsEmpty()) { in MoveToSIMDStackSlot()
1629 MacroAssembler* masm = codegen->GetVIXLAssembler(); in SaveRestoreLiveRegistersHelperNeonImpl()
codegen_test.cc:831 vixl::CPUFeatures* features = codegen.GetVIXLAssembler()->GetCPUFeatures(); in TEST_F()
846 vixl::CPUFeatures* features = codegen.GetVIXLAssembler()->GetCPUFeatures(); in TEST_F()
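The codegen_test.cc hits query the assembler's vixl::CPUFeatures set; the tests flip features to verify that code generation adapts. A minimal sketch of such a probe:

```cpp
#include "aarch64/macro-assembler-aarch64.h"  // VIXL

using vixl::CPUFeatures;
using vixl::aarch64::MacroAssembler;

// Hypothetical probe mirroring the shape of codegen_test.cc:831.
bool HasDotProduct(MacroAssembler* masm) {
  CPUFeatures* features = masm->GetCPUFeatures();
  return features->Has(CPUFeatures::kDotProduct);
}
```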
/art/compiler/utils/arm/
jni_macro_assembler_arm_vixl.cc:40 #define ___ asm_.GetVIXLAssembler()->
227 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in RemoveFrame()
283 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in Store()
303 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in StoreRawPtr()
322 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in LoadRawPtrFromThread()
329 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in StoreStackPointerToThread()
511 UseScratchRegisterScope temps2(asm_.GetVIXLAssembler()); in MoveArguments()
592 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in MoveArguments()
608 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in MoveArguments()
718 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in Move()
[all …]
assembler_arm_vixl.cc:374 UseScratchRegisterScope temps(GetVIXLAssembler()); in StoreRegisterList()
395 UseScratchRegisterScope temps(GetVIXLAssembler()); in LoadRegisterList()
assembler_arm_vixl.h:213 ArmVIXLMacroAssembler* GetVIXLAssembler() { return &vixl_masm_; } in GetVIXLAssembler() function
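assembler_arm_vixl.h:213 shows the ownership model behind all the call sites above: the ART assembler holds the VIXL macro assembler by value, and GetVIXLAssembler() hands out a borrowed pointer valid for the wrapper's lifetime. A minimal sketch of that shape, with a hypothetical wrapper name:

```cpp
#include "aarch32/macro-assembler-aarch32.h"  // VIXL

// Hypothetical wrapper mirroring ArmVIXLAssembler: the VIXL MacroAssembler
// is a by-value member, so the returned pointer is never owning.
class WrapperAssembler {
 public:
  vixl::aarch32::MacroAssembler* GetVIXLAssembler() { return &vixl_masm_; }

 private:
  vixl::aarch32::MacroAssembler vixl_masm_;
};
```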
/art/compiler/utils/arm64/
jni_macro_assembler_arm64.cc:36 #define ___ asm_.GetVIXLAssembler()->
131 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in AddConstant()
204 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in StoreStackPointerToThread()
221 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in LoadImmediate()
545 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in Move()
591 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in Copy()
630 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in Jump()
658 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in CreateJObject()
679 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in CreateJObject()
717 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in TryToTransitionFromRunnableToNative()
[all …]
assembler_arm64.h:84 vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return &vixl_masm_; } in GetVIXLAssembler() function
107 vixl::aarch64::UseScratchRegisterScope temps(GetVIXLAssembler()); in SaveRestoreZRegisterList()
/art/compiler/trampolines/
trampoline_compiler.cc:53 #define ___ assembler.GetVIXLAssembler()->
65 vixl::aarch32::UseScratchRegisterScope temps(assembler.GetVIXLAssembler()); in CreateTrampoline()
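The trampoline hits use the same `___` shorthand plus a scratch-register scope to build a tiny indirect jump. A hedged sketch of what such a Thumb-2 trampoline body can look like (the Thread register and offset are assumptions; in ART, r9 conventionally holds Thread* on arm):

```cpp
#include <cstdint>

#include "aarch32/macro-assembler-aarch32.h"  // VIXL

using namespace vixl::aarch32;

// Hypothetical trampoline body: load an entry point out of the Thread
// structure and jump to it, via a borrowed scratch register.
void EmitTrampolineSketch(MacroAssembler* masm, int32_t entrypoint_offset) {
  UseScratchRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  masm->Ldr(scratch, MemOperand(r9, entrypoint_offset));  // r9: Thread* (assumed)
  masm->Bx(scratch);
}
```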
/art/compiler/utils/
assembler_thumb_test.cc:279 vixl::aarch32::UseScratchRegisterScope temps(assembler.asm_.GetVIXLAssembler()); in TEST_F()
312 vixl::aarch32::UseScratchRegisterScope temps(assembler.asm_.GetVIXLAssembler()); in TEST_F()