/art/compiler/optimizing/ |
D | jit_patches_arm64.cc |
      30  return GetVIXLAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(value); in DeduplicateUint32Literal()
      39  return GetVIXLAssembler()->CreateLiteralDestroyedWithPool<uint64_t>(value); in DeduplicateUint64Literal()
      86  return GetVIXLAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ 0u); in DeduplicateJitStringLiteral()
      99  return GetVIXLAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ 0u); in DeduplicateJitClassLiteral()
|
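The four hits above implement JIT literal deduplication: each distinct constant is interned so the literal pool carries it only once. A minimal sketch of the pattern, assuming VIXL's aarch64 headers are on the include path; the cache class and map type are illustrative stand-ins, not ART's actual JitPatchesARM64:

    #include <cstdint>
    #include <map>

    #include "aarch64/macro-assembler-aarch64.h"

    using vixl::aarch64::Literal;
    using vixl::aarch64::MacroAssembler;

    // Interns 32-bit constants: each distinct value gets exactly one
    // pool-allocated literal, which the pool destroys after emission.
    class LiteralCache {
     public:
      explicit LiteralCache(MacroAssembler* masm) : masm_(masm) {}

      Literal<uint32_t>* DeduplicateUint32(uint32_t value) {
        auto it = literals_.find(value);
        if (it != literals_.end()) {
          return it->second;  // Already in the pool; reuse it.
        }
        Literal<uint32_t>* literal =
            masm_->CreateLiteralDestroyedWithPool<uint32_t>(value);
        literals_.emplace(value, literal);
        return literal;
      }

     private:
      MacroAssembler* masm_;
      std::map<uint32_t, Literal<uint32_t>*> literals_;
    };

The returned literal can later be referenced with Ldr(reg, literal); the pool emits the constant and destroys the object once emitted.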
D | jit_patches_arm64.h | 89 vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler() function
|
D | intrinsics_arm64.cc |
      78  MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() { in GetVIXLAssembler() function in art::arm64::IntrinsicCodeGeneratorARM64
      79  return codegen_->GetVIXLAssembler(); in GetVIXLAssembler()
      90  #define __ codegen->GetVIXLAssembler()->
     210  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ true, GetVIXLAssembler()); in VisitDoubleDoubleToRawLongBits()
     213  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ true, GetVIXLAssembler()); in VisitDoubleLongBitsToDouble()
     224  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ false, GetVIXLAssembler()); in VisitFloatFloatToRawIntBits()
     227  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ false, GetVIXLAssembler()); in VisitFloatIntBitsToFloat()
     299  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetVIXLAssembler()); in VisitIntegerReverseBytes()
     307  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt64, GetVIXLAssembler()); in VisitLongReverseBytes()
     315  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetVIXLAssembler()); in VisitShortReverseBytes()
     [all …]
|
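Hit 90 shows the idiom used throughout these files: a '__' macro that expands to the VIXL assembler, so emit sites read like assembly. A minimal sketch under that convention; EmitReverseBytes32 is a made-up stand-in for a GenReverseBytes-style helper, not ART's code:

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    // The '__' macro keeps emit sites terse, as in hit 90 above.
    #define __ masm->

    // Illustrative helper: byte-swap a 32-bit value with one REV.
    static void EmitReverseBytes32(MacroAssembler* masm,
                                   const Register& out, const Register& in) {
      __ Rev(out.W(), in.W());
    }

    #undef __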
D | code_generator_arm_vixl.cc |
     110  #define __ down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()->  // NOLINT
     209  UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()); in SaveContiguousSRegisterList()
     257  UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()); in RestoreContiguousSRegisterList()
     986  UseScratchRegisterScope temps(arm_codegen->GetVIXLAssembler()); in EmitNativeCode()
    1231  UseScratchRegisterScope temps(codegen->GetVIXLAssembler()); in GenerateLongDataProc()
    1385  UseScratchRegisterScope temps(codegen->GetVIXLAssembler()); in GenerateLongTestConstant()
    1437  ExactAssemblyScope guard(codegen->GetVIXLAssembler(), in GenerateLongTestConstant()
    1513  ExactAssemblyScope guard(codegen->GetVIXLAssembler(), in GenerateLongTest()
    1539  UseScratchRegisterScope temps(codegen->GetVIXLAssembler()); in GenerateLongTest()
    1592  ExactAssemblyScope guard(codegen->GetVIXLAssembler(), in GenerateConditionGeneric()
    [all …]
|
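Most hits here wrap emission in UseScratchRegisterScope, which lends out a temporary register from the macro assembler's scratch pool and hands it back when the scope closes. A minimal AArch32 sketch, assuming VIXL headers; the helper itself is illustrative:

    #include "aarch32/macro-assembler-aarch32.h"

    using namespace vixl::aarch32;

    // Borrow a temporary from the scratch pool (typically ip/r12 on
    // AArch32); the scope releases it on destruction.
    static void AddLargeImmediate(MacroAssembler* masm,
                                  Register out, Register lhs, uint32_t imm) {
      UseScratchRegisterScope temps(masm);
      Register tmp = temps.Acquire();
      masm->Mov(tmp, imm);        // Materialize the immediate.
      masm->Add(out, lhs, tmp);   // Then use it as a register operand.
    }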
D | intrinsics_arm64.h | 78 vixl::aarch64::MacroAssembler* GetVIXLAssembler();
|
D | code_generator_arm64.cc |
     176  #define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
     560  EmissionCheckScope scope(codegen->GetVIXLAssembler(), in EmitTable()
     860  UseScratchRegisterScope temps(arm64_codegen->GetVIXLAssembler()); in EmitNativeCode()
    1050  #define __ GetVIXLAssembler()->
    1154  vixl_temps_.Open(GetVIXLAssembler()); in PrepareForEmitNativeCode()
    1212  MacroAssembler* masm = GetVIXLAssembler(); in GenerateMethodEntryExitHook()
    1294  MacroAssembler* masm = GetVIXLAssembler(); in MaybeIncrementHotness()
    1333  MacroAssembler* masm = GetVIXLAssembler(); in GenerateFrameEntry()
    1393  ExactAssemblyScope eas(GetVIXLAssembler(), in GenerateFrameEntry()
    1528  UseScratchRegisterScope temps(GetVIXLAssembler()); in MarkGCCard()
    [all …]
|
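The EmitTable hit pairs GetVIXLAssembler() with EmissionCheckScope, which reserves a maximum code size and blocks literal/veneer pool emission inside the region, so PC-relative layout stays intact. A speculative sketch of a branch-table emitter under that constraint; this is not ART's EmitTable, and 'index' is assumed to be an X register holding a valid case number:

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;
    using vixl::EmissionCheckScope;

    static void EmitBranchTable(MacroAssembler* masm, const Register& index,
                                Label* targets[], int count) {
      // Reserve space for count + 3 instructions (maximum-size policy);
      // pools stay blocked for the whole region.
      EmissionCheckScope scope(masm, (count + 3) * kInstructionSize);
      UseScratchRegisterScope temps(masm);
      Register table = temps.AcquireX();
      Label table_start;
      masm->Adr(table, &table_start);                   // table = &entries[0]
      masm->Add(table, table, Operand(index, LSL, 2));  // 4 bytes per entry
      masm->Br(table);
      masm->Bind(&table_start);
      for (int i = 0; i < count; ++i) {
        masm->B(targets[i]);  // Each entry is a single direct branch.
      }
    }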
D | intrinsics_arm_vixl.cc |
      42  #define __ assembler->GetVIXLAssembler()->
     405  ExactAssemblyScope it_scope(assembler->GetVIXLAssembler(), in VisitMathRoundFloat()
     614  ExactAssemblyScope aas(assembler->GetVIXLAssembler(), in VisitStringCompareTo()
     636  ExactAssemblyScope aas(assembler->GetVIXLAssembler(), in VisitStringCompareTo()
     687  UseScratchRegisterScope temps(assembler->GetVIXLAssembler()); in GenerateStringCompareToLoop()
     785  ExactAssemblyScope aas(assembler->GetVIXLAssembler(), in GenerateStringCompareToLoop()
     822  ExactAssemblyScope aas(assembler->GetVIXLAssembler(), in GenerateStringCompareToLoop()
     977  UseScratchRegisterScope scratch_scope(assembler->GetVIXLAssembler()); in VisitStringEquals()
    1004  ExactAssemblyScope aas(assembler->GetVIXLAssembler(), in VisitStringEquals()
    1012  UseScratchRegisterScope scratch_scope(assembler->GetVIXLAssembler()); in VisitStringEquals()
    [all …]
|
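The ExactAssemblyScope hits pin down hand-placed T32 sequences such as IT blocks: inside the scope only raw (lowercase) mnemonics are legal and the macro assembler may not inject anything between them. A minimal sketch mirroring the usual 'n * kMaxInstructionSizeInBytes, kMaximumSize' sizing seen in ART; SelectOnEq is illustrative:

    #include "aarch32/macro-assembler-aarch32.h"

    using namespace vixl::aarch32;
    using vixl::CodeBufferCheckScope;
    using vixl::ExactAssemblyScope;

    // Keep the IT block and its conditional instruction adjacent.
    static void SelectOnEq(MacroAssembler* masm,
                           Register out, Register if_eq) {
      ExactAssemblyScope scope(masm,
                               2 * kMaxInstructionSizeInBytes,
                               CodeBufferCheckScope::kMaximumSize);
      masm->it(eq);
      masm->mov(eq, out, if_eq);  // Executes only when the Z flag is set.
    }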
D | code_generator_arm64.h |
     334  vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler() function
     600  vixl::aarch64::MacroAssembler* GetVIXLAssembler() const { in GetVIXLAssembler() function
     601  return GetAssembler()->GetVIXLAssembler(); in GetVIXLAssembler()
     661  vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler() function
    1034  CHECK_EQ(GetVIXLAssembler()->ArePoolsBlocked(), true) in MaybeRecordImplicitNullCheck()
|
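These header hits are all one-line forwarders. The layering they trace: the ART-level assembler owns the VIXL MacroAssembler (see assembler_arm64.h further down, which returns &vixl_masm_), and every layer above it simply forwards GetVIXLAssembler() so emit code can reach VIXL directly. A condensed stand-alone rendering with stand-in class names (in ART these are Arm64Assembler and CodeGeneratorARM64):

    #include "aarch64/macro-assembler-aarch64.h"

    class AssemblerWrapper {
     public:
      vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return &vixl_masm_; }

     private:
      vixl::aarch64::MacroAssembler vixl_masm_;  // The one real assembler.
    };

    class CodeGen {
     public:
      AssemblerWrapper* GetAssembler() { return &assembler_; }
      // Forwarder, as in hits 334/600/661 above.
      vixl::aarch64::MacroAssembler* GetVIXLAssembler() {
        return GetAssembler()->GetVIXLAssembler();
      }

     private:
      AssemblerWrapper assembler_;
    };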
D | optimizing_cfi_test.cc | 174 ->GetAssembler())->GetVIXLAssembler()-> in TEST_ISA()
|
D | code_generator_arm_vixl.h |
     423  ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler() function
     575  ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler() function
     899  CHECK_EQ(GetVIXLAssembler()->ArePoolsBlocked(), true) in MaybeRecordImplicitNullCheck()
|
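Hit 899 repeats the ArePoolsBlocked() CHECK from the arm64 header: an implicit null check records the PC of a faulting load, so a pool emitted between the load and the recording would corrupt that mapping. A sketch of why the invariant holds, assuming an EmissionCheckScope (which blocks pools) around the load; the helper is illustrative:

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;
    using vixl::EmissionCheckScope;

    // With pools blocked, the recorded offset is guaranteed to be the
    // offset of the potentially faulting load itself.
    static ptrdiff_t LoadWithRecordedOffset(MacroAssembler* masm,
                                            const Register& dst,
                                            const Register& obj) {
      EmissionCheckScope scope(masm, kInstructionSize);
      ptrdiff_t load_offset = masm->GetCursorOffset();
      masm->Ldr(dst, MemOperand(obj));  // Faults here if obj is null.
      return load_offset;               // Safe: no pool could intervene.
    }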
D | code_generator_vector_arm64_sve.cc |
      39  #define __ GetVIXLAssembler()->
    1042  UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecDotProd()
    1099  UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecLoad()
    1141  UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecStore()
    1372  UseScratchRegisterScope temps(GetVIXLAssembler()); in MoveToSIMDStackSlot()
    1373  if (GetVIXLAssembler()->GetScratchVRegisterList()->IsEmpty()) { in MoveToSIMDStackSlot()
    1403  MacroAssembler* masm = codegen->GetVIXLAssembler(); in SaveRestoreLiveRegistersHelperSveImpl()
|
D | code_generator_vector_arm_vixl.cc |
      34  #define __ GetVIXLAssembler()->
     839  UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecSADAccumulate()
     958  UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecLoad()
    1010  UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecStore()
|
D | code_generator_vector_arm64_neon.cc |
      40  #define __ GetVIXLAssembler()->
    1430  UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecLoad()
    1490  UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecStore()
    1597  UseScratchRegisterScope temps(GetVIXLAssembler()); in MoveToSIMDStackSlot()
    1598  if (GetVIXLAssembler()->GetScratchVRegisterList()->IsEmpty()) { in MoveToSIMDStackSlot()
    1629  MacroAssembler* masm = codegen->GetVIXLAssembler(); in SaveRestoreLiveRegistersHelperNeonImpl()
|
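The three vector back-ends above share one wrinkle, visible in the MoveToSIMDStackSlot hits: the scratch V-register list can be empty because ART hands the FP registers to its own allocator, so it is seeded before acquiring. A sketch of that pattern; the choice of q16 and the use of stack offsets are assumptions, not ART's actual slot layout:

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    // Copy one 128-bit SIMD stack slot to another via a scratch Q register.
    static void CopySimdStackSlot(MacroAssembler* masm,
                                  int dst_offset, int src_offset) {
      UseScratchRegisterScope temps(masm);
      if (masm->GetScratchVRegisterList()->IsEmpty()) {
        temps.Include(q16);  // Assumption: q16 holds no live value here.
      }
      VRegister tmp = temps.AcquireVRegisterOfSize(kQRegSize);
      masm->Ldr(tmp, MemOperand(sp, src_offset));
      masm->Str(tmp, MemOperand(sp, dst_offset));
    }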
D | codegen_test.cc |
     831  vixl::CPUFeatures* features = codegen.GetVIXLAssembler()->GetCPUFeatures(); in TEST_F()
     846  vixl::CPUFeatures* features = codegen.GetVIXLAssembler()->GetCPUFeatures(); in TEST_F()
|
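Both test hits fetch the vixl::CPUFeatures object that the assembler validates instructions against; the tests toggle features to exercise both sides of feature-guarded codegen. A minimal sketch; kDotProduct is just one example feature:

    #include "aarch64/macro-assembler-aarch64.h"

    using vixl::CPUFeatures;
    using vixl::aarch64::MacroAssembler;

    // Feature-gated codegen consults the assembler's CPUFeatures.
    static bool CanUseDotProd(MacroAssembler* masm) {
      CPUFeatures* features = masm->GetCPUFeatures();
      return features->Has(CPUFeatures::kDotProduct);
    }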
/art/compiler/utils/arm/ |
D | jni_macro_assembler_arm_vixl.cc |
      40  #define ___ asm_.GetVIXLAssembler()->
     227  UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in RemoveFrame()
     283  UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in Store()
     303  UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in StoreRawPtr()
     322  UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in LoadRawPtrFromThread()
     329  UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in StoreStackPointerToThread()
     511  UseScratchRegisterScope temps2(asm_.GetVIXLAssembler()); in MoveArguments()
     592  UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in MoveArguments()
     608  UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in MoveArguments()
     718  UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in Move()
     [all …]
|
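The StoreStackPointerToThread hit shows why a scratch register is needed there: SP is staged through a temporary before being stored to the thread object. A stand-alone rendering of that hit's shape; 'tr' stands in for ART's dedicated thread register (r9 on ARM) and the offset is illustrative:

    #include "aarch32/macro-assembler-aarch32.h"

    using namespace vixl::aarch32;

    // Stage SP through a scratch register, then store it to Thread.
    static void StoreStackPointerToThread(MacroAssembler* masm,
                                          Register tr, int32_t offset) {
      UseScratchRegisterScope temps(masm);
      Register scratch = temps.Acquire();
      masm->Mov(scratch, sp);
      masm->Str(scratch, MemOperand(tr, offset));
    }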
D | assembler_arm_vixl.cc |
     374  UseScratchRegisterScope temps(GetVIXLAssembler()); in StoreRegisterList()
     395  UseScratchRegisterScope temps(GetVIXLAssembler()); in LoadRegisterList()
|
D | assembler_arm_vixl.h | 213 ArmVIXLMacroAssembler* GetVIXLAssembler() { return &vixl_masm_; } in GetVIXLAssembler() function
|
/art/compiler/utils/arm64/ |
D | jni_macro_assembler_arm64.cc |
      36  #define ___ asm_.GetVIXLAssembler()->
     131  UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in AddConstant()
     204  UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in StoreStackPointerToThread()
     221  UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in LoadImmediate()
     545  UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in Move()
     591  UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in Copy()
     630  UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in Jump()
     658  UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in CreateJObject()
     679  UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in CreateJObject()
     717  UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in TryToTransitionFromRunnableToNative()
     [all …]
|
D | assembler_arm64.h |
      84  vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return &vixl_masm_; } in GetVIXLAssembler() function
     107  vixl::aarch64::UseScratchRegisterScope temps(GetVIXLAssembler()); in SaveRestoreZRegisterList()
|
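The AddConstant/LoadImmediate hits acquire a scratch register to materialize immediates that no add/sub encoding can hold. A sketch of that pattern, using VIXL's IsImmAddSub predicate; note that plain MacroAssembler::Add would synthesize the immediate by itself too, so the explicit scope matters mainly when the temp must live across several instructions:

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    // Use the immediate form when it encodes, otherwise go via a temp.
    static void AddConstant(MacroAssembler* masm, const Register& dst,
                            const Register& src, int64_t value) {
      if (Assembler::IsImmAddSub(value)) {
        masm->Add(dst, src, value);
      } else {
        UseScratchRegisterScope temps(masm);
        Register temp = temps.AcquireSameSizeAs(dst);
        masm->Mov(temp, value);
        masm->Add(dst, src, temp);
      }
    }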
/art/compiler/trampolines/ |
D | trampoline_compiler.cc |
      53  #define ___ assembler.GetVIXLAssembler()->
      65  vixl::aarch32::UseScratchRegisterScope temps(assembler.GetVIXLAssembler()); in CreateTrampoline()
|
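The CreateTrampoline hit uses a scratch register on the path where Thread* must first be recovered from the JNIEnv* argument before the entrypoint can be tail-jumped to by loading into pc. A sketch of that shape; the offsets and the assumption that JNIEnv* arrives in r0 are illustrative:

    #include "aarch32/macro-assembler-aarch32.h"

    using namespace vixl::aarch32;

    // Loading into pc performs the branch, so no return address is saved.
    static void EmitJniTrampoline(MacroAssembler* masm,
                                  int32_t self_offset, int32_t entry_offset) {
      UseScratchRegisterScope temps(masm);
      Register scratch = temps.Acquire();
      masm->Ldr(scratch, MemOperand(r0, self_offset));   // JNIEnv* -> Thread*
      masm->Ldr(pc, MemOperand(scratch, entry_offset));  // Tail-jump.
    }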
/art/compiler/utils/ |
D | assembler_thumb_test.cc |
     279  vixl::aarch32::UseScratchRegisterScope temps(assembler.asm_.GetVIXLAssembler()); in TEST_F()
     312  vixl::aarch32::UseScratchRegisterScope temps(assembler.asm_.GetVIXLAssembler()); in TEST_F()
|