
Searched refs:GetVIXLAssembler (Results 1 – 18 of 18) sorted by relevance

/art/compiler/utils/arm/
jni_macro_assembler_arm_vixl.cc
36 #define ___ asm_.GetVIXLAssembler()->
174 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in Store()
194 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in StoreRef()
202 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in StoreRawPtr()
214 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in StoreSpanning()
224 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in CopyRef()
236 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in LoadRef()
264 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in StoreImmediateToFrame()
283 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in LoadRawPtrFromThread()
293 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in CopyRawPtrFromThread()
[all …]
assembler_arm_vixl.cc
367 UseScratchRegisterScope temps(GetVIXLAssembler()); in StoreRegisterList()
388 UseScratchRegisterScope temps(GetVIXLAssembler()); in LoadRegisterList()
assembler_arm_vixl.h
161 ArmVIXLMacroAssembler* GetVIXLAssembler() { return &vixl_masm_; } in GetVIXLAssembler() function
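
A convention visible throughout these results: each emitting .cc file defines a local shorthand macro (__ or ___) that forwards to the VIXL macro assembler returned by GetVIXLAssembler(), so emission code reads like assembly. A minimal sketch of that shape, assuming a simplified wrapper in place of ART's ArmVIXLAssembler (the Mov/Bx sequence is invented for illustration):

    #include "aarch32/macro-assembler-aarch32.h"

    // Hypothetical stand-in for ART's ArmVIXLAssembler: it owns the VIXL
    // macro assembler and hands out a pointer via GetVIXLAssembler().
    struct AssemblerSketch {
      vixl::aarch32::MacroAssembler vixl_masm_;
      vixl::aarch32::MacroAssembler* GetVIXLAssembler() { return &vixl_masm_; }
    };

    #define ___ asm_.GetVIXLAssembler()->  // same shape as the matches above

    void EmitSketch(AssemblerSketch& asm_) {
      ___ Mov(vixl::aarch32::r0, 0);  // expands to asm_.GetVIXLAssembler()->Mov(r0, 0)
      ___ Bx(vixl::aarch32::lr);
    }
    #undef ___
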
/art/compiler/optimizing/
intrinsics_arm64.cc
67 MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() { in GetVIXLAssembler() function in art::arm64::IntrinsicCodeGeneratorARM64
68 return codegen_->GetVIXLAssembler(); in GetVIXLAssembler()
75 #define __ codegen->GetVIXLAssembler()->
125 vixl::EmissionCheckScope guard(codegen->GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes); in EmitNativeCode()
276 MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler()); in VisitDoubleDoubleToRawLongBits()
279 MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler()); in VisitDoubleLongBitsToDouble()
290 MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler()); in VisitFloatFloatToRawIntBits()
293 MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler()); in VisitFloatIntBitsToFloat()
330 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler()); in VisitIntegerReverseBytes()
338 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler()); in VisitLongReverseBytes()
[all …]
code_generator_arm64.cc
163 #define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()-> // NOLINT
182 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler(); in SaveRestoreLiveRegistersHelper()
333 !UseScratchRegisterScope(arm64_codegen->GetVIXLAssembler()).IsAvailable(bss_entry_temp_)); in EmitNativeCode()
371 SingleEmissionCheckScope guard(arm64_codegen->GetVIXLAssembler()); in EmitNativeCode()
415 DCHECK(!UseScratchRegisterScope(arm64_codegen->GetVIXLAssembler()).IsAvailable(temp_)); in EmitNativeCode()
441 SingleEmissionCheckScope guard(arm64_codegen->GetVIXLAssembler()); in EmitNativeCode()
651 EmissionCheckScope scope(codegen->GetVIXLAssembler(), in EmitTable()
1074 MacroAssembler* masm = arm64_codegen->GetVIXLAssembler(); in EmitNativeCode()
1455 #define __ GetVIXLAssembler()->
1483 vixl_temps_.Open(GetVIXLAssembler()); in PrepareForEmitNativeCode()
[all …]
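
The EmissionCheckScope and SingleEmissionCheckScope guards in the matches above are VIXL's way of asserting that a code region stays within a byte budget, with pools flushed before the region opens. A hedged sketch of plain vixl::EmissionCheckScope usage; the size constant below is made up, whereas ART uses named margins such as kInvokeCodeMarginSizeInBytes:

    #include <cstddef>
    #include "aarch64/macro-assembler-aarch64.h"

    constexpr size_t kSketchMarginBytes = 16;  // hypothetical budget, not an ART constant

    void EmitGuarded(vixl::aarch64::MacroAssembler* masm) {
      // Asserts at destruction that no more than kSketchMarginBytes were emitted.
      vixl::EmissionCheckScope guard(masm, kSketchMarginBytes);
      masm->Ldr(vixl::aarch64::x0, vixl::aarch64::MemOperand(vixl::aarch64::x1));
      masm->Br(vixl::aarch64::x0);
    }
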
intrinsics_arm64.h
79 vixl::aarch64::MacroAssembler* GetVIXLAssembler();
code_generator_arm_vixl.cc
85 #define __ down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()-> // NOLINT
141 UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()); in SaveContiguousSRegisterList()
189 UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()); in RestoreContiguousSRegisterList()
448 down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()); in EmitNativeCode()
524 down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()); in EmitNativeCode()
1059 UseScratchRegisterScope temps(arm_codegen->GetVIXLAssembler()); in EmitNativeCode()
1095 ExactAssemblyScope aas(arm_codegen->GetVIXLAssembler(), in EmitNativeCode()
1593 UseScratchRegisterScope temps(codegen->GetVIXLAssembler()); in GenerateLongDataProc()
1730 ExactAssemblyScope guard(codegen->GetVIXLAssembler(), in GenerateLongTestConstant()
1763 UseScratchRegisterScope temps(codegen->GetVIXLAssembler()); in GenerateLongTestConstant()
[all …]
code_generator_arm64.h
251 vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler() function
381 vixl::aarch64::MacroAssembler* GetVIXLAssembler() const { in GetVIXLAssembler() function
382 return GetAssembler()->GetVIXLAssembler(); in GetVIXLAssembler()
432 vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler() function
intrinsics_arm_vixl.cc
36 #define __ assembler->GetVIXLAssembler()->
537 UseScratchRegisterScope temps(assembler->GetVIXLAssembler()); in GenMinMaxFloat()
552 ExactAssemblyScope it_scope(assembler->GetVIXLAssembler(), in GenMinMaxFloat()
636 ExactAssemblyScope it_scope(assembler->GetVIXLAssembler(), in GenMinMaxDouble()
693 UseScratchRegisterScope temps(assembler->GetVIXLAssembler()); in GenMinMaxLong()
706 ExactAssemblyScope it_scope(assembler->GetVIXLAssembler(), in GenMinMaxLong()
748 ExactAssemblyScope aas(assembler->GetVIXLAssembler(), in GenMinMax()
844 ExactAssemblyScope it_scope(assembler->GetVIXLAssembler(), in VisitMathRoundFloat()
1022 UseScratchRegisterScope temps(assembler->GetVIXLAssembler()); in GenUnsafeGet()
1184 UseScratchRegisterScope temps(assembler->GetVIXLAssembler()); in GenUnsafePut()
[all …]
code_generator_arm_vixl.h
320 ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler() function
453 ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler() function
code_generator_vector_arm_vixl.cc
23 #define __ reinterpret_cast<ArmVIXLAssembler*>(GetAssembler())->GetVIXLAssembler()-> // NOLINT
optimizing_cfi_test.cc
209 ->GetAssembler())->GetVIXLAssembler()-> in TEST_ISA()
code_generator_vector_arm64.cc
32 #define __ GetVIXLAssembler()->
809 UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecLoad()
838 UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecStore()
/art/compiler/utils/arm64/
assembler_arm64.h
68 vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return &vixl_masm_; } in GetVIXLAssembler() function
jni_macro_assembler_arm64.cc
33 #define ___ asm_.GetVIXLAssembler()->
87 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in AddConstant()
178 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in StoreStackPointerToThread()
203 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in LoadImmediate()
320 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in LoadRawPtr()
666 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in EmitExceptionPoll()
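
The dominant pattern in the .cc matches is scratch-register acquisition: UseScratchRegisterScope borrows a register from the macro assembler's scratch list and returns it automatically when the scope ends. An illustrative aarch64 sketch (the store sequence itself is invented for the example):

    #include <cstdint>
    #include "aarch64/macro-assembler-aarch64.h"

    void StoreViaScratch(vixl::aarch64::MacroAssembler* masm, int64_t value) {
      vixl::aarch64::UseScratchRegisterScope temps(masm);
      // AcquireX() hands out a free 64-bit scratch register (e.g. x16/x17).
      vixl::aarch64::Register scratch = temps.AcquireX();
      masm->Mov(scratch, value);
      masm->Str(scratch, vixl::aarch64::MemOperand(vixl::aarch64::sp, 0));
      // `scratch` is released when `temps` goes out of scope.
    }
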
/art/compiler/trampolines/
trampoline_compiler.cc
56 #define ___ assembler.GetVIXLAssembler()->
71 vixl::aarch32::UseScratchRegisterScope temps(assembler.GetVIXLAssembler()); in CreateTrampoline()
/art/compiler/linker/arm64/
relative_patcher_arm64.cc
373 #define __ assembler.GetVIXLAssembler()->
424 UseScratchRegisterScope temps(assembler.GetVIXLAssembler()); in CompileThunk()
458 UseScratchRegisterScope temps(assembler.GetVIXLAssembler()); in CompileThunk()
/art/compiler/utils/
assembler_thumb_test.cc
1761 vixl::aarch32::UseScratchRegisterScope temps(assembler.asm_.GetVIXLAssembler()); in TEST_F()
1794 vixl::aarch32::UseScratchRegisterScope temps(assembler.asm_.GetVIXLAssembler()); in TEST_F()
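
For completeness, a sketch of the aarch32 counterpart used in the Thumb tests above; Acquire() plays the role AcquireX() plays on aarch64 (the Mov is again illustrative):

    #include "aarch32/macro-assembler-aarch32.h"

    void Aarch32ScratchSketch(vixl::aarch32::MacroAssembler* masm) {
      vixl::aarch32::UseScratchRegisterScope temps(masm);
      vixl::aarch32::Register scratch = temps.Acquire();  // typically r12 (ip)
      masm->Mov(scratch, 42);
    }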