
Searched refs:masm (Results 1 – 4 of 4) sorted by relevance

/art/compiler/optimizing/
intrinsics_arm64.cc
170 #define __ masm->
186 static void MoveFPToInt(LocationSummary* locations, bool is64bit, MacroAssembler* masm) { in MoveFPToInt() argument
193 static void MoveIntToFP(LocationSummary* locations, bool is64bit, MacroAssembler* masm) { in MoveIntToFP() argument
252 static void GenerateReverseBytes(MacroAssembler* masm, in GenerateReverseBytes() argument
286 MacroAssembler* masm) { in GenReverseBytes() argument
289 GenerateReverseBytes(masm, type, CPURegisterFrom(in, type), CPURegisterFrom(out, type)); in GenReverseBytes()
318 MacroAssembler* masm) { in GenNumberOfLeadingZeros() argument
345 MacroAssembler* masm) { in GenNumberOfTrailingZeros() argument
373 MacroAssembler* masm) { in GenReverse() argument
398 static void GenBitCount(HInvoke* instr, DataType::Type type, MacroAssembler* masm) { in GenBitCount() argument
[all …]
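
The intrinsics_arm64.cc hits show ART's "#define __ masm->" shorthand for emitting instructions through VIXL's aarch64 MacroAssembler. Below is a minimal illustrative sketch of that pattern only; EmitReverseBytes and its body are hypothetical stand-ins, and the include path is the usual VIXL one. The real GenerateReverseBytes additionally dispatches on DataType::Type, as the call at line 289 shows.

// Illustrative sketch only: the "#define __ masm->" shorthand plus one
// byte-reverse helper in the style of the hits above. EmitReverseBytes is
// a hypothetical name, not ART's helper.
#include "aarch64/macro-assembler-aarch64.h"

using namespace vixl::aarch64;

#define __ masm->

// Byte-reverse a full 32- or 64-bit register with a single REV.
static void EmitReverseBytes(MacroAssembler* masm,
                             const Register& out,
                             const Register& in) {
  __ Rev(out, in);
}

#undef __
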
code_generator_vector_arm64_neon.cc
1611 MacroAssembler* masm = codegen->GetVIXLAssembler(); in SaveRestoreLiveRegistersHelperNeonImpl() local
1612 UseScratchRegisterScope temps(masm); in SaveRestoreLiveRegistersHelperNeonImpl()
1614 Register base = masm->StackPointer(); in SaveRestoreLiveRegistersHelperNeonImpl()
1621 !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) { in SaveRestoreLiveRegistersHelperNeonImpl()
1625 masm->Add(new_base, base, Operand(spill_offset + core_spill_size)); in SaveRestoreLiveRegistersHelperNeonImpl()
1629 DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size)); in SaveRestoreLiveRegistersHelperNeonImpl()
1630 DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size)); in SaveRestoreLiveRegistersHelperNeonImpl()
1634 masm->StoreCPURegList(core_list, MemOperand(base, spill_offset)); in SaveRestoreLiveRegistersHelperNeonImpl()
1635 masm->StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size)); in SaveRestoreLiveRegistersHelperNeonImpl()
1637 masm->LoadCPURegList(core_list, MemOperand(base, spill_offset)); in SaveRestoreLiveRegistersHelperNeonImpl()
[all …]
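
The NEON hits save and restore live registers by storing CPURegLists at stack-pointer-relative offsets, re-basing with Add when IsImmLSPair rejects the offset. A minimal sketch of the store half, with an invented register set, helper name, and no re-basing logic:

// Illustrative sketch only: store a list of core registers to the stack,
// in the shape of the save path of the helper above.
#include "aarch64/macro-assembler-aarch64.h"

using namespace vixl::aarch64;

static void SaveCoreRegs(MacroAssembler* masm, int64_t spill_offset) {
  Register base = masm->StackPointer();  // spills are stack-pointer relative

  // Hypothetical spill set: x0-x3 as 64-bit core registers.
  CPURegList core_list(CPURegister::kRegister, kXRegSize, 0, 3);

  // Store the whole list in one call; VIXL pairs the stores where it can.
  masm->StoreCPURegList(core_list, MemOperand(base, spill_offset));
}
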
code_generator_vector_arm64_sve.cc
1345 MacroAssembler* masm = codegen->GetVIXLAssembler(); in SaveRestoreLiveRegistersHelperSveImpl() local
1346 Register base = masm->StackPointer(); in SaveRestoreLiveRegistersHelperSveImpl()
1354 masm->StoreCPURegList(core_list, MemOperand(base, spill_offset)); in SaveRestoreLiveRegistersHelperSveImpl()
1356 masm->LoadCPURegList(core_list, MemOperand(base, spill_offset)); in SaveRestoreLiveRegistersHelperSveImpl()
1367 masm->StoreCPURegList(core_list, MemOperand(base, spill_offset)); in SaveRestoreLiveRegistersHelperSveImpl()
1368 masm->StoreCPURegList(fp_list, MemOperand(base, fp_spill_offset)); in SaveRestoreLiveRegistersHelperSveImpl()
1370 masm->LoadCPURegList(core_list, MemOperand(base, spill_offset)); in SaveRestoreLiveRegistersHelperSveImpl()
1371 masm->LoadCPURegList(fp_list, MemOperand(base, fp_spill_offset)); in SaveRestoreLiveRegistersHelperSveImpl()
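
The SVE helper follows the same shape but keeps core and FP spills at separate offsets. A sketch of the restore half under the same assumptions (register sets, offsets, and the helper name are invented):

// Illustrative sketch only: reload separately spilled core and FP register
// lists, mirroring the load half of the helper above.
#include "aarch64/macro-assembler-aarch64.h"

using namespace vixl::aarch64;

static void RestoreCoreAndFpRegs(MacroAssembler* masm,
                                 int64_t spill_offset,
                                 int64_t fp_spill_offset) {
  Register base = masm->StackPointer();

  // Hypothetical spill sets: x0-x1 as core registers, d0-d1 as FP registers.
  CPURegList core_list(CPURegister::kRegister, kXRegSize, 0, 1);
  CPURegList fp_list(CPURegister::kVRegister, kDRegSize, 0, 1);

  masm->LoadCPURegList(core_list, MemOperand(base, spill_offset));
  masm->LoadCPURegList(fp_list, MemOperand(base, fp_spill_offset));
}
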
code_generator_arm64.cc
1113 MacroAssembler* masm = GetVIXLAssembler(); in MaybeIncrementHotness() local
1115 UseScratchRegisterScope temps(masm); in MaybeIncrementHotness()
1135 UseScratchRegisterScope temps(masm); in MaybeIncrementHotness()
1173 MacroAssembler* masm = GetVIXLAssembler(); in GenerateFrameEntry() local
1179 UseScratchRegisterScope temps(masm); in GenerateFrameEntry()
1441 static CPURegister AcquireFPOrCoreCPURegisterOfSize(vixl::aarch64::MacroAssembler* masm, in AcquireFPOrCoreCPURegisterOfSize() argument
1444 return masm->GetScratchVRegisterList()->IsEmpty() in AcquireFPOrCoreCPURegisterOfSize()
1622 MacroAssembler* masm = GetVIXLAssembler(); in LoadAcquire() local
1623 UseScratchRegisterScope temps(masm); in LoadAcquire()
1639 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize); in LoadAcquire()
[all …]
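
The code_generator_arm64.cc hits include LoadAcquire, which acquires a scratch register for the address and then emits the barrier-carrying load inside an ExactAssemblyScope so that exactly one instruction lands at that point. A rough, simplified sketch of that shape; the helper name, parameters, and fixed 32-bit width are assumptions, not ART's implementation:

// Illustrative sketch only: a load-acquire of a 32-bit field through a
// scratch address register, loosely following the LoadAcquire hits above.
#include "aarch64/macro-assembler-aarch64.h"

using namespace vixl::aarch64;

static void EmitLoadAcquireWord(MacroAssembler* masm,
                                const Register& dst,
                                const Register& obj,
                                int64_t offset) {
  UseScratchRegisterScope temps(masm);
  Register temp = temps.AcquireX();  // scratch register for the address
  masm->Add(temp, obj, offset);      // base object + field offset

  // Emit exactly one instruction; the scope asserts the emitted size.
  ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
  masm->ldar(dst.W(), MemOperand(temp));
}
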