/art/compiler/utils/arm64/ |
D | assembler_arm64.h |
    115  void StoreRef(FrameOffset dest, ManagedRegister src) OVERRIDE;
    116  void StoreRawPtr(FrameOffset dest, ManagedRegister src) OVERRIDE;
    117  void StoreImmediateToFrame(FrameOffset dest, uint32_t imm, ManagedRegister scratch) OVERRIDE;
    118  void StoreImmediateToThread64(ThreadOffset<8> dest, uint32_t imm, ManagedRegister scratch)
    123  void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off,
    127  void Load(ManagedRegister dest, FrameOffset src, size_t size) OVERRIDE;
    128  void LoadFromThread64(ManagedRegister dest, ThreadOffset<8> src, size_t size) OVERRIDE;
    129  void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
    130  void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs) OVERRIDE;
    131  void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) OVERRIDE;
    [all …]

D | assembler_arm64.cc |
    196  void Arm64Assembler::LoadImmediate(Register dest, int32_t value,  in LoadImmediate() argument
    199  ___ Mov(reg_x(dest), value);  in LoadImmediate()
    205  temps.Exclude(reg_x(dest));  in LoadImmediate()
    208  ___ Csel(reg_x(dest), temp, reg_x(dest), COND_OP(cond));  in LoadImmediate()
    210  ___ Csel(reg_x(dest), reg_x(XZR), reg_x(dest), COND_OP(cond));  in LoadImmediate()
    215  void Arm64Assembler::LoadWFromOffset(LoadOperandType type, WRegister dest,  in LoadWFromOffset() argument
    219  ___ Ldrsb(reg_w(dest), MEM_OP(reg_x(base), offset));  in LoadWFromOffset()
    222  ___ Ldrsh(reg_w(dest), MEM_OP(reg_x(base), offset));  in LoadWFromOffset()
    225  ___ Ldrb(reg_w(dest), MEM_OP(reg_x(base), offset));  in LoadWFromOffset()
    228  ___ Ldrh(reg_w(dest), MEM_OP(reg_x(base), offset));  in LoadWFromOffset()
    [all …]

/art/compiler/utils/mips/ |
D | assembler_mips.cc |
    598  void MipsAssembler::Store(FrameOffset dest, ManagedRegister msrc, size_t size) {  in Store() argument
    604  StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());  in Store()
    607  StoreToOffset(kStoreWord, src.AsRegisterPairLow(), SP, dest.Int32Value());  in Store()
    609  SP, dest.Int32Value() + 4);  in Store()
    611  StoreFToOffset(src.AsFRegister(), SP, dest.Int32Value());  in Store()
    614  StoreDToOffset(src.AsDRegister(), SP, dest.Int32Value());  in Store()
    618  void MipsAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {  in StoreRef() argument
    621  StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());  in StoreRef()
    624  void MipsAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {  in StoreRawPtr() argument
    627  StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());  in StoreRawPtr()
    [all …]

D | assembler_mips.h |
    172  void StoreRef(FrameOffset dest, ManagedRegister msrc) OVERRIDE;
    173  void StoreRawPtr(FrameOffset dest, ManagedRegister msrc) OVERRIDE;
    175  void StoreImmediateToFrame(FrameOffset dest, uint32_t imm, ManagedRegister mscratch) OVERRIDE;
    177  void StoreImmediateToThread32(ThreadOffset<4> dest, uint32_t imm, ManagedRegister mscratch)
    185  void StoreSpanning(FrameOffset dest, ManagedRegister msrc, FrameOffset in_off,
    193  void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
    210  void CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister mscratch) OVERRIDE;
    212  void Copy(FrameOffset dest, FrameOffset src, ManagedRegister mscratch, size_t size) OVERRIDE;
    214  void Copy(FrameOffset dest, ManagedRegister src_base, Offset src_offset, ManagedRegister mscratch,
    220  void Copy(FrameOffset dest, FrameOffset src_base, Offset src_offset, ManagedRegister mscratch,
    [all …]

/art/compiler/utils/ |
D | assembler.h |
    384  virtual void StoreRef(FrameOffset dest, ManagedRegister src) = 0;
    385  virtual void StoreRawPtr(FrameOffset dest, ManagedRegister src) = 0;
    387  virtual void StoreImmediateToFrame(FrameOffset dest, uint32_t imm,
    390  virtual void StoreImmediateToThread32(ThreadOffset<4> dest, uint32_t imm,
    392  virtual void StoreImmediateToThread64(ThreadOffset<8> dest, uint32_t imm,
    405  virtual void StoreSpanning(FrameOffset dest, ManagedRegister src,
    409  virtual void Load(ManagedRegister dest, FrameOffset src, size_t size) = 0;
    411  virtual void LoadFromThread32(ManagedRegister dest, ThreadOffset<4> src, size_t size);
    412  virtual void LoadFromThread64(ManagedRegister dest, ThreadOffset<8> src, size_t size);
    414  virtual void LoadRef(ManagedRegister dest, FrameOffset src) = 0;
    [all …]

D | assembler.cc |
    127  void Assembler::StoreImmediateToThread32(ThreadOffset<4> dest, uint32_t imm,  in StoreImmediateToThread32() argument
    132  void Assembler::StoreImmediateToThread64(ThreadOffset<8> dest, uint32_t imm,  in StoreImmediateToThread64() argument
    157  void Assembler::LoadFromThread32(ManagedRegister dest, ThreadOffset<4> src, size_t size) {  in LoadFromThread32() argument
    161  void Assembler::LoadFromThread64(ManagedRegister dest, ThreadOffset<8> src, size_t size) {  in LoadFromThread64() argument
    165  void Assembler::LoadRawPtrFromThread32(ManagedRegister dest, ThreadOffset<4> offs) {  in LoadRawPtrFromThread32() argument
    169  void Assembler::LoadRawPtrFromThread64(ManagedRegister dest, ThreadOffset<8> offs) {  in LoadRawPtrFromThread64() argument

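The listings in assembler.h above and the per-architecture headers (arm64 and mips above, x86, arm, and x86_64 below) all follow one pattern: a machine-independent base class declares pure-virtual store/load operations on frame offsets and managed registers, and each back end overrides them with code that emits the corresponding machine instructions. A minimal sketch of that shape, using hypothetical simplified types rather than ART's real FrameOffset/ManagedRegister classes:

    #include <cstddef>
    #include <cstdint>

    // Simplified sketch of the interface pattern in the listings; not ART's code.
    struct FrameOffset { int32_t value; };
    struct ManagedRegister { int id; };

    class AssemblerInterfaceSketch {
     public:
      virtual ~AssemblerInterfaceSketch() {}
      // Machine-independent operations, implemented once per architecture.
      virtual void StoreRef(FrameOffset dest, ManagedRegister src) = 0;
      virtual void Load(ManagedRegister dest, FrameOffset src, size_t size) = 0;
    };

    class Arm64BackEndSketch : public AssemblerInterfaceSketch {
     public:
      void StoreRef(FrameOffset dest, ManagedRegister src) override {
        // A real back end would emit e.g. "str w<src>, [sp, #dest]"; elided here.
      }
      void Load(ManagedRegister dest, FrameOffset src, size_t size) override {
        // A real back end would pick a load of the requested size from [sp, #src].
      }
    };

Note how assembler.h declares both LoadFromThread32 and LoadFromThread64 as non-pure (lines 411-412) while each back end only overrides the variant matching its pointer size: the arm, mips, and x86 headers override the ThreadOffset<4> forms, arm64 and x86_64 the ThreadOffset<8> ones.
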
/art/runtime/ |
D | leb128.h |
    115  static inline uint8_t* EncodeUnsignedLeb128(uint8_t* dest, uint32_t value) {  in EncodeUnsignedLeb128() argument
    119  *dest++ = out | 0x80;  in EncodeUnsignedLeb128()
    123  *dest++ = out;  in EncodeUnsignedLeb128()
    124  return dest;  in EncodeUnsignedLeb128()
    127  static inline uint8_t* EncodeSignedLeb128(uint8_t* dest, int32_t value) {  in EncodeSignedLeb128() argument
    131  *dest++ = out | 0x80;  in EncodeSignedLeb128()
    136  *dest++ = out;  in EncodeSignedLeb128()
    137  return dest;  in EncodeSignedLeb128()

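EncodeUnsignedLeb128 above emits a 32-bit value in LEB128 form: seven payload bits per byte, least-significant group first, with bit 7 set on every byte except the last to mark that more bytes follow. A self-contained sketch of the unsigned case (the signed variant additionally stops once the remaining bits are pure sign extension):

    #include <cstdint>

    // Minimal unsigned LEB128 encoder, same scheme as the snippet above.
    static inline uint8_t* EncodeUleb128Sketch(uint8_t* dest, uint32_t value) {
      while (value >= 0x80) {
        *dest++ = static_cast<uint8_t>(value & 0x7f) | 0x80;  // more bytes follow
        value >>= 7;
      }
      *dest++ = static_cast<uint8_t>(value);  // final byte, bit 7 clear
      return dest;  // one past the last byte written, as in leb128.h
    }

For example, encoding 300 (0x12C) writes the two bytes 0xAC 0x02.
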
/art/runtime/base/ |
D | casts.h |
    86  Dest dest;  in bit_cast() local
    87  memcpy(&dest, &source, sizeof(dest));  in bit_cast()
    88  return dest;  in bit_cast()

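The bit_cast fragment above copies the source's object representation into a local of the destination type with memcpy, reinterpreting the bits without the undefined behavior of pointer-based type punning. A standalone sketch, with the size check written as a static_assert for illustration (the real header may enforce it differently):

    #include <cstring>

    // Reinterpret the bits of `source` as a value of type Dest.
    template <typename Dest, typename Source>
    inline Dest bit_cast_sketch(const Source& source) {
      static_assert(sizeof(Dest) == sizeof(Source), "Dest and Source must be the same size");
      Dest dest;
      std::memcpy(&dest, &source, sizeof(dest));
      return dest;
    }

Calling bit_cast_sketch<int32_t>(1.0f) on a platform with IEEE-754 floats yields 0x3f800000.
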
/art/compiler/utils/x86/ |
D | assembler_x86.cc |
    1480  void X86Assembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {  in StoreRef() argument
    1483  movl(Address(ESP, dest), src.AsCpuRegister());  in StoreRef()
    1486  void X86Assembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {  in StoreRawPtr() argument
    1489  movl(Address(ESP, dest), src.AsCpuRegister());  in StoreRawPtr()
    1492  void X86Assembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm,  in StoreImmediateToFrame() argument
    1494  movl(Address(ESP, dest), Immediate(imm));  in StoreImmediateToFrame()
    1497  void X86Assembler::StoreImmediateToThread32(ThreadOffset<4> dest, uint32_t imm,  in StoreImmediateToThread32() argument
    1499  fs()->movl(Address::Absolute(dest), Immediate(imm));  in StoreImmediateToThread32()
    1521  X86ManagedRegister dest = mdest.AsX86();  in Load() local
    1522  if (dest.IsNoRegister()) {  in Load()
    [all …]

D | assembler_x86.h |
    476  void StoreRef(FrameOffset dest, ManagedRegister src) OVERRIDE;
    477  void StoreRawPtr(FrameOffset dest, ManagedRegister src) OVERRIDE;
    479  void StoreImmediateToFrame(FrameOffset dest, uint32_t imm, ManagedRegister scratch) OVERRIDE;
    481  void StoreImmediateToThread32(ThreadOffset<4> dest, uint32_t imm, ManagedRegister scratch)
    489  void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off,
    493  void Load(ManagedRegister dest, FrameOffset src, size_t size) OVERRIDE;
    495  void LoadFromThread32(ManagedRegister dest, ThreadOffset<4> src, size_t size) OVERRIDE;
    497  void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
    499  void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs) OVERRIDE;
    501  void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) OVERRIDE;
    [all …]

/art/compiler/dex/quick/ |
D | mir_to_lir-inl.h |
    79   inline LIR* Mir2Lir::NewLIR1(int opcode, int dest) {  in NewLIR1() argument
    84   LIR* insn = RawLIR(current_dalvik_offset_, opcode, dest);  in NewLIR1()
    89   inline LIR* Mir2Lir::NewLIR2(int opcode, int dest, int src1) {  in NewLIR2() argument
    94   LIR* insn = RawLIR(current_dalvik_offset_, opcode, dest, src1);  in NewLIR2()
    109  inline LIR* Mir2Lir::NewLIR3(int opcode, int dest, int src1, int src2) {  in NewLIR3() argument
    114  LIR* insn = RawLIR(current_dalvik_offset_, opcode, dest, src1, src2);  in NewLIR3()
    119  inline LIR* Mir2Lir::NewLIR4(int opcode, int dest, int src1, int src2, int info) {  in NewLIR4() argument
    124  LIR* insn = RawLIR(current_dalvik_offset_, opcode, dest, src1, src2, info);  in NewLIR4()
    129  inline LIR* Mir2Lir::NewLIR5(int opcode, int dest, int src1, int src2, int info1,  in NewLIR5() argument
    135  LIR* insn = RawLIR(current_dalvik_offset_, opcode, dest, src1, src2, info1, info2);  in NewLIR5()

D | mir_to_lir.h |
    679   LIR* NewLIR1(int opcode, int dest);
    680   LIR* NewLIR2(int opcode, int dest, int src1);
    682   LIR* NewLIR3(int opcode, int dest, int src1, int src2);
    683   LIR* NewLIR4(int opcode, int dest, int src1, int src2, int info);
    684   LIR* NewLIR5(int opcode, int dest, int src1, int src2, int info1, int info2);
    722   void ConvertMemOpIntoMove(LIR* orig_lir, RegStorage dest, RegStorage src);
    1442  virtual void OpRegCopyWide(RegStorage dest, RegStorage src) = 0;

D | local_optimizations.cc |
    70  void Mir2Lir::ConvertMemOpIntoMove(LIR* orig_lir, RegStorage dest, RegStorage src) {  in ConvertMemOpIntoMove() argument
    73  move_lir = OpRegCopyNoInsert(dest, src);  in ConvertMemOpIntoMove()

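The NewLIR1..NewLIR5 helpers in mir_to_lir-inl.h above differ only in arity: each forwards the opcode and its operands to RawLIR at the current Dalvik offset and appends the resulting LIR node to the instruction stream (the elided lines presumably validate the operand count against the opcode). A variadic sketch of that shape, with hypothetical names and plain new in place of ART's arena allocation:

    #include <vector>

    // Toy LIR node: an opcode plus up to five integer operands.
    struct LirSketch {
      int opcode;
      int operands[5];
    };

    // One generic builder standing in for the fixed-arity NewLIRn helpers.
    template <typename... Ops>
    LirSketch* NewLirSketch(std::vector<LirSketch*>* code, int opcode, Ops... ops) {
      static_assert(sizeof...(Ops) <= 5, "at most five operands");
      LirSketch* insn = new LirSketch{opcode, {ops...}};
      code->push_back(insn);  // append to the instruction stream
      return insn;
    }

ART's fixed-arity versions trade this genericity for explicit checks tied to each opcode's encoding, which is why they exist as five separate overloads.
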
/art/compiler/utils/arm/ |
D | assembler_arm.cc |
    455  void ArmAssembler::Store(FrameOffset dest, ManagedRegister msrc, size_t size) {  in Store() argument
    461  StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());  in Store()
    464  StoreToOffset(kStoreWord, src.AsRegisterPairLow(), SP, dest.Int32Value());  in Store()
    466  SP, dest.Int32Value() + 4);  in Store()
    468  StoreSToOffset(src.AsSRegister(), SP, dest.Int32Value());  in Store()
    471  StoreDToOffset(src.AsDRegister(), SP, dest.Int32Value());  in Store()
    475  void ArmAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {  in StoreRef() argument
    478  StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());  in StoreRef()
    481  void ArmAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {  in StoreRawPtr() argument
    484  StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());  in StoreRawPtr()
    [all …]

D | assembler_arm.h |
    628  void StoreRef(FrameOffset dest, ManagedRegister src) OVERRIDE;
    629  void StoreRawPtr(FrameOffset dest, ManagedRegister src) OVERRIDE;
    631  void StoreImmediateToFrame(FrameOffset dest, uint32_t imm, ManagedRegister scratch) OVERRIDE;
    633  void StoreImmediateToThread32(ThreadOffset<4> dest, uint32_t imm, ManagedRegister scratch)
    641  void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off,
    645  void Load(ManagedRegister dest, FrameOffset src, size_t size) OVERRIDE;
    647  void LoadFromThread32(ManagedRegister dest, ThreadOffset<4> src, size_t size) OVERRIDE;
    649  void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
    651  void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs) OVERRIDE;
    653  void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) OVERRIDE;
    [all …]

/art/runtime/mirror/ |
D | object.cc |
    70  Object* Object::CopyObject(Thread* self, mirror::Object* dest, mirror::Object* src,  in CopyObject() argument
    75  byte* dst_bytes = reinterpret_cast<byte*>(dest);  in CopyObject()
    82  CopyReferenceFieldsWithReadBarrierVisitor visitor(dest);  in CopyObject()
    90  ObjectArray<Object>* array = dest->AsObjectArray<Object>();  in CopyObject()
    91  heap->WriteBarrierArray(dest, 0, array->GetLength());  in CopyObject()
    94  heap->WriteBarrierEveryFieldOf(dest);  in CopyObject()
    97  heap->AddFinalizerReference(self, &dest);  in CopyObject()
    99  return dest;  in CopyObject()

/art/compiler/utils/x86_64/ |
D | assembler_x86_64.cc |
    1839  void X86_64Assembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {  in StoreRef() argument
    1842  movl(Address(CpuRegister(RSP), dest), src.AsCpuRegister());  in StoreRef()
    1845  void X86_64Assembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {  in StoreRawPtr() argument
    1848  movq(Address(CpuRegister(RSP), dest), src.AsCpuRegister());  in StoreRawPtr()
    1851  void X86_64Assembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm,  in StoreImmediateToFrame() argument
    1853  movl(Address(CpuRegister(RSP), dest), Immediate(imm));  // TODO(64) movq?  in StoreImmediateToFrame()
    1856  void X86_64Assembler::StoreImmediateToThread64(ThreadOffset<8> dest, uint32_t imm,  in StoreImmediateToThread64() argument
    1858  gs()->movl(Address::Absolute(dest, true), Immediate(imm));  // TODO(64) movq?  in StoreImmediateToThread64()
    1880  X86_64ManagedRegister dest = mdest.AsX86_64();  in Load() local
    1881  if (dest.IsNoRegister()) {  in Load()
    [all …]

D | assembler_x86_64.h |
    518  void StoreRef(FrameOffset dest, ManagedRegister src) OVERRIDE;
    519  void StoreRawPtr(FrameOffset dest, ManagedRegister src) OVERRIDE;
    521  void StoreImmediateToFrame(FrameOffset dest, uint32_t imm, ManagedRegister scratch) OVERRIDE;
    523  void StoreImmediateToThread64(ThreadOffset<8> dest, uint32_t imm, ManagedRegister scratch)
    531  void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off,
    535  void Load(ManagedRegister dest, FrameOffset src, size_t size) OVERRIDE;
    537  void LoadFromThread64(ManagedRegister dest, ThreadOffset<8> src, size_t size) OVERRIDE;
    539  void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
    541  void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs) OVERRIDE;
    543  void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) OVERRIDE;
    [all …]

/art/compiler/dex/ |
D | ssa_transformation.cc |
    438  void MIRGraph::ComputeSuccLineIn(ArenaBitVector* dest, const ArenaBitVector* src1,  in ComputeSuccLineIn() argument
    440  if (dest->GetStorageSize() != src1->GetStorageSize() ||  in ComputeSuccLineIn()
    441  dest->GetStorageSize() != src2->GetStorageSize() ||  in ComputeSuccLineIn()
    442  dest->IsExpandable() != src1->IsExpandable() ||  in ComputeSuccLineIn()
    443  dest->IsExpandable() != src2->IsExpandable()) {  in ComputeSuccLineIn()
    448  for (idx = 0; idx < dest->GetStorageSize(); idx++) {  in ComputeSuccLineIn()
    449  dest->GetRawStorage()[idx] |= src1->GetRawStorageWord(idx) & ~(src2->GetRawStorageWord(idx));  in ComputeSuccLineIn()

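After checking that all three bit vectors have the same storage size and expandability, the ComputeSuccLineIn loop above ORs into dest every bit that is set in src1 and clear in src2, word by word. The same update in isolation, over raw storage words:

    #include <cstddef>
    #include <cstdint>

    // dest |= src1 & ~src2, applied word by word; all arrays have num_words words.
    void UnionWithDifferenceSketch(uint32_t* dest, const uint32_t* src1,
                                   const uint32_t* src2, size_t num_words) {
      for (size_t idx = 0; idx < num_words; ++idx) {
        dest[idx] |= src1[idx] & ~src2[idx];
      }
    }

In set terms this is dest = dest ∪ (src1 − src2).
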
D | local_value_numbering.h |
    292  void CopyAliasingValuesMap(ScopedArenaSafeMap<K, AliasingValues>* dest,
    323  void CopyLiveSregValues(SregValueMap* dest, const SregValueMap& src);

/art/runtime/arch/arm/ |
D | portable_entrypoints_arm.S |
    47  add r0, sp, #4    @ pass stack pointer + method ptr as dest for memcpy
    48  bl memcpy         @ memcpy (dest, src, bytes)

/art/runtime/gc/collector/ |
D | semi_space.cc |
    430  static inline size_t CopyAvoidingDirtyingPages(void* dest, const void* src, size_t size) {  in CopyAvoidingDirtyingPages() argument
    436  memcpy(dest, src, size);  in CopyAvoidingDirtyingPages()
    440  byte* byte_dest = reinterpret_cast<byte*>(dest);  in CopyAvoidingDirtyingPages()
    451  memcpy(dest, src, page_remain);  in CopyAvoidingDirtyingPages()

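Only a few lines of CopyAvoidingDirtyingPages are visible above, so the following is a hedged sketch of the idea its name and fragment suggest rather than ART's actual implementation: copy the unaligned prefix, then copy whole pages only when the source page contains a non-zero byte, so all-zero pages never dirty freshly mapped, zero-filled destination memory. kPageSize, the helper names, and the return-value semantics are all assumptions here.

    #include <algorithm>
    #include <cstdint>
    #include <cstring>

    static constexpr size_t kPageSize = 4096;  // assumed page size

    static bool AllZero(const uint8_t* p, size_t n) {
      for (size_t i = 0; i < n; ++i) {
        if (p[i] != 0) {
          return false;
        }
      }
      return true;
    }

    // Copy src to dest, skipping whole pages that are entirely zero in src.
    size_t CopySkippingZeroPages(void* dest, const void* src, size_t size) {
      uint8_t* d = static_cast<uint8_t*>(dest);
      const uint8_t* s = static_cast<const uint8_t*>(src);
      size_t bytes_copied = 0;
      // Unaligned prefix: copy up to the next destination page boundary.
      size_t prefix = (kPageSize - reinterpret_cast<uintptr_t>(d) % kPageSize) % kPageSize;
      prefix = std::min(prefix, size);
      std::memcpy(d, s, prefix);
      d += prefix;
      s += prefix;
      size -= prefix;
      bytes_copied += prefix;
      // Whole pages: skip the memcpy when the source page is entirely zero,
      // leaving the (already zeroed) destination page untouched and clean.
      while (size >= kPageSize) {
        if (!AllZero(s, kPageSize)) {
          std::memcpy(d, s, kPageSize);
          bytes_copied += kPageSize;
        }
        d += kPageSize;
        s += kPageSize;
        size -= kPageSize;
      }
      std::memcpy(d, s, size);  // unaligned tail
      bytes_copied += size;
      return bytes_copied;  // bytes actually written (assumed return semantics)
    }
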
/art/compiler/dex/quick/x86/ |
D | codegen_x86.h |
    286  void OpRegCopyWide(RegStorage dest, RegStorage src) OVERRIDE;
    702  X86OpCode GetOpcode(Instruction::Code op, RegLocation dest, RegLocation rhs,
    758  void GenImulRegImm(RegStorage dest, RegStorage src, int val);
    767  void GenImulMemImm(RegStorage dest, int sreg, int displacement, int val);

D | int_x86.cc |
    1261  void X86Mir2Lir::GenImulRegImm(RegStorage dest, RegStorage src, int val) {  in GenImulRegImm() argument
    1264  NewLIR2(kX86Xor32RR, dest.GetReg(), dest.GetReg());  in GenImulRegImm()
    1267  OpRegCopy(dest, src);  in GenImulRegImm()
    1270  OpRegRegImm(kOpMul, dest, src, val);  in GenImulRegImm()
    1275  void X86Mir2Lir::GenImulMemImm(RegStorage dest, int sreg, int displacement, int val) {  in GenImulMemImm() argument
    1282  NewLIR2(kX86Xor32RR, dest.GetReg(), dest.GetReg());  in GenImulMemImm()
    1285  LoadBaseDisp(rs_rX86_SP, displacement, dest, k32, kNotVolatile);  in GenImulMemImm()
    1288  m = NewLIR4(IS_SIMM8(val) ? kX86Imul32RMI8 : kX86Imul32RMI, dest.GetReg(),  in GenImulMemImm()
    2340  X86OpCode X86Mir2Lir::GetOpcode(Instruction::Code op, RegLocation dest, RegLocation rhs,  in GetOpcode() argument
    2343  bool dest_in_mem = dest.location != kLocPhysReg;  in GetOpcode()

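GenImulRegImm above strength-reduces multiplication by a constant before falling back to a real multiply: a factor of 0 becomes an xor that clears the destination register, a factor of 1 becomes a plain register copy, and only other values emit imul with an immediate. The same dispatch written out on its own, with illustrative names that are not part of ART's Mir2Lir API:

    #include <cstdint>

    enum class MulByConstKind { kZeroDest, kCopySrc, kImulImm };

    // Classify how "dest = src * val" can be emitted on x86.
    inline MulByConstKind ClassifyMulByConst(int32_t val) {
      if (val == 0) {
        return MulByConstKind::kZeroDest;  // xor dest, dest
      }
      if (val == 1) {
        return MulByConstKind::kCopySrc;   // mov dest, src
      }
      return MulByConstKind::kImulImm;     // imul dest, src, val
    }
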
/art/compiler/jni/quick/ |
D | jni_compiler.cc |
    539  FrameOffset dest = jni_conv->CurrentParamStackOffset();  in SetNativeParameter() local
    540  __ StoreRawPtr(dest, in_reg);  in SetNativeParameter()