Lines matching refs:dest (Arm64Assembler search hits, each shown with its source line number and enclosing function):

199 void Arm64Assembler::LoadImmediate(XRegister dest, int32_t value,  in LoadImmediate()  argument
202 ___ Mov(reg_x(dest), value); in LoadImmediate()
208 temps.Exclude(reg_x(dest)); in LoadImmediate()
211 ___ Csel(reg_x(dest), temp, reg_x(dest), cond); in LoadImmediate()
213 ___ Csel(reg_x(dest), reg_x(XZR), reg_x(dest), cond); in LoadImmediate()
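Read together, the LoadImmediate hits suggest this shape: an unconditional Mov when no real condition is given, otherwise materialize the value in a scratch register (or use XZR when the value is zero) and select it into dest with Csel. A minimal sketch of how the fragments could fit together, assuming the elided third parameter is a vixl Condition and that vixl_masm_ names the underlying vixl macro assembler (that member name is an assumption, not confirmed by the matches):

void Arm64Assembler::LoadImmediate(XRegister dest, int32_t value, Condition cond) {
  if ((cond == al) || (cond == nv)) {
    // Unconditional case: move the immediate straight into dest.
    ___ Mov(reg_x(dest), value);
  } else if (value != 0) {
    // Conditional, non-zero: build the value in a temp, then
    // dest = cond ? temp : dest.
    UseScratchRegisterScope temps(vixl_masm_);  // assumed member name
    temps.Exclude(reg_x(dest));                 // keep dest out of the scratch pool
    Register temp = temps.AcquireX();
    ___ Mov(temp, value);
    ___ Csel(reg_x(dest), temp, reg_x(dest), cond);
  } else {
    // Conditional zero: select the zero register, no temp needed.
    ___ Csel(reg_x(dest), reg_x(XZR), reg_x(dest), cond);
  }
}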
218 void Arm64Assembler::LoadWFromOffset(LoadOperandType type, WRegister dest, in LoadWFromOffset() argument
222 ___ Ldrsb(reg_w(dest), MEM_OP(reg_x(base), offset)); in LoadWFromOffset()
225 ___ Ldrsh(reg_w(dest), MEM_OP(reg_x(base), offset)); in LoadWFromOffset()
228 ___ Ldrb(reg_w(dest), MEM_OP(reg_x(base), offset)); in LoadWFromOffset()
231 ___ Ldrh(reg_w(dest), MEM_OP(reg_x(base), offset)); in LoadWFromOffset()
234 ___ Ldr(reg_w(dest), MEM_OP(reg_x(base), offset)); in LoadWFromOffset()
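The LoadWFromOffset hits look like a switch over LoadOperandType, one load flavor per width and signedness. A sketch of that dispatch; the kLoad* enumerator names are assumed by analogy with the kStoreWord constant visible in the copy routines below:

void Arm64Assembler::LoadWFromOffset(LoadOperandType type, WRegister dest,
                                     XRegister base, int32_t offset) {
  switch (type) {
    case kLoadSignedByte:        // assumed enumerator names
      ___ Ldrsb(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadSignedHalfword:
      ___ Ldrsh(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadUnsignedByte:
      ___ Ldrb(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadUnsignedHalfword:
      ___ Ldrh(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadWord:
      ___ Ldr(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
  }
}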
243 void Arm64Assembler::LoadFromOffset(XRegister dest, XRegister base, in LoadFromOffset() argument
245 CHECK_NE(dest, SP); in LoadFromOffset()
246 ___ Ldr(reg_x(dest), MEM_OP(reg_x(base), offset)); in LoadFromOffset()
249 void Arm64Assembler::LoadSFromOffset(SRegister dest, XRegister base, in LoadSFromOffset() argument
251 ___ Ldr(reg_s(dest), MEM_OP(reg_x(base), offset)); in LoadSFromOffset()
254 void Arm64Assembler::LoadDFromOffset(DRegister dest, XRegister base, in LoadDFromOffset() argument
256 ___ Ldr(reg_d(dest), MEM_OP(reg_x(base), offset)); in LoadDFromOffset()
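The three plain offset loads share one pattern: at most a sanity check on dest, then a single Ldr through the matching register view. A sketch, assuming each signature ends with an int32_t offset:

void Arm64Assembler::LoadFromOffset(XRegister dest, XRegister base, int32_t offset) {
  CHECK_NE(dest, SP);                                  // never load into the stack pointer
  ___ Ldr(reg_x(dest), MEM_OP(reg_x(base), offset));   // 64-bit GPR load
}

void Arm64Assembler::LoadSFromOffset(SRegister dest, XRegister base, int32_t offset) {
  ___ Ldr(reg_s(dest), MEM_OP(reg_x(base), offset));   // 32-bit FP load
}

void Arm64Assembler::LoadDFromOffset(DRegister dest, XRegister base, int32_t offset) {
  ___ Ldr(reg_d(dest), MEM_OP(reg_x(base), offset));   // 64-bit FP load
}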
259 void Arm64Assembler::Load(Arm64ManagedRegister dest, XRegister base, in Load() argument
261 if (dest.IsNoRegister()) { in Load()
262 CHECK_EQ(0u, size) << dest; in Load()
263 } else if (dest.IsWRegister()) { in Load()
264 CHECK_EQ(4u, size) << dest; in Load()
265 ___ Ldr(reg_w(dest.AsWRegister()), MEM_OP(reg_x(base), offset)); in Load()
266 } else if (dest.IsXRegister()) { in Load()
267 CHECK_NE(dest.AsXRegister(), SP) << dest; in Load()
269 ___ Ldr(reg_w(dest.AsOverlappingWRegister()), MEM_OP(reg_x(base), offset)); in Load()
271 CHECK_EQ(8u, size) << dest; in Load()
272 ___ Ldr(reg_x(dest.AsXRegister()), MEM_OP(reg_x(base), offset)); in Load()
274 } else if (dest.IsSRegister()) { in Load()
275 ___ Ldr(reg_s(dest.AsSRegister()), MEM_OP(reg_x(base), offset)); in Load()
277 CHECK(dest.IsDRegister()) << dest; in Load()
278 ___ Ldr(reg_d(dest.AsDRegister()), MEM_OP(reg_x(base), offset)); in Load()
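Load dispatches on the kind of Arm64ManagedRegister and cross-checks the requested size: no register means size 0, a W register means 4 bytes, an X register takes either a 4-byte load through its overlapping W view or an 8-byte load, and S/D registers take the FP loads. A sketch of the branch structure the matches imply (the size parameter and the size == 4 test are assumptions):

void Arm64Assembler::Load(Arm64ManagedRegister dest, XRegister base,
                          int32_t offset, size_t size) {
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size) << dest;  // nothing to load
  } else if (dest.IsWRegister()) {
    CHECK_EQ(4u, size) << dest;
    ___ Ldr(reg_w(dest.AsWRegister()), MEM_OP(reg_x(base), offset));
  } else if (dest.IsXRegister()) {
    CHECK_NE(dest.AsXRegister(), SP) << dest;
    if (size == 4u) {
      // 32-bit load through the W view; the architecture zero-extends
      // into the full X register.
      ___ Ldr(reg_w(dest.AsOverlappingWRegister()), MEM_OP(reg_x(base), offset));
    } else {
      CHECK_EQ(8u, size) << dest;
      ___ Ldr(reg_x(dest.AsXRegister()), MEM_OP(reg_x(base), offset));
    }
  } else if (dest.IsSRegister()) {
    ___ Ldr(reg_s(dest.AsSRegister()), MEM_OP(reg_x(base), offset));
  } else {
    CHECK(dest.IsDRegister()) << dest;
    ___ Ldr(reg_d(dest.AsDRegister()), MEM_OP(reg_x(base), offset));
  }
}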
373 void Arm64Assembler::CopyRef(FrameOffset dest, FrameOffset src, in CopyRef() argument
380 SP, dest.Int32Value()); in CopyRef()
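Only the store half of CopyRef matched, since dest occurs only on its continuation line. The likely shape is a 32-bit reference copy between two stack slots through a scratch register; the scratch parameter and the load half below are reconstructed, not confirmed by the matches:

void Arm64Assembler::CopyRef(FrameOffset dest, FrameOffset src,
                             ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  // References are 32-bit: go through the overlapping W view of scratch.
  LoadWFromOffset(kLoadWord, scratch.AsOverlappingWRegister(),
                  SP, src.Int32Value());
  StoreWToOffset(kStoreWord, scratch.AsOverlappingWRegister(),
                 SP, dest.Int32Value());
}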
383 void Arm64Assembler::Copy(FrameOffset dest, FrameOffset src, in Copy() argument
390 StoreWToOffset(kStoreWord, scratch.AsOverlappingWRegister(), SP, dest.Int32Value()); in Copy()
393 StoreToOffset(scratch.AsXRegister(), SP, dest.Int32Value()); in Copy()
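The first Copy overload moves size bytes between two stack slots via a scratch register, picking the 32-bit or 64-bit path from size. A sketch consistent with the two matched stores; the load half and the size checks are assumptions:

void Arm64Assembler::Copy(FrameOffset dest, FrameOffset src,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    // Word copy through the overlapping W view of the X scratch.
    LoadWFromOffset(kLoadWord, scratch.AsOverlappingWRegister(), SP, src.Int32Value());
    StoreWToOffset(kStoreWord, scratch.AsOverlappingWRegister(), SP, dest.Int32Value());
  } else {
    // Double-word copy through the full X scratch.
    LoadFromOffset(scratch.AsXRegister(), SP, src.Int32Value());
    StoreToOffset(scratch.AsXRegister(), SP, dest.Int32Value());
  }
}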
399 void Arm64Assembler::Copy(FrameOffset dest, ManagedRegister src_base, Offset src_offset, in Copy() argument
409 StoreWToOffset(kStoreWord, scratch.AsWRegister(), SP, dest.Int32Value()); in Copy()
412 StoreToOffset(scratch.AsXRegister(), SP, dest.Int32Value()); in Copy()
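The second overload reads from src_base plus src_offset and stores to the dest stack slot. Here the 4-byte path uses scratch.AsWRegister() directly rather than the overlapping view, which suggests a genuine W register is accepted as scratch for word copies. A sketch; the loads and the checks are assumptions:

void Arm64Assembler::Copy(FrameOffset dest, ManagedRegister src_base, Offset src_offset,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  Arm64ManagedRegister base = src_base.AsArm64();
  CHECK(base.IsXRegister()) << base;
  CHECK(scratch.IsXRegister() || scratch.IsWRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadWFromOffset(kLoadWord, scratch.AsWRegister(), base.AsXRegister(),
                    src_offset.Int32Value());
    StoreWToOffset(kStoreWord, scratch.AsWRegister(), SP, dest.Int32Value());
  } else {
    LoadFromOffset(scratch.AsXRegister(), base.AsXRegister(), src_offset.Int32Value());
    StoreToOffset(scratch.AsXRegister(), SP, dest.Int32Value());
  }
}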
447 Arm64ManagedRegister dest = m_dest.AsArm64(); in Copy() local
448 CHECK(dest.IsXRegister()) << dest; in Copy()
456 StoreWToOffset(kStoreWord, scratch.AsWRegister(), dest.AsXRegister(), in Copy()
461 StoreWToOffset(kStoreWord, scratch.AsOverlappingWRegister(), dest.AsXRegister(), in Copy()
466 StoreToOffset(scratch.AsXRegister(), dest.AsXRegister(), dest_offset.Int32Value()); in Copy()
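The final matches come from a register-to-register copy: m_dest resolves to an X register that serves as the destination base, the 4-byte path stores through either a real W scratch or the overlapping W view of an X scratch, and the 8-byte path uses the X scratch directly. A sketch of that branch structure; the source-side parameters and loads are assumptions mirrored from the matched stores:

void Arm64Assembler::Copy(ManagedRegister m_dest, Offset dest_offset,
                          ManagedRegister m_src, Offset src_offset,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister dest = m_dest.AsArm64();
  Arm64ManagedRegister src = m_src.AsArm64();
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(dest.IsXRegister()) << dest;
  CHECK(src.IsXRegister()) << src;
  CHECK(scratch.IsXRegister() || scratch.IsWRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    if (scratch.IsWRegister()) {
      // Word copy through a genuine W scratch register.
      LoadWFromOffset(kLoadWord, scratch.AsWRegister(), src.AsXRegister(),
                      src_offset.Int32Value());
      StoreWToOffset(kStoreWord, scratch.AsWRegister(), dest.AsXRegister(),
                     dest_offset.Int32Value());
    } else {
      // Word copy through the overlapping W view of an X scratch.
      LoadWFromOffset(kLoadWord, scratch.AsOverlappingWRegister(), src.AsXRegister(),
                      src_offset.Int32Value());
      StoreWToOffset(kStoreWord, scratch.AsOverlappingWRegister(), dest.AsXRegister(),
                     dest_offset.Int32Value());
    }
  } else {
    // Double-word copy through the X scratch.
    LoadFromOffset(scratch.AsXRegister(), src.AsXRegister(), src_offset.Int32Value());
    StoreToOffset(scratch.AsXRegister(), dest.AsXRegister(), dest_offset.Int32Value());
  }
}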