Searched refs:mem_op (Results 1 – 2 of 2) sorted by relevance
/art/compiler/optimizing/
D | common_arm64.h (all matches in OperandFromMemOperand(); line 228 matches an argument)
    228      const vixl::aarch64::MemOperand& mem_op) {
    229    if (mem_op.IsImmediateOffset()) {
    230      return vixl::aarch64::Operand(mem_op.GetOffset());
    232    DCHECK(mem_op.IsRegisterOffset());
    233    if (mem_op.GetExtend() != vixl::aarch64::NO_EXTEND) {
    234      return vixl::aarch64::Operand(mem_op.GetRegisterOffset(),
    235                                    mem_op.GetExtend(),
    236                                    mem_op.GetShiftAmount());
    237    } else if (mem_op.GetShift() != vixl::aarch64::NO_SHIFT) {
    238      return vixl::aarch64::Operand(mem_op.GetRegisterOffset(),
    [all …]
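OperandFromMemOperand() converts a VIXL memory operand back into a plain operand so the same offset can be reused outside an addressing context: an immediate offset becomes an immediate operand, while a register offset keeps its extend or shift decoration. Below is a minimal, self-contained sketch of that branch structure; the MyOperand/MyMemOperand stand-in types are hypothetical simplifications, since the real helper operates on vixl::aarch64::MemOperand and vixl::aarch64::Operand.

    #include <cassert>
    #include <cstdint>

    // Stand-ins for the vixl::aarch64 extend/shift modes (hypothetical, simplified).
    enum class Extend { kNone, kUxtw, kSxtw };
    enum class Shift { kNone, kLsl };

    // Simplified operand: either an immediate or a (possibly extended or
    // shifted) register, mirroring what vixl::aarch64::Operand can hold.
    struct MyOperand {
      int64_t imm = 0;
      int reg = -1;                  // register index; -1 means immediate form
      Extend extend = Extend::kNone;
      Shift shift = Shift::kNone;
      unsigned amount = 0;
    };

    // Simplified memory operand: a base register plus either an immediate
    // offset or a decorated register offset.
    struct MyMemOperand {
      int base = 0;
      int64_t offset = 0;
      int offset_reg = -1;           // -1 means the offset is an immediate
      Extend extend = Extend::kNone;
      Shift shift = Shift::kNone;
      unsigned amount = 0;

      bool IsImmediateOffset() const { return offset_reg == -1; }
      bool IsRegisterOffset() const { return offset_reg != -1; }
    };

    // Mirrors the branch structure of OperandFromMemOperand() above.
    MyOperand OperandFromMemOperand(const MyMemOperand& mem_op) {
      MyOperand op;
      if (mem_op.IsImmediateOffset()) {
        op.imm = mem_op.offset;      // immediate offset -> immediate operand
        return op;
      }
      assert(mem_op.IsRegisterOffset());
      op.reg = mem_op.offset_reg;
      op.amount = mem_op.amount;
      if (mem_op.extend != Extend::kNone) {
        op.extend = mem_op.extend;   // register offset with an extend mode
      } else {
        op.shift = mem_op.shift;     // otherwise forward the shift mode
      }
      return op;
    }

The sketch simply forwards a bare register offset; the [all …] marker above truncates the real helper before its remaining branches, so their exact handling is not shown here.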
D | intrinsics_arm64.cc
    731    MemOperand mem_op(base.X(), offset);                      (in GenUnsafeGet(), local)
    733      codegen->LoadAcquire(invoke, type, trg, mem_op, /* needs_null_check= */ true);
    735      codegen->Load(type, trg, mem_op);
    966    MemOperand mem_op(base.X(), offset);                      (in GenUnsafePut(), local)
    982      codegen->StoreRelease(invoke, type, source, mem_op, /* needs_null_check= */ false);
    984      codegen->Store(type, source, mem_op);
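In both intrinsics, a flag selects the memory ordering of the access: a volatile Unsafe get lowers to a load-acquire (LDAR on AArch64, via LoadAcquire()) and a volatile put to a store-release (STLR, via StoreRelease()), while non-volatile accesses use plain loads and stores. Here is a minimal sketch of the same selection in portable C++, using std::atomic in place of the codegen helpers; UnsafeGet/UnsafePut are hypothetical names, and the invoke/needs_null_check plumbing is omitted.

    #include <atomic>
    #include <cstdint>

    // Volatile get -> load-acquire (LDAR on AArch64); plain get -> ordinary load.
    int32_t UnsafeGet(const std::atomic<int32_t>* field, bool is_volatile) {
      return is_volatile ? field->load(std::memory_order_acquire)
                         : field->load(std::memory_order_relaxed);
    }

    // Volatile put -> store-release (STLR on AArch64); plain put -> ordinary store.
    void UnsafePut(std::atomic<int32_t>* field, int32_t value, bool is_volatile) {
      field->store(value, is_volatile ? std::memory_order_release
                                      : std::memory_order_relaxed);
    }

The acquire/release pairing is what gives a Java volatile field its happens-before guarantee on ARM64: writes made before the release store become visible to any thread whose acquire load observes the stored value.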