/art/compiler/utils/ |
D | assembler_thumb_test.cc |
    436  __ ldr(R3, Address(R4, 24));  in TEST()
    437  __ ldrb(R3, Address(R4, 24));  in TEST()
    438  __ ldrh(R3, Address(R4, 24));  in TEST()
    439  __ ldrsb(R3, Address(R4, 24));  in TEST()
    440  __ ldrsh(R3, Address(R4, 24));  in TEST()
    442  __ ldr(R3, Address(SP, 24));  in TEST()
    445  __ ldr(R8, Address(R4, 24));  in TEST()
    446  __ ldrb(R8, Address(R4, 24));  in TEST()
    447  __ ldrh(R8, Address(R4, 24));  in TEST()
    448  __ ldrsb(R8, Address(R4, 24));  in TEST()
    [all …]
|
/art/compiler/utils/x86/ |
D | assembler_x86.h |
    136  class Address : public Operand {
    138  Address(Register base, int32_t disp) {  in Address() function
    142  Address(Register base, Offset disp) {  in Address() function
    146  Address(Register base, FrameOffset disp) {  in Address() function
    151  Address(Register base, MemberOffset disp) {  in Address() function
    171  Address(Register index, ScaleFactor scale, int32_t disp) {  in Address() function
    178  Address(Register base, Register index, ScaleFactor scale, int32_t disp) {  in Address() function
    194  static Address Absolute(uword addr) {  in Absolute()
    195  Address result;  in Absolute()
    201  static Address Absolute(ThreadOffset<4> addr) {  in Absolute()
    [all …]
|
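The Address overloads listed above for assembler_x86.h all describe the same x86 operand form: a base register, an optional scaled index, and a signed displacement. A minimal, self-contained sketch of that arithmetic follows; EffectiveAddress, Resolve() and the TIMES_* values are illustrative names chosen for the example, not the ART class itself.

    // Illustrative sketch, not the ART class: models the operand forms the
    // Address constructors above cover -- base+disp, index*scale+disp, and
    // base+index*scale+disp -- as plain effective-address arithmetic.
    #include <cstdint>
    #include <cstdio>

    enum ScaleFactor { TIMES_1 = 0, TIMES_2 = 1, TIMES_4 = 2, TIMES_8 = 3 };  // log2 of the scale

    struct EffectiveAddress {
      uint64_t base;       // value held in the base register, if any
      uint64_t index;      // value held in the index register, if any
      ScaleFactor scale;   // TIMES_4 means "index * 4"
      int32_t disp;        // signed displacement

      uint64_t Resolve() const {
        // base + index * 2^scale + disp: the address the CPU would access.
        return base + (index << scale) + static_cast<int64_t>(disp);
      }
    };

    int main() {
      // Rough analogue of Address(base, index, TIMES_4, 24) with base=0x1000, index=3.
      EffectiveAddress ea{0x1000, 3, TIMES_4, 24};
      std::printf("0x%llx\n", static_cast<unsigned long long>(ea.Resolve()));  // prints 0x1024
      return 0;
    }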
D | assembler_x86.cc |
    42  void X86Assembler::call(const Address& address) {  in call()
    73  void X86Assembler::pushl(const Address& address) {  in pushl()
    98  void X86Assembler::popl(const Address& address) {  in popl()
    119  void X86Assembler::movl(Register dst, const Address& src) {  in movl()
    126  void X86Assembler::movl(const Address& dst, Register src) {  in movl()
    133  void X86Assembler::movl(const Address& dst, const Immediate& imm) {  in movl()
    140  void X86Assembler::movl(const Address& dst, Label* lbl) {  in movl()
    155  void X86Assembler::movzxb(Register dst, const Address& src) {  in movzxb()
    171  void X86Assembler::movsxb(Register dst, const Address& src) {  in movsxb()
    179  void X86Assembler::movb(Register /*dst*/, const Address& /*src*/) {  in movb() argument
    [all …]
|
/art/compiler/utils/x86_64/ |
D | assembler_x86_64.h |
    164  class Address : public Operand {
    166  Address(CpuRegister base, int32_t disp) {  in Address() function
    170  Address(CpuRegister base, Offset disp) {  in Address() function
    174  Address(CpuRegister base, FrameOffset disp) {  in Address() function
    179  Address(CpuRegister base, MemberOffset disp) {  in Address() function
    205  Address(CpuRegister index, ScaleFactor scale, int32_t disp) {  in Address() function
    212  Address(CpuRegister base, CpuRegister index, ScaleFactor scale, int32_t disp) {  in Address() function
    229  static Address Absolute(uword addr, bool no_rip = false) {
    230  Address result;
    243  static Address Absolute(ThreadOffset<8> addr, bool no_rip = false) {
    [all …]
|
D | assembler_x86_64.cc |
    47  void X86_64Assembler::call(const Address& address) {  in call()
    69  void X86_64Assembler::pushq(const Address& address) {  in pushq()
    97  void X86_64Assembler::popq(const Address& address) {  in popq()
    146  void X86_64Assembler::movq(CpuRegister dst, const Address& src) {  in movq()
    154  void X86_64Assembler::movl(CpuRegister dst, const Address& src) {  in movl()
    162  void X86_64Assembler::movq(const Address& dst, CpuRegister src) {  in movq()
    170  void X86_64Assembler::movl(const Address& dst, CpuRegister src) {  in movl()
    177  void X86_64Assembler::movl(const Address& dst, const Immediate& imm) {  in movl()
    194  void X86_64Assembler::movzxb(CpuRegister dst, const Address& src) {  in movzxb()
    212  void X86_64Assembler::movsxb(CpuRegister dst, const Address& src) {  in movsxb()
    [all …]
|
D | assembler_x86_64_test.cc |
    131  GetAssembler()->movl(x86_64::CpuRegister(x86_64::RAX), x86_64::Address(  in TEST_F()
    133  GetAssembler()->movl(x86_64::CpuRegister(x86_64::RAX), x86_64::Address(  in TEST_F()
    135  GetAssembler()->movl(x86_64::CpuRegister(x86_64::R8), x86_64::Address(  in TEST_F()
    148  GetAssembler()->movw(x86_64::Address(x86_64::CpuRegister(x86_64::RAX), 0),  in TEST_F()
|
/art/compiler/optimizing/ |
D | code_generator_x86.cc |
    68  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowNullPointer)));  in EmitNativeCode()
    85  __ fs()->jmp(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowStackOverflow)));  in EmitNativeCode()
    105  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowArrayBounds)));  in EmitNativeCode()
    246  __ testl(EAX, Address(ESP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86))));  in GenerateFrameEntry()
    257  __ fs()->cmpl(ESP, Address::Absolute(Thread::StackEndOffset<kX86WordSize>()));  in GenerateFrameEntry()
    261  __ movl(Address(ESP, kCurrentMethodStackOffset), EAX);  in GenerateFrameEntry()
    273  __ movl(reg, Address(ESP, kCurrentMethodStackOffset));  in LoadCurrentMethod()
    352  __ movl(destination.AsX86().AsCpuRegister(), Address(ESP, source.GetStackIndex()));  in Move32()
    356  __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsCpuRegister());  in Move32()
    359  __ pushl(Address(ESP, source.GetStackIndex()));  in Move32()
    [all …]
|
D | code_generator_x86_64.cc |
    73  Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pThrowNullPointer), true));  in EmitNativeCode()
    91  Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pThrowStackOverflow), true));  in EmitNativeCode()
    111  __ gs()->call(Address::Absolute(  in EmitNativeCode()
    224  Address::Absolute(Thread::StackEndOffset<kX86_64WordSize>(), true));  in GenerateFrameEntry()
    227  __ testq(CpuRegister(RAX), Address(  in GenerateFrameEntry()
    232  __ movl(Address(CpuRegister(RSP), kCurrentMethodStackOffset), CpuRegister(RDI));  in GenerateFrameEntry()
    245  __ movl(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));  in LoadCurrentMethod()
    282  …__ movl(destination.AsX86_64().AsCpuRegister(), Address(CpuRegister(RSP), source.GetStackIndex()));  in Move()
    285  …__ movq(destination.AsX86_64().AsCpuRegister(), Address(CpuRegister(RSP), source.GetStackIndex()));  in Move()
    289  …__ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), source.AsX86_64().AsCpuRegister());  in Move()
    [all …]
|
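The fs()->call(Address::Absolute(...)) and gs()->call(...) lines above reach quick entrypoints stored at fixed offsets inside the current thread's Thread object through a segment register. A rough, self-contained analogy of that pattern follows; ThreadEntrypoints, tls_entrypoints and CallAtThreadOffset are invented names for the example, and a C++ thread_local block stands in for the fs/gs-based thread pointer.

    // Conceptual analogy only (hypothetical names, not the ART Thread layout):
    // "call through Address::Absolute(thread_offset)" behaves like loading a
    // function pointer at a fixed byte offset from the start of a per-thread
    // block and calling it.
    #include <cstddef>
    #include <cstdio>

    struct ThreadEntrypoints {                 // stand-in for the per-thread struct
      void (*pThrowNullPointer)();
      void (*pThrowStackOverflow)();
    };

    thread_local ThreadEntrypoints tls_entrypoints = {
        [] { std::puts("would throw NullPointerException"); },
        [] { std::puts("would throw StackOverflowError"); },
    };

    // Analogue of fs()->call(Address::Absolute(offset)): fetch the function
    // pointer stored 'offset' bytes into the current thread's block and call it.
    void CallAtThreadOffset(std::size_t offset) {
      char* base = reinterpret_cast<char*>(&tls_entrypoints);
      void (*fn)() = *reinterpret_cast<void (**)()>(base + offset);
      fn();
    }

    int main() {
      CallAtThreadOffset(offsetof(ThreadEntrypoints, pThrowNullPointer));
      CallAtThreadOffset(offsetof(ThreadEntrypoints, pThrowStackOverflow));
      return 0;
    }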
D | code_generator_arm.cc |
    69  __ ldr(LR, Address(TR, offset));  in EmitNativeCode()
    107  __ ldr(LR, Address(TR, offset));  in EmitNativeCode()
    279  __ ldr(IP, Address(IP, 0));  in GenerateFrameEntry()
    289  __ str(R0, Address(SP, 0));  in GenerateFrameEntry()
    376  __ ldr(destination.AsArm().AsCoreRegister(), Address(SP, source.GetStackIndex()));  in Move32()
    381  __ str(source.AsArm().AsCoreRegister(), Address(SP, destination.GetStackIndex()));  in Move32()
    383  __ ldr(IP, Address(SP, source.GetStackIndex()));  in Move32()
    384  __ str(IP, Address(SP, destination.GetStackIndex()));  in Move32()
    403  Address(SP, calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize()));  in Move64()
    407  __ ldr(R1, Address(SP, source.GetStackIndex()));  in Move64()
    [all …]
|
/art/compiler/utils/arm/ |
D | assembler_arm32.cc |
    211  void Arm32Assembler::ldr(Register rd, const Address& ad, Condition cond) {  in ldr()
    216  void Arm32Assembler::str(Register rd, const Address& ad, Condition cond) {  in str()
    221  void Arm32Assembler::ldrb(Register rd, const Address& ad, Condition cond) {  in ldrb()
    226  void Arm32Assembler::strb(Register rd, const Address& ad, Condition cond) {  in strb()
    231  void Arm32Assembler::ldrh(Register rd, const Address& ad, Condition cond) {  in ldrh()
    236  void Arm32Assembler::strh(Register rd, const Address& ad, Condition cond) {  in strh()
    241  void Arm32Assembler::ldrsb(Register rd, const Address& ad, Condition cond) {  in ldrsb()
    246  void Arm32Assembler::ldrsh(Register rd, const Address& ad, Condition cond) {  in ldrsh()
    251  void Arm32Assembler::ldrd(Register rd, const Address& ad, Condition cond) {  in ldrd()
    257  void Arm32Assembler::strd(Register rd, const Address& ad, Condition cond) {  in strd()
    [all …]
|
D | assembler_thumb2.cc |
    255  void Thumb2Assembler::ldr(Register rd, const Address& ad, Condition cond) {  in ldr()
    260  void Thumb2Assembler::str(Register rd, const Address& ad, Condition cond) {  in str()
    265  void Thumb2Assembler::ldrb(Register rd, const Address& ad, Condition cond) {  in ldrb()
    270  void Thumb2Assembler::strb(Register rd, const Address& ad, Condition cond) {  in strb()
    275  void Thumb2Assembler::ldrh(Register rd, const Address& ad, Condition cond) {  in ldrh()
    280  void Thumb2Assembler::strh(Register rd, const Address& ad, Condition cond) {  in strh()
    285  void Thumb2Assembler::ldrsb(Register rd, const Address& ad, Condition cond) {  in ldrsb()
    290  void Thumb2Assembler::ldrsh(Register rd, const Address& ad, Condition cond) {  in ldrsh()
    295  void Thumb2Assembler::ldrd(Register rd, const Address& ad, Condition cond) {  in ldrd()
    306  void Thumb2Assembler::strd(Register rd, const Address& ad, Condition cond) {  in strd()
    [all …]
|
D | assembler_arm32.h |
    100  void ldr(Register rd, const Address& ad, Condition cond = AL) OVERRIDE;
    101  void str(Register rd, const Address& ad, Condition cond = AL) OVERRIDE;
    103  void ldrb(Register rd, const Address& ad, Condition cond = AL) OVERRIDE;
    104  void strb(Register rd, const Address& ad, Condition cond = AL) OVERRIDE;
    106  void ldrh(Register rd, const Address& ad, Condition cond = AL) OVERRIDE;
    107  void strh(Register rd, const Address& ad, Condition cond = AL) OVERRIDE;
    109  void ldrsb(Register rd, const Address& ad, Condition cond = AL) OVERRIDE;
    110  void ldrsh(Register rd, const Address& ad, Condition cond = AL) OVERRIDE;
    112  void ldrd(Register rd, const Address& ad, Condition cond = AL) OVERRIDE;
    113  void strd(Register rd, const Address& ad, Condition cond = AL) OVERRIDE;
    [all …]
|
D | assembler_arm.h |
    183  class Address {
    197  Address(Register rn, int32_t offset = 0, Mode am = Offset) : rn_(rn), rm_(R0),  in rn_()
    202  Address(Register rn, Register rm, Mode am = Offset) : rn_(rn), rm_(rm), offset_(0),  in rn_()
    207  Address(Register rn, Register rm, Shift shift, uint32_t count, Mode am = Offset) :
    214  explicit Address(int32_t offset) :  in Address() function
    420  virtual void ldr(Register rd, const Address& ad, Condition cond = AL) = 0;
    421  virtual void str(Register rd, const Address& ad, Condition cond = AL) = 0;
    423  virtual void ldrb(Register rd, const Address& ad, Condition cond = AL) = 0;
    424  virtual void strb(Register rd, const Address& ad, Condition cond = AL) = 0;
    426  virtual void ldrh(Register rd, const Address& ad, Condition cond = AL) = 0;
    [all …]
|
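The ARM Address constructors above pair a base register rn with either an immediate or a register offset rm and an addressing Mode (the `Mode am = Offset` default). A small self-contained model of how the Offset, pre-indexed and post-indexed modes resolve an address and write back the base is sketched below; Mode, FakeAddress and Resolve() are illustrative names, not the ART types.

    // Hypothetical model, not the ART class: mimics how Offset, pre-indexed and
    // post-indexed ARM addressing resolve an address and (for the indexed modes)
    // write the updated value back to the base register.
    #include <cstdint>
    #include <cstdio>

    enum class Mode { Offset, PreIndex, PostIndex };

    struct FakeAddress {
      uint32_t* rn;      // base register, by pointer so writeback is observable
      int32_t offset;
      Mode am;

      uint32_t Resolve() const {
        switch (am) {
          case Mode::Offset:                      // use rn + offset, no writeback
            return *rn + offset;
          case Mode::PreIndex:                    // update rn first, then use it
            *rn += offset;
            return *rn;
          case Mode::PostIndex: {                 // use rn, then update it
            uint32_t addr = *rn;
            *rn += offset;
            return addr;
          }
        }
        return 0;
      }
    };

    int main() {
      uint32_t sp = 0x1000;
      uint32_t a = FakeAddress{&sp, 8, Mode::Offset}.Resolve();
      std::printf("offset:     addr=%#x sp=%#x\n", a, sp);   // addr=0x1008 sp=0x1000
      a = FakeAddress{&sp, 8, Mode::PreIndex}.Resolve();
      std::printf("pre-index:  addr=%#x sp=%#x\n", a, sp);   // addr=0x1008 sp=0x1008
      a = FakeAddress{&sp, 8, Mode::PostIndex}.Resolve();
      std::printf("post-index: addr=%#x sp=%#x\n", a, sp);   // addr=0x1008 sp=0x1010
      return 0;
    }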
D | assembler_thumb2.h |
    122  void ldr(Register rd, const Address& ad, Condition cond = AL) OVERRIDE;
    123  void str(Register rd, const Address& ad, Condition cond = AL) OVERRIDE;
    125  void ldrb(Register rd, const Address& ad, Condition cond = AL) OVERRIDE;
    126  void strb(Register rd, const Address& ad, Condition cond = AL) OVERRIDE;
    128  void ldrh(Register rd, const Address& ad, Condition cond = AL) OVERRIDE;
    129  void strh(Register rd, const Address& ad, Condition cond = AL) OVERRIDE;
    131  void ldrsb(Register rd, const Address& ad, Condition cond = AL) OVERRIDE;
    132  void ldrsh(Register rd, const Address& ad, Condition cond = AL) OVERRIDE;
    134  void ldrd(Register rd, const Address& ad, Condition cond = AL) OVERRIDE;
    135  void strd(Register rd, const Address& ad, Condition cond = AL) OVERRIDE;
    [all …]
|
D | assembler_arm.cc |
    185  uint32_t Address::encodingArm() const {  in encodingArm()
    208  uint32_t Address::encodingThumb(bool is_32bit) const {  in encodingThumb()
    265  uint32_t Address::encodingThumbLdrdStrd() const {  in encodingThumbLdrdStrd()
    288  uint32_t Address::encoding3() const {  in encoding3()
    297  uint32_t Address::vencoding() const {  in vencoding()
    312  bool Address::CanHoldLoadOffsetArm(LoadOperandType type, int offset) {  in CanHoldLoadOffsetArm()
    332  bool Address::CanHoldStoreOffsetArm(StoreOperandType type, int offset) {  in CanHoldStoreOffsetArm()
    349  bool Address::CanHoldLoadOffsetThumb(LoadOperandType type, int offset) {  in CanHoldLoadOffsetThumb()
    369  bool Address::CanHoldStoreOffsetThumb(StoreOperandType type, int offset) {  in CanHoldStoreOffsetThumb()
|
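The CanHoldLoadOffset*/CanHoldStoreOffset* predicates above decide whether an immediate offset fits the immediate field of the chosen load/store encoding before an Address is emitted. A simplified illustration of that kind of range check follows; the helper names and the restriction to two A32 cases are assumptions for the example, while the real predicates also branch on the operand type and on ARM vs. Thumb encodings.

    // Simplified illustration (assumed helpers, not the ART logic): classic ARM
    // (A32) word/byte loads take a 12-bit immediate offset, while halfword and
    // signed-byte loads only take an 8-bit one, so the assembler must check the
    // range before choosing an encoding.
    #include <cstdint>
    #include <cstdio>
    #include <cstdlib>

    bool CanHoldWordOffsetA32(int32_t offset)     { return std::abs(offset) <= 4095; }  // LDR/STR/LDRB/STRB
    bool CanHoldHalfwordOffsetA32(int32_t offset) { return std::abs(offset) <= 255; }   // LDRH/LDRSB/LDRSH

    int main() {
      std::printf("%d %d\n", CanHoldWordOffsetA32(1024), CanHoldHalfwordOffsetA32(1024));  // prints "1 0"
      return 0;
    }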
/art/runtime/base/ |
D | mutex.cc |
    330  if (futex(state_.Address(), FUTEX_WAIT, 1, NULL, NULL, 0) != 0) {  in ExclusiveLock()
    423  futex(state_.Address(), FUTEX_WAKE, 1, NULL, NULL, 0);  in ExclusiveUnlock()
    506  if (futex(state_.Address(), FUTEX_WAIT, cur_state, NULL, NULL, 0) != 0) {  in ExclusiveLock()
    547  futex(state_.Address(), FUTEX_WAKE, -1, NULL, NULL, 0);  in ExclusiveUnlock()
    582  if (futex(state_.Address(), FUTEX_WAIT, cur_state, &rel_ts, NULL, 0) != 0) {  in ExclusiveLockWithTimeout()
    724  done = futex(sequence_.Address(), FUTEX_CMP_REQUEUE, 0,  in Broadcast()
    726  guard_.state_.Address(), cur_sequence) != -1;  in Broadcast()
    747  int num_woken = futex(sequence_.Address(), FUTEX_WAKE, 1, NULL, NULL, 0);  in Signal()
    772  if (futex(sequence_.Address(), FUTEX_WAIT, cur_sequence, NULL, NULL, 0) != 0) {  in WaitHoldingLocks()
    810  if (futex(sequence_.Address(), FUTEX_WAIT, cur_sequence, &rel_ts, NULL, 0) != 0) {  in TimedWait()
|
D | mutex-inl.h |
    164  if (futex(state_.Address(), FUTEX_WAIT, cur_state, NULL, NULL, 0) != 0) {  in SharedLock()
    199  futex(state_.Address(), FUTEX_WAKE, -1, NULL, NULL, 0);  in SharedUnlock()
|
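In the mutex lines above, state_.Address() hands the futex syscall the raw address of the 32-bit lock word so that contended threads can sleep on it and unlockers can wake them. A minimal Linux-only sketch of that pattern follows; FutexMutex and the three-argument futex() wrapper are invented for the example, and the real ART Mutex additionally tracks contention, recursion and timeouts.

    // Minimal Linux-only sketch of the futex pattern above (not the ART Mutex):
    // CAS a 32-bit state word, FUTEX_WAIT on it while contended, FUTEX_WAKE on unlock.
    #include <atomic>
    #include <linux/futex.h>
    #include <sys/syscall.h>
    #include <unistd.h>

    static long futex(std::atomic<int32_t>* uaddr, int op, int val) {
      // Raw syscall wrapper; timeout, uaddr2 and val3 are unused in this sketch.
      return syscall(SYS_futex, reinterpret_cast<int32_t*>(uaddr), op, val, nullptr, nullptr, 0);
    }

    class FutexMutex {
     public:
      void Lock() {
        int32_t expected = 0;
        while (!state_.compare_exchange_weak(expected, 1, std::memory_order_acquire)) {
          // Contended: sleep until the word stops looking locked. The kernel rechecks
          // that *uaddr == 1 before sleeping, so a racing Unlock cannot be lost.
          futex(&state_, FUTEX_WAIT, 1);
          expected = 0;
        }
      }

      void Unlock() {
        state_.store(0, std::memory_order_release);
        futex(&state_, FUTEX_WAKE, 1);  // wake at most one waiter
      }

     private:
      std::atomic<int32_t> state_{0};  // 0 = free, 1 = held (cf. state_.Address() above)
    };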
/art/compiler/trampolines/ |
D | trampoline_compiler.cc |
    130  __ fs()->jmp(Address::Absolute(offset));  in CreateTrampoline()
    148  __ gs()->jmp(x86_64::Address::Absolute(offset, true));  in CreateTrampoline()
|
/art/runtime/ |
D | atomic.h |
    285  volatile T* Address() {  in PACKED()
|