/art/compiler/utils/arm/

assembler_arm32.cc
  1372  CHECK(rn != IP);  in AddConstant()
  1374  mvn(IP, shifter_op, cond);  in AddConstant()
  1375  add(rd, rn, ShifterOperand(IP), cond);  in AddConstant()
  1377  mvn(IP, shifter_op, cond);  in AddConstant()
  1378  sub(rd, rn, ShifterOperand(IP), cond);  in AddConstant()
  1380  movw(IP, Low16Bits(value), cond);  in AddConstant()
  1383  movt(IP, value_high, cond);  in AddConstant()
  1385  add(rd, rn, ShifterOperand(IP), cond);  in AddConstant()
  1399  CHECK(rn != IP);  in AddConstantSetFlags()
  1401  mvn(IP, shifter_op, cond);  in AddConstantSetFlags()
  [all …]

assembler_thumb2.cc
  2477  CHECK(rn != IP);  in AddConstant()
  2479  mvn(IP, shifter_op, cond);  in AddConstant()
  2480  add(rd, rn, ShifterOperand(IP), cond);  in AddConstant()
  2482  mvn(IP, shifter_op, cond);  in AddConstant()
  2483  sub(rd, rn, ShifterOperand(IP), cond);  in AddConstant()
  2485  movw(IP, Low16Bits(value), cond);  in AddConstant()
  2488  movt(IP, value_high, cond);  in AddConstant()
  2490  add(rd, rn, ShifterOperand(IP), cond);  in AddConstant()
  2504  CHECK(rn != IP);  in AddConstantSetFlags()
  2506  mvn(IP, shifter_op, cond);  in AddConstantSetFlags()
  [all …]
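The AddConstant() matches in both assemblers follow the same fallback chain: if the immediate cannot be encoded directly, its bitwise complement is materialized into IP with mvn, or the value is built with a movw/movt pair, and the final add/sub then takes IP as its second operand. Below is a standalone sketch of that selection logic that prints the instructions it would emit; the helper names and the A32 modified-immediate test are illustrative assumptions (Thumb2 uses a different immediate encoding), not ART's ShifterOperand API.

    #include <cassert>
    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // Rotate left; used to test the A32 "modified immediate" rule (an 8-bit
    // value rotated right by an even amount). Illustrative helper, not ART code.
    static uint32_t RotateLeft(uint32_t v, unsigned n) {
      n &= 31u;
      return n == 0u ? v : (v << n) | (v >> (32u - n));
    }

    static bool FitsShifterImmediate(uint32_t value) {
      for (unsigned rot = 0; rot < 32; rot += 2) {
        if (RotateLeft(value, rot) <= 0xFFu) return true;
      }
      return false;
    }

    // Models AddConstant(rd, rn, value): rd = rn + value, with IP as scratch.
    static void AddConstant(const char* rd, const char* rn, int32_t value) {
      assert(std::strcmp(rn, "ip") != 0);  // mirrors CHECK(rn != IP)
      const uint32_t v = static_cast<uint32_t>(value);
      const uint32_t neg = 0u - v;  // two's-complement negation, no signed overflow
      if (FitsShifterImmediate(v)) {
        std::printf("add %s, %s, #%u\n", rd, rn, static_cast<unsigned>(v));
      } else if (FitsShifterImmediate(neg)) {
        std::printf("sub %s, %s, #%u\n", rd, rn, static_cast<unsigned>(neg));
      } else if (FitsShifterImmediate(~v)) {
        std::printf("mvn ip, #%u\n", static_cast<unsigned>(~v));   // IP = value
        std::printf("add %s, %s, ip\n", rd, rn);
      } else if (FitsShifterImmediate(~neg)) {
        std::printf("mvn ip, #%u\n", static_cast<unsigned>(~neg)); // IP = -value
        std::printf("sub %s, %s, ip\n", rd, rn);
      } else {
        std::printf("movw ip, #%u\n", static_cast<unsigned>(v & 0xFFFFu));  // Low16Bits(value)
        if ((v >> 16) != 0u) {
          std::printf("movt ip, #%u\n", static_cast<unsigned>(v >> 16));    // high half only if non-zero
        }
        std::printf("add %s, %s, ip\n", rd, rn);
      }
    }

    int main() {
      AddConstant("r0", "r1", 42);          // fits directly: add r0, r1, #42
      AddConstant("r2", "r3", 0x12345678);  // falls back to movw/movt + add
      return 0;
    }

The CHECK(rn != IP) in both files exists precisely because IP is the scratch register of this sequence: if the base register were IP, the mvn/movw would clobber it before the add/sub could read it.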
assembler_thumb2_test.cc
  256  __ StoreToOffset(type, arm::IP, arm::SP, offset);  in TEST_F()
  257  __ StoreToOffset(type, arm::IP, arm::R5, offset);  in TEST_F()
  272  __ StoreToOffset(type, arm::IP, arm::SP, offset);  in TEST_F()
  273  __ StoreToOffset(type, arm::IP, arm::R5, offset);  in TEST_F()
assembler_arm.h
  551  LoadImmediate(IP, int_value, cond);
  552  vmovsr(sd, IP, cond);
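The assembler_arm.h hit is the single-precision immediate path: a float constant that cannot be loaded directly has its raw bit pattern placed in IP and then transferred to the S register with vmovsr. A minimal standalone sketch of that idea follows; the function name and the movw/movt expansion of LoadImmediate are assumptions, not the exact ART code.

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // Prints the instruction sequence for loading an arbitrary float constant
    // into s<sd> by bouncing its bit pattern through IP. Illustrative only.
    static void LoadSImmediateViaIP(int sd, float value) {
      uint32_t bits;
      std::memcpy(&bits, &value, sizeof(bits));  // bit_cast<uint32_t>(value)
      std::printf("movw ip, #0x%04x\n", static_cast<unsigned>(bits & 0xFFFFu));  // LoadImmediate(IP, int_value) ...
      std::printf("movt ip, #0x%04x\n", static_cast<unsigned>(bits >> 16));      // ... shown here as movw/movt
      std::printf("vmov s%d, ip\n", sd);                                         // vmovsr(sd, IP)
    }

    int main() {
      LoadSImmediateViaIP(0, 3.5f);
      return 0;
    }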
/art/compiler/optimizing/

code_generator_arm.cc
  463  blocked_core_registers_[IP] = true;  in SetupBlockedRegisters()
  536  __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));  in GenerateFrameEntry()
  537  __ LoadFromOffset(kLoadWord, IP, IP, 0);  in GenerateFrameEntry()
  739  __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());  in Move32()
  740  __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());  in Move32()
  817  __ LoadImmediate(IP, value);  in Move()
  818  __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());  in Move()
  828  __ LoadImmediate(IP, Low32Bits(value));  in Move()
  829  __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());  in Move()
  830  __ LoadImmediate(IP, High32Bits(value));  in Move()
  [all …]
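The optimizing code generator marks IP as blocked in SetupBlockedRegisters() so the register allocator never hands it out, which keeps it permanently free as a scratch register. Two of the uses above, the frame-entry stack overflow probe (lines 536-537) and the stack-slot to stack-slot word move (lines 739-740), are sketched below as a standalone program that prints the instructions; the 8 KiB guard size is an assumption standing in for GetStackOverflowReservedBytes(kArm).

    #include <cstdint>
    #include <cstdio>

    // Assumed guard size; the real value comes from GetStackOverflowReservedBytes(kArm).
    constexpr int32_t kStackOverflowReservedBytes = 8 * 1024;

    // Frame-entry probe: point IP below SP by the guard size and load through
    // it; if SP is already inside the guard region the load faults and the
    // runtime converts the fault into a StackOverflowError.
    static void EmitStackOverflowProbe() {
      std::printf("sub ip, sp, #%d\n", kStackOverflowReservedBytes);  // AddConstant(IP, SP, -reserved)
      std::printf("ldr ip, [ip]\n");                                  // LoadFromOffset(kLoadWord, IP, IP, 0)
    }

    // Stack-slot to stack-slot move: no core register is allocated to the
    // move, so the word is bounced through IP.
    static void EmitStackToStackMove(int32_t src_offset, int32_t dst_offset) {
      std::printf("ldr ip, [sp, #%d]\n", src_offset);
      std::printf("str ip, [sp, #%d]\n", dst_offset);
    }

    int main() {
      EmitStackOverflowProbe();
      EmitStackToStackMove(16, 24);
      return 0;
    }

The Move() hits follow the same pattern for constants: the immediate (or each 32-bit half of a 64-bit immediate) is materialized in IP and then stored to the destination stack slot.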
intrinsics_arm.cc
  495  __ add(IP, base, ShifterOperand(offset));  in GenUnsafeGet()
  498  __ ldrexd(trg_lo, trg_hi, IP);  in GenUnsafeGet()
  500  __ ldrd(trg_lo, Address(IP));  in GenUnsafeGet()
  637  __ add(IP, base, ShifterOperand(offset));  in GenUnsafePut()
  640  __ ldrexd(temp_lo, temp_hi, IP);  in GenUnsafePut()
  641  __ strexd(temp_lo, value_lo, value_hi, IP);  in GenUnsafePut()
  645  __ add(IP, base, ShifterOperand(offset));  in GenUnsafePut()
  646  __ strd(value_lo, Address(IP));  in GenUnsafePut()
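The Unsafe intrinsics use IP to fold base + offset into a single address register before the 64-bit pair instructions, and switch to ldrexd/strexd for volatile accesses so the 64-bit transfer is single-copy atomic. A sketch of the two lowering shapes, printed as text; the register assignments and the retry branch are illustrative, not copied from the generated code.

    #include <cstdio>

    // 64-bit Unsafe.get lowering: IP = base + offset, then either an exclusive
    // pair load (volatile) or a plain ldrd.
    static void EmitUnsafeGet64(bool is_volatile) {
      std::printf("add ip, r1, r2\n");            // __ add(IP, base, ShifterOperand(offset))
      if (is_volatile) {
        std::printf("ldrexd r0, r1, [ip]\n");     // __ ldrexd(trg_lo, trg_hi, IP)
      } else {
        std::printf("ldrd r0, r1, [ip]\n");       // __ ldrd(trg_lo, Address(IP))
      }
    }

    // 64-bit Unsafe.put lowering: the volatile case sits in an ldrexd/strexd
    // loop that retries until the exclusive store succeeds.
    static void EmitUnsafePut64(bool is_volatile) {
      std::printf("add ip, r1, r2\n");            // __ add(IP, base, ShifterOperand(offset))
      if (is_volatile) {
        std::printf(".Lretry:\n");
        std::printf("ldrexd r4, r5, [ip]\n");     // __ ldrexd(temp_lo, temp_hi, IP)
        std::printf("strexd r4, r2, r3, [ip]\n"); // __ strexd(temp_lo, value_lo, value_hi, IP)
        std::printf("cmp r4, #0\n");              // strexd result: 0 on success
        std::printf("bne .Lretry\n");
      } else {
        std::printf("strd r2, r3, [ip]\n");       // __ strd(value_lo, Address(IP))
      }
    }

    int main() {
      EmitUnsafeGet64(/*is_volatile=*/ true);
      EmitUnsafePut64(/*is_volatile=*/ false);
      return 0;
    }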
/art/runtime/arch/arm/

registers_arm.h
  45  IP = 12,  enumerator
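The enumerator at line 45 is the only definition among these hits: IP is simply ART's name for R12, the AAPCS intra-procedure-call scratch register. An abbreviated, illustrative rendering of such an enum follows; it is not the exact contents of registers_arm.h.

    // Abbreviated sketch, not the real header.
    enum Register {
      R0 = 0, R1 = 1, R2 = 2, R3 = 3,
      // ... R4-R11 elided ...
      IP = 12,   // R12: AAPCS intra-procedure-call scratch register
      SP = 13,   // R13: stack pointer
      LR = 14,   // R14: link register
      PC = 15,   // R15: program counter
    };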
/art/compiler/trampolines/

trampoline_compiler.cc
  41  __ LoadFromOffset(kLoadWord, IP, R0, JNIEnvExt::SelfOffset().Int32Value());  in CreateTrampoline()
  42  __ LoadFromOffset(kLoadWord, PC, IP, offset.Int32Value());  in CreateTrampoline()
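In this trampoline the incoming R0 holds a JNIEnv*, so reaching the real entrypoint takes two dependent loads: the Thread* is pulled out of the JNIEnvExt self field into IP, and the entrypoint slot of that Thread is then loaded straight into PC, which performs the tail jump. A sketch with placeholder offsets; the real values come from JNIEnvExt::SelfOffset() and the thread offset passed to CreateTrampoline().

    #include <cstdio>

    // Prints the two-instruction trampoline body (lines 41-42). The offsets
    // below are hypothetical placeholders, not ART's actual layout.
    static void EmitJniTrampoline(int self_offset, int entrypoint_offset) {
      std::printf("ldr ip, [r0, #%d]\n", self_offset);        // IP = JNIEnv->self (Thread*)
      std::printf("ldr pc, [ip, #%d]\n", entrypoint_offset);  // jump through the Thread entrypoint slot
    }

    int main() {
      EmitJniTrampoline(/*self_offset=*/ 12, /*entrypoint_offset=*/ 128);  // placeholder values
      return 0;
    }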
/art/compiler/jni/quick/arm/

calling_convention_arm.cc
  48  return ArmManagedRegister::FromCoreRegister(IP);  // R12  in InterproceduralScratchRegister()
  52  return ArmManagedRegister::FromCoreRegister(IP);  // R12  in InterproceduralScratchRegister()
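Both the managed and the JNI calling conventions report IP as their interprocedural scratch register for the same reason: the AAPCS designates R12 as the intra-procedure-call scratch register, so it never carries arguments or callee-saved state and may be clobbered freely between a call site and its target. A trivial standalone rendering of that choice; ART's real code returns an ArmManagedRegister wrapper rather than a bare enum value.

    #include <cstdio>

    enum Register { R12 = 12, IP = R12 };  // IP aliases R12, as in registers_arm.h

    // Both conventions hand back the same always-clobberable register.
    static Register InterproceduralScratchRegister() {
      return IP;
    }

    int main() {
      std::printf("scratch register: r%d\n", static_cast<int>(InterproceduralScratchRegister()));
      return 0;
    }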