/art/disassembler/
  disassembler_arm64.cc
    39: LR = 30   (enumerator)
    50: } else if (reg.code() == LR) {   (in AppendRegisterNameToOutput())

/art/runtime/arch/arm64/
  quick_method_frame_info_arm64.h
    33: (1 << art::arm64::LR);
    97: POPCOUNT(Arm64CalleeSaveCoreSpills(type) & (-(1 << LR))) * kArm64PointerSize;   (in Arm64CalleeSaveLrOffset())
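The -(1 << LR) in the Arm64CalleeSaveLrOffset() snippet is a compact mask of every bit at or above LR, so the POPCOUNT(...) term counts how many spilled core registers are numbered LR (x30) or higher. A minimal standalone sketch of that arithmetic, assuming only what the snippet shows; the example spill mask is made up for illustration:

    #include <cstdint>
    #include <cstdio>

    constexpr int LR = 30;  // x30 on arm64, as in registers_arm64.h below

    // -(1u << LR) == ~((1u << LR) - 1u): every bit at or above LR is set,
    // so this counts the spilled core registers numbered LR or higher.
    constexpr int SpillsAtOrAboveLr(uint32_t core_spills) {
      return __builtin_popcount(core_spills & -(1u << LR));
    }

    int main() {
      // Hypothetical mask spilling x29 (FP) and x30 (LR); only x30 is >= LR.
      constexpr uint32_t spills = (1u << 29) | (1u << 30);
      std::printf("%d\n", SpillsAtOrAboveLr(spills));  // prints 1
    }
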
  context_arm64.cc
    34: gprs_[LR] = &pc_;   (in Reset())
    37: pc_ = Arm64Context::kBadGprBase + LR;   (in Reset())
  context_arm64.h
    45: SetGPR(LR, new_lr);   (in SetPC())
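Taken together, the two context_arm64 entries above show a small aliasing trick: Reset() points the LR slot of the register-pointer table at the field holding the PC, so SetPC() can simply be written as SetGPR(LR, new_lr) and resuming through the context returns to that address. A stripped-down sketch of the pattern, with class layout and names simplified for illustration (this is not ART's real interface):

    #include <cstdint>

    constexpr int kNumGprs = 32;  // illustrative; just enough to index LR
    constexpr int LR = 30;        // x30, as in registers_arm64.h below

    // Simplified stand-in for the Arm64Context pattern shown above.
    class ContextSketch {
     public:
      void Reset() {
        for (uintptr_t*& slot : gprs_) slot = nullptr;
        gprs_[LR] = &pc_;  // alias: writes to "LR" land in pc_
        pc_ = 0;           // ART seeds this with a recognizable bad value instead
      }
      void SetGPR(int reg, uintptr_t value) {
        if (gprs_[reg] != nullptr) {
          *gprs_[reg] = value;
        }
      }
      // Setting the context's PC is just setting LR: restoring the context
      // resumes at whatever LR holds, so execution continues at pc_.
      void SetPC(uintptr_t new_pc) { SetGPR(LR, new_pc); }
      uintptr_t pc() const { return pc_; }

     private:
      uintptr_t* gprs_[kNumGprs] = {};
      uintptr_t pc_ = 0;
    };
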
  registers_arm64.h
    68: LR = X30,   (enumerator)

/art/runtime/arch/arm/
  quick_method_frame_info_arm.h
    29: (1 << art::arm::LR);
    88: POPCOUNT(ArmCalleeSaveCoreSpills(type) & (-(1 << LR))) * kArmPointerSize;   (in ArmCalleeSaveLrOffset())
  registers_arm.h
    47: LR = 14,   (enumerator)
  quick_entrypoints_arm.S
    448: ldr r14, [r0, #56]  @ (LR from gprs_ 56=4*14)
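The assembly comment spells out the arithmetic: on 32-bit ARM, LR is register 14 (registers_arm.h just above) and each gprs_ slot holds one 32-bit register, so the saved LR sits at byte offset 14 * 4 = 56. A compile-time restatement of that arithmetic (a sketch, not ART code):

    constexpr int LR = 14;               // r14 on 32-bit ARM (registers_arm.h above)
    constexpr int kBytesPerGprSlot = 4;  // one 32-bit register per gprs_ slot
    static_assert(LR * kBytesPerGprSlot == 56,
                  "matches the #56 in 'ldr r14, [r0, #56]' above");
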

/art/compiler/optimizing/
  intrinsics_arm.cc
    848: kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pStringCompareTo).Int32Value());   (in VisitStringCompareTo())
    849: __ blx(LR);   (in VisitStringCompareTo())
    893: __ LoadFromOffset(kLoadWord, LR, TR,   (in GenerateVisitStringIndexOf())
    895: __ blx(LR);   (in GenerateVisitStringIndexOf())
    964: kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromBytes).Int32Value());   (in VisitStringNewStringFromBytes())
    966: __ blx(LR);   (in VisitStringNewStringFromBytes())
    985: kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromChars).Int32Value());   (in VisitStringNewStringFromChars())
    987: __ blx(LR);   (in VisitStringNewStringFromChars())
    1010: LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromString).Int32Value());   (in VisitStringNewStringFromString())
    1012: __ blx(LR);   (in VisitStringNewStringFromString())
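Each of these intrinsics uses the same calling pattern: load a runtime entrypoint address from the current Thread (register TR) at QUICK_ENTRYPOINT_OFFSET(...), then blx LR, which branches to the loaded address and leaves the return address in LR. At the C++ level the shape is just an indirect call through a per-thread table of function pointers; a hypothetical sketch (the struct and the stub are invented for illustration, only the slot name pStringCompareTo comes from the snippet):

    #include <cstdint>
    #include <cstdio>

    // Hypothetical stand-in for the per-thread entrypoint table that TR points at.
    struct ThreadSketch {
      int32_t (*pStringCompareTo)(const void* lhs, const void* rhs);
    };

    // Dummy entrypoint so the sketch runs.
    static int32_t CompareToStub(const void*, const void*) { return 0; }

    int main() {
      ThreadSketch self{CompareToStub};
      // What "ldr lr, [tr, #offset]; blx lr" amounts to: an indirect call.
      int32_t result = self.pStringCompareTo(nullptr, nullptr);
      std::printf("%d\n", static_cast<int>(result));
    }
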
  code_generator_arm.cc
    456: blocked_core_registers_[LR] = true;   (in SetupBlockedRegisters())
    543: uint32_t push_mask = (core_spill_mask_ & (~(1 << PC))) | 1 << LR;   (in GenerateFrameEntry())
    561: __ bx(LR);   (in GenerateFrameExit())
    891: __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);   (in InvokeRuntime())
    892: __ blx(LR);   (in InvokeRuntime())
    1333: __ LoadFromOffset(kLoadWord, LR, temp, entry_point);   (in VisitInvokeVirtual())
    1335: __ blx(LR);   (in VisitInvokeVirtual())
    1372: __ LoadFromOffset(kLoadWord, LR, temp, entry_point);   (in VisitInvokeInterface())
    1374: __ blx(LR);   (in VisitInvokeInterface())
    4085: __ LoadFromOffset(kLoadWord, LR, temp,   (in GenerateStaticOrDirectCall())
    [all …]
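The push_mask line in GenerateFrameEntry() is worth a second look: the spill mask may name PC, but on method entry the return address is still in LR, so the prologue clears the PC bit and sets the LR bit before pushing. A sketch of that mask rewrite (register numbers per registers_arm.h above; the example mask is made up):

    #include <cstdint>

    constexpr int LR = 14;  // r14
    constexpr int PC = 15;  // r15

    // Sketch of: push_mask = (core_spill_mask_ & ~(1 << PC)) | 1 << LR;
    constexpr uint32_t PushMaskForEntry(uint32_t core_spill_mask) {
      return (core_spill_mask & ~(1u << PC)) | (1u << LR);
    }

    // E.g. a spill mask naming {r5, pc} is pushed on entry as {r5, lr}.
    static_assert(PushMaskForEntry((1u << 5) | (1u << PC)) == ((1u << 5) | (1u << LR)),
                  "the PC slot is filled by pushing LR in the prologue");
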

/art/compiler/jni/quick/arm64/
  calling_convention_arm64.cc
    189: 1 << X25 | 1 << X26 | 1 << X27 | 1 << X28 | 1 << X29 | 1 << LR;   (in CoreSpillMask())

/art/compiler/jni/quick/arm/
  calling_convention_arm.cc
    242: result = 1 << R5 | 1 << R6 | 1 << R7 | 1 << R8 | 1 << R10 | 1 << R11 | 1 << LR;   (in CoreSpillMask())
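Both JNI calling conventions build their core spill mask the same way, OR-ing 1 << reg for each callee-save register plus LR. Pushing every register named in such a mask costs POPCOUNT(mask) * 4 bytes of stack on 32-bit ARM, the same popcount-times-pointer-size arithmetic the quick_method_frame_info_* headers above rely on. A small sketch (the mask literal is copied from the ARM snippet above, the rest is illustrative):

    #include <cstdint>

    constexpr int R5 = 5, R6 = 6, R7 = 7, R8 = 8, R10 = 10, R11 = 11, LR = 14;

    // The ARM JNI core spill mask from calling_convention_arm.cc above.
    constexpr uint32_t kCoreSpillMask =
        1u << R5 | 1u << R6 | 1u << R7 | 1u << R8 | 1u << R10 | 1u << R11 | 1u << LR;

    // Bytes the prologue's push of that mask occupies (4-byte registers).
    constexpr int kCoreSpillBytes = __builtin_popcount(kCoreSpillMask) * 4;
    static_assert(kCoreSpillBytes == 7 * 4, "seven registers, 28 bytes");
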

/art/compiler/utils/
  assembler_thumb_test.cc
    716: __ ldm(DB_W, R4, (1 << LR | 1 << R11));   (in TEST())
    717: __ ldm(DB, R4, (1 << LR | 1 << R11));   (in TEST())
    737: __ stm(IA_W, R4, (1 << LR | 1 << R11));   (in TEST())
    738: __ stm(IA, R4, (1 << LR | 1 << R11));   (in TEST())
    1016: __ blx(LR);   (in TEST())
    1017: __ bx(LR);   (in TEST())

/art/compiler/utils/arm/
  assembler_arm.cc
    391: RegList core_spill_mask = 1 << LR;   (in BuildFrame())
  assembler_thumb2.cc
    1532: (regs & 0xff00 & ~(1 << (load ? PC : LR))) == 0) {   (in EmitMultiMemOp())
    1535: ((regs & (1 << (load ? PC : LR))) != 0 ? B8 : 0) | (regs & 0x00ff);   (in EmitMultiMemOp())
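This check reflects a quirk of the 16-bit Thumb encodings: the short PUSH (store) and POP (load) forms can name only r0-r7 plus one single high register, LR for a push or PC for a pop, flagged by bit 8 (the B8 in the second line). The test rejects any register list whose high byte contains anything other than that one allowed register. A standalone sketch of the predicate (not ART's emitter):

    #include <cstdint>

    constexpr int LR = 14;
    constexpr int PC = 15;

    // True if 'regs' fits the 16-bit Thumb PUSH (store) / POP (load) encoding:
    // r0-r7 freely, plus at most LR on a push or PC on a pop.
    constexpr bool FitsIn16BitPushPop(uint32_t regs, bool load) {
      const uint32_t allowed_high = 1u << (load ? PC : LR);
      return (regs & 0xff00u & ~allowed_high) == 0u;
    }

    static_assert(FitsIn16BitPushPop((1u << 4) | (1u << LR), /*load=*/ false),
                  "push {r4, lr} has a short encoding");
    static_assert(!FitsIn16BitPushPop((1u << 4) | (1u << LR), /*load=*/ true),
                  "pop may include pc, but not lr");
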

/art/compiler/utils/arm64/
  managed_register_arm64_test.cc
    630: EXPECT_TRUE(vixl::lr.Is(Arm64Assembler::reg_x(LR)));   (in TEST())