/art/runtime/arch/mips/

  context_mips.cc
    32  gprs_[RA] = &ra_;                      in Reset()
    35  ra_ = MipsContext::kBadGprBase + RA;   in Reset()

  registers_mips.cc
    31  if (rhs >= ZERO && rhs <= RA) {        in operator <<()

  registers_mips.h
    61  RA = 31,  // Return address.           enumerator

  context_mips.h
    44  SetGPR(RA, new_pc);                    in SetPC()

  quick_method_frame_info_mips.h
    43  (type == Runtime::kSaveAll ? kMipsCalleeSaveAllSpills : 0) | (1 << art::mips::RA);   in MipsCalleeSaveCoreSpills()
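
The quick_method_frame_info_mips.h hit builds the callee-save core spill mask by OR-ing one bit per register, and the RA bit (bit 31, per registers_mips.h) is always included. A minimal standalone sketch of that pattern follows; only RA = 31 is taken from the listing above, while the S-register numbers and the kCalleeSaveAllSpills value are illustrative placeholders, not ART's.

    // Minimal sketch of the spill-mask pattern in MipsCalleeSaveCoreSpills().
    // Only RA = 31 comes from registers_mips.h; the other constants are
    // placeholders for illustration.
    #include <cstdint>
    #include <cstdio>

    namespace mips {
    enum Register { S0 = 16, S1 = 17, RA = 31 };  // abbreviated for the sketch
    }

    // Hypothetical extra spills for a "save everything" frame.
    constexpr uint32_t kCalleeSaveAllSpills = (1u << mips::S0) | (1u << mips::S1);

    constexpr uint32_t CoreSpillMask(bool save_all) {
      // Optional extra spills OR'd with the always-present return-address bit,
      // mirroring "(type == Runtime::kSaveAll ? ... : 0) | (1 << art::mips::RA)".
      return (save_all ? kCalleeSaveAllSpills : 0u) | (1u << mips::RA);
    }

    int main() {
      std::printf("save-all mask: 0x%08x\n", CoreSpillMask(true));   // RA bit plus extras
      std::printf("minimal mask:  0x%08x\n", CoreSpillMask(false));  // 0x80000000, RA only
    }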

/art/runtime/arch/mips64/

  context_mips64.cc
    32  gprs_[RA] = &ra_;                        in Reset()
    35  ra_ = Mips64Context::kBadGprBase + RA;   in Reset()

  registers_mips64.h
    61  RA = 31,  // Return address.             enumerator

  context_mips64.h
    44  SetGPR(RA, new_pc);                      in SetPC()

  quick_method_frame_info_mips64.h
    53  (type == Runtime::kSaveAll ? kMips64CalleeSaveAllSpills : 0) | (1 << art::mips64::RA);   in Mips64CalleeSaveCoreSpills()

  quick_entrypoints_mips64.S
    1489  jal artInstrumentationMethodEntryFromCode   # (Method*, Object*, Thread*, RA)
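
The context_mips64 hits show the long-jump context treating the PC as nothing more than the saved RA slot: Reset() points gprs_[RA] at a member field seeded with a poison value, and SetPC() simply writes through that pointer. Below is a simplified sketch of that shape; the class name, the poison base value, and the bookkeeping are illustrative, and only the RA-specific statements mirror the quoted lines.

    // Simplified sketch of the pattern in context_mips64.{h,cc}; not ART's class.
    #include <cstdint>

    enum GpuRegister { RA = 31, kNumberOfGpuRegisters = 32 };

    // Placeholder poison base; ART defines its own kBadGprBase constant.
    constexpr uintptr_t kBadGprBase = 0xebad6000;

    class SketchContext {
     public:
      void Reset() {
        for (int i = 0; i < kNumberOfGpuRegisters; ++i) {
          gprs_[i] = nullptr;
        }
        gprs_[RA] = &ra_;          // cf. "gprs_[RA] = &ra_;" in Reset()
        ra_ = kBadGprBase + RA;    // poison value until a real frame fills it in
      }

      void SetGPR(int reg, uintptr_t value) {
        if (gprs_[reg] != nullptr) {
          *gprs_[reg] = value;
        }
      }

      // Redirecting execution just overwrites the saved return address.
      void SetPC(uintptr_t new_pc) { SetGPR(RA, new_pc); }  // cf. context_mips64.h:44

     private:
      uintptr_t* gprs_[kNumberOfGpuRegisters];
      uintptr_t ra_;
    };

    int main() {
      SketchContext ctx;
      ctx.Reset();
      ctx.SetPC(0x12345678u);  // lands in ra_ through the RA pointer slot
      return 0;
    }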

/art/compiler/dex/quick/x86/

  x86_lir.h
    381  opcode ## 8RR, opcode ## 8RM, opcode ## 8RA, opcode ## 8RT, \
    384  opcode ## 16RR, opcode ## 16RM, opcode ## 16RA, opcode ## 16RT, \
    388  opcode ## 32RR, opcode ## 32RM, opcode ## 32RA, opcode ## 32RT, \
    392  opcode ## 64RR, opcode ## 64RM, opcode ## 64RA, opcode ## 64RT, \
    490  opcode ## RR, opcode ## RM, opcode ## RA

  assemble_x86.cc
    48   { kX86 ## opname ## 8RA, kRegArray, IS_LOAD | IS_QUIN_OP | reg_def | REG_USE012 | SETS_CCODES …
    60   { kX86 ## opname ## 16RA, kRegArray, IS_LOAD | IS_QUIN_OP | reg_def | REG_USE012 | SETS_CCODE…
    76   { kX86 ## opname ## 32RA, kRegArray, IS_LOAD | IS_QUIN_OP | reg_def | REG_USE012 | SETS_CCODE…
    92   { kX86 ## opname ## 64RA, kRegArray, IS_LOAD | IS_QUIN_OP | reg_def | REG_USE012 | SETS_CCODE…
    341  { kX86 ## opname ## RA, kRegArray, IS_LOAD | IS_QUIN_OP | reg_def | REG_USE12, { prefix, 0, 0x0…
    348  { kX86 ## opname ## RA, kRegArray, IS_LOAD | IS_QUIN_OP | reg_def | REG_USE12, { prefix, 0, 0x0…
    353  { kX86 ## opname ## RA, kRegArray, IS_LOAD | IS_QUIN_OP | reg_def | REG_USE12, { prefix, REX_W,…
    358  { kX86 ## opname ## RA, kRegArray, IS_LOAD | IS_QUIN_OP | reg_def | REG_USE12, { prefix, 0, 0x0…
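
Note that the x86 hits are a different "RA" from the MIPS return-address register: as the kRegArray operand kind in assemble_x86.cc suggests, the RA suffix in these LIR opcode names denotes a reg + array (base + index*scale + displacement) addressing form, and the enumerators are generated by token pasting. A small self-contained sketch of that preprocessor pattern, with a deliberately abbreviated opcode list (the real x86_lir.h macros emit many more variants):

    // Illustration of the token-pasting behind the x86_lir.h hits. Only the
    // "## 8RA"-style pasting comes from the listing; this opcode set and
    // enumerator layout are abbreviated for the sketch.
    #include <iostream>

    // One addressing-form group: reg/reg, reg/mem, reg/array, reg/thread.
    #define BinaryOpCode8(opcode) \
      opcode ## 8RR, opcode ## 8RM, opcode ## 8RA, opcode ## 8RT

    enum X86OpCode {
      BinaryOpCode8(kX86Add),   // kX86Add8RR, kX86Add8RM, kX86Add8RA, kX86Add8RT
      BinaryOpCode8(kX86Sub),   // kX86Sub8RR, kX86Sub8RM, kX86Sub8RA, kX86Sub8RT
      kX86Last
    };

    #undef BinaryOpCode8

    int main() {
      // The "RA" enumerators are just the reg+array slot of each group.
      std::cout << "kX86Add8RA = " << kX86Add8RA << "\n";  // 2
      std::cout << "kX86Sub8RA = " << kX86Sub8RA << "\n";  // 6
    }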

/art/compiler/utils/mips/

  assembler_mips.cc
    341  EmitR(0, rs, static_cast<Register>(0), RA, 0, 0x09);   in Jalr()
    556  StoreToOffset(kStoreWord, RA, SP, stack_offset);       in BuildFrame()
    557  cfi_.RelOffset(DWARFReg(RA), stack_offset);            in BuildFrame()
    588  LoadFromOffset(kLoadWord, RA, SP, stack_offset);       in RemoveFrame()
    589  cfi_.Restore(DWARFReg(RA));                            in RemoveFrame()
    595  Jr(RA);                                                in RemoveFrame()
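
The assembler_mips.cc hits are the prologue/epilogue handling of RA: BuildFrame() stores it into the new frame and records a CFI rule so unwinders can locate it, and RemoveFrame() reloads it and returns with Jr(RA). The sketch below only models that ordering; the assembler and CFI types are logging stand-ins, not ART's MipsAssembler, and the slot offset is an assumed word-sized slot at the top of the frame.

    // Logging stand-in for the RA handling in MipsAssembler::BuildFrame/RemoveFrame.
    #include <cstdio>

    enum Register { SP = 29, RA = 31 };

    struct SketchCfi {
      void RelOffset(int reg, int offset) { std::printf("  cfi: r%d saved at sp+%d\n", reg, offset); }
      void Restore(int reg)               { std::printf("  cfi: r%d restored\n", reg); }
    };

    struct SketchAssembler {
      SketchCfi cfi_;

      void StoreWord(Register src, Register base, int off) { std::printf("  sw  $%d, %d($%d)\n", src, off, base); }
      void LoadWord(Register dst, Register base, int off)  { std::printf("  lw  $%d, %d($%d)\n", dst, off, base); }
      void Jr(Register reg)                                 { std::printf("  jr  $%d\n", reg); }

      // Prologue: spill RA and tell the unwinder where it lives.
      void BuildFrame(int frame_size) {
        int stack_offset = frame_size - 4;   // assumed word-sized RA slot at the frame top
        StoreWord(RA, SP, stack_offset);     // cf. StoreToOffset(kStoreWord, RA, SP, stack_offset)
        cfi_.RelOffset(RA, stack_offset);    // cf. cfi_.RelOffset(DWARFReg(RA), stack_offset)
      }

      // Epilogue: reload RA, drop the CFI rule, and return through it.
      void RemoveFrame(int frame_size) {
        int stack_offset = frame_size - 4;
        LoadWord(RA, SP, stack_offset);      // cf. LoadFromOffset(kLoadWord, RA, SP, stack_offset)
        cfi_.Restore(RA);                    // cf. cfi_.Restore(DWARFReg(RA))
        Jr(RA);                              // cf. Jr(RA)
      }
    };

    int main() {
      SketchAssembler as;
      std::printf("BuildFrame:\n");
      as.BuildFrame(32);
      std::printf("RemoveFrame:\n");
      as.RemoveFrame(32);
    }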

/art/compiler/jni/quick/mips/

  calling_convention_mips.cc
    141  1 << T7 | 1 << T8 | 1 << RA;   in CoreSpillMask()

/art/compiler/jni/quick/mips64/

  calling_convention_mips64.cc
    142  result = 1 << S2 | 1 << S3 | 1 << S4 | 1 << S5 | 1 << S6 | 1 << S7 | 1 << GP | 1 << S8 | 1 << RA;   in CoreSpillMask()
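
The calling_convention_mips64.cc hit fully determines the JNI core spill mask once register numbers are plugged in. Assuming the conventional MIPS numbering that registers_mips64.h follows (S2..S7 = 18..23, GP = 28, S8 = 30, RA = 31), the constant works out as below; the static_assert is just a sanity check on that assumption.

    // Worked value of the CoreSpillMask() expression quoted above, under the
    // assumed register numbering. Unsigned shifts keep the RA bit (bit 31)
    // well defined.
    #include <cstdint>

    enum GpuRegister {
      S2 = 18, S3 = 19, S4 = 20, S5 = 21, S6 = 22, S7 = 23,
      GP = 28, S8 = 30, RA = 31
    };

    constexpr uint32_t kCoreSpillMask =
        1u << S2 | 1u << S3 | 1u << S4 | 1u << S5 | 1u << S6 | 1u << S7 |
        1u << GP | 1u << S8 | 1u << RA;

    // Bits 18-23 contribute 0x00FC0000; GP, S8 and RA add 0x10000000,
    // 0x40000000 and 0x80000000.
    static_assert(kCoreSpillMask == 0xD0FC0000u, "unexpected spill mask value");

    int main() { return 0; }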

/art/compiler/utils/mips64/

  assembler_mips64.cc
    423   Jalr(RA, rs);                                            in Jalr()
    1081  StoreToOffset(kStoreDoubleword, RA, SP, stack_offset);   in BuildFrame()
    1082  cfi_.RelOffset(DWARFReg(RA), stack_offset);              in BuildFrame()
    1127  LoadFromOffset(kLoadDoubleword, RA, SP, stack_offset);   in RemoveFrame()
    1128  cfi_.Restore(DWARFReg(RA));                              in RemoveFrame()
    1134  Jr(RA);                                                  in RemoveFrame()

  assembler_mips64.h
    202   void Jalr(Label* label, GpuRegister indirect_reg = RA);  // R6

/art/compiler/optimizing/

  code_generator_mips64.h
    59   { S0, S1, S2, S3, S4, S5, S6, S7, GP, S8, RA };  // TODO: review

  code_generator_mips64.cc
    413  AddAllocatedRegister(Location::RegisterLocation(RA));   in CodeGeneratorMIPS64()
    582  __ Jr(RA);                                              in GenerateFrameExit()
    880  blocked_core_registers_[RA] = true;                     in SetupBlockedRegisters()
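
In the optimizing backend, RA is listed with the callee-saves in code_generator_mips64.h but is explicitly kept away from the register allocator and only used implicitly, e.g. by the Jr(RA) in GenerateFrameExit(). A small sketch of that blocking pattern; the class and the allocator-side query are illustrative, and only the blocked_core_registers_[RA] = true statement mirrors the listing.

    // Sketch of why RA shows up in SetupBlockedRegisters(): it is reserved so
    // the allocator never hands it out. Not ART's CodeGeneratorMIPS64.
    #include <cassert>

    enum GpuRegister { V0 = 2, RA = 31, kNumberOfGpuRegisters = 32 };

    struct SketchCodeGenerator {
      bool blocked_core_registers_[kNumberOfGpuRegisters] = {};

      void SetupBlockedRegisters() {
        blocked_core_registers_[RA] = true;   // cf. code_generator_mips64.cc:880
      }

      // A register allocator would consult this before assigning a core register.
      bool IsBlocked(GpuRegister reg) const { return blocked_core_registers_[reg]; }
    };

    int main() {
      SketchCodeGenerator gen;
      gen.SetupBlockedRegisters();
      assert(gen.IsBlocked(RA));    // never available for general allocation
      assert(!gen.IsBlocked(V0));   // ordinary registers stay allocatable
      return 0;
    }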
|