
Searched refs:lir (Results 1 – 21 of 21) sorted by relevance

/art/compiler/dex/quick/mips/
assemble_mips.cc
460 void MipsMir2Lir::ConvertShortToLongBranch(LIR* lir) { in ConvertShortToLongBranch() argument
463 int opcode = lir->opcode; in ConvertShortToLongBranch()
464 int dalvik_offset = lir->dalvik_offset; in ConvertShortToLongBranch()
485 LIR* hop_branch = RawLIR(dalvik_offset, opcode, lir->operands[0], in ConvertShortToLongBranch()
486 lir->operands[1], 0, 0, 0, hop_target); in ConvertShortToLongBranch()
487 InsertLIRBefore(lir, hop_branch); in ConvertShortToLongBranch()
490 InsertLIRBefore(lir, curr_pc); in ConvertShortToLongBranch()
493 lir->target); in ConvertShortToLongBranch()
494 InsertLIRBefore(lir, delta_hi); in ConvertShortToLongBranch()
495 InsertLIRBefore(lir, anchor); in ConvertShortToLongBranch()
[all …]
target_mips.cc
146 void MipsMir2Lir::SetupTargetResourceMasks(LIR* lir, uint64_t flags, in SetupTargetResourceMasks() argument
149 DCHECK(!lir->flags.use_def_invalid); in SetupTargetResourceMasks()
194 std::string MipsMir2Lir::BuildInsnString(const char *fmt, LIR *lir, unsigned char* base_addr) { in BuildInsnString() argument
211 operand = lir->operands[nc-'0']; in BuildInsnString()
245 reinterpret_cast<uintptr_t>(base_addr) + lir->offset + 4 + (operand << 1), in BuildInsnString()
246 lir->target); in BuildInsnString()
252 int offset_1 = lir->operands[0]; in BuildInsnString()
253 int offset_2 = NEXT_LIR(lir)->operands[0]; in BuildInsnString()
255 (((reinterpret_cast<uintptr_t>(base_addr) + lir->offset + 4) & ~3) + in BuildInsnString()
558 bool MipsMir2Lir::IsUnconditionalBranch(LIR* lir) { in IsUnconditionalBranch() argument
[all …]
codegen_mips.h
72 void DumpResourceMask(LIR* lir, const ResourceMask& mask, const char* prefix) OVERRIDE;
73 void SetupTargetResourceMasks(LIR* lir, uint64_t flags,
77 std::string BuildInsnString(const char* fmt, LIR* lir, unsigned char* base_addr);
80 size_t GetInsnSize(LIR* lir) OVERRIDE;
81 bool IsUnconditionalBranch(LIR* lir);
191 void ConvertShortToLongBranch(LIR* lir);
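
Note on the ConvertShortToLongBranch() matches above: the routine materializes several new LIR nodes (hop_branch, curr_pc, delta_hi, anchor) and splices each one into the instruction stream with InsertLIRBefore(lir, ...). A minimal sketch of that splice-before step on a doubly-linked instruction list; the node below is a simplified stand-in, the real ART LIR and RawLIR() also carry opcode, operands, offsets and fixup state:

  // Simplified stand-in for ART's LIR node: only the list links are modeled.
  struct LIR {
    LIR* prev = nullptr;
    LIR* next = nullptr;
  };

  // Splice new_lir immediately before current_lir, as ConvertShortToLongBranch()
  // does for each piece of the expanded long-branch sequence.
  void InsertLIRBefore(LIR* current_lir, LIR* new_lir) {
    LIR* prev_lir = current_lir->prev;
    new_lir->prev = prev_lir;
    new_lir->next = current_lir;
    current_lir->prev = new_lir;
    if (prev_lir != nullptr) {  // null check kept only so the standalone sketch is safe
      prev_lir->next = new_lir;
    }
  }
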
/art/compiler/dex/quick/arm/
assemble_arm.cc
1070 uint8_t* ArmMir2Lir::EncodeLIRs(uint8_t* write_pos, LIR* lir) { in EncodeLIRs() argument
1071 for (; lir != NULL; lir = NEXT_LIR(lir)) { in EncodeLIRs()
1072 if (!lir->flags.is_nop) { in EncodeLIRs()
1073 int opcode = lir->opcode; in EncodeLIRs()
1077 if (lir->offset & 0x2) { in EncodeLIRs()
1083 } else if (LIKELY(!lir->flags.is_nop)) { in EncodeLIRs()
1084 const ArmEncodingMap *encoder = &EncodingMap[lir->opcode]; in EncodeLIRs()
1089 operand = lir->operands[i]; in EncodeLIRs()
1211 LIR* lir; in AssembleLIR() local
1230 lir = first_fixup_; in AssembleLIR()
[all …]
target_arm.cc
162 void ArmMir2Lir::SetupTargetResourceMasks(LIR* lir, uint64_t flags, in SetupTargetResourceMasks() argument
165 DCHECK(!lir->flags.use_def_invalid); in SetupTargetResourceMasks()
167 int opcode = lir->opcode; in SetupTargetResourceMasks()
182 def_mask->SetBits(EncodeArmRegList(lir->operands[0])); in SetupTargetResourceMasks()
186 def_mask->SetBits(EncodeArmRegList(lir->operands[1])); in SetupTargetResourceMasks()
190 def_mask->SetBits(EncodeArmRegList(lir->operands[0])); in SetupTargetResourceMasks()
194 for (int i = 0; i < lir->operands[2]; i++) { in SetupTargetResourceMasks()
195 SetupRegMask(def_mask, lir->operands[1] + i); in SetupTargetResourceMasks()
209 use_mask->SetBits(EncodeArmRegList(lir->operands[0])); in SetupTargetResourceMasks()
213 use_mask->SetBits(EncodeArmRegList(lir->operands[1])); in SetupTargetResourceMasks()
[all …]
codegen_arm.h
71 static uint8_t* EncodeLIRs(uint8_t* write_pos, LIR* lir);
72 void DumpResourceMask(LIR* lir, const ResourceMask& mask, const char* prefix) OVERRIDE;
73 void SetupTargetResourceMasks(LIR* lir, uint64_t flags,
77 std::string BuildInsnString(const char* fmt, LIR* lir, unsigned char* base_addr);
80 size_t GetInsnSize(LIR* lir) OVERRIDE;
81 bool IsUnconditionalBranch(LIR* lir);
188 size_t GetInstructionOffset(LIR* lir);
utility_arm.cc
836 LIR* lir = nullptr; in LoadStoreUsingInsnWithOffsetImm8Shl2() local
838 lir = NewLIR3(opcode, r_src_dest.GetReg(), r_ptr.GetReg(), encoded_disp); in LoadStoreUsingInsnWithOffsetImm8Shl2()
840 lir = NewLIR4(opcode, r_src_dest.GetLowReg(), r_src_dest.GetHighReg(), r_ptr.GetReg(), in LoadStoreUsingInsnWithOffsetImm8Shl2()
846 return lir; in LoadStoreUsingInsnWithOffsetImm8Shl2()
1170 size_t ArmMir2Lir::GetInstructionOffset(LIR* lir) { in GetInstructionOffset() argument
1171 uint64_t check_flags = GetTargetInstFlags(lir->opcode); in GetInstructionOffset()
1173 size_t offset = (check_flags & IS_TERTIARY_OP) ? lir->operands[2] : 0; in GetInstructionOffset()
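
Note on the EncodeLIRs() matches above: the encoder walks the LIR list with NEXT_LIR, skips nops, and looks each opcode up in the per-target EncodingMap to decide how lir->operands are packed into instruction bits. A sketch of that table-driven loop under deliberately toy assumptions (one 32-bit word per instruction, 8-bit operand fields, a two-entry table); the real ArmEncodingMap field layout is not reproduced here:

  #include <cstdint>
  #include <cstring>

  // Simplified stand-ins; the real LIR and encoding entries carry far more state.
  struct LIR {
    int opcode = 0;
    int operands[4] = {0, 0, 0, 0};
    bool is_nop = false;
    LIR* next = nullptr;
  };

  struct EncodingEntry {
    uint32_t skeleton;  // fixed opcode bits
    int num_operands;   // how many operands to pack (toy scheme: 8 bits each)
  };

  static const EncodingEntry kEncodingMap[] = {
      {0xE1A00000u, 2},  // hypothetical entry 0
      {0xE0800000u, 3},  // hypothetical entry 1
  };

  // Walk the list, skip nops, emit one word per instruction.
  uint8_t* EncodeLIRs(uint8_t* write_pos, LIR* lir) {
    for (; lir != nullptr; lir = lir->next) {
      if (lir->is_nop) {
        continue;
      }
      const EncodingEntry* encoder = &kEncodingMap[lir->opcode];
      uint32_t bits = encoder->skeleton;
      for (int i = 0; i < encoder->num_operands; i++) {
        bits |= static_cast<uint32_t>(lir->operands[i] & 0xFF) << (8 * i);
      }
      std::memcpy(write_pos, &bits, sizeof(bits));
      write_pos += sizeof(bits);
    }
    return write_pos;
  }
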
/art/compiler/dex/quick/x86/
assemble_x86.cc
675 size_t X86Mir2Lir::GetInsnSize(LIR* lir) { in GetInsnSize() argument
676 DCHECK(!IsPseudoLirOp(lir->opcode)); in GetInsnSize()
677 const X86EncodingMap* entry = &X86Mir2Lir::EncodingMap[lir->opcode]; in GetInsnSize()
678 DCHECK_EQ(entry->opcode, lir->opcode) << entry->name; in GetInsnSize()
684 return lir->operands[0]; // Length of nop is sole operand. in GetInsnSize()
688 return ComputeSize(entry, NO_REG, NO_REG, lir->operands[0], 0); in GetInsnSize()
690 return ComputeSize(entry, NO_REG, NO_REG, lir->operands[0], 0); in GetInsnSize()
692 return ComputeSize(entry, NO_REG, NO_REG, lir->operands[0], lir->operands[1]); in GetInsnSize()
694 return ComputeSize(entry, NO_REG, lir->operands[1], lir->operands[0], lir->operands[3]); in GetInsnSize()
696 return ComputeSize(entry, lir->operands[2], NO_REG, lir->operands[0], lir->operands[1]); in GetInsnSize()
[all …]
codegen_x86.h
131 void DumpResourceMask(LIR* lir, const ResourceMask& mask, const char* prefix) OVERRIDE;
132 void SetupTargetResourceMasks(LIR* lir, uint64_t flags,
136 std::string BuildInsnString(const char* fmt, LIR* lir, unsigned char* base_addr) OVERRIDE;
139 size_t GetInsnSize(LIR* lir) OVERRIDE;
140 bool IsUnconditionalBranch(LIR* lir) OVERRIDE;
478 void EmitUnimplemented(const X86EncodingMap* entry, LIR* lir);
fp_x86.cc
630 LIR *lir = NewLIR3(kX86And32MI, rs_rX86_SP.GetReg(), displacement, 0x7fffffff); in GenInlinedAbsFloat() local
631 AnnotateDalvikRegAccess(lir, displacement >> 2, false /*is_load */, false /* is_64bit */); in GenInlinedAbsFloat()
632 AnnotateDalvikRegAccess(lir, displacement >> 2, true /* is_load */, false /* is_64bit*/); in GenInlinedAbsFloat()
694 LIR *lir = NewLIR3(kX86And32MI, rs_rX86_SP.GetReg(), displacement + HIWORD_OFFSET, 0x7fffffff); in GenInlinedAbsDouble() local
695 …AnnotateDalvikRegAccess(lir, (displacement + HIWORD_OFFSET) >> 2, true /* is_load */, true /* is_6… in GenInlinedAbsDouble()
696 …AnnotateDalvikRegAccess(lir, (displacement + HIWORD_OFFSET) >> 2, false /*is_load */, true /* is_6… in GenInlinedAbsDouble()
int_x86.cc
1615 LIR *lir = NewLIR3(x86op, cu_->target64 ? rl_dest.reg.GetReg() : rl_dest.reg.GetLowReg(), in GenLongRegOrMemOp() local
1617 AnnotateDalvikRegAccess(lir, (displacement + LOWORD_OFFSET) >> 2, in GenLongRegOrMemOp()
1621 lir = NewLIR3(x86op, rl_dest.reg.GetHighReg(), r_base, displacement + HIWORD_OFFSET); in GenLongRegOrMemOp()
1622 AnnotateDalvikRegAccess(lir, (displacement + HIWORD_OFFSET) >> 2, in GenLongRegOrMemOp()
1650 LIR *lir = NewLIR3(x86op, r_base, displacement + LOWORD_OFFSET, in GenLongArith() local
1652 AnnotateDalvikRegAccess(lir, (displacement + LOWORD_OFFSET) >> 2, in GenLongArith()
1654 AnnotateDalvikRegAccess(lir, (displacement + LOWORD_OFFSET) >> 2, in GenLongArith()
1658 lir = NewLIR3(x86op, r_base, displacement + HIWORD_OFFSET, rl_src.reg.GetHighReg()); in GenLongArith()
1659 AnnotateDalvikRegAccess(lir, (displacement + HIWORD_OFFSET) >> 2, in GenLongArith()
1661 AnnotateDalvikRegAccess(lir, (displacement + HIWORD_OFFSET) >> 2, in GenLongArith()
[all …]
target_x86.cc
253 void X86Mir2Lir::SetupTargetResourceMasks(LIR* lir, uint64_t flags, in SetupTargetResourceMasks() argument
256 DCHECK(!lir->flags.use_def_invalid); in SetupTargetResourceMasks()
291 if (lir->opcode == kX86RepneScasw) { in SetupTargetResourceMasks()
333 std::string X86Mir2Lir::BuildInsnString(const char *fmt, LIR *lir, unsigned char* base_addr) { in BuildInsnString() argument
352 int operand = lir->operands[operand_number]; in BuildInsnString()
363 static_cast<uint32_t>(lir->operands[operand_number+1])); in BuildInsnString()
383 reinterpret_cast<uintptr_t>(base_addr) + lir->offset + operand, in BuildInsnString()
384 lir->target); in BuildInsnString()
755 bool X86Mir2Lir::IsUnconditionalBranch(LIR* lir) { in IsUnconditionalBranch() argument
756 return (lir->opcode == kX86Jmp8 || lir->opcode == kX86Jmp32); in IsUnconditionalBranch()
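
Note on the fp_x86.cc / int_x86.cc matches above: each memory-form instruction is followed by AnnotateDalvikRegAccess() calls whose register index is the byte displacement shifted right by 2 (a Dalvik vreg slot is 4 bytes), with LOWORD_OFFSET / HIWORD_OFFSET selecting the two halves of a wide value. A small worked sketch of just that index arithmetic; the offset constants and displacement below are illustrative assumptions, not the real frame layout:

  #include <cstdio>

  // Assumed values for illustration only: low word at +0, high word at +4.
  constexpr int LOWORD_OFFSET = 0;
  constexpr int HIWORD_OFFSET = 4;

  // Byte displacement of a vreg in the frame -> vreg slot index (4 bytes per slot).
  constexpr int VregIndex(int byte_displacement) { return byte_displacement >> 2; }

  int main() {
    int displacement = 24;  // hypothetical frame offset of a wide (64-bit) value
    std::printf("low word  -> vreg slot %d\n", VregIndex(displacement + LOWORD_OFFSET));  // 6
    std::printf("high word -> vreg slot %d\n", VregIndex(displacement + HIWORD_OFFSET));  // 7
    return 0;
  }
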
/art/compiler/dex/quick/
codegen_util.cc
104 void Mir2Lir::UnlinkLIR(LIR* lir) { in UnlinkLIR() argument
105 if (UNLIKELY(lir == first_lir_insn_)) { in UnlinkLIR()
106 first_lir_insn_ = lir->next; in UnlinkLIR()
107 if (lir->next != NULL) { in UnlinkLIR()
108 lir->next->prev = NULL; in UnlinkLIR()
110 DCHECK(lir->next == NULL); in UnlinkLIR()
111 DCHECK(lir == last_lir_insn_); in UnlinkLIR()
114 } else if (lir == last_lir_insn_) { in UnlinkLIR()
115 last_lir_insn_ = lir->prev; in UnlinkLIR()
116 lir->prev->next = NULL; in UnlinkLIR()
[all …]
mir_to_lir-inl.h
161 inline void Mir2Lir::SetupResourceMasks(LIR* lir) { in SetupResourceMasks() argument
162 int opcode = lir->opcode; in SetupResourceMasks()
165 lir->u.m.use_mask = lir->u.m.def_mask = &kEncodeNone; in SetupResourceMasks()
167 lir->flags.fixup = kFixupLabel; in SetupResourceMasks()
176 lir->flags.fixup = kFixupLabel; in SetupResourceMasks()
180 lir->flags.size = GetInsnSize(lir); in SetupResourceMasks()
181 estimated_native_code_size_ += lir->flags.size; in SetupResourceMasks()
207 lir->u.m.def_mask = lir->u.m.use_mask = &kEncodeAll; in SetupResourceMasks()
212 SetupRegMask(&def_mask, lir->operands[0]); in SetupResourceMasks()
216 SetupRegMask(&def_mask, lir->operands[1]); in SetupResourceMasks()
[all …]
local_optimizations.cc
92 inline void Mir2Lir::EliminateLoad(LIR* lir, int reg_id) { in EliminateLoad() argument
93 DCHECK(RegStorage::SameRegType(lir->operands[0], reg_id)); in EliminateLoad()
97 if (lir->operands[0] == reg_id) { in EliminateLoad()
98 NopLIR(lir); in EliminateLoad()
105 dest_reg = RegStorage::Solo32(lir->operands[0]); in EliminateLoad()
109 dest_reg = RegStorage::Solo64(lir->operands[0]); in EliminateLoad()
113 dest_reg = RegStorage::FloatSolo32(lir->operands[0]); in EliminateLoad()
117 dest_reg = RegStorage::FloatSolo64(lir->operands[0]); in EliminateLoad()
124 ConvertMemOpIntoMove(lir, dest_reg, src_reg); in EliminateLoad()
125 NopLIR(lir); in EliminateLoad()
mir_to_lir.h
201 #define NEXT_LIR(lir) (lir->next) argument
202 #define PREV_LIR(lir) (lir->prev) argument
579 virtual size_t GetInstructionOffset(LIR* lir);
638 void AppendLIR(LIR* lir);
666 void SetupResourceMasks(LIR* lir);
667 void SetMemRefType(LIR* lir, bool is_load, int mem_type);
668 void AnnotateDalvikRegAccess(LIR* lir, int reg_id, bool is_load, bool is64bit);
672 void EliminateLoad(LIR* lir, int reg_id);
694 void NopLIR(LIR* lir);
695 void UnlinkLIR(LIR* lir);
[all …]
mir_to_lir.cc
1324 size_t Mir2Lir::GetInstructionOffset(LIR* lir) { in GetInstructionOffset() argument
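
Note on the codegen_util.cc and mir_to_lir.h matches above: LIR instructions form a doubly-linked list (NEXT_LIR / PREV_LIR are just ->next / ->prev), and UnlinkLIR() detaches a node while keeping the head and tail pointers consistent. A self-contained sketch of that unlink logic, using a simplified stand-in node and an explicit list object in place of the Mir2Lir members first_lir_insn_ / last_lir_insn_; the interior case and the empty-tail update are truncated in the snippet and are filled in here with the standard list manipulation:

  #include <cassert>

  // Simplified stand-in for ART's LIR node: only the list links are modeled.
  struct LIR {
    LIR* prev = nullptr;
    LIR* next = nullptr;
  };

  struct LirList {
    LIR* first = nullptr;  // plays the role of first_lir_insn_
    LIR* last = nullptr;   // plays the role of last_lir_insn_

    // Mirrors the head / tail / interior cases visible in Mir2Lir::UnlinkLIR().
    void Unlink(LIR* lir) {
      if (lir == first) {
        first = lir->next;
        if (lir->next != nullptr) {
          lir->next->prev = nullptr;
        } else {
          assert(lir == last);  // unlinking the only element
          last = nullptr;       // elided in the snippet; needed to keep the tail consistent
        }
      } else if (lir == last) {
        last = lir->prev;
        lir->prev->next = nullptr;
      } else {
        lir->prev->next = lir->next;
        lir->next->prev = lir->prev;
      }
    }
  };
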
/art/compiler/dex/quick/arm64/
assemble_arm64.cc
652 uint8_t* Arm64Mir2Lir::EncodeLIRs(uint8_t* write_pos, LIR* lir) { in EncodeLIRs() argument
653 for (; lir != nullptr; lir = NEXT_LIR(lir)) { in EncodeLIRs()
654 bool opcode_is_wide = IS_WIDE(lir->opcode); in EncodeLIRs()
655 ArmOpcode opcode = UNWIDE(lir->opcode); in EncodeLIRs()
661 if (LIKELY(!lir->flags.is_nop)) { in EncodeLIRs()
670 uint32_t operand = lir->operands[i]; in EncodeLIRs()
747 << " @ 0x" << std::hex << lir->dalvik_offset; in EncodeLIRs()
815 LIR* lir; in AssembleLIR() local
839 lir = first_fixup_; in AssembleLIR()
841 while (lir != NULL) { in AssembleLIR()
[all …]
target_arm64.cc
166 void Arm64Mir2Lir::SetupTargetResourceMasks(LIR* lir, uint64_t flags, in SetupTargetResourceMasks() argument
169 DCHECK(!lir->flags.use_def_invalid); in SetupTargetResourceMasks()
335 std::string Arm64Mir2Lir::BuildInsnString(const char* fmt, LIR* lir, unsigned char* base_addr) { in BuildInsnString() argument
352 operand = lir->operands[nc-'0']; in BuildInsnString()
357 int omittable = ((IS_WIDE(lir->opcode)) ? EncodeExtend(kA64Uxtw, 0) : in BuildInsnString()
407 snprintf(tbuf, arraysize(tbuf), "%c%d", (IS_FWIDE(lir->opcode)) ? 'd' : 's', in BuildInsnString()
411 bool is_wide = IS_WIDE(lir->opcode); in BuildInsnString()
453 snprintf(tbuf, arraysize(tbuf), "%d", operand*((IS_WIDE(lir->opcode)) ? 8 : 4)); in BuildInsnString()
465 strcpy(tbuf, (IS_WIDE(lir->opcode)) ? ", lsl #3" : ", lsl #2"); in BuildInsnString()
472 reinterpret_cast<uintptr_t>(base_addr) + lir->offset + (operand << 2), in BuildInsnString()
[all …]
codegen_arm64.h
125 void DumpResourceMask(LIR* lir, const ResourceMask& mask, const char* prefix) OVERRIDE;
126 void SetupTargetResourceMasks(LIR* lir, uint64_t flags,
130 std::string BuildInsnString(const char* fmt, LIR* lir, unsigned char* base_addr) OVERRIDE;
133 size_t GetInsnSize(LIR* lir) OVERRIDE;
134 bool IsUnconditionalBranch(LIR* lir) OVERRIDE;
258 size_t GetInstructionOffset(LIR* lir) OVERRIDE;
345 size_t GetLoadStoreSize(LIR* lir);
353 uint8_t* EncodeLIRs(uint8_t* write_pos, LIR* lir);
utility_arm64.cc
90 size_t Arm64Mir2Lir::GetLoadStoreSize(LIR* lir) { in GetLoadStoreSize() argument
91 bool opcode_is_wide = IS_WIDE(lir->opcode); in GetLoadStoreSize()
92 ArmOpcode opcode = UNWIDE(lir->opcode); in GetLoadStoreSize()
99 size_t Arm64Mir2Lir::GetInstructionOffset(LIR* lir) { in GetInstructionOffset() argument
100 size_t offset = lir->operands[2]; in GetInstructionOffset()
101 uint64_t check_flags = GetTargetInstFlags(lir->opcode); in GetInstructionOffset()
105 offset = offset * (1 << GetLoadStoreSize(lir)); in GetInstructionOffset()
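
Note on the utility_arm64.cc matches above: GetInstructionOffset() reads the raw offset from operands[2] and, gated on target flags that are not visible in the truncated snippet, multiplies it by 1 << GetLoadStoreSize(lir), i.e. the encoded operand counts elements of the access size rather than bytes (as A64 scaled-immediate addressing does). A tiny sketch of that scaling with a made-up size code standing in for GetLoadStoreSize():

  #include <cstddef>

  // Hypothetical log2 access sizes (0 = byte ... 3 = 64-bit), standing in for
  // whatever GetLoadStoreSize() derives from the opcode in the real backend.
  enum class AccessSize : int { k8 = 0, k16 = 1, k32 = 2, k64 = 3 };

  // Scaled-immediate addressing: the encoded operand counts elements, so the
  // byte offset is operand << log2(element size).
  size_t ByteOffset(int encoded_operand, AccessSize size) {
    return static_cast<size_t>(encoded_operand) * (1u << static_cast<int>(size));
  }

  // Example: ByteOffset(3, AccessSize::k64) == 24 -- operand 3 with 8-byte elements.
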