Searched refs:wide (Results 1 – 25 of 31) sorted by relevance

/art/compiler/dex/
vreg_analysis.cc
84 if (is_wide && !reg_location_[index].wide) { in SetWide()
85 reg_location_[index].wide = true; in SetWide()
93 if (!reg_location_[index].wide) { in SetWide()
94 reg_location_[index].wide = true; in SetWide()
150 reg_location_[defs[0]].wide = true; in InferTypeAndSize()
151 reg_location_[defs[1]].wide = true; in InferTypeAndSize()
169 reg_location_[uses[next]].wide = true; in InferTypeAndSize()
170 reg_location_[uses[next + 1]].wide = true; in InferTypeAndSize()
176 type_mismatch |= reg_location_[uses[next]].wide; in InferTypeAndSize()
188 reg_location_[uses[next]].wide = true; in InferTypeAndSize()
[all …]
reg_location.h
44 unsigned wide:1; member
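
reg_location.h packs `wide` as a one-bit flag on the per-value RegLocation descriptor; when it is set, the value is a long or double that spans two 32-bit Dalvik virtual registers. A minimal, self-contained sketch of that bitfield pattern (every field other than `wide` is an illustrative stand-in, not the full ART struct) could look like:

    #include <cstdint>

    // Simplified sketch: a register-location descriptor with a 1-bit "wide"
    // flag, as in reg_location.h. The extra fields are illustrative only.
    struct RegLocationSketch {
      unsigned wide : 1;   // value spans two 32-bit virtual registers
      unsigned fp : 1;     // illustrative: floating-point hint
      unsigned ref : 1;    // illustrative: object-reference hint
      int16_t s_reg_low;   // illustrative: low SSA register name
    };
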
mir_graph.cc
354 bool wide = false; in IsBadMonitorExitCatch() local
357 wide = true; in IsBadMonitorExitCatch()
365 wide = true; in IsBadMonitorExitCatch()
373 wide = true; in IsBadMonitorExitCatch()
390 if (dest == monitor_reg || (wide && dest + 1 == monitor_reg)) { in IsBadMonitorExitCatch()
1302 if (!show_singles && (reg_location_ != NULL) && reg_location_[i].wide) { in GetDalvikDisassembly()
1370 if (!singles_only && reg_location_[ssa_reg].wide) { in GetSSANameWithConst()
1977 bool MIR::DecodedInstruction::GetConstant(int64_t* ptr_value, bool* wide) const { in GetConstant()
1982 DCHECK(wide != nullptr); in GetConstant()
1988 *wide = false; in GetConstant()
[all …]
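
The mir_graph.cc hit at line 1977 shows GetConstant() reporting, through an out-parameter, whether a decoded constant is wide. As a hedged, self-contained sketch (ConstInsnSketch and VRegsNeeded are invented for illustration, not ART API), a caller would typically use that flag to decide how many virtual registers the constant occupies:

    #include <cstdint>

    // Hypothetical stand-in for MIR::DecodedInstruction::GetConstant(): the
    // second out-parameter tells the caller whether the constant is wide.
    struct ConstInsnSketch {
      int64_t value;
      bool is_wide;
      bool GetConstant(int64_t* ptr_value, bool* wide) const {
        *ptr_value = value;
        *wide = is_wide;
        return true;
      }
    };

    // A wide (long/double) constant needs a vreg pair; a narrow one needs one.
    int VRegsNeeded(const ConstInsnSketch& insn) {
      int64_t value = 0;
      bool wide = false;
      return insn.GetConstant(&value, &wide) ? (wide ? 2 : 1) : 0;
    }
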
mir_graph.h
289 bool GetConstant(int64_t* ptr_value, bool* wide) const;
783 DCHECK(!res.wide); in GetDest()
789 DCHECK(!res.wide); in GetSrc()
795 DCHECK(res.wide); in GetDestWide()
801 DCHECK(res.wide); in GetSrcWide()
877 CompilerTemp* GetNewCompilerTemp(CompilerTempType ct_type, bool wide);
mir_optimization.cc
248 CompilerTemp* MIRGraph::GetNewCompilerTemp(CompilerTempType ct_type, bool wide) { in GetNewCompilerTemp() argument
252 if (available_temps <= 0 || (available_temps <= 1 && wide)) { in GetNewCompilerTemp()
263 DCHECK_EQ(wide, false); in GetNewCompilerTemp()
278 if (wide) { in GetNewCompilerTemp()
300 reg_location_[ssa_reg_high].wide = true; in GetNewCompilerTemp()
312 reg_location_[ssa_reg_low].wide = wide; in GetNewCompilerTemp()
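
The GetNewCompilerTemp() hits above show the allocation guard `available_temps <= 1 && wide` and the marking of both the low and high SSA registers as wide. A hedged sketch of that bookkeeping, with invented names and none of ART's real data structures, might read:

    #include <optional>

    // Illustrative only: a wide compiler temp claims two adjacent virtual
    // registers, so allocation fails when fewer than two slots remain.
    struct TempPoolSketch {
      int available_temps;
      int next_vreg;

      std::optional<int> Alloc(bool wide) {
        if (available_temps <= 0 || (available_temps <= 1 && wide)) {
          return std::nullopt;            // not enough room for this temp
        }
        const int low_vreg = next_vreg;
        const int slots = wide ? 2 : 1;   // wide temps also claim the high vreg
        next_vreg += slots;
        available_temps -= slots;
        return low_vreg;
      }
    };
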
local_value_numbering.cc
1032 bool wide = false; in HandlePhi() local
1035 wide = true; in HandlePhi()
1053 value_name = wide ? lvn->GetOperandValueWide(s_reg) : lvn->GetOperandValue(s_reg); in HandlePhi()
1067 if (!wide && gvn_->NullCheckedInAllPredecessors(merge_names_)) { in HandlePhi()
1072 if (wide) { in HandlePhi()
/art/compiler/dex/quick/mips/
fp_mips.cc
106 DCHECK(rl_src1.wide); in GenArithOpDouble()
108 DCHECK(rl_src2.wide); in GenArithOpDouble()
110 DCHECK(rl_dest.wide); in GenArithOpDouble()
111 DCHECK(rl_result.wide); in GenArithOpDouble()
154 if (rl_src.wide) { in GenConversion()
161 if (rl_dest.wide) { in GenConversion()
170 bool wide = true; in GenCmpFP() local
176 wide = false; in GenCmpFP()
180 wide = false; in GenCmpFP()
194 if (wide) { in GenCmpFP()
README.mips
25 The resource masks in the LIR structure are 64-bits wide, which is enough
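
The README.mips excerpt refers to the 64-bit resource masks attached to each LIR node. A hedged sketch of the general idea (names invented; this is not ART's ResourceMask type) is a plain 64-bit bit set, one bit per machine resource, combined with bitwise operations:

    #include <cstdint>

    // Illustrative: one bit per machine resource (a register, condition
    // flags, memory, ...); dependency checks reduce to bitwise tests.
    using ResourceMaskSketch = uint64_t;

    constexpr ResourceMaskSketch RegBit(int reg) {
      return UINT64_C(1) << reg;
    }

    inline bool HasDependency(ResourceMaskSketch def_mask,
                              ResourceMaskSketch use_mask) {
      return (def_mask & use_mask) != 0;
    }
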
/art/compiler/dex/quick/arm64/
utility_arm64.cc
567 ArmOpcode wide = (r_dest_src1.Is64Bit()) ? WIDE(0) : UNWIDE(0); in OpRegRegShift() local
593 return NewLIR2(kA64Rev2rr | wide, r_dest_src1.GetReg(), r_src2.GetReg()); in OpRegRegShift()
597 NewLIR2(kA64Rev162rr | wide, r_dest_src1.GetReg(), r_src2.GetReg()); in OpRegRegShift()
599 return NewLIR4(kA64Sbfm4rrdd | wide, r_dest_src1.GetReg(), r_dest_src1.GetReg(), 0, 15); in OpRegRegShift()
605 return NewLIR4(kA64Sbfm4rrdd | wide, r_dest_src1.GetReg(), r_src2.GetReg(), 0, 7); in OpRegRegShift()
609 return NewLIR4(kA64Sbfm4rrdd | wide, r_dest_src1.GetReg(), r_src2.GetReg(), 0, 15); in OpRegRegShift()
614 return NewLIR4(kA64Ubfm4rrdd | wide, r_dest_src1.GetReg(), r_src2.GetReg(), 0, 15); in OpRegRegShift()
622 return NewLIR2(opcode | wide, r_dest_src1.GetReg(), r_src2.GetReg()); in OpRegRegShift()
626 return NewLIR3(opcode | wide, r_dest_src1.GetReg(), r_src2.GetReg(), shift); in OpRegRegShift()
636 ArmOpcode wide = (r_dest_src1.Is64Bit()) ? WIDE(0) : UNWIDE(0); in OpRegRegExtend() local
[all …]
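
In the utility_arm64.cc hits, `wide` is not a boolean but an opcode modifier: WIDE(0) or UNWIDE(0) is chosen from Is64Bit() and OR'ed into the base A64 opcode, much as the AArch64 sf bit selects X- over W-register forms. A hedged sketch of that idiom, with an invented flag value rather than ART's real encodings:

    #include <cstdint>

    // Illustrative encoding only; not ART's ArmOpcode values.
    enum : uint32_t { kWideFlagSketch = 0x80000000u };

    constexpr uint32_t WidenOpcode(uint32_t base_opcode, bool is_64bit) {
      return is_64bit ? (base_opcode | kWideFlagSketch) : base_opcode;
    }
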
fp_arm64.cc
109 DCHECK(rl_src1.wide); in GenArithOpDouble()
111 DCHECK(rl_src2.wide); in GenArithOpDouble()
113 DCHECK(rl_dest.wide); in GenArithOpDouble()
114 DCHECK(rl_result.wide); in GenArithOpDouble()
185 if (rl_src.wide) { in GenConversion()
194 if (rl_dest.wide) { in GenConversion()
422 ArmOpcode wide = (is_double) ? FWIDE(0) : FUNWIDE(0); in GenInlinedRound() local
430 NewLIR2(kA64Fmov2fI | wide, r_imm_point5.GetReg(), encoded_imm); in GenInlinedRound()
431 NewLIR3(kA64Fadd3fff | wide, r_tmp.GetReg(), rl_src.reg.GetReg(), r_imm_point5.GetReg()); in GenInlinedRound()
440 ArmOpcode wide = (is_double) ? FWIDE(0) : FUNWIDE(0); in GenInlinedMinMaxFP() local
[all …]
int_arm64.cc
206 bool is_wide = rl_dest.ref || rl_dest.wide; in GenSelect()
266 ArmOpcode wide = reg.Is64Bit() ? WIDE(0) : UNWIDE(0); in OpCmpImmBranch() local
267 branch = NewLIR2(opcode | wide, reg.GetReg(), 0); in OpCmpImmBranch()
272 ArmOpcode wide = reg.Is64Bit() ? WIDE(0) : UNWIDE(0); in OpCmpImmBranch() local
273 branch = NewLIR2(opcode | wide, reg.GetReg(), 0); in OpCmpImmBranch()
523 const bool is_64bit = rl_dest.wide; in HandleEasyDivRem64()
628 ArmOpcode wide; in GenDivRem() local
631 wide = WIDE(0); in GenDivRem()
634 wide = UNWIDE(0); in GenDivRem()
637 NewLIR4(kA64Msub4rrrr | wide, rl_result.reg.GetReg(), temp.GetReg(), in GenDivRem()
[all …]
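
The GenDivRem() hit builds the remainder with kA64Msub4rrrr after the division. For context, AArch64 has no integer remainder instruction, so the remainder is formed as n - (n / d) * d; a plain C++ stand-in for that arithmetic (not generated code):

    #include <cstdint>

    int64_t Remainder64(int64_t n, int64_t d) {
      const int64_t q = n / d;  // sdiv  xq, xn, xd
      return n - q * d;         // msub  xr, xq, xd, xn  (xr = xn - xq * xd)
    }
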
target_arm64.cc
839 arg_locs[in_position].wide, in Initialize()
843 if (arg_locs[in_position].wide) { in Initialize()
865 if (loc->wide) { in GetArgPhysicalReg()
878 if (loc->wide || loc->ref) { in GetArgPhysicalReg()
956 StoreBaseDisp(TargetPtrReg(kSp), SRegOffset(start_vreg + i), reg, t_loc->wide ? k64 : k32, in FlushIns()
967 t_loc->wide ? k64 : k32, kNotVolatile); in FlushIns()
971 if (t_loc->wide) { in FlushIns()
1059 if (loc.wide) { in GenDalvikArgsRange()
1130 if (rl_arg.wide) { in GenDalvikArgsRange()
1160 if (rl_arg.wide) { in GenDalvikArgsRange()
[all …]
/art/compiler/dex/quick/
mir_to_lir.cc
55 void Mir2Lir::LockArg(int in_position, bool wide) { in LockArg() argument
57 RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) : in LockArg()
69 RegStorage Mir2Lir::LoadArg(int in_position, RegisterClass reg_class, bool wide) { in LoadArg() argument
93 wide ? AllocTypedTempWide(false, reg_class) : AllocTypedTemp(false, reg_class); in LoadArg()
94 LoadBaseDisp(TargetPtrReg(kSp), offset, new_reg, wide ? k64 : k32, kNotVolatile); in LoadArg()
99 if (wide) { in LoadArg()
114 RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) : in LoadArg()
118 if (wide && !reg_arg_high.Valid()) { in LoadArg()
140 DCHECK(!wide); in LoadArg()
146 RegStorage reg_arg = wide ? RegStorage::MakeRegPair(reg_arg_low, reg_arg_high) : reg_arg_low; in LoadArg()
[all …]
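
The LoadArg()/LockArg() hits show the recurring decision for wide incoming arguments: both the low and high halves must be mapped to registers to form a pair, otherwise the value is reloaded from the frame with a 64-bit load. A hedged classification sketch of that decision (the enum and function are invented for illustration):

    enum class ArgSource { kRegisterPair, kSingleRegister, kFrameReload };

    ArgSource ClassifyArg(bool wide, bool low_mapped, bool high_mapped) {
      if (!low_mapped || (wide && !high_mapped)) {
        return ArgSource::kFrameReload;   // LoadBaseDisp with k64 or k32
      }
      return wide ? ArgSource::kRegisterPair : ArgSource::kSingleRegister;
    }
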
ralloc_util.cc
486 RegStorage Mir2Lir::AllocLiveReg(int s_reg, int reg_class, bool wide) { in AllocLiveReg() argument
493 reg = FindLiveReg(wide ? reg_pool_->dp_regs_ : reg_pool_->sp_regs_, s_reg); in AllocLiveReg()
497 reg = FindLiveReg(wide || reg_class == kRefReg ? reg_pool_->core64_regs_ : in AllocLiveReg()
504 if (wide && !reg.IsFloat() && !cu_->target64) { in AllocLiveReg()
515 if (reg.Valid() && (wide != GetRegInfo(reg)->IsWide())) { in AllocLiveReg()
539 if (wide) { in AllocLiveReg()
564 DCHECK(rl_keep.wide); in FreeRegLocTemps()
565 DCHECK(rl_free.wide); in FreeRegLocTemps()
683 DCHECK(!rl.wide); in MarkDef()
697 DCHECK(rl.wide); in MarkDefWide()
[all …]
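
In AllocLiveReg(), the `wide` flag also steers which register pool is searched: double- versus single-precision FP registers, and on 32-bit targets a core register pair instead of a single 64-bit register. A hedged sketch of that selection (the pool names are invented, not ART's register pools):

    enum class PoolSketch { kSingleFp, kDoubleFp, kCore32, kCore64, kCorePair };

    PoolSketch PickPool(bool wide, bool fp, bool target64) {
      if (fp) {
        return wide ? PoolSketch::kDoubleFp : PoolSketch::kSingleFp;
      }
      if (!wide) {
        return PoolSketch::kCore32;
      }
      return target64 ? PoolSketch::kCore64 : PoolSketch::kCorePair;
    }
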
gen_loadstore.cc
186 DCHECK(!rl_dest.wide); in StoreValue()
187 DCHECK(!rl_src.wide); in StoreValue()
232 DCHECK(rl_src.wide); in LoadValueWide()
270 DCHECK(rl_dest.wide); in StoreValueWide()
271 DCHECK(rl_src.wide); in StoreValueWide()
347 DCHECK(rl_dest.wide); in StoreFinalValueWide()
348 DCHECK(rl_src.wide); in StoreFinalValueWide()
389 DCHECK(!loc.wide); in ForceTemp()
406 DCHECK(loc.wide); in ForceTempWide()
gen_invoke.cc
123 if (arg0.wide == 0) { in CallRuntimeHelperRegLocation()
144 if (arg1.wide == 0) { in CallRuntimeHelperImmRegLocation()
158 DCHECK(!arg0.wide); in CallRuntimeHelperRegLocationImm()
233 if (arg0.wide == 0) { in CallRuntimeHelperRegLocationRegLocation()
239 if (arg1.wide == 0) { in CallRuntimeHelperRegLocationRegLocation()
246 if (arg0.wide == 0) { in CallRuntimeHelperRegLocationRegLocation()
248 if (arg1.wide == 0) { in CallRuntimeHelperRegLocationRegLocation()
263 if (arg1.wide == 0) { in CallRuntimeHelperRegLocationRegLocation()
334 DCHECK_EQ(static_cast<unsigned int>(arg1.wide), 0U); // The static_cast works around an in CallRuntimeHelperImmRegLocationRegLocation()
337 if (arg2.wide == 0) { in CallRuntimeHelperImmRegLocationRegLocation()
[all …]
mir_to_lir-inl.h
271 inline void Mir2Lir::CheckRegStorage(RegStorage rs, WidenessCheck wide, RefCheck ref, FPCheck fp) in CheckRegStorage() argument
274 CheckRegStorageImpl(rs, wide, ref, fp, kFailOnSizeError, kReportSizeError); in CheckRegStorage()
mir_to_lir.h
758 RegStorage AllocLiveReg(int s_reg, int reg_class, bool wide);
1219 return TargetReg(reg, loc.wide ? kWide : kNotWide); in TargetReg()
1504 static constexpr OpSize LoadStoreOpSize(bool wide, bool ref) { in LoadStoreOpSize() argument
1505 return wide ? k64 : ref ? kReference : k32; in LoadStoreOpSize()
1572 void LockArg(int in_position, bool wide = false);
1582 RegStorage LoadArg(int in_position, RegisterClass reg_class, bool wide = false);
1662 void CheckRegStorageImpl(RegStorage rs, WidenessCheck wide, RefCheck ref, FPCheck fp, bool fail,
1674 void CheckRegStorage(RegStorage rs, WidenessCheck wide, RefCheck ref, FPCheck fp) const;
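
The LoadStoreOpSize() helper quoted at lines 1504-1505 condenses the size choice for memory operations. A self-contained restatement with stand-in enum values (not ART's OpSize constants) plus compile-time checks of the three cases:

    enum OpSizeSketch { k32Sketch, k64Sketch, kReferenceSketch };

    static constexpr OpSizeSketch LoadStoreOpSizeSketch(bool wide, bool ref) {
      return wide ? k64Sketch : ref ? kReferenceSketch : k32Sketch;
    }

    static_assert(LoadStoreOpSizeSketch(true, false) == k64Sketch, "wide -> 64-bit");
    static_assert(LoadStoreOpSizeSketch(false, true) == kReferenceSketch, "ref -> reference");
    static_assert(LoadStoreOpSizeSketch(false, false) == k32Sketch, "default -> 32-bit");
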
codegen_util.cc
58 if (rl_src.wide) { in IsInexpensiveConstant()
1205 DCHECK(rl_src.wide); in BadOverlap()
1206 DCHECK(rl_dest.wide); in BadOverlap()
1307 loc.wide = false; in NarrowRegLoc()
/art/compiler/dex/quick/arm/
fp_arm.cc
106 DCHECK(rl_src1.wide); in GenArithOpDouble()
108 DCHECK(rl_src2.wide); in GenArithOpDouble()
110 DCHECK(rl_dest.wide); in GenArithOpDouble()
111 DCHECK(rl_result.wide); in GenArithOpDouble()
196 if (rl_src.wide) { in GenConversion()
203 if (rl_dest.wide) { in GenConversion()
int_arm.cc
1305 if (rl_dest.wide) { in GenArrayGet()
1329 if (rl_dest.wide || rl_dest.fp || constant_index) { in GenArrayGet()
1354 if (rl_dest.wide) { in GenArrayGet()
1431 if (rl_src.wide || rl_src.fp || constant_index) { in GenArrayPut()
1432 if (rl_src.wide) { in GenArrayPut()
/art/compiler/dex/portable/
mir_to_gbc.cc
103 if (loc.wide) { in LlvmTypeFromLocRec()
224 if (loc.wide) { in EmitConst()
252 if (loc.wide) { in EmitCopy()
450 ::llvm::Value* res = GenArithOp(op, rl_dest.wide, src1, src2); in ConvertArithOp()
458 ::llvm::Value* res = GenArithOp(op, rl_dest.wide, src1, src2); in ConvertArithOpLit()
481 i += info->args[i].wide ? 2 : 1; in ConvertInvoke()
494 if (info->result.wide) { in ConvertInvoke()
642 DCHECK_EQ(rl_src1.wide, rl_src2.wide); in ConvertWideComparison()
1580 DCHECK_EQ(rl_dest.wide, loc.wide); in HandlePhiNodes()
1581 DCHECK_EQ(rl_dest.wide & rl_dest.high_word, loc.wide & loc.high_word); in HandlePhiNodes()
[all …]
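
The ConvertInvoke() hit at line 481 walks the argument locations by Dalvik vreg word, stepping by two over wide entries because a wide argument's location sits at its low word. A hedged, self-contained sketch of that walk (LocSketch and CountArguments are invented for illustration):

    #include <cstddef>
    #include <vector>

    struct LocSketch { bool wide; };

    int CountArguments(const std::vector<LocSketch>& arg_words) {
      int num_args = 0;
      for (std::size_t i = 0; i < arg_words.size(); ) {
        ++num_args;
        i += arg_words[i].wide ? 2 : 1;  // skip the high word of a wide argument
      }
      return num_args;
    }
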
/art/compiler/dex/quick/x86/
call_x86.cc
87 if (rl_method.wide) { in GenLargePackedSwitch()
161 if (rl_method.wide) { in GenFillArrayData()
target_x86.cc
936 << (loc.wide ? " w" : " ") in DumpRegLocation()
1812 if (rl_method.wide) { in AppendOpcodeWithConst()
2320 if (rl_src.wide == 0) { in GenSetVector()
2412 arg_locs[in_position].wide, arg_locs[in_position].ref); in Initialize()
2416 if (arg_locs[in_position].wide) { in Initialize()
2522 StoreBaseDisp(rs_rX86_SP, SRegOffset(start_vreg + i), reg, t_loc->wide ? k64 : k32, in FlushIns()
2533 t_loc->wide ? k64 : k32, kNotVolatile); in FlushIns()
2537 if (t_loc->wide) { in FlushIns()
2609 info->args[last_mapped_in].wide ? 2 : 1; in GenDalvikArgsRange()
2619 if (loc.wide) { in GenDalvikArgsRange()
[all …]
fp_x86.cc
77 DCHECK(rl_dest.wide); in GenArithOpDouble()
79 DCHECK(rl_src1.wide); in GenArithOpDouble()
81 DCHECK(rl_src2.wide); in GenArithOpDouble()
328 if (rl_src.wide) { in GenConversion()
335 if (rl_dest.wide) { in GenConversion()
