Searched refs:cu_ (Results 1 – 25 of 52) sorted by relevance
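A note on what these matches share: cu_ is ART's per-method CompilationUnit. The Quick compiler's codegen and analysis classes hold it as a pointer member (see the CompilationUnit* const cu_; declaration in dex_compilation_unit.h at the end of these results), while the gtest fixtures own a cu_ value directly and reach into its mir_graph and arena. The sketch below is illustrative only: it lists just the members visible in these matches, and the stand-in types are placeholders for the real ART classes, not their declarations.

    // Illustrative sketch only: a cut-down CompilationUnit with just the
    // members these search results touch. All types below are stand-ins.
    #include <cstdint>
    #include <memory>

    class MIRGraph {};        // the method's mid-level IR
    class CompilerDriver {};  // global driver: options, inliner map, ...
    class DexFile {};
    class ArenaAllocator {};  // method-lifetime allocator (cu_.arena.Alloc)
    class ArenaStack {};      // backs ScopedArenaAllocator(&cu_->arena_stack)
    enum InstructionSet { kNone, kArm64, kThumb2, kX86, kX86_64, kMips, kMips64 };

    struct CompilationUnit {
      InstructionSet instruction_set;  // selects the codegen backend
      bool target64;                   // gates 32- vs 64-bit codegen paths
      bool verbose;                    // extra logging while compiling
      uint32_t disable_opt;            // bitmask: (1 << kOpt) skips that pass
      uint32_t enable_debug;           // bitmask: (1 << kDebug...) emits a dump
      uint32_t method_idx;             // dex method index being compiled
      uint32_t access_flags;           // kAccStatic, kAccConstructor, ...
      const DexFile* dex_file;
      CompilerDriver* compiler_driver;
      ArenaAllocator arena;
      ArenaStack arena_stack;
      std::unique_ptr<MIRGraph> mir_graph;
    };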

/art/compiler/dex/
mir_graph_test.cc
61 cu_.mir_graph->block_id_map_.clear(); in DoPrepareBasicBlocks()
62 cu_.mir_graph->block_list_.clear(); in DoPrepareBasicBlocks()
69 BasicBlock* bb = cu_.mir_graph->CreateNewBB(def->type); in DoPrepareBasicBlocks()
81 static_cast<SuccessorBlockInfo*>(cu_.arena.Alloc(sizeof(SuccessorBlockInfo), in DoPrepareBasicBlocks()
91 cu_.arena.Alloc(sizeof(BasicBlockDataFlow), kArenaAllocDFInfo)); in DoPrepareBasicBlocks()
94 ASSERT_EQ(count, cu_.mir_graph->block_list_.size()); in DoPrepareBasicBlocks()
95 cu_.mir_graph->entry_block_ = cu_.mir_graph->block_list_[1]; in DoPrepareBasicBlocks()
96 ASSERT_EQ(kEntryBlock, cu_.mir_graph->entry_block_->block_type); in DoPrepareBasicBlocks()
97 cu_.mir_graph->exit_block_ = cu_.mir_graph->block_list_[2]; in DoPrepareBasicBlocks()
98 ASSERT_EQ(kExitBlock, cu_.mir_graph->exit_block_->block_type); in DoPrepareBasicBlocks()
[all …]
type_inference_test.cc
249 cu_.dex_file = dex_file_.get(); in BuildDexFile()
250 cu_.method_idx = dex_file_builder_.GetMethodIdx(kClassName, test_method_signature, kMethodName); in BuildDexFile()
251 cu_.access_flags = is_static ? kAccStatic : 0u; in BuildDexFile()
252 cu_.mir_graph->m_units_.push_back(new (cu_.mir_graph->arena_) DexCompilationUnit( in BuildDexFile()
253 &cu_, cu_.class_loader, cu_.class_linker, *cu_.dex_file, nullptr /* code_item not used */, in BuildDexFile()
255 cu_.access_flags, nullptr /* verified_method not used */)); in BuildDexFile()
256 cu_.mir_graph->current_method_ = 0u; in BuildDexFile()
258 cu_.arena.Alloc(sizeof(DexFile::CodeItem), kArenaAllocMisc)); in BuildDexFile()
262 cu_.mir_graph->current_code_item_ = code_item_; in BuildDexFile()
263 cu_.mir_graph->num_ssa_regs_ = kMaxSsaRegs; in BuildDexFile()
[all …]
mir_optimization_test.cc
100 cu_.mir_graph->block_id_map_.clear(); in DoPrepareBasicBlocks()
101 cu_.mir_graph->block_list_.clear(); in DoPrepareBasicBlocks()
108 BasicBlock* bb = cu_.mir_graph->CreateNewBB(def->type); in DoPrepareBasicBlocks()
120 static_cast<SuccessorBlockInfo*>(cu_.arena.Alloc(sizeof(SuccessorBlockInfo), in DoPrepareBasicBlocks()
130 cu_.arena.Alloc(sizeof(BasicBlockDataFlow), kArenaAllocDFInfo)); in DoPrepareBasicBlocks()
133 ASSERT_EQ(count, cu_.mir_graph->block_list_.size()); in DoPrepareBasicBlocks()
134 cu_.mir_graph->entry_block_ = cu_.mir_graph->block_list_[1]; in DoPrepareBasicBlocks()
135 ASSERT_EQ(kEntryBlock, cu_.mir_graph->entry_block_->block_type); in DoPrepareBasicBlocks()
136 cu_.mir_graph->exit_block_ = cu_.mir_graph->block_list_[2]; in DoPrepareBasicBlocks()
137 ASSERT_EQ(kExitBlock, cu_.mir_graph->exit_block_->block_type); in DoPrepareBasicBlocks()
[all …]
mir_optimization.cc
252 return InstructionSetPointerSize(cu_->instruction_set); in GetNumBytesForSpecialTemps()
283 bool verbose = cu_->verbose; in GetNewCompilerTemp()
440 if (cu_->verbose) { in RemoveLastCompilerTemp()
473 if (cu_->instruction_set == kArm64 || cu_->instruction_set == kThumb2) { in BasicBlockOpt()
476 bool use_lvn = bb->use_lvn && (cu_->disable_opt & (1u << kLocalValueNumbering)) == 0u; in BasicBlockOpt()
481 allocator.reset(ScopedArenaAllocator::Create(&cu_->arena_stack)); in BasicBlockOpt()
482 global_valnum.reset(new (allocator.get()) GlobalValueNumbering(cu_, allocator.get(), in BasicBlockOpt()
543 if ((cu_->disable_opt & (1 << kBranchFusing)) != 0) { in BasicBlockOpt()
599 if ((cu_->instruction_set == kArm64 || cu_->instruction_set == kThumb2 || in BasicBlockOpt()
600 cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) && in BasicBlockOpt()
[all …]
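The disable_opt checks in mir_optimization.cc above all follow one bitmask convention: each pass has an enum value, and bit 1 << value in cu_->disable_opt switches it off, as in (cu_->disable_opt & (1u << kLocalValueNumbering)) == 0u. A minimal sketch of that convention follows; the enum names come from the matches, but their positions here are assumptions and the helper function is hypothetical.

    #include <cstdint>

    // Subset of the optimization-control enum; positions are assumptions.
    enum OptControlBits {
      kLocalValueNumbering,   // gates the LVN/GVN work in BasicBlockOpt()
      kBranchFusing,          // gates fused compare-and-branch generation
      kNullCheckElimination,  // honored via MIR_IGNORE_NULL_CHECK flags
      kSuppressLoads,         // gates redundant-load suppression
    };

    // Hypothetical helper mirroring checks such as
    //   if ((cu_->disable_opt & (1 << kBranchFusing)) != 0) { ... }
    inline bool IsOptDisabled(uint32_t disable_opt, OptControlBits opt) {
      return (disable_opt & (1u << opt)) != 0u;
    }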
local_value_numbering_test.cc
95 cu_.mir_graph->ifield_lowering_infos_.clear(); in DoPrepareIFields()
96 cu_.mir_graph->ifield_lowering_infos_.reserve(count); in DoPrepareIFields()
105 cu_.mir_graph->ifield_lowering_infos_.push_back(field_info); in DoPrepareIFields()
115 cu_.mir_graph->sfield_lowering_infos_.clear(); in DoPrepareSFields()
116 cu_.mir_graph->sfield_lowering_infos_.reserve(count); in DoPrepareSFields()
128 cu_.mir_graph->sfield_lowering_infos_.push_back(field_info); in DoPrepareSFields()
139 mirs_ = cu_.arena.AllocArray<MIR>(count, kArenaAllocMIR); in DoPrepareMIRs()
148 ASSERT_LT(def->field_info, cu_.mir_graph->ifield_lowering_infos_.size()); in DoPrepareMIRs()
150 ASSERT_EQ(cu_.mir_graph->ifield_lowering_infos_[def->field_info].MemAccessType(), in DoPrepareMIRs()
153 ASSERT_LT(def->field_info, cu_.mir_graph->sfield_lowering_infos_.size()); in DoPrepareMIRs()
[all …]
gvn_dead_code_elimination_test.cc
144 cu_.mir_graph->ifield_lowering_infos_.clear(); in DoPrepareIFields()
145 cu_.mir_graph->ifield_lowering_infos_.reserve(count); in DoPrepareIFields()
156 cu_.mir_graph->ifield_lowering_infos_.push_back(field_info); in DoPrepareIFields()
166 cu_.mir_graph->sfield_lowering_infos_.clear(); in DoPrepareSFields()
167 cu_.mir_graph->sfield_lowering_infos_.reserve(count); in DoPrepareSFields()
181 cu_.mir_graph->sfield_lowering_infos_.push_back(field_info); in DoPrepareSFields()
191 cu_.mir_graph->block_id_map_.clear(); in DoPrepareBasicBlocks()
192 cu_.mir_graph->block_list_.clear(); in DoPrepareBasicBlocks()
199 BasicBlock* bb = cu_.mir_graph->CreateNewBB(def->type); in DoPrepareBasicBlocks()
211 static_cast<SuccessorBlockInfo*>(cu_.arena.Alloc(sizeof(SuccessorBlockInfo), in DoPrepareBasicBlocks()
[all …]
global_value_numbering_test.cc
141 cu_.mir_graph->ifield_lowering_infos_.clear(); in DoPrepareIFields()
142 cu_.mir_graph->ifield_lowering_infos_.reserve(count); in DoPrepareIFields()
151 cu_.mir_graph->ifield_lowering_infos_.push_back(field_info); in DoPrepareIFields()
161 cu_.mir_graph->sfield_lowering_infos_.clear(); in DoPrepareSFields()
162 cu_.mir_graph->sfield_lowering_infos_.reserve(count); in DoPrepareSFields()
174 cu_.mir_graph->sfield_lowering_infos_.push_back(field_info); in DoPrepareSFields()
184 cu_.mir_graph->block_id_map_.clear(); in DoPrepareBasicBlocks()
185 cu_.mir_graph->block_list_.clear(); in DoPrepareBasicBlocks()
192 BasicBlock* bb = cu_.mir_graph->CreateNewBB(def->type); in DoPrepareBasicBlocks()
204 static_cast<SuccessorBlockInfo*>(cu_.arena.Alloc(sizeof(SuccessorBlockInfo), in DoPrepareBasicBlocks()
[all …]
mir_analysis.cc
1051 if (cu_->enable_debug & (1 << kDebugShowFilterStats)) { in ComputeSkipCompilation()
1059 << PrettyMethod(cu_->method_idx, *cu_->dex_file); in ComputeSkipCompilation()
1068 if (cu_->compiler_driver->GetCompilerOptions().IsSmallMethod(GetNumDalvikInsns()) && in ComputeSkipCompilation()
1094 if (cu_->compiler_driver->GetCompilerOptions().IsSmallMethod(GetNumDalvikInsns()) && in ComputeSkipCompilation()
1108 const CompilerOptions& compiler_options = cu_->compiler_driver->GetCompilerOptions(); in SkipCompilation()
1172 if (((cu_->access_flags & kAccConstructor) != 0) && ((cu_->access_flags & kAccStatic) != 0)) { in SkipCompilation()
1178 if (cu_->compiler_driver->GetMethodInlinerMap() != nullptr && in SkipCompilation()
1179 cu_->compiler_driver->GetMethodInlinerMap()->GetMethodInliner(cu_->dex_file) in SkipCompilation()
1180 ->IsSpecial(cu_->method_idx)) { in SkipCompilation()
1210 ScopedArenaAllocator allocator(&cu_->arena_stack); in DoCacheFieldLoweringInfo()
[all …]
mir_graph.cc
86 cu_(cu), in MIRGraph()
147 if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) { in MIRGraph()
151 max_available_non_special_compiler_temps_ = cu_->target64 ? 2 : 1; in MIRGraph()
608 LOG(INFO) << PrettyMethod(cu_->method_idx, *cu_->dex_file); in ProcessCanThrow()
639 (cu_->disable_opt & (1 << kSuppressExceptionEdges)) || is_throw || in_try_block; in ProcessCanThrow()
709 cu_, class_loader, Runtime::Current()->GetClassLinker(), dex_file, in InlineMethod()
711 cu_->compiler_driver->GetVerifiedMethod(&dex_file, method_idx))); in InlineMethod()
720 ScopedArenaAllocator allocator(&cu_->arena_stack); in InlineMethod()
882 if (cu_->enable_debug & (1 << kDebugDumpCFG)) { in InlineMethod()
886 if (cu_->verbose) { in InlineMethod()
[all …]
/art/compiler/dex/quick/x86/
call_x86.cc
87 if (cu_->target64) { in GenLargePackedSwitch()
125 int ex_offset = cu_->target64 ? in GenMoveException()
129 NewLIR2(cu_->target64 ? kX86Mov64RT : kX86Mov32RT, rl_result.reg.GetReg(), ex_offset); in GenMoveException()
130 NewLIR2(cu_->target64 ? kX86Mov64TI : kX86Mov32TI, ex_offset, 0); in GenMoveException()
135 DCHECK_EQ(tgt_addr_reg.Is64Bit(), cu_->target64); in UnconditionallyMarkGCCard()
138 int ct_offset = cu_->target64 ? in UnconditionallyMarkGCCard()
141 NewLIR2(cu_->target64 ? kX86Mov64RT : kX86Mov32RT, reg_card_base.GetReg(), ct_offset); in UnconditionallyMarkGCCard()
170 const InstructionSet isa = cu_->target64 ? kX86_64 : kX86; in GenEntrySequence()
172 const RegStorage rs_rSP = cu_->target64 ? rs_rX86_SP_64 : rs_rX86_SP_32; in GenEntrySequence()
177 cu_->compiler_driver->GetCompilerOptions().GetImplicitStackOverflowChecks()) { in GenEntrySequence()
[all …]
quick_assemble_x86_test.cc
76 cu_.reset(new CompilationUnit(pool_.get(), isa_, compiler_driver_.get(), nullptr)); in Prepare()
78 cu_->arena.Alloc(sizeof(DexFile::CodeItem), kArenaAllocMisc)); in Prepare()
80 cu_->mir_graph.reset(new MIRGraph(cu_.get(), &cu_->arena)); in Prepare()
81 cu_->mir_graph->current_code_item_ = code_item; in Prepare()
82 cu_->cg.reset(QuickCompiler::GetCodeGenerator(cu_.get(), nullptr)); in Prepare()
96 X86Mir2Lir* m2l = static_cast<X86Mir2Lir*>(cu_->cg.get()); in Prepare()
102 cu_.reset(); in Release()
123 std::unique_ptr<CompilationUnit> cu_; member in art::QuickAssembleX86TestBase
202 MIR* mir = cu_->mir_graph->NewMIR(); in TestVectorFn()
target_x86.cc
152 return cu_->target64 ? x86_64_loc_c_return_ref : x86_loc_c_return_ref; in LocCReturnRef()
156 return cu_->target64 ? x86_64_loc_c_return_wide : x86_loc_c_return_wide; in LocCReturnWide()
253 return cu_->target64 ? RegStorage32FromSpecialTargetRegister_Target64[reg] in TargetReg32()
279 DCHECK(cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64); in SetupTargetResourceMasks()
472 if (!cu_->target64) { in AllocateByteRegister()
483 return cu_->target64 || reg.GetRegNum() < rs_rX86_SP_32.GetRegNum(); in IsByteRegister()
488 if (cu_->target64) { in ClobberCallerSave()
551 if (cu_->target64) { in LockCallTemps()
572 if (cu_->target64) { in FreeCallTemps()
601 if (!cu_->compiler_driver->GetInstructionSetFeatures()->IsSmp()) { in GenMemBarrier()
[all …]
utility_x86.cc
392 DCHECK_EQ(r_base, cu_->target64 ? rs_rX86_SP_64 : rs_rX86_SP_32); in OpRegMem()
505 if (op == kOpMul && !cu_->target64) { in OpRegRegImm()
508 } else if (op == kOpAnd && !cu_->target64) { in OpRegRegImm()
531 DCHECK_EQ(kX86, cu_->instruction_set); in OpThreadMem()
544 DCHECK_EQ(kX86_64, cu_->instruction_set); in OpThreadMem()
578 } else if (pc_rel_base_reg_.Valid() || cu_->target64) { in LoadConstantWide()
590 if (cu_->target64) { in LoadConstantWide()
665 if (cu_->target64) { in LoadBaseIndexedDisp()
716 DCHECK_EQ(r_base, cu_->target64 ? rs_rX86_SP_64 : rs_rX86_SP_32); in LoadBaseIndexedDisp()
818 if (cu_->target64) { in StoreBaseIndexedDisp()
[all …]
int_x86.cc
39 if (cu_->target64) { in GenCmpLong()
138 if (!(cu_->disable_opt & (1 << kSafeOptimizations)) && r_dest == r_src) { in OpRegCopyNoInsert()
405 if (cu_->target64) { in GenFusedLongCmpBranch()
459 if (cu_->target64) { in GenFusedLongCmpImmBranch()
838 DCHECK(cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64); in GenInlinedMinMax()
840 if (is_long && !cu_->target64) { in GenInlinedMinMax()
935 const int kRegSize = cu_->target64 ? 8 : 4; in GenInlinedMinMax()
957 cfi_.RelOffset(DwarfCoreReg(cu_->target64, tmp.GetReg()), 0); in GenInlinedMinMax()
972 cfi_.Restore(DwarfCoreReg(cu_->target64, tmp.GetReg())); in GenInlinedMinMax()
1039 if (!cu_->target64) { in GenInlinedPeek()
[all …]
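One pattern dominates the x86 results above: nearly every match branches on cu_->target64 to pick the 32- or 64-bit flavor of an opcode, stack-pointer register, or Thread-local offset (e.g. kX86Mov64RT vs kX86Mov32RT in GenMoveException). A stripped-down illustration of that dispatch style follows; the struct and helper are stand-ins, not ART's API.

    // Opcode names below appear in the matches; the rest is illustrative.
    enum X86OpCode { kX86Mov32RT, kX86Mov64RT };

    struct CuView { bool target64; };  // stand-in for CompilationUnit

    // Every width-sensitive choice keys off the same target64 flag, so the
    // 32- and 64-bit backends share one code path with local ternaries.
    inline X86OpCode MoveFromThreadOpcode(const CuView& cu) {
      return cu.target64 ? kX86Mov64RT : kX86Mov32RT;
    }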
/art/compiler/dex/quick/
gen_invoke.cc
82 if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) { in CallHelperSetup()
151 RegStorage r_tmp = TargetReg(cu_->instruction_set == kMips ? kArg2 : kArg1, kWide); in CallRuntimeHelperImmRegLocation()
227 if (cu_->instruction_set == kArm64 || cu_->instruction_set == kMips64 || in CallRuntimeHelperRegLocationRegLocation()
228 cu_->instruction_set == kX86_64) { in CallRuntimeHelperRegLocationRegLocation()
250 DCHECK(!cu_->target64); in CallRuntimeHelperRegLocationRegLocation()
255 if (cu_->instruction_set == kMips) { in CallRuntimeHelperRegLocationRegLocation()
262 if (cu_->instruction_set == kMips) { in CallRuntimeHelperRegLocationRegLocation()
272 if (cu_->instruction_set == kMips) { in CallRuntimeHelperRegLocationRegLocation()
279 if (cu_->instruction_set == kMips) { in CallRuntimeHelperRegLocationRegLocation()
410 if (cu_->target64) { in FlushIns()
[all …]
mir_to_lir-inl.h
74 << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " " in NewLIR0()
84 << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " " in NewLIR1()
94 << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " " in NewLIR2()
104 << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " " in NewLIR2NoDest()
114 << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " " in NewLIR3()
124 << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " " in NewLIR4()
135 << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " " in NewLIR5()
gen_common.cc
100 OpPcRelDexCacheArrayLoad(cu_->dex_file, offset, r_base, false); in GenGetOtherTypeForSgetSput()
291 if (!cu_->compiler_driver->GetCompilerOptions().GetImplicitNullChecks()) { in GenNullCheck()
303 if (!(cu_->disable_opt & (1 << kNullCheckElimination)) && (opt_flags & MIR_IGNORE_NULL_CHECK)) { in GenExplicitNullCheck()
310 if (cu_->compiler_driver->GetCompilerOptions().GetImplicitNullChecks()) { in MarkPossibleNullPointerException()
311 if (!(cu_->disable_opt & (1 << kNullCheckElimination)) && (opt_flags & MIR_IGNORE_NULL_CHECK)) { in MarkPossibleNullPointerException()
320 if (cu_->compiler_driver->GetCompilerOptions().GetImplicitNullChecks()) { in MarkPossibleNullPointerExceptionAfter()
321 if (!(cu_->disable_opt & (1 << kNullCheckElimination)) && (opt_flags & MIR_IGNORE_NULL_CHECK)) { in MarkPossibleNullPointerExceptionAfter()
329 if (cu_->compiler_driver->GetCompilerOptions().GetImplicitStackOverflowChecks()) { in MarkPossibleStackOverflowException()
335 if (cu_->compiler_driver->GetCompilerOptions().GetImplicitNullChecks()) { in ForceImplicitNullCheck()
336 if (!(cu_->disable_opt & (1 << kNullCheckElimination)) && (opt_flags & MIR_IGNORE_NULL_CHECK)) { in ForceImplicitNullCheck()
[all …]
codegen_util.cc
136 if (!cu_->verbose) { in NopLIR()
202 const bool dump_nop = (cu_->enable_debug & (1 << kDebugShowNops)); in DumpLIRInsn()
334 DCHECK_EQ(val_reg.Is64Bit(), cu_->target64); in MarkGCCard()
348 << PrettyMethod(cu_->method_idx, *cu_->dex_file); in CodegenDump()
374 cu_->dex_file->GetMethodId(cu_->method_idx); in CodegenDump()
375 const Signature signature = cu_->dex_file->GetMethodSignature(method_id); in CodegenDump()
376 const char* name = cu_->dex_file->GetMethodName(method_id); in CodegenDump()
377 const char* descriptor(cu_->dex_file->GetMethodDeclaringClassDescriptor(method_id)); in CodegenDump()
539 switch (cu_->instruction_set) { in InstallSwitchTables()
554 default: LOG(FATAL) << "Unexpected instruction set: " << cu_->instruction_set; in InstallSwitchTables()
[all …]
mir_to_lir.cc
117 int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set); in LoadArg()
119 if (cu_->instruction_set == kX86) { in LoadArg()
127 if (cu_->instruction_set == kX86_64) { in LoadArg()
168 int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set); in LoadArgDirect()
169 if (cu_->instruction_set == kX86) { in LoadArgDirect()
177 if (cu_->instruction_set == kX86_64) { in LoadArgDirect()
211 int offset = frame_size_ + StackVisitor::GetOutVROffset(in_position, cu_->instruction_set); in SpillArg()
223 int offset = frame_size_ + StackVisitor::GetOutVROffset(in_position, cu_->instruction_set); in UnspillArg()
287 RegisterClass ret_reg_class = ShortyToRegClass(cu_->shorty[0]); in GenSpecialIGet()
385 RegisterClass reg_class = ShortyToRegClass(cu_->shorty[0]); in GenSpecialIdentity()
[all …]
ralloc_util.cc
291 DCHECK_NE(cu_->instruction_set, kThumb2); in RecordFpPromotion()
310 DCHECK_NE(cu_->instruction_set, kThumb2); in AllocPreservedFpReg()
481 if (cu_->target64) { in AllocLiveReg()
489 if (wide && !reg.IsFloat() && !cu_->target64) { in AllocLiveReg()
698 if (IsTemp(rl.reg) && !(cu_->disable_opt & (1 << kSuppressLoads))) { in ResetDefLoc()
708 if (IsTemp(rs) && !(cu_->disable_opt & (1 << kSuppressLoads))) { in ResetDefLocWide()
1151 bool needs_access_check = !cu_->compiler_driver->CanAccessTypeWithoutChecks( in AnalyzeMIR()
1152 cu_->method_idx, *cu_->dex_file, type_idx, in AnalyzeMIR()
1155 cu_->compiler_driver->IsSafeCast( in AnalyzeMIR()
1170 cu_->compiler_driver->CanAccessTypeWithoutChecks(cu_->method_idx, *cu_->dex_file, in AnalyzeMIR()
[all …]
/art/compiler/dex/quick/mips/
target_mips.cc
156 return cu_->target64 ? mips64_loc_c_return_ref : mips_loc_c_return; in LocCReturnRef()
160 return cu_->target64 ? mips64_loc_c_return_wide : mips_loc_c_return_wide; in LocCReturnWide()
168 if (cu_->target64) { in LocCReturnDouble()
197 if (!cu_->target64 && wide_kind == kWide) { in TargetReg()
207 } else if (cu_->target64 && (wide_kind == kWide || wide_kind == kRef)) { in TargetReg()
227 case kArg4: res_reg = cu_->target64 ? rs_rA4 : RegStorage::InvalidReg(); break; in TargetReg()
228 case kArg5: res_reg = cu_->target64 ? rs_rA5 : RegStorage::InvalidReg(); break; in TargetReg()
229 case kArg6: res_reg = cu_->target64 ? rs_rA6 : RegStorage::InvalidReg(); break; in TargetReg()
230 case kArg7: res_reg = cu_->target64 ? rs_rA7 : RegStorage::InvalidReg(); break; in TargetReg()
235 case kFArg4: res_reg = cu_->target64 ? rs_rF16 : RegStorage::InvalidReg(); break; in TargetReg()
[all …]
utility_mips.cc
34 if (cu_->target64) { in OpFpRegCopy()
90 if (cu_->target64) { in OpFpRegCopy()
95 if (!(cu_->disable_opt & (1 << kSafeOptimizations)) && r_dest == r_src) { in OpFpRegCopy()
292 return NewLIR2(opcode, cu_->target64 ? rRAd : rRA, r_dest_src.GetReg()); in OpReg()
306 bool is64bit = cu_->target64 && (r_dest.Is64Bit() || r_src1.Is64Bit() || r_src2.Is64Bit()); in OpRegRegReg()
350 bool is64bit = cu_->target64 && (r_dest.Is64Bit() || r_src1.Is64Bit()); in OpRegRegImm()
476 if (cu_->target64 && r_dest_src1.Is64Bit()) { in OpRegReg()
489 if (cu_->target64) { in OpRegReg()
492 if (cu_->compiler_driver->GetInstructionSetFeatures()->AsMipsInstructionSetFeatures() in OpRegReg()
502 if (cu_->target64) { in OpRegReg()
[all …]
call_mips.cc
215 int ex_offset = cu_->target64 ? Thread::ExceptionOffset<8>().Int32Value() : in GenMoveException()
229 if (cu_->target64) { in UnconditionallyMarkGCCard()
269 if (cu_->target64) { in GenEntrySequence()
277 InstructionSet target = (cu_->target64) ? kMips64 : kMips; in GenEntrySequence()
278 int ptr_size = cu_->target64 ? 8 : 4; in GenEntrySequence()
292 !cu_->compiler_driver->GetCompilerOptions().GetImplicitStackOverflowChecks(); in GenEntrySequence()
297 if (cu_->target64) { in GenEntrySequence()
358 if (cu_->target64) { in GenEntrySequence()
399 StoreWordDisp(rs_sp, frame_size_ - (cu_->target64 ? 8 : 4), TargetPtrReg(kLr)); in GenSpecialEntryForSuspend()
400 cfi_.RelOffset(DwarfCoreReg(rRA), frame_size_ - (cu_->target64 ? 8 : 4)); in GenSpecialEntryForSuspend()
[all …]
int_mips.cc
56 if (cu_->target64) { in GenCmpLong()
185 if (!cu_->target64) { in OpRegCopyNoInsert()
199 if (cu_->target64) { in OpRegCopyNoInsert()
210 if (!(cu_->disable_opt & (1 << kSafeOptimizations)) && r_dest == r_src) { in OpRegCopyNoInsert()
224 if (cu_->target64) { in OpRegCopyWide()
372 if (!cu_->target64) { in GenInlinedPeek()
377 if (cu_->target64) { in GenInlinedPeek()
395 if (!cu_->target64) { in GenInlinedPoke()
400 if (cu_->target64) { in GenInlinedPoke()
442 if (cu_->target64) { in GenDivZeroCheckWide()
[all …]
/art/compiler/driver/
dex_compilation_unit.h
45 return cu_; in GetCompilationUnit()
113 CompilationUnit* const cu_;
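The final match shows the ownership convention: DexCompilationUnit keeps a non-owning CompilationUnit* const back-pointer, set once at construction, and exposes it through a trivial getter. A sketch of that shape with the constructor trimmed down (the real one, visible in the type_inference_test.cc match above, also takes the class loader, class linker, dex file, code item, and more):

    class CompilationUnit;  // forward declaration suffices for a pointer

    class DexCompilationUnit {
     public:
      explicit DexCompilationUnit(CompilationUnit* cu) : cu_(cu) {}

      CompilationUnit* GetCompilationUnit() const {
        return cu_;  // non-owning back-pointer to the enclosing unit
      }

     private:
      CompilationUnit* const cu_;  // set at construction, never reseated
    };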
