Searched refs:codegen (Results 1 – 25 of 69) sorted by relevance

/art/compiler/optimizing/
intrinsics_utils.h
44 Location MoveArguments(CodeGenerator* codegen) { in MoveArguments() argument
46 IntrinsicVisitor::MoveArguments(invoke_, codegen, &calling_convention_visitor); in MoveArguments()
50 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() argument
51 Assembler* assembler = codegen->GetAssembler(); in EmitNativeCode()
54 SaveLiveRegisters(codegen, invoke_->GetLocations()); in EmitNativeCode()
56 Location method_loc = MoveArguments(codegen); in EmitNativeCode()
59 codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), method_loc, this); in EmitNativeCode()
61 codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), method_loc, this); in EmitNativeCode()
69 codegen->MoveFromReturnRegister(out, invoke_->GetType()); in EmitNativeCode()
72 RestoreLiveRegisters(codegen, invoke_->GetLocations()); in EmitNativeCode()
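The intrinsics_utils.h hit above shows the generic intrinsic slow path: EmitNativeCode saves the live registers, moves the arguments into the calling convention, emits the static/direct or virtual call, moves the return value out of the return register, and restores the live registers (the intrinsics_arm_vixl.cc hit further down repeats the same sequence). A minimal, hypothetical C++ sketch of that ordering, using stand-in types rather than the real ART classes and modelling only the static/direct path:

#include <iostream>

// Stand-ins for art::Location, the invoke's LocationSummary and art::CodeGenerator;
// only the order of operations from the snippet above is modelled here.
struct Location {};
struct LocationSummary {};

struct CodeGenerator {
  void SaveLiveRegisters(LocationSummary*)    { std::cout << "save live registers\n"; }
  Location MoveArguments()                    { std::cout << "move arguments\n"; return {}; }
  void GenerateStaticOrDirectCall(Location)   { std::cout << "static/direct call\n"; }
  void MoveFromReturnRegister()               { std::cout << "move return value\n"; }
  void RestoreLiveRegisters(LocationSummary*) { std::cout << "restore live registers\n"; }
};

// Mirrors the shape of IntrinsicSlowPath::EmitNativeCode from the hit above.
void EmitNativeCode(CodeGenerator* codegen, LocationSummary* locations) {
  codegen->SaveLiveRegisters(locations);
  Location method_loc = codegen->MoveArguments();
  codegen->GenerateStaticOrDirectCall(method_loc);
  codegen->MoveFromReturnRegister();
  codegen->RestoreLiveRegisters(locations);
}

int main() {
  CodeGenerator codegen;
  LocationSummary locations;
  EmitNativeCode(&codegen, &locations);
}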

optimizing_compiler.cc
109 CodeGenerator* codegen, in PassObserver() argument
121 visualizer_(&visualizer_oss_, graph, *codegen), in PassObserver()
130 codegen->SetDisassemblyInformation(&disasm_info_); in PassObserver()
302 CodeGenerator* codegen, in RunOptimizations() argument
315 codegen, in RunOptimizations()
329 CodeGenerator* codegen, in RunOptimizations() argument
335 graph, codegen, dex_compilation_unit, pass_observer, handles, definitions, length); in RunOptimizations()
339 CodeGenerator* codegen,
348 CodeGenerator* codegen,
374 CodeGenerator* codegen,
[all …]

sharpening.h
36 CodeGenerator* codegen,
40 codegen_(codegen), in HOptimization()
49 CodeGenerator* codegen,
56 CodeGenerator* codegen,
63 CodeGenerator* codegen,

register_allocator_test.cc
63 const CodeGenerator& codegen) { in ValidateIntervals() argument
67 codegen, in ValidateIntervals()
86 x86::CodeGeneratorX86 codegen(graph, *features_x86.get(), CompilerOptions()); in Check() local
87 SsaLivenessAnalysis liveness(graph, &codegen, GetScopedAllocator()); in Check()
90 RegisterAllocator::Create(GetScopedAllocator(), &codegen, liveness, strategy); in Check()
103 x86::CodeGeneratorX86 codegen(graph, *features_x86.get(), CompilerOptions()); in TEST_F() local
111 ASSERT_TRUE(ValidateIntervals(intervals, codegen)); in TEST_F()
114 ASSERT_FALSE(ValidateIntervals(intervals, codegen)); in TEST_F()
124 ASSERT_TRUE(ValidateIntervals(intervals, codegen)); in TEST_F()
127 ASSERT_TRUE(ValidateIntervals(intervals, codegen)); in TEST_F()
[all …]

sharpening.cc
85 CodeGenerator* codegen, in SharpenInvokeStaticOrDirect() argument
113 if (callee == codegen->GetGraph()->GetArtMethod() && !codegen->GetGraph()->IsDebuggable()) { in SharpenInvokeStaticOrDirect()
118 AOTCanEmbedMethod(callee, codegen->GetCompilerOptions())) { in SharpenInvokeStaticOrDirect()
124 } else if (codegen->GetCompilerOptions().IsBootImage() && in SharpenInvokeStaticOrDirect()
134 if (codegen->GetGraph()->IsDebuggable()) { in SharpenInvokeStaticOrDirect()
144 codegen->GetSupportedInvokeStaticOrDirectDispatch(desired_dispatch_info, invoke); in SharpenInvokeStaticOrDirect()
150 CodeGenerator* codegen, in ComputeLoadClassKind() argument
175 if (codegen->GetCompilerOptions().IsBootImage()) { in ComputeLoadClassKind()
194 DCHECK(!codegen->GetCompilerOptions().GetCompilePic()); in ComputeLoadClassKind()
209 if (codegen->GetCompilerOptions().GetCompilePic()) { in ComputeLoadClassKind()
[all …]

intrinsics_arm64.h
42 explicit IntrinsicLocationsBuilderARM64(ArenaAllocator* allocator, CodeGeneratorARM64* codegen) in IntrinsicLocationsBuilderARM64() argument
43 : allocator_(allocator), codegen_(codegen) {} in IntrinsicLocationsBuilderARM64()
68 explicit IntrinsicCodeGeneratorARM64(CodeGeneratorARM64* codegen) : codegen_(codegen) {} in IntrinsicCodeGeneratorARM64() argument

register_allocator.cc
33 CodeGenerator* codegen, in RegisterAllocator() argument
36 codegen_(codegen), in RegisterAllocator()
40 CodeGenerator* codegen, in Create() argument
46 new (allocator) RegisterAllocatorLinearScan(allocator, codegen, analysis)); in Create()
49 new (allocator) RegisterAllocatorGraphColor(allocator, codegen, analysis)); in Create()
112 const CodeGenerator& codegen, in ValidateIntervals() argument
116 ? codegen.GetNumberOfCoreRegisters() in ValidateIntervals()
117 : codegen.GetNumberOfFloatingPointRegisters(); in ValidateIntervals()
118 ScopedArenaAllocator allocator(codegen.GetGraph()->GetArenaStack()); in ValidateIntervals()
168 CHECK(codegen.HasAllocatedRegister(processing_core_registers, current->GetRegister())); in ValidateIntervals()
[all …]
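The register_allocator.cc hit above shows RegisterAllocator::Create picking an implementation (RegisterAllocatorLinearScan or RegisterAllocatorGraphColor) and ValidateIntervals checking assigned registers against the code generator's core and floating-point register counts. A simplified, hypothetical sketch of the strategy-based factory shape only, with stub types in place of the real ART classes (the real Create also receives the arena allocator, the CodeGenerator and the liveness analysis, as the snippet shows):

#include <iostream>
#include <memory>

// Hypothetical stand-in; the real class lives in register_allocator.h/.cc.
struct RegisterAllocator {
  virtual ~RegisterAllocator() = default;
  virtual void AllocateRegisters() = 0;

  enum class Strategy { kLinearScan, kGraphColor };  // assumed names, not ART's
  static std::unique_ptr<RegisterAllocator> Create(Strategy strategy);
};

struct RegisterAllocatorLinearScan : RegisterAllocator {
  void AllocateRegisters() override { std::cout << "linear scan allocation\n"; }
};

struct RegisterAllocatorGraphColor : RegisterAllocator {
  void AllocateRegisters() override { std::cout << "graph-coloring allocation\n"; }
};

// Mirrors the dispatch seen in the Create() snippet above.
std::unique_ptr<RegisterAllocator> RegisterAllocator::Create(Strategy strategy) {
  switch (strategy) {
    case Strategy::kLinearScan: return std::make_unique<RegisterAllocatorLinearScan>();
    case Strategy::kGraphColor: return std::make_unique<RegisterAllocatorGraphColor>();
  }
  return nullptr;
}

int main() {
  RegisterAllocator::Create(RegisterAllocator::Strategy::kLinearScan)->AllocateRegisters();
}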

intrinsics_mips64.h
35 explicit IntrinsicLocationsBuilderMIPS64(CodeGeneratorMIPS64* codegen);
60 explicit IntrinsicCodeGeneratorMIPS64(CodeGeneratorMIPS64* codegen) : codegen_(codegen) {} in IntrinsicCodeGeneratorMIPS64() argument

intrinsics_arm_vixl.h
32 explicit IntrinsicLocationsBuilderARMVIXL(CodeGeneratorARMVIXL* codegen);
59 explicit IntrinsicCodeGeneratorARMVIXL(CodeGeneratorARMVIXL* codegen) : codegen_(codegen) {} in IntrinsicCodeGeneratorARMVIXL() argument

intrinsics_mips.h
35 explicit IntrinsicLocationsBuilderMIPS(CodeGeneratorMIPS* codegen);
60 explicit IntrinsicCodeGeneratorMIPS(CodeGeneratorMIPS* codegen) : codegen_(codegen) {} in IntrinsicCodeGeneratorMIPS() argument

intrinsics_x86.h
35 explicit IntrinsicLocationsBuilderX86(CodeGeneratorX86* codegen);
60 explicit IntrinsicCodeGeneratorX86(CodeGeneratorX86* codegen) : codegen_(codegen) {} in IntrinsicCodeGeneratorX86() argument

intrinsics_x86_64.h
35 explicit IntrinsicLocationsBuilderX86_64(CodeGeneratorX86_64* codegen);
60 explicit IntrinsicCodeGeneratorX86_64(CodeGeneratorX86_64* codegen) : codegen_(codegen) {} in IntrinsicCodeGeneratorX86_64() argument

codegen_test_utils.h
266 const CodeGenerator& codegen, in Run() argument
269 InstructionSet target_isa = codegen.GetInstructionSet(); in Run()
293 static void RunCodeNoCheck(CodeGenerator* codegen, in RunCodeNoCheck() argument
300 SsaLivenessAnalysis liveness(graph, codegen, &local_allocator); in RunCodeNoCheck()
304 RegisterAllocator::Create(&local_allocator, codegen, liveness); in RunCodeNoCheck()
309 codegen->Compile(&allocator); in RunCodeNoCheck()
310 Run(allocator, *codegen, has_result, expected); in RunCodeNoCheck()
314 static void RunCode(CodeGenerator* codegen, in RunCode() argument
320 RunCodeNoCheck(codegen, graph, hook_before_codegen, has_result, expected); in RunCode()
330 std::unique_ptr<CodeGenerator> codegen(target_config.CreateCodeGenerator(graph, in RunCode()
[all …]

live_ranges_test.cc
68 x86::CodeGeneratorX86 codegen(graph, *features_x86.get(), CompilerOptions()); in TEST_F() local
69 SsaLivenessAnalysis liveness(graph, &codegen, GetScopedAllocator()); in TEST_F()
114 x86::CodeGeneratorX86 codegen(graph, *features_x86.get(), CompilerOptions()); in TEST_F() local
115 SsaLivenessAnalysis liveness(graph, &codegen, GetScopedAllocator()); in TEST_F()
163 x86::CodeGeneratorX86 codegen(graph, *features_x86.get(), CompilerOptions()); in TEST_F() local
164 SsaLivenessAnalysis liveness(graph, &codegen, GetScopedAllocator()); in TEST_F()
240 x86::CodeGeneratorX86 codegen(graph, *features_x86.get(), CompilerOptions()); in TEST_F() local
241 SsaLivenessAnalysis liveness(graph, &codegen, GetScopedAllocator()); in TEST_F()
317 x86::CodeGeneratorX86 codegen(graph, *features_x86.get(), CompilerOptions()); in TEST_F() local
318 SsaLivenessAnalysis liveness(graph, &codegen, GetScopedAllocator()); in TEST_F()
[all …]

register_allocator.h
49 CodeGenerator* codegen,
70 const CodeGenerator& codegen,
78 CodeGenerator* codegen,

pc_relative_fixups_x86.h
31 PcRelativeFixups(HGraph* graph, CodeGenerator* codegen, OptimizingCompilerStats* stats) in PcRelativeFixups() argument
33 codegen_(codegen) {} in PcRelativeFixups()

pc_relative_fixups_mips.h
31 PcRelativeFixups(HGraph* graph, CodeGenerator* codegen, OptimizingCompilerStats* stats) in PcRelativeFixups() argument
33 codegen_(codegen) {} in PcRelativeFixups()

instruction_simplifier_mips.h
32 InstructionSimplifierMips(HGraph* graph, CodeGenerator* codegen, OptimizingCompilerStats* stats) in InstructionSimplifierMips() argument
34 codegen_(down_cast<CodeGeneratorMIPS*>(codegen)) {} in InstructionSimplifierMips()

optimization.cc
175 CodeGenerator* codegen, in ConstructOptimizations() argument
249 codegen, in ConstructOptimizations()
263 opt = new (allocator) HSharpening(graph, codegen, driver, name); in ConstructOptimizations()
269 opt = new (allocator) InstructionSimplifier(graph, codegen, driver, stats, name); in ConstructOptimizations()
285 graph, driver->GetInstructionSet(), codegen, name); in ConstructOptimizations()
305 opt = new (allocator) mips::PcRelativeFixups(graph, codegen, stats); in ConstructOptimizations()
309 opt = new (allocator) mips::InstructionSimplifierMips(graph, codegen, stats); in ConstructOptimizations()
315 opt = new (allocator) x86::PcRelativeFixups(graph, codegen, stats); in ConstructOptimizations()
319 opt = new (allocator) x86::X86MemoryOperandGeneration(graph, codegen, stats); in ConstructOptimizations()

instruction_simplifier.h
42 CodeGenerator* codegen,
47 codegen_(codegen), in HOptimization()

x86_memory_gen.cc
73 CodeGenerator* codegen, in X86MemoryOperandGeneration() argument
76 do_implicit_null_checks_(codegen->GetCompilerOptions().GetImplicitNullChecks()) { in X86MemoryOperandGeneration()

intrinsics.h
93 CodeGenerator* codegen, in INTRINSICS_LIST()
111 HParallelMove parallel_move(codegen->GetGraph()->GetAllocator()); in INTRINSICS_LIST()
121 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move); in INTRINSICS_LIST()
125 CodeGenerator* codegen,
327 bool IsCallFreeIntrinsic(HInvoke* invoke, Codegenerator* codegen) { in IsCallFreeIntrinsic() argument
336 IntrinsicLocationsBuilder builder(codegen); in IsCallFreeIntrinsic()

code_generator_arm_vixl.cc
108 #define __ down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()-> // NOLINT
126 static inline void EmitPlaceholderBne(CodeGeneratorARMVIXL* codegen, vixl32::Label* patch_label) { in EmitPlaceholderBne() argument
127 ExactAssemblyScope eas(codegen->GetVIXLAssembler(), kMaxInstructionSizeInBytes); in EmitPlaceholderBne()
190 CodeGenerator* codegen, in SaveContiguousSRegisterList() argument
218 UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()); in SaveContiguousSRegisterList()
239 CodeGenerator* codegen, in RestoreContiguousSRegisterList() argument
266 UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()); in RestoreContiguousSRegisterList()
337 void SlowPathCodeARMVIXL::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) { in SaveLiveRegisters() argument
338 size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath(); in SaveLiveRegisters()
341 const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true); in SaveLiveRegisters()
[all …]

intrinsics_x86_64.cc
43 IntrinsicLocationsBuilderX86_64::IntrinsicLocationsBuilderX86_64(CodeGeneratorX86_64* codegen) in IntrinsicLocationsBuilderX86_64() argument
44 : allocator_(codegen->GetGraph()->GetAllocator()), codegen_(codegen) { in IntrinsicLocationsBuilderX86_64()
64 static void MoveArguments(HInvoke* invoke, CodeGeneratorX86_64* codegen) { in MoveArguments() argument
66 IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor); in MoveArguments()
72 #define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT
83 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() argument
84 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen); in EmitNativeCode()
255 CodeGeneratorX86_64* codegen) { in MathAbsFP() argument
265 __ movsd(xmm_temp, codegen->LiteralInt64Address(INT64_C(0x7FFFFFFFFFFFFFFF))); in MathAbsFP()
268 __ movss(xmm_temp, codegen->LiteralInt32Address(INT32_C(0x7FFFFFFF))); in MathAbsFP()
[all …]

intrinsics_arm_vixl.cc
86 Location MoveArguments(CodeGenerator* codegen) { in MoveArguments() argument
88 IntrinsicVisitor::MoveArguments(invoke_, codegen, &calling_convention_visitor); in MoveArguments()
92 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() argument
93 ArmVIXLAssembler* assembler = down_cast<ArmVIXLAssembler*>(codegen->GetAssembler()); in EmitNativeCode()
96 SaveLiveRegisters(codegen, invoke_->GetLocations()); in EmitNativeCode()
98 Location method_loc = MoveArguments(codegen); in EmitNativeCode()
101 codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), method_loc, this); in EmitNativeCode()
103 codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), method_loc, this); in EmitNativeCode()
111 codegen->MoveFromReturnRegister(out, invoke_->GetType()); in EmitNativeCode()
114 RestoreLiveRegisters(codegen, invoke_->GetLocations()); in EmitNativeCode()
[all …]
