
Searched refs:EmitNativeCode (Results 1 – 16 of 16) sorted by relevance

/art/compiler/optimizing/
parallel_move_test.cc
189 resolver.EmitNativeCode(BuildParallelMove(&allocator, moves, arraysize(moves))); in TYPED_TEST()
200 resolver.EmitNativeCode(BuildParallelMove(&allocator, moves, arraysize(moves))); in TYPED_TEST()
216 resolver.EmitNativeCode(BuildParallelMove(&allocator, moves, arraysize(moves))); in TYPED_TEST()
227 resolver.EmitNativeCode(BuildParallelMove(&allocator, moves, arraysize(moves))); in TYPED_TEST()
238 resolver.EmitNativeCode(BuildParallelMove(&allocator, moves, arraysize(moves))); in TYPED_TEST()
249 resolver.EmitNativeCode(BuildParallelMove(&allocator, moves, arraysize(moves))); in TYPED_TEST()
274 resolver.EmitNativeCode(moves); in TYPED_TEST()
295 resolver.EmitNativeCode(moves); in TYPED_TEST()
312 resolver.EmitNativeCode(moves); in TYPED_TEST()
329 resolver.EmitNativeCode(moves); in TYPED_TEST()
[all …]
parallel_move_resolver.h
41 virtual void EmitNativeCode(HParallelMove* parallel_move) = 0;
61 void EmitNativeCode(HParallelMove* parallel_move) OVERRIDE;
136 void EmitNativeCode(HParallelMove* parallel_move) OVERRIDE;
intrinsics_utils.h
50 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function
parallel_move_resolver.cc
37 void ParallelMoveResolverWithSwap::EmitNativeCode(HParallelMove* parallel_move) { in EmitNativeCode() function in art::ParallelMoveResolverWithSwap
302 void ParallelMoveResolverNoSwap::EmitNativeCode(HParallelMove* parallel_move) { in EmitNativeCode() function in art::ParallelMoveResolverNoSwap
intrinsics.h
112 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move); in INTRINSICS_LIST()
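
The hits up to this point all go through the resolver-side overload: ParallelMoveResolver declares a pure virtual EmitNativeCode(HParallelMove*) (parallel_move_resolver.h line 41), ParallelMoveResolverWithSwap and ParallelMoveResolverNoSwap override it (lines 61 and 136, with the definitions in parallel_move_resolver.cc), and callers such as the parallel_move_test.cc tests hand it a filled-in HParallelMove. The following is a minimal, self-contained sketch of that shape only, not ART code: MoveOperands and HParallelMove are stand-ins, the resolver just logs the requested moves instead of ordering them and emitting instructions, and plain override is used where ART uses its OVERRIDE macro.

#include <iostream>
#include <vector>

// Stand-ins for ART's MoveOperands and HParallelMove (the HIR node that
// records a set of moves to be performed "in parallel").
struct MoveOperands {
  int source;
  int destination;
};
struct HParallelMove {
  std::vector<MoveOperands> moves;
};

// Shape of the interface at parallel_move_resolver.h line 41 above.
class ParallelMoveResolver {
 public:
  virtual ~ParallelMoveResolver() = default;
  virtual void EmitNativeCode(HParallelMove* parallel_move) = 0;
};

// Stand-in for ParallelMoveResolverWithSwap / ParallelMoveResolverNoSwap;
// it only logs the requested moves and does not resolve cycles.
class LoggingMoveResolver : public ParallelMoveResolver {
 public:
  void EmitNativeCode(HParallelMove* parallel_move) override {
    for (const MoveOperands& move : parallel_move->moves) {
      std::cout << "mov r" << move.destination << ", r" << move.source << "\n";
    }
  }
};

int main() {
  HParallelMove swap_moves{{{0, 1}, {1, 0}}};  // two moves forming a cycle
  LoggingMoveResolver resolver;
  resolver.EmitNativeCode(&swap_moves);  // same call shape as in parallel_move_test.cc
  return 0;
}

A real resolver's job is exactly the part this stand-in skips: ordering the recorded moves and resolving cycles such as the r0/r1 swap above, which is what the WithSwap and NoSwap variants differ on.
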
code_generator_arm64.cc
224 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::arm64::BoundsCheckSlowPathARM64
256 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::arm64::DivZeroCheckSlowPathARM64
286 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::arm64::LoadClassSlowPathARM64
339 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::arm64::LoadStringSlowPathARM64
370 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::arm64::NullCheckSlowPathARM64
395 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::arm64::SuspendCheckSlowPathARM64
436 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::arm64::TypeCheckSlowPathARM64
493 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::arm64::DeoptimizationSlowPathARM64
514 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::arm64::ArraySetSlowPathARM64
536 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move); in EmitNativeCode()
[all …]
code_generator_arm.cc
69 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::arm::NullCheckSlowPathARM
93 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::arm::DivZeroCheckSlowPathARM
118 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::arm::SuspendCheckSlowPathARM
159 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::arm::BoundsCheckSlowPathARM
201 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::arm::LoadClassSlowPathARM
253 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::arm::LoadStringSlowPathARM
284 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::arm::TypeCheckSlowPathARM
347 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::arm::DeoptimizationSlowPathARM
368 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::arm::ArraySetSlowPathARM
390 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move); in EmitNativeCode()
[all …]
code_generator_x86_64.cc
61 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::x86_64::NullCheckSlowPathX86_64
87 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::x86_64::DivZeroCheckSlowPathX86_64
114 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::x86_64::DivRemMinusOneSlowPathX86_64
148 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::x86_64::SuspendCheckSlowPathX86_64
188 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::x86_64::BoundsCheckSlowPathX86_64
231 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::x86_64::LoadClassSlowPathX86_64
286 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::x86_64::LoadStringSlowPathX86_64
318 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::x86_64::TypeCheckSlowPathX86_64
385 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::x86_64::DeoptimizationSlowPathX86_64
406 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::x86_64::ArraySetSlowPathX86_64
[all …]
code_generator_x86.cc
57 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::x86::NullCheckSlowPathX86
83 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::x86::DivZeroCheckSlowPathX86
110 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::x86::DivRemMinusOneSlowPathX86
132 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::x86::BoundsCheckSlowPathX86
170 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::x86::SuspendCheckSlowPathX86
209 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::x86::LoadStringSlowPathX86
247 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::x86::LoadClassSlowPathX86
299 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::x86::TypeCheckSlowPathX86
364 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::x86::DeoptimizationSlowPathX86
385 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::x86::ArraySetSlowPathX86
[all …]
code_generator_mips64.cc
112 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::mips64::BoundsCheckSlowPathMIPS64
148 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::mips64::DivZeroCheckSlowPathMIPS64
180 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::mips64::LoadClassSlowPathMIPS64
233 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::mips64::LoadStringSlowPathMIPS64
268 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::mips64::NullCheckSlowPathMIPS64
295 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::mips64::SuspendCheckSlowPathMIPS64
333 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::mips64::TypeCheckSlowPathMIPS64
385 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::mips64::DeoptimizationSlowPathMIPS64
3563 codegen_->GetMoveResolver()->EmitNativeCode(instruction); in VisitParallelMove()
code_generator.cc
198 slow_path->EmitNativeCode(this); in GenerateSlowPaths()
1148 GetMoveResolver()->EmitNativeCode(&parallel_move); in EmitParallelMoves()
code_generator_mips.cc
151 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::mips::BoundsCheckSlowPathMIPS
188 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::mips::DivZeroCheckSlowPathMIPS
221 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::mips::LoadClassSlowPathMIPS
278 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::mips::LoadStringSlowPathMIPS
314 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::mips::NullCheckSlowPathMIPS
342 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::mips::SuspendCheckSlowPathMIPS
381 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::mips::TypeCheckSlowPathMIPS
438 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { in EmitNativeCode() function in art::mips::DeoptimizationSlowPathMIPS
4408 codegen_->GetMoveResolver()->EmitNativeCode(instruction); in VisitParallelMove()
code_generator.h
81 virtual void EmitNativeCode(CodeGenerator* codegen) = 0;
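
The code_generator_<arch>.cc hits above are all overrides of the other overload: SlowPathCode::EmitNativeCode(CodeGenerator*), declared pure virtual at code_generator.h line 81 and invoked once per recorded slow path from CodeGenerator::GenerateSlowPaths (the code_generator.cc line 198 hit). A minimal sketch of that pattern, with heavily reduced stand-ins in place of the real CodeGenerator, assembler, and slow-path classes:

#include <iostream>
#include <memory>
#include <string>
#include <utility>
#include <vector>

class CodeGenerator;  // forward declaration, as in the real headers

// Shape of the interface at code_generator.h line 81 above.
class SlowPathCode {
 public:
  virtual ~SlowPathCode() = default;
  virtual void EmitNativeCode(CodeGenerator* codegen) = 0;
};

// Heavily reduced stand-in for ART's CodeGenerator: it only records slow
// paths and "emits" text instead of machine code.
class CodeGenerator {
 public:
  void AddSlowPath(std::unique_ptr<SlowPathCode> slow_path) {
    slow_paths_.push_back(std::move(slow_path));
  }
  void Emit(const std::string& insn) { std::cout << insn << "\n"; }
  // Mirrors the loop behind the code_generator.cc line 198 hit above.
  void GenerateSlowPaths() {
    for (const std::unique_ptr<SlowPathCode>& slow_path : slow_paths_) {
      slow_path->EmitNativeCode(this);
    }
  }
 private:
  std::vector<std::unique_ptr<SlowPathCode>> slow_paths_;
};

// Stand-in for one of the per-architecture slow paths listed above
// (e.g. NullCheckSlowPathARM64): the out-of-line code a null check
// branches to when it fails.
class NullCheckSlowPath : public SlowPathCode {
 public:
  void EmitNativeCode(CodeGenerator* codegen) override {
    codegen->Emit("null_check_slow_path:");
    codegen->Emit("  bl ThrowNullPointerException  ; stand-in for the runtime entrypoint call");
  }
};

int main() {
  CodeGenerator codegen;
  codegen.AddSlowPath(std::make_unique<NullCheckSlowPath>());
  codegen.GenerateSlowPaths();
  return 0;
}

In the real code generators the loop lives in CodeGenerator::GenerateSlowPaths and each architecture provides its own family of slow paths (bounds check, null check, load class, type check, and so on), as the per-architecture hits above show.
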
intrinsics_mips64.cc
93 void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE { in EmitNativeCode() function in art::mips64::IntrinsicSlowPathMIPS64
intrinsics_arm64.cc
106 void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE { in EmitNativeCode() function in art::arm64::IntrinsicSlowPathARM64
intrinsics_mips.cc
104 void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE { in EmitNativeCode() function in art::mips::IntrinsicSlowPathMIPS
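
The intrinsics_utils.h and intrinsics_<arch>.cc hits are the intrinsic slow paths, and they are where the two overloads typically meet: the intrinsic slow path's EmitNativeCode(CodeGenerator*) moves the intrinsic's arguments into the runtime calling convention by handing an HParallelMove to the move resolver's EmitNativeCode, as the intrinsics.h line 112 and code_generator.cc line 1148 hits suggest, and then calls into the runtime. A rough, self-contained sketch of that interplay; every class here is a simplified stand-in for an ART class of the same or similar name, and the register names and emitted text are illustrative only.

#include <iostream>
#include <string>
#include <vector>

// Stand-ins for the resolver-side types (see the sketch after the
// intrinsics.h hit above).
struct MoveOperands {
  std::string source;
  std::string destination;
};
struct HParallelMove {
  std::vector<MoveOperands> moves;
};

class ParallelMoveResolver {
 public:
  virtual ~ParallelMoveResolver() = default;
  virtual void EmitNativeCode(HParallelMove* parallel_move) = 0;
};

class LoggingMoveResolver : public ParallelMoveResolver {
 public:
  void EmitNativeCode(HParallelMove* parallel_move) override {
    for (const MoveOperands& move : parallel_move->moves) {
      std::cout << "  mov " << move.destination << ", " << move.source << "\n";
    }
  }
};

// Reduced CodeGenerator stand-in: just enough to expose a move resolver,
// as codegen->GetMoveResolver() does in the hits above.
class CodeGenerator {
 public:
  ParallelMoveResolver* GetMoveResolver() { return &resolver_; }
  void Emit(const std::string& insn) { std::cout << insn << "\n"; }
 private:
  LoggingMoveResolver resolver_;
};

// Stand-ins for SlowPathCode and an intrinsic slow path such as
// IntrinsicSlowPathARM64.
class SlowPathCode {
 public:
  virtual ~SlowPathCode() = default;
  virtual void EmitNativeCode(CodeGenerator* codegen) = 0;
};

class IntrinsicSlowPath : public SlowPathCode {
 public:
  void EmitNativeCode(CodeGenerator* codegen) override {
    // Move the intrinsic's operands into the calling-convention registers
    // by delegating to the resolver-side EmitNativeCode, then call out.
    // The register names here are illustrative only.
    HParallelMove parallel_move{{{"r4", "r0"}, {"r5", "r1"}}};
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    codegen->Emit("  bl <runtime implementation of the intrinsic>  ; illustrative");
  }
};

int main() {
  CodeGenerator codegen;
  IntrinsicSlowPath slow_path;
  slow_path.EmitNativeCode(&codegen);
  return 0;
}
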