/art/runtime/arch/x86_64/ |
D | quick_entrypoints_x86_64.S |
      37  addq MACRO_LITERAL(4 * 8), %rsp
     132  addq LITERAL(8 + 4*8), %rsp
     247  addq MACRO_LITERAL(80 + 4 * 8), %rsp
     399  addq MACRO_LITERAL(1), %r10  // shorty++
     406  addq MACRO_LITERAL(4), %r11  // arg_array++
     410  addq MACRO_LITERAL(4), %r11  // arg_array++
     414  addq MACRO_LITERAL(8), %r11  // arg_array+=2
     418  addq MACRO_LITERAL(4), %r11  // arg_array++
     430  addq MACRO_LITERAL(1), %r10  // shorty++
     440  addq MACRO_LITERAL(4), %r11  // arg_array++
     [all …]
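The shorty++ / arg_array++ comments above belong to the argument-marshaling loop of the quick invoke stubs: the stub walks the method's shorty one character per argument while stepping the argument array by 4 bytes for narrow arguments and 8 bytes for wide (J/D) ones. A minimal C++ sketch of that stepping, assuming the shorty's first character is the return type; the helper name is illustrative, not ART's:

    #include <cstddef>

    // Byte distance the stub's arg_array pointer travels for a given shorty,
    // mirroring the addq increments above: 4 per narrow argument, 8 per wide
    // (long/double) argument. Illustrative helper, not ART's code.
    size_t ArgArrayBytes(const char* shorty) {
      size_t bytes = 0;
      for (++shorty; *shorty != '\0'; ++shorty) {  // skip return type, then "shorty++"
        bytes += (*shorty == 'J' || *shorty == 'D') ? 8 : 4;
      }
      return bytes;
    }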
|
D | jni_entrypoints_x86_64.S | 63 addq LITERAL(72 + 4 * 8), %rsp
|
/art/runtime/interpreter/mterp/x86_64/ |
D | binopWide2addr.S | 9 $instr # for ex: addq %rax,(rFP,%rcx,4)
|
D | binopWide.S | 8 $instr # ex: addq (rFP,%rcx,4),%rax
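binopWide.S and binopWide2addr.S above are templates: mterp substitutes a concrete instruction for $instr when generating mterp_x86_64.S (the expanded addq/subq/imulq/andq/orq/xorq forms appear further down in this listing). A rough C++ sketch of what one add-long instantiation computes, assuming rFP addresses an array of 4-byte vregs and a wide value occupies two consecutive slots; names here are illustrative, not ART's:

    #include <cstdint>
    #include <cstring>

    // One add-long step: both operands are read as 8-byte values spanning two
    // 4-byte vreg slots, which is why the expanded template addresses memory
    // as (rFP,%rcx,4). Illustrative helper, not ART's code.
    int64_t AddLongFromFrame(const uint32_t* fp, uint32_t vB, uint32_t vC) {
      int64_t lhs;
      int64_t rhs;
      std::memcpy(&lhs, fp + vB, sizeof(lhs));
      std::memcpy(&rhs, fp + vC, sizeof(rhs));
      return lhs + rhs;  // the substituted addq
    }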
|
D | footer.S | 286 addq $$FRAME_SIZE, %rsp
|
D | header.S | 224 addq rIBASE, %rax
|
/art/compiler/utils/x86_64/ |
D | assembler_x86_64.h |
     556  void addq(CpuRegister reg, const Immediate& imm);
     557  void addq(CpuRegister dst, CpuRegister src);
     558  void addq(CpuRegister dst, const Address& address);
|
D | assembler_x86_64.cc |
    1582  void X86_64Assembler::addq(CpuRegister reg, const Immediate& imm) {  in addq() function in art::x86_64::X86_64Assembler
    1590  void X86_64Assembler::addq(CpuRegister dst, const Address& address) {  in addq() function in art::x86_64::X86_64Assembler
    1598  void X86_64Assembler::addq(CpuRegister dst, CpuRegister src) {  in addq() function in art::x86_64::X86_64Assembler
    2336  addq(CpuRegister(RSP), Immediate(2 * sizeof(intptr_t)));  in LoadDoubleConstant()
    2718  addq(CpuRegister(RSP), Immediate(adjust));  in RemoveFrame()
    2736  addq(CpuRegister(RSP), Immediate(-static_cast<int64_t>(adjust)));  in IncreaseFrameSize()
    2742  addq(CpuRegister(RSP), Immediate(adjust));  in DecreaseFrameSize()
    2946  addq(CpuRegister(RSP), Immediate(16));  in Move()
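The header and implementation entries above cover the three addq() overloads: register plus immediate, register plus register, and register plus memory operand. A minimal usage sketch; the include path, the register constants, and the Address(base, displacement) form are assumptions based on the usages visible in this listing, not verified against the tree:

    #include "utils/x86_64/assembler_x86_64.h"  // assumed include path

    // Emits: addq $16, %rsp ; addq %rbx, %rax ; addq 8(%rsp), %rax
    void EmitAddqForms(art::x86_64::X86_64Assembler* assembler) {
      using namespace art::x86_64;
      assembler->addq(CpuRegister(RSP), Immediate(16));                 // reg += imm (sign-extended imm32)
      assembler->addq(CpuRegister(RAX), CpuRegister(RBX));              // reg += reg
      assembler->addq(CpuRegister(RAX), Address(CpuRegister(RSP), 8));  // reg += 64-bit memory operand
    }

RemoveFrame(), IncreaseFrameSize(), and DecreaseFrameSize() in the listing use the immediate form to move %rsp by a frame-size delta.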
|
D | assembler_x86_64_test.cc |
     305  DriverStr(RepeatRR(&x86_64::X86_64Assembler::addq, "addq %{reg2}, %{reg1}"), "addq");  in TEST_F()
     309  DriverStr(RepeatRI(&x86_64::X86_64Assembler::addq, 4U, "addq ${imm}, %{reg}"), "addqi");  in TEST_F()
     881  GetAssembler()->addq(x86_64::CpuRegister(x86_64::R12),  in TEST_F()
|
/art/compiler/jni/ |
D | jni_cfi_test_expected.inc |
     290  // 0x0000004c: addq rsp, -32
     292  // 0x00000050: addq rsp, 32
     303  // 0x00000070: addq rsp, 72
|
/art/runtime/interpreter/mterp/out/ |
D | mterp_x86_64.S |
     231  addq rIBASE, %rax
    4004  addq (rFP,%rcx,4), %rax   # ex: addq (rFP,%rcx,4),%rax
    4021  subq (rFP,%rcx,4), %rax   # ex: addq (rFP,%rcx,4),%rax
    4038  imulq (rFP,%rcx,4), %rax  # ex: addq (rFP,%rcx,4),%rax
    4135  andq (rFP,%rcx,4), %rax   # ex: addq (rFP,%rcx,4),%rax
    4152  orq (rFP,%rcx,4), %rax    # ex: addq (rFP,%rcx,4),%rax
    4169  xorq (rFP,%rcx,4), %rax   # ex: addq (rFP,%rcx,4),%rax
    4715  addq %rax, (rFP,%rcx,4)   # for ex: addq %rax,(rFP,%rcx,4)
    4733  subq %rax, (rFP,%rcx,4)   # for ex: addq %rax,(rFP,%rcx,4)
    4846  andq %rax, (rFP,%rcx,4)   # for ex: addq %rax,(rFP,%rcx,4)
    [all …]
|
/art/compiler/optimizing/ |
D | code_generator_x86_64.cc |
    1102  __ addq(CpuRegister(RSP), Immediate(adjust));  in GenerateFrameExit() local
    2927  __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());  in VisitAdd() local
    2929  __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());  in VisitAdd() local
    2940  __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));  in VisitAdd() local
    3271  __ addq(CpuRegister(RSP), Immediate(2 * elem_size));  in GenerateRemFP() local
    3347  __ addq(rdx, numerator);  in DivByPowerOfTwo() local
    3450  __ addq(rdx, numerator);  in GenerateDivRemWithAnyConstant() local
    3464  __ addq(rdx, rax);  in GenerateDivRemWithAnyConstant() local
    6553  __ addq(temp_reg, base_reg);  in VisitPackedSwitch() local
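The VisitAdd() hits show the backend's two forms for a 64-bit add: register plus register, and register plus a constant narrowed to Immediate(int32_value), since addq only accepts a sign-extended 32-bit immediate. A condensed, illustrative sketch of that choice; it is not the generator's actual control flow, and the names and the optional parameter are assumptions:

    #include <cstdint>
    #include <optional>
    #include "utils/x86_64/assembler_x86_64.h"  // assumed include path, as above

    // Use the immediate form only when the constant fits in a signed 32 bits;
    // otherwise the right-hand side must already live in a register.
    void EmitLongAdd(art::x86_64::X86_64Assembler* assembler,
                     art::x86_64::CpuRegister out,
                     art::x86_64::CpuRegister rhs_reg,
                     std::optional<int64_t> rhs_constant) {
      using art::x86_64::Immediate;
      if (rhs_constant.has_value() &&
          *rhs_constant >= INT32_MIN && *rhs_constant <= INT32_MAX) {
        assembler->addq(out, Immediate(static_cast<int32_t>(*rhs_constant)));
      } else {
        assembler->addq(out, rhs_reg);
      }
    }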
|
D | optimizing_cfi_test_expected.inc | 130 // 0x00000026: addq rsp, 40
|
D | intrinsics_x86_64.cc |
     263  __ addq(out, mask);  in GenAbsInteger() local
    1560  __ addq(string_obj, Immediate(value_offset));  in GenerateStringIndexOf() local
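The addq(out, mask) in GenAbsInteger() is consistent with the standard branchless absolute-value sequence, where mask is the value shifted arithmetically right by 63. The same idea in plain C++, for reference only:

    #include <cstdint>

    // Branchless abs: mask is all-ones for negative x and zero otherwise, so
    // add-then-xor leaves non-negative values unchanged and two's-complement
    // negates negative ones (the shape of a sarq/addq/xorq sequence).
    int64_t BranchlessAbs(int64_t x) {
      int64_t mask = x >> 63;  // arithmetic shift on typical targets: 0 or -1
      return (x + mask) ^ mask;
    }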
|