/art/runtime/interpreter/mterp/x86_64/
D | floating_point.S |
      19  movzbq 3(rPC), %rcx                      # ecx <- CC
      23  ucomis${suff} VREG_ADDRESS(%rcx), %xmm0
      42  movl rINST, %ecx                         # rcx <- A+
      47  SET_VREG_XMMd %xmm0, %rcx
      48  CLEAR_WIDE_REF %rcx
      50  SET_VREG_XMMs %xmm0, %rcx
      51  CLEAR_REF %rcx
      56  movzbq 2(rPC), %rcx                      # ecx <- BB
      58  GET_VREG_XMM${suff} %xmm0, %rcx          # %xmm0 <- 1st src
      75  GET_VREG_XMM${suff} %xmm0, %rcx          # %xmm0 <- 1st src
      [all …]
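These mterp handlers use %rcx only as a scratch decode register: for a 23x-format Dalvik instruction ("op vAA, vBB, vCC"), the BB and CC operand bytes sit at offsets 2 and 3 from rPC, and the zero-extended byte then indexes the frame's virtual-register file. A minimal C++ sketch of the vCC load, where pc and vregs are illustrative stand-ins for rPC and VREG_ADDRESS:

    #include <cstdint>
    #include <cstring>

    // Sketch only: the vreg file holds 32-bit slots, and a wide (double)
    // value occupies a consecutive pair, which is why 8 bytes are copied.
    static inline double LoadVCCAsDouble(const uint8_t* pc, const uint32_t* vregs) {
      uint64_t cc = pc[3];                             // movzbq 3(rPC), %rcx
      double value;
      std::memcpy(&value, &vregs[cc], sizeof(value));  // GET_VREG_XMMd %xmm0, %rcx
      return value;
    }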
D | array.S |
      10  movzbq 3(rPC), %rcx                      # ecx <- CC
      12  GET_VREG %ecx, %rcx                      # ecx <- vCC (requested index)
      18  movq $data_offset(%rax,%rcx,8), %rax
      21  $load $data_offset(%rax,%rcx,$shift), %eax
      43  movzbq 3(rPC), %rcx                      # rcx <- CC
      45  GET_VREG OUT_32_ARG1, %rcx               # ecx <- vCC (requested index)
      48  movq rSELF, %rcx
      49  cmpq $$0, THREAD_EXCEPTION_OFFSET(%rcx)
      69  movzbq 3(rPC), %rcx                      # rcx <- CC
      71  GET_VREG %ecx, %rcx                      # ecx <- vCC (requested index)
      [all …]
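The aget/aput handlers decode the index the same way and then lean on x86's scaled addressing: "$load $data_offset(%rax,%rcx,$shift), %eax" performs base + offset + index*scale in one instruction. A rough C++ equivalent for a 32-bit element, with data_offset and the bounds check as assumed stand-ins for ART's mirror::Array layout:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Hypothetical layout; the real handlers branch to a common
    // array-index error path instead of returning 0.
    static inline int32_t AGetInt(const uint8_t* array, uint32_t index,
                                  uint32_t length, size_t data_offset) {
      if (index >= length) {
        return 0;
      }
      int32_t value;
      std::memcpy(&value, array + data_offset + index * 4u, sizeof(value));
      return value;   // $load $data_offset(%rax,%rcx,4), %eax
    }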
D | arithmetic.S |
       7  movzbq 3(rPC), %rcx                      # rcx <- CC
      10  GET_WIDE_VREG $second, %rcx              # ecx <- vCC
      13  GET_VREG $second, %rcx                   # ecx <- vCC
      66  movl rINST, %ecx                         # rcx <- BA
      67  sarl $$4, %ecx                           # rcx <- B
      71  GET_WIDE_VREG $second, %rcx              # ecx <- vB
      74  GET_VREG $second, %rcx                   # ecx <- vB
     188  movzbq 3(rPC), %rcx                      # rcx <- CC
     190  $instr VREG_ADDRESS(%rcx), %eax
     201  movzbq 3(rPC), %rcx                      # ecx <- CC
      [all …]
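Two decode idioms recur here: "movzbq 3(rPC), %rcx" fetches a whole operand byte, while lines 66-67 split the packed A|B byte of a 12x-format instruction, B being the high nibble. In C++ terms (names are mine, not mterp's):

    #include <cstdint>

    // rINST already holds the zero-extended A|B byte, so the arithmetic
    // shift at line 67 cannot drag in sign bits.
    static inline void DecodeBA(uint32_t inst_byte, uint32_t* a, uint32_t* b) {
      *b = inst_byte >> 4;     // sarl $4, %ecx   -> B
      *a = inst_byte & 0xfu;   // andb $0xf, %cl  -> A (done where A is needed)
    }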
D | control_flow.S |
      10  movl rINST, %ecx                         # rcx <- A+
      12  andb $$0xf, %cl                          # rcx <- A
      13  GET_VREG %eax, %rcx                      # eax <- vA
     129  movslq 2(rPC), OUT_ARG0                  # rcx <- ssssssssBBBBbbbb
     130  leaq (rPC,OUT_ARG0,2), OUT_ARG0          # rcx <- PC + ssssssssBBBBbbbb*2
     195  movq rSELF, %rcx
     196  movq %rax, THREAD_EXCEPTION_OFFSET(%rcx)
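Lines 129-130 compute a 32-bit branch target: the signed offset at byte 2 of the instruction counts 16-bit code units, which is what the leaq scale of 2 converts to bytes. Pointer arithmetic on uint16_t* hides the same scaling in C++ (pc stands in for rPC):

    #include <cstdint>
    #include <cstring>

    // memcpy because the 32-bit offset is only guaranteed 2-byte alignment.
    static inline const uint16_t* Goto32Target(const uint16_t* pc) {
      int32_t offset;
      std::memcpy(&offset, reinterpret_cast<const uint8_t*>(pc) + 2, sizeof(offset));
      return pc + offset;      // leaq (rPC,OUT_ARG0,2), OUT_ARG0
    }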
D | other.S |
     157  movzwq 4(rPC), %rcx                      # ecx <- BBBB
     159  GET_VREG %edx, %rcx
     169  movq rSELF, %rcx
     170  movl THREAD_EXCEPTION_OFFSET(%rcx), %eax
     172  movl $$0, THREAD_EXCEPTION_OFFSET(%rcx)
     225  SET_WIDE_VREG %rdx, %rcx                 # v[A] <- rdx
     231  movzwq 4(rPC), %rcx                      # ecx <- BBBB
     233  GET_WIDE_VREG %rdx, %rcx                 # rdx <- v[B]
     241  GET_WIDE_VREG %rdx, %rcx                 # rdx <- v[B]
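Lines 169-172 are the move-exception pattern: load the pending exception out of the Thread object, hand it to the frame, and zero the field so it is no longer pending. Shape only, with a stand-in Thread type rather than ART's real class:

    struct FakeThread { void* exception; };  // illustrative, not ART's layout

    static inline void* TakeException(FakeThread* self) {
      void* e = self->exception;   // movl THREAD_EXCEPTION_OFFSET(%rcx), %eax
      self->exception = nullptr;   // movl $0, THREAD_EXCEPTION_OFFSET(%rcx)
      return e;
    }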
D | main.S |
     145  #define IN_ARG3 %rcx
     152  #define OUT_ARG3 %rcx
     522  movq rSELF, %rcx
     523  cmpq $$0, THREAD_EXCEPTION_OFFSET(%rcx)
     540  leaq (%rax, %rcx, 2), rPC
D | object.S |
      69  movq rSELF, %rcx
      70  cmpq $$0, THREAD_EXCEPTION_OFFSET(%rcx)
/art/runtime/interpreter/mterp/x86_64ng/
D | array.S |
      10  movzbq 3(rPC), %rcx                      # ecx <- CC
      12  GET_VREG %esi, %rcx                      # ecx <- vCC (requested index)
      65  movzbq 3(rPC), %rcx                      # rcx <- CC
      67  GET_VREG %esi, %rcx                      # esi <- vCC (requested index)
      98  movzbq 3(rPC), %rcx                      # rcx <- CC
     100  GET_VREG %esi, %rcx                      # esi <- vCC (requested index)
     119  movl MIRROR_ARRAY_LENGTH_OFFSET(%rcx), rINST
     126  movslq 2(rPC), %rcx                      # rcx <- ssssssssBBBBbbbb
     127  leaq (rPC,%rcx,2), OUT_ARG0              # OUT_ARG0 <- PC + ssssssssBBBBbbbb*2
D | control_flow.S |
      10  movl rINST, %ecx                         # rcx <- A+
      12  andb $$0xf, %cl                          # rcx <- A
      13  GET_VREG %eax, %rcx                      # eax <- vA
     120  movslq 2(rPC), OUT_ARG0                  # rcx <- ssssssssBBBBbbbb
     121  leaq (rPC,OUT_ARG0,2), OUT_ARG0          # rcx <- PC + ssssssssBBBBbbbb*2
D | other.S |
     138  movzwq 4(rPC), %rcx                      # ecx <- BBBB
     140  GET_VREG %edx, %rcx
     201  SET_WIDE_VREG %rdx, %rcx                 # v[A] <- rdx
     207  movzwq 4(rPC), %rcx                      # ecx <- BBBB
     209  GET_WIDE_VREG %rdx, %rcx                 # rdx <- v[B]
     217  GET_WIDE_VREG %rdx, %rcx                 # rdx <- v[B]
D | main.S |
      45  #define IN_ARG3 %rcx
      50  #define OUT_ARG3 %rcx
     124  addq rIBASE, %rcx
     125  jmp *%rcx
     996  leaq (rREFS, %rcx, 4), %rax              # pointer to first argument in reference array
     998  leaq (rFP, %rcx, 4), %rcx                # pointer to first argument in register array
     999  leaq (%rcx, %rdi, 4), %rdi               # pointer to last argument in register array
    1009  cmpq %rcx, %rdi
    1152  GET_VREG %ecx, %rcx
    1251  LOOP_OVER_SHORTY_LOADING_GPRS rcx, ecx, r11, r9, r10, .Lgpr_setup_finished_\suffix
      [all …]
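Lines 124-125 are the interpreter dispatch itself: %rcx arrives holding the next opcode already scaled to the fixed per-handler stride, gets the handler-table base (rIBASE) added, and is jumped through. The closest portable analogue is the GCC/Clang computed-goto extension; the two-opcode table below is a toy, not the real 256-entry layout:

    // Requires the GNU labels-as-values extension (GCC/Clang).
    extern "C" int DispatchToy(const unsigned char* code) {
      static void* const handlers[] = { &&op_nop, &&op_return };
      goto *handlers[*code];       // addq rIBASE, %rcx; jmp *%rcx
    op_nop:
      ++code;
      goto *handlers[*code];
    op_return:
      return 0;
    }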
D | invoke.S |
      95  movq (%rdx, %rcx, 8), %rdi
/art/runtime/arch/x86_64/
D | jni_entrypoints_x86_64.S |
      29  PUSH_ARG rcx                             // Arg.
      66  POP_ARG rcx                              // Arg.
      91  PUSH_ARG rcx
     123  POP_ARG rcx
     145  PUSH_ARG rcx
     148  movq %rax, %rcx
     154  POP_ARG rcx
     177  movq %rcx, 112(%rax)
     258  movq 112(%rbp), %rcx
     278  PUSH_ARG rcx
      [all …]
D | quick_entrypoints_x86_64.S |
      84  PUSH rcx
     163  POP rcx
     313  movq %rsp, %rcx                          // pass SP
     472  LOOP_OVER_SHORTY_LOADING_GPRS rcx, ecx, .Lgpr_setup_finished
     565  LOOP_OVER_SHORTY_LOADING_GPRS rcx, ecx, .Lgpr_setup_finished2
     639  popq %rcx
     672  movq %gs:THREAD_SELF_OFFSET, %rcx        // pass Thread::Current()
     716  movq %gs:THREAD_SELF_OFFSET, %rcx        // pass Thread::Current()
     806  movq THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET(%r8), %rcx   // rcx = alloc stack top.
     807  cmpq THREAD_LOCAL_ALLOC_STACK_END_OFFSET(%r8), %rcx
      [all …]
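The LOOP_OVER_SHORTY_LOADING_GPRS uses at 472 and 565 walk the callee's shorty string (return type first, then one character per argument), and the .S file expands one copy of the macro per target register. Collapsed into a loop, the routing looks roughly like this; the register counts and ArgSlot type are illustrative:

    #include <cstdint>
    #include <cstring>

    struct ArgSlot { uint64_t raw; };  // stand-in for the stacked argument area

    // 'F'/'D' arguments go to XMM registers, everything else to the next
    // free GPR; 64-bit widths and reference tagging are omitted here.
    static void RouteArgs(const char* shorty, const ArgSlot* args,
                          uint64_t gprs[4], double xmms[8]) {
      int g = 0, x = 0;
      for (const char* p = shorty + 1; *p != '\0'; ++p, ++args) {
        if (*p == 'F' || *p == 'D') {
          if (x < 8) std::memcpy(&xmms[x++], &args->raw, sizeof(double));
        } else if (g < 4) {
          gprs[g++] = args->raw;
        }
      }
    }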
D | memcmp16_x86_64.S |
      40  movslq (%r11, INDEX, SCALE), %rcx; \
      41  add %r11, %rcx; \
      42  jmp *%rcx; \
      61  mov %rsi, %rcx
      64  sub %rsi, %rcx
      66  sub %rcx, %rdi
      67  add %rcx, %rdx
     780  mov -16(%rsi), %rcx
     781  cmp %rax, %rcx
     785  mov -8(%rsi), %rcx
      [all …]
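Lines 40-42 dispatch through a relative jump table keyed on the residual length ("jmp *%rcx" after adding the table base), and the tail at 780-785 compares the last 16 bytes with wide loads. Assuming the usual memcmp-style contract over 16-bit units, a reference version is just:

    #include <cstddef>
    #include <cstdint>

    // Reference semantics only; the .S file reaches the same answer with
    // 8-byte loads plus a computed jump over the residual element counts.
    int32_t MemCmp16Ref(const uint16_t* s0, const uint16_t* s1, size_t count) {
      for (size_t i = 0; i < count; ++i) {
        if (s0[i] != s1[i]) {
          return static_cast<int32_t>(s0[i]) - static_cast<int32_t>(s1[i]);
        }
      }
      return 0;
    }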
D | asm_support_x86_64.S |
     352  PUSH_ARG rcx                             // Quick arg 3.
     392  POP_ARG rcx
     483  movq %gs:THREAD_EXCEPTION_OFFSET, %rcx   // get exception field
     484  testq %rcx, %rcx                         // rcx == 0 ?
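Lines 483-484 read the pending-exception field directly off %gs, which ART's x86-64 port points at the current Thread, so the fast path needs no call to fetch the thread. A minimal inline-asm analogue of such a %gs-relative load (GCC/Clang syntax; the offset is a parameter here, not ART's constant, and the load is only meaningful in a process whose %gs base is set):

    #include <cstdint>

    static inline uint64_t LoadGsQword(uint64_t offset) {
      uint64_t value;
      asm volatile("movq %%gs:(%1), %0" : "=r"(value) : "r"(offset));
      return value;   // movq %gs:THREAD_EXCEPTION_OFFSET, %rcx
    }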
/art/compiler/optimizing/
D | intrinsics_x86_64.cc |
    1194  CpuRegister rcx = locations->GetTemp(0).AsRegister<CpuRegister>();  in VisitStringEquals() local
    1227  __ movl(rcx, Address(str, class_offset));   in VisitStringEquals()
    1229  __ cmpl(rcx, Address(arg, class_offset));   in VisitStringEquals()
    1238  __ movl(rcx, Address(str, count_offset));   in VisitStringEquals()
    1242  __ cmpl(rcx, Address(arg, count_offset));   in VisitStringEquals()
    1253  __ shrl(rcx, Immediate(1));                 in VisitStringEquals()
    1257  __ addl(rcx, Immediate(1));                 in VisitStringEquals()
    1258  __ shrl(rcx, Immediate(1));                 in VisitStringEquals()
    1266  __ addl(rcx, Immediate(3));                 in VisitStringEquals()
    1267  __ shrl(rcx, Immediate(2));                 in VisitStringEquals()
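In VisitStringEquals the rcx temp first rules out mismatched classes and count words, then sizes an 8-byte comparison loop. My reading of the shift/add sequence at 1253-1267, inferred from these fragments rather than the full source: after the compression bit is shifted out, a compressed (1-byte-per-char) string needs ceil(n/8) quadwords and an uncompressed one ceil(n/4):

    #include <cstdint>

    // Inference, not quoted source: 'length' is the char count left after
    // shrl $1 strips the compression flag from the count word.
    static inline uint32_t QuadwordsToCompare(uint32_t length, bool compressed) {
      uint32_t halfwords = compressed ? (length + 1) >> 1  // addl $1; shrl $1
                                      : length;
      return (halfwords + 3) >> 2;                         // addl $3; shrl $2
    }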
/art/runtime/arch/
D | stub_test.cc |
     334  PUSH(%%rcx)                              in Invoke3WithReferrerAndHidden()
     379  POP(%%rcx)                               in Invoke3WithReferrerAndHidden()