/external/valgrind/none/tests/amd64/ |
D | pcmpxstrx64.stdout.exp |
      3  istri $0x4A: xmm0 55555555555555555555555555555555 rcx 5555555555550006 flags 00000881
      4  istri $0x0A: xmm0 55555555555555555555555555555555 rcx 5555555555550000 flags 00000881
      5  istrm $0x4A: xmm0 000000000000000000ffffffffffffff rcx 5555555555555555 flags 00000881
      6  istrm $0x0A: xmm0 0000000000000000000000000000007f rcx 5555555555555555 flags 00000881
      7  estri $0x4A: xmm0 55555555555555555555555555555555 rcx 555555555555000f flags 000008c1
      8  estri $0x0A: xmm0 55555555555555555555555555555555 rcx 5555555555550000 flags 000008c1
      9  estrm $0x4A: xmm0 ffffffffffffffffffffffffffffffff rcx 5555555555555555 flags 000008c1
     10  estrm $0x0A: xmm0 0000000000000000000000000000ffff rcx 5555555555555555 flags 000008c1
     13  istri $0x4A: xmm0 55555555555555555555555555555555 rcx 555555555555000f flags 000000c1
     14  istri $0x0A: xmm0 55555555555555555555555555555555 rcx 5555555555550007 flags 000000c1
    [all …]
|
D | pcmpxstrx64w.stdout.exp |
      3  istri $0x4B: xmm0 55555555555555555555555555555555 rcx 5555555555550002 flags 00000881
      4  istri $0x0B: xmm0 55555555555555555555555555555555 rcx 5555555555550000 flags 00000881
      5  istrm $0x4B: xmm0 00000000000000000000ffffffffffff rcx 5555555555555555 flags 00000881
      6  istrm $0x0B: xmm0 00000000000000000000000000000007 rcx 5555555555555555 flags 00000881
      7  estri $0x4B: xmm0 55555555555555555555555555555555 rcx 5555555555550007 flags 000008c1
      8  estri $0x0B: xmm0 55555555555555555555555555555555 rcx 5555555555550000 flags 000008c1
      9  estrm $0x4B: xmm0 ffffffffffffffffffffffffffffffff rcx 5555555555555555 flags 000008c1
     10  estrm $0x0B: xmm0 000000000000000000000000000000ff rcx 5555555555555555 flags 000008c1
     13  istri $0x4B: xmm0 55555555555555555555555555555555 rcx 5555555555550007 flags 000000c1
     14  istri $0x0B: xmm0 55555555555555555555555555555555 rcx 5555555555550003 flags 000000c1
    [all …]
|
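The two expected-output files above record what the SSE4.2 string instructions (pcmpistri/pcmpistrm/pcmpestri/pcmpestrm) produce for byte ($0x..A) and word ($0x..B) element sizes; the index result of the *stri forms is architecturally defined to land in ecx, which is why rcx shows up in every row, while the *strm mask lands in xmm0. A minimal sketch of driving pcmpistri from C via the SSE4.2 intrinsic — the strings and the imm8 flags here are illustrative, not taken from the test:

    #include <nmmintrin.h>   /* SSE4.2: _mm_cmpistri compiles to pcmpistri */
    #include <stdio.h>

    int main(void)
    {
        /* Find the first byte of the haystack that matches any byte of the
           set; pcmpistri writes the resulting index to ecx/rcx. */
        __m128i haystack = _mm_loadu_si128((const __m128i *)"abcdefXhijklmnop");
        __m128i set      = _mm_loadu_si128((const __m128i *)"X\0..............");
        int idx = _mm_cmpistri(set, haystack,
                               _SIDD_UBYTE_OPS | _SIDD_CMP_EQUAL_ANY);
        printf("first match at index %d\n", idx);   /* prints 6 */
        return 0;
    }

Build with -msse4.2 (gcc/clang).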
D | cmpxchg.c |
     13  ULong rcx; variable
     25  rcx = 0x33333333; rbx = 0x44444444; in main()
     28  rax&0xff,rbx&0xff,rcx&0xff); in main()
     39  "\tmov " VG_SYM(rcx) ",%rcx\n" in main()
     44  "\tmov " VG_SYM(rcx) "(%rip),%rcx\n" in main()
     69  rcx = 0x55555555; rbx = 0x55555555; in main()
     72  rax&0xff,rbx&0xff,rcx&0xff); in main()
     83  "\tmov " VG_SYM(rcx) ",%rcx\n" in main()
     88  "\tmov " VG_SYM(rcx) "(%rip),%rcx\n" in main()
    113  rcx = 0x33333333; rbx = 0x44444444; in main()
    [all …]
|
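cmpxchg compares rax (or its sub-register) with the destination and, on equality, stores the source operand, setting ZF either way; the test above drives it through VG_SYM-addressed globals loaded into rcx and rbx. A self-contained sketch of the same pattern with GCC inline assembly — the lock prefix is an addition here, the valgrind test also exercises plain unlocked forms:

    #include <stdio.h>

    /* Compare-and-swap via cmpxchgq: rax carries the expected value in and
       the previous memory value out; the store happens only on a match. */
    static unsigned long cas64(unsigned long *dst, unsigned long expected,
                               unsigned long desired)
    {
        unsigned long prev;
        __asm__ __volatile__("lock cmpxchgq %2, %1"
                             : "=a"(prev), "+m"(*dst)
                             : "r"(desired), "0"(expected)
                             : "memory");
        return prev;   /* equals expected iff the exchange happened */
    }

    int main(void)
    {
        unsigned long v = 0x33333333;
        unsigned long old = cas64(&v, 0x33333333, 0x44444444);
        printf("old=%#lx new=%#lx\n", old, v);
        return 0;
    }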
D | asorep.c |
     10  unsigned long rdi, rsi, rcx, rax; in main() local
     20  : "=D" (rdi), "=S" (rsi), "=c" (rcx) in main()
     26  || rcx) in main()
     31  : "=D" (rdi), "=c" (rcx), "+a" (rax) in main()
     36  || rcx in main()
     50  : "=D" (rdi), "=S" (rsi), "=c" (rcx) in main()
     54  || rcx != 17ULL) in main()
     60  : "=D" (rdi), "=c" (rcx), "+a" (rax) in main()
     63  || rcx != 23ULL in main()
     69  : "=D" (rdi), "=c" (rcx), "+a" (rax) in main()
    [all …]
|
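asorep.c exercises rep-prefixed string instructions under address-size overrides; its "=D"/"=S"/"=c" output constraints read back rdi, rsi, and rcx after each operation to confirm that the count in rcx was fully consumed and the pointers advanced. A sketch of the basic (non-overridden) pattern in the same inline-asm style:

    #include <stdio.h>

    int main(void)
    {
        /* rep movsb copies rcx bytes from [rsi] to [rdi]; afterwards rcx
           must be zero and rdi/rsi advanced by the count. */
        char src[16] = "hello, rcx!";
        char dst[16];
        unsigned long rdi = (unsigned long)dst;
        unsigned long rsi = (unsigned long)src;
        unsigned long rcx = sizeof src;
        __asm__ __volatile__("rep movsb"
                             : "+D"(rdi), "+S"(rsi), "+c"(rcx)
                             :
                             : "memory");
        printf("copied \"%s\", rcx=%lu\n", dst, rcx);   /* rcx=0 */
        return 0;
    }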
/external/llvm/test/MC/X86/ |
D | x86_64-fma4-encoding.s |
      6  vfmaddss (%rcx), %xmm1, %xmm0, %xmm0
     10  vfmaddss %xmm1, (%rcx),%xmm0, %xmm0
     18  vfmaddsd (%rcx), %xmm1, %xmm0, %xmm0
     22  vfmaddsd %xmm1, (%rcx),%xmm0, %xmm0
     34  vfmaddps (%rcx), %xmm1, %xmm0, %xmm0
     38  vfmaddps %xmm1, (%rcx),%xmm0, %xmm0
     46  vfmaddpd (%rcx), %xmm1, %xmm0, %xmm0
     50  vfmaddpd %xmm1, (%rcx),%xmm0, %xmm0
     58  vfmaddps (%rcx), %ymm1, %ymm0, %ymm0
     62  vfmaddps %ymm1, (%rcx),%ymm0, %ymm0
    [all …]
|
D | x86_64-avx-encoding.s |
     37  vaddss -4(%rcx,%rbx,8), %xmm10, %xmm11
     41  vsubss -4(%rcx,%rbx,8), %xmm10, %xmm11
     45  vmulss -4(%rcx,%rbx,8), %xmm10, %xmm11
     49  vdivss -4(%rcx,%rbx,8), %xmm10, %xmm11
     53  vaddsd -4(%rcx,%rbx,8), %xmm10, %xmm11
     57  vsubsd -4(%rcx,%rbx,8), %xmm10, %xmm11
     61  vmulsd -4(%rcx,%rbx,8), %xmm10, %xmm11
     65  vdivsd -4(%rcx,%rbx,8), %xmm10, %xmm11
    101  vaddps -4(%rcx,%rbx,8), %xmm10, %xmm11
    105  vsubps -4(%rcx,%rbx,8), %xmm10, %xmm11
    [all …]
|
D | x86_64-xop-encoding.s |
     10  vphsubwd (%rcx,%rax), %xmm1
     18  vphsubdq (%rcx,%rax), %xmm1
     34  vphaddwq (%rcx), %xmm4
     50  vphadduwq (%rcx,%rax), %xmm6
     66  vphaddudq 8(%rcx,%rax), %xmm4
     74  vphaddubw (%rcx), %xmm3
     82  vphaddubq (%rcx), %xmm4
    106  vphaddbw (%rcx,%rax), %xmm1
    114  vphaddbq (%rcx,%rax), %xmm1
    122  vphaddbd (%rcx,%rax), %xmm1
    [all …]
|
D | avx512-encodings.s |
     18  vaddpd (%rcx), %zmm27, %zmm8
     26  vaddpd (%rcx){1to8}, %zmm27, %zmm8
     74  vaddps (%rcx), %zmm13, %zmm18
     82  vaddps (%rcx){1to16}, %zmm13, %zmm18
    118  vbroadcastsd (%rcx), %zmm30
    122  vbroadcastsd (%rcx), %zmm30 {%k4}
    126  vbroadcastsd (%rcx), %zmm30 {%k4} {z}
    162  vbroadcastss (%rcx), %zmm3
    166  vbroadcastss (%rcx), %zmm3 {%k4}
    170  vbroadcastss (%rcx), %zmm3 {%k4} {z}
    [all …]
|
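In the avx512-encodings.s rows above, (%rcx){1to8} is EVEX embedded broadcast: a single 64-bit element is loaded from [rcx] and replicated across all eight lanes before the add. A rough intrinsics equivalent — a sketch only; the compiler may or may not fold the broadcast back into the memory operand:

    #include <immintrin.h>

    /* Broadcast one double from memory into every lane, then add.
       Build with -mavx512f. */
    static __m512d add_broadcast(__m512d a, const double *p)
    {
        return _mm512_add_pd(a, _mm512_set1_pd(*p));
    }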
D | x86-64-avx512f_vl.s |
     17  vaddpd (%rcx), %xmm29, %xmm20
     25  vaddpd (%rcx){1to2}, %xmm29, %xmm20
     73  vaddpd (%rcx), %ymm26, %ymm28
     81  vaddpd (%rcx){1to4}, %ymm26, %ymm28
    129  vaddps (%rcx), %xmm19, %xmm24
    137  vaddps (%rcx){1to4}, %xmm19, %xmm24
    185  vaddps (%rcx), %ymm26, %ymm25
    193  vaddps (%rcx){1to8}, %ymm26, %ymm25
    229  vbroadcastsd (%rcx), %ymm22
    233  vbroadcastsd (%rcx), %ymm22 {%k5}
    [all …]
|
D | x86-64-avx512bw_vl.s |
     17  vpaddb (%rcx), %xmm17, %xmm26
     53  vpaddb (%rcx), %ymm27, %ymm26
     89  vpaddw (%rcx), %xmm17, %xmm18
    125  vpaddw (%rcx), %ymm21, %ymm23
    205  vpcmpeqb (%rcx), %xmm21, %k4
    237  vpcmpeqb (%rcx), %ymm21, %k4
    269  vpcmpeqw (%rcx), %xmm30, %k3
    301  vpcmpeqw (%rcx), %ymm20, %k2
    333  vpcmpgtb (%rcx), %xmm30, %k3
    365  vpcmpgtb (%rcx), %ymm17, %k2
    [all …]
|
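All five .s listings above are assembler-encoding regression tests: llvm-mc assembles each instruction and FileCheck compares the emitted bytes against CHECK lines. A typical RUN line from these files looks like the following (the exact triple and attribute flags vary per file):

    // RUN: llvm-mc -triple x86_64-unknown-unknown --show-encoding %s | FileCheck %s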
/external/libvpx/libvpx/vp8/common/x86/ |
D | recon_mmx.asm |
     39  movsxd rcx, dword ptr arg(3) ;dst_stride
     45  movq [rdi+rcx], mm1
     46  movq [rdi+rcx*2], mm2
     49  lea rdi, [rdi+rcx*2]
     52  add rdi, rcx
     59  movq [rdi+rcx], mm4
     61  movq [rdi+rcx*2], mm5
     62  lea rdi, [rdi+rcx*2]
     67  movq [rdi+rcx], mm0
     68  movq [rdi+rcx*2],mm1
    [all …]
|
D | recon_sse2.asm |
     38  movsxd rcx, dword ptr arg(3) ;dst_stride
     44  movdqa [rdi+rcx], xmm1
     45  movdqa [rdi+rcx*2],xmm2
     47  lea rdi, [rdi+rcx*2]
     50  add rdi, rcx
     59  movdqa [rdi+rcx], xmm4
     60  movdqa [rdi+rcx*2],xmm5
     62  lea rdi, [rdi+rcx*2]
     65  add rdi, rcx
     74  movdqa [rdi+rcx], xmm1
    [all …]
|
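Both recon variants load dst_stride into rcx with movsxd (the argument is a 32-bit int, so it must be sign-extended before use in addressing) and then step the destination by rcx per row. A plain-C sketch of what the sse2 16-byte-wide copy computes — the function name and the rows parameter are hypothetical:

    #include <string.h>

    /* One 16-byte-wide block copy per row; stride is sign-extended from
       the 32-bit argument, mirroring the movsxd in the listings above. */
    static void copy_block16(const unsigned char *src, int src_stride,
                             unsigned char *dst, int dst_stride, int rows)
    {
        long stride = (long)dst_stride;   /* movsxd rcx, dword ptr arg(3) */
        for (int r = 0; r < rows; ++r)
            memcpy(dst + (long)r * stride, src + (long)r * src_stride, 16);
    }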
D | postproc_mmx.asm |
     61  mov rcx, 8
     66  dec rcx
     74  mov rcx, 8
     79  dec rcx
     93  mov rcx, 15 ;
    111  dec rcx
    182  mov rcx, rdx
    184  and rcx, 127
    188  movq mm4, [rax + rcx*2] ;vp8_rv[rcx*2]
    191  movq mm4, [r8 + rcx*2] ;vp8_rv[rcx*2]
    [all …]
|
/external/v8/test/cctest/ |
D | test-disasm-x64.cc |
     66  __ movq(rbx, Operand(rsp, rcx, times_2, 0)); // [rsp+rcx*4] in TEST()
     81  __ addq(rsi, Operand(rcx, times_4, 0)); in TEST()
     82  __ addq(rsi, Operand(rcx, times_4, 24)); in TEST()
     83  __ addq(rsi, Operand(rcx, times_4, -4)); in TEST()
     84  __ addq(rsi, Operand(rcx, times_4, -1999)); in TEST()
     86  __ addq(rdi, Operand(rbp, rcx, times_4, 0)); in TEST()
     87  __ addq(rdi, Operand(rbp, rcx, times_4, 12)); in TEST()
     88  __ addq(rdi, Operand(rbp, rcx, times_4, -8)); in TEST()
     89  __ addq(rdi, Operand(rbp, rcx, times_4, -3999)); in TEST()
     90  __ addq(Operand(rbp, rcx, times_4, 12), Immediate(12)); in TEST()
    [all …]
|
D | test-macro-assembler-x64.cc |
     78  using i::rcx;
    147  __ Move(rcx, value); in TestMoveSmi()
    149  __ cmpq(rcx, rdx); in TestMoveSmi()
    195  __ Move(rcx, Smi::FromInt(x)); in TestSmiCompare()
    196  __ movq(r8, rcx); in TestSmiCompare()
    199  __ SmiCompare(rcx, rdx); in TestSmiCompare()
    212  __ cmpq(rcx, r8); in TestSmiCompare()
    219  __ SmiCompare(rdx, rcx); in TestSmiCompare()
    229  __ cmpq(rcx, rcx); in TestSmiCompare()
    233  __ cmpq(rcx, r8); in TestSmiCompare()
    [all …]
|
/external/boringssl/linux-x86_64/crypto/rc4/ |
D | rc4-x86_64.S |
     21  movq %rcx,%r13
     23  xorq %rcx,%rcx
     46  movl (%rdi,%rcx,4),%edx
     47  movl %eax,(%rdi,%rcx,4)
     64  movl (%rdi,%rcx,4),%edx
     65  movl %eax,(%rdi,%rcx,4)
     72  movl (%rdi,%rcx,4),%edx
     73  movl %ebx,(%rdi,%rcx,4)
     80  movl (%rdi,%rcx,4),%edx
     81  movl %eax,(%rdi,%rcx,4)
    [all …]
|
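In this perlasm output rcx holds the RC4 index x, and the ,4 scale in (%rdi,%rcx,4) shows the state table is stored one byte per 32-bit word. A plain-C sketch of the PRGA step that the unrolled loop above performs (the md5 variant below interleaves the same step with MD5 rounds):

    /* One RC4 keystream byte: advance x, mix into y, swap, emit. */
    static unsigned char rc4_step(unsigned int S[256],
                                  unsigned int *x, unsigned int *y)
    {
        *x = (*x + 1) & 0xff;
        unsigned int tx = S[*x];
        *y = (*y + tx) & 0xff;
        unsigned int ty = S[*y];
        S[*y] = tx;                 /* swap S[x] and S[y] */
        S[*x] = ty;
        return (unsigned char)S[(tx + ty) & 0xff];
    }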
D | rc4-md5-x86_64.S |
     19  movq %rcx,%r11
     25  xorq %rcx,%rcx
     55  movl (%rdi,%rcx,4),%edx
     57  movl %eax,(%rdi,%rcx,4)
     74  movl (%rdi,%rcx,4),%edx
     76  movl %ebx,(%rdi,%rcx,4)
     92  movl (%rdi,%rcx,4),%edx
     94  movl %eax,(%rdi,%rcx,4)
    110  movl (%rdi,%rcx,4),%edx
    112  movl %ebx,(%rdi,%rcx,4)
    [all …]
|
/external/boringssl/mac-x86_64/crypto/rc4/ |
D | rc4-x86_64.S |
     20  movq %rcx,%r13
     22  xorq %rcx,%rcx
     45  movl (%rdi,%rcx,4),%edx
     46  movl %eax,(%rdi,%rcx,4)
     63  movl (%rdi,%rcx,4),%edx
     64  movl %eax,(%rdi,%rcx,4)
     71  movl (%rdi,%rcx,4),%edx
     72  movl %ebx,(%rdi,%rcx,4)
     79  movl (%rdi,%rcx,4),%edx
     80  movl %eax,(%rdi,%rcx,4)
    [all …]
|
D | rc4-md5-x86_64.S |
     19  movq %rcx,%r11
     25  xorq %rcx,%rcx
     55  movl (%rdi,%rcx,4),%edx
     57  movl %eax,(%rdi,%rcx,4)
     74  movl (%rdi,%rcx,4),%edx
     76  movl %ebx,(%rdi,%rcx,4)
     92  movl (%rdi,%rcx,4),%edx
     94  movl %eax,(%rdi,%rcx,4)
    110  movl (%rdi,%rcx,4),%edx
    112  movl %ebx,(%rdi,%rcx,4)
    [all …]
|
/external/boringssl/win-x86_64/crypto/rc4/ |
D | rc4-x86_64.asm |
     17  mov rdi,rcx
     20  mov rcx,r9
     35  mov r13,rcx
     37  xor rcx,rcx
     60  mov edx,DWORD[rcx*4+rdi]
     61  mov DWORD[rcx*4+rdi],eax
     78  mov edx,DWORD[rcx*4+rdi]
     79  mov DWORD[rcx*4+rdi],eax
     86  mov edx,DWORD[rcx*4+rdi]
     87  mov DWORD[rcx*4+rdi],ebx
    [all …]
|
D | rc4-md5-x86_64.asm |
     16  mov rdi,rcx
     19  mov rcx,r9
     34  mov r11,rcx
     40  xor rcx,rcx
     70  mov edx,DWORD[rcx*4+rdi]
     72  mov DWORD[rcx*4+rdi],eax
     89  mov edx,DWORD[rcx*4+rdi]
     91  mov DWORD[rcx*4+rdi],ebx
    107  mov edx,DWORD[rcx*4+rdi]
    109  mov DWORD[rcx*4+rdi],eax
    [all …]
|
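The mov rdi,rcx / mov rcx,r9 prologues above exist because the Windows x64 ABI passes the first four integer arguments in rcx, rdx, r8, r9, while the Linux and macOS builds of the same perlasm use the System V registers rdi, rsi, rdx, rcx; the shuffle lets one function body serve both conventions. A sketch that makes the difference visible from C, using GCC/Clang's ms_abi attribute:

    /* Same signature, two calling conventions. */
    long first_arg_sysv(long a, long b, long c, long d)
    {
        return a;                  /* arrives in rdi under System V */
    }

    __attribute__((ms_abi))
    long first_arg_win(long a, long b, long c, long d)
    {
        return a;                  /* arrives in rcx under the Microsoft ABI */
    }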
/external/boringssl/linux-x86_64/crypto/aes/ |
D | aesni-x86_64.S |
     57  movups (%rcx),%xmm0
     59  movups 16(%rcx),%xmm1
     62  movups 32(%rcx),%xmm0
     63  leaq 32(%rcx,%rax,1),%rcx
     70  movups (%rcx,%rax,1),%xmm1
     74  movups -16(%rcx,%rax,1),%xmm0
     86  movups (%rcx),%xmm0
     88  movups 16(%rcx),%xmm1
     91  movups 32(%rcx),%xmm0
     92  leaq 32(%rcx,%rax,1),%rcx
    [all …]
|
/external/boringssl/mac-x86_64/crypto/aes/ |
D | aesni-x86_64.S |
     56  movups (%rcx),%xmm0
     58  movups 16(%rcx),%xmm1
     61  movups 32(%rcx),%xmm0
     62  leaq 32(%rcx,%rax,1),%rcx
     69  movups (%rcx,%rax,1),%xmm1
     73  movups -16(%rcx,%rax,1),%xmm0
     85  movups (%rcx),%xmm0
     87  movups 16(%rcx),%xmm1
     90  movups 32(%rcx),%xmm0
     91  leaq 32(%rcx,%rax,1),%rcx
    [all …]
|
/external/boringssl/win-x86_64/crypto/aes/ |
D | aesni-x86_64.asm |
     12  movups xmm2,XMMWORD[rcx]
     36  movups xmm2,XMMWORD[rcx]
     58  movups xmm0,XMMWORD[rcx]
     60  movups xmm1,XMMWORD[16+rcx]
     63  movups xmm0,XMMWORD[32+rcx]
     64  lea rcx,[32+rax*1+rcx]
     71  movups xmm1,XMMWORD[rax*1+rcx]
     75  movups xmm0,XMMWORD[((-16))+rax*1+rcx]
     87  movups xmm0,XMMWORD[rcx]
     89  movups xmm1,XMMWORD[16+rcx]
    [all …]
|
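In all three AES-NI listings, rcx walks the expanded key schedule: each round key is 16 bytes, hence the 16(%rcx)/32(%rcx) offsets and the lea that steps rcx past the keys already consumed. A rough intrinsics sketch of the round structure for AES-128 — a hypothetical helper assuming 11 round keys, not the library's own loop:

    #include <immintrin.h>

    /* AES-128 block encryption with AES-NI: whiten with the first round
       key, nine full rounds, then the final round. Build with -maes. */
    static __m128i aes128_encrypt_block(__m128i block, const __m128i rk[11])
    {
        block = _mm_xor_si128(block, rk[0]);
        for (int i = 1; i < 10; ++i)
            block = _mm_aesenc_si128(block, rk[i]);
        return _mm_aesenclast_si128(block, rk[10]);
    }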
/external/boringssl/src/crypto/bn/asm/ |
D | rsaz-x86_64.pl |
    123  my ($out,$inp,$mod,$n0,$times) = ("%rdi","%rsi","%rdx","%rcx","%r8d");
    204  movq %r9, %rcx
    213  shrq \$63, %rcx
    266  lea (%rcx,%r10,2), %r10 #shld \$1, %rcx, %r10
    285  movq %rdx, %rcx
    286  adcq \$0, %rcx
    292  addq %rcx, %r13
    293  movq %rdx, %rcx
    294  adcq \$0, %rcx
    300  addq %rcx, %r14
    [all …]
|
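The #shld comment in the excerpt above gives the trick away: shld $1, %rcx, %r10 is emulated with shrq $63, %rcx followed by lea (%rcx,%r10,2), %r10, i.e. r10 = r10*2 + (rcx >> 63), which avoids the slower shld instruction on some microarchitectures. The same computation as a C helper (a sketch):

    #include <stdint.h>

    /* Double-width left shift by one: the lea supplies hi*2 and the shr
       supplies the bit carried in from the low word. */
    static inline uint64_t shld1(uint64_t hi, uint64_t lo)
    {
        return (hi << 1) | (lo >> 63);
    }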