Searched refs:xmm8 (Results 1 – 25 of 64) sorted by relevance

/external/libvpx/libvpx/vp8/common/x86/
loopfilter_block_sse2_x86_64.asm
204 movdqa xmm8, i5
207 LF_FILTER_HEV_MASK xmm0, xmm1, xmm2, xmm3, xmm4, xmm8, xmm9, xmm10
212 movdqa xmm8, i5
213 LF_FILTER xmm1, xmm2, xmm3, xmm8, xmm0, xmm4
219 movdqa i5, xmm8
227 LF_FILTER_HEV_MASK xmm3, xmm8, xmm0, xmm1, xmm2, xmm4, xmm10, xmm11, xmm9
232 movdqa xmm8, i9
233 LF_FILTER xmm0, xmm1, xmm4, xmm8, xmm3, xmm2
239 movdqa i9, xmm8
247 LF_FILTER_HEV_MASK xmm4, xmm8, xmm0, xmm1, xmm2, xmm3, xmm9, xmm11, xmm10
[all …]
/external/boringssl/linux-x86_64/crypto/aes/
bsaes-x86_64.S
14 movdqa (%rax),%xmm8
17 pxor %xmm8,%xmm15
18 pxor %xmm8,%xmm0
19 pxor %xmm8,%xmm1
20 pxor %xmm8,%xmm2
23 pxor %xmm8,%xmm3
24 pxor %xmm8,%xmm4
27 pxor %xmm8,%xmm5
28 pxor %xmm8,%xmm6
35 movdqa 16(%r11),%xmm8
[all …]
aesni-x86_64.S
383 pxor %xmm0,%xmm8
447 pxor %xmm0,%xmm8
520 movdqu 96(%rdi),%xmm8
541 movups %xmm8,96(%rsi)
542 movdqu 96(%rdi),%xmm8
562 movups %xmm8,96(%rsi)
584 movdqu 96(%rdi),%xmm8
593 movups %xmm8,96(%rsi)
663 movdqu 96(%rdi),%xmm8
684 movups %xmm8,96(%rsi)
[all …]
/external/boringssl/mac-x86_64/crypto/aes/
bsaes-x86_64.S
12 movdqa (%rax),%xmm8
15 pxor %xmm8,%xmm15
16 pxor %xmm8,%xmm0
17 pxor %xmm8,%xmm1
18 pxor %xmm8,%xmm2
21 pxor %xmm8,%xmm3
22 pxor %xmm8,%xmm4
25 pxor %xmm8,%xmm5
26 pxor %xmm8,%xmm6
33 movdqa 16(%r11),%xmm8
[all …]
aesni-x86_64.S
382 pxor %xmm0,%xmm8
446 pxor %xmm0,%xmm8
519 movdqu 96(%rdi),%xmm8
540 movups %xmm8,96(%rsi)
541 movdqu 96(%rdi),%xmm8
561 movups %xmm8,96(%rsi)
583 movdqu 96(%rdi),%xmm8
592 movups %xmm8,96(%rsi)
662 movdqu 96(%rdi),%xmm8
683 movups %xmm8,96(%rsi)
[all …]
/external/boringssl/win-x86_64/crypto/aes/
bsaes-x86_64.asm
16 movdqa xmm8,XMMWORD[rax]
19 pxor xmm15,xmm8
20 pxor xmm0,xmm8
21 pxor xmm1,xmm8
22 pxor xmm2,xmm8
25 pxor xmm3,xmm8
26 pxor xmm4,xmm8
29 pxor xmm5,xmm8
30 pxor xmm6,xmm8
37 movdqa xmm8,XMMWORD[16+r11]
[all …]
aesni-x86_64.asm
384 pxor xmm8,xmm0
448 pxor xmm8,xmm0
515 movaps XMMWORD[32+rsp],xmm8
537 movdqu xmm8,XMMWORD[96+rdi]
558 movups XMMWORD[96+rsi],xmm8
559 movdqu xmm8,XMMWORD[96+rdi]
579 movups XMMWORD[96+rsi],xmm8
601 movdqu xmm8,XMMWORD[96+rdi]
610 movups XMMWORD[96+rsi],xmm8
680 movdqu xmm8,XMMWORD[96+rdi]
[all …]
/external/llvm/test/CodeGen/X86/
stack-folding-fp-avx1.ll
14 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
22 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
30 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
38 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
46 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
54 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
63 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
71 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
80 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
89 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
[all …]
stack-folding-fp-sse42.ll
14 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
22 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
30 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
38 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
47 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
55 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
64 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
73 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
82 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
96 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
[all …]
stack-folding-xop.ll
14 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
23 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
32 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
41 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
50 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
59 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
68 …%1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8}…
75 …%1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8}…
84 …%1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8}…
91 …%1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8}…
[all …]
stack-folding-int-avx1.ll
14 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
23 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
32 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
41 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
50 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
59 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
88 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
108 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
117 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
126 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
[all …]
stack-folding-int-sse42.ll
14 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
23 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
32 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
41 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
50 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
59 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
115 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
135 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
144 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
153 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
[all …]
stack-folding-int-avx2.ll
14 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
23 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
32 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
44 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
51 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
61 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
70 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
79 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
88 …fect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
97 …m sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~…
[all …]
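The stack-folding tests above all use the same device: an inline-asm "nop" clobbers nearly all of the xmm registers (xmm8 among them) so that live values must be spilled, and the point of the checks, as the file names suggest, is that the reload gets folded into the consuming instruction's memory operand. A hand-written sketch of that expectation, with the register and stack offset chosen for illustration only:

    # Unfolded: reload the spilled value, then operate register-to-register.
    movaps  16(%rsp), %xmm1
    addps   %xmm1, %xmm0
    # Folded form the tests look for: the reload becomes the memory operand.
    addps   16(%rsp), %xmm0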
2009-06-03-Win64SpillXMM.ll
3 ; CHECK: movaps %xmm8, 16(%rsp)
8 tail call void asm sideeffect "", "~{xmm7},~{xmm8},~{dirflag},~{fpsr},~{flags}"() nounwind
/external/boringssl/mac-x86_64/crypto/modes/
ghash-x86_64.S
926 movdqu 0(%rdx),%xmm8
931 pxor %xmm8,%xmm0
935 pshufd $78,%xmm0,%xmm8
936 pxor %xmm0,%xmm8
963 xorps %xmm4,%xmm8
967 pxor %xmm0,%xmm8
969 pxor %xmm1,%xmm8
971 movdqa %xmm8,%xmm9
973 pslldq $8,%xmm8
975 pxor %xmm8,%xmm0
[all …]
/external/boringssl/linux-x86_64/crypto/modes/
ghash-x86_64.S
927 movdqu 0(%rdx),%xmm8
932 pxor %xmm8,%xmm0
936 pshufd $78,%xmm0,%xmm8
937 pxor %xmm0,%xmm8
964 xorps %xmm4,%xmm8
968 pxor %xmm0,%xmm8
970 pxor %xmm1,%xmm8
972 movdqa %xmm8,%xmm9
974 pslldq $8,%xmm8
976 pxor %xmm8,%xmm0
[all …]
/external/boringssl/src/crypto/aes/asm/
vpaes-x86_64.pl
80 ## Preserves %xmm6 - %xmm8 so you get some local vectors
288 movdqa .Lk_rcon(%rip), %xmm8 # load rcon
486 ## Adds rcon from low byte of %xmm8, then rotates %xmm8 for
500 palignr \$15, %xmm8, %xmm1
501 palignr \$15, %xmm8, %xmm8
681 movaps %xmm8,0x30(%rsp)
704 movaps 0x30(%rsp),%xmm8
729 movaps %xmm8,0x30(%rsp)
757 movaps 0x30(%rsp),%xmm8
782 movaps %xmm8,0x30(%rsp)
[all …]
/external/boringssl/win-x86_64/crypto/modes/
ghash-x86_64.asm
968 movdqu xmm8,XMMWORD[r8]
973 pxor xmm0,xmm8
977 pshufd xmm8,xmm0,78
978 pxor xmm8,xmm0
1005 xorps xmm8,xmm4
1009 pxor xmm8,xmm0
1011 pxor xmm8,xmm1
1013 movdqa xmm9,xmm8
1015 pslldq xmm8,8
1017 pxor xmm0,xmm8
[all …]
/external/mesa3d/src/mesa/x86-64/
xform4.S
82 movups (%rdx), %xmm8 /* ox | oy | oz | ow */
85 pshufd $0x00, %xmm8, %xmm0 /* ox | ox | ox | ox */
87 pshufd $0x55, %xmm8, %xmm1 /* oy | oy | oy | oy */
89 pshufd $0xAA, %xmm8, %xmm2 /* oz | oz | oz | ox */
91 pshufd $0xFF, %xmm8, %xmm3 /* ow | ow | ow | ow */
168 movups (%rdx), %xmm8 /* ox | oy | oz | ow */
171 pshufd $0x00, %xmm8, %xmm0 /* ox | ox | ox | ox */
173 pshufd $0x55, %xmm8, %xmm1 /* oy | oy | oy | oy */
175 pshufd $0xAA, %xmm8, %xmm2 /* oz | oz | oz | ox */
177 pshufd $0xFF, %xmm8, %xmm3 /* ow | ow | ow | ow */
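The xform4.S hits show the broadcast half of a 4x4 vertex transform: the input vector is loaded into xmm8 and each component is splatted into its own register with pshufd. A rough sketch of the complete pattern follows; the multiply/accumulate steps and the matrix base register (%rax, assumed to hold a 16-byte-aligned, column-major matrix) are illustrative additions, not part of these hits:

    movups  (%rdx), %xmm8            /* ox | oy | oz | ow */
    pshufd  $0x00, %xmm8, %xmm0      /* splat ox */
    pshufd  $0x55, %xmm8, %xmm1      /* splat oy */
    pshufd  $0xAA, %xmm8, %xmm2      /* splat oz */
    pshufd  $0xFF, %xmm8, %xmm3      /* splat ow */
    mulps   (%rax), %xmm0            /* ox * matrix column 0 */
    mulps   16(%rax), %xmm1          /* oy * column 1 */
    mulps   32(%rax), %xmm2          /* oz * column 2 */
    mulps   48(%rax), %xmm3          /* ow * column 3 */
    addps   %xmm1, %xmm0
    addps   %xmm2, %xmm0
    addps   %xmm3, %xmm0             /* xmm0 = transformed vertex */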
/external/boringssl/mac-x86_64/crypto/sha/
sha1-x86_64.S
1291 movdqa %xmm3,%xmm8
1299 psrldq $4,%xmm8
1305 pxor %xmm2,%xmm8
1309 pxor %xmm8,%xmm4
1319 movdqa %xmm4,%xmm8
1325 psrld $31,%xmm8
1335 por %xmm8,%xmm4
1376 movdqa %xmm5,%xmm8
1382 pslldq $12,%xmm8
1390 movdqa %xmm8,%xmm10
[all …]
/external/boringssl/linux-x86_64/crypto/sha/
sha1-x86_64.S
1292 movdqa %xmm3,%xmm8
1300 psrldq $4,%xmm8
1306 pxor %xmm2,%xmm8
1310 pxor %xmm8,%xmm4
1320 movdqa %xmm4,%xmm8
1326 psrld $31,%xmm8
1336 por %xmm8,%xmm4
1377 movdqa %xmm5,%xmm8
1383 pslldq $12,%xmm8
1391 movdqa %xmm8,%xmm10
[all …]
/external/boringssl/win-x86_64/crypto/sha/
sha1-x86_64.asm
1270 movaps XMMWORD[(-40-64)+rax],xmm8
1321 movdqa xmm8,xmm3
1329 psrldq xmm8,4
1335 pxor xmm8,xmm2
1339 pxor xmm4,xmm8
1349 movdqa xmm8,xmm4
1355 psrld xmm8,31
1365 por xmm4,xmm8
1406 movdqa xmm8,xmm5
1412 pslldq xmm8,12
[all …]
/external/llvm/test/MC/COFF/
seh.s
133 movups %xmm8, (%rsp)
134 .seh_savexmm %xmm8, 0
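This seh.s test and the win64-unwind.exe test further down exercise the same point: xmm6-xmm15 are callee-saved in the Windows x64 ABI, so a function that clobbers xmm8 has to spill it and describe the spill to the unwinder with .seh_savexmm. A minimal prologue/epilogue sketch (the function name and frame size are made up for illustration):

        .seh_proc use_xmm8
use_xmm8:
        subq    $24, %rsp
        .seh_stackalloc 24
        movups  %xmm8, (%rsp)
        .seh_savexmm %xmm8, 0
        .seh_endprologue
        # ... body that clobbers xmm8 ...
        movups  (%rsp), %xmm8
        addq    $24, %rsp
        ret
        .seh_endproc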
/external/llvm/test/MC/X86/
x86_64-avx-encoding.s
5 vaddss %xmm8, %xmm9, %xmm10
9 vmulss %xmm8, %xmm9, %xmm10
13 vsubss %xmm8, %xmm9, %xmm10
17 vdivss %xmm8, %xmm9, %xmm10
21 vaddsd %xmm8, %xmm9, %xmm10
25 vmulsd %xmm8, %xmm9, %xmm10
29 vsubsd %xmm8, %xmm9, %xmm10
33 vdivsd %xmm8, %xmm9, %xmm10
3869 vcvtsd2si %xmm8, %r8d
3885 vcvtsi2sdl %r8d, %xmm8, %xmm15
[all …]
/external/llvm/test/tools/llvm-objdump/Inputs/
win64-unwind.exe.coff-x86_64.asm
11 movups %xmm8, (%rsp)
12 .seh_savexmm %xmm8, 0
