Lines Matching refs:rcx
36 lea -16(%rcx, %r11), %r9; \
92 and $0x3f, %rcx /* rsi alignment in cache line */
137 sub %rcx, %r9
166 mov $16, %rcx
176 movdqa (%rsi, %rcx), %xmm1
177 movdqa (%rdi, %rcx), %xmm2
190 add $16, %rcx
191 movdqa (%rsi, %rcx), %xmm1
192 movdqa (%rdi, %rcx), %xmm2
204 add $16, %rcx
230 mov $16, %rcx /* index for loads */
247 movdqa (%rsi, %rcx), %xmm1
248 movdqa (%rdi, %rcx), %xmm2
264 add $16, %rcx
270 movdqa (%rsi, %rcx), %xmm1
271 movdqa (%rdi, %rcx), %xmm2
287 add $16, %rcx
317 movdqa (%rsi, %rcx), %xmm1
345 mov $16, %rcx /* index for loads */
362 movdqa (%rsi, %rcx), %xmm1
363 movdqa (%rdi, %rcx), %xmm2
380 add $16, %rcx
386 movdqa (%rsi, %rcx), %xmm1
387 movdqa (%rdi, %rcx), %xmm2
404 add $16, %rcx
426 movdqa (%rsi, %rcx), %xmm1
455 mov $16, %rcx /* index for loads */
472 movdqa (%rsi, %rcx), %xmm1
473 movdqa (%rdi, %rcx), %xmm2
490 add $16, %rcx
496 movdqa (%rsi, %rcx), %xmm1
497 movdqa (%rdi, %rcx), %xmm2
514 add $16, %rcx
536 movdqa (%rsi, %rcx), %xmm1
565 mov $16, %rcx /* index for loads */
582 movdqa (%rsi, %rcx), %xmm1
583 movdqa (%rdi, %rcx), %xmm2
600 add $16, %rcx
606 movdqa (%rsi, %rcx), %xmm1
607 movdqa (%rdi, %rcx), %xmm2
624 add $16, %rcx
646 movdqa (%rsi, %rcx), %xmm1
675 mov $16, %rcx /* index for loads */
692 movdqa (%rsi, %rcx), %xmm1
693 movdqa (%rdi, %rcx), %xmm2
710 add $16, %rcx
716 movdqa (%rsi, %rcx), %xmm1
717 movdqa (%rdi, %rcx), %xmm2
734 add $16, %rcx
756 movdqa (%rsi, %rcx), %xmm1
785 mov $16, %rcx /* index for loads */
802 movdqa (%rsi, %rcx), %xmm1
803 movdqa (%rdi, %rcx), %xmm2
820 add $16, %rcx
826 movdqa (%rsi, %rcx), %xmm1
827 movdqa (%rdi, %rcx), %xmm2
844 add $16, %rcx
866 movdqa (%rsi, %rcx), %xmm1
895 mov $16, %rcx /* index for loads */
912 movdqa (%rsi, %rcx), %xmm1
913 movdqa (%rdi, %rcx), %xmm2
930 add $16, %rcx
936 movdqa (%rsi, %rcx), %xmm1
937 movdqa (%rdi, %rcx), %xmm2
954 add $16, %rcx
976 movdqa (%rsi, %rcx), %xmm1
1005 mov $16, %rcx /* index for loads */
1022 movdqa (%rsi, %rcx), %xmm1
1023 movdqa (%rdi, %rcx), %xmm2
1040 add $16, %rcx
1046 movdqa (%rsi, %rcx), %xmm1
1047 movdqa (%rdi, %rcx), %xmm2
1064 add $16, %rcx
1086 movdqa (%rsi, %rcx), %xmm1
1115 mov $16, %rcx /* index for loads */
1132 movdqa (%rsi, %rcx), %xmm1
1133 movdqa (%rdi, %rcx), %xmm2
1150 add $16, %rcx
1156 movdqa (%rsi, %rcx), %xmm1
1157 movdqa (%rdi, %rcx), %xmm2
1174 add $16, %rcx
1196 movdqa (%rsi, %rcx), %xmm1
1225 mov $16, %rcx /* index for loads */
1242 movdqa (%rsi, %rcx), %xmm1
1243 movdqa (%rdi, %rcx), %xmm2
1260 add $16, %rcx
1266 movdqa (%rsi, %rcx), %xmm1
1267 movdqa (%rdi, %rcx), %xmm2
1284 add $16, %rcx
1306 movdqa (%rsi, %rcx), %xmm1
1335 mov $16, %rcx /* index for loads */
1352 movdqa (%rsi, %rcx), %xmm1
1353 movdqa (%rdi, %rcx), %xmm2
1370 add $16, %rcx
1376 movdqa (%rsi, %rcx), %xmm1
1377 movdqa (%rdi, %rcx), %xmm2
1394 add $16, %rcx
1416 movdqa (%rsi, %rcx), %xmm1
1445 mov $16, %rcx /* index for loads */
1462 movdqa (%rsi, %rcx), %xmm1
1463 movdqa (%rdi, %rcx), %xmm2
1480 add $16, %rcx
1486 movdqa (%rsi, %rcx), %xmm1
1487 movdqa (%rdi, %rcx), %xmm2
1504 add $16, %rcx
1526 movdqa (%rsi, %rcx), %xmm1
1555 mov $16, %rcx /* index for loads */
1572 movdqa (%rsi, %rcx), %xmm1
1573 movdqa (%rdi, %rcx), %xmm2
1590 add $16, %rcx
1596 movdqa (%rsi, %rcx), %xmm1
1597 movdqa (%rdi, %rcx), %xmm2
1614 add $16, %rcx
1636 movdqa (%rsi, %rcx), %xmm1
1665 mov $16, %rcx /* index for loads */
1682 movdqa (%rsi, %rcx), %xmm1
1683 movdqa (%rdi, %rcx), %xmm2
1700 add $16, %rcx
1706 movdqa (%rsi, %rcx), %xmm1
1707 movdqa (%rdi, %rcx), %xmm2
1724 add $16, %rcx
1746 movdqa (%rsi, %rcx), %xmm1
1776 mov $16, %rcx /* index for loads */
1794 movdqa (%rsi, %rcx), %xmm1
1795 movdqa (%rdi, %rcx), %xmm2
1812 add $16, %rcx
1818 movdqa (%rsi, %rcx), %xmm1
1819 movdqa (%rdi, %rcx), %xmm2
1836 add $16, %rcx
1858 movdqa (%rsi, %rcx), %xmm1
1871 lea -16(%r9, %rcx), %rax /* locate the exact offset for rdi */
1874 lea (%rsi, %rcx), %rsi /* locate the exact address for second operand (rsi) */
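
For orientation: the matches above recur at roughly 110-line intervals, consistent with an unrolled, per-alignment SSE2 compare loop in which %rcx is a load index shared by both operands (%rdi and %rsi). A minimal sketch of that recurring pattern follows, assuming glibc-style string comparison; only the mov/movdqa/add lines appear in this refs:rcx listing, so the pcmpeqb/pmovmskb compare-and-branch steps are an assumption, and terminator/length checks are omitted:

    /* Sketch only: just the mov/movdqa/add lines are attested by the
       listing above; the compare-and-branch steps are assumed. */
        mov      $16, %rcx              /* index for loads */
    1:
        movdqa   (%rsi, %rcx), %xmm1    /* 16 bytes of second operand */
        movdqa   (%rdi, %rcx), %xmm2    /* 16 bytes of first operand */
        pcmpeqb  %xmm1, %xmm2           /* assumed: byte-wise equality */
        pmovmskb %xmm2, %edx            /* assumed: gather compare mask */
        cmp      $0xffff, %edx          /* assumed: all 16 bytes equal? */
        jne      2f                     /* assumed: exit on mismatch */
        add      $16, %rcx              /* advance shared index */
        jmp      1b
    2:  /* mismatch handling; compare the lea lines at 1871/1874 above,
           which recover the exact %rdi offset and %rsi address */

Note that movdqa faults on addresses that are not 16-byte aligned, which is presumably why line 92 above masks the source pointer with $0x3f to find its offset within a 64-byte cache line before one of the per-alignment loop variants is selected.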