Lines Matching refs:rcx
144 #define IN_ARG3 %rcx
151 #define OUT_ARG3 %rcx
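These defines alias argument number 3 (zero-based, i.e. the fourth integer argument) to %rcx: under the System V AMD64 calling convention the first four integer arguments travel in %rdi, %rsi, %rdx and %rcx. A minimal C sketch of a call whose fourth parameter lands in %rcx; the helper name is hypothetical, for illustration only:

    #include <stdint.h>

    /* Illustration only: under the SysV AMD64 ABI the fourth integer argument
     * (a3 here) is passed in %rcx, which is what IN_ARG3 / OUT_ARG3 name. */
    static void mterp_helper(void *a0, void *a1, void *a2, uint32_t a3) {
        (void)a0; (void)a1; (void)a2; (void)a3;   /* hypothetical stand-in */
    }

    void call_site(void *a, void *b, void *c) {
        mterp_helper(a, b, c, 42u);   /* 42 is loaded into %ecx/%rcx at this call */
    }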
430 movzwq 4(rPC), %rcx # ecx <- BBBB
432 GET_VREG %edx, %rcx
450 SET_WIDE_VREG %rdx, %rcx # v[A] <- rdx
460 GET_WIDE_VREG %rdx, %rcx # rdx <- v[B]
470 movzwq 4(rPC), %rcx # ecx<- BBBB
472 GET_WIDE_VREG %rdx, %rcx # rdx <- v[B]
519 movzwq 4(rPC), %rcx # ecx <- BBBB
521 GET_VREG %edx, %rcx
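GET_VREG, SET_VREG and SET_WIDE_VREG, which recur throughout this listing, index the frame's array of 32-bit virtual-register slots through rFP; the bare (rFP,%rcx,4) operands further down address the same layout directly. A rough C sketch of the access pattern, assuming fp points at that slot array (names are illustrative, not ART's macros):

    #include <stdint.h>

    static uint32_t get_vreg(const uint32_t *fp, unsigned reg) {
        return fp[reg];                      /* movl (rFP,%rcx,4), %edx */
    }

    static void set_vreg(uint32_t *fp, unsigned reg, uint32_t v) {
        fp[reg] = v;                         /* movl %edx, (rFP,%rcx,4) */
    }

    static void set_wide_vreg(uint32_t *fp, unsigned reg, uint64_t v) {
        fp[reg]     = (uint32_t)v;           /* wide values span vN ...  */
        fp[reg + 1] = (uint32_t)(v >> 32);   /* ... and vN+1             */
    }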
577 movq rSELF, %rcx
578 movl THREAD_EXCEPTION_OFFSET(%rcx), %eax
580 movl $0, THREAD_EXCEPTION_OFFSET(%rcx)
853 movq rSELF, %rcx
854 cmpq $0, THREAD_EXCEPTION_OFFSET(%rcx)
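The recurring pair movq rSELF, %rcx / cmpq $0, THREAD_EXCEPTION_OFFSET(%rcx) loads the current thread pointer and tests whether its pending-exception slot is non-NULL; when it is, the handler branches to the common exception path instead of advancing to the next opcode. A hedged C equivalent (the struct layout is purely illustrative, not ART's Thread):

    #include <stddef.h>

    struct thread_sketch {
        void *exception;   /* stand-in for the field at THREAD_EXCEPTION_OFFSET */
        /* ... other per-thread state ... */
    };

    static int has_pending_exception(const struct thread_sketch *self) {
        return self->exception != NULL;   /* cmpq $0, THREAD_EXCEPTION_OFFSET(%rcx) */
    }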
873 movl MIRROR_ARRAY_LENGTH_OFFSET(%rcx), rINST
969 leaq (rPC,%rcx,2), OUT_ARG1 # OUT_ARG1 <- PC + BBBBbbbb*2
988 movq rSELF, %rcx
989 movq %rax, THREAD_EXCEPTION_OFFSET(%rcx)
1054 movslq 2(rPC), OUT_ARG0 # rcx <- BBBBbbbb
1055 leaq (rPC,OUT_ARG0,2), OUT_ARG0 # rcx <- PC + BBBBbbbb*2
1077 movslq 2(rPC), OUT_ARG0 # rcx <- BBBBbbbb
1078 leaq (rPC,OUT_ARG0,2), OUT_ARG0 # rcx <- PC + BBBBbbbb*2
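The movslq/leaq pairs above, like the final leaq (rPC, %rcx, 2), rPC at the end of this listing, scale a signed offset by 2 because Dalvik bytecode is addressed in 16-bit code units; the result is either the address of out-of-line payload data or the next value of rPC. A small C sketch of the computation (names chosen for illustration):

    #include <stdint.h>

    /* target = current PC + signed offset measured in 2-byte code units */
    static const uint16_t *code_unit_target(const uint16_t *pc, int32_t offset_units) {
        return pc + offset_units;   /* pointer arithmetic on uint16_t scales by 2 */
    }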
1108 movzbq 3(rPC), %rcx # ecx<- CC
1112 ucomiss VREG_ADDRESS(%rcx), %xmm0
1149 movzbq 3(rPC), %rcx # ecx<- CC
1153 ucomiss VREG_ADDRESS(%rcx), %xmm0
1190 movzbq 3(rPC), %rcx # ecx<- CC
1194 ucomisd VREG_ADDRESS(%rcx), %xmm0
1231 movzbq 3(rPC), %rcx # ecx<- CC
1235 ucomisd VREG_ADDRESS(%rcx), %xmm0
1260 movzbq 3(rPC), %rcx # ecx <- CC
1266 cmpq VREG_ADDRESS(%rcx), %rdx
1285 movl rINST, %ecx # rcx <- A+
1287 andb $0xf, %cl # rcx <- A
1288 GET_VREG %eax, %rcx # eax <- vA
1313 movl rINST, %ecx # rcx <- A+
1315 andb $0xf, %cl # rcx <- A
1316 GET_VREG %eax, %rcx # eax <- vA
1341 movl rINST, %ecx # rcx <- A+
1343 andb $0xf, %cl # rcx <- A
1344 GET_VREG %eax, %rcx # eax <- vA
1369 movl rINST, %ecx # rcx <- A+
1371 andb $0xf, %cl # rcx <- A
1372 GET_VREG %eax, %rcx # eax <- vA
1397 movl rINST, %ecx # rcx <- A+
1399 andb $0xf, %cl # rcx <- A
1400 GET_VREG %eax, %rcx # eax <- vA
1425 movl rINST, %ecx # rcx <- A+
1427 andb $0xf, %cl # rcx <- A
1428 GET_VREG %eax, %rcx # eax <- vA
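The if-test handlers above unpack two 4-bit register numbers from the single BA byte held in rINST: andb $0xf, %cl keeps the low nibble A, and the B nibble is extracted elsewhere with a 4-bit shift. The same decode in C (sketch):

    #include <stdint.h>

    static void decode_ba(uint8_t ba, unsigned *a, unsigned *b) {
        *a = ba & 0x0fu;          /* andb $0xf, %cl  -> vA index */
        *b = (ba >> 4) & 0x0fu;   /* sarl $4, %ecx   -> vB index */
    }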
1662 movzbq 3(rPC), %rcx # ecx <- CC
1664 GET_VREG %ecx, %rcx # ecx <- vCC (requested index)
1670 movq MIRROR_INT_ARRAY_DATA_OFFSET(%rax,%rcx,8), %rax
1673 movl MIRROR_INT_ARRAY_DATA_OFFSET(%rax,%rcx,4), %eax
1691 movzbq 3(rPC), %rcx # ecx <- CC
1693 GET_VREG %ecx, %rcx # ecx <- vCC (requested index)
1699 movq MIRROR_WIDE_ARRAY_DATA_OFFSET(%rax,%rcx,8), %rax
1702 movq MIRROR_WIDE_ARRAY_DATA_OFFSET(%rax,%rcx,8), %eax
1719 movzbq 3(rPC), %rcx # rcx <- CC
1721 GET_VREG OUT_32_ARG1, %rcx # ecx <- vCC (requested index)
1724 movq rSELF, %rcx
1725 cmpq $0, THREAD_EXCEPTION_OFFSET(%rcx)
1743 movzbq 3(rPC), %rcx # ecx <- CC
1745 GET_VREG %ecx, %rcx # ecx <- vCC (requested index)
1751 movq MIRROR_BOOLEAN_ARRAY_DATA_OFFSET(%rax,%rcx,8), %rax
1754 movzbl MIRROR_BOOLEAN_ARRAY_DATA_OFFSET(%rax,%rcx,1), %eax
1773 movzbq 3(rPC), %rcx # ecx <- CC
1775 GET_VREG %ecx, %rcx # ecx <- vCC (requested index)
1781 movq MIRROR_BYTE_ARRAY_DATA_OFFSET(%rax,%rcx,8), %rax
1784 movsbl MIRROR_BYTE_ARRAY_DATA_OFFSET(%rax,%rcx,1), %eax
1803 movzbq 3(rPC), %rcx # ecx <- CC
1805 GET_VREG %ecx, %rcx # ecx <- vCC (requested index)
1811 movq MIRROR_CHAR_ARRAY_DATA_OFFSET(%rax,%rcx,8), %rax
1814 movzwl MIRROR_CHAR_ARRAY_DATA_OFFSET(%rax,%rcx,2), %eax
1833 movzbq 3(rPC), %rcx # ecx <- CC
1835 GET_VREG %ecx, %rcx # ecx <- vCC (requested index)
1841 movq MIRROR_SHORT_ARRAY_DATA_OFFSET(%rax,%rcx,8), %rax
1844 movswl MIRROR_SHORT_ARRAY_DATA_OFFSET(%rax,%rcx,2), %eax
1862 movzbq 3(rPC), %rcx # rcx <- CC
1864 GET_VREG %ecx, %rcx # ecx <- vCC (requested index)
1874 movl rINST, MIRROR_INT_ARRAY_DATA_OFFSET(%rax,%rcx,4)
1890 movzbq 3(rPC), %rcx # rcx <- CC
1892 GET_VREG %ecx, %rcx # ecx <- vCC (requested index)
1902 movq rINSTq, MIRROR_WIDE_ARRAY_DATA_OFFSET(%rax,%rcx,8)
1937 movzbq 3(rPC), %rcx # rcx <- CC
1939 GET_VREG %ecx, %rcx # ecx <- vCC (requested index)
1949 movb rINSTbl, MIRROR_BOOLEAN_ARRAY_DATA_OFFSET(%rax,%rcx,1)
1966 movzbq 3(rPC), %rcx # rcx <- CC
1968 GET_VREG %ecx, %rcx # ecx <- vCC (requested index)
1978 movb rINSTbl, MIRROR_BYTE_ARRAY_DATA_OFFSET(%rax,%rcx,1)
1995 movzbq 3(rPC), %rcx # rcx <- CC
1997 GET_VREG %ecx, %rcx # ecx <- vCC (requested index)
2007 movw rINSTw, MIRROR_CHAR_ARRAY_DATA_OFFSET(%rax,%rcx,2)
2024 movzbq 3(rPC), %rcx # rcx <- CC
2026 GET_VREG %ecx, %rcx # ecx <- vCC (requested index)
2036 movw rINSTw, MIRROR_SHORT_ARRAY_DATA_OFFSET(%rax,%rcx,2)
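Every aget/aput variant above computes the element address as array base + MIRROR_*_ARRAY_DATA_OFFSET + index * element size, which is what the scaled operands (%rax,%rcx,1), (%rax,%rcx,2), (%rax,%rcx,4) and (%rax,%rcx,8) express once the index in vCC has passed the bounds check. A generic C sketch of that addressing (data_offset stands in for the MIRROR_* constants):

    #include <stddef.h>
    #include <stdint.h>

    /* Sketch: fetch a 32-bit element from a primitive array object whose
     * payload starts data_offset bytes past the object header. */
    static uint32_t aget_u32(const void *array, size_t data_offset, size_t index) {
        const uint32_t *data = (const uint32_t *)((const char *)array + data_offset);
        return data[index];   /* movl MIRROR_INT_ARRAY_DATA_OFFSET(%rax,%rcx,4), %eax */
    }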
2050 movzbq rINSTbl, %rcx # rcx <- BA
2053 GET_VREG OUT_32_ARG1, %rcx # the object pointer
2057 movq rSELF, %rcx
2058 cmpq $0, THREAD_EXCEPTION_OFFSET(%rcx)
2083 movzbq rINSTbl, %rcx # rcx <- BA
2086 GET_VREG OUT_32_ARG1, %rcx # the object pointer
2090 movq rSELF, %rcx
2091 cmpq $0, THREAD_EXCEPTION_OFFSET(%rcx)
2117 movzbq rINSTbl, %rcx # rcx <- BA
2120 GET_VREG OUT_32_ARG1, %rcx # the object pointer
2124 movq rSELF, %rcx
2125 cmpq $0, THREAD_EXCEPTION_OFFSET(%rcx)
2151 movzbq rINSTbl, %rcx # rcx <- BA
2154 GET_VREG OUT_32_ARG1, %rcx # the object pointer
2158 movq rSELF, %rcx
2159 cmpq $0, THREAD_EXCEPTION_OFFSET(%rcx)
2185 movzbq rINSTbl, %rcx # rcx <- BA
2188 GET_VREG OUT_32_ARG1, %rcx # the object pointer
2192 movq rSELF, %rcx
2193 cmpq $0, THREAD_EXCEPTION_OFFSET(%rcx)
2219 movzbq rINSTbl, %rcx # rcx <- BA
2222 GET_VREG OUT_32_ARG1, %rcx # the object pointer
2226 movq rSELF, %rcx
2227 cmpq $0, THREAD_EXCEPTION_OFFSET(%rcx)
2253 movzbq rINSTbl, %rcx # rcx <- BA
2256 GET_VREG OUT_32_ARG1, %rcx # the object pointer
2260 movq rSELF, %rcx
2261 cmpq $0, THREAD_EXCEPTION_OFFSET(%rcx)
2289 movzbq rINSTbl, %rcx # rcx<- BA
2291 GET_VREG OUT_32_ARG1, %rcx # the object pointer
2308 movzbq rINSTbl, %rcx # rcx <- BA
2310 GET_VREG OUT_32_ARG1, %rcx # the object pointer
2348 movzbq rINSTbl, %rcx # rcx<- BA
2350 GET_VREG OUT_32_ARG1, %rcx # the object pointer
2374 movzbq rINSTbl, %rcx # rcx<- BA
2376 GET_VREG OUT_32_ARG1, %rcx # the object pointer
2400 movzbq rINSTbl, %rcx # rcx<- BA
2402 GET_VREG OUT_32_ARG1, %rcx # the object pointer
2426 movzbq rINSTbl, %rcx # rcx<- BA
2428 GET_VREG OUT_32_ARG1, %rcx # the object pointer
2454 movq rSELF, %rcx
2455 cmpl $0, THREAD_EXCEPTION_OFFSET(%rcx)
2485 movq rSELF, %rcx
2486 cmpl $0, THREAD_EXCEPTION_OFFSET(%rcx)
2517 movq rSELF, %rcx
2518 cmpl $0, THREAD_EXCEPTION_OFFSET(%rcx)
2549 movq rSELF, %rcx
2550 cmpl $0, THREAD_EXCEPTION_OFFSET(%rcx)
2581 movq rSELF, %rcx
2582 cmpl $0, THREAD_EXCEPTION_OFFSET(%rcx)
2613 movq rSELF, %rcx
2614 cmpl $0, THREAD_EXCEPTION_OFFSET(%rcx)
2645 movq rSELF, %rcx
2646 cmpl $0, THREAD_EXCEPTION_OFFSET(%rcx)
3154 movl rINST, %ecx # rcx <- A+
3165 SET_WIDE_VREG %rax, %rcx
3167 SET_VREG %eax, %rcx
3182 movl rINST, %ecx # rcx <- A+
3193 SET_WIDE_VREG %rax, %rcx
3195 SET_VREG %eax, %rcx
3210 movl rINST, %ecx # rcx <- A+
3221 SET_WIDE_VREG %rax, %rcx
3223 SET_VREG %eax, %rcx
3238 movl rINST, %ecx # rcx <- A+
3249 SET_WIDE_VREG %rax, %rcx
3251 SET_VREG %eax, %rcx
3266 movl rINST, %ecx # rcx <- A+
3277 SET_WIDE_VREG %rax, %rcx
3279 SET_VREG %eax, %rcx
3294 movl rINST, %ecx # rcx <- A+
3305 SET_WIDE_VREG %rax, %rcx
3307 SET_VREG %eax, %rcx
3334 movl rINST, %ecx # rcx <- A+
3339 movsd %xmm0, VREG_ADDRESS(%rcx)
3340 CLEAR_WIDE_REF %rcx
3342 movss %xmm0, VREG_ADDRESS(%rcx)
3343 CLEAR_REF %rcx
3357 movl rINST, %ecx # rcx <- A+
3362 movsd %xmm0, VREG_ADDRESS(%rcx)
3363 CLEAR_WIDE_REF %rcx
3365 movss %xmm0, VREG_ADDRESS(%rcx)
3366 CLEAR_REF %rcx
3400 movl rINST, %ecx # rcx <- A+
3405 movsd %xmm0, VREG_ADDRESS(%rcx)
3406 CLEAR_WIDE_REF %rcx
3408 movss %xmm0, VREG_ADDRESS(%rcx)
3409 CLEAR_REF %rcx
3423 movl rINST, %ecx # rcx <- A+
3428 movsd %xmm0, VREG_ADDRESS(%rcx)
3429 CLEAR_WIDE_REF %rcx
3431 movss %xmm0, VREG_ADDRESS(%rcx)
3432 CLEAR_REF %rcx
3448 movl rINST, %ecx # rcx <- A+
3463 SET_WIDE_VREG %eax, %rcx
3465 SET_VREG %eax, %rcx
3481 movl rINST, %ecx # rcx <- A+
3496 SET_WIDE_VREG %rax, %rcx
3498 SET_VREG %rax, %rcx
3512 movl rINST, %ecx # rcx <- A+
3517 movsd %xmm0, VREG_ADDRESS(%rcx)
3518 CLEAR_WIDE_REF %rcx
3520 movss %xmm0, VREG_ADDRESS(%rcx)
3521 CLEAR_REF %rcx
3537 movl rINST, %ecx # rcx <- A+
3552 SET_WIDE_VREG %eax, %rcx
3554 SET_VREG %eax, %rcx
3570 movl rINST, %ecx # rcx <- A+
3585 SET_WIDE_VREG %rax, %rcx
3587 SET_VREG %rax, %rcx
3601 movl rINST, %ecx # rcx <- A+
3606 movsd %xmm0, VREG_ADDRESS(%rcx)
3607 CLEAR_WIDE_REF %rcx
3609 movss %xmm0, VREG_ADDRESS(%rcx)
3610 CLEAR_REF %rcx
3625 movl rINST, %ecx # rcx <- A+
3636 SET_WIDE_VREG %rax, %rcx
3638 SET_VREG %eax, %rcx
3653 movl rINST, %ecx # rcx <- A+
3664 SET_WIDE_VREG %rax, %rcx
3666 SET_VREG %eax, %rcx
3681 movl rINST, %ecx # rcx <- A+
3692 SET_WIDE_VREG %rax, %rcx
3694 SET_VREG %eax, %rcx
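In the conversion handlers above, a floating-point result in %xmm0 is written straight into the destination slot (movss/movsd to VREG_ADDRESS(%rcx)), and CLEAR_REF / CLEAR_WIDE_REF then zeroes the corresponding slot(s) of the shadow reference array, since a primitive result cannot be an object reference. A rough sketch, assuming parallel value and reference arrays:

    #include <stdint.h>
    #include <string.h>

    static void set_vreg_float(uint32_t *fp, uint32_t *refs, unsigned reg, float v) {
        memcpy(&fp[reg], &v, sizeof v);   /* movss %xmm0, VREG_ADDRESS(%rcx) */
        refs[reg] = 0;                    /* CLEAR_REF %rcx                  */
    }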
3715 movzbq 3(rPC), %rcx # rcx <- CC
3717 addl (rFP,%rcx,4), %eax # ex: addl (rFP,%rcx,4),%eax
3738 movzbq 3(rPC), %rcx # rcx <- CC
3740 subl (rFP,%rcx,4), %eax # ex: addl (rFP,%rcx,4),%eax
3761 movzbq 3(rPC), %rcx # rcx <- CC
3763 imull (rFP,%rcx,4), %eax # ex: addl (rFP,%rcx,4),%eax
3778 movzbq 3(rPC), %rcx # rcx <- CC
3781 GET_WIDE_VREG %ecx, %rcx # ecx <- vCC
3784 GET_VREG %ecx, %rcx # ecx <- vCC
3818 movzbq 3(rPC), %rcx # rcx <- CC
3821 GET_WIDE_VREG %ecx, %rcx # ecx <- vCC
3824 GET_VREG %ecx, %rcx # ecx <- vCC
3864 movzbq 3(rPC), %rcx # rcx <- CC
3866 andl (rFP,%rcx,4), %eax # ex: addl (rFP,%rcx,4),%eax
3887 movzbq 3(rPC), %rcx # rcx <- CC
3889 orl (rFP,%rcx,4), %eax # ex: addl (rFP,%rcx,4),%eax
3910 movzbq 3(rPC), %rcx # rcx <- CC
3912 xorl (rFP,%rcx,4), %eax # ex: addl (rFP,%rcx,4),%eax
3928 movzbq 3(rPC), %rcx # ecx <- CC
3929 GET_VREG %ecx, %rcx # ecx <- vCC
3953 movzbq 3(rPC), %rcx # ecx <- CC
3954 GET_VREG %ecx, %rcx # ecx <- vCC
3978 movzbq 3(rPC), %rcx # ecx <- CC
3979 GET_VREG %ecx, %rcx # ecx <- vCC
4002 movzbq 3(rPC), %rcx # ecx <- CC
4004 addq (rFP,%rcx,4), %rax # ex: addq (rFP,%rcx,4),%rax
4019 movzbq 3(rPC), %rcx # ecx <- CC
4021 subq (rFP,%rcx,4), %rax # ex: addq (rFP,%rcx,4),%rax
4036 movzbq 3(rPC), %rcx # ecx <- CC
4038 imulq (rFP,%rcx,4), %rax # ex: addq (rFP,%rcx,4),%rax
4053 movzbq 3(rPC), %rcx # rcx <- CC
4056 GET_WIDE_VREG %rcx, %rcx # ecx <- vCC
4059 GET_VREG %rcx, %rcx # ecx <- vCC
4061 testq %rcx, %rcx
4063 cmpq $-1, %rcx
4066 idivq %rcx
4093 movzbq 3(rPC), %rcx # rcx <- CC
4096 GET_WIDE_VREG %rcx, %rcx # ecx <- vCC
4099 GET_VREG %rcx, %rcx # ecx <- vCC
4101 testq %rcx, %rcx
4103 cmpq $-1, %rcx
4106 idivq %rcx
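The div-long/rem-long sequences above guard idivq twice: testq %rcx, %rcx detects a zero divisor, which must raise ArithmeticException, and cmpq $-1, %rcx special-cases a divisor of -1, because LLONG_MIN / -1 overflows idivq and would fault; the interpreter instead produces the wrapped result the Dalvik semantics require. The same checks in C (sketch, error handling elided):

    #include <limits.h>

    static long long checked_div64(long long num, long long den) {
        if (den == 0)
            return 0;                                   /* the interpreter throws here    */
        if (den == -1)                                  /* avoid idivq overflow (#DE):    */
            return num == LLONG_MIN ? num : -num;       /* LLONG_MIN / -1 wraps to itself */
        return num / den;
    }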
4133 movzbq 3(rPC), %rcx # ecx <- CC
4135 andq (rFP,%rcx,4), %rax # ex: addq (rFP,%rcx,4),%rax
4150 movzbq 3(rPC), %rcx # ecx <- CC
4152 orq (rFP,%rcx,4), %rax # ex: addq (rFP,%rcx,4),%rax
4167 movzbq 3(rPC), %rcx # ecx <- CC
4169 xorq (rFP,%rcx,4), %rax # ex: addq (rFP,%rcx,4),%rax
4185 movzbq 3(rPC), %rcx # ecx <- CC
4186 GET_VREG %ecx, %rcx # ecx <- vCC
4210 movzbq 3(rPC), %rcx # ecx <- CC
4211 GET_VREG %ecx, %rcx # ecx <- vCC
4235 movzbq 3(rPC), %rcx # ecx <- CC
4236 GET_VREG %ecx, %rcx # ecx <- vCC
4254 movzbq 2(rPC), %rcx # ecx <- BB
4256 movss VREG_ADDRESS(%rcx), %xmm0 # %xmm0 <- 1st src
4269 movzbq 2(rPC), %rcx # ecx <- BB
4271 movss VREG_ADDRESS(%rcx), %xmm0 # %xmm0 <- 1st src
4284 movzbq 2(rPC), %rcx # ecx <- BB
4286 movss VREG_ADDRESS(%rcx), %xmm0 # %xmm0 <- 1st src
4299 movzbq 2(rPC), %rcx # ecx <- BB
4301 movss VREG_ADDRESS(%rcx), %xmm0 # %xmm0 <- 1st src
4314 movzbq 3(rPC), %rcx # ecx <- BB
4316 flds VREG_ADDRESS(%rcx) # vBB to fp stack
4333 movzbq 2(rPC), %rcx # ecx <- BB
4335 movsd VREG_ADDRESS(%rcx), %xmm0 # %xmm0 <- 1st src
4348 movzbq 2(rPC), %rcx # ecx <- BB
4350 movsd VREG_ADDRESS(%rcx), %xmm0 # %xmm0 <- 1st src
4363 movzbq 2(rPC), %rcx # ecx <- BB
4365 movsd VREG_ADDRESS(%rcx), %xmm0 # %xmm0 <- 1st src
4378 movzbq 2(rPC), %rcx # ecx <- BB
4380 movsd VREG_ADDRESS(%rcx), %xmm0 # %xmm0 <- 1st src
4393 movzbq 3(rPC), %rcx # ecx <- BB
4395 fldl VREG_ADDRESS(%rcx) # %st1 <- fp[vBB]
4423 movl rINST, %ecx # rcx <- A+
4427 addl %eax, (rFP,%rcx,4) # for ex: addl %eax,(rFP,%ecx,4)
4428 CLEAR_REF %rcx
4448 movl rINST, %ecx # rcx <- A+
4452 subl %eax, (rFP,%rcx,4) # for ex: addl %eax,(rFP,%ecx,4)
4453 CLEAR_REF %rcx
4462 movl rINST, %ecx # rcx <- A+
4465 GET_VREG %eax, %rcx # eax <- vA
4467 SET_VREG %eax, %rcx
4479 movl rINST, %ecx # rcx <- BA
4480 sarl $4, %ecx # rcx <- B
4484 GET_WIDE_VREG %ecx, %rcx # ecx <- vB
4487 GET_VREG %ecx, %rcx # ecx <- vB
4520 movl rINST, %ecx # rcx <- BA
4521 sarl $4, %ecx # rcx <- B
4525 GET_WIDE_VREG %ecx, %rcx # ecx <- vB
4528 GET_VREG %ecx, %rcx # ecx <- vB
4568 movl rINST, %ecx # rcx <- A+
4572 andl %eax, (rFP,%rcx,4) # for ex: addl %eax,(rFP,%ecx,4)
4573 CLEAR_REF %rcx
4593 movl rINST, %ecx # rcx <- A+
4597 orl %eax, (rFP,%rcx,4) # for ex: addl %eax,(rFP,%ecx,4)
4598 CLEAR_REF %rcx
4618 movl rINST, %ecx # rcx <- A+
4622 xorl %eax, (rFP,%rcx,4) # for ex: addl %eax,(rFP,%ecx,4)
4623 CLEAR_REF %rcx
4638 GET_VREG %ecx, %rcx # ecx <- vBB
4663 GET_VREG %ecx, %rcx # ecx <- vBB
4688 GET_VREG %ecx, %rcx # ecx <- vBB
4711 movl rINST, %ecx # rcx <- A+
4715 addq %rax, (rFP,%rcx,4) # for ex: addq %rax,(rFP,%rcx,4)
4716 CLEAR_WIDE_REF %rcx
4729 movl rINST, %ecx # rcx <- A+
4733 subq %rax, (rFP,%rcx,4) # for ex: addq %rax,(rFP,%rcx,4)
4734 CLEAR_WIDE_REF %rcx
4743 movl rINST, %ecx # rcx <- A+
4746 GET_WIDE_VREG %rax, %rcx # rax <- vA
4748 SET_WIDE_VREG %rax, %rcx
4760 movl rINST, %ecx # rcx <- BA
4761 sarl $4, %ecx # rcx <- B
4765 GET_WIDE_VREG %rcx, %rcx # ecx <- vB
4768 GET_VREG %rcx, %rcx # ecx <- vB
4770 testq %rcx, %rcx
4772 cmpq $-1, %rcx
4775 idivq %rcx
4801 movl rINST, %ecx # rcx <- BA
4802 sarl $4, %ecx # rcx <- B
4806 GET_WIDE_VREG %rcx, %rcx # ecx <- vB
4809 GET_VREG %rcx, %rcx # ecx <- vB
4811 testq %rcx, %rcx
4813 cmpq $-1, %rcx
4816 idivq %rcx
4842 movl rINST, %ecx # rcx <- A+
4846 andq %rax, (rFP,%rcx,4) # for ex: addq %rax,(rFP,%rcx,4)
4847 CLEAR_WIDE_REF %rcx
4860 movl rINST, %ecx # rcx <- A+
4864 orq %rax, (rFP,%rcx,4) # for ex: addq %rax,(rFP,%rcx,4)
4865 CLEAR_WIDE_REF %rcx
4878 movl rINST, %ecx # rcx <- A+
4882 xorq %rax, (rFP,%rcx,4) # for ex: addq %rax,(rFP,%rcx,4)
4883 CLEAR_WIDE_REF %rcx
4898 GET_VREG %ecx, %rcx # ecx <- vBB
4923 GET_VREG %ecx, %rcx # ecx <- vBB
4948 GET_VREG %ecx, %rcx # ecx <- vBB
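The /2addr arithmetic handlers above combine the second operand directly into vA's memory slot, e.g. addl %eax, (rFP,%rcx,4) or xorq %rax, (rFP,%rcx,4), rather than loading vA, operating in a register and storing it back. In C terms (sketch, fp being the 32-bit slot array used earlier):

    #include <stdint.h>

    /* add-int/2addr style: vA += vB, applied in place on vA's slot. */
    static void add_int_2addr(uint32_t *fp, unsigned a, unsigned b) {
        fp[a] += fp[b];   /* addl %eax, (rFP,%rcx,4) after %eax <- vB */
    }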
4969 movss VREG_ADDRESS(%rcx), %xmm0 # %xmm0 <- 1st src
4972 movss %xmm0, VREG_ADDRESS(%rcx) # vAA<- %xmm0
4985 movss VREG_ADDRESS(%rcx), %xmm0 # %xmm0 <- 1st src
4988 movss %xmm0, VREG_ADDRESS(%rcx) # vAA<- %xmm0
5001 movss VREG_ADDRESS(%rcx), %xmm0 # %xmm0 <- 1st src
5004 movss %xmm0, VREG_ADDRESS(%rcx) # vAA<- %xmm0
5017 movss VREG_ADDRESS(%rcx), %xmm0 # %xmm0 <- 1st src
5020 movss %xmm0, VREG_ADDRESS(%rcx) # vAA<- %xmm0
5031 movzbq rINSTbl, %rcx # ecx <- A+
5035 flds VREG_ADDRESS(%rcx) # vA to fp stack
5042 fstps VREG_ADDRESS(%rcx) # %st to vA
5043 CLEAR_REF %rcx
5053 movsd VREG_ADDRESS(%rcx), %xmm0 # %xmm0 <- 1st src
5056 movsd %xmm0, VREG_ADDRESS(%rcx) # vAA<- %xmm0
5069 movsd VREG_ADDRESS(%rcx), %xmm0 # %xmm0 <- 1st src
5072 movsd %xmm0, VREG_ADDRESS(%rcx) # vAA<- %xmm0
5085 movsd VREG_ADDRESS(%rcx), %xmm0 # %xmm0 <- 1st src
5088 movsd %xmm0, VREG_ADDRESS(%rcx) # vAA<- %xmm0
5101 movsd VREG_ADDRESS(%rcx), %xmm0 # %xmm0 <- 1st src
5104 movsd %xmm0, VREG_ADDRESS(%rcx) # vAA<- %xmm0
5115 movzbq rINSTbl, %rcx # ecx <- A+
5119 fldl VREG_ADDRESS(%rcx) # vA to fp stack
5126 fstpl VREG_ADDRESS(%rcx) # %st to vA
5127 CLEAR_WIDE_REF %rcx
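The flds/fldl loads and fstps/fstpl stores here (and in the matching non-/2addr forms earlier in the listing) go through the x87 stack; these appear to be the floating-point remainder handlers, whose result matches C's fmodf/fmod. A sketch of the operation, assuming these are the rem-float/rem-double paths:

    #include <math.h>

    /* rem-float/2addr in C terms: vA = fmodf(vA, vB). The assembly loops on the
     * x87 partial-remainder instruction until it reports completion, which
     * produces the same value. */
    static float rem_float_2addr(float va, float vb) {
        return fmodf(va, vb);
    }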
5364 movsbl 3(rPC), %ecx # rcx <- ssssssCC
5388 movsbl 3(rPC), %ecx # rcx <- ssssssCC
5412 movsbl 3(rPC), %ecx # rcx <- ssssssCC
5498 movsbl 3(rPC), %ecx # rcx <- ssssssCC
5522 movsbl 3(rPC), %ecx # rcx <- ssssssCC
5546 movsbl 3(rPC), %ecx # rcx <- ssssssCC
5570 movsbl 3(rPC), %ecx # rcx <- ssssssCC
5594 movsbl 3(rPC), %ecx # rcx <- ssssssCC
5618 movsbl 3(rPC), %ecx # rcx <- ssssssCC
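The movsbl loads above belong to the /lit8 instruction forms: the CC byte at offset 3 in the instruction is sign-extended to 32 bits (the ssssssCC in the comments) and then combined with vBB. Sketch:

    #include <stdint.h>

    /* add-int/lit8 style: vAA = vBB + sign_extend(CC). */
    static int32_t add_int_lit8(int32_t vbb, uint8_t cc) {
        int32_t lit = (int8_t)cc;   /* movsbl 3(rPC), %ecx : ssssssCC */
        return vbb + lit;
    }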
5631 movl rINST, %ecx # rcx <- BA
5633 GET_VREG %ecx, %rcx # vB (object we're operating on)
5662 movq (%rcx,%rax,1), %rax
5665 movswl (%rcx,%rax,1), %eax
5678 movzbq rINSTbl, %rcx # rcx <- BA
5680 GET_VREG OUT_32_ARG0, %rcx # vB (object we're operating on)
5705 movl rINST, (%rcx,%rax,1)
5713 movzbq rINSTbl, %rcx # rcx<- BA
5715 GET_VREG %ecx, %rcx # vB (object we're operating on)
5810 movb rINSTbl, (%rcx,%rax,1)
5821 movzbq rINSTbl, %rcx # rcx <- BA
5823 GET_VREG %ecx, %rcx # vB (object we're operating on)
5848 movw rINSTw, (%rcx,%rax,1)
5859 movzbq rINSTbl, %rcx # rcx <- BA
5861 GET_VREG %ecx, %rcx # vB (object we're operating on)
5886 movq (%rcx,%rax,1), %rax
5889 movsbl (%rcx,%rax,1), %eax
5902 movl rINST, %ecx # rcx <- BA
5904 GET_VREG %ecx, %rcx # vB (object we're operating on)
5934 movq (%rcx,%rax,1), %rax
5937 movzwl (%rcx,%rax,1), %eax
5950 movl rINST, %ecx # rcx <- BA
5952 GET_VREG %ecx, %rcx # vB (object we're operating on)
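In the -quick field handlers above, %rcx receives the object reference from vB and %rax the field's precomputed byte offset, so the access is a single scaled-by-1 load or store such as movswl (%rcx,%rax,1), %eax or movw rINSTw, (%rcx,%rax,1). A generic C sketch of that access:

    #include <stdint.h>

    /* Sketch: read a 16-bit field at a known byte offset inside an object. */
    static int32_t iget_short_quick(const void *obj, uintptr_t field_offset) {
        const int16_t *p = (const int16_t *)((const char *)obj + field_offset);
        return *p;   /* movswl (%rcx,%rax,1), %eax */
    }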
11833 movq rSELF, %rcx
11834 cmpq $0, THREAD_EXCEPTION_OFFSET(%rcx)
11852 leaq (rPC, %rcx, 2), rPC