Lines matching refs:src (every reference to "src" in the X86_64Assembler sources; each entry keeps its line number in the original file)

141 void X86_64Assembler::movq(CpuRegister dst, CpuRegister src) {
144   EmitRex64(src, dst);
146   EmitRegisterOperand(src.LowBits(), dst.LowBits());

150 void X86_64Assembler::movl(CpuRegister dst, CpuRegister src) {
152   EmitOptionalRex32(dst, src);
154   EmitRegisterOperand(dst.LowBits(), src.LowBits());

158 void X86_64Assembler::movq(CpuRegister dst, const Address& src) {
160   EmitRex64(dst, src);
162   EmitOperand(dst.LowBits(), src);

166 void X86_64Assembler::movl(CpuRegister dst, const Address& src) {
168   EmitOptionalRex32(dst, src);
170   EmitOperand(dst.LowBits(), src);

174 void X86_64Assembler::movq(const Address& dst, CpuRegister src) {
176   EmitRex64(src, dst);
178   EmitOperand(src.LowBits(), dst);

182 void X86_64Assembler::movl(const Address& dst, CpuRegister src) {
184   EmitOptionalRex32(src, dst);
186   EmitOperand(src.LowBits(), dst);
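Note the reversed operand order in the register-to-register movq above: EmitRex64(src, dst) and EmitRegisterOperand(src.LowBits(), dst.LowBits()) put the source in the ModRM reg field, i.e. the MR form of MOV (MOV r/m64, r64). A minimal sketch of the resulting REX prefix, using a hypothetical helper name (this is not ART's actual helper):

  // Sketch only: compose the REX prefix for movq dst, src in MR form.
  // REX = 0100WRXB: W=1 selects 64-bit, R extends ModRM.reg (here: src),
  // B extends ModRM.rm (here: dst).
  uint8_t RexForMovqRegReg(int src_reg, int dst_reg) {
    uint8_t rex = 0x48;             // 0100_1000: REX.W set
    if (src_reg >= 8) rex |= 0x04;  // REX.R for the source register
    if (dst_reg >= 8) rex |= 0x01;  // REX.B for the destination register
    return rex;                     // movq rax, r8 (MR form) -> 0x4C
  }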
198 void X86_64Assembler::cmov(Condition c, CpuRegister dst, CpuRegister src) {
199   cmov(c, dst, src, true);

202 void X86_64Assembler::cmov(Condition c, CpuRegister dst, CpuRegister src, bool is64bit) {
204   EmitOptionalRex(false, is64bit, dst.NeedsRex(), false, src.NeedsRex());
207   EmitRegisterOperand(dst.LowBits(), src.LowBits());

211 void X86_64Assembler::movzxb(CpuRegister dst, CpuRegister src) {
213   EmitOptionalByteRegNormalizingRex32(dst, src);
216   EmitRegisterOperand(dst.LowBits(), src.LowBits());

220 void X86_64Assembler::movzxb(CpuRegister dst, const Address& src) {
224   EmitOptionalRex32(dst, src);
227   EmitOperand(dst.LowBits(), src);

231 void X86_64Assembler::movsxb(CpuRegister dst, CpuRegister src) {
233   EmitOptionalByteRegNormalizingRex32(dst, src);
236   EmitRegisterOperand(dst.LowBits(), src.LowBits());

240 void X86_64Assembler::movsxb(CpuRegister dst, const Address& src) {
244   EmitOptionalRex32(dst, src);
247   EmitOperand(dst.LowBits(), src);

256 void X86_64Assembler::movb(const Address& dst, CpuRegister src) {
258   EmitOptionalByteRegNormalizingRex32(src, dst);
260   EmitOperand(src.LowBits(), dst);
274 void X86_64Assembler::movzxw(CpuRegister dst, CpuRegister src) {
276   EmitOptionalRex32(dst, src);
279   EmitRegisterOperand(dst.LowBits(), src.LowBits());

283 void X86_64Assembler::movzxw(CpuRegister dst, const Address& src) {
285   EmitOptionalRex32(dst, src);
288   EmitOperand(dst.LowBits(), src);

292 void X86_64Assembler::movsxw(CpuRegister dst, CpuRegister src) {
294   EmitOptionalRex32(dst, src);
297   EmitRegisterOperand(dst.LowBits(), src.LowBits());

301 void X86_64Assembler::movsxw(CpuRegister dst, const Address& src) {
303   EmitOptionalRex32(dst, src);
306   EmitOperand(dst.LowBits(), src);

315 void X86_64Assembler::movw(const Address& dst, CpuRegister src) {
318   EmitOptionalRex32(src, dst);
320   EmitOperand(src.LowBits(), dst);

336 void X86_64Assembler::leaq(CpuRegister dst, const Address& src) {
338   EmitRex64(dst, src);
340   EmitOperand(dst.LowBits(), src);

344 void X86_64Assembler::leal(CpuRegister dst, const Address& src) {
346   EmitOptionalRex32(dst, src);
348   EmitOperand(dst.LowBits(), src);

352 void X86_64Assembler::movaps(XmmRegister dst, XmmRegister src) {
354   EmitOptionalRex32(dst, src);
357   EmitXmmRegisterOperand(dst.LowBits(), src);

361 void X86_64Assembler::movss(XmmRegister dst, const Address& src) {
364   EmitOptionalRex32(dst, src);
367   EmitOperand(dst.LowBits(), src);

371 void X86_64Assembler::movss(const Address& dst, XmmRegister src) {
374   EmitOptionalRex32(src, dst);
377   EmitOperand(src.LowBits(), dst);

381 void X86_64Assembler::movss(XmmRegister dst, XmmRegister src) {
384   EmitOptionalRex32(src, dst);  // Movss is MR encoding instead of the usual RM.
387   EmitXmmRegisterOperand(src.LowBits(), dst);
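The comment at line 384 is worth unpacking: movss has two legal encodings, the load form F3 0F 10 /r (xmm <- xmm/m32) and the store form F3 0F 11 /r (xmm/m32 <- xmm). Using the store (MR) form for register-to-register moves puts the source in the ModRM reg field, which is why src and dst swap places in the two Emit calls. Worked bytes for "movss xmm0, xmm8" under both encodings (architecture facts, not captured ART output):

  const uint8_t kRmForm[] = {0xF3, 0x41, 0x0F, 0x10, 0xC0};  // reg=dst (xmm0), rm=src (xmm8): needs REX.B
  const uint8_t kMrForm[] = {0xF3, 0x44, 0x0F, 0x11, 0xC0};  // reg=src (xmm8), rm=dst (xmm0): needs REX.R

Both sequences move xmm8 into xmm0; the operand order in the listing matches the second.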
391 void X86_64Assembler::movsxd(CpuRegister dst, CpuRegister src) {
393   EmitRex64(dst, src);
395   EmitRegisterOperand(dst.LowBits(), src.LowBits());

399 void X86_64Assembler::movsxd(CpuRegister dst, const Address& src) {
401   EmitRex64(dst, src);
403   EmitOperand(dst.LowBits(), src);

407 void X86_64Assembler::movd(XmmRegister dst, CpuRegister src) {
408   movd(dst, src, true);

411 void X86_64Assembler::movd(CpuRegister dst, XmmRegister src) {
412   movd(dst, src, true);

415 void X86_64Assembler::movd(XmmRegister dst, CpuRegister src, bool is64bit) {
418   EmitOptionalRex(false, is64bit, dst.NeedsRex(), false, src.NeedsRex());
421   EmitOperand(dst.LowBits(), Operand(src));

424 void X86_64Assembler::movd(CpuRegister dst, XmmRegister src, bool is64bit) {
427   EmitOptionalRex(false, is64bit, src.NeedsRex(), false, dst.NeedsRex());
430   EmitOperand(src.LowBits(), Operand(dst));
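Here is64bit maps directly to REX.W: with W clear, the GPR side of movd is 32 bits wide; with W set, the same opcode moves 64 bits (disassemblers then print it as movq). For the GPR-to-XMM direction (architecture facts, assuming the usual 66 0F 6E encoding):

  // movd xmm0, eax -> 66 0F 6E C0      (is64bit == false, 32-bit GPR)
  // movq xmm0, rax -> 66 48 0F 6E C0   (is64bit == true, REX.W inserted)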
434 void X86_64Assembler::addss(XmmRegister dst, XmmRegister src) {
437   EmitOptionalRex32(dst, src);
440   EmitXmmRegisterOperand(dst.LowBits(), src);

444 void X86_64Assembler::addss(XmmRegister dst, const Address& src) {
447   EmitOptionalRex32(dst, src);
450   EmitOperand(dst.LowBits(), src);

454 void X86_64Assembler::subss(XmmRegister dst, XmmRegister src) {
457   EmitOptionalRex32(dst, src);
460   EmitXmmRegisterOperand(dst.LowBits(), src);

464 void X86_64Assembler::subss(XmmRegister dst, const Address& src) {
467   EmitOptionalRex32(dst, src);
470   EmitOperand(dst.LowBits(), src);

474 void X86_64Assembler::mulss(XmmRegister dst, XmmRegister src) {
477   EmitOptionalRex32(dst, src);
480   EmitXmmRegisterOperand(dst.LowBits(), src);

484 void X86_64Assembler::mulss(XmmRegister dst, const Address& src) {
487   EmitOptionalRex32(dst, src);
490   EmitOperand(dst.LowBits(), src);

494 void X86_64Assembler::divss(XmmRegister dst, XmmRegister src) {
497   EmitOptionalRex32(dst, src);
500   EmitXmmRegisterOperand(dst.LowBits(), src);

504 void X86_64Assembler::divss(XmmRegister dst, const Address& src) {
507   EmitOptionalRex32(dst, src);
510   EmitOperand(dst.LowBits(), src);

514 void X86_64Assembler::flds(const Address& src) {
517   EmitOperand(0, src);

535 void X86_64Assembler::movsd(XmmRegister dst, const Address& src) {
538   EmitOptionalRex32(dst, src);
541   EmitOperand(dst.LowBits(), src);

545 void X86_64Assembler::movsd(const Address& dst, XmmRegister src) {
548   EmitOptionalRex32(src, dst);
551   EmitOperand(src.LowBits(), dst);

555 void X86_64Assembler::movsd(XmmRegister dst, XmmRegister src) {
558   EmitOptionalRex32(src, dst);  // Movsd is MR encoding instead of the usual RM.
561   EmitXmmRegisterOperand(src.LowBits(), dst);

565 void X86_64Assembler::addsd(XmmRegister dst, XmmRegister src) {
568   EmitOptionalRex32(dst, src);
571   EmitXmmRegisterOperand(dst.LowBits(), src);

575 void X86_64Assembler::addsd(XmmRegister dst, const Address& src) {
578   EmitOptionalRex32(dst, src);
581   EmitOperand(dst.LowBits(), src);

585 void X86_64Assembler::subsd(XmmRegister dst, XmmRegister src) {
588   EmitOptionalRex32(dst, src);
591   EmitXmmRegisterOperand(dst.LowBits(), src);

595 void X86_64Assembler::subsd(XmmRegister dst, const Address& src) {
598   EmitOptionalRex32(dst, src);
601   EmitOperand(dst.LowBits(), src);

605 void X86_64Assembler::mulsd(XmmRegister dst, XmmRegister src) {
608   EmitOptionalRex32(dst, src);
611   EmitXmmRegisterOperand(dst.LowBits(), src);

615 void X86_64Assembler::mulsd(XmmRegister dst, const Address& src) {
618   EmitOptionalRex32(dst, src);
621   EmitOperand(dst.LowBits(), src);

625 void X86_64Assembler::divsd(XmmRegister dst, XmmRegister src) {
628   EmitOptionalRex32(dst, src);
631   EmitXmmRegisterOperand(dst.LowBits(), src);

635 void X86_64Assembler::divsd(XmmRegister dst, const Address& src) {
638   EmitOptionalRex32(dst, src);
641   EmitOperand(dst.LowBits(), src);

645 void X86_64Assembler::cvtsi2ss(XmmRegister dst, CpuRegister src) {
646   cvtsi2ss(dst, src, false);

650 void X86_64Assembler::cvtsi2ss(XmmRegister dst, CpuRegister src, bool is64bit) {
655   EmitRex64(dst, src);
657   EmitOptionalRex32(dst, src);
661   EmitOperand(dst.LowBits(), Operand(src));

665 void X86_64Assembler::cvtsi2ss(XmmRegister dst, const Address& src, bool is64bit) {
670   EmitRex64(dst, src);
672   EmitOptionalRex32(dst, src);
676   EmitOperand(dst.LowBits(), src);

680 void X86_64Assembler::cvtsi2sd(XmmRegister dst, CpuRegister src) {
681   cvtsi2sd(dst, src, false);

685 void X86_64Assembler::cvtsi2sd(XmmRegister dst, CpuRegister src, bool is64bit) {
690   EmitRex64(dst, src);
692   EmitOptionalRex32(dst, src);
696   EmitOperand(dst.LowBits(), Operand(src));

700 void X86_64Assembler::cvtsi2sd(XmmRegister dst, const Address& src, bool is64bit) {
705   EmitRex64(dst, src);
707   EmitOptionalRex32(dst, src);
711   EmitOperand(dst.LowBits(), src);
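In the cvtsi2ss/cvtsi2sd overloads, the two Emit* calls at nearby line numbers (e.g. 690 and 692) are the arms of an is64bit branch, not sequential emissions: EmitRex64 forces REX.W for a 64-bit integer source, while EmitOptionalRex32 leaves W clear for a 32-bit source. A usage sketch, assuming the XmmRegister/CpuRegister constructors visible elsewhere in this listing (hypothetical call site, not ART code):

  X86_64Assembler assembler;
  assembler.cvtsi2sd(XmmRegister(XMM0), CpuRegister(RAX), /* is64bit= */ false);  // cvtsi2sd xmm0, eax
  assembler.cvtsi2sd(XmmRegister(XMM0), CpuRegister(RAX), /* is64bit= */ true);   // cvtsi2sd xmm0, rax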
715 void X86_64Assembler::cvtss2si(CpuRegister dst, XmmRegister src) {
718   EmitOptionalRex32(dst, src);
721   EmitXmmRegisterOperand(dst.LowBits(), src);

725 void X86_64Assembler::cvtss2sd(XmmRegister dst, XmmRegister src) {
728   EmitOptionalRex32(dst, src);
731   EmitXmmRegisterOperand(dst.LowBits(), src);

735 void X86_64Assembler::cvtss2sd(XmmRegister dst, const Address& src) {
738   EmitOptionalRex32(dst, src);
741   EmitOperand(dst.LowBits(), src);

745 void X86_64Assembler::cvtsd2si(CpuRegister dst, XmmRegister src) {
748   EmitOptionalRex32(dst, src);
751   EmitXmmRegisterOperand(dst.LowBits(), src);

755 void X86_64Assembler::cvttss2si(CpuRegister dst, XmmRegister src) {
756   cvttss2si(dst, src, false);

760 void X86_64Assembler::cvttss2si(CpuRegister dst, XmmRegister src, bool is64bit) {
765   EmitRex64(dst, src);
767   EmitOptionalRex32(dst, src);
771   EmitXmmRegisterOperand(dst.LowBits(), src);

775 void X86_64Assembler::cvttsd2si(CpuRegister dst, XmmRegister src) {
776   cvttsd2si(dst, src, false);

780 void X86_64Assembler::cvttsd2si(CpuRegister dst, XmmRegister src, bool is64bit) {
785   EmitRex64(dst, src);
787   EmitOptionalRex32(dst, src);
791   EmitXmmRegisterOperand(dst.LowBits(), src);

795 void X86_64Assembler::cvtsd2ss(XmmRegister dst, XmmRegister src) {
798   EmitOptionalRex32(dst, src);
801   EmitXmmRegisterOperand(dst.LowBits(), src);

805 void X86_64Assembler::cvtsd2ss(XmmRegister dst, const Address& src) {
808   EmitOptionalRex32(dst, src);
811   EmitOperand(dst.LowBits(), src);

815 void X86_64Assembler::cvtdq2pd(XmmRegister dst, XmmRegister src) {
818   EmitOptionalRex32(dst, src);
821   EmitXmmRegisterOperand(dst.LowBits(), src);

901 void X86_64Assembler::roundsd(XmmRegister dst, XmmRegister src, const Immediate& imm) {
904   EmitOptionalRex32(dst, src);
908   EmitXmmRegisterOperand(dst.LowBits(), src);

913 void X86_64Assembler::roundss(XmmRegister dst, XmmRegister src, const Immediate& imm) {
916   EmitOptionalRex32(dst, src);
920   EmitXmmRegisterOperand(dst.LowBits(), src);

925 void X86_64Assembler::sqrtsd(XmmRegister dst, XmmRegister src) {
928   EmitOptionalRex32(dst, src);
931   EmitXmmRegisterOperand(dst.LowBits(), src);

935 void X86_64Assembler::sqrtss(XmmRegister dst, XmmRegister src) {
938   EmitOptionalRex32(dst, src);
941   EmitXmmRegisterOperand(dst.LowBits(), src);

945 void X86_64Assembler::xorpd(XmmRegister dst, const Address& src) {
948   EmitOptionalRex32(dst, src);
951   EmitOperand(dst.LowBits(), src);

955 void X86_64Assembler::xorpd(XmmRegister dst, XmmRegister src) {
958   EmitOptionalRex32(dst, src);
961   EmitXmmRegisterOperand(dst.LowBits(), src);

965 void X86_64Assembler::xorps(XmmRegister dst, const Address& src) {
967   EmitOptionalRex32(dst, src);
970   EmitOperand(dst.LowBits(), src);

974 void X86_64Assembler::xorps(XmmRegister dst, XmmRegister src) {
976   EmitOptionalRex32(dst, src);
979   EmitXmmRegisterOperand(dst.LowBits(), src);

983 void X86_64Assembler::andpd(XmmRegister dst, const Address& src) {
986   EmitOptionalRex32(dst, src);
989   EmitOperand(dst.LowBits(), src);

992 void X86_64Assembler::andpd(XmmRegister dst, XmmRegister src) {
995   EmitOptionalRex32(dst, src);
998   EmitXmmRegisterOperand(dst.LowBits(), src);

1001 void X86_64Assembler::andps(XmmRegister dst, XmmRegister src) {
1003   EmitOptionalRex32(dst, src);
1006   EmitXmmRegisterOperand(dst.LowBits(), src);

1009 void X86_64Assembler::orpd(XmmRegister dst, XmmRegister src) {
1012   EmitOptionalRex32(dst, src);
1015   EmitXmmRegisterOperand(dst.LowBits(), src);

1018 void X86_64Assembler::orps(XmmRegister dst, XmmRegister src) {
1020   EmitOptionalRex32(dst, src);
1023   EmitXmmRegisterOperand(dst.LowBits(), src);

1026 void X86_64Assembler::fldl(const Address& src) {
1029   EmitOperand(0, src);

1062 void X86_64Assembler::fldcw(const Address& src) {
1065   EmitOperand(5, src);

1083 void X86_64Assembler::fildl(const Address& src) {
1086   EmitOperand(5, src);

1090 void X86_64Assembler::filds(const Address& src) {
1093   EmitOperand(0, src);

1146 void X86_64Assembler::xchgl(CpuRegister dst, CpuRegister src) {
1151   const bool src_rax = src.AsRegister() == RAX;
1154   EmitOptionalRex32(src_rax ? dst : src);
1155   EmitUint8(0x90 + (src_rax ? dst.LowBits() : src.LowBits()));
1160   EmitOptionalRex32(src, dst);
1162   EmitRegisterOperand(src.LowBits(), dst.LowBits());

1166 void X86_64Assembler::xchgq(CpuRegister dst, CpuRegister src) {
1171   const bool src_rax = src.AsRegister() == RAX;
1178   EmitRex64(src_rax ? dst : src);
1179   EmitUint8(0x90 + (src_rax ? dst.LowBits() : src.LowBits()));
1185   EmitRex64(src, dst);
1187   EmitRegisterOperand(src.LowBits(), dst.LowBits());
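The xchg bodies have a fast path and a slow path (the line-number gaps hide the branch): when one operand is RAX, the compact single-byte form XCHG rAX, r (opcode 0x90 + register) applies, which is exactly what 0x90 + LowBits() computes; otherwise the general XCHG r/m, r form is emitted via EmitRegisterOperand. Resulting bytes, as architecture facts rather than captured ART output:

  // xchg rax, rcx -> 48 91      (compact form: REX.W, then 0x90 + 1)
  // xchg rcx, rdx -> 48 87 CA   (general form 0x87 /r; xchg is symmetric)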
1277 void X86_64Assembler::addl(CpuRegister dst, CpuRegister src) {
1279   EmitOptionalRex32(dst, src);
1281   EmitRegisterOperand(dst.LowBits(), src.LowBits());

1351 void X86_64Assembler::andl(CpuRegister dst, CpuRegister src) {
1353   EmitOptionalRex32(dst, src);
1355   EmitOperand(dst.LowBits(), Operand(src));

1382 void X86_64Assembler::andq(CpuRegister dst, CpuRegister src) {
1384   EmitRex64(dst, src);
1386   EmitOperand(dst.LowBits(), Operand(src));

1390 void X86_64Assembler::andq(CpuRegister dst, const Address& src) {
1392   EmitRex64(dst, src);
1394   EmitOperand(dst.LowBits(), src);

1398 void X86_64Assembler::orl(CpuRegister dst, CpuRegister src) {
1400   EmitOptionalRex32(dst, src);
1402   EmitOperand(dst.LowBits(), Operand(src));

1429 void X86_64Assembler::orq(CpuRegister dst, CpuRegister src) {
1431   EmitRex64(dst, src);
1433   EmitOperand(dst.LowBits(), Operand(src));

1437 void X86_64Assembler::orq(CpuRegister dst, const Address& src) {
1439   EmitRex64(dst, src);
1441   EmitOperand(dst.LowBits(), src);

1445 void X86_64Assembler::xorl(CpuRegister dst, CpuRegister src) {
1447   EmitOptionalRex32(dst, src);
1449   EmitOperand(dst.LowBits(), Operand(src));

1468 void X86_64Assembler::xorq(CpuRegister dst, CpuRegister src) {
1470   EmitRex64(dst, src);
1472   EmitOperand(dst.LowBits(), Operand(src));

1483 void X86_64Assembler::xorq(CpuRegister dst, const Address& src) {
1485   EmitRex64(dst, src);
1487   EmitOperand(dst.LowBits(), src);

1541 void rex_mem_reg(bool force, bool w, Address* mem, Register* src);

1567 void X86_64Assembler::addq(CpuRegister dst, CpuRegister src) {
1570   EmitRex64(src, dst);
1572   EmitRegisterOperand(src.LowBits(), dst.LowBits());

1591 void X86_64Assembler::subl(CpuRegister dst, CpuRegister src) {
1593   EmitOptionalRex32(dst, src);
1595   EmitOperand(dst.LowBits(), Operand(src));

1614 void X86_64Assembler::subq(CpuRegister dst, CpuRegister src) {
1616   EmitRex64(dst, src);
1618   EmitRegisterOperand(dst.LowBits(), src.LowBits());

1667 void X86_64Assembler::imull(CpuRegister dst, CpuRegister src) {
1669   EmitOptionalRex32(dst, src);
1672   EmitOperand(dst.LowBits(), Operand(src));

1706 void X86_64Assembler::imulq(CpuRegister dst, CpuRegister src) {
1708   EmitRex64(dst, src);
1711   EmitRegisterOperand(dst.LowBits(), src.LowBits());

2242 void X86_64Assembler::EmitOptionalRex32(CpuRegister dst, CpuRegister src) {
2243   EmitOptionalRex(false, false, dst.NeedsRex(), false, src.NeedsRex());

2246 void X86_64Assembler::EmitOptionalRex32(XmmRegister dst, XmmRegister src) {
2247   EmitOptionalRex(false, false, dst.NeedsRex(), false, src.NeedsRex());

2250 void X86_64Assembler::EmitOptionalRex32(CpuRegister dst, XmmRegister src) {
2251   EmitOptionalRex(false, false, dst.NeedsRex(), false, src.NeedsRex());

2254 void X86_64Assembler::EmitOptionalRex32(XmmRegister dst, CpuRegister src) {
2255   EmitOptionalRex(false, false, dst.NeedsRex(), false, src.NeedsRex());
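Every EmitOptionalRex32 overload forwards to EmitOptionalRex(force, w, r, x, b) with w == false. A minimal sketch of what that helper must do, inferred from its call sites in this listing (not the verbatim ART body):

  void EmitOptionalRexSketch(bool force, bool w, bool r, bool x, bool b) {
    // REX = 0100WRXB; skip the prefix entirely when no bit is needed.
    uint8_t rex = 0x40 | (w ? 8 : 0) | (r ? 4 : 0) | (x ? 2 : 0) | (b ? 1 : 0);
    if (force || rex != 0x40) {
      EmitUint8(rex);  // EmitUint8 as used throughout the listing above
    }
  }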
2299 void X86_64Assembler::EmitRex64(CpuRegister dst, CpuRegister src) {
2300   EmitOptionalRex(false, true, dst.NeedsRex(), false, src.NeedsRex());

2303 void X86_64Assembler::EmitRex64(XmmRegister dst, CpuRegister src) {
2304   EmitOptionalRex(false, true, dst.NeedsRex(), false, src.NeedsRex());

2307 void X86_64Assembler::EmitRex64(CpuRegister dst, XmmRegister src) {
2308   EmitOptionalRex(false, true, dst.NeedsRex(), false, src.NeedsRex());

2327 void X86_64Assembler::EmitOptionalByteRegNormalizingRex32(CpuRegister dst, CpuRegister src) {
2329   bool force = src.AsRegister() > 3;
2330   EmitOptionalRex(force, false, dst.NeedsRex(), false, src.NeedsRex());
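The force flag for byte operations is an x86-64 quirk: without a REX prefix, byte-register encodings 4-7 select AH/CH/DH/BH, while any REX prefix (even the otherwise-empty 0x40) remaps them to SPL/BPL/SIL/DIL. Registers 0-3 (AL/CL/DL/BL) encode identically either way, so REX is forced only when the register index is above 3. For example:

  // mov [rdi], bl  ->    88 1F   (index 3, no prefix needed)
  // mov [rdi], sil -> 40 88 37   (index 6, empty REX forced to get SIL, not DH)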
// In X86_64Assembler::Store() (the signature itself does not mention "src"):
2463   X86_64ManagedRegister src = msrc.AsX86_64();
2464   if (src.IsNoRegister()) {
2466   } else if (src.IsCpuRegister()) {
2469       movl(Address(CpuRegister(RSP), offs), src.AsCpuRegister());
2472       movq(Address(CpuRegister(RSP), offs), src.AsCpuRegister());
2474   } else if (src.IsRegisterPair()) {
2476       movq(Address(CpuRegister(RSP), offs), src.AsRegisterPairLow());
2478            src.AsRegisterPairHigh());
2479   } else if (src.IsX87Register()) {
2486       CHECK(src.IsXmmRegister());
2488       movss(Address(CpuRegister(RSP), offs), src.AsXmmRegister());
2490       movsd(Address(CpuRegister(RSP), offs), src.AsXmmRegister());

// In X86_64Assembler::StoreRef():
2496   X86_64ManagedRegister src = msrc.AsX86_64();
2497   CHECK(src.IsCpuRegister());
2498   movl(Address(CpuRegister(RSP), dest), src.AsCpuRegister());

// In X86_64Assembler::StoreRawPtr():
2502   X86_64ManagedRegister src = msrc.AsX86_64();
2503   CHECK(src.IsCpuRegister());
2504   movq(Address(CpuRegister(RSP), dest), src.AsCpuRegister());
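Note the deliberate width split across these helpers: Store() picks movl or movq from its size argument, StoreRef() always uses movl (managed heap references are 32-bit in ART even on 64-bit targets), and StoreRawPtr() always uses movq (native pointers are 64-bit). A usage sketch, assuming an X86_64ManagedRegister::FromCpuRegister factory as in ART's managed-register headers (hypothetical call site):

  assembler.StoreRef(FrameOffset(16), X86_64ManagedRegister::FromCpuRegister(RBX));     // movl [rsp+16], ebx
  assembler.StoreRawPtr(FrameOffset(24), X86_64ManagedRegister::FromCpuRegister(RAX));  // movq [rsp+24], rax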
2535 void X86_64Assembler::Load(ManagedRegister mdest, FrameOffset src, size_t size) {
2542     movl(dest.AsCpuRegister(), Address(CpuRegister(RSP), src));
2545     movq(dest.AsCpuRegister(), Address(CpuRegister(RSP), src));
2549     movq(dest.AsRegisterPairLow(), Address(CpuRegister(RSP), src));
2550     movq(dest.AsRegisterPairHigh(), Address(CpuRegister(RSP), FrameOffset(src.Int32Value() + 4)));
2553     flds(Address(CpuRegister(RSP), src));
2555     fldl(Address(CpuRegister(RSP), src));
2560     movss(dest.AsXmmRegister(), Address(CpuRegister(RSP), src));
2562     movsd(dest.AsXmmRegister(), Address(CpuRegister(RSP), src));

2567 void X86_64Assembler::LoadFromThread64(ManagedRegister mdest, ThreadOffset<8> src, size_t size) {
2573     gs()->movl(dest.AsCpuRegister(), Address::Absolute(src, true));
2576     gs()->movq(dest.AsRegisterPairLow(), Address::Absolute(src, true));
2579     gs()->flds(Address::Absolute(src, true));
2581     gs()->fldl(Address::Absolute(src, true));
2586     gs()->movss(dest.AsXmmRegister(), Address::Absolute(src, true));
2588     gs()->movsd(dest.AsXmmRegister(), Address::Absolute(src, true));

2593 void X86_64Assembler::LoadRef(ManagedRegister mdest, FrameOffset src) {
2596   movq(dest.AsCpuRegister(), Address(CpuRegister(RSP), src));

// In X86_64Assembler::Move():
2646   X86_64ManagedRegister src = msrc.AsX86_64();
2647   if (!dest.Equals(src)) {
2648     if (dest.IsCpuRegister() && src.IsCpuRegister()) {
2649       movq(dest.AsCpuRegister(), src.AsCpuRegister());
2650     } else if (src.IsX87Register() && dest.IsXmmRegister()) {
2654       CHECK_EQ(src.AsX87Register(), ST0);
2658       CHECK_EQ(src.AsX87Register(), ST0);
2665       UNIMPLEMENTED(FATAL) << ": Move " << dest << ", " << src;

2670 void X86_64Assembler::CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister mscratch) {
2673   movl(scratch.AsCpuRegister(), Address(CpuRegister(RSP), src));

2695 void X86_64Assembler::Copy(FrameOffset dest, FrameOffset src, ManagedRegister mscratch,
2699     Load(scratch, src, 4);
2701     Load(scratch, FrameOffset(src.Int32Value() + 4), 4);
2704     Load(scratch, src, size);

2714 void X86_64Assembler::Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src,
2718   pushq(Address(CpuRegister(RSP), src));

2732     ManagedRegister src, Offset src_offset,  // parameter-list continuation of another Copy() overload
2736   pushq(Address(src.AsX86_64().AsCpuRegister(), src_offset));

2740 void X86_64Assembler::Copy(FrameOffset dest, Offset dest_offset, FrameOffset src, Offset src_offset,
2744   CHECK_EQ(dest.Int32Value(), src.Int32Value());
2745   movq(scratch, Address(CpuRegister(RSP), src));
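These Copy/CopyRef helpers all follow the same scratch-register discipline: x86-64 has no general memory-to-memory move (outside push/pop and the string instructions), so a frame-to-frame copy is a load into the scratch register followed by a store, and the size-8 path at lines 2699-2701 does it as two 4-byte halves. A simplified sketch of the shape (not the verbatim ART body):

  // Sketch: frame-to-frame copy via a scratch CPU register.
  void CopyViaScratch(FrameOffset dest, FrameOffset src, ManagedRegister scratch, size_t size) {
    Load(scratch, src, size);    // frame slot -> scratch register
    Store(dest, scratch, size);  // scratch register -> frame slot
  }

The pushq-based overloads at lines 2718 and 2736 instead appear to bounce the value through the stack, trading the scratch register for push/pop traffic.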