1 /*
2  * Copyright (C) 2011 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "assembler_x86.h"
18 
19 #include "base/casts.h"
20 #include "entrypoints/quick/quick_entrypoints.h"
21 #include "memory_region.h"
22 #include "thread.h"
23 
24 namespace art {
25 namespace x86 {
26 
27 std::ostream& operator<<(std::ostream& os, const XmmRegister& reg) {
28   return os << "XMM" << static_cast<int>(reg);
29 }
30 
31 std::ostream& operator<<(std::ostream& os, const X87Register& reg) {
32   return os << "ST" << static_cast<int>(reg);
33 }
34 
35 void X86Assembler::call(Register reg) {
36   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
37   EmitUint8(0xFF);
38   EmitRegisterOperand(2, reg);
39 }
40 
41 
42 void X86Assembler::call(const Address& address) {
43   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
44   EmitUint8(0xFF);
45   EmitOperand(2, address);
46 }
47 
48 
49 void X86Assembler::call(Label* label) {
50   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
51   EmitUint8(0xE8);
52   static const int kSize = 5;
53   // Offset by one because we already have emitted the opcode.
54   EmitLabel(label, kSize - 1);
55 }
56 
57 
58 void X86Assembler::call(const ExternalLabel& label) {
59   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
60   intptr_t call_start = buffer_.GetPosition();
61   EmitUint8(0xE8);
62   EmitInt32(label.address());
63   static const intptr_t kCallExternalLabelSize = 5;
64   DCHECK_EQ((buffer_.GetPosition() - call_start), kCallExternalLabelSize);
65 }
66 
67 
68 void X86Assembler::pushl(Register reg) {
69   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
70   EmitUint8(0x50 + reg);
71 }
72 
73 
74 void X86Assembler::pushl(const Address& address) {
75   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
76   EmitUint8(0xFF);
77   EmitOperand(6, address);
78 }
79 
80 
81 void X86Assembler::pushl(const Immediate& imm) {
82   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
83   if (imm.is_int8()) {
84     EmitUint8(0x6A);
85     EmitUint8(imm.value() & 0xFF);
86   } else {
87     EmitUint8(0x68);
88     EmitImmediate(imm);
89   }
90 }
91 
92 
93 void X86Assembler::popl(Register reg) {
94   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
95   EmitUint8(0x58 + reg);
96 }
97 
98 
99 void X86Assembler::popl(const Address& address) {
100   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
101   EmitUint8(0x8F);
102   EmitOperand(0, address);
103 }
104 
105 
106 void X86Assembler::movl(Register dst, const Immediate& imm) {
107   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
108   EmitUint8(0xB8 + dst);
109   EmitImmediate(imm);
110 }
111 
112 
113 void X86Assembler::movl(Register dst, Register src) {
114   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
115   EmitUint8(0x89);
116   EmitRegisterOperand(src, dst);
117 }
118 
119 
120 void X86Assembler::movl(Register dst, const Address& src) {
121   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
122   EmitUint8(0x8B);
123   EmitOperand(dst, src);
124 }
125 
126 
127 void X86Assembler::movl(const Address& dst, Register src) {
128   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
129   EmitUint8(0x89);
130   EmitOperand(src, dst);
131 }
132 
133 
134 void X86Assembler::movl(const Address& dst, const Immediate& imm) {
135   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
136   EmitUint8(0xC7);
137   EmitOperand(0, dst);
138   EmitImmediate(imm);
139 }
140 
141 void X86Assembler::movl(const Address& dst, Label* lbl) {
142   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
143   EmitUint8(0xC7);
144   EmitOperand(0, dst);
145   EmitLabel(lbl, dst.length_ + 5);
146 }
147 
148 void X86Assembler::bswapl(Register dst) {
149   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
150   EmitUint8(0x0F);
151   EmitUint8(0xC8 + dst);
152 }
153 
154 void X86Assembler::movzxb(Register dst, ByteRegister src) {
155   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
156   EmitUint8(0x0F);
157   EmitUint8(0xB6);
158   EmitRegisterOperand(dst, src);
159 }
160 
161 
162 void X86Assembler::movzxb(Register dst, const Address& src) {
163   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
164   EmitUint8(0x0F);
165   EmitUint8(0xB6);
166   EmitOperand(dst, src);
167 }
168 
169 
170 void X86Assembler::movsxb(Register dst, ByteRegister src) {
171   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
172   EmitUint8(0x0F);
173   EmitUint8(0xBE);
174   EmitRegisterOperand(dst, src);
175 }
176 
177 
178 void X86Assembler::movsxb(Register dst, const Address& src) {
179   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
180   EmitUint8(0x0F);
181   EmitUint8(0xBE);
182   EmitOperand(dst, src);
183 }
184 
185 
186 void X86Assembler::movb(Register /*dst*/, const Address& /*src*/) {
187   LOG(FATAL) << "Use movzxb or movsxb instead.";
188 }
189 
190 
191 void X86Assembler::movb(const Address& dst, ByteRegister src) {
192   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
193   EmitUint8(0x88);
194   EmitOperand(src, dst);
195 }
196 
197 
198 void X86Assembler::movb(const Address& dst, const Immediate& imm) {
199   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
200   EmitUint8(0xC6);
201   EmitOperand(EAX, dst);
202   CHECK(imm.is_int8());
203   EmitUint8(imm.value() & 0xFF);
204 }
205 
206 
207 void X86Assembler::movzxw(Register dst, Register src) {
208   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
209   EmitUint8(0x0F);
210   EmitUint8(0xB7);
211   EmitRegisterOperand(dst, src);
212 }
213 
214 
215 void X86Assembler::movzxw(Register dst, const Address& src) {
216   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
217   EmitUint8(0x0F);
218   EmitUint8(0xB7);
219   EmitOperand(dst, src);
220 }
221 
222 
223 void X86Assembler::movsxw(Register dst, Register src) {
224   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
225   EmitUint8(0x0F);
226   EmitUint8(0xBF);
227   EmitRegisterOperand(dst, src);
228 }
229 
230 
231 void X86Assembler::movsxw(Register dst, const Address& src) {
232   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
233   EmitUint8(0x0F);
234   EmitUint8(0xBF);
235   EmitOperand(dst, src);
236 }
237 
238 
239 void X86Assembler::movw(Register /*dst*/, const Address& /*src*/) {
240   LOG(FATAL) << "Use movzxw or movsxw instead.";
241 }
242 
243 
244 void X86Assembler::movw(const Address& dst, Register src) {
245   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
246   EmitOperandSizeOverride();
247   EmitUint8(0x89);
248   EmitOperand(src, dst);
249 }
250 
251 
252 void X86Assembler::movw(const Address& dst, const Immediate& imm) {
253   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
254   EmitOperandSizeOverride();
255   EmitUint8(0xC7);
256   EmitOperand(0, dst);
257   CHECK(imm.is_uint16() || imm.is_int16());
258   EmitUint8(imm.value() & 0xFF);
259   EmitUint8(imm.value() >> 8);
260 }
261 
262 
263 void X86Assembler::leal(Register dst, const Address& src) {
264   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
265   EmitUint8(0x8D);
266   EmitOperand(dst, src);
267 }
268 
269 
270 void X86Assembler::cmovl(Condition condition, Register dst, Register src) {
271   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
272   EmitUint8(0x0F);
273   EmitUint8(0x40 + condition);
274   EmitRegisterOperand(dst, src);
275 }
276 
277 
278 void X86Assembler::setb(Condition condition, Register dst) {
279   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
280   EmitUint8(0x0F);
281   EmitUint8(0x90 + condition);
282   EmitOperand(0, Operand(dst));
283 }
284 
285 
286 void X86Assembler::movaps(XmmRegister dst, XmmRegister src) {
287   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
288   EmitUint8(0x0F);
289   EmitUint8(0x28);
290   EmitXmmRegisterOperand(dst, src);
291 }
292 
293 
294 void X86Assembler::movss(XmmRegister dst, const Address& src) {
295   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
296   EmitUint8(0xF3);
297   EmitUint8(0x0F);
298   EmitUint8(0x10);
299   EmitOperand(dst, src);
300 }
301 
302 
303 void X86Assembler::movss(const Address& dst, XmmRegister src) {
304   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
305   EmitUint8(0xF3);
306   EmitUint8(0x0F);
307   EmitUint8(0x11);
308   EmitOperand(src, dst);
309 }
310 
311 
312 void X86Assembler::movss(XmmRegister dst, XmmRegister src) {
313   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
314   EmitUint8(0xF3);
315   EmitUint8(0x0F);
316   EmitUint8(0x11);
317   EmitXmmRegisterOperand(src, dst);
318 }
319 
320 
321 void X86Assembler::movd(XmmRegister dst, Register src) {
322   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
323   EmitUint8(0x66);
324   EmitUint8(0x0F);
325   EmitUint8(0x6E);
326   EmitOperand(dst, Operand(src));
327 }
328 
329 
330 void X86Assembler::movd(Register dst, XmmRegister src) {
331   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
332   EmitUint8(0x66);
333   EmitUint8(0x0F);
334   EmitUint8(0x7E);
335   EmitOperand(src, Operand(dst));
336 }
337 
338 
339 void X86Assembler::addss(XmmRegister dst, XmmRegister src) {
340   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
341   EmitUint8(0xF3);
342   EmitUint8(0x0F);
343   EmitUint8(0x58);
344   EmitXmmRegisterOperand(dst, src);
345 }
346 
347 
348 void X86Assembler::addss(XmmRegister dst, const Address& src) {
349   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
350   EmitUint8(0xF3);
351   EmitUint8(0x0F);
352   EmitUint8(0x58);
353   EmitOperand(dst, src);
354 }
355 
356 
357 void X86Assembler::subss(XmmRegister dst, XmmRegister src) {
358   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
359   EmitUint8(0xF3);
360   EmitUint8(0x0F);
361   EmitUint8(0x5C);
362   EmitXmmRegisterOperand(dst, src);
363 }
364 
365 
366 void X86Assembler::subss(XmmRegister dst, const Address& src) {
367   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
368   EmitUint8(0xF3);
369   EmitUint8(0x0F);
370   EmitUint8(0x5C);
371   EmitOperand(dst, src);
372 }
373 
374 
375 void X86Assembler::mulss(XmmRegister dst, XmmRegister src) {
376   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
377   EmitUint8(0xF3);
378   EmitUint8(0x0F);
379   EmitUint8(0x59);
380   EmitXmmRegisterOperand(dst, src);
381 }
382 
383 
384 void X86Assembler::mulss(XmmRegister dst, const Address& src) {
385   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
386   EmitUint8(0xF3);
387   EmitUint8(0x0F);
388   EmitUint8(0x59);
389   EmitOperand(dst, src);
390 }
391 
392 
393 void X86Assembler::divss(XmmRegister dst, XmmRegister src) {
394   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
395   EmitUint8(0xF3);
396   EmitUint8(0x0F);
397   EmitUint8(0x5E);
398   EmitXmmRegisterOperand(dst, src);
399 }
400 
401 
402 void X86Assembler::divss(XmmRegister dst, const Address& src) {
403   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
404   EmitUint8(0xF3);
405   EmitUint8(0x0F);
406   EmitUint8(0x5E);
407   EmitOperand(dst, src);
408 }
409 
410 
411 void X86Assembler::flds(const Address& src) {
412   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
413   EmitUint8(0xD9);
414   EmitOperand(0, src);
415 }
416 
417 
418 void X86Assembler::fsts(const Address& dst) {
419   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
420   EmitUint8(0xD9);
421   EmitOperand(2, dst);
422 }
423 
424 
425 void X86Assembler::fstps(const Address& dst) {
426   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
427   EmitUint8(0xD9);
428   EmitOperand(3, dst);
429 }
430 
431 
432 void X86Assembler::movsd(XmmRegister dst, const Address& src) {
433   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
434   EmitUint8(0xF2);
435   EmitUint8(0x0F);
436   EmitUint8(0x10);
437   EmitOperand(dst, src);
438 }
439 
440 
441 void X86Assembler::movsd(const Address& dst, XmmRegister src) {
442   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
443   EmitUint8(0xF2);
444   EmitUint8(0x0F);
445   EmitUint8(0x11);
446   EmitOperand(src, dst);
447 }
448 
449 
450 void X86Assembler::movsd(XmmRegister dst, XmmRegister src) {
451   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
452   EmitUint8(0xF2);
453   EmitUint8(0x0F);
454   EmitUint8(0x11);
455   EmitXmmRegisterOperand(src, dst);
456 }
457 
458 
459 void X86Assembler::movhpd(XmmRegister dst, const Address& src) {
460   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
461   EmitUint8(0x66);
462   EmitUint8(0x0F);
463   EmitUint8(0x16);
464   EmitOperand(dst, src);
465 }
466 
467 
468 void X86Assembler::movhpd(const Address& dst, XmmRegister src) {
469   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
470   EmitUint8(0x66);
471   EmitUint8(0x0F);
472   EmitUint8(0x17);
473   EmitOperand(src, dst);
474 }
475 
476 
477 void X86Assembler::psrldq(XmmRegister reg, const Immediate& shift_count) {
478   DCHECK(shift_count.is_uint8());
479 
480   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
481   EmitUint8(0x66);
482   EmitUint8(0x0F);
483   EmitUint8(0x73);
484   EmitXmmRegisterOperand(3, reg);
485   EmitUint8(shift_count.value());
486 }
487 
488 
489 void X86Assembler::psrlq(XmmRegister reg, const Immediate& shift_count) {
490   DCHECK(shift_count.is_uint8());
491 
492   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
493   EmitUint8(0x66);
494   EmitUint8(0x0F);
495   EmitUint8(0x73);
496   EmitXmmRegisterOperand(2, reg);
497   EmitUint8(shift_count.value());
498 }
499 
500 
501 void X86Assembler::punpckldq(XmmRegister dst, XmmRegister src) {
502   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
503   EmitUint8(0x66);
504   EmitUint8(0x0F);
505   EmitUint8(0x62);
506   EmitXmmRegisterOperand(dst, src);
507 }
508 
509 
510 void X86Assembler::addsd(XmmRegister dst, XmmRegister src) {
511   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
512   EmitUint8(0xF2);
513   EmitUint8(0x0F);
514   EmitUint8(0x58);
515   EmitXmmRegisterOperand(dst, src);
516 }
517 
518 
519 void X86Assembler::addsd(XmmRegister dst, const Address& src) {
520   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
521   EmitUint8(0xF2);
522   EmitUint8(0x0F);
523   EmitUint8(0x58);
524   EmitOperand(dst, src);
525 }
526 
527 
528 void X86Assembler::subsd(XmmRegister dst, XmmRegister src) {
529   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
530   EmitUint8(0xF2);
531   EmitUint8(0x0F);
532   EmitUint8(0x5C);
533   EmitXmmRegisterOperand(dst, src);
534 }
535 
536 
537 void X86Assembler::subsd(XmmRegister dst, const Address& src) {
538   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
539   EmitUint8(0xF2);
540   EmitUint8(0x0F);
541   EmitUint8(0x5C);
542   EmitOperand(dst, src);
543 }
544 
545 
546 void X86Assembler::mulsd(XmmRegister dst, XmmRegister src) {
547   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
548   EmitUint8(0xF2);
549   EmitUint8(0x0F);
550   EmitUint8(0x59);
551   EmitXmmRegisterOperand(dst, src);
552 }
553 
554 
555 void X86Assembler::mulsd(XmmRegister dst, const Address& src) {
556   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
557   EmitUint8(0xF2);
558   EmitUint8(0x0F);
559   EmitUint8(0x59);
560   EmitOperand(dst, src);
561 }
562 
563 
564 void X86Assembler::divsd(XmmRegister dst, XmmRegister src) {
565   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
566   EmitUint8(0xF2);
567   EmitUint8(0x0F);
568   EmitUint8(0x5E);
569   EmitXmmRegisterOperand(dst, src);
570 }
571 
572 
573 void X86Assembler::divsd(XmmRegister dst, const Address& src) {
574   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
575   EmitUint8(0xF2);
576   EmitUint8(0x0F);
577   EmitUint8(0x5E);
578   EmitOperand(dst, src);
579 }
580 
581 
582 void X86Assembler::cvtsi2ss(XmmRegister dst, Register src) {
583   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
584   EmitUint8(0xF3);
585   EmitUint8(0x0F);
586   EmitUint8(0x2A);
587   EmitOperand(dst, Operand(src));
588 }
589 
590 
591 void X86Assembler::cvtsi2sd(XmmRegister dst, Register src) {
592   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
593   EmitUint8(0xF2);
594   EmitUint8(0x0F);
595   EmitUint8(0x2A);
596   EmitOperand(dst, Operand(src));
597 }
598 
599 
600 void X86Assembler::cvtss2si(Register dst, XmmRegister src) {
601   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
602   EmitUint8(0xF3);
603   EmitUint8(0x0F);
604   EmitUint8(0x2D);
605   EmitXmmRegisterOperand(dst, src);
606 }
607 
608 
609 void X86Assembler::cvtss2sd(XmmRegister dst, XmmRegister src) {
610   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
611   EmitUint8(0xF3);
612   EmitUint8(0x0F);
613   EmitUint8(0x5A);
614   EmitXmmRegisterOperand(dst, src);
615 }
616 
617 
618 void X86Assembler::cvtsd2si(Register dst, XmmRegister src) {
619   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
620   EmitUint8(0xF2);
621   EmitUint8(0x0F);
622   EmitUint8(0x2D);
623   EmitXmmRegisterOperand(dst, src);
624 }
625 
626 
627 void X86Assembler::cvttss2si(Register dst, XmmRegister src) {
628   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
629   EmitUint8(0xF3);
630   EmitUint8(0x0F);
631   EmitUint8(0x2C);
632   EmitXmmRegisterOperand(dst, src);
633 }
634 
635 
636 void X86Assembler::cvttsd2si(Register dst, XmmRegister src) {
637   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
638   EmitUint8(0xF2);
639   EmitUint8(0x0F);
640   EmitUint8(0x2C);
641   EmitXmmRegisterOperand(dst, src);
642 }
643 
644 
645 void X86Assembler::cvtsd2ss(XmmRegister dst, XmmRegister src) {
646   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
647   EmitUint8(0xF2);
648   EmitUint8(0x0F);
649   EmitUint8(0x5A);
650   EmitXmmRegisterOperand(dst, src);
651 }
652 
653 
654 void X86Assembler::cvtdq2pd(XmmRegister dst, XmmRegister src) {
655   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
656   EmitUint8(0xF3);
657   EmitUint8(0x0F);
658   EmitUint8(0xE6);
659   EmitXmmRegisterOperand(dst, src);
660 }
661 
662 
663 void X86Assembler::comiss(XmmRegister a, XmmRegister b) {
664   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
665   EmitUint8(0x0F);
666   EmitUint8(0x2F);
667   EmitXmmRegisterOperand(a, b);
668 }
669 
670 
671 void X86Assembler::comisd(XmmRegister a, XmmRegister b) {
672   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
673   EmitUint8(0x66);
674   EmitUint8(0x0F);
675   EmitUint8(0x2F);
676   EmitXmmRegisterOperand(a, b);
677 }
678 
679 
680 void X86Assembler::ucomiss(XmmRegister a, XmmRegister b) {
681   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
682   EmitUint8(0x0F);
683   EmitUint8(0x2E);
684   EmitXmmRegisterOperand(a, b);
685 }
686 
687 
688 void X86Assembler::ucomisd(XmmRegister a, XmmRegister b) {
689   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
690   EmitUint8(0x66);
691   EmitUint8(0x0F);
692   EmitUint8(0x2E);
693   EmitXmmRegisterOperand(a, b);
694 }
695 
696 
697 void X86Assembler::roundsd(XmmRegister dst, XmmRegister src, const Immediate& imm) {
698   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
699   EmitUint8(0x66);
700   EmitUint8(0x0F);
701   EmitUint8(0x3A);
702   EmitUint8(0x0B);
703   EmitXmmRegisterOperand(dst, src);
704   EmitUint8(imm.value());
705 }
706 
707 
708 void X86Assembler::roundss(XmmRegister dst, XmmRegister src, const Immediate& imm) {
709   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
710   EmitUint8(0x66);
711   EmitUint8(0x0F);
712   EmitUint8(0x3A);
713   EmitUint8(0x0A);
714   EmitXmmRegisterOperand(dst, src);
715   EmitUint8(imm.value());
716 }
717 
718 
719 void X86Assembler::sqrtsd(XmmRegister dst, XmmRegister src) {
720   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
721   EmitUint8(0xF2);
722   EmitUint8(0x0F);
723   EmitUint8(0x51);
724   EmitXmmRegisterOperand(dst, src);
725 }
726 
727 
728 void X86Assembler::sqrtss(XmmRegister dst, XmmRegister src) {
729   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
730   EmitUint8(0xF3);
731   EmitUint8(0x0F);
732   EmitUint8(0x51);
733   EmitXmmRegisterOperand(dst, src);
734 }
735 
736 
737 void X86Assembler::xorpd(XmmRegister dst, const Address& src) {
738   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
739   EmitUint8(0x66);
740   EmitUint8(0x0F);
741   EmitUint8(0x57);
742   EmitOperand(dst, src);
743 }
744 
745 
746 void X86Assembler::xorpd(XmmRegister dst, XmmRegister src) {
747   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
748   EmitUint8(0x66);
749   EmitUint8(0x0F);
750   EmitUint8(0x57);
751   EmitXmmRegisterOperand(dst, src);
752 }
753 
754 
755 void X86Assembler::andps(XmmRegister dst, XmmRegister src) {
756   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
757   EmitUint8(0x0F);
758   EmitUint8(0x54);
759   EmitXmmRegisterOperand(dst, src);
760 }
761 
762 
763 void X86Assembler::andpd(XmmRegister dst, XmmRegister src) {
764   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
765   EmitUint8(0x66);
766   EmitUint8(0x0F);
767   EmitUint8(0x54);
768   EmitXmmRegisterOperand(dst, src);
769 }
770 
771 
772 void X86Assembler::orpd(XmmRegister dst, XmmRegister src) {
773   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
774   EmitUint8(0x66);
775   EmitUint8(0x0F);
776   EmitUint8(0x56);
777   EmitXmmRegisterOperand(dst, src);
778 }
779 
780 
781 void X86Assembler::xorps(XmmRegister dst, const Address& src) {
782   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
783   EmitUint8(0x0F);
784   EmitUint8(0x57);
785   EmitOperand(dst, src);
786 }
787 
788 
789 void X86Assembler::orps(XmmRegister dst, XmmRegister src) {
790   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
791   EmitUint8(0x0F);
792   EmitUint8(0x56);
793   EmitXmmRegisterOperand(dst, src);
794 }
795 
796 
797 void X86Assembler::xorps(XmmRegister dst, XmmRegister src) {
798   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
799   EmitUint8(0x0F);
800   EmitUint8(0x57);
801   EmitXmmRegisterOperand(dst, src);
802 }
803 
804 
805 void X86Assembler::andps(XmmRegister dst, const Address& src) {
806   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
807   EmitUint8(0x0F);
808   EmitUint8(0x54);
809   EmitOperand(dst, src);
810 }
811 
812 
813 void X86Assembler::andpd(XmmRegister dst, const Address& src) {
814   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
815   EmitUint8(0x66);
816   EmitUint8(0x0F);
817   EmitUint8(0x54);
818   EmitOperand(dst, src);
819 }
820 
821 
822 void X86Assembler::fldl(const Address& src) {
823   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
824   EmitUint8(0xDD);
825   EmitOperand(0, src);
826 }
827 
828 
829 void X86Assembler::fstl(const Address& dst) {
830   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
831   EmitUint8(0xDD);
832   EmitOperand(2, dst);
833 }
834 
835 
836 void X86Assembler::fstpl(const Address& dst) {
837   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
838   EmitUint8(0xDD);
839   EmitOperand(3, dst);
840 }
841 
842 
843 void X86Assembler::fstsw() {
844   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
845   EmitUint8(0x9B);
846   EmitUint8(0xDF);
847   EmitUint8(0xE0);
848 }
849 
850 
851 void X86Assembler::fnstcw(const Address& dst) {
852   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
853   EmitUint8(0xD9);
854   EmitOperand(7, dst);
855 }
856 
857 
858 void X86Assembler::fldcw(const Address& src) {
859   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
860   EmitUint8(0xD9);
861   EmitOperand(5, src);
862 }
863 
864 
865 void X86Assembler::fistpl(const Address& dst) {
866   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
867   EmitUint8(0xDF);
868   EmitOperand(7, dst);
869 }
870 
871 
872 void X86Assembler::fistps(const Address& dst) {
873   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
874   EmitUint8(0xDB);
875   EmitOperand(3, dst);
876 }
877 
878 
879 void X86Assembler::fildl(const Address& src) {
880   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
881   EmitUint8(0xDF);
882   EmitOperand(5, src);
883 }
884 
885 
886 void X86Assembler::filds(const Address& src) {
887   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
888   EmitUint8(0xDB);
889   EmitOperand(0, src);
890 }
891 
892 
893 void X86Assembler::fincstp() {
894   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
895   EmitUint8(0xD9);
896   EmitUint8(0xF7);
897 }
898 
899 
900 void X86Assembler::ffree(const Immediate& index) {
901   CHECK_LT(index.value(), 7);
902   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
903   EmitUint8(0xDD);
904   EmitUint8(0xC0 + index.value());
905 }
906 
907 
908 void X86Assembler::fsin() {
909   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
910   EmitUint8(0xD9);
911   EmitUint8(0xFE);
912 }
913 
914 
915 void X86Assembler::fcos() {
916   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
917   EmitUint8(0xD9);
918   EmitUint8(0xFF);
919 }
920 
921 
922 void X86Assembler::fptan() {
923   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
924   EmitUint8(0xD9);
925   EmitUint8(0xF2);
926 }
927 
928 
929 void X86Assembler::fucompp() {
930   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
931   EmitUint8(0xDA);
932   EmitUint8(0xE9);
933 }
934 
935 
936 void X86Assembler::fprem() {
937   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
938   EmitUint8(0xD9);
939   EmitUint8(0xF8);
940 }
941 
942 
943 void X86Assembler::xchgl(Register dst, Register src) {
944   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
945   EmitUint8(0x87);
946   EmitRegisterOperand(dst, src);
947 }
948 
949 
950 void X86Assembler::xchgl(Register reg, const Address& address) {
951   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
952   EmitUint8(0x87);
953   EmitOperand(reg, address);
954 }
955 
956 
957 void X86Assembler::cmpw(const Address& address, const Immediate& imm) {
958   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
959   EmitUint8(0x66);
960   EmitComplex(7, address, imm);
961 }
962 
963 
964 void X86Assembler::cmpl(Register reg, const Immediate& imm) {
965   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
966   EmitComplex(7, Operand(reg), imm);
967 }
968 
969 
970 void X86Assembler::cmpl(Register reg0, Register reg1) {
971   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
972   EmitUint8(0x3B);
973   EmitOperand(reg0, Operand(reg1));
974 }
975 
976 
977 void X86Assembler::cmpl(Register reg, const Address& address) {
978   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
979   EmitUint8(0x3B);
980   EmitOperand(reg, address);
981 }
982 
983 
984 void X86Assembler::addl(Register dst, Register src) {
985   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
986   EmitUint8(0x03);
987   EmitRegisterOperand(dst, src);
988 }
989 
990 
991 void X86Assembler::addl(Register reg, const Address& address) {
992   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
993   EmitUint8(0x03);
994   EmitOperand(reg, address);
995 }
996 
997 
998 void X86Assembler::cmpl(const Address& address, Register reg) {
999   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1000   EmitUint8(0x39);
1001   EmitOperand(reg, address);
1002 }
1003 
1004 
1005 void X86Assembler::cmpl(const Address& address, const Immediate& imm) {
1006   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1007   EmitComplex(7, address, imm);
1008 }
1009 
1010 
1011 void X86Assembler::testl(Register reg1, Register reg2) {
1012   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1013   EmitUint8(0x85);
1014   EmitRegisterOperand(reg1, reg2);
1015 }
1016 
1017 
1018 void X86Assembler::testl(Register reg, const Address& address) {
1019   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1020   EmitUint8(0x85);
1021   EmitOperand(reg, address);
1022 }
1023 
1024 
1025 void X86Assembler::testl(Register reg, const Immediate& immediate) {
1026   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1027   // For registers that have a byte variant (EAX, EBX, ECX, and EDX)
1028   // we only test the byte register to keep the encoding short.
1029   if (immediate.is_uint8() && reg < 4) {
1030     // Use zero-extended 8-bit immediate.
1031     if (reg == EAX) {
1032       EmitUint8(0xA8);
1033     } else {
1034       EmitUint8(0xF6);
1035       EmitUint8(0xC0 + reg);
1036     }
1037     EmitUint8(immediate.value() & 0xFF);
1038   } else if (reg == EAX) {
1039     // Use short form if the destination is EAX.
1040     EmitUint8(0xA9);
1041     EmitImmediate(immediate);
1042   } else {
1043     EmitUint8(0xF7);
1044     EmitOperand(0, Operand(reg));
1045     EmitImmediate(immediate);
1046   }
1047 }
1048 
1049 
1050 void X86Assembler::andl(Register dst, Register src) {
1051   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1052   EmitUint8(0x23);
1053   EmitOperand(dst, Operand(src));
1054 }
1055 
1056 
1057 void X86Assembler::andl(Register reg, const Address& address) {
1058   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1059   EmitUint8(0x23);
1060   EmitOperand(reg, address);
1061 }
1062 
1063 
1064 void X86Assembler::andl(Register dst, const Immediate& imm) {
1065   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1066   EmitComplex(4, Operand(dst), imm);
1067 }
1068 
1069 
1070 void X86Assembler::orl(Register dst, Register src) {
1071   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1072   EmitUint8(0x0B);
1073   EmitOperand(dst, Operand(src));
1074 }
1075 
1076 
1077 void X86Assembler::orl(Register reg, const Address& address) {
1078   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1079   EmitUint8(0x0B);
1080   EmitOperand(reg, address);
1081 }
1082 
1083 
1084 void X86Assembler::orl(Register dst, const Immediate& imm) {
1085   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1086   EmitComplex(1, Operand(dst), imm);
1087 }
1088 
1089 
1090 void X86Assembler::xorl(Register dst, Register src) {
1091   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1092   EmitUint8(0x33);
1093   EmitOperand(dst, Operand(src));
1094 }
1095 
1096 
1097 void X86Assembler::xorl(Register reg, const Address& address) {
1098   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1099   EmitUint8(0x33);
1100   EmitOperand(reg, address);
1101 }
1102 
1103 
1104 void X86Assembler::xorl(Register dst, const Immediate& imm) {
1105   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1106   EmitComplex(6, Operand(dst), imm);
1107 }
1108 
1109 
1110 void X86Assembler::addl(Register reg, const Immediate& imm) {
1111   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1112   EmitComplex(0, Operand(reg), imm);
1113 }
1114 
1115 
1116 void X86Assembler::addl(const Address& address, Register reg) {
1117   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1118   EmitUint8(0x01);
1119   EmitOperand(reg, address);
1120 }
1121 
1122 
1123 void X86Assembler::addl(const Address& address, const Immediate& imm) {
1124   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1125   EmitComplex(0, address, imm);
1126 }
1127 
1128 
1129 void X86Assembler::adcl(Register reg, const Immediate& imm) {
1130   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1131   EmitComplex(2, Operand(reg), imm);
1132 }
1133 
1134 
1135 void X86Assembler::adcl(Register dst, Register src) {
1136   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1137   EmitUint8(0x13);
1138   EmitOperand(dst, Operand(src));
1139 }
1140 
1141 
1142 void X86Assembler::adcl(Register dst, const Address& address) {
1143   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1144   EmitUint8(0x13);
1145   EmitOperand(dst, address);
1146 }
1147 
1148 
1149 void X86Assembler::subl(Register dst, Register src) {
1150   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1151   EmitUint8(0x2B);
1152   EmitOperand(dst, Operand(src));
1153 }
1154 
1155 
1156 void X86Assembler::subl(Register reg, const Immediate& imm) {
1157   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1158   EmitComplex(5, Operand(reg), imm);
1159 }
1160 
1161 
1162 void X86Assembler::subl(Register reg, const Address& address) {
1163   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1164   EmitUint8(0x2B);
1165   EmitOperand(reg, address);
1166 }
1167 
1168 
1169 void X86Assembler::subl(const Address& address, Register reg) {
1170   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1171   EmitUint8(0x29);
1172   EmitOperand(reg, address);
1173 }
1174 
1175 
1176 void X86Assembler::cdq() {
1177   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1178   EmitUint8(0x99);
1179 }
1180 
1181 
1182 void X86Assembler::idivl(Register reg) {
1183   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1184   EmitUint8(0xF7);
1185   EmitUint8(0xF8 | reg);
1186 }
1187 
1188 
1189 void X86Assembler::imull(Register dst, Register src) {
1190   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1191   EmitUint8(0x0F);
1192   EmitUint8(0xAF);
1193   EmitOperand(dst, Operand(src));
1194 }
1195 
1196 
1197 void X86Assembler::imull(Register reg, const Immediate& imm) {
1198   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1199   EmitUint8(0x69);
1200   EmitOperand(reg, Operand(reg));
1201   EmitImmediate(imm);
1202 }
1203 
1204 
1205 void X86Assembler::imull(Register reg, const Address& address) {
1206   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1207   EmitUint8(0x0F);
1208   EmitUint8(0xAF);
1209   EmitOperand(reg, address);
1210 }
1211 
1212 
1213 void X86Assembler::imull(Register reg) {
1214   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1215   EmitUint8(0xF7);
1216   EmitOperand(5, Operand(reg));
1217 }
1218 
1219 
1220 void X86Assembler::imull(const Address& address) {
1221   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1222   EmitUint8(0xF7);
1223   EmitOperand(5, address);
1224 }
1225 
1226 
1227 void X86Assembler::mull(Register reg) {
1228   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1229   EmitUint8(0xF7);
1230   EmitOperand(4, Operand(reg));
1231 }
1232 
1233 
1234 void X86Assembler::mull(const Address& address) {
1235   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1236   EmitUint8(0xF7);
1237   EmitOperand(4, address);
1238 }
1239 
1240 
1241 void X86Assembler::sbbl(Register dst, Register src) {
1242   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1243   EmitUint8(0x1B);
1244   EmitOperand(dst, Operand(src));
1245 }
1246 
1247 
1248 void X86Assembler::sbbl(Register reg, const Immediate& imm) {
1249   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1250   EmitComplex(3, Operand(reg), imm);
1251 }
1252 
1253 
1254 void X86Assembler::sbbl(Register dst, const Address& address) {
1255   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1256   EmitUint8(0x1B);
1257   EmitOperand(dst, address);
1258 }
1259 
1260 
1261 void X86Assembler::sbbl(const Address& address, Register src) {
1262   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1263   EmitUint8(0x19);
1264   EmitOperand(src, address);
1265 }
1266 
1267 
1268 void X86Assembler::incl(Register reg) {
1269   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1270   EmitUint8(0x40 + reg);
1271 }
1272 
1273 
1274 void X86Assembler::incl(const Address& address) {
1275   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1276   EmitUint8(0xFF);
1277   EmitOperand(0, address);
1278 }
1279 
1280 
1281 void X86Assembler::decl(Register reg) {
1282   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1283   EmitUint8(0x48 + reg);
1284 }
1285 
1286 
1287 void X86Assembler::decl(const Address& address) {
1288   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1289   EmitUint8(0xFF);
1290   EmitOperand(1, address);
1291 }
1292 
1293 
1294 void X86Assembler::shll(Register reg, const Immediate& imm) {
1295   EmitGenericShift(4, Operand(reg), imm);
1296 }
1297 
1298 
1299 void X86Assembler::shll(Register operand, Register shifter) {
1300   EmitGenericShift(4, Operand(operand), shifter);
1301 }
1302 
1303 
1304 void X86Assembler::shll(const Address& address, const Immediate& imm) {
1305   EmitGenericShift(4, address, imm);
1306 }
1307 
1308 
1309 void X86Assembler::shll(const Address& address, Register shifter) {
1310   EmitGenericShift(4, address, shifter);
1311 }
1312 
1313 
1314 void X86Assembler::shrl(Register reg, const Immediate& imm) {
1315   EmitGenericShift(5, Operand(reg), imm);
1316 }
1317 
1318 
1319 void X86Assembler::shrl(Register operand, Register shifter) {
1320   EmitGenericShift(5, Operand(operand), shifter);
1321 }
1322 
1323 
1324 void X86Assembler::shrl(const Address& address, const Immediate& imm) {
1325   EmitGenericShift(5, address, imm);
1326 }
1327 
1328 
1329 void X86Assembler::shrl(const Address& address, Register shifter) {
1330   EmitGenericShift(5, address, shifter);
1331 }
1332 
1333 
1334 void X86Assembler::sarl(Register reg, const Immediate& imm) {
1335   EmitGenericShift(7, Operand(reg), imm);
1336 }
1337 
1338 
1339 void X86Assembler::sarl(Register operand, Register shifter) {
1340   EmitGenericShift(7, Operand(operand), shifter);
1341 }
1342 
1343 
1344 void X86Assembler::sarl(const Address& address, const Immediate& imm) {
1345   EmitGenericShift(7, address, imm);
1346 }
1347 
1348 
1349 void X86Assembler::sarl(const Address& address, Register shifter) {
1350   EmitGenericShift(7, address, shifter);
1351 }
1352 
1353 
1354 void X86Assembler::shld(Register dst, Register src, Register shifter) {
1355   DCHECK_EQ(ECX, shifter);
1356   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1357   EmitUint8(0x0F);
1358   EmitUint8(0xA5);
1359   EmitRegisterOperand(src, dst);
1360 }
1361 
1362 
1363 void X86Assembler::shld(Register dst, Register src, const Immediate& imm) {
1364   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1365   EmitUint8(0x0F);
1366   EmitUint8(0xA4);
1367   EmitRegisterOperand(src, dst);
1368   EmitUint8(imm.value() & 0xFF);
1369 }
1370 
1371 
1372 void X86Assembler::shrd(Register dst, Register src, Register shifter) {
1373   DCHECK_EQ(ECX, shifter);
1374   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1375   EmitUint8(0x0F);
1376   EmitUint8(0xAD);
1377   EmitRegisterOperand(src, dst);
1378 }
1379 
1380 
1381 void X86Assembler::shrd(Register dst, Register src, const Immediate& imm) {
1382   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1383   EmitUint8(0x0F);
1384   EmitUint8(0xAC);
1385   EmitRegisterOperand(src, dst);
1386   EmitUint8(imm.value() & 0xFF);
1387 }
1388 
1389 
1390 void X86Assembler::negl(Register reg) {
1391   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1392   EmitUint8(0xF7);
1393   EmitOperand(3, Operand(reg));
1394 }
1395 
1396 
1397 void X86Assembler::notl(Register reg) {
1398   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1399   EmitUint8(0xF7);
1400   EmitUint8(0xD0 | reg);
1401 }
1402 
1403 
1404 void X86Assembler::enter(const Immediate& imm) {
1405   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1406   EmitUint8(0xC8);
1407   CHECK(imm.is_uint16());
1408   EmitUint8(imm.value() & 0xFF);
1409   EmitUint8((imm.value() >> 8) & 0xFF);
1410   EmitUint8(0x00);
1411 }
1412 
1413 
1414 void X86Assembler::leave() {
1415   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1416   EmitUint8(0xC9);
1417 }
1418 
1419 
1420 void X86Assembler::ret() {
1421   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1422   EmitUint8(0xC3);
1423 }
1424 
1425 
1426 void X86Assembler::ret(const Immediate& imm) {
1427   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1428   EmitUint8(0xC2);
1429   CHECK(imm.is_uint16());
1430   EmitUint8(imm.value() & 0xFF);
1431   EmitUint8((imm.value() >> 8) & 0xFF);
1432 }
1433 
1434 
1435 
1436 void X86Assembler::nop() {
1437   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1438   EmitUint8(0x90);
1439 }
1440 
1441 
1442 void X86Assembler::int3() {
1443   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1444   EmitUint8(0xCC);
1445 }
1446 
1447 
1448 void X86Assembler::hlt() {
1449   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1450   EmitUint8(0xF4);
1451 }
1452 
1453 
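// Conditional jump to a label: a bound label is reached via a backward offset, encoded as a
// short jcc (rel8) when it fits and a near jcc (0x0F 0x8x, rel32) otherwise; an unbound label
// always gets the near form and is patched later in Bind().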
1454 void X86Assembler::j(Condition condition, Label* label) {
1455   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1456   if (label->IsBound()) {
1457     static const int kShortSize = 2;
1458     static const int kLongSize = 6;
1459     int offset = label->Position() - buffer_.Size();
1460     CHECK_LE(offset, 0);
1461     if (IsInt<8>(offset - kShortSize)) {
1462       EmitUint8(0x70 + condition);
1463       EmitUint8((offset - kShortSize) & 0xFF);
1464     } else {
1465       EmitUint8(0x0F);
1466       EmitUint8(0x80 + condition);
1467       EmitInt32(offset - kLongSize);
1468     }
1469   } else {
1470     EmitUint8(0x0F);
1471     EmitUint8(0x80 + condition);
1472     EmitLabelLink(label);
1473   }
1474 }
1475 
1476 
1477 void X86Assembler::jmp(Register reg) {
1478   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1479   EmitUint8(0xFF);
1480   EmitRegisterOperand(4, reg);
1481 }
1482 
1483 void X86Assembler::jmp(const Address& address) {
1484   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1485   EmitUint8(0xFF);
1486   EmitOperand(4, address);
1487 }
1488 
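// Unconditional jump to a label: rel8 (0xEB) for short backward jumps, rel32 (0xE9) otherwise;
// unbound labels use the rel32 form and are patched in Bind().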
1489 void X86Assembler::jmp(Label* label) {
1490   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1491   if (label->IsBound()) {
1492     static const int kShortSize = 2;
1493     static const int kLongSize = 5;
1494     int offset = label->Position() - buffer_.Size();
1495     CHECK_LE(offset, 0);
1496     if (IsInt<8>(offset - kShortSize)) {
1497       EmitUint8(0xEB);
1498       EmitUint8((offset - kShortSize) & 0xFF);
1499     } else {
1500       EmitUint8(0xE9);
1501       EmitInt32(offset - kLongSize);
1502     }
1503   } else {
1504     EmitUint8(0xE9);
1505     EmitLabelLink(label);
1506   }
1507 }
1508 
1509 
1510 void X86Assembler::repne_scasw() {
1511   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1512   EmitUint8(0x66);
1513   EmitUint8(0xF2);
1514   EmitUint8(0xAF);
1515 }
1516 
1517 
1518 X86Assembler* X86Assembler::lock() {
1519   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1520   EmitUint8(0xF0);
1521   return this;
1522 }
1523 
1524 
1525 void X86Assembler::cmpxchgl(const Address& address, Register reg) {
1526   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1527   EmitUint8(0x0F);
1528   EmitUint8(0xB1);
1529   EmitOperand(reg, address);
1530 }
1531 
1532 
1533 void X86Assembler::cmpxchg8b(const Address& address) {
1534   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1535   EmitUint8(0x0F);
1536   EmitUint8(0xC7);
1537   EmitOperand(1, address);
1538 }
1539 
1540 
1541 void X86Assembler::mfence() {
1542   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1543   EmitUint8(0x0F);
1544   EmitUint8(0xAE);
1545   EmitUint8(0xF0);
1546 }
1547 
1548 X86Assembler* X86Assembler::fs() {
1549   // TODO: fs is a prefix and not an instruction
1550   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1551   EmitUint8(0x64);
1552   return this;
1553 }
1554 
1555 X86Assembler* X86Assembler::gs() {
1556   // TODO: gs is a prefix and not an instruction
1557   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1558   EmitUint8(0x65);
1559   return this;
1560 }
1561 
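// Adds an immediate to a register, using incl/decl for +/-1 and emitting nothing for zero.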
1562 void X86Assembler::AddImmediate(Register reg, const Immediate& imm) {
1563   int value = imm.value();
1564   if (value > 0) {
1565     if (value == 1) {
1566       incl(reg);
1567     } else if (value != 0) {
1568       addl(reg, imm);
1569     }
1570   } else if (value < 0) {
1571     value = -value;
1572     if (value == 1) {
1573       decl(reg);
1574     } else if (value != 0) {
1575       subl(reg, Immediate(value));
1576     }
1577   }
1578 }
1579 
1580 
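// Loads a 64-bit bit pattern into an XMM register by pushing both halves and reloading with
// movsd, then popping the temporaries by adjusting ESP.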
1581 void X86Assembler::LoadLongConstant(XmmRegister dst, int64_t value) {
1582   // TODO: Need to have a code constants table.
1583   pushl(Immediate(High32Bits(value)));
1584   pushl(Immediate(Low32Bits(value)));
1585   movsd(dst, Address(ESP, 0));
1586   addl(ESP, Immediate(2 * sizeof(int32_t)));
1587 }
1588 
1589 
1590 void X86Assembler::LoadDoubleConstant(XmmRegister dst, double value) {
1591   // TODO: Need to have a code constants table.
1592   int64_t constant = bit_cast<int64_t, double>(value);
1593   LoadLongConstant(dst, constant);
1594 }
1595 
1596 
1597 void X86Assembler::Align(int alignment, int offset) {
1598   CHECK(IsPowerOfTwo(alignment));
1599   // Emit nop instructions until the real position is aligned.
1600   while (((offset + buffer_.GetPosition()) & (alignment-1)) != 0) {
1601     nop();
1602   }
1603 }
1604 
1605 
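// Binds the label to the current buffer position and back-patches every 32-bit displacement
// previously linked to it.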
1606 void X86Assembler::Bind(Label* label) {
1607   int bound = buffer_.Size();
1608   CHECK(!label->IsBound());  // Labels can only be bound once.
1609   while (label->IsLinked()) {
1610     int position = label->LinkPosition();
1611     int next = buffer_.Load<int32_t>(position);
1612     buffer_.Store<int32_t>(position, bound - (position + 4));
1613     label->position_ = next;
1614   }
1615   label->BindTo(bound);
1616 }
1617 
1618 
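// Emits a ModRM-encoded operand (plus any SIB byte and displacement), folding reg_or_opcode
// into the reg/opcode field (bits 3-5) of the ModRM byte.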
1619 void X86Assembler::EmitOperand(int reg_or_opcode, const Operand& operand) {
1620   CHECK_GE(reg_or_opcode, 0);
1621   CHECK_LT(reg_or_opcode, 8);
1622   const int length = operand.length_;
1623   CHECK_GT(length, 0);
1624   // Emit the ModRM byte updated with the given reg value.
1625   CHECK_EQ(operand.encoding_[0] & 0x38, 0);
1626   EmitUint8(operand.encoding_[0] + (reg_or_opcode << 3));
1627   // Emit the rest of the encoded operand.
1628   for (int i = 1; i < length; i++) {
1629     EmitUint8(operand.encoding_[i]);
1630   }
1631 }
1632 
1633 
1634 void X86Assembler::EmitImmediate(const Immediate& imm) {
1635   EmitInt32(imm.value());
1636 }
1637 
1638 
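// Emits an ALU-style instruction with an immediate operand, picking the shortest encoding:
// sign-extended imm8 (0x83), the EAX short form, or the general imm32 form (0x81).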
1639 void X86Assembler::EmitComplex(int reg_or_opcode,
1640                                const Operand& operand,
1641                                const Immediate& immediate) {
1642   CHECK_GE(reg_or_opcode, 0);
1643   CHECK_LT(reg_or_opcode, 8);
1644   if (immediate.is_int8()) {
1645     // Use sign-extended 8-bit immediate.
1646     EmitUint8(0x83);
1647     EmitOperand(reg_or_opcode, operand);
1648     EmitUint8(immediate.value() & 0xFF);
1649   } else if (operand.IsRegister(EAX)) {
1650     // Use short form if the destination is eax.
1651     EmitUint8(0x05 + (reg_or_opcode << 3));
1652     EmitImmediate(immediate);
1653   } else {
1654     EmitUint8(0x81);
1655     EmitOperand(reg_or_opcode, operand);
1656     EmitImmediate(immediate);
1657   }
1658 }
1659 
1660 
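// Emits the rel32 displacement of a call/jump: a bound label produces the final backward
// offset, an unbound one is linked for later patching.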
1661 void X86Assembler::EmitLabel(Label* label, int instruction_size) {
1662   if (label->IsBound()) {
1663     int offset = label->Position() - buffer_.Size();
1664     CHECK_LE(offset, 0);
1665     EmitInt32(offset - instruction_size);
1666   } else {
1667     EmitLabelLink(label);
1668   }
1669 }
1670 
1671 
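// Links an unbound label: the emitted 32-bit slot temporarily holds the previous link position,
// forming a chain that Bind() walks and patches.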
1672 void X86Assembler::EmitLabelLink(Label* label) {
1673   CHECK(!label->IsBound());
1674   int position = buffer_.Size();
1675   EmitInt32(label->position_);
1676   label->LinkTo(position);
1677 }
1678 
1679 
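// Shift by an immediate count: opcode 0xD1 for a count of 1, otherwise 0xC1 followed by the
// 8-bit count.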
1680 void X86Assembler::EmitGenericShift(int reg_or_opcode,
1681                                     const Operand& operand,
1682                                     const Immediate& imm) {
1683   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1684   CHECK(imm.is_int8());
1685   if (imm.value() == 1) {
1686     EmitUint8(0xD1);
1687     EmitOperand(reg_or_opcode, operand);
1688   } else {
1689     EmitUint8(0xC1);
1690     EmitOperand(reg_or_opcode, operand);
1691     EmitUint8(imm.value() & 0xFF);
1692   }
1693 }
1694 
1695 
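// Shift by a register count: the count must be in ECX (opcode 0xD3).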
1696 void X86Assembler::EmitGenericShift(int reg_or_opcode,
1697                                     const Operand& operand,
1698                                     Register shifter) {
1699   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1700   CHECK_EQ(shifter, ECX);
1701   EmitUint8(0xD3);
1702   EmitOperand(reg_or_opcode, operand);
1703 }
1704 
1705 static dwarf::Reg DWARFReg(Register reg) {
1706   return dwarf::Reg::X86Core(static_cast<int>(reg));
1707 }
1708 
1709 constexpr size_t kFramePointerSize = 4;
1710 
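// Method prologue: push callee-save registers, reserve the rest of the frame, push the
// ArtMethod*, then store register arguments to their stack slots, keeping CFI data in sync.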
1711 void X86Assembler::BuildFrame(size_t frame_size, ManagedRegister method_reg,
1712                               const std::vector<ManagedRegister>& spill_regs,
1713                               const ManagedRegisterEntrySpills& entry_spills) {
1714   DCHECK_EQ(buffer_.Size(), 0U);  // Nothing emitted yet.
1715   cfi_.SetCurrentCFAOffset(4);  // Return address on stack.
1716   CHECK_ALIGNED(frame_size, kStackAlignment);
1717   int gpr_count = 0;
1718   for (int i = spill_regs.size() - 1; i >= 0; --i) {
1719     Register spill = spill_regs.at(i).AsX86().AsCpuRegister();
1720     pushl(spill);
1721     gpr_count++;
1722     cfi_.AdjustCFAOffset(kFramePointerSize);
1723     cfi_.RelOffset(DWARFReg(spill), 0);
1724   }
1725 
1726   // return address then method on stack.
1727   int32_t adjust = frame_size - gpr_count * kFramePointerSize -
1728       kFramePointerSize /*method*/ -
1729       kFramePointerSize /*return address*/;
1730   addl(ESP, Immediate(-adjust));
1731   cfi_.AdjustCFAOffset(adjust);
1732   pushl(method_reg.AsX86().AsCpuRegister());
1733   cfi_.AdjustCFAOffset(kFramePointerSize);
1734   DCHECK_EQ(static_cast<size_t>(cfi_.GetCurrentCFAOffset()), frame_size);
1735 
1736   for (size_t i = 0; i < entry_spills.size(); ++i) {
1737     ManagedRegisterSpill spill = entry_spills.at(i);
1738     if (spill.AsX86().IsCpuRegister()) {
1739       int offset = frame_size + spill.getSpillOffset();
1740       movl(Address(ESP, offset), spill.AsX86().AsCpuRegister());
1741     } else {
1742       DCHECK(spill.AsX86().IsXmmRegister());
1743       if (spill.getSize() == 8) {
1744         movsd(Address(ESP, frame_size + spill.getSpillOffset()), spill.AsX86().AsXmmRegister());
1745       } else {
1746         CHECK_EQ(spill.getSize(), 4);
1747         movss(Address(ESP, frame_size + spill.getSpillOffset()), spill.AsX86().AsXmmRegister());
1748       }
1749     }
1750   }
1751 }
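// Resulting frame, roughly (higher addresses first):
//
//   [caller's argument area / entry spill slots]   <- ESP + frame_size and up
//   [return address]
//   [callee-save GPRs, pushed in reverse order]
//   [frame-local space reserved by the addl]
//   [ArtMethod*]                                   <- ESP
//
// Since the CFA offset ends up equal to frame_size, ESP + frame_size points
// just above the return address, so entry spills written at
// ESP + frame_size + offset land back in the caller's argument area, giving
// incoming register arguments an addressable home slot.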

void X86Assembler::RemoveFrame(size_t frame_size, const std::vector<ManagedRegister>& spill_regs) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  cfi_.RememberState();
  // -kFramePointerSize for ArtMethod*.
  int adjust = frame_size - spill_regs.size() * kFramePointerSize - kFramePointerSize;
  addl(ESP, Immediate(adjust));
  cfi_.AdjustCFAOffset(-adjust);
  for (size_t i = 0; i < spill_regs.size(); ++i) {
    Register spill = spill_regs.at(i).AsX86().AsCpuRegister();
    popl(spill);
    cfi_.AdjustCFAOffset(-static_cast<int>(kFramePointerSize));
    cfi_.Restore(DWARFReg(spill));
  }
  ret();
  // The CFI should be restored for any code that follows the exit block.
  cfi_.RestoreState();
  cfi_.DefCFAOffset(frame_size);
}
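// CFI bookkeeping, roughly: RememberState()/RestoreState() bracket the
// epilogue so the CFA and register rules emitted while the frame is torn down
// apply only to the exit path itself; DefCFAOffset then re-asserts the full
// frame for any code laid out after the ret, which is still inside the frame.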

void X86Assembler::IncreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  addl(ESP, Immediate(-adjust));
  cfi_.AdjustCFAOffset(adjust);
}

void X86Assembler::DecreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  addl(ESP, Immediate(adjust));
  cfi_.AdjustCFAOffset(-adjust);
}
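// Frame growth is written as addl(ESP, Immediate(-adjust)); the effect is the
// same as a subl, and the matching CFA adjustment keeps the unwind info
// accurate while the frame is temporarily resized (presumably around calls
// that need extra outgoing-argument space).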

void X86Assembler::Store(FrameOffset offs, ManagedRegister msrc, size_t size) {
  X86ManagedRegister src = msrc.AsX86();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    movl(Address(ESP, offs), src.AsCpuRegister());
  } else if (src.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    movl(Address(ESP, offs), src.AsRegisterPairLow());
    movl(Address(ESP, FrameOffset(offs.Int32Value() + 4)),
         src.AsRegisterPairHigh());
  } else if (src.IsX87Register()) {
    if (size == 4) {
      fstps(Address(ESP, offs));
    } else {
      fstpl(Address(ESP, offs));
    }
  } else {
    CHECK(src.IsXmmRegister());
    if (size == 4) {
      movss(Address(ESP, offs), src.AsXmmRegister());
    } else {
      movsd(Address(ESP, offs), src.AsXmmRegister());
    }
  }
}

void X86Assembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
  X86ManagedRegister src = msrc.AsX86();
  CHECK(src.IsCpuRegister());
  movl(Address(ESP, dest), src.AsCpuRegister());
}

void X86Assembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  X86ManagedRegister src = msrc.AsX86();
  CHECK(src.IsCpuRegister());
  movl(Address(ESP, dest), src.AsCpuRegister());
}

void X86Assembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm,
                                         ManagedRegister) {
  movl(Address(ESP, dest), Immediate(imm));
}

void X86Assembler::StoreImmediateToThread32(ThreadOffset<4> dest, uint32_t imm,
                                            ManagedRegister) {
  fs()->movl(Address::Absolute(dest), Immediate(imm));
}

void X86Assembler::StoreStackOffsetToThread32(ThreadOffset<4> thr_offs,
                                              FrameOffset fr_offs,
                                              ManagedRegister mscratch) {
  X86ManagedRegister scratch = mscratch.AsX86();
  CHECK(scratch.IsCpuRegister());
  leal(scratch.AsCpuRegister(), Address(ESP, fr_offs));
  fs()->movl(Address::Absolute(thr_offs), scratch.AsCpuRegister());
}

void X86Assembler::StoreStackPointerToThread32(ThreadOffset<4> thr_offs) {
  fs()->movl(Address::Absolute(thr_offs), ESP);
}
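// Thread-local accesses: the fs()-> forms emit a segment-override prefix
// before the instruction, so Address::Absolute(thread_offset) is resolved
// relative to the segment that the runtime points at the current Thread. For
// example, fs()->movl(Address::Absolute(thr_offs), ESP) stores ESP into a
// field of Thread::Current() at that offset without needing the Thread* in a
// general-purpose register. (Which segment register is used is a detail of
// the runtime's TLS setup, not of this file.)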

void X86Assembler::StoreSpanning(FrameOffset /*dst*/, ManagedRegister /*src*/,
                                 FrameOffset /*in_off*/, ManagedRegister /*scratch*/) {
  UNIMPLEMENTED(FATAL);  // this case only currently exists for ARM
}

void X86Assembler::Load(ManagedRegister mdest, FrameOffset src, size_t size) {
  X86ManagedRegister dest = mdest.AsX86();
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (dest.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    movl(dest.AsCpuRegister(), Address(ESP, src));
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    movl(dest.AsRegisterPairLow(), Address(ESP, src));
    movl(dest.AsRegisterPairHigh(), Address(ESP, FrameOffset(src.Int32Value() + 4)));
  } else if (dest.IsX87Register()) {
    if (size == 4) {
      flds(Address(ESP, src));
    } else {
      fldl(Address(ESP, src));
    }
  } else {
    CHECK(dest.IsXmmRegister());
    if (size == 4) {
      movss(dest.AsXmmRegister(), Address(ESP, src));
    } else {
      movsd(dest.AsXmmRegister(), Address(ESP, src));
    }
  }
}

void X86Assembler::LoadFromThread32(ManagedRegister mdest, ThreadOffset<4> src, size_t size) {
  X86ManagedRegister dest = mdest.AsX86();
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (dest.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    fs()->movl(dest.AsCpuRegister(), Address::Absolute(src));
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    fs()->movl(dest.AsRegisterPairLow(), Address::Absolute(src));
    fs()->movl(dest.AsRegisterPairHigh(), Address::Absolute(ThreadOffset<4>(src.Int32Value() + 4)));
  } else if (dest.IsX87Register()) {
    if (size == 4) {
      fs()->flds(Address::Absolute(src));
    } else {
      fs()->fldl(Address::Absolute(src));
    }
  } else {
    CHECK(dest.IsXmmRegister());
    if (size == 4) {
      fs()->movss(dest.AsXmmRegister(), Address::Absolute(src));
    } else {
      fs()->movsd(dest.AsXmmRegister(), Address::Absolute(src));
    }
  }
}

void X86Assembler::LoadRef(ManagedRegister mdest, FrameOffset src) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister());
  movl(dest.AsCpuRegister(), Address(ESP, src));
}

void X86Assembler::LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
                           bool poison_reference) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister() && base.AsX86().IsCpuRegister());
  movl(dest.AsCpuRegister(), Address(base.AsX86().AsCpuRegister(), offs));
  if (kPoisonHeapReferences && poison_reference) {
    negl(dest.AsCpuRegister());
  }
}

void X86Assembler::LoadRawPtr(ManagedRegister mdest, ManagedRegister base,
                              Offset offs) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister() && base.AsX86().IsCpuRegister());
  movl(dest.AsCpuRegister(), Address(base.AsX86().AsCpuRegister(), offs));
}

void X86Assembler::LoadRawPtrFromThread32(ManagedRegister mdest,
                                          ThreadOffset<4> offs) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister());
  fs()->movl(dest.AsCpuRegister(), Address::Absolute(offs));
}

void X86Assembler::SignExtend(ManagedRegister mreg, size_t size) {
  X86ManagedRegister reg = mreg.AsX86();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsCpuRegister()) << reg;
  if (size == 1) {
    movsxb(reg.AsCpuRegister(), reg.AsByteRegister());
  } else {
    movsxw(reg.AsCpuRegister(), reg.AsCpuRegister());
  }
}

void X86Assembler::ZeroExtend(ManagedRegister mreg, size_t size) {
  X86ManagedRegister reg = mreg.AsX86();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsCpuRegister()) << reg;
  if (size == 1) {
    movzxb(reg.AsCpuRegister(), reg.AsByteRegister());
  } else {
    movzxw(reg.AsCpuRegister(), reg.AsCpuRegister());
  }
}

void X86Assembler::Move(ManagedRegister mdest, ManagedRegister msrc, size_t size) {
  X86ManagedRegister dest = mdest.AsX86();
  X86ManagedRegister src = msrc.AsX86();
  if (!dest.Equals(src)) {
    if (dest.IsCpuRegister() && src.IsCpuRegister()) {
      movl(dest.AsCpuRegister(), src.AsCpuRegister());
    } else if (src.IsX87Register() && dest.IsXmmRegister()) {
      // Pass via stack and pop X87 register
      subl(ESP, Immediate(16));
      if (size == 4) {
        CHECK_EQ(src.AsX87Register(), ST0);
        fstps(Address(ESP, 0));
        movss(dest.AsXmmRegister(), Address(ESP, 0));
      } else {
        CHECK_EQ(src.AsX87Register(), ST0);
        fstpl(Address(ESP, 0));
        movsd(dest.AsXmmRegister(), Address(ESP, 0));
      }
      addl(ESP, Immediate(16));
    } else {
      // TODO: x87, SSE
      UNIMPLEMENTED(FATAL) << ": Move " << dest << ", " << src;
    }
  }
}
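// x87 -> XMM moves have no direct register-to-register encoding, so the value
// takes a round trip through a temporary stack slot: fstps/fstpl stores ST0
// and pops the x87 stack, then movss/movsd reloads the same bits into the XMM
// register. The 16-byte scratch area is simply more than enough room for the
// 8-byte case, and it is released immediately afterwards.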

void X86Assembler::CopyRef(FrameOffset dest, FrameOffset src,
                           ManagedRegister mscratch) {
  X86ManagedRegister scratch = mscratch.AsX86();
  CHECK(scratch.IsCpuRegister());
  movl(scratch.AsCpuRegister(), Address(ESP, src));
  movl(Address(ESP, dest), scratch.AsCpuRegister());
}

void X86Assembler::CopyRawPtrFromThread32(FrameOffset fr_offs,
                                          ThreadOffset<4> thr_offs,
                                          ManagedRegister mscratch) {
  X86ManagedRegister scratch = mscratch.AsX86();
  CHECK(scratch.IsCpuRegister());
  fs()->movl(scratch.AsCpuRegister(), Address::Absolute(thr_offs));
  Store(fr_offs, scratch, 4);
}

void X86Assembler::CopyRawPtrToThread32(ThreadOffset<4> thr_offs,
                                        FrameOffset fr_offs,
                                        ManagedRegister mscratch) {
  X86ManagedRegister scratch = mscratch.AsX86();
  CHECK(scratch.IsCpuRegister());
  Load(scratch, fr_offs, 4);
  fs()->movl(Address::Absolute(thr_offs), scratch.AsCpuRegister());
}

void X86Assembler::Copy(FrameOffset dest, FrameOffset src,
                        ManagedRegister mscratch,
                        size_t size) {
  X86ManagedRegister scratch = mscratch.AsX86();
  if (scratch.IsCpuRegister() && size == 8) {
    Load(scratch, src, 4);
    Store(dest, scratch, 4);
    Load(scratch, FrameOffset(src.Int32Value() + 4), 4);
    Store(FrameOffset(dest.Int32Value() + 4), scratch, 4);
  } else {
    Load(scratch, src, size);
    Store(dest, scratch, size);
  }
}

void X86Assembler::Copy(FrameOffset /*dst*/, ManagedRegister /*src_base*/, Offset /*src_offset*/,
                        ManagedRegister /*scratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL);
}

void X86Assembler::Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src,
                        ManagedRegister scratch, size_t size) {
  CHECK(scratch.IsNoRegister());
  CHECK_EQ(size, 4u);
  pushl(Address(ESP, src));
  popl(Address(dest_base.AsX86().AsCpuRegister(), dest_offset));
}
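// A scratch-free memory-to-memory copy: pushl reads the 32-bit value straight
// from the frame slot and popl writes it to [dest_base + dest_offset], using
// the hardware stack as the temporary instead of clobbering a register. The
// net ESP change is zero, and the pop's base register is not ESP, so the
// intermediate stack adjustment does not disturb the addressing.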

void X86Assembler::Copy(FrameOffset dest, FrameOffset src_base, Offset src_offset,
                        ManagedRegister mscratch, size_t size) {
  Register scratch = mscratch.AsX86().AsCpuRegister();
  CHECK_EQ(size, 4u);
  movl(scratch, Address(ESP, src_base));
  movl(scratch, Address(scratch, src_offset));
  movl(Address(ESP, dest), scratch);
}

void X86Assembler::Copy(ManagedRegister dest, Offset dest_offset,
                        ManagedRegister src, Offset src_offset,
                        ManagedRegister scratch, size_t size) {
  CHECK_EQ(size, 4u);
  CHECK(scratch.IsNoRegister());
  pushl(Address(src.AsX86().AsCpuRegister(), src_offset));
  popl(Address(dest.AsX86().AsCpuRegister(), dest_offset));
}

void X86Assembler::Copy(FrameOffset dest, Offset dest_offset, FrameOffset src, Offset src_offset,
                        ManagedRegister mscratch, size_t size) {
  Register scratch = mscratch.AsX86().AsCpuRegister();
  CHECK_EQ(size, 4u);
  CHECK_EQ(dest.Int32Value(), src.Int32Value());
  movl(scratch, Address(ESP, src));
  pushl(Address(scratch, src_offset));
  popl(Address(scratch, dest_offset));
}

void X86Assembler::MemoryBarrier(ManagedRegister) {
  mfence();
}

void X86Assembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
                                          FrameOffset handle_scope_offset,
                                          ManagedRegister min_reg, bool null_allowed) {
  X86ManagedRegister out_reg = mout_reg.AsX86();
  X86ManagedRegister in_reg = min_reg.AsX86();
  CHECK(in_reg.IsCpuRegister());
  CHECK(out_reg.IsCpuRegister());
  VerifyObject(in_reg, null_allowed);
  if (null_allowed) {
    Label null_arg;
    if (!out_reg.Equals(in_reg)) {
      xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister());
    }
    testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister());
    j(kZero, &null_arg);
    leal(out_reg.AsCpuRegister(), Address(ESP, handle_scope_offset));
    Bind(&null_arg);
  } else {
    leal(out_reg.AsCpuRegister(), Address(ESP, handle_scope_offset));
  }
}
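// Handle scope entries, roughly: the reference itself lives in a slot of the
// frame's handle scope, and the value handed to native code is the address of
// that slot. When the argument may be null, out_reg therefore becomes either
// zero (preserving the "null jobject" convention) or the slot's address. The
// xorl pre-zeroes out_reg only when it differs from in_reg, since zeroing a
// shared register would destroy the value before the null test, and a null
// in_reg is already the desired zero.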

void X86Assembler::CreateHandleScopeEntry(FrameOffset out_off,
                                          FrameOffset handle_scope_offset,
                                          ManagedRegister mscratch,
                                          bool null_allowed) {
  X86ManagedRegister scratch = mscratch.AsX86();
  CHECK(scratch.IsCpuRegister());
  if (null_allowed) {
    Label null_arg;
    movl(scratch.AsCpuRegister(), Address(ESP, handle_scope_offset));
    testl(scratch.AsCpuRegister(), scratch.AsCpuRegister());
    j(kZero, &null_arg);
    leal(scratch.AsCpuRegister(), Address(ESP, handle_scope_offset));
    Bind(&null_arg);
  } else {
    leal(scratch.AsCpuRegister(), Address(ESP, handle_scope_offset));
  }
  Store(out_off, scratch, 4);
}

// Given a handle scope entry, load the associated reference.
void X86Assembler::LoadReferenceFromHandleScope(ManagedRegister mout_reg,
                                                ManagedRegister min_reg) {
  X86ManagedRegister out_reg = mout_reg.AsX86();
  X86ManagedRegister in_reg = min_reg.AsX86();
  CHECK(out_reg.IsCpuRegister());
  CHECK(in_reg.IsCpuRegister());
  Label null_arg;
  if (!out_reg.Equals(in_reg)) {
    xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister());
  }
  testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister());
  j(kZero, &null_arg);
  movl(out_reg.AsCpuRegister(), Address(in_reg.AsCpuRegister(), 0));
  Bind(&null_arg);
}

void X86Assembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references
}

void X86Assembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references
}

void X86Assembler::Call(ManagedRegister mbase, Offset offset, ManagedRegister) {
  X86ManagedRegister base = mbase.AsX86();
  CHECK(base.IsCpuRegister());
  call(Address(base.AsCpuRegister(), offset.Int32Value()));
  // TODO: place reference map on call
}

void X86Assembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) {
  Register scratch = mscratch.AsX86().AsCpuRegister();
  movl(scratch, Address(ESP, base));
  call(Address(scratch, offset));
}

void X86Assembler::CallFromThread32(ThreadOffset<4> offset, ManagedRegister /*mscratch*/) {
  fs()->call(Address::Absolute(offset));
}

void X86Assembler::GetCurrentThread(ManagedRegister tr) {
  fs()->movl(tr.AsX86().AsCpuRegister(),
             Address::Absolute(Thread::SelfOffset<4>()));
}

void X86Assembler::GetCurrentThread(FrameOffset offset,
                                    ManagedRegister mscratch) {
  X86ManagedRegister scratch = mscratch.AsX86();
  fs()->movl(scratch.AsCpuRegister(), Address::Absolute(Thread::SelfOffset<4>()));
  movl(Address(ESP, offset), scratch.AsCpuRegister());
}

void X86Assembler::ExceptionPoll(ManagedRegister /*scratch*/, size_t stack_adjust) {
  X86ExceptionSlowPath* slow = new X86ExceptionSlowPath(stack_adjust);
  buffer_.EnqueueSlowPath(slow);
  fs()->cmpl(Address::Absolute(Thread::ExceptionOffset<4>()), Immediate(0));
  j(kNotEqual, slow->Entry());
}
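// Exception polling, roughly: the fast path is a single thread-relative cmpl
// of the pending-exception field against null plus a normally not-taken
// conditional jump. Only when an exception is pending does control reach the
// slow path below, which undoes any extra stack adjustment and calls the
// pDeliverException entrypoint, which does not return. The slow-path code is
// only queued here and emitted out of line later by the buffer.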

void X86ExceptionSlowPath::Emit(Assembler* sasm) {
  X86Assembler* sp_asm = down_cast<X86Assembler*>(sasm);
#define __ sp_asm->
  __ Bind(&entry_);
  // Note: the return value is dead
  if (stack_adjust_ != 0) {  // Fix up the frame.
    __ DecreaseFrameSize(stack_adjust_);
  }
  // Pass exception as argument in EAX
  __ fs()->movl(EAX, Address::Absolute(Thread::ExceptionOffset<4>()));
  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(4, pDeliverException)));
  // this call should never return
  __ int3();
#undef __
}
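// The local "__" macro is just shorthand so the slow-path body reads like the
// rest of the assembler code (each line expands to sp_asm->...); it is
// #undef'd immediately so the macro does not leak out of this function.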

}  // namespace x86
}  // namespace art