// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_X64_ASSEMBLER_X64_INL_H_
#define V8_X64_ASSEMBLER_X64_INL_H_

#include "src/x64/assembler-x64.h"

#include "src/base/cpu.h"
#include "src/debug/debug.h"
#include "src/v8memory.h"

namespace v8 {
namespace internal {

bool CpuFeatures::SupportsCrankshaft() { return true; }


// -----------------------------------------------------------------------------
// Implementation of Assembler

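// Opcode byte of a CALL instruction taking a 32-bit relative displacement
// (call rel32).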
static const byte kCallOpcode = 0xE8;
// The length of pushq(rbp), movp(rbp, rsp), Push(rsi) and Push(rdi).
static const int kNoCodeAgeSequenceLength = kPointerSize == kInt64Size ? 6 : 17;

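// The emit* helpers below store a value of the given width at the current
// pc_ (in the native little-endian byte order of x64) and advance pc_ past it.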
void Assembler::emitl(uint32_t x) {
  Memory::uint32_at(pc_) = x;
  pc_ += sizeof(uint32_t);
}


void Assembler::emitp(void* x, RelocInfo::Mode rmode) {
  uintptr_t value = reinterpret_cast<uintptr_t>(x);
  Memory::uintptr_at(pc_) = value;
  if (!RelocInfo::IsNone(rmode)) {
    RecordRelocInfo(rmode, value);
  }
  pc_ += sizeof(uintptr_t);
}


void Assembler::emitq(uint64_t x) {
  Memory::uint64_at(pc_) = x;
  pc_ += sizeof(uint64_t);
}


void Assembler::emitw(uint16_t x) {
  Memory::uint16_at(pc_) = x;
  pc_ += sizeof(uint16_t);
}


void Assembler::emit_code_target(Handle<Code> target,
                                 RelocInfo::Mode rmode,
                                 TypeFeedbackId ast_id) {
  DCHECK(RelocInfo::IsCodeTarget(rmode) ||
      rmode == RelocInfo::CODE_AGE_SEQUENCE);
  if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
    RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, ast_id.ToInt());
  } else {
    RecordRelocInfo(rmode);
  }
  int current = code_targets_.length();
  if (current > 0 && code_targets_.last().is_identical_to(target)) {
    // Optimization if we keep jumping to the same code target.
    emitl(current - 1);
  } else {
    code_targets_.Add(target);
    emitl(current);
  }
}


void Assembler::emit_runtime_entry(Address entry, RelocInfo::Mode rmode) {
  DCHECK(RelocInfo::IsRuntimeEntry(rmode));
  RecordRelocInfo(rmode);
  emitl(static_cast<uint32_t>(entry - isolate()->code_range()->start()));
}

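// A REX prefix has the form 0100WRXB. emit_rex_64 emits a prefix with REX.W
// set (0x48), marking a 64-bit operand size; REX.R, REX.X and REX.B supply
// the fourth (high) bit of the ModR/M reg field, the SIB index and the
// ModR/M rm / SIB base, respectively.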
void Assembler::emit_rex_64(Register reg, Register rm_reg) {
  emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, XMMRegister rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, const Operand& op) {
  emit(0x48 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_64(XMMRegister reg, const Operand& op) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | op.rex_);
}


void Assembler::emit_rex_64(Register rm_reg) {
  DCHECK_EQ(rm_reg.code() & 0xf, rm_reg.code());
  emit(0x48 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(const Operand& op) {
  emit(0x48 | op.rex_);
}

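// emit_rex_32 emits a REX prefix with REX.W clear (0x40 plus the extension
// bits). Unlike emit_optional_rex_32 below, it emits the prefix even when
// all extension bits are zero.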
void Assembler::emit_rex_32(Register reg, Register rm_reg) {
  emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(Register reg, const Operand& op) {
  emit(0x40 | reg.high_bit() << 2  | op.rex_);
}


void Assembler::emit_rex_32(Register rm_reg) {
  emit(0x40 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(const Operand& op) {
  emit(0x40 | op.rex_);
}

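// emit_optional_rex_32 emits a REX prefix only if at least one extension bit
// is needed, i.e. only when an extended register (r8-r15 or xmm8-xmm15) or an
// operand requiring REX bits is involved.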
void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
  byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, const Operand& op) {
  byte rex_bits =  reg.high_bit() << 2 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, const Operand& op) {
  byte rex_bits =  (reg.code() & 0x8) >> 1 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
  byte rex_bits =  (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
  byte rex_bits =  (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, XMMRegister base) {
  byte rex_bits =  (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register rm_reg) {
  if (rm_reg.high_bit()) emit(0x41);
}


void Assembler::emit_optional_rex_32(XMMRegister rm_reg) {
  if (rm_reg.high_bit()) emit(0x41);
}


void Assembler::emit_optional_rex_32(const Operand& op) {
  if (op.rex_ != 0) emit(0x40 | op.rex_);
}

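// VEX prefixes come in a 2-byte form (0xC5 followed by R.vvvv.L.pp) and a
// 3-byte form (0xC4 followed by RXB.m-mmmm and W.vvvv.L.pp). The R, X, B and
// vvvv fields are stored inverted, which is why the helpers below complement
// the register bits before emitting them.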
// byte 1 of 3-byte VEX
void Assembler::emit_vex3_byte1(XMMRegister reg, XMMRegister rm,
                                LeadingOpcode m) {
  byte rxb = ~((reg.high_bit() << 2) | rm.high_bit()) << 5;
  emit(rxb | m);
}


// byte 1 of 3-byte VEX
void Assembler::emit_vex3_byte1(XMMRegister reg, const Operand& rm,
                                LeadingOpcode m) {
  byte rxb = ~((reg.high_bit() << 2) | rm.rex_) << 5;
  emit(rxb | m);
}


// byte 1 of 2-byte VEX
void Assembler::emit_vex2_byte1(XMMRegister reg, XMMRegister v, VectorLength l,
                                SIMDPrefix pp) {
  byte rv = ~((reg.high_bit() << 4) | v.code()) << 3;
  emit(rv | l | pp);
}


// byte 2 of 3-byte VEX
void Assembler::emit_vex3_byte2(VexW w, XMMRegister v, VectorLength l,
                                SIMDPrefix pp) {
  emit(w | ((~v.code() & 0xf) << 3) | l | pp);
}

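// The shorter 2-byte VEX form can only encode the implied 0F opcode map with
// W = 0 and without the X and B extension bits, so fall back to the 3-byte
// form whenever any of those are needed.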
void Assembler::emit_vex_prefix(XMMRegister reg, XMMRegister vreg,
                                XMMRegister rm, VectorLength l, SIMDPrefix pp,
                                LeadingOpcode mm, VexW w) {
  if (rm.high_bit() || mm != k0F || w != kW0) {
    emit_vex3_byte0();
    emit_vex3_byte1(reg, rm, mm);
    emit_vex3_byte2(w, vreg, l, pp);
  } else {
    emit_vex2_byte0();
    emit_vex2_byte1(reg, vreg, l, pp);
  }
}


void Assembler::emit_vex_prefix(Register reg, Register vreg, Register rm,
                                VectorLength l, SIMDPrefix pp, LeadingOpcode mm,
                                VexW w) {
  XMMRegister ireg = {reg.code()};
  XMMRegister ivreg = {vreg.code()};
  XMMRegister irm = {rm.code()};
  emit_vex_prefix(ireg, ivreg, irm, l, pp, mm, w);
}


void Assembler::emit_vex_prefix(XMMRegister reg, XMMRegister vreg,
                                const Operand& rm, VectorLength l,
                                SIMDPrefix pp, LeadingOpcode mm, VexW w) {
  if (rm.rex_ || mm != k0F || w != kW0) {
    emit_vex3_byte0();
    emit_vex3_byte1(reg, rm, mm);
    emit_vex3_byte2(w, vreg, l, pp);
  } else {
    emit_vex2_byte0();
    emit_vex2_byte1(reg, vreg, l, pp);
  }
}


void Assembler::emit_vex_prefix(Register reg, Register vreg, const Operand& rm,
                                VectorLength l, SIMDPrefix pp, LeadingOpcode mm,
                                VexW w) {
  XMMRegister ireg = {reg.code()};
  XMMRegister ivreg = {vreg.code()};
  emit_vex_prefix(ireg, ivreg, rm, l, pp, mm, w);
}

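// Code targets are encoded as a 32-bit displacement relative to the end of
// the 4-byte field holding it, so the absolute target is disp + pc + 4.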
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  return Memory::int32_at(pc) + pc + 4;
}


void Assembler::set_target_address_at(Isolate* isolate, Address pc,
                                      Address constant_pool, Address target,
                                      ICacheFlushMode icache_flush_mode) {
  Memory::int32_at(pc) = static_cast<int32_t>(target - pc - 4);
  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
    Assembler::FlushICache(isolate, pc, sizeof(int32_t));
  }
}


void Assembler::deserialization_set_target_internal_reference_at(
    Isolate* isolate, Address pc, Address target, RelocInfo::Mode mode) {
  Memory::Address_at(pc) = target;
}


Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}


Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
  return code_targets_[Memory::int32_at(pc)];
}

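// Runtime entries are encoded as a 32-bit offset from the start of the
// isolate's code range (see emit_runtime_entry above).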
Address Assembler::runtime_entry_at(Address pc) {
  return Memory::int32_at(pc) + isolate()->code_range()->start();
}

// -----------------------------------------------------------------------------
// Implementation of RelocInfo

// The modes possibly affected by apply must be in kApplyMask.
void RelocInfo::apply(intptr_t delta) {
  if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
    Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
  } else if (IsCodeAgeSequence(rmode_)) {
    if (*pc_ == kCallOpcode) {
      int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
      *p -= static_cast<int32_t>(delta);  // Relocate entry.
    }
  } else if (IsInternalReference(rmode_)) {
    // absolute code pointer inside code object moves with the code object.
    Memory::Address_at(pc_) += delta;
  }
}

Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}


Address RelocInfo::constant_pool_entry_address() {
  UNREACHABLE();
  return NULL;
}


int RelocInfo::target_address_size() {
  if (IsCodedSpecially()) {
    return Assembler::kSpecialTargetSize;
  } else {
    return kPointerSize;
  }
}


void RelocInfo::set_target_address(Address target,
                                   WriteBarrierMode write_barrier_mode,
                                   ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(isolate_, pc_, host_, target,
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL &&
      IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(pc_);
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  if (rmode_ == EMBEDDED_OBJECT) {
    return Memory::Object_Handle_at(pc_);
  } else {
    return origin->code_target_object_handle_at(pc_);
  }
}


Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return Memory::Address_at(pc_);
}


Address RelocInfo::target_internal_reference() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return Memory::Address_at(pc_);
}


Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}


void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Memory::Object_at(pc_) = target;
  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
    Assembler::FlushICache(isolate_, pc_, sizeof(Address));
  }
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}

Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return origin->runtime_entry_at(pc_);
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target) {
    set_target_address(target, write_barrier_mode, icache_flush_mode);
  }
}


Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
    Assembler::FlushICache(isolate_, pc_, sizeof(Address));
  }
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because cell can never be on
    // evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}

void RelocInfo::WipeOut() {
  if (IsEmbeddedObject(rmode_) || IsExternalReference(rmode_) ||
      IsInternalReference(rmode_)) {
    Memory::Address_at(pc_) = NULL;
  } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
    // Effectively write zero into the relocation.
    Assembler::set_target_address_at(isolate_, pc_, host_,
                                     pc_ + sizeof(int32_t));
  } else {
    UNREACHABLE();
  }
}


bool RelocInfo::IsPatchedReturnSequence() {
  // The recognized call sequence is:
  //  movq(kScratchRegister, address); call(kScratchRegister);
  // It only needs to be distinguished from a return sequence
  //  movq(rsp, rbp); pop(rbp); ret(n); int3 *6
  // The 11th byte is int3 (0xCC) in the return sequence and
  // REX.WB (0x48+register bit) for the call sequence.
  return pc_[Assembler::kMoveAddressIntoScratchRegisterInstructionLength] !=
         0xCC;
}


bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  return !Assembler::IsNop(pc());
}

Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  DCHECK(*pc_ == kCallOpcode);
  return origin->code_target_object_handle_at(pc_ + 1);
}


Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  DCHECK(*pc_ == kCallOpcode);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + 1, host_));
}


void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(*pc_ == kCallOpcode);
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(
      isolate_, pc_ + 1, host_, stub->instruction_start(), icache_flush_mode);
}


Address RelocInfo::debug_call_address() {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  return Memory::Address_at(pc_ + Assembler::kPatchDebugBreakSlotAddressOffset);
}


void RelocInfo::set_debug_call_address(Address target) {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  Memory::Address_at(pc_ + Assembler::kPatchDebugBreakSlotAddressOffset) =
      target;
  Assembler::FlushICache(isolate_,
                         pc_ + Assembler::kPatchDebugBreakSlotAddressOffset,
                         sizeof(Address));
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}

void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
    Assembler::FlushICache(isolate, pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    visitor->VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
    Assembler::FlushICache(heap->isolate(), pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


// -----------------------------------------------------------------------------
// Implementation of Operand

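// The ModR/M byte encodes the addressing mode in bits 7..6 (mod), a register
// or opcode extension in bits 5..3 (reg), and the base register or register
// operand in bits 2..0 (rm). set_modrm fills in mod and rm; the reg field is
// supplied later when the assembler emits the operand.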
void Operand::set_modrm(int mod, Register rm_reg) {
  DCHECK(is_uint2(mod));
  buf_[0] = mod << 6 | rm_reg.low_bits();
  // Set REX.B to the high bit of rm.code().
  rex_ |= rm_reg.high_bit();
}

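// The SIB byte encodes the scale in bits 7..6, the index register in bits
// 5..3 and the base register in bits 2..0; the high bits of index and base
// go into REX.X and REX.B.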
void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
  DCHECK(len_ == 1);
  DCHECK(is_uint2(scale));
  // Use SIB with no index register only for base rsp or r12. Otherwise we
  // would skip the SIB byte entirely.
  DCHECK(!index.is(rsp) || base.is(rsp) || base.is(r12));
  buf_[1] = (scale << 6) | (index.low_bits() << 3) | base.low_bits();
  rex_ |= index.high_bit() << 1 | base.high_bit();
  len_ = 2;
}

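// Displacement bytes are appended directly after the ModR/M byte and the
// optional SIB byte in buf_.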
void Operand::set_disp8(int disp) {
  DCHECK(is_int8(disp));
  DCHECK(len_ == 1 || len_ == 2);
  int8_t* p = reinterpret_cast<int8_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int8_t);
}

void Operand::set_disp32(int disp) {
  DCHECK(len_ == 1 || len_ == 2);
  int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int32_t);
}

void Operand::set_disp64(int64_t disp) {
  DCHECK_EQ(1, len_);
  int64_t* p = reinterpret_cast<int64_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(disp);
}
}  // namespace internal
}  // namespace v8

#endif  // V8_X64_ASSEMBLER_X64_INL_H_