// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the
// distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been modified
// significantly by Google Inc.
// Copyright 2014 the V8 project authors. All rights reserved.

#ifndef V8_PPC_ASSEMBLER_PPC_INL_H_
#define V8_PPC_ASSEMBLER_PPC_INL_H_

#include "src/ppc/assembler-ppc.h"

#include "src/assembler.h"
#include "src/debug/debug.h"


namespace v8 {
namespace internal {


bool CpuFeatures::SupportsCrankshaft() { return true; }

bool CpuFeatures::SupportsSimd128() { return false; }

void RelocInfo::apply(intptr_t delta) {
  // absolute code pointer inside code object moves with the code object.
  if (IsInternalReference(rmode_)) {
    // Jump table entry
    Address target = Memory::Address_at(pc_);
    Memory::Address_at(pc_) = target + delta;
  } else {
    // mov sequence
    DCHECK(IsInternalReferenceEncoded(rmode_));
    Address target = Assembler::target_address_at(pc_, host_);
    Assembler::set_target_address_at(isolate_, pc_, host_, target + delta,
                                     SKIP_ICACHE_FLUSH);
  }
}


Address RelocInfo::target_internal_reference() {
  if (IsInternalReference(rmode_)) {
    // Jump table entry
    return Memory::Address_at(pc_);
  } else {
    // mov sequence
    DCHECK(IsInternalReferenceEncoded(rmode_));
    return Assembler::target_address_at(pc_, host_);
  }
}


Address RelocInfo::target_internal_reference_address() {
  DCHECK(IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_));
  return reinterpret_cast<Address>(pc_);
}


Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}

Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) ||
         rmode_ == EMBEDDED_OBJECT || rmode_ == EXTERNAL_REFERENCE);

  if (FLAG_enable_embedded_constant_pool &&
      Assembler::IsConstantPoolLoadStart(pc_)) {
    // We return the PC for embedded constant pool since this function is used
    // by the serializer and expects the address to reside within the code
    // object.
    return reinterpret_cast<Address>(pc_);
  }

  // Read the address of the word containing the target_address in an
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like LIS/ORI where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written.
  return reinterpret_cast<Address>(pc_);
}


Address RelocInfo::constant_pool_entry_address() {
  if (FLAG_enable_embedded_constant_pool) {
    Address constant_pool = host_->constant_pool();
    DCHECK(constant_pool);
    ConstantPoolEntry::Access access;
    if (Assembler::IsConstantPoolLoadStart(pc_, &access))
      return Assembler::target_constant_pool_address_at(
          pc_, constant_pool, access, ConstantPoolEntry::INTPTR);
  }
  UNREACHABLE();
  return NULL;
}


int RelocInfo::target_address_size() { return Assembler::kSpecialTargetSize; }


Address Assembler::target_address_from_return_address(Address pc) {
  // Returns the address of the call target from the return address that will
  // be returned to after a call.
  // Call sequence is :
  //  mov ip, @ call address
  //  mtlr ip
  //  blrl
  //  @ return address
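  // The materialization of the call address takes len instructions (a mov
  // sequence or a constant pool load), followed by mtlr and the branch, so
  // the sequence starts (len + 2) instructions before the return address.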
  int len;
  ConstantPoolEntry::Access access;
  if (FLAG_enable_embedded_constant_pool &&
      IsConstantPoolLoadEnd(pc - 3 * kInstrSize, &access)) {
    len = (access == ConstantPoolEntry::OVERFLOWED) ? 2 : 1;
  } else {
    len = kMovInstructionsNoConstantPool;
  }
  return pc - (len + 2) * kInstrSize;
}


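// Inverse of target_address_from_return_address: given the start of the call
// sequence, compute the address that execution returns to after the call.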
Address Assembler::return_address_from_call_start(Address pc) {
  int len;
  ConstantPoolEntry::Access access;
  if (FLAG_enable_embedded_constant_pool &&
      IsConstantPoolLoadStart(pc, &access)) {
    len = (access == ConstantPoolEntry::OVERFLOWED) ? 2 : 1;
  } else {
    len = kMovInstructionsNoConstantPool;
  }
  return pc + (len + 2) * kInstrSize;
}

Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(
      reinterpret_cast<Object**>(Assembler::target_address_at(pc_, host_)));
}


void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(isolate_, pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target));
    host()->GetHeap()->RecordWriteIntoCode(host(), this, target);
  }
}


Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target)
    set_target_address(target, write_barrier_mode, icache_flush_mode);
}


Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(host(), this,
                                                                  cell);
  }
}

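// The "no age" prologue and the code-aging sequence must occupy the same
// number of instructions; whichever is shorter is padded with nops up to
// kNoCodeAgeSequenceInstructions, the maximum of the two lengths.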
static const int kNoCodeAgeInstructions =
    FLAG_enable_embedded_constant_pool ? 7 : 6;
static const int kCodeAgingInstructions =
    Assembler::kMovInstructionsNoConstantPool + 3;
static const int kNoCodeAgeSequenceInstructions =
    ((kNoCodeAgeInstructions >= kCodeAgingInstructions)
         ? kNoCodeAgeInstructions
         : kCodeAgingInstructions);
static const int kNoCodeAgeSequenceNops =
    (kNoCodeAgeSequenceInstructions - kNoCodeAgeInstructions);
static const int kCodeAgingSequenceNops =
    (kNoCodeAgeSequenceInstructions - kCodeAgingInstructions);
static const int kCodeAgingTargetDelta = 1 * Assembler::kInstrSize;
static const int kNoCodeAgeSequenceLength =
    (kNoCodeAgeSequenceInstructions * Assembler::kInstrSize);


Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on PPC.
  return Handle<Object>();
}


Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + kCodeAgingTargetDelta, host_));
}


void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(isolate_, pc_ + kCodeAgingTargetDelta, host_,
                                   stub->instruction_start(),
                                   icache_flush_mode);
}


Address RelocInfo::debug_call_address() {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  return Assembler::target_address_at(pc_, host_);
}


void RelocInfo::set_debug_call_address(Address target) {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  Assembler::set_target_address_at(isolate_, pc_, host_, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_));
  if (IsInternalReference(rmode_)) {
    // Jump table entry
    Memory::Address_at(pc_) = NULL;
  } else if (IsInternalReferenceEncoded(rmode_)) {
    // mov sequence
    // Currently used only by deserializer, no need to flush.
    Assembler::set_target_address_at(isolate_, pc_, host_, NULL,
                                     SKIP_ICACHE_FLUSH);
  } else {
    Assembler::set_target_address_at(isolate_, pc_, host_, NULL);
  }
}

template <typename ObjectVisitor>
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    visitor->VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template <typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}

Operand::Operand(intptr_t immediate, RelocInfo::Mode rmode) {
  rm_ = no_reg;
  imm_ = immediate;
  rmode_ = rmode;
}

Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm_ = reinterpret_cast<intptr_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}

Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm_ = reinterpret_cast<intptr_t>(value);
  rmode_ = kRelocInfo_NONEPTR;
}

Operand::Operand(Register rm) {
  rm_ = rm;
  rmode_ = kRelocInfo_NONEPTR;  // PPC - why doesn't ARM do this?
}

void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
}

void Assembler::TrackBranch() {
  DCHECK(!trampoline_emitted_);
  int count = tracked_branch_count_++;
  if (count == 0) {
    // We leave space (kMaxBlockTrampolineSectionSize)
    // for BlockTrampolinePoolScope buffer.
    next_trampoline_check_ =
        pc_offset() + kMaxCondBranchReach - kMaxBlockTrampolineSectionSize;
  } else {
    next_trampoline_check_ -= kTrampolineSlotsSize;
  }
}

void Assembler::UntrackBranch() {
  DCHECK(!trampoline_emitted_);
  DCHECK(tracked_branch_count_ > 0);
  int count = --tracked_branch_count_;
  if (count == 0) {
    // Reset
    next_trampoline_check_ = kMaxInt;
  } else {
    next_trampoline_check_ += kTrampolineSlotsSize;
  }
}

void Assembler::CheckTrampolinePoolQuick() {
  if (pc_offset() >= next_trampoline_check_) {
    CheckTrampolinePool();
  }
}

void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  CheckTrampolinePoolQuick();
}

bool Operand::is_reg() const { return rm_.is_valid(); }


// Fetch the 32bit value from the FIXED_SEQUENCE lis/ori
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  if (FLAG_enable_embedded_constant_pool && constant_pool) {
    ConstantPoolEntry::Access access;
    if (IsConstantPoolLoadStart(pc, &access))
      return Memory::Address_at(target_constant_pool_address_at(
          pc, constant_pool, access, ConstantPoolEntry::INTPTR));
  }

  Instr instr1 = instr_at(pc);
  Instr instr2 = instr_at(pc + kInstrSize);
  // Interpret 2 instructions generated by lis/ori
  if (IsLis(instr1) && IsOri(instr2)) {
#if V8_TARGET_ARCH_PPC64
    Instr instr4 = instr_at(pc + (3 * kInstrSize));
    Instr instr5 = instr_at(pc + (4 * kInstrSize));
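    // The instruction at pc + 2 * kInstrSize (the shift in the five-instruction
    // 64-bit mov sequence) carries no address bits, so it is not read here.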
    // Assemble the 64 bit value.
    uint64_t hi = (static_cast<uint32_t>((instr1 & kImm16Mask) << 16) |
                   static_cast<uint32_t>(instr2 & kImm16Mask));
    uint64_t lo = (static_cast<uint32_t>((instr4 & kImm16Mask) << 16) |
                   static_cast<uint32_t>(instr5 & kImm16Mask));
    return reinterpret_cast<Address>((hi << 32) | lo);
#else
    // Assemble the 32 bit value.
    return reinterpret_cast<Address>(((instr1 & kImm16Mask) << 16) |
                                     (instr2 & kImm16Mask));
#endif
  }

  UNREACHABLE();
  return NULL;
}


#if V8_TARGET_ARCH_PPC64
const int kLoadIntptrOpcode = LD;
#else
const int kLoadIntptrOpcode = LWZ;
#endif
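// Intptr-sized constant pool entries are loaded with ld (PPC64) or lwz (PPC32);
// double entries are loaded with lfd (see the LFD check in
// IsConstantPoolLoadStart below).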

// Constant pool load sequence detection:
// 1) REGULAR access:
//    load <dst>, kConstantPoolRegister + <offset>
//
// 2) OVERFLOWED access:
//    addis <scratch>, kConstantPoolRegister, <offset_high>
//    load <dst>, <scratch> + <offset_low>
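//
// The load's displacement field is a signed 16-bit immediate, so REGULAR
// access covers offsets that fit in int16; anything larger is OVERFLOWED and
// needs the extra addis to supply the high halfword.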
bool Assembler::IsConstantPoolLoadStart(Address pc,
                                        ConstantPoolEntry::Access* access) {
  Instr instr = instr_at(pc);
  int opcode = instr & kOpcodeMask;
  if (!GetRA(instr).is(kConstantPoolRegister)) return false;
  bool overflowed = (opcode == ADDIS);
#ifdef DEBUG
  if (overflowed) {
    opcode = instr_at(pc + kInstrSize) & kOpcodeMask;
  }
  DCHECK(opcode == kLoadIntptrOpcode || opcode == LFD);
#endif
  if (access) {
    *access = (overflowed ? ConstantPoolEntry::OVERFLOWED
                          : ConstantPoolEntry::REGULAR);
  }
  return true;
}


bool Assembler::IsConstantPoolLoadEnd(Address pc,
                                      ConstantPoolEntry::Access* access) {
  Instr instr = instr_at(pc);
  int opcode = instr & kOpcodeMask;
  bool overflowed = false;
  if (!(opcode == kLoadIntptrOpcode || opcode == LFD)) return false;
  if (!GetRA(instr).is(kConstantPoolRegister)) {
    instr = instr_at(pc - kInstrSize);
    opcode = instr & kOpcodeMask;
    if ((opcode != ADDIS) || !GetRA(instr).is(kConstantPoolRegister)) {
      return false;
    }
    overflowed = true;
  }
  if (access) {
    *access = (overflowed ? ConstantPoolEntry::OVERFLOWED
                          : ConstantPoolEntry::REGULAR);
  }
  return true;
}


int Assembler::GetConstantPoolOffset(Address pc,
                                     ConstantPoolEntry::Access access,
                                     ConstantPoolEntry::Type type) {
  bool overflowed = (access == ConstantPoolEntry::OVERFLOWED);
#ifdef DEBUG
  ConstantPoolEntry::Access access_check =
      static_cast<ConstantPoolEntry::Access>(-1);
  DCHECK(IsConstantPoolLoadStart(pc, &access_check));
  DCHECK(access_check == access);
#endif
  int offset;
  if (overflowed) {
    offset = (instr_at(pc) & kImm16Mask) << 16;
    offset += SIGN_EXT_IMM16(instr_at(pc + kInstrSize) & kImm16Mask);
    DCHECK(!is_int16(offset));
  } else {
    offset = SIGN_EXT_IMM16((instr_at(pc) & kImm16Mask));
  }
  return offset;
}


void Assembler::PatchConstantPoolAccessInstruction(
    int pc_offset, int offset, ConstantPoolEntry::Access access,
    ConstantPoolEntry::Type type) {
  Address pc = buffer_ + pc_offset;
  bool overflowed = (access == ConstantPoolEntry::OVERFLOWED);
  CHECK(overflowed != is_int16(offset));
#ifdef DEBUG
  ConstantPoolEntry::Access access_check =
      static_cast<ConstantPoolEntry::Access>(-1);
  DCHECK(IsConstantPoolLoadStart(pc, &access_check));
  DCHECK(access_check == access);
#endif
  if (overflowed) {
    int hi_word = static_cast<int>(offset >> 16);
    int lo_word = static_cast<int>(offset & 0xffff);
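    // The load sign-extends its 16-bit displacement; if bit 15 of lo_word is
    // set, that sign extension subtracts 0x10000, so compensate by adding one
    // to the addis halfword.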
    if (lo_word & 0x8000) hi_word++;

    Instr instr1 = instr_at(pc);
    Instr instr2 = instr_at(pc + kInstrSize);
    instr1 &= ~kImm16Mask;
    instr1 |= (hi_word & kImm16Mask);
    instr2 &= ~kImm16Mask;
    instr2 |= (lo_word & kImm16Mask);
    instr_at_put(pc, instr1);
    instr_at_put(pc + kInstrSize, instr2);
  } else {
    Instr instr = instr_at(pc);
    instr &= ~kImm16Mask;
    instr |= (offset & kImm16Mask);
    instr_at_put(pc, instr);
  }
}


Address Assembler::target_constant_pool_address_at(
    Address pc, Address constant_pool, ConstantPoolEntry::Access access,
    ConstantPoolEntry::Type type) {
  Address addr = constant_pool;
  DCHECK(addr);
  addr += GetConstantPoolOffset(pc, access, type);
  return addr;
}


// This sets the branch destination (which gets loaded at the call address).
// This is for calls and branches within generated code. The serializer
// has already deserialized the mov instructions etc.
// There is a FIXED_SEQUENCE assumption here
void Assembler::deserialization_set_special_target_at(
    Isolate* isolate, Address instruction_payload, Code* code, Address target) {
  set_target_address_at(isolate, instruction_payload, code, target);
}

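// Internal references encoded as a mov sequence are patched through
// set_target_address_at; plain internal references (jump table entries) are
// absolute words and are written directly.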
void Assembler::deserialization_set_target_internal_reference_at(
    Isolate* isolate, Address pc, Address target, RelocInfo::Mode mode) {
  if (RelocInfo::IsInternalReferenceEncoded(mode)) {
    Code* code = NULL;
    set_target_address_at(isolate, pc, code, target, SKIP_ICACHE_FLUSH);
  } else {
    Memory::Address_at(pc) = target;
  }
}


// This code assumes the FIXED_SEQUENCE of lis/ori
void Assembler::set_target_address_at(Isolate* isolate, Address pc,
                                      Address constant_pool, Address target,
                                      ICacheFlushMode icache_flush_mode) {
  if (FLAG_enable_embedded_constant_pool && constant_pool) {
    ConstantPoolEntry::Access access;
    if (IsConstantPoolLoadStart(pc, &access)) {
      Memory::Address_at(target_constant_pool_address_at(
          pc, constant_pool, access, ConstantPoolEntry::INTPTR)) = target;
      return;
    }
  }

  Instr instr1 = instr_at(pc);
  Instr instr2 = instr_at(pc + kInstrSize);
  // Interpret 2 instructions generated by lis/ori
  if (IsLis(instr1) && IsOri(instr2)) {
#if V8_TARGET_ARCH_PPC64
    Instr instr4 = instr_at(pc + (3 * kInstrSize));
    Instr instr5 = instr_at(pc + (4 * kInstrSize));
    // Needs to be fixed up when mov changes to handle 64-bit values.
    uint32_t* p = reinterpret_cast<uint32_t*>(pc);
    uintptr_t itarget = reinterpret_cast<uintptr_t>(target);

    instr5 &= ~kImm16Mask;
    instr5 |= itarget & kImm16Mask;
    itarget = itarget >> 16;

    instr4 &= ~kImm16Mask;
    instr4 |= itarget & kImm16Mask;
    itarget = itarget >> 16;

    instr2 &= ~kImm16Mask;
    instr2 |= itarget & kImm16Mask;
    itarget = itarget >> 16;

    instr1 &= ~kImm16Mask;
    instr1 |= itarget & kImm16Mask;
    itarget = itarget >> 16;

    *p = instr1;
    *(p + 1) = instr2;
    *(p + 3) = instr4;
    *(p + 4) = instr5;
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(isolate, p, 5 * kInstrSize);
    }
#else
    uint32_t* p = reinterpret_cast<uint32_t*>(pc);
    uint32_t itarget = reinterpret_cast<uint32_t>(target);
    int lo_word = itarget & kImm16Mask;
    int hi_word = itarget >> 16;
    instr1 &= ~kImm16Mask;
    instr1 |= hi_word;
    instr2 &= ~kImm16Mask;
    instr2 |= lo_word;

    *p = instr1;
    *(p + 1) = instr2;
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(isolate, p, 2 * kInstrSize);
    }
#endif
    return;
  }
  UNREACHABLE();
}
}  // namespace internal
}  // namespace v8

#endif  // V8_PPC_ASSEMBLER_PPC_INL_H_