// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_ARM64_ASSEMBLER_ARM64_INL_H_
#define V8_ARM64_ASSEMBLER_ARM64_INL_H_

#include "src/arm64/assembler-arm64.h"
#include "src/assembler.h"
#include "src/debug/debug.h"


namespace v8 {
namespace internal {


bool CpuFeatures::SupportsCrankshaft() { return true; }


void RelocInfo::apply(intptr_t delta) {
  // On arm64 only internal references need extra work.
  DCHECK(RelocInfo::IsInternalReference(rmode_));

  // Absolute code pointer inside code object moves with the code object.
  intptr_t* p = reinterpret_cast<intptr_t*>(pc_);
  *p += delta;  // Relocate entry.
}


void RelocInfo::set_target_address(Address target,
                                   WriteBarrierMode write_barrier_mode,
                                   ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(isolate_, pc_, host_, target,
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL &&
      IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


inline int CPURegister::code() const {
  DCHECK(IsValid());
  return reg_code;
}


inline CPURegister::RegisterType CPURegister::type() const {
  DCHECK(IsValidOrNone());
  return reg_type;
}


inline RegList CPURegister::Bit() const {
  DCHECK(static_cast<size_t>(reg_code) < (sizeof(RegList) * kBitsPerByte));
  return IsValid() ? 1UL << reg_code : 0;
}


inline int CPURegister::SizeInBits() const {
  DCHECK(IsValid());
  return reg_size;
}


inline int CPURegister::SizeInBytes() const {
  DCHECK(IsValid());
  DCHECK(SizeInBits() % 8 == 0);
  return reg_size / 8;
}


inline bool CPURegister::Is32Bits() const {
  DCHECK(IsValid());
  return reg_size == 32;
}


inline bool CPURegister::Is64Bits() const {
  DCHECK(IsValid());
  return reg_size == 64;
}


inline bool CPURegister::IsValid() const {
  if (IsValidRegister() || IsValidFPRegister()) {
    DCHECK(!IsNone());
    return true;
  } else {
    DCHECK(IsNone());
    return false;
  }
}


inline bool CPURegister::IsValidRegister() const {
  return IsRegister() &&
         ((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)) &&
         ((reg_code < kNumberOfRegisters) || (reg_code == kSPRegInternalCode));
}


inline bool CPURegister::IsValidFPRegister() const {
  return IsFPRegister() &&
         ((reg_size == kSRegSizeInBits) || (reg_size == kDRegSizeInBits)) &&
         (reg_code < kNumberOfFPRegisters);
}


inline bool CPURegister::IsNone() const {
  // kNoRegister types should always have size 0 and code 0.
  DCHECK((reg_type != kNoRegister) || (reg_code == 0));
  DCHECK((reg_type != kNoRegister) || (reg_size == 0));

  return reg_type == kNoRegister;
}


inline bool CPURegister::Is(const CPURegister& other) const {
  DCHECK(IsValidOrNone() && other.IsValidOrNone());
  return Aliases(other) && (reg_size == other.reg_size);
}


inline bool CPURegister::Aliases(const CPURegister& other) const {
  DCHECK(IsValidOrNone() && other.IsValidOrNone());
  return (reg_code == other.reg_code) && (reg_type == other.reg_type);
}


inline bool CPURegister::IsRegister() const {
  return reg_type == kRegister;
}


inline bool CPURegister::IsFPRegister() const {
  return reg_type == kFPRegister;
}


inline bool CPURegister::IsSameSizeAndType(const CPURegister& other) const {
  return (reg_size == other.reg_size) && (reg_type == other.reg_type);
}


inline bool CPURegister::IsValidOrNone() const {
  return IsValid() || IsNone();
}


inline bool CPURegister::IsZero() const {
  DCHECK(IsValid());
  return IsRegister() && (reg_code == kZeroRegCode);
}


inline bool CPURegister::IsSP() const {
  DCHECK(IsValid());
  return IsRegister() && (reg_code == kSPRegInternalCode);
}


inline void CPURegList::Combine(const CPURegList& other) {
  DCHECK(IsValid());
  DCHECK(other.type() == type_);
  DCHECK(other.RegisterSizeInBits() == size_);
  list_ |= other.list();
}


inline void CPURegList::Remove(const CPURegList& other) {
  DCHECK(IsValid());
  if (other.type() == type_) {
    list_ &= ~other.list();
  }
}


inline void CPURegList::Combine(const CPURegister& other) {
  DCHECK(other.type() == type_);
  DCHECK(other.SizeInBits() == size_);
  Combine(other.code());
}


inline void CPURegList::Remove(const CPURegister& other1,
                               const CPURegister& other2,
                               const CPURegister& other3,
                               const CPURegister& other4) {
  if (!other1.IsNone() && (other1.type() == type_)) Remove(other1.code());
  if (!other2.IsNone() && (other2.type() == type_)) Remove(other2.code());
  if (!other3.IsNone() && (other3.type() == type_)) Remove(other3.code());
  if (!other4.IsNone() && (other4.type() == type_)) Remove(other4.code());
}


inline void CPURegList::Combine(int code) {
  DCHECK(IsValid());
  DCHECK(CPURegister::Create(code, size_, type_).IsValid());
  list_ |= (1UL << code);
}


inline void CPURegList::Remove(int code) {
  DCHECK(IsValid());
  DCHECK(CPURegister::Create(code, size_, type_).IsValid());
  list_ &= ~(1UL << code);
}


inline Register Register::XRegFromCode(unsigned code) {
  if (code == kSPRegInternalCode) {
    return csp;
  } else {
    DCHECK(code < kNumberOfRegisters);
    return Register::Create(code, kXRegSizeInBits);
  }
}
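
// Illustrative note (added; not in the original source): in the ARM64
// encoding, register number 31 means either the zero register or the stack
// pointer depending on the instruction. The assembler therefore gives csp a
// separate internal code (kSPRegInternalCode) so the two stay distinct until
// encoding time, which is why XRegFromCode/WRegFromCode special-case it.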


inline Register Register::WRegFromCode(unsigned code) {
  if (code == kSPRegInternalCode) {
    return wcsp;
  } else {
    DCHECK(code < kNumberOfRegisters);
    return Register::Create(code, kWRegSizeInBits);
  }
}


inline FPRegister FPRegister::SRegFromCode(unsigned code) {
  DCHECK(code < kNumberOfFPRegisters);
  return FPRegister::Create(code, kSRegSizeInBits);
}


inline FPRegister FPRegister::DRegFromCode(unsigned code) {
  DCHECK(code < kNumberOfFPRegisters);
  return FPRegister::Create(code, kDRegSizeInBits);
}


inline Register CPURegister::W() const {
  DCHECK(IsValidRegister());
  return Register::WRegFromCode(reg_code);
}


inline Register CPURegister::X() const {
  DCHECK(IsValidRegister());
  return Register::XRegFromCode(reg_code);
}


inline FPRegister CPURegister::S() const {
  DCHECK(IsValidFPRegister());
  return FPRegister::SRegFromCode(reg_code);
}


inline FPRegister CPURegister::D() const {
  DCHECK(IsValidFPRegister());
  return FPRegister::DRegFromCode(reg_code);
}


// Immediate.
// Default initializer is for int types.
template<typename T>
struct ImmediateInitializer {
  static const bool kIsIntType = true;
  static inline RelocInfo::Mode rmode_for(T) {
    return sizeof(T) == 8 ? RelocInfo::NONE64 : RelocInfo::NONE32;
  }
  static inline int64_t immediate_for(T t) {
    STATIC_ASSERT(sizeof(T) <= 8);
    return t;
  }
};

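// Illustrative note (added; not in the original source): with the default
// initializer above, Immediate(42) (a 4-byte int) gets RelocInfo::NONE32,
// while Immediate(int64_t{1} << 40) gets RelocInfo::NONE64. The
// specializations below handle Smi* and ExternalReference, whose payloads
// are pointer-sized and always use 64-bit relocation modes.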
template<>
struct ImmediateInitializer<Smi*> {
  static const bool kIsIntType = false;
  static inline RelocInfo::Mode rmode_for(Smi* t) {
    return RelocInfo::NONE64;
  }
  static inline int64_t immediate_for(Smi* t) {
    return reinterpret_cast<int64_t>(t);
  }
};


template<>
struct ImmediateInitializer<ExternalReference> {
  static const bool kIsIntType = false;
  static inline RelocInfo::Mode rmode_for(ExternalReference t) {
    return RelocInfo::EXTERNAL_REFERENCE;
  }
  static inline int64_t immediate_for(ExternalReference t) {
    return reinterpret_cast<int64_t>(t.address());
  }
};


template<typename T>
Immediate::Immediate(Handle<T> value) {
  InitializeHandle(value);
}


template<typename T>
Immediate::Immediate(T t)
    : value_(ImmediateInitializer<T>::immediate_for(t)),
      rmode_(ImmediateInitializer<T>::rmode_for(t)) {}


template<typename T>
Immediate::Immediate(T t, RelocInfo::Mode rmode)
    : value_(ImmediateInitializer<T>::immediate_for(t)),
      rmode_(rmode) {
  STATIC_ASSERT(ImmediateInitializer<T>::kIsIntType);
}


// Operand.
template<typename T>
Operand::Operand(Handle<T> value) : immediate_(value), reg_(NoReg) {}


template<typename T>
Operand::Operand(T t) : immediate_(t), reg_(NoReg) {}


template<typename T>
Operand::Operand(T t, RelocInfo::Mode rmode)
    : immediate_(t, rmode),
      reg_(NoReg) {}


Operand::Operand(Register reg, Shift shift, unsigned shift_amount)
    : immediate_(0),
      reg_(reg),
      shift_(shift),
      extend_(NO_EXTEND),
      shift_amount_(shift_amount) {
  DCHECK(reg.Is64Bits() || (shift_amount < kWRegSizeInBits));
  DCHECK(reg.Is32Bits() || (shift_amount < kXRegSizeInBits));
  DCHECK(!reg.IsSP());
}


Operand::Operand(Register reg, Extend extend, unsigned shift_amount)
    : immediate_(0),
      reg_(reg),
      shift_(NO_SHIFT),
      extend_(extend),
      shift_amount_(shift_amount) {
  DCHECK(reg.IsValid());
  DCHECK(shift_amount <= 4);
  DCHECK(!reg.IsSP());

  // Extend modes SXTX and UXTX require a 64-bit register.
  DCHECK(reg.Is64Bits() || ((extend != SXTX) && (extend != UXTX)));
}


bool Operand::IsImmediate() const {
  return reg_.Is(NoReg);
}


bool Operand::IsShiftedRegister() const {
  return reg_.IsValid() && (shift_ != NO_SHIFT);
}


bool Operand::IsExtendedRegister() const {
  return reg_.IsValid() && (extend_ != NO_EXTEND);
}


bool Operand::IsZero() const {
  if (IsImmediate()) {
    return ImmediateValue() == 0;
  } else {
    return reg().IsZero();
  }
}


Operand Operand::ToExtendedRegister() const {
  DCHECK(IsShiftedRegister());
  DCHECK((shift_ == LSL) && (shift_amount_ <= 4));
  return Operand(reg_, reg_.Is64Bits() ? UXTX : UXTW, shift_amount_);
}
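
// Illustrative note (added; not in the original source): ToExtendedRegister()
// relies on the architectural equivalence of "LSL #n" and "UXTX #n" /
// "UXTW #n" for n <= 4. For example, an operand built as Operand(w1, LSL, 2)
// can be rewritten as Operand(w1, UXTW, 2) when an instruction form only
// accepts an extended-register operand.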


Immediate Operand::immediate() const {
  DCHECK(IsImmediate());
  return immediate_;
}


int64_t Operand::ImmediateValue() const {
  DCHECK(IsImmediate());
  return immediate_.value();
}


Register Operand::reg() const {
  DCHECK(IsShiftedRegister() || IsExtendedRegister());
  return reg_;
}


Shift Operand::shift() const {
  DCHECK(IsShiftedRegister());
  return shift_;
}


Extend Operand::extend() const {
  DCHECK(IsExtendedRegister());
  return extend_;
}


unsigned Operand::shift_amount() const {
  DCHECK(IsShiftedRegister() || IsExtendedRegister());
  return shift_amount_;
}


Operand Operand::UntagSmi(Register smi) {
  STATIC_ASSERT(kXRegSizeInBits == static_cast<unsigned>(kSmiShift +
                                                         kSmiValueSize));
  DCHECK(smi.Is64Bits());
  return Operand(smi, ASR, kSmiShift);
}


Operand Operand::UntagSmiAndScale(Register smi, int scale) {
  STATIC_ASSERT(kXRegSizeInBits == static_cast<unsigned>(kSmiShift +
                                                         kSmiValueSize));
  DCHECK(smi.Is64Bits());
  DCHECK((scale >= 0) && (scale <= (64 - kSmiValueSize)));
  if (scale > kSmiShift) {
    return Operand(smi, LSL, scale - kSmiShift);
  } else if (scale < kSmiShift) {
    return Operand(smi, ASR, kSmiShift - scale);
  }
  return Operand(smi);
}
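
// Worked example (added for clarity; assumes the usual arm64 Smi layout with
// kSmiShift == 32 and kSmiValueSize == 32):
//   UntagSmiAndScale(x0, 3)  => Operand(x0, ASR, 29),
//     since (smi >> 32) << 3 == smi >> 29, and
//   UntagSmiAndScale(x0, 35) => Operand(x0, LSL, 3),
//     since (smi >> 32) << 35 == smi << 3.
// The untag and the scale fold into a single shift on the operand.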


MemOperand::MemOperand()
    : base_(NoReg), regoffset_(NoReg), offset_(0), addrmode_(Offset),
      shift_(NO_SHIFT), extend_(NO_EXTEND), shift_amount_(0) {
}


MemOperand::MemOperand(Register base, int64_t offset, AddrMode addrmode)
    : base_(base), regoffset_(NoReg), offset_(offset), addrmode_(addrmode),
      shift_(NO_SHIFT), extend_(NO_EXTEND), shift_amount_(0) {
  DCHECK(base.Is64Bits() && !base.IsZero());
}


MemOperand::MemOperand(Register base,
                       Register regoffset,
                       Extend extend,
                       unsigned shift_amount)
    : base_(base), regoffset_(regoffset), offset_(0), addrmode_(Offset),
      shift_(NO_SHIFT), extend_(extend), shift_amount_(shift_amount) {
  DCHECK(base.Is64Bits() && !base.IsZero());
  DCHECK(!regoffset.IsSP());
  DCHECK((extend == UXTW) || (extend == SXTW) || (extend == SXTX));

  // SXTX extend mode requires a 64-bit offset register.
  DCHECK(regoffset.Is64Bits() || (extend != SXTX));
}


MemOperand::MemOperand(Register base,
                       Register regoffset,
                       Shift shift,
                       unsigned shift_amount)
    : base_(base), regoffset_(regoffset), offset_(0), addrmode_(Offset),
      shift_(shift), extend_(NO_EXTEND), shift_amount_(shift_amount) {
  DCHECK(base.Is64Bits() && !base.IsZero());
  DCHECK(regoffset.Is64Bits() && !regoffset.IsSP());
  DCHECK(shift == LSL);
}


MemOperand::MemOperand(Register base, const Operand& offset, AddrMode addrmode)
    : base_(base), addrmode_(addrmode) {
  DCHECK(base.Is64Bits() && !base.IsZero());

  if (offset.IsImmediate()) {
    offset_ = offset.ImmediateValue();

    regoffset_ = NoReg;
  } else if (offset.IsShiftedRegister()) {
    DCHECK(addrmode == Offset);

    regoffset_ = offset.reg();
    shift_ = offset.shift();
    shift_amount_ = offset.shift_amount();

    extend_ = NO_EXTEND;
    offset_ = 0;

    // These assertions match those in the shifted-register constructor.
    DCHECK(regoffset_.Is64Bits() && !regoffset_.IsSP());
    DCHECK(shift_ == LSL);
  } else {
    DCHECK(offset.IsExtendedRegister());
    DCHECK(addrmode == Offset);

    regoffset_ = offset.reg();
    extend_ = offset.extend();
    shift_amount_ = offset.shift_amount();

    shift_ = NO_SHIFT;
    offset_ = 0;

    // These assertions match those in the extended-register constructor.
    DCHECK(!regoffset_.IsSP());
    DCHECK((extend_ == UXTW) || (extend_ == SXTW) || (extend_ == SXTX));
    DCHECK((regoffset_.Is64Bits() || (extend_ != SXTX)));
  }
}

bool MemOperand::IsImmediateOffset() const {
  return (addrmode_ == Offset) && regoffset_.Is(NoReg);
}


bool MemOperand::IsRegisterOffset() const {
  return (addrmode_ == Offset) && !regoffset_.Is(NoReg);
}


bool MemOperand::IsPreIndex() const {
  return addrmode_ == PreIndex;
}


bool MemOperand::IsPostIndex() const {
  return addrmode_ == PostIndex;
}

Operand MemOperand::OffsetAsOperand() const {
  if (IsImmediateOffset()) {
    return offset();
  } else {
    DCHECK(IsRegisterOffset());
    if (extend() == NO_EXTEND) {
      return Operand(regoffset(), shift(), shift_amount());
    } else {
      return Operand(regoffset(), extend(), shift_amount());
    }
  }
}


void Assembler::Unreachable() {
#ifdef USE_SIMULATOR
  debug("UNREACHABLE", __LINE__, BREAK);
#else
  // Crash by branching to 0. lr now points near the fault.
  Emit(BLR | Rn(xzr));
#endif
}


Address Assembler::target_pointer_address_at(Address pc) {
  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  DCHECK(instr->IsLdrLiteralX());
  return reinterpret_cast<Address>(instr->ImmPCOffsetTarget());
}


// Read/Modify the code target address in the branch/call instruction at pc.
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  return Memory::Address_at(target_pointer_address_at(pc));
}


Address Assembler::target_address_at(Address pc, Code* code) {
  Address constant_pool = code ? code->constant_pool() : NULL;
  return target_address_at(pc, constant_pool);
}
Address Assembler::target_address_from_return_address(Address pc) {
  // Given the return address that execution will resume at after the call,
  // returns the address of the load-literal instruction from which the call
  // target can be read.
  // The call sequence on ARM64 is:
  //  ldr  ip0, #...  @ load from literal pool
  //  blr  ip0
  Address candidate = pc - 2 * kInstructionSize;
  Instruction* instr = reinterpret_cast<Instruction*>(candidate);
  USE(instr);
  DCHECK(instr->IsLdrLiteralX());
  return candidate;
}


Address Assembler::return_address_from_call_start(Address pc) {
  // The call, generated by MacroAssembler::Call, is one of two possible
  // sequences:
  //
  // Without relocation:
  //  movz  temp, #(target & 0x000000000000ffff)
  //  movk  temp, #(target & 0x00000000ffff0000)
  //  movk  temp, #(target & 0x0000ffff00000000)
  //  blr   temp
  //
  // With relocation:
  //  ldr   temp, =target
  //  blr   temp
  //
  // The return address is immediately after the blr instruction in both cases,
  // so it can be found by adding the call size to the address at the start of
  // the call sequence.
  STATIC_ASSERT(Assembler::kCallSizeWithoutRelocation == 4 * kInstructionSize);
  STATIC_ASSERT(Assembler::kCallSizeWithRelocation == 2 * kInstructionSize);

  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  if (instr->IsMovz()) {
    // Verify the instruction sequence.
    DCHECK(instr->following(1)->IsMovk());
    DCHECK(instr->following(2)->IsMovk());
    DCHECK(instr->following(3)->IsBranchAndLinkToRegister());
    return pc + Assembler::kCallSizeWithoutRelocation;
  } else {
    // Verify the instruction sequence.
    DCHECK(instr->IsLdrLiteralX());
    DCHECK(instr->following(1)->IsBranchAndLinkToRegister());
    return pc + Assembler::kCallSizeWithRelocation;
  }
}


void Assembler::deserialization_set_special_target_at(
    Isolate* isolate, Address constant_pool_entry, Code* code,
    Address target) {
  Memory::Address_at(constant_pool_entry) = target;
}


void Assembler::deserialization_set_target_internal_reference_at(
    Isolate* isolate, Address pc, Address target, RelocInfo::Mode mode) {
  Memory::Address_at(pc) = target;
}


void Assembler::set_target_address_at(Isolate* isolate, Address pc,
                                      Address constant_pool, Address target,
                                      ICacheFlushMode icache_flush_mode) {
  Memory::Address_at(target_pointer_address_at(pc)) = target;
  // Intuitively, we would think it is necessary to always flush the
  // instruction cache after patching a target address in the code as follows:
  //   Assembler::FlushICache(isolate(), pc, sizeof(target));
  // However, on ARM64 only the constant pool entry is patched for embedded
  // constants of the form:
  //  ldr  ip, [pc, #...]
  // Since the instruction that loads from the constant pool remains
  // unchanged, a flush is not required.
}


void Assembler::set_target_address_at(Isolate* isolate, Address pc, Code* code,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  Address constant_pool = code ? code->constant_pool() : NULL;
  set_target_address_at(isolate, pc, constant_pool, target, icache_flush_mode);
}


int RelocInfo::target_address_size() {
  return kPointerSize;
}


Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
         || rmode_ == EMBEDDED_OBJECT
         || rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_pointer_address_at(pc_);
}


Address RelocInfo::constant_pool_entry_address() {
  DCHECK(IsInConstantPool());
  return Assembler::target_pointer_address_at(pc_);
}


Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_, host_)));
}


void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(isolate_, pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}


Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_internal_reference() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return Memory::Address_at(pc_);
}


Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target) {
    set_target_address(target, write_barrier_mode, icache_flush_mode);
  }
}


Handle<Cell> RelocInfo::target_cell_handle() {
  UNIMPLEMENTED();
  Cell* null_cell = NULL;
  return Handle<Cell>(null_cell);
}


Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  UNIMPLEMENTED();
}


static const int kNoCodeAgeSequenceLength = 5 * kInstructionSize;
static const int kCodeAgeStubEntryOffset = 3 * kInstructionSize;


Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on ARM64.
  return Handle<Object>();
}


Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  // Read the stub entry point from the code age sequence.
  Address stub_entry_address = pc_ + kCodeAgeStubEntryOffset;
  return Code::GetCodeFromTargetAddress(
      Memory::Address_at(stub_entry_address));
}


void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  DCHECK(!Code::IsYoungSequence(stub->GetIsolate(), pc_));
  // Overwrite the stub entry point in the code age sequence. This is loaded as
  // a literal so there is no need to call FlushICache here.
  Address stub_entry_address = pc_ + kCodeAgeStubEntryOffset;
  Memory::Address_at(stub_entry_address) = stub->instruction_start();
}
Address RelocInfo::debug_call_address() {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  // For patched debug break slot sequences, the RelocInfo points to the
  // load-literal instruction that loads the call address.
  STATIC_ASSERT(Assembler::kPatchDebugBreakSlotAddressOffset == 0);
  return Assembler::target_address_at(pc_, host_);
}


void RelocInfo::set_debug_call_address(Address target) {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  STATIC_ASSERT(Assembler::kPatchDebugBreakSlotAddressOffset == 0);
  Assembler::set_target_address_at(isolate_, pc_, host_, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory::Address_at(pc_) = NULL;
  } else {
    Assembler::set_target_address_at(isolate_, pc_, host_, NULL);
  }
}


bool RelocInfo::IsPatchedReturnSequence() {
  // The sequence must be:
  //  ldr  ip0, [pc, #offset]
  //  blr  ip0
  // See arm64/debug-arm64.cc DebugCodegen::PatchDebugBreakSlot
  Instruction* i1 = reinterpret_cast<Instruction*>(pc_);
  Instruction* i2 = i1->following();
  return i1->IsLdrLiteralX() && (i1->Rt() == kIp0Code) &&
         i2->IsBranchAndLinkToRegister() && (i2->Rn() == kIp0Code);
}


bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instruction* current_instr = reinterpret_cast<Instruction*>(pc_);
  return !current_instr->IsNop(Assembler::DEBUG_BREAK_NOP);
}


void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    visitor->VisitInternalReference(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


LoadStoreOp Assembler::LoadOpFor(const CPURegister& rt) {
  DCHECK(rt.IsValid());
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? LDR_x : LDR_w;
  } else {
    DCHECK(rt.IsFPRegister());
    return rt.Is64Bits() ? LDR_d : LDR_s;
  }
}


LoadStorePairOp Assembler::LoadPairOpFor(const CPURegister& rt,
                                         const CPURegister& rt2) {
  DCHECK(AreSameSizeAndType(rt, rt2));
  USE(rt2);
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? LDP_x : LDP_w;
  } else {
    DCHECK(rt.IsFPRegister());
    return rt.Is64Bits() ? LDP_d : LDP_s;
  }
}


LoadStoreOp Assembler::StoreOpFor(const CPURegister& rt) {
  DCHECK(rt.IsValid());
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? STR_x : STR_w;
  } else {
    DCHECK(rt.IsFPRegister());
    return rt.Is64Bits() ? STR_d : STR_s;
  }
}


LoadStorePairOp Assembler::StorePairOpFor(const CPURegister& rt,
                                          const CPURegister& rt2) {
  DCHECK(AreSameSizeAndType(rt, rt2));
  USE(rt2);
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? STP_x : STP_w;
  } else {
    DCHECK(rt.IsFPRegister());
    return rt.Is64Bits() ? STP_d : STP_s;
  }
}


LoadLiteralOp Assembler::LoadLiteralOpFor(const CPURegister& rt) {
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? LDR_x_lit : LDR_w_lit;
  } else {
    DCHECK(rt.IsFPRegister());
    return rt.Is64Bits() ? LDR_d_lit : LDR_s_lit;
  }
}


int Assembler::LinkAndGetInstructionOffsetTo(Label* label) {
  DCHECK(kStartOfLabelLinkChain == 0);
  int offset = LinkAndGetByteOffsetTo(label);
  DCHECK(IsAligned(offset, kInstructionSize));
  return offset >> kInstructionSizeLog2;
}


Instr Assembler::Flags(FlagsUpdate S) {
  if (S == SetFlags) {
    return 1 << FlagsUpdate_offset;
  } else if (S == LeaveFlags) {
    return 0 << FlagsUpdate_offset;
  }
  UNREACHABLE();
  return 0;
}


Instr Assembler::Cond(Condition cond) {
  return cond << Condition_offset;
}


Instr Assembler::ImmPCRelAddress(int imm21) {
  CHECK(is_int21(imm21));
  Instr imm = static_cast<Instr>(truncate_to_int21(imm21));
  Instr immhi = (imm >> ImmPCRelLo_width) << ImmPCRelHi_offset;
  Instr immlo = imm << ImmPCRelLo_offset;
  return (immhi & ImmPCRelHi_mask) | (immlo & ImmPCRelLo_mask);
}
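
// Illustrative note (added; not in the original source): PC-relative
// instructions such as ADR split the 21-bit offset across two fields: the
// low ImmPCRelLo_width bits (immlo) and the remaining high bits (immhi).
// For example, assuming ImmPCRelLo_width == 2, an offset of 0b101 encodes
// immlo == 0b01 and immhi == 0b1.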


Instr Assembler::ImmUncondBranch(int imm26) {
  CHECK(is_int26(imm26));
  return truncate_to_int26(imm26) << ImmUncondBranch_offset;
}


Instr Assembler::ImmCondBranch(int imm19) {
  CHECK(is_int19(imm19));
  return truncate_to_int19(imm19) << ImmCondBranch_offset;
}


Instr Assembler::ImmCmpBranch(int imm19) {
  CHECK(is_int19(imm19));
  return truncate_to_int19(imm19) << ImmCmpBranch_offset;
}


Instr Assembler::ImmTestBranch(int imm14) {
  CHECK(is_int14(imm14));
  return truncate_to_int14(imm14) << ImmTestBranch_offset;
}


Instr Assembler::ImmTestBranchBit(unsigned bit_pos) {
  DCHECK(is_uint6(bit_pos));
  // Subtract five from the shift offset, as we need bit 5 from bit_pos.
  unsigned b5 = bit_pos << (ImmTestBranchBit5_offset - 5);
  unsigned b40 = bit_pos << ImmTestBranchBit40_offset;
  b5 &= ImmTestBranchBit5_mask;
  b40 &= ImmTestBranchBit40_mask;
  return b5 | b40;
}
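
// Worked example (added for clarity): TBZ/TBNZ store the 6-bit tested bit
// position in two fields, b5 (bit 5 of bit_pos, which also distinguishes W
// from X operands) and b40 (bits 4:0). For bit_pos == 33 (0b100001), b5
// encodes 1 and b40 encodes 0b00001.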


Instr Assembler::SF(Register rd) {
  return rd.Is64Bits() ? SixtyFourBits : ThirtyTwoBits;
}


Instr Assembler::ImmAddSub(int imm) {
  DCHECK(IsImmAddSub(imm));
  if (is_uint12(imm)) {  // No shift required.
    imm <<= ImmAddSub_offset;
  } else {
    imm = ((imm >> 12) << ImmAddSub_offset) | (1 << ShiftAddSub_offset);
  }
  return imm;
}
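
// Worked example (added for clarity): add/sub immediates are 12 bits wide
// with an optional LSL #12. ImmAddSub(0x123) encodes 0x123 unshifted, while
// ImmAddSub(0x123000) encodes 0x123 with the shift bit set. A value such as
// 0x123456 fits neither form and would fail the IsImmAddSub() check.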


Instr Assembler::ImmS(unsigned imms, unsigned reg_size) {
  DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(imms)) ||
         ((reg_size == kWRegSizeInBits) && is_uint5(imms)));
  USE(reg_size);
  return imms << ImmS_offset;
}


Instr Assembler::ImmR(unsigned immr, unsigned reg_size) {
  DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(immr)) ||
         ((reg_size == kWRegSizeInBits) && is_uint5(immr)));
  USE(reg_size);
  DCHECK(is_uint6(immr));
  return immr << ImmR_offset;
}


Instr Assembler::ImmSetBits(unsigned imms, unsigned reg_size) {
  DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
  DCHECK(is_uint6(imms));
  DCHECK((reg_size == kXRegSizeInBits) || is_uint6(imms + 3));
  USE(reg_size);
  return imms << ImmSetBits_offset;
}


Instr Assembler::ImmRotate(unsigned immr, unsigned reg_size) {
  DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
  DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(immr)) ||
         ((reg_size == kWRegSizeInBits) && is_uint5(immr)));
  USE(reg_size);
  return immr << ImmRotate_offset;
}


Instr Assembler::ImmLLiteral(int imm19) {
  CHECK(is_int19(imm19));
  return truncate_to_int19(imm19) << ImmLLiteral_offset;
}


Instr Assembler::BitN(unsigned bitn, unsigned reg_size) {
  DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
  DCHECK((reg_size == kXRegSizeInBits) || (bitn == 0));
  USE(reg_size);
  return bitn << BitN_offset;
}


Instr Assembler::ShiftDP(Shift shift) {
  DCHECK(shift == LSL || shift == LSR || shift == ASR || shift == ROR);
  return shift << ShiftDP_offset;
}


Instr Assembler::ImmDPShift(unsigned amount) {
  DCHECK(is_uint6(amount));
  return amount << ImmDPShift_offset;
}


Instr Assembler::ExtendMode(Extend extend) {
  return extend << ExtendMode_offset;
}


Instr Assembler::ImmExtendShift(unsigned left_shift) {
  DCHECK(left_shift <= 4);
  return left_shift << ImmExtendShift_offset;
}


Instr Assembler::ImmCondCmp(unsigned imm) {
  DCHECK(is_uint5(imm));
  return imm << ImmCondCmp_offset;
}


Instr Assembler::Nzcv(StatusFlags nzcv) {
  return ((nzcv >> Flags_offset) & 0xf) << Nzcv_offset;
}
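
// Illustrative note (added; not in the original source): StatusFlags values
// keep the N, Z, C and V bits packed together starting at Flags_offset, so
// the expression above extracts that 4-bit NZCV pattern and repositions it
// in the condition-compare instruction's Nzcv field.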


Instr Assembler::ImmLSUnsigned(int imm12) {
  DCHECK(is_uint12(imm12));
  return imm12 << ImmLSUnsigned_offset;
}


Instr Assembler::ImmLS(int imm9) {
  DCHECK(is_int9(imm9));
  return truncate_to_int9(imm9) << ImmLS_offset;
}


Instr Assembler::ImmLSPair(int imm7, LSDataSize size) {
  DCHECK(((imm7 >> size) << size) == imm7);
  int scaled_imm7 = imm7 >> size;
  DCHECK(is_int7(scaled_imm7));
  return truncate_to_int7(scaled_imm7) << ImmLSPair_offset;
}
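
// Worked example (added for clarity): load/store-pair offsets are scaled by
// the access size before encoding. For an LDP of two X registers (size == 3,
// i.e. 8-byte accesses), a byte offset of 16 is encoded as scaled_imm7 == 2;
// a byte offset of 12 is not a multiple of 8 and fails the first DCHECK.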


Instr Assembler::ImmShiftLS(unsigned shift_amount) {
  DCHECK(is_uint1(shift_amount));
  return shift_amount << ImmShiftLS_offset;
}


Instr Assembler::ImmException(int imm16) {
  DCHECK(is_uint16(imm16));
  return imm16 << ImmException_offset;
}


Instr Assembler::ImmSystemRegister(int imm15) {
  DCHECK(is_uint15(imm15));
  return imm15 << ImmSystemRegister_offset;
}


Instr Assembler::ImmHint(int imm7) {
  DCHECK(is_uint7(imm7));
  return imm7 << ImmHint_offset;
}


Instr Assembler::ImmBarrierDomain(int imm2) {
  DCHECK(is_uint2(imm2));
  return imm2 << ImmBarrierDomain_offset;
}


Instr Assembler::ImmBarrierType(int imm2) {
  DCHECK(is_uint2(imm2));
  return imm2 << ImmBarrierType_offset;
}


LSDataSize Assembler::CalcLSDataSize(LoadStoreOp op) {
  DCHECK((SizeLS_offset + SizeLS_width) == (kInstructionSize * 8));
  return static_cast<LSDataSize>(op >> SizeLS_offset);
}
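
// Illustrative note (added; not in the original source): the DCHECK above
// verifies that the size field occupies the topmost bits of the 32-bit
// opcode, so a plain right shift by SizeLS_offset leaves exactly the
// log2 of the access size in bytes, e.g. 3 for an 8-byte access.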


Instr Assembler::ImmMoveWide(int imm) {
  DCHECK(is_uint16(imm));
  return imm << ImmMoveWide_offset;
}


Instr Assembler::ShiftMoveWide(int shift) {
  DCHECK(is_uint2(shift));
  return shift << ShiftMoveWide_offset;
}


Instr Assembler::FPType(FPRegister fd) {
  return fd.Is64Bits() ? FP64 : FP32;
}


Instr Assembler::FPScale(unsigned scale) {
  DCHECK(is_uint6(scale));
  return scale << FPScale_offset;
}


const Register& Assembler::AppropriateZeroRegFor(const CPURegister& reg) const {
  return reg.Is64Bits() ? xzr : wzr;
}


inline void Assembler::CheckBufferSpace() {
  DCHECK(pc_ < (buffer_ + buffer_size_));
  if (buffer_space() < kGap) {
    GrowBuffer();
  }
}


inline void Assembler::CheckBuffer() {
  CheckBufferSpace();
  if (pc_offset() >= next_veneer_pool_check_) {
    CheckVeneerPool(false, true);
  }
  if (pc_offset() >= next_constant_pool_check_) {
    CheckConstPool(false, true);
  }
}


TypeFeedbackId Assembler::RecordedAstId() {
  DCHECK(!recorded_ast_id_.IsNone());
  return recorded_ast_id_;
}


void Assembler::ClearRecordedAstId() {
  recorded_ast_id_ = TypeFeedbackId::None();
}


}  // namespace internal
}  // namespace v8

#endif  // V8_ARM64_ASSEMBLER_ARM64_INL_H_