// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been
// modified significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.


#ifndef V8_MIPS_ASSEMBLER_MIPS_INL_H_
#define V8_MIPS_ASSEMBLER_MIPS_INL_H_

#include "src/mips/assembler-mips.h"

#include "src/assembler.h"
#include "src/debug.h"

namespace v8 {
namespace internal {


bool CpuFeatures::SupportsCrankshaft() { return IsSupported(FPU); }


// -----------------------------------------------------------------------------
// Operand and MemOperand.

Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
  rm_ = no_reg;
  imm32_ = immediate;
  rmode_ = rmode;
}


Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}


Operand::Operand(Register rm) {
  rm_ = rm;
}


bool Operand::is_reg() const {
  return rm_.is_valid();
}


int Register::NumAllocatableRegisters() {
  return kMaxNumAllocatableRegisters;
}


int DoubleRegister::NumRegisters() {
  return FPURegister::kMaxNumRegisters;
}


int DoubleRegister::NumAllocatableRegisters() {
  return FPURegister::kMaxNumAllocatableRegisters;
}

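// On MIPS32 a double-precision value occupies an even/odd pair of
// single-precision FPU registers, so only even register codes are
// allocatable and the allocation index is half the register code.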
int FPURegister::ToAllocationIndex(FPURegister reg) {
  DCHECK(reg.code() % 2 == 0);
  DCHECK(reg.code() / 2 < kMaxNumAllocatableRegisters);
  DCHECK(reg.is_valid());
  DCHECK(!reg.is(kDoubleRegZero));
  DCHECK(!reg.is(kLithiumScratchDouble));
  return (reg.code() / 2);
}


// -----------------------------------------------------------------------------
// RelocInfo.

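// A J/JAL instruction can only reach targets inside the same 256 MB aligned
// region as the jump itself (the reach of its 26-bit, word-aligned target
// field). If relocation moves the code into a different region, the jump is
// rewritten as a load-address-and-jump-register sequence.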
void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) {
  if (IsCodeTarget(rmode_)) {
    uint32_t scope1 = (uint32_t) target_address() & ~kImm28Mask;
    uint32_t scope2 = reinterpret_cast<uint32_t>(pc_) & ~kImm28Mask;

    if (scope1 != scope2) {
      Assembler::JumpLabelToJumpRegister(pc_);
    }
  }
  if (IsInternalReference(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    byte* p = reinterpret_cast<byte*>(pc_);
    int count = Assembler::RelocateInternalReference(p, delta);
    CpuFeatures::FlushICache(p, count * sizeof(uint32_t));
  }
}


Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         rmode_ == EMBEDDED_OBJECT ||
         rmode_ == EXTERNAL_REFERENCE);
  // Read the address of the word containing the target_address in an
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like LUI/ORI where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written. In this case the target_address_address function should
  // return the end of the instructions to be patched, allowing the
  // deserializer to deserialize the instructions as raw bytes and put them in
  // place, ready to be patched with the target. After jump optimization,
  // that is the address of the instruction that follows J/JAL/JR/JALR
  // instruction.
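  // As an illustrative sketch (register choice and exact sequence are up to
  // the macro assembler), a 32-bit constant is materialized by a pair such as
  //   lui  at, high16(target)
  //   ori  at, at, low16(target)
  // so the target is spread across the immediates of two instructions rather
  // than stored as a single contiguous word.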
  return reinterpret_cast<Address>(
      pc_ + Assembler::kInstructionsFor32BitConstant * Assembler::kInstrSize);
}


Address RelocInfo::constant_pool_entry_address() {
  UNREACHABLE();
  return NULL;
}


int RelocInfo::target_address_size() {
  return Assembler::kSpecialTargetSize;
}

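// After patching in a new code target, incremental marking is told about the
// store so the referenced code object is not missed by an in-progress
// marking cycle.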
void RelocInfo::set_target_address(Address target,
                                   WriteBarrierMode write_barrier_mode,
                                   ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, host_, target, icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}

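// MIPS call sequences have a fixed shape, so the patched call target sits a
// constant offset (kCallTargetAddressOffset) before the return address.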
Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}


Address Assembler::break_address_from_return_address(Address pc) {
  return pc - Assembler::kPatchDebugBreakSlotReturnOffset;
}


Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_, host_)));
}


void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}


Address RelocInfo::target_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target)
    set_target_address(target, write_barrier_mode, icache_flush_mode);
}


Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}

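// Cell targets are stored as a plain data word at pc_ (the address of the
// cell's value slot), so they are read and written directly rather than
// through the instruction patching used for code targets.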
void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because a cell can never be
    // on an evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}


static const int kNoCodeAgeSequenceLength = 7 * Assembler::kInstrSize;

Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on MIPS.
  return Handle<Object>();
}

Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + Assembler::kInstrSize, host_));
}


void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(pc_ + Assembler::kInstrSize,
                                   host_,
                                   stub->instruction_start());
}


Address RelocInfo::call_address() {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  // The pc_ offset of 0 assumes the MIPS patched return sequence per
  // debug-mips.cc BreakLocationIterator::SetDebugBreakAtReturn(), or the
  // debug break slot per BreakLocationIterator::SetDebugBreakAtSlot().
  return Assembler::target_address_at(pc_, host_);
}


void RelocInfo::set_call_address(Address target) {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  // The pc_ offset of 0 assumes the MIPS patched return sequence per
  // debug-mips.cc BreakLocationIterator::SetDebugBreakAtReturn(), or the
  // debug break slot per BreakLocationIterator::SetDebugBreakAtSlot().
  Assembler::set_target_address_at(pc_, host_, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::call_object() {
  return *call_object_address();
}


Object** RelocInfo::call_object_address() {
  DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
}


void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}


void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) ||
         IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         IsExternalReference(rmode_));
  Assembler::set_target_address_at(pc_, host_, NULL);
}

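// A patched return sequence (or patched debug break slot) on MIPS is a
// lui/ori pair that materializes the call target followed by a jal, or a
// jalr in the SPECIAL opcode space; the check below matches exactly that
// instruction shape.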
bool RelocInfo::IsPatchedReturnSequence() {
  Instr instr0 = Assembler::instr_at(pc_);
  Instr instr1 = Assembler::instr_at(pc_ + 1 * Assembler::kInstrSize);
  Instr instr2 = Assembler::instr_at(pc_ + 2 * Assembler::kInstrSize);
  bool patched_return = ((instr0 & kOpcodeMask) == LUI &&
                         (instr1 & kOpcodeMask) == ORI &&
                         ((instr2 & kOpcodeMask) == JAL ||
                          ((instr2 & kOpcodeMask) == SPECIAL &&
                           (instr2 & kFunctionFieldMask) == JALR)));
  return patched_return;
}


bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}

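// Dispatch a visitor over this reloc entry according to its mode. Embedded
// pointers, code targets and cells are the cases the garbage collector cares
// about; debug targets are only visited while break points are active.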
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence())) &&
             isolate->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


// -----------------------------------------------------------------------------
// Assembler.

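// Keep a kGap-byte safety margin in the code buffer: grow it as soon as the
// remaining space drops to the margin, so code paths that temporarily block
// buffer growth (see emit below) still have room to complete.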
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
}


void Assembler::CheckTrampolinePoolQuick() {
  if (pc_offset() >= next_buffer_check_) {
    CheckTrampolinePool();
  }
}

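// Write one 32-bit instruction at the current position, growing the buffer
// first if allowed, and then give the trampoline pool a chance to be emitted.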
void Assembler::emit(Instr x) {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  CheckTrampolinePoolQuick();
}


} }  // namespace v8::internal

#endif  // V8_MIPS_ASSEMBLER_MIPS_INL_H_