// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_MIPS_CODE_STUBS_MIPS_H_
#define V8_MIPS_CODE_STUBS_MIPS_H_

#include "src/mips/frames-mips.h"

namespace v8 {
namespace internal {


void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code);


class StringHelper : public AllStatic {
 public:
  // Generate code for copying a large number of characters. This function
  // is allowed to spend extra time setting up conditions to make copying
  // faster. Copying of overlapping regions is not supported.
  // Dest register ends at the position after the last character written.
  static void GenerateCopyCharacters(MacroAssembler* masm,
                                     Register dest,
                                     Register src,
                                     Register count,
                                     Register scratch,
                                     String::Encoding encoding);

  // Compares two flat one-byte strings and returns result in v0.
  static void GenerateCompareFlatOneByteStrings(
      MacroAssembler* masm, Register left, Register right, Register scratch1,
      Register scratch2, Register scratch3, Register scratch4);

  // Compares two flat one-byte strings for equality and returns result in v0.
  static void GenerateFlatOneByteStringEquals(MacroAssembler* masm,
                                              Register left, Register right,
                                              Register scratch1,
                                              Register scratch2,
                                              Register scratch3);

 private:
  static void GenerateOneByteCharsCompareLoop(
      MacroAssembler* masm, Register left, Register right, Register length,
      Register scratch1, Register scratch2, Register scratch3,
      Label* chars_not_equal);

  DISALLOW_IMPLICIT_CONSTRUCTORS(StringHelper);
};
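
// Illustrative sketch (not part of the original header): a string comparison
// stub's Generate() method would typically reach the flat one-byte fast path
// roughly as below. The operand/scratch register choice (a1/a0 operands,
// a2/a3/t0/t1 scratches) is an assumption for illustration only.
//
//   // left in a1, right in a0, both known to be flat one-byte strings.
//   StringHelper::GenerateCompareFlatOneByteStrings(masm, a1, a0, a2, a3,
//                                                   t0, t1);
//   // The helper leaves the comparison result in v0, as documented above.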


class StoreRegistersStateStub: public PlatformCodeStub {
 public:
  explicit StoreRegistersStateStub(Isolate* isolate)
      : PlatformCodeStub(isolate) {}

  static void GenerateAheadOfTime(Isolate* isolate);

 private:
  DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();
  DEFINE_PLATFORM_CODE_STUB(StoreRegistersState, PlatformCodeStub);
};


class RestoreRegistersStateStub: public PlatformCodeStub {
 public:
  explicit RestoreRegistersStateStub(Isolate* isolate)
      : PlatformCodeStub(isolate) {}

  static void GenerateAheadOfTime(Isolate* isolate);

 private:
  DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();
  DEFINE_PLATFORM_CODE_STUB(RestoreRegistersState, PlatformCodeStub);
};


class RecordWriteStub: public PlatformCodeStub {
 public:
  RecordWriteStub(Isolate* isolate,
                  Register object,
                  Register value,
                  Register address,
                  RememberedSetAction remembered_set_action,
                  SaveFPRegsMode fp_mode)
      : PlatformCodeStub(isolate),
        regs_(object,   // An input reg.
              address,  // An input reg.
              value) {  // One scratch reg.
    minor_key_ = ObjectBits::encode(object.code()) |
                 ValueBits::encode(value.code()) |
                 AddressBits::encode(address.code()) |
                 RememberedSetActionBits::encode(remembered_set_action) |
                 SaveFPRegsModeBits::encode(fp_mode);
  }

  RecordWriteStub(uint32_t key, Isolate* isolate)
      : PlatformCodeStub(key, isolate), regs_(object(), address(), value()) {}

  enum Mode {
    STORE_BUFFER_ONLY,
    INCREMENTAL,
    INCREMENTAL_COMPACTION
  };

  bool SometimesSetsUpAFrame() override { return false; }

  static void PatchBranchIntoNop(MacroAssembler* masm, int pos) {
    const unsigned offset = masm->instr_at(pos) & kImm16Mask;
    masm->instr_at_put(pos, BNE | (zero_reg.code() << kRsShift) |
                                (zero_reg.code() << kRtShift) |
                                (offset & kImm16Mask));
    DCHECK(Assembler::IsBne(masm->instr_at(pos)));
  }

  static void PatchNopIntoBranch(MacroAssembler* masm, int pos) {
    const unsigned offset = masm->instr_at(pos) & kImm16Mask;
    masm->instr_at_put(pos, BEQ | (zero_reg.code() << kRsShift) |
                                (zero_reg.code() << kRtShift) |
                                (offset & kImm16Mask));
    DCHECK(Assembler::IsBeq(masm->instr_at(pos)));
  }

  static Mode GetMode(Code* stub) {
    Instr first_instruction = Assembler::instr_at(stub->instruction_start());
    Instr second_instruction = Assembler::instr_at(stub->instruction_start() +
                                                   2 * Assembler::kInstrSize);

    if (Assembler::IsBeq(first_instruction)) {
      return INCREMENTAL;
    }

    DCHECK(Assembler::IsBne(first_instruction));

    if (Assembler::IsBeq(second_instruction)) {
      return INCREMENTAL_COMPACTION;
    }

    DCHECK(Assembler::IsBne(second_instruction));

    return STORE_BUFFER_ONLY;
  }

  static void Patch(Code* stub, Mode mode) {
    MacroAssembler masm(stub->GetIsolate(), stub->instruction_start(),
                        stub->instruction_size(), CodeObjectRequired::kNo);
    switch (mode) {
      case STORE_BUFFER_ONLY:
        DCHECK(GetMode(stub) == INCREMENTAL ||
               GetMode(stub) == INCREMENTAL_COMPACTION);
        PatchBranchIntoNop(&masm, 0);
        PatchBranchIntoNop(&masm, 2 * Assembler::kInstrSize);
        break;
      case INCREMENTAL:
        DCHECK(GetMode(stub) == STORE_BUFFER_ONLY);
        PatchNopIntoBranch(&masm, 0);
        break;
      case INCREMENTAL_COMPACTION:
        DCHECK(GetMode(stub) == STORE_BUFFER_ONLY);
        PatchNopIntoBranch(&masm, 2 * Assembler::kInstrSize);
        break;
    }
    DCHECK(GetMode(stub) == mode);
    Assembler::FlushICache(stub->GetIsolate(), stub->instruction_start(),
                           4 * Assembler::kInstrSize);
  }
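
  // Illustrative sketch (hypothetical call site, not in this file): the
  // incremental marker toggles an already-generated stub between modes by
  // patching its first instructions, roughly like this:
  //
  //   if (RecordWriteStub::GetMode(stub) ==
  //       RecordWriteStub::STORE_BUFFER_ONLY) {
  //     RecordWriteStub::Patch(stub, RecordWriteStub::INCREMENTAL);
  //   }
  //
  // STORE_BUFFER_ONLY is encoded as never-taken branches (bne zero_reg,
  // zero_reg) at both patch sites, INCREMENTAL as an always-taken branch
  // (beq zero_reg, zero_reg) at offset 0, and INCREMENTAL_COMPACTION as an
  // always-taken branch at offset 2 * Assembler::kInstrSize; GetMode()
  // recovers the mode by inspecting those two positions.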

  DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();

 private:
  // This is a helper class for freeing up 3 scratch registers. The input is
  // two registers that must be preserved and one scratch register provided by
  // the caller.
  class RegisterAllocation {
   public:
    RegisterAllocation(Register object,
                       Register address,
                       Register scratch0)
        : object_(object),
          address_(address),
          scratch0_(scratch0) {
      DCHECK(!AreAliased(scratch0, object, address, no_reg));
      scratch1_ = GetRegisterThatIsNotOneOf(object_, address_, scratch0_);
    }

    void Save(MacroAssembler* masm) {
      DCHECK(!AreAliased(object_, address_, scratch1_, scratch0_));
      // We don't have to save scratch0_ because it was given to us as
      // a scratch register.
      masm->push(scratch1_);
    }

    void Restore(MacroAssembler* masm) {
      masm->pop(scratch1_);
    }

    // If we have to call into C then we need to save and restore all caller-
    // saved registers that were not already preserved. The scratch registers
    // will be restored by other means so we don't bother pushing them here.
    void SaveCallerSaveRegisters(MacroAssembler* masm, SaveFPRegsMode mode) {
      masm->MultiPush((kJSCallerSaved | ra.bit()) & ~scratch1_.bit());
      if (mode == kSaveFPRegs) {
        masm->MultiPushFPU(kCallerSavedFPU);
      }
    }

    inline void RestoreCallerSaveRegisters(MacroAssembler* masm,
                                           SaveFPRegsMode mode) {
      if (mode == kSaveFPRegs) {
        masm->MultiPopFPU(kCallerSavedFPU);
      }
      masm->MultiPop((kJSCallerSaved | ra.bit()) & ~scratch1_.bit());
    }

    inline Register object() { return object_; }
    inline Register address() { return address_; }
    inline Register scratch0() { return scratch0_; }
    inline Register scratch1() { return scratch1_; }

   private:
    Register object_;
    Register address_;
    Register scratch0_;
    Register scratch1_;

    friend class RecordWriteStub;
  };

  enum OnNoNeedToInformIncrementalMarker {
    kReturnOnNoNeedToInformIncrementalMarker,
    kUpdateRememberedSetOnNoNeedToInformIncrementalMarker
  };

  inline Major MajorKey() const final { return RecordWrite; }

  void Generate(MacroAssembler* masm) override;
  void GenerateIncremental(MacroAssembler* masm, Mode mode);
  void CheckNeedsToInformIncrementalMarker(
      MacroAssembler* masm,
      OnNoNeedToInformIncrementalMarker on_no_need,
      Mode mode);
  void InformIncrementalMarker(MacroAssembler* masm);

  void Activate(Code* code) override {
    code->GetHeap()->incremental_marking()->ActivateGeneratedStub(code);
  }

  Register object() const {
    return Register::from_code(ObjectBits::decode(minor_key_));
  }

  Register value() const {
    return Register::from_code(ValueBits::decode(minor_key_));
  }

  Register address() const {
    return Register::from_code(AddressBits::decode(minor_key_));
  }

  RememberedSetAction remembered_set_action() const {
    return RememberedSetActionBits::decode(minor_key_);
  }

  SaveFPRegsMode save_fp_regs_mode() const {
    return SaveFPRegsModeBits::decode(minor_key_);
  }

  class ObjectBits: public BitField<int, 0, 5> {};
  class ValueBits: public BitField<int, 5, 5> {};
  class AddressBits: public BitField<int, 10, 5> {};
  class RememberedSetActionBits: public
      BitField<RememberedSetAction, 15, 1> {};
  class SaveFPRegsModeBits: public BitField<SaveFPRegsMode, 16, 1> {};

  Label slow_;
  RegisterAllocation regs_;

  DISALLOW_COPY_AND_ASSIGN(RecordWriteStub);
};
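
// Illustrative sketch (hypothetical call site, not in this file): the write
// barrier code emitted by the macro assembler constructs and calls the stub
// with the registers holding the object, the stored value and the slot
// address, roughly like this. EMIT_REMEMBERED_SET and kDontSaveFPRegs are
// assumed defaults for illustration only.
//
//   RecordWriteStub stub(isolate(), object, value, address,
//                        EMIT_REMEMBERED_SET, kDontSaveFPRegs);
//   CallStub(&stub);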


// Trampoline stub to call into native code. To call safely into native code
// in the presence of compacting GC (which can move code objects) we need to
// keep the code which called into native pinned in the memory. Currently the
// simplest approach is to generate such stub early enough so it can never be
// moved by GC.
class DirectCEntryStub: public PlatformCodeStub {
 public:
  explicit DirectCEntryStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
  void GenerateCall(MacroAssembler* masm, Register target);

 private:
  bool NeedsImmovableCode() override { return true; }

  DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();
  DEFINE_PLATFORM_CODE_STUB(DirectCEntry, PlatformCodeStub);
};


class NameDictionaryLookupStub: public PlatformCodeStub {
 public:
  enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP };

  NameDictionaryLookupStub(Isolate* isolate, LookupMode mode)
      : PlatformCodeStub(isolate) {
    minor_key_ = LookupModeBits::encode(mode);
  }

  static void GenerateNegativeLookup(MacroAssembler* masm,
                                     Label* miss,
                                     Label* done,
                                     Register receiver,
                                     Register properties,
                                     Handle<Name> name,
                                     Register scratch0);

  static void GeneratePositiveLookup(MacroAssembler* masm,
                                     Label* miss,
                                     Label* done,
                                     Register elements,
                                     Register name,
                                     Register r0,
                                     Register r1);

  bool SometimesSetsUpAFrame() override { return false; }

 private:
  static const int kInlinedProbes = 4;
  static const int kTotalProbes = 20;

  static const int kCapacityOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kCapacityIndex * kPointerSize;

  static const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;

  LookupMode mode() const { return LookupModeBits::decode(minor_key_); }

  class LookupModeBits: public BitField<LookupMode, 0, 1> {};

  DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();
  DEFINE_PLATFORM_CODE_STUB(NameDictionaryLookup, PlatformCodeStub);
};


}  // namespace internal
}  // namespace v8

#endif  // V8_MIPS_CODE_STUBS_MIPS_H_