Lines matching full:__ (every use of the __ assembler shorthand in this file)
16 #define __ ACCESS_MASM(masm) macro
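
For context: every __ line below goes through this shorthand. A minimal sketch of the expansion, assuming the usual V8 definition of ACCESS_MASM:

    // ACCESS_MASM(masm) conventionally expands to masm->, so
    #define ACCESS_MASM(masm) masm->
    #define __ ACCESS_MASM(masm)
    // turns a listing line such as
    //     __ SmiTag(a0);
    // into
    //     masm->SmiTag(a0);
    // i.e. a MacroAssembler call that emits one or more MIPS instructions.
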
29 __ AssertFunction(a1); in Generate_Adaptor()
35 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate_Adaptor()
40 __ Addu(a0, a0, num_extra_args + 1); in Generate_Adaptor()
43 __ SmiTag(a0); in Generate_Adaptor()
44 __ Push(a0, a1, a3); in Generate_Adaptor()
45 __ SmiUntag(a0); in Generate_Adaptor()
47 __ JumpToExternalReference(ExternalReference(address, masm->isolate()), in Generate_Adaptor()
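
The SmiTag/SmiUntag pair around the Push above is the usual 32-bit Smi conversion. A minimal sketch of what the two macros compute, assuming the 32-bit layout (kSmiTagSize == 1, kSmiTag == 0):

    #include <cstdint>
    // A Smi is the integer shifted left by one; the clear low bit
    // distinguishes it from a heap-object pointer, whose tag bit is set.
    inline int32_t SmiTagValue(int32_t value) { return value << 1; }
    inline int32_t SmiUntagValue(int32_t smi)  { return smi >> 1; }
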
55 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result); in GenerateLoadInternalArrayFunction()
61 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result); in GenerateLoadArrayFunction()
77 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate_InternalArrayCode()
78 __ SmiTst(a2, t0); in Generate_InternalArrayCode()
79 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, t0, in Generate_InternalArrayCode()
81 __ GetObjectType(a2, a3, t0); in Generate_InternalArrayCode()
82 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction, t0, in Generate_InternalArrayCode()
90 __ TailCallStub(&stub); in Generate_InternalArrayCode()
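
The SmiTst/Assert pairs above are debug-only type checks: SmiTst ANDs the value with the Smi tag mask into a scratch register, and the following Assert(ne, ...) demands a non-zero result, i.e. a heap object where an initial map is expected. A sketch of the predicate, assuming kSmiTagMask == 1:

    #include <cstdint>
    inline bool IsHeapObjectTagged(uint32_t tagged_value) {
      const uint32_t kSmiTagMask = 1;           // what SmiTst(a2, t0) ANDs with
      return (tagged_value & kSmiTagMask) != 0; // Assert(ne, ..., t0, zero_reg)
    }
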
106 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate_ArrayCode()
107 __ SmiTst(a2, t0); in Generate_ArrayCode()
108 __ Assert(ne, kUnexpectedInitialMapForArrayFunction1, t0, in Generate_ArrayCode()
110 __ GetObjectType(a2, a3, t0); in Generate_ArrayCode()
111 __ Assert(eq, kUnexpectedInitialMapForArrayFunction2, t0, in Generate_ArrayCode()
117 __ mov(a3, a1); in Generate_ArrayCode()
118 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); in Generate_ArrayCode()
120 __ TailCallStub(&stub); in Generate_ArrayCode()
139 __ LoadRoot(t2, root_index); in Generate_MathMaxMin()
140 __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
143 __ mov(a3, a0); in Generate_MathMaxMin()
144 __ bind(&loop); in Generate_MathMaxMin()
147 __ Subu(a3, a3, Operand(1)); in Generate_MathMaxMin()
148 __ Branch(&done_loop, lt, a3, Operand(zero_reg)); in Generate_MathMaxMin()
151 __ Lsa(at, sp, a3, kPointerSizeLog2); in Generate_MathMaxMin()
152 __ lw(a2, MemOperand(at)); in Generate_MathMaxMin()
157 __ bind(&convert); in Generate_MathMaxMin()
158 __ JumpIfSmi(a2, &convert_smi); in Generate_MathMaxMin()
159 __ lw(t0, FieldMemOperand(a2, HeapObject::kMapOffset)); in Generate_MathMaxMin()
160 __ JumpIfRoot(t0, Heap::kHeapNumberMapRootIndex, &convert_number); in Generate_MathMaxMin()
164 __ SmiTag(a0); in Generate_MathMaxMin()
165 __ SmiTag(a3); in Generate_MathMaxMin()
166 __ EnterBuiltinFrame(cp, a1, a0); in Generate_MathMaxMin()
167 __ Push(t2, a3); in Generate_MathMaxMin()
168 __ mov(a0, a2); in Generate_MathMaxMin()
169 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); in Generate_MathMaxMin()
170 __ mov(a2, v0); in Generate_MathMaxMin()
171 __ Pop(t2, a3); in Generate_MathMaxMin()
172 __ LeaveBuiltinFrame(cp, a1, a0); in Generate_MathMaxMin()
173 __ SmiUntag(a3); in Generate_MathMaxMin()
174 __ SmiUntag(a0); in Generate_MathMaxMin()
178 __ JumpIfSmi(t2, &restore_smi); in Generate_MathMaxMin()
179 __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
180 __ jmp(&done_restore); in Generate_MathMaxMin()
181 __ bind(&restore_smi); in Generate_MathMaxMin()
182 __ SmiToDoubleFPURegister(t2, f0, t0); in Generate_MathMaxMin()
183 __ bind(&done_restore); in Generate_MathMaxMin()
186 __ jmp(&convert); in Generate_MathMaxMin()
187 __ bind(&convert_number); in Generate_MathMaxMin()
188 __ ldc1(f2, FieldMemOperand(a2, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
189 __ jmp(&done_convert); in Generate_MathMaxMin()
190 __ bind(&convert_smi); in Generate_MathMaxMin()
191 __ SmiToDoubleFPURegister(a2, f2, t0); in Generate_MathMaxMin()
192 __ bind(&done_convert); in Generate_MathMaxMin()
199 __ BranchF(nullptr, &compare_nan, eq, f0, f2); in Generate_MathMaxMin()
200 __ Move(t0, t1, f0); in Generate_MathMaxMin()
202 __ MinNaNCheck_d(f0, f0, f2); in Generate_MathMaxMin()
205 __ MaxNaNCheck_d(f0, f0, f2); in Generate_MathMaxMin()
207 __ Move(at, t8, f0); in Generate_MathMaxMin()
208 __ Branch(&set_value, ne, t0, Operand(at)); in Generate_MathMaxMin()
209 __ Branch(&set_value, ne, t1, Operand(t8)); in Generate_MathMaxMin()
210 __ jmp(&loop); in Generate_MathMaxMin()
211 __ bind(&set_value); in Generate_MathMaxMin()
212 __ mov(t2, a2); in Generate_MathMaxMin()
213 __ jmp(&loop); in Generate_MathMaxMin()
216 __ bind(&compare_nan); in Generate_MathMaxMin()
217 __ LoadRoot(t2, Heap::kNanValueRootIndex); in Generate_MathMaxMin()
218 __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
219 __ jmp(&loop); in Generate_MathMaxMin()
222 __ bind(&done_loop); in Generate_MathMaxMin()
224 __ Addu(a0, a0, Operand(1)); in Generate_MathMaxMin()
225 __ Lsa(sp, sp, a0, kPointerSizeLog2); in Generate_MathMaxMin()
226 __ Ret(USE_DELAY_SLOT); in Generate_MathMaxMin()
227 __ mov(v0, t2); // In delay slot. in Generate_MathMaxMin()
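
Read as a whole, the Generate_MathMaxMin loop above folds the arguments into an accumulator, converting each one with ToNumber and poisoning the accumulator with NaN once any operand is NaN. A hedged C++ sketch of the control flow, assuming root_index seeds the accumulator with -Infinity for max and +Infinity for min; the real code also preserves the -0/+0 ordering via MinNaNCheck_d/MaxNaNCheck_d and swaps result objects on a bit-level change, which this sketch glosses over:

    #include <algorithm>
    #include <cmath>
    double MathMaxMin(const double* args, int argc, bool is_max) {
      double acc = is_max ? -INFINITY : INFINITY;  // LoadRoot(t2, root_index); ldc1(f0, ...)
      for (int i = argc - 1; i >= 0; --i) {        // a3 counts down to zero
        double v = args[i];                        // after the ToNumber slow path if needed
        if (std::isnan(v) || std::isnan(acc)) {    // BranchF(nullptr, &compare_nan, eq, f0, f2)
          acc = std::nan("");                      // LoadRoot(t2, Heap::kNanValueRootIndex)
          continue;                                // jmp(&loop): later args still get converted
        }
        acc = is_max ? std::max(acc, v)            // MaxNaNCheck_d(f0, f0, f2)
                     : std::min(acc, v);           // MinNaNCheck_d(f0, f0, f2)
      }
      return acc;                                  // mov(v0, t2) in the Ret delay slot
    }
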
244 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg)); in Generate_NumberConstructor()
245 __ Subu(t1, a0, Operand(1)); // In delay slot. in Generate_NumberConstructor()
246 __ mov(t0, a0); // Store argc in t0. in Generate_NumberConstructor()
247 __ Lsa(at, sp, t1, kPointerSizeLog2); in Generate_NumberConstructor()
248 __ lw(a0, MemOperand(at)); in Generate_NumberConstructor()
254 __ SmiTag(t0); in Generate_NumberConstructor()
255 __ EnterBuiltinFrame(cp, a1, t0); in Generate_NumberConstructor()
256 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); in Generate_NumberConstructor()
257 __ LeaveBuiltinFrame(cp, a1, t0); in Generate_NumberConstructor()
258 __ SmiUntag(t0); in Generate_NumberConstructor()
263 __ Lsa(sp, sp, t0, kPointerSizeLog2); in Generate_NumberConstructor()
264 __ DropAndRet(1); in Generate_NumberConstructor()
268 __ bind(&no_arguments); in Generate_NumberConstructor()
269 __ Move(v0, Smi::kZero); in Generate_NumberConstructor()
270 __ DropAndRet(1); in Generate_NumberConstructor()
286 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate_NumberConstructor_ConstructStub()
291 __ mov(t0, a0); // Store argc in t0. in Generate_NumberConstructor_ConstructStub()
292 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg)); in Generate_NumberConstructor_ConstructStub()
293 __ Subu(t1, a0, Operand(1)); // In delay slot. in Generate_NumberConstructor_ConstructStub()
294 __ Lsa(at, sp, t1, kPointerSizeLog2); in Generate_NumberConstructor_ConstructStub()
295 __ lw(a0, MemOperand(at)); in Generate_NumberConstructor_ConstructStub()
296 __ jmp(&done); in Generate_NumberConstructor_ConstructStub()
297 __ bind(&no_arguments); in Generate_NumberConstructor_ConstructStub()
298 __ Move(a0, Smi::kZero); in Generate_NumberConstructor_ConstructStub()
299 __ bind(&done); in Generate_NumberConstructor_ConstructStub()
305 __ JumpIfSmi(a0, &done_convert); in Generate_NumberConstructor_ConstructStub()
306 __ GetObjectType(a0, a2, a2); in Generate_NumberConstructor_ConstructStub()
307 __ Branch(&done_convert, eq, a2, Operand(HEAP_NUMBER_TYPE)); in Generate_NumberConstructor_ConstructStub()
310 __ SmiTag(t0); in Generate_NumberConstructor_ConstructStub()
311 __ EnterBuiltinFrame(cp, a1, t0); in Generate_NumberConstructor_ConstructStub()
312 __ Push(a3); in Generate_NumberConstructor_ConstructStub()
313 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); in Generate_NumberConstructor_ConstructStub()
314 __ Move(a0, v0); in Generate_NumberConstructor_ConstructStub()
315 __ Pop(a3); in Generate_NumberConstructor_ConstructStub()
316 __ LeaveBuiltinFrame(cp, a1, t0); in Generate_NumberConstructor_ConstructStub()
317 __ SmiUntag(t0); in Generate_NumberConstructor_ConstructStub()
319 __ bind(&done_convert); in Generate_NumberConstructor_ConstructStub()
324 __ Branch(&new_object, ne, a1, Operand(a3)); in Generate_NumberConstructor_ConstructStub()
327 __ AllocateJSValue(v0, a1, a0, a2, t1, &new_object); in Generate_NumberConstructor_ConstructStub()
328 __ jmp(&drop_frame_and_ret); in Generate_NumberConstructor_ConstructStub()
331 __ bind(&new_object); in Generate_NumberConstructor_ConstructStub()
335 __ SmiTag(t0); in Generate_NumberConstructor_ConstructStub()
336 __ EnterBuiltinFrame(cp, a1, t0); in Generate_NumberConstructor_ConstructStub()
337 __ Push(a0); // first argument in Generate_NumberConstructor_ConstructStub()
338 __ CallStub(&stub); in Generate_NumberConstructor_ConstructStub()
339 __ Pop(a0); in Generate_NumberConstructor_ConstructStub()
340 __ LeaveBuiltinFrame(cp, a1, t0); in Generate_NumberConstructor_ConstructStub()
341 __ SmiUntag(t0); in Generate_NumberConstructor_ConstructStub()
343 __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset)); in Generate_NumberConstructor_ConstructStub()
345 __ bind(&drop_frame_and_ret); in Generate_NumberConstructor_ConstructStub()
347 __ Lsa(sp, sp, t0, kPointerSizeLog2); in Generate_NumberConstructor_ConstructStub()
348 __ DropAndRet(1); in Generate_NumberConstructor_ConstructStub()
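
The construct stub above differs from the plain Number(...) call in one step: after ToNumber, the primitive is boxed in a JSValue wrapper (AllocateJSValue, plus the sw into JSValue::kValueOffset on the slow path), with &new_object taken when new.target in a3 differs from the function in a1, i.e. for subclass construction. A hedged sketch using a hypothetical stand-in struct:

    struct JSValueBox { double value; };     // hypothetical stand-in for the heap JSValue
    JSValueBox* NumberAsConstructor(double tonumber_result) {
      JSValueBox* wrapper = new JSValueBox;  // AllocateJSValue(v0, a1, a0, a2, t1, ...)
      wrapper->value = tonumber_result;      // sw(a0, FieldMemOperand(v0, JSValue::kValueOffset))
      return wrapper;
    }
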
366 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg)); in Generate_StringConstructor()
367 __ Subu(t1, a0, Operand(1)); in Generate_StringConstructor()
368 __ mov(t0, a0); // Store argc in t0. in Generate_StringConstructor()
369 __ Lsa(at, sp, t1, kPointerSizeLog2); in Generate_StringConstructor()
370 __ lw(a0, MemOperand(at)); in Generate_StringConstructor()
377 __ JumpIfSmi(a0, &to_string); in Generate_StringConstructor()
378 __ GetObjectType(a0, t1, t1); in Generate_StringConstructor()
380 __ Subu(t1, t1, Operand(FIRST_NONSTRING_TYPE)); in Generate_StringConstructor()
381 __ Branch(&symbol_descriptive_string, eq, t1, Operand(zero_reg)); in Generate_StringConstructor()
382 __ Branch(&to_string, gt, t1, Operand(zero_reg)); in Generate_StringConstructor()
383 __ mov(v0, a0); in Generate_StringConstructor()
384 __ jmp(&drop_frame_and_ret); in Generate_StringConstructor()
388 __ bind(&no_arguments); in Generate_StringConstructor()
390 __ LoadRoot(v0, Heap::kempty_stringRootIndex); in Generate_StringConstructor()
391 __ DropAndRet(1); in Generate_StringConstructor()
395 __ bind(&to_string); in Generate_StringConstructor()
398 __ SmiTag(t0); in Generate_StringConstructor()
399 __ EnterBuiltinFrame(cp, a1, t0); in Generate_StringConstructor()
400 __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET); in Generate_StringConstructor()
401 __ LeaveBuiltinFrame(cp, a1, t0); in Generate_StringConstructor()
402 __ SmiUntag(t0); in Generate_StringConstructor()
404 __ jmp(&drop_frame_and_ret); in Generate_StringConstructor()
407 __ bind(&symbol_descriptive_string); in Generate_StringConstructor()
409 __ Lsa(sp, sp, t0, kPointerSizeLog2); in Generate_StringConstructor()
410 __ Drop(1); in Generate_StringConstructor()
411 __ Push(a0); in Generate_StringConstructor()
412 __ TailCallRuntime(Runtime::kSymbolDescriptiveString); in Generate_StringConstructor()
415 __ bind(&drop_frame_and_ret); in Generate_StringConstructor()
417 __ Lsa(sp, sp, t0, kPointerSizeLog2); in Generate_StringConstructor()
418 __ DropAndRet(1); in Generate_StringConstructor()
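
The Subu/Branch pair above is a three-way dispatch on the receiver's instance type: after subtracting FIRST_NONSTRING_TYPE, a negative result means a String (returned unchanged), zero means exactly FIRST_NONSTRING_TYPE, which at this revision is SYMBOL_TYPE (descriptive-string path), and positive means anything else (ToString). A sketch, with that instance-type ordering as a stated assumption:

    enum StringCtorPath { kReturnUnchanged, kSymbolDescriptiveString, kToString };
    StringCtorPath Classify(int instance_type, int first_nonstring_type) {
      int d = instance_type - first_nonstring_type;  // Subu(t1, t1, FIRST_NONSTRING_TYPE)
      if (d < 0)  return kReturnUnchanged;           // a String: mov(v0, a0) and return
      if (d == 0) return kSymbolDescriptiveString;   // Branch(..., eq, t1, zero_reg)
      return kToString;                              // Branch(&to_string, gt, t1, zero_reg)
    }
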
435 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate_StringConstructor_ConstructStub()
440 __ mov(t0, a0); // Store argc in t0. in Generate_StringConstructor_ConstructStub()
441 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg)); in Generate_StringConstructor_ConstructStub()
442 __ Subu(t1, a0, Operand(1)); in Generate_StringConstructor_ConstructStub()
443 __ Lsa(at, sp, t1, kPointerSizeLog2); in Generate_StringConstructor_ConstructStub()
444 __ lw(a0, MemOperand(at)); in Generate_StringConstructor_ConstructStub()
445 __ jmp(&done); in Generate_StringConstructor_ConstructStub()
446 __ bind(&no_arguments); in Generate_StringConstructor_ConstructStub()
447 __ LoadRoot(a0, Heap::kempty_stringRootIndex); in Generate_StringConstructor_ConstructStub()
448 __ bind(&done); in Generate_StringConstructor_ConstructStub()
454 __ JumpIfSmi(a0, &convert); in Generate_StringConstructor_ConstructStub()
455 __ GetObjectType(a0, a2, a2); in Generate_StringConstructor_ConstructStub()
456 __ And(t1, a2, Operand(kIsNotStringMask)); in Generate_StringConstructor_ConstructStub()
457 __ Branch(&done_convert, eq, t1, Operand(zero_reg)); in Generate_StringConstructor_ConstructStub()
458 __ bind(&convert); in Generate_StringConstructor_ConstructStub()
461 __ SmiTag(t0); in Generate_StringConstructor_ConstructStub()
462 __ EnterBuiltinFrame(cp, a1, t0); in Generate_StringConstructor_ConstructStub()
463 __ Push(a3); in Generate_StringConstructor_ConstructStub()
464 __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET); in Generate_StringConstructor_ConstructStub()
465 __ Move(a0, v0); in Generate_StringConstructor_ConstructStub()
466 __ Pop(a3); in Generate_StringConstructor_ConstructStub()
467 __ LeaveBuiltinFrame(cp, a1, t0); in Generate_StringConstructor_ConstructStub()
468 __ SmiUntag(t0); in Generate_StringConstructor_ConstructStub()
470 __ bind(&done_convert); in Generate_StringConstructor_ConstructStub()
475 __ Branch(&new_object, ne, a1, Operand(a3)); in Generate_StringConstructor_ConstructStub()
478 __ AllocateJSValue(v0, a1, a0, a2, t1, &new_object); in Generate_StringConstructor_ConstructStub()
479 __ jmp(&drop_frame_and_ret); in Generate_StringConstructor_ConstructStub()
482 __ bind(&new_object); in Generate_StringConstructor_ConstructStub()
486 __ SmiTag(t0); in Generate_StringConstructor_ConstructStub()
487 __ EnterBuiltinFrame(cp, a1, t0); in Generate_StringConstructor_ConstructStub()
488 __ Push(a0); // first argument in Generate_StringConstructor_ConstructStub()
489 __ CallStub(&stub); in Generate_StringConstructor_ConstructStub()
490 __ Pop(a0); in Generate_StringConstructor_ConstructStub()
491 __ LeaveBuiltinFrame(cp, a1, t0); in Generate_StringConstructor_ConstructStub()
492 __ SmiUntag(t0); in Generate_StringConstructor_ConstructStub()
494 __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset)); in Generate_StringConstructor_ConstructStub()
496 __ bind(&drop_frame_and_ret); in Generate_StringConstructor_ConstructStub()
498 __ Lsa(sp, sp, t0, kPointerSizeLog2); in Generate_StringConstructor_ConstructStub()
499 __ DropAndRet(1); in Generate_StringConstructor_ConstructStub()
504 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in GenerateTailCallToSharedCode()
505 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset)); in GenerateTailCallToSharedCode()
506 __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag)); in GenerateTailCallToSharedCode()
507 __ Jump(at); in GenerateTailCallToSharedCode()
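
The Addu/Jump idiom above (and again in GenerateTailCallToReturnedCode below) computes a Code object's entry point: heap pointers carry a tag bit (kHeapObjectTag == 1), and the first instruction lives Code::kHeaderSize bytes into the object. A sketch of the arithmetic:

    #include <cstddef>
    #include <cstdint>
    inline uintptr_t CodeEntry(uintptr_t tagged_code,
                               size_t header_size /* Code::kHeaderSize */) {
      const uintptr_t kHeapObjectTag = 1;
      return tagged_code + header_size - kHeapObjectTag;  // Addu(at, a2, ...); Jump(at)
    }
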
521 __ SmiTag(a0); in GenerateTailCallToReturnedCode()
522 __ Push(a0, a1, a3, a1); in GenerateTailCallToReturnedCode()
524 __ CallRuntime(function_id, 1); in GenerateTailCallToReturnedCode()
527 __ Pop(a0, a1, a3); in GenerateTailCallToReturnedCode()
528 __ SmiUntag(a0); in GenerateTailCallToReturnedCode()
531 __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); in GenerateTailCallToReturnedCode()
532 __ Jump(at); in GenerateTailCallToReturnedCode()
542 __ LoadRoot(t0, Heap::kStackLimitRootIndex); in Generate_InOptimizationQueue()
543 __ Branch(&ok, hs, sp, Operand(t0)); in Generate_InOptimizationQueue()
547 __ bind(&ok); in Generate_InOptimizationQueue()
572 __ SmiTag(a0); in Generate_JSConstructStubHelper()
573 __ Push(cp, a0); in Generate_JSConstructStubHelper()
577 __ Push(a1, a3); in Generate_JSConstructStubHelper()
579 __ CallStub(&stub); in Generate_JSConstructStubHelper()
580 __ mov(t4, v0); in Generate_JSConstructStubHelper()
581 __ Pop(a1, a3); in Generate_JSConstructStubHelper()
590 __ lw(a0, MemOperand(sp)); in Generate_JSConstructStubHelper()
593 __ SmiUntag(a0); in Generate_JSConstructStubHelper()
599 __ Push(t4, t4); in Generate_JSConstructStubHelper()
601 __ PushRoot(Heap::kTheHoleValueRootIndex); in Generate_JSConstructStubHelper()
605 __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate_JSConstructStubHelper()
617 __ SmiTag(t4, a0); in Generate_JSConstructStubHelper()
618 __ jmp(&entry); in Generate_JSConstructStubHelper()
619 __ bind(&loop); in Generate_JSConstructStubHelper()
620 __ Lsa(t0, a2, t4, kPointerSizeLog2 - kSmiTagSize); in Generate_JSConstructStubHelper()
621 __ lw(t1, MemOperand(t0)); in Generate_JSConstructStubHelper()
622 __ push(t1); in Generate_JSConstructStubHelper()
623 __ bind(&entry); in Generate_JSConstructStubHelper()
624 __ Addu(t4, t4, Operand(-2)); in Generate_JSConstructStubHelper()
625 __ Branch(&loop, greater_equal, t4, Operand(zero_reg)); in Generate_JSConstructStubHelper()
632 __ InvokeFunction(a1, a3, actual, CALL_FUNCTION, in Generate_JSConstructStubHelper()
641 __ lw(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset)); in Generate_JSConstructStubHelper()
653 __ JumpIfSmi(v0, &use_receiver); in Generate_JSConstructStubHelper()
657 __ GetObjectType(v0, a1, a3); in Generate_JSConstructStubHelper()
658 __ Branch(&exit, greater_equal, a3, Operand(FIRST_JS_RECEIVER_TYPE)); in Generate_JSConstructStubHelper()
662 __ bind(&use_receiver); in Generate_JSConstructStubHelper()
663 __ lw(v0, MemOperand(sp)); in Generate_JSConstructStubHelper()
667 __ bind(&exit); in Generate_JSConstructStubHelper()
671 __ lw(a1, MemOperand(sp, 1 * kPointerSize)); in Generate_JSConstructStubHelper()
673 __ lw(a1, MemOperand(sp)); in Generate_JSConstructStubHelper()
684 __ JumpIfNotSmi(v0, &dont_throw); in Generate_JSConstructStubHelper()
687 __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject); in Generate_JSConstructStubHelper()
689 __ bind(&dont_throw); in Generate_JSConstructStubHelper()
692 __ Lsa(sp, sp, a1, kPointerSizeLog2 - 1); in Generate_JSConstructStubHelper()
693 __ Addu(sp, sp, kPointerSize); in Generate_JSConstructStubHelper()
695 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2); in Generate_JSConstructStubHelper()
697 __ Ret(); in Generate_JSConstructStubHelper()
721 __ Push(a1); in Generate_ConstructedNonConstructable()
722 __ CallRuntime(Runtime::kThrowConstructedNonConstructable); in Generate_ConstructedNonConstructable()
734 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); in Generate_CheckStackOverflow()
737 __ Subu(a2, sp, a2); in Generate_CheckStackOverflow()
740 __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize); in Generate_CheckStackOverflow()
743 __ sll(t3, argc, kPointerSizeLog2); in Generate_CheckStackOverflow()
746 __ Branch(&okay, gt, a2, Operand(t3)); in Generate_CheckStackOverflow()
749 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_CheckStackOverflow()
751 __ bind(&okay); in Generate_CheckStackOverflow()
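
The check above computes the headroom between sp and the real stack limit and compares it against the space the incoming arguments need; the two sll variants handle a Smi-tagged versus an untagged argc. A sketch of the untagged case:

    #include <cstdint>
    inline bool StackWouldOverflow(uintptr_t sp, uintptr_t real_limit,
                                   uint32_t argc, int pointer_size_log2) {
      uintptr_t headroom = sp - real_limit;                      // Subu(a2, sp, a2)
      uintptr_t needed = uintptr_t{argc} << pointer_size_log2;   // sll(t3, argc, ...)
      return headroom <= needed;                                 // Branch(&okay, gt, a2, t3)
    }
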
774 __ li(cp, Operand(context_address)); in Generate_JSEntryTrampolineHelper()
775 __ lw(cp, MemOperand(cp)); in Generate_JSEntryTrampolineHelper()
778 __ Push(a1, a2); in Generate_JSEntryTrampolineHelper()
785 __ mov(t1, a0); in Generate_JSEntryTrampolineHelper()
791 __ Lsa(t2, s0, a3, kPointerSizeLog2); in Generate_JSEntryTrampolineHelper()
792 __ b(&entry); in Generate_JSEntryTrampolineHelper()
793 __ nop(); // Branch delay slot nop. in Generate_JSEntryTrampolineHelper()
795 __ bind(&loop); in Generate_JSEntryTrampolineHelper()
796 __ lw(t0, MemOperand(s0)); // Read next parameter. in Generate_JSEntryTrampolineHelper()
797 __ addiu(s0, s0, kPointerSize); in Generate_JSEntryTrampolineHelper()
798 __ lw(t0, MemOperand(t0)); // Dereference handle. in Generate_JSEntryTrampolineHelper()
799 __ push(t0); // Push parameter. in Generate_JSEntryTrampolineHelper()
800 __ bind(&entry); in Generate_JSEntryTrampolineHelper()
801 __ Branch(&loop, ne, s0, Operand(t2)); in Generate_JSEntryTrampolineHelper()
804 __ mov(a0, a3); in Generate_JSEntryTrampolineHelper()
805 __ mov(a3, t1); in Generate_JSEntryTrampolineHelper()
809 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); in Generate_JSEntryTrampolineHelper()
810 __ mov(s1, t0); in Generate_JSEntryTrampolineHelper()
811 __ mov(s2, t0); in Generate_JSEntryTrampolineHelper()
812 __ mov(s3, t0); in Generate_JSEntryTrampolineHelper()
813 __ mov(s4, t0); in Generate_JSEntryTrampolineHelper()
814 __ mov(s5, t0); in Generate_JSEntryTrampolineHelper()
822 __ Call(builtin, RelocInfo::CODE_TARGET); in Generate_JSEntryTrampolineHelper()
827 __ Jump(ra); in Generate_JSEntryTrampolineHelper()
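
The copy loop above transfers the arguments the C++ entry code passed in: each slot in the source array holds a handle location, so the code loads the slot ("read next parameter"), dereferences it once ("dereference handle"), and pushes the raw tagged value. A hedged sketch over plain pointers:

    #include <cstdint>
    #include <vector>
    // handles[i] points at the slot holding the i-th tagged argument.
    inline void PushParameters(uintptr_t* const* handles, int count,
                               std::vector<uintptr_t>* js_stack) {
      for (int i = 0; i < count; ++i) {  // s0 walks forward until it reaches t2
        uintptr_t* handle = handles[i];  // lw(t0, MemOperand(s0))
        js_stack->push_back(*handle);    // lw(t0, MemOperand(t0)); push(t0)
      }
    }
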
846 __ AssertGeneratorObject(a1); in Generate_ResumeGeneratorTrampoline()
849 __ sw(v0, FieldMemOperand(a1, JSGeneratorObject::kInputOrDebugPosOffset)); in Generate_ResumeGeneratorTrampoline()
850 __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, v0, a3, in Generate_ResumeGeneratorTrampoline()
854 __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kResumeModeOffset)); in Generate_ResumeGeneratorTrampoline()
857 __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset)); in Generate_ResumeGeneratorTrampoline()
858 __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset)); in Generate_ResumeGeneratorTrampoline()
866 __ li(t1, Operand(last_step_action)); in Generate_ResumeGeneratorTrampoline()
867 __ lb(t1, MemOperand(t1)); in Generate_ResumeGeneratorTrampoline()
868 __ Branch(&prepare_step_in_if_stepping, ge, t1, Operand(StepIn)); in Generate_ResumeGeneratorTrampoline()
873 __ li(t1, Operand(debug_suspended_generator)); in Generate_ResumeGeneratorTrampoline()
874 __ lw(t1, MemOperand(t1)); in Generate_ResumeGeneratorTrampoline()
875 __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(t1)); in Generate_ResumeGeneratorTrampoline()
876 __ bind(&stepping_prepared); in Generate_ResumeGeneratorTrampoline()
879 __ lw(t1, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset)); in Generate_ResumeGeneratorTrampoline()
880 __ Push(t1); in Generate_ResumeGeneratorTrampoline()
895 __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset)); in Generate_ResumeGeneratorTrampoline()
896 __ lw(a3, in Generate_ResumeGeneratorTrampoline()
900 __ bind(&loop); in Generate_ResumeGeneratorTrampoline()
901 __ Subu(a3, a3, Operand(Smi::FromInt(1))); in Generate_ResumeGeneratorTrampoline()
902 __ Branch(&done_loop, lt, a3, Operand(zero_reg)); in Generate_ResumeGeneratorTrampoline()
903 __ PushRoot(Heap::kTheHoleValueRootIndex); in Generate_ResumeGeneratorTrampoline()
904 __ Branch(&loop); in Generate_ResumeGeneratorTrampoline()
905 __ bind(&done_loop); in Generate_ResumeGeneratorTrampoline()
910 __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset)); in Generate_ResumeGeneratorTrampoline()
911 __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset)); in Generate_ResumeGeneratorTrampoline()
912 __ GetObjectType(a3, a3, a3); in Generate_ResumeGeneratorTrampoline()
913 __ Branch(&old_generator, ne, a3, Operand(BYTECODE_ARRAY_TYPE)); in Generate_ResumeGeneratorTrampoline()
917 __ lw(a0, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset)); in Generate_ResumeGeneratorTrampoline()
918 __ lw(a0, in Generate_ResumeGeneratorTrampoline()
920 __ SmiUntag(a0); in Generate_ResumeGeneratorTrampoline()
924 __ Move(a3, a1); in Generate_ResumeGeneratorTrampoline()
925 __ Move(a1, t0); in Generate_ResumeGeneratorTrampoline()
926 __ lw(a2, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); in Generate_ResumeGeneratorTrampoline()
927 __ Jump(a2); in Generate_ResumeGeneratorTrampoline()
931 __ bind(&old_generator); in Generate_ResumeGeneratorTrampoline()
936 __ Push(ra, fp); in Generate_ResumeGeneratorTrampoline()
937 __ Move(fp, sp); in Generate_ResumeGeneratorTrampoline()
938 __ Push(cp, t0); in Generate_ResumeGeneratorTrampoline()
941 __ lw(a0, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset)); in Generate_ResumeGeneratorTrampoline()
942 __ lw(a3, FieldMemOperand(a0, FixedArray::kLengthOffset)); in Generate_ResumeGeneratorTrampoline()
943 __ Addu(a0, a0, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in Generate_ResumeGeneratorTrampoline()
944 __ Lsa(a3, a0, a3, kPointerSizeLog2 - 1); in Generate_ResumeGeneratorTrampoline()
947 __ bind(&loop); in Generate_ResumeGeneratorTrampoline()
948 __ Branch(&done_loop, eq, a0, Operand(a3)); in Generate_ResumeGeneratorTrampoline()
949 __ lw(t1, MemOperand(a0)); in Generate_ResumeGeneratorTrampoline()
950 __ Push(t1); in Generate_ResumeGeneratorTrampoline()
951 __ Branch(USE_DELAY_SLOT, &loop); in Generate_ResumeGeneratorTrampoline()
952 __ addiu(a0, a0, kPointerSize); // In delay slot. in Generate_ResumeGeneratorTrampoline()
953 __ bind(&done_loop); in Generate_ResumeGeneratorTrampoline()
957 __ LoadRoot(t1, Heap::kEmptyFixedArrayRootIndex); in Generate_ResumeGeneratorTrampoline()
958 __ sw(t1, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset)); in Generate_ResumeGeneratorTrampoline()
961 __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset)); in Generate_ResumeGeneratorTrampoline()
962 __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kCodeOffset)); in Generate_ResumeGeneratorTrampoline()
963 __ Addu(a3, a3, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_ResumeGeneratorTrampoline()
964 __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset)); in Generate_ResumeGeneratorTrampoline()
965 __ SmiUntag(a2); in Generate_ResumeGeneratorTrampoline()
966 __ Addu(a3, a3, Operand(a2)); in Generate_ResumeGeneratorTrampoline()
967 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting))); in Generate_ResumeGeneratorTrampoline()
968 __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset)); in Generate_ResumeGeneratorTrampoline()
969 __ Move(v0, a1); // Continuation expects generator object in v0. in Generate_ResumeGeneratorTrampoline()
970 __ Jump(a3); in Generate_ResumeGeneratorTrampoline()
973 __ bind(&prepare_step_in_if_stepping); in Generate_ResumeGeneratorTrampoline()
976 __ Push(a1, a2, t0); in Generate_ResumeGeneratorTrampoline()
977 __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping); in Generate_ResumeGeneratorTrampoline()
978 __ Pop(a1, a2); in Generate_ResumeGeneratorTrampoline()
980 __ Branch(USE_DELAY_SLOT, &stepping_prepared); in Generate_ResumeGeneratorTrampoline()
981 __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset)); in Generate_ResumeGeneratorTrampoline()
983 __ bind(&prepare_step_in_suspended_generator); in Generate_ResumeGeneratorTrampoline()
986 __ Push(a1, a2); in Generate_ResumeGeneratorTrampoline()
987 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator); in Generate_ResumeGeneratorTrampoline()
988 __ Pop(a1, a2); in Generate_ResumeGeneratorTrampoline()
990 __ Branch(USE_DELAY_SLOT, &stepping_prepared); in Generate_ResumeGeneratorTrampoline()
991 __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset)); in Generate_ResumeGeneratorTrampoline()
998 __ lw(args_count, in LeaveInterpreterFrame()
1000 __ lw(args_count, in LeaveInterpreterFrame()
1004 __ LeaveFrame(StackFrame::JAVA_SCRIPT); in LeaveInterpreterFrame()
1007 __ Addu(sp, sp, args_count); in LeaveInterpreterFrame()
1032 __ PushStandardFrame(a1); in Generate_InterpreterEntryTrampoline()
1036 __ lw(a0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_InterpreterEntryTrampoline()
1040 __ lw(debug_info, FieldMemOperand(a0, SharedFunctionInfo::kDebugInfoOffset)); in Generate_InterpreterEntryTrampoline()
1041 __ Branch(&load_debug_bytecode_array, ne, debug_info, in Generate_InterpreterEntryTrampoline()
1043 __ lw(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1045 __ bind(&bytecode_array_loaded); in Generate_InterpreterEntryTrampoline()
1049 __ lw(a0, FieldMemOperand(a0, SharedFunctionInfo::kCodeOffset)); in Generate_InterpreterEntryTrampoline()
1050 __ Branch(&switch_to_different_code_kind, ne, a0, in Generate_InterpreterEntryTrampoline()
1054 __ lw(a0, FieldMemOperand(a1, JSFunction::kLiteralsOffset)); in Generate_InterpreterEntryTrampoline()
1055 __ lw(a0, FieldMemOperand(a0, LiteralsArray::kFeedbackVectorOffset)); in Generate_InterpreterEntryTrampoline()
1056 __ lw(t0, FieldMemOperand( in Generate_InterpreterEntryTrampoline()
1059 __ Addu(t0, t0, Operand(Smi::FromInt(1))); in Generate_InterpreterEntryTrampoline()
1060 __ sw(t0, FieldMemOperand( in Generate_InterpreterEntryTrampoline()
1066 __ SmiTst(kInterpreterBytecodeArrayRegister, t0); in Generate_InterpreterEntryTrampoline()
1067 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0, in Generate_InterpreterEntryTrampoline()
1069 __ GetObjectType(kInterpreterBytecodeArrayRegister, t0, t0); in Generate_InterpreterEntryTrampoline()
1070 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0, in Generate_InterpreterEntryTrampoline()
1075 __ li(kInterpreterBytecodeOffsetRegister, in Generate_InterpreterEntryTrampoline()
1079 __ SmiTag(t0, kInterpreterBytecodeOffsetRegister); in Generate_InterpreterEntryTrampoline()
1080 __ Push(a3, kInterpreterBytecodeArrayRegister, t0); in Generate_InterpreterEntryTrampoline()
1085 __ lw(t0, FieldMemOperand(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1090 __ Subu(t1, sp, Operand(t0)); in Generate_InterpreterEntryTrampoline()
1091 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); in Generate_InterpreterEntryTrampoline()
1092 __ Branch(&ok, hs, t1, Operand(a2)); in Generate_InterpreterEntryTrampoline()
1093 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterEntryTrampoline()
1094 __ bind(&ok); in Generate_InterpreterEntryTrampoline()
1099 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex); in Generate_InterpreterEntryTrampoline()
1100 __ Branch(&loop_check); in Generate_InterpreterEntryTrampoline()
1101 __ bind(&loop_header); in Generate_InterpreterEntryTrampoline()
1103 __ push(t1); in Generate_InterpreterEntryTrampoline()
1105 __ bind(&loop_check); in Generate_InterpreterEntryTrampoline()
1106 __ Subu(t0, t0, Operand(kPointerSize)); in Generate_InterpreterEntryTrampoline()
1107 __ Branch(&loop_header, ge, t0, Operand(zero_reg)); in Generate_InterpreterEntryTrampoline()
1111 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex); in Generate_InterpreterEntryTrampoline()
1112 __ li(kInterpreterDispatchTableRegister, in Generate_InterpreterEntryTrampoline()
1117 __ Addu(a0, kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1119 __ lbu(a0, MemOperand(a0)); in Generate_InterpreterEntryTrampoline()
1120 __ Lsa(at, kInterpreterDispatchTableRegister, a0, kPointerSizeLog2); in Generate_InterpreterEntryTrampoline()
1121 __ lw(at, MemOperand(at)); in Generate_InterpreterEntryTrampoline()
1122 __ Call(at); in Generate_InterpreterEntryTrampoline()
1127 __ Jump(ra); in Generate_InterpreterEntryTrampoline()
1130 __ bind(&load_debug_bytecode_array); in Generate_InterpreterEntryTrampoline()
1131 __ lw(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1133 __ Branch(&bytecode_array_loaded); in Generate_InterpreterEntryTrampoline()
1138 __ bind(&switch_to_different_code_kind); in Generate_InterpreterEntryTrampoline()
1139 __ LeaveFrame(StackFrame::JAVA_SCRIPT); in Generate_InterpreterEntryTrampoline()
1140 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_InterpreterEntryTrampoline()
1141 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kCodeOffset)); in Generate_InterpreterEntryTrampoline()
1142 __ Addu(t0, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_InterpreterEntryTrampoline()
1143 __ sw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); in Generate_InterpreterEntryTrampoline()
1144 __ RecordWriteCodeEntryField(a1, t0, t1); in Generate_InterpreterEntryTrampoline()
1145 __ Jump(t0); in Generate_InterpreterEntryTrampoline()
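
The heart of the trampoline is the final dispatch sequence above: fetch the byte at bytecode_array + offset, scale it by the pointer size to index the dispatch table, load the handler's entry point, and call it. A minimal sketch:

    #include <cstddef>
    #include <cstdint>
    using BytecodeHandler = void (*)();
    inline void DispatchOnce(const uint8_t* bytecode_array, size_t offset,
                             BytecodeHandler const* dispatch_table) {
      uint8_t bytecode = bytecode_array[offset];  // lbu(a0, MemOperand(a0))
      dispatch_table[bytecode]();                 // Lsa(at, ...); lw(at, ...); Call(at)
    }
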
1154 __ LoadRoot(scratch1, Heap::kRealStackLimitRootIndex); in Generate_StackOverflowCheck()
1157 __ subu(scratch1, sp, scratch1); in Generate_StackOverflowCheck()
1159 __ sll(scratch2, num_args, kPointerSizeLog2); in Generate_StackOverflowCheck()
1161 __ Branch(stack_overflow, le, scratch1, Operand(scratch2)); in Generate_StackOverflowCheck()
1172 __ mov(scratch2, num_args); in Generate_InterpreterPushArgs()
1173 __ sll(scratch2, scratch2, kPointerSizeLog2); in Generate_InterpreterPushArgs()
1174 __ Subu(scratch2, index, Operand(scratch2)); in Generate_InterpreterPushArgs()
1178 __ Branch(&loop_check); in Generate_InterpreterPushArgs()
1179 __ bind(&loop_header); in Generate_InterpreterPushArgs()
1180 __ lw(scratch, MemOperand(index)); in Generate_InterpreterPushArgs()
1181 __ Addu(index, index, Operand(-kPointerSize)); in Generate_InterpreterPushArgs()
1182 __ push(scratch); in Generate_InterpreterPushArgs()
1183 __ bind(&loop_check); in Generate_InterpreterPushArgs()
1184 __ Branch(&loop_header, gt, index, Operand(scratch2)); in Generate_InterpreterPushArgs()
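
Generate_InterpreterPushArgs above copies num_args words onto the machine stack: scratch2 is set to index - num_args * kPointerSize as the stop boundary, and the loop walks index downward, pushing each word, while index stays above that boundary. A hedged sketch over raw words:

    #include <cstdint>
    #include <vector>
    inline void PushInterpreterArgs(const uint32_t* index, uint32_t num_args,
                                    std::vector<uint32_t>* stack) {
      const uint32_t* last = index - num_args;          // scratch2 = index - (num_args << 2)
      for (const uint32_t* p = index; p > last; --p) {  // Branch(&loop_header, gt, index, scratch2)
        stack->push_back(*p);                           // lw(scratch, MemOperand(index)); push
      }                                                 // Addu(index, index, -kPointerSize)
    }
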
1200 __ Addu(t0, a0, Operand(1)); // Add one for receiver. in Generate_InterpreterPushArgsAndCallImpl()
1207 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny, in Generate_InterpreterPushArgsAndCallImpl()
1212 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, in Generate_InterpreterPushArgsAndCallImpl()
1217 __ bind(&stack_overflow); in Generate_InterpreterPushArgsAndCallImpl()
1219 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterPushArgsAndCallImpl()
1221 __ break_(0xCC); in Generate_InterpreterPushArgsAndCallImpl()
1238 __ push(zero_reg); in Generate_InterpreterPushArgsAndConstructImpl()
1243 __ AssertUndefinedOrAllocationSite(a2, t0); in Generate_InterpreterPushArgsAndConstructImpl()
1245 __ AssertFunction(a1); in Generate_InterpreterPushArgsAndConstructImpl()
1249 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_InterpreterPushArgsAndConstructImpl()
1250 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset)); in Generate_InterpreterPushArgsAndConstructImpl()
1251 __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_InterpreterPushArgsAndConstructImpl()
1252 __ Jump(at); in Generate_InterpreterPushArgsAndConstructImpl()
1256 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate_InterpreterPushArgsAndConstructImpl()
1259 __ bind(&stack_overflow); in Generate_InterpreterPushArgsAndConstructImpl()
1261 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterPushArgsAndConstructImpl()
1263 __ break_(0xCC); in Generate_InterpreterPushArgsAndConstructImpl()
1280 __ Addu(t0, a0, Operand(1)); // Add one for receiver. in Generate_InterpreterPushArgsAndConstructArray()
1286 __ mov(a3, a1); in Generate_InterpreterPushArgsAndConstructArray()
1289 __ TailCallStub(&stub); in Generate_InterpreterPushArgsAndConstructArray()
1291 __ bind(&stack_overflow); in Generate_InterpreterPushArgsAndConstructArray()
1293 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterPushArgsAndConstructArray()
1295 __ break_(0xCC); in Generate_InterpreterPushArgsAndConstructArray()
1305 __ li(t0, Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline())); in Generate_InterpreterEnterBytecode()
1306 __ Addu(ra, t0, Operand(interpreter_entry_return_pc_offset->value() + in Generate_InterpreterEnterBytecode()
1310 __ li(kInterpreterDispatchTableRegister, in Generate_InterpreterEnterBytecode()
1315 __ lw(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEnterBytecode()
1320 __ SmiTst(kInterpreterBytecodeArrayRegister, at); in Generate_InterpreterEnterBytecode()
1321 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at, in Generate_InterpreterEnterBytecode()
1323 __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1); in Generate_InterpreterEnterBytecode()
1324 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1, in Generate_InterpreterEnterBytecode()
1329 __ lw(kInterpreterBytecodeOffsetRegister, in Generate_InterpreterEnterBytecode()
1331 __ SmiUntag(kInterpreterBytecodeOffsetRegister); in Generate_InterpreterEnterBytecode()
1334 __ Addu(a1, kInterpreterBytecodeArrayRegister, in Generate_InterpreterEnterBytecode()
1336 __ lbu(a1, MemOperand(a1)); in Generate_InterpreterEnterBytecode()
1337 __ Lsa(a1, kInterpreterDispatchTableRegister, a1, kPointerSizeLog2); in Generate_InterpreterEnterBytecode()
1338 __ lw(a1, MemOperand(a1)); in Generate_InterpreterEnterBytecode()
1339 __ Jump(a1); in Generate_InterpreterEnterBytecode()
1346 __ lw(a1, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp)); in Generate_InterpreterEnterBytecodeAdvance()
1347 __ lw(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp)); in Generate_InterpreterEnterBytecodeAdvance()
1348 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); in Generate_InterpreterEnterBytecodeAdvance()
1351 __ Push(kInterpreterAccumulatorRegister, a1, a2); in Generate_InterpreterEnterBytecodeAdvance()
1352 __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset); in Generate_InterpreterEnterBytecodeAdvance()
1353 __ mov(a2, v0); // Result is the new bytecode offset. in Generate_InterpreterEnterBytecodeAdvance()
1354 __ Pop(kInterpreterAccumulatorRegister); in Generate_InterpreterEnterBytecodeAdvance()
1356 __ sw(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp)); in Generate_InterpreterEnterBytecodeAdvance()
1379 __ push(argument_count); in Generate_CompileLazy()
1380 __ push(new_target); in Generate_CompileLazy()
1381 __ push(closure); in Generate_CompileLazy()
1385 __ lw(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset)); in Generate_CompileLazy()
1386 __ lw(map, FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset)); in Generate_CompileLazy()
1387 __ lw(index, FieldMemOperand(map, FixedArray::kLengthOffset)); in Generate_CompileLazy()
1388 __ Branch(&gotta_call_runtime, lt, index, Operand(Smi::FromInt(2))); in Generate_CompileLazy()
1397 __ lw(native_context, NativeContextMemOperand()); in Generate_CompileLazy()
1399 __ bind(&loop_top); in Generate_CompileLazy()
1404 __ sll(at, index, kPointerSizeLog2 - kSmiTagSize); in Generate_CompileLazy()
1405 __ Addu(array_pointer, map, Operand(at)); in Generate_CompileLazy()
1406 __ lw(temp, FieldMemOperand(array_pointer, in Generate_CompileLazy()
1408 __ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset)); in Generate_CompileLazy()
1409 __ Branch(&loop_bottom, ne, temp, Operand(native_context)); in Generate_CompileLazy()
1411 __ lw(temp, FieldMemOperand(array_pointer, in Generate_CompileLazy()
1414 __ Branch(&loop_bottom, ne, temp, Operand(Smi::FromInt(bailout_id))); in Generate_CompileLazy()
1416 __ lw(temp, FieldMemOperand(array_pointer, in Generate_CompileLazy()
1418 __ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset)); in Generate_CompileLazy()
1419 __ JumpIfSmi(temp, &gotta_call_runtime); in Generate_CompileLazy()
1422 __ lw(t0, MemOperand(sp, 0)); in Generate_CompileLazy()
1423 __ sw(temp, FieldMemOperand(t0, JSFunction::kLiteralsOffset)); in Generate_CompileLazy()
1424 __ push(index); in Generate_CompileLazy()
1425 __ RecordWriteField(t0, JSFunction::kLiteralsOffset, temp, index, in Generate_CompileLazy()
1428 __ pop(index); in Generate_CompileLazy()
1432 __ lw(entry, in Generate_CompileLazy()
1435 __ lw(entry, FieldMemOperand(entry, WeakCell::kValueOffset)); in Generate_CompileLazy()
1436 __ JumpIfSmi(entry, &try_shared); in Generate_CompileLazy()
1439 __ pop(closure); in Generate_CompileLazy()
1441 __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_CompileLazy()
1442 __ sw(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset)); in Generate_CompileLazy()
1443 __ RecordWriteCodeEntryField(closure, entry, t1); in Generate_CompileLazy()
1449 __ lw(t1, in Generate_CompileLazy()
1451 __ sw(t1, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset)); in Generate_CompileLazy()
1452 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, t1, a0, in Generate_CompileLazy()
1457 __ sw(closure, in Generate_CompileLazy()
1460 __ mov(t1, closure); in Generate_CompileLazy()
1461 __ RecordWriteContextSlot(native_context, function_list_offset, closure, a0, in Generate_CompileLazy()
1463 __ mov(closure, t1); in Generate_CompileLazy()
1464 __ pop(new_target); in Generate_CompileLazy()
1465 __ pop(argument_count); in Generate_CompileLazy()
1466 __ Jump(entry); in Generate_CompileLazy()
1468 __ bind(&loop_bottom); in Generate_CompileLazy()
1469 __ Subu(index, index, in Generate_CompileLazy()
1471 __ Branch(&loop_top, gt, index, Operand(Smi::FromInt(1))); in Generate_CompileLazy()
1474 __ jmp(&gotta_call_runtime); in Generate_CompileLazy()
1476 __ bind(&try_shared); in Generate_CompileLazy()
1477 __ pop(closure); in Generate_CompileLazy()
1478 __ pop(new_target); in Generate_CompileLazy()
1479 __ pop(argument_count); in Generate_CompileLazy()
1480 __ lw(entry, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset)); in Generate_CompileLazy()
1482 __ lbu(t1, FieldMemOperand(entry, in Generate_CompileLazy()
1484 __ And(t1, t1, in Generate_CompileLazy()
1486 __ Branch(&gotta_call_runtime_no_stack, ne, t1, Operand(zero_reg)); in Generate_CompileLazy()
1488 __ lw(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset)); in Generate_CompileLazy()
1489 __ lw(t1, FieldMemOperand(entry, Code::kFlagsOffset)); in Generate_CompileLazy()
1490 __ And(t1, t1, Operand(Code::KindField::kMask)); in Generate_CompileLazy()
1491 __ srl(t1, t1, Code::KindField::kShift); in Generate_CompileLazy()
1492 __ Branch(&gotta_call_runtime_no_stack, eq, t1, Operand(Code::BUILTIN)); in Generate_CompileLazy()
1494 __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_CompileLazy()
1495 __ sw(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset)); in Generate_CompileLazy()
1496 __ RecordWriteCodeEntryField(closure, entry, t1); in Generate_CompileLazy()
1497 __ Jump(entry); in Generate_CompileLazy()
1499 __ bind(&gotta_call_runtime); in Generate_CompileLazy()
1500 __ pop(closure); in Generate_CompileLazy()
1501 __ pop(new_target); in Generate_CompileLazy()
1502 __ pop(argument_count); in Generate_CompileLazy()
1503 __ bind(&gotta_call_runtime_no_stack); in Generate_CompileLazy()
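
Generate_CompileLazy above first searches the SharedFunctionInfo's optimized code map before falling back to the runtime: walking the FixedArray from the end, it compares each entry's native-context weak cell against the current native context and its bailout id against the expected one; on a hit, the cached literals and code are installed on the closure. A hedged sketch of the lookup, with the entry layout simplified to just the fields the listing touches:

    struct CodeMapEntry {         // simplified; the real entries are weak cells
      const void* native_context;
      int bailout_id;
      void* literals;
      void* code;
    };
    // Returns the matching entry, or nullptr (the &gotta_call_runtime path).
    inline const CodeMapEntry* FindCachedCode(const CodeMapEntry* entries,
                                              int count, const void* ctx,
                                              int expected_bailout_id) {
      for (int i = count - 1; i >= 0; --i) {             // index counts down via &loop_bottom
        if (entries[i].native_context != ctx) continue;  // Branch(&loop_bottom, ne, temp, ...)
        if (entries[i].bailout_id != expected_bailout_id) continue;
        return &entries[i];
      }
      return nullptr;
    }
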
1530 __ Move(t4, a0); in Generate_InstantiateAsmJs()
1533 __ SmiTag(a0); in Generate_InstantiateAsmJs()
1534 __ Push(a0, a1, a3, a1); in Generate_InstantiateAsmJs()
1541 __ Branch(&over, ne, t4, Operand(j)); in Generate_InstantiateAsmJs()
1544 __ lw(t4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset + in Generate_InstantiateAsmJs()
1546 __ push(t4); in Generate_InstantiateAsmJs()
1549 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate_InstantiateAsmJs()
1552 __ jmp(&args_done); in Generate_InstantiateAsmJs()
1553 __ bind(&over); in Generate_InstantiateAsmJs()
1556 __ bind(&args_done); in Generate_InstantiateAsmJs()
1559 __ CallRuntime(Runtime::kInstantiateAsmJs, 4); in Generate_InstantiateAsmJs()
1561 __ JumpIfSmi(v0, &failed); in Generate_InstantiateAsmJs()
1563 __ Drop(2); in Generate_InstantiateAsmJs()
1564 __ pop(t4); in Generate_InstantiateAsmJs()
1565 __ SmiUntag(t4); in Generate_InstantiateAsmJs()
1568 __ Addu(t4, t4, Operand(1)); in Generate_InstantiateAsmJs()
1569 __ Lsa(sp, sp, t4, kPointerSizeLog2); in Generate_InstantiateAsmJs()
1570 __ Ret(); in Generate_InstantiateAsmJs()
1572 __ bind(&failed); in Generate_InstantiateAsmJs()
1574 __ Pop(a0, a1, a3); in Generate_InstantiateAsmJs()
1575 __ SmiUntag(a0); in Generate_InstantiateAsmJs()
1589 __ Subu(a0, a0, Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize)); in GenerateMakeCodeYoungAgainCommon()
1599 __ MultiPush(saved_regs); in GenerateMakeCodeYoungAgainCommon()
1600 __ PrepareCallCFunction(2, 0, a2); in GenerateMakeCodeYoungAgainCommon()
1601 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate()))); in GenerateMakeCodeYoungAgainCommon()
1602 __ CallCFunction( in GenerateMakeCodeYoungAgainCommon()
1604 __ MultiPop(saved_regs); in GenerateMakeCodeYoungAgainCommon()
1605 __ Jump(a0); in GenerateMakeCodeYoungAgainCommon()
1627 __ Subu(a0, a0, Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize)); in CODE_AGE_LIST()
1637 __ MultiPush(saved_regs); in CODE_AGE_LIST()
1638 __ PrepareCallCFunction(2, 0, a2); in CODE_AGE_LIST()
1639 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate()))); in CODE_AGE_LIST()
1640 __ CallCFunction( in CODE_AGE_LIST()
1643 __ MultiPop(saved_regs); in CODE_AGE_LIST()
1646 __ PushStandardFrame(a1); in CODE_AGE_LIST()
1649 __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength)); in CODE_AGE_LIST()
1650 __ Jump(a0); in CODE_AGE_LIST()
1669 __ MultiPush(kJSCallerSaved | kCalleeSaved); in Generate_NotifyStubFailureHelper()
1671 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles); in Generate_NotifyStubFailureHelper()
1672 __ MultiPop(kJSCallerSaved | kCalleeSaved); in Generate_NotifyStubFailureHelper()
1675 __ Addu(sp, sp, Operand(kPointerSize)); // Ignore state in Generate_NotifyStubFailureHelper()
1676 __ Jump(ra); // Jump to miss handler in Generate_NotifyStubFailureHelper()
1692 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type)))); in Generate_NotifyDeoptimizedHelper()
1693 __ push(a0); in Generate_NotifyDeoptimizedHelper()
1694 __ CallRuntime(Runtime::kNotifyDeoptimized); in Generate_NotifyDeoptimizedHelper()
1698 __ lw(t2, MemOperand(sp, 0 * kPointerSize)); in Generate_NotifyDeoptimizedHelper()
1699 __ SmiUntag(t2); in Generate_NotifyDeoptimizedHelper()
1702 __ Branch(&with_tos_register, ne, t2, in Generate_NotifyDeoptimizedHelper()
1704 __ Ret(USE_DELAY_SLOT); in Generate_NotifyDeoptimizedHelper()
1706 __ Addu(sp, sp, Operand(1 * kPointerSize)); // Remove state. in Generate_NotifyDeoptimizedHelper()
1708 __ bind(&with_tos_register); in Generate_NotifyDeoptimizedHelper()
1710 __ lw(v0, MemOperand(sp, 1 * kPointerSize)); in Generate_NotifyDeoptimizedHelper()
1711 __ Branch(&unknown_state, ne, t2, in Generate_NotifyDeoptimizedHelper()
1714 __ Ret(USE_DELAY_SLOT); in Generate_NotifyDeoptimizedHelper()
1716 __ Addu(sp, sp, Operand(2 * kPointerSize)); // Remove state. in Generate_NotifyDeoptimizedHelper()
1718 __ bind(&unknown_state); in Generate_NotifyDeoptimizedHelper()
1719 __ stop("no cases left"); in Generate_NotifyDeoptimizedHelper()
1744 __ lw(signature, FieldMemOperand(function_template_info, in CompatibleReceiverCheck()
1747 __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex, in CompatibleReceiverCheck()
1751 __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); in CompatibleReceiverCheck()
1753 __ bind(&prototype_loop_start); in CompatibleReceiverCheck()
1756 __ GetMapConstructor(constructor, map, scratch, scratch); in CompatibleReceiverCheck()
1758 __ Branch(&next_prototype, ne, scratch, Operand(JS_FUNCTION_TYPE)); in CompatibleReceiverCheck()
1760 __ lw(type, in CompatibleReceiverCheck()
1762 __ lw(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset)); in CompatibleReceiverCheck()
1766 __ bind(&function_template_loop); in CompatibleReceiverCheck()
1769 __ Branch(&receiver_check_passed, eq, signature, Operand(type), in CompatibleReceiverCheck()
1774 __ JumpIfSmi(type, &next_prototype); in CompatibleReceiverCheck()
1775 __ GetObjectType(type, scratch, scratch); in CompatibleReceiverCheck()
1776 __ Branch(&next_prototype, ne, scratch, Operand(FUNCTION_TEMPLATE_INFO_TYPE)); in CompatibleReceiverCheck()
1779 __ lw(type, in CompatibleReceiverCheck()
1781 __ Branch(&function_template_loop); in CompatibleReceiverCheck()
1784 __ bind(&next_prototype); in CompatibleReceiverCheck()
1785 __ lw(scratch, FieldMemOperand(map, Map::kBitField3Offset)); in CompatibleReceiverCheck()
1786 __ DecodeField<Map::HasHiddenPrototype>(scratch); in CompatibleReceiverCheck()
1787 __ Branch(receiver_check_failed, eq, scratch, Operand(zero_reg)); in CompatibleReceiverCheck()
1788 __ lw(receiver, FieldMemOperand(map, Map::kPrototypeOffset)); in CompatibleReceiverCheck()
1789 __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); in CompatibleReceiverCheck()
1791 __ Branch(&prototype_loop_start); in CompatibleReceiverCheck()
1793 __ bind(&receiver_check_passed); in CompatibleReceiverCheck()
1808 __ lw(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_HandleFastApiCall()
1809 __ lw(t1, FieldMemOperand(t1, SharedFunctionInfo::kFunctionDataOffset)); in Generate_HandleFastApiCall()
1813 __ Lsa(t8, sp, a0, kPointerSizeLog2); in Generate_HandleFastApiCall()
1814 __ lw(t0, MemOperand(t8)); in Generate_HandleFastApiCall()
1819 __ lw(t2, FieldMemOperand(t1, FunctionTemplateInfo::kCallCodeOffset)); in Generate_HandleFastApiCall()
1820 __ lw(t2, FieldMemOperand(t2, CallHandlerInfo::kFastHandlerOffset)); in Generate_HandleFastApiCall()
1821 __ Addu(t2, t2, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_HandleFastApiCall()
1822 __ Jump(t2); in Generate_HandleFastApiCall()
1825 __ bind(&receiver_check_failed); in Generate_HandleFastApiCall()
1827 __ Addu(t8, t8, Operand(kPointerSize)); in Generate_HandleFastApiCall()
1828 __ addu(sp, t8, zero_reg); in Generate_HandleFastApiCall()
1829 __ TailCallRuntime(Runtime::kThrowIllegalInvocation); in Generate_HandleFastApiCall()
1836 __ lw(a0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in Generate_OnStackReplacementHelper()
1837 __ lw(a0, MemOperand(a0, JavaScriptFrameConstants::kFunctionOffset)); in Generate_OnStackReplacementHelper()
1839 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); in Generate_OnStackReplacementHelper()
1845 __ push(a0); in Generate_OnStackReplacementHelper()
1846 __ CallRuntime(Runtime::kCompileForOnStackReplacement); in Generate_OnStackReplacementHelper()
1850 __ Ret(eq, v0, Operand(Smi::kZero)); in Generate_OnStackReplacementHelper()
1855 __ LeaveFrame(StackFrame::STUB); in Generate_OnStackReplacementHelper()
1860 __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag)); in Generate_OnStackReplacementHelper()
1864 __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt( in Generate_OnStackReplacementHelper()
1867 __ SmiUntag(a1); in Generate_OnStackReplacementHelper()
1871 __ addu(v0, v0, a1); in Generate_OnStackReplacementHelper()
1872 __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag); in Generate_OnStackReplacementHelper()
1875 __ Ret(); in Generate_OnStackReplacementHelper()
1901 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); in Generate_FunctionPrototypeApply()
1902 __ mov(a3, a2); in Generate_FunctionPrototypeApply()
1904 __ sll(scratch, a0, kPointerSizeLog2); in Generate_FunctionPrototypeApply()
1905 __ Addu(a0, sp, Operand(scratch)); in Generate_FunctionPrototypeApply()
1906 __ lw(a1, MemOperand(a0)); // receiver in Generate_FunctionPrototypeApply()
1907 __ Subu(a0, a0, Operand(kPointerSize)); in Generate_FunctionPrototypeApply()
1908 __ Branch(&no_arg, lt, a0, Operand(sp)); in Generate_FunctionPrototypeApply()
1909 __ lw(a2, MemOperand(a0)); // thisArg in Generate_FunctionPrototypeApply()
1910 __ Subu(a0, a0, Operand(kPointerSize)); in Generate_FunctionPrototypeApply()
1911 __ Branch(&no_arg, lt, a0, Operand(sp)); in Generate_FunctionPrototypeApply()
1912 __ lw(a3, MemOperand(a0)); // argArray in Generate_FunctionPrototypeApply()
1913 __ bind(&no_arg); in Generate_FunctionPrototypeApply()
1914 __ Addu(sp, sp, Operand(scratch)); in Generate_FunctionPrototypeApply()
1915 __ sw(a2, MemOperand(sp)); in Generate_FunctionPrototypeApply()
1916 __ mov(a0, a3); in Generate_FunctionPrototypeApply()
1927 __ JumpIfSmi(a1, &receiver_not_callable); in Generate_FunctionPrototypeApply()
1928 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset)); in Generate_FunctionPrototypeApply()
1929 __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset)); in Generate_FunctionPrototypeApply()
1930 __ And(t0, t0, Operand(1 << Map::kIsCallable)); in Generate_FunctionPrototypeApply()
1931 __ Branch(&receiver_not_callable, eq, t0, Operand(zero_reg)); in Generate_FunctionPrototypeApply()
1935 __ JumpIfRoot(a0, Heap::kNullValueRootIndex, &no_arguments); in Generate_FunctionPrototypeApply()
1936 __ JumpIfRoot(a0, Heap::kUndefinedValueRootIndex, &no_arguments); in Generate_FunctionPrototypeApply()
1940 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); in Generate_FunctionPrototypeApply()
1941 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); in Generate_FunctionPrototypeApply()
1945 __ bind(&no_arguments); in Generate_FunctionPrototypeApply()
1947 __ mov(a0, zero_reg); in Generate_FunctionPrototypeApply()
1948 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); in Generate_FunctionPrototypeApply()
1952 __ bind(&receiver_not_callable); in Generate_FunctionPrototypeApply()
1954 __ sw(a1, MemOperand(sp)); in Generate_FunctionPrototypeApply()
1955 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); in Generate_FunctionPrototypeApply()
1965 __ Branch(&done, ne, a0, Operand(zero_reg)); in Generate_FunctionPrototypeCall()
1966 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate_FunctionPrototypeCall()
1967 __ Addu(a0, a0, Operand(1)); in Generate_FunctionPrototypeCall()
1968 __ bind(&done); in Generate_FunctionPrototypeCall()
1973 __ Lsa(at, sp, a0, kPointerSizeLog2); in Generate_FunctionPrototypeCall()
1974 __ lw(a1, MemOperand(at)); in Generate_FunctionPrototypeCall()
1984 __ Lsa(a2, sp, a0, kPointerSizeLog2); in Generate_FunctionPrototypeCall()
1986 __ bind(&loop); in Generate_FunctionPrototypeCall()
1987 __ lw(at, MemOperand(a2, -kPointerSize)); in Generate_FunctionPrototypeCall()
1988 __ sw(at, MemOperand(a2)); in Generate_FunctionPrototypeCall()
1989 __ Subu(a2, a2, Operand(kPointerSize)); in Generate_FunctionPrototypeCall()
1990 __ Branch(&loop, ne, a2, Operand(sp)); in Generate_FunctionPrototypeCall()
1993 __ Subu(a0, a0, Operand(1)); in Generate_FunctionPrototypeCall()
1994 __ Pop(); in Generate_FunctionPrototypeCall()
1998 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); in Generate_FunctionPrototypeCall()
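
The loop above removes the function from the argument list in place: starting from the deepest slot (the old receiver), every slot is overwritten with the one nearer the stack top, after which argc is decremented and the now-duplicated top slot popped, leaving thisArg as the new receiver. A sketch over a word array where sp[argc] is the deepest slot:

    #include <cstdint>
    inline void ShiftArgsDown(uint32_t* sp, uint32_t argc) {
      for (uint32_t* p = sp + argc; p != sp; --p) {  // Lsa(a2, sp, a0, 2); loop until a2 == sp
        *p = *(p - 1);                               // lw(at, a2 - 4); sw(at, a2)
      }
      // caller then does Subu(a0, a0, 1) and Pop()s the stale top slot
    }
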
2016 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); in Generate_ReflectApply()
2017 __ mov(a2, a1); in Generate_ReflectApply()
2018 __ mov(a3, a1); in Generate_ReflectApply()
2019 __ sll(scratch, a0, kPointerSizeLog2); in Generate_ReflectApply()
2020 __ mov(a0, scratch); in Generate_ReflectApply()
2021 __ Subu(a0, a0, Operand(kPointerSize)); in Generate_ReflectApply()
2022 __ Branch(&no_arg, lt, a0, Operand(zero_reg)); in Generate_ReflectApply()
2023 __ Addu(a0, sp, Operand(a0)); in Generate_ReflectApply()
2024 __ lw(a1, MemOperand(a0)); // target in Generate_ReflectApply()
2025 __ Subu(a0, a0, Operand(kPointerSize)); in Generate_ReflectApply()
2026 __ Branch(&no_arg, lt, a0, Operand(sp)); in Generate_ReflectApply()
2027 __ lw(a2, MemOperand(a0)); // thisArgument in Generate_ReflectApply()
2028 __ Subu(a0, a0, Operand(kPointerSize)); in Generate_ReflectApply()
2029 __ Branch(&no_arg, lt, a0, Operand(sp)); in Generate_ReflectApply()
2030 __ lw(a3, MemOperand(a0)); // argumentsList in Generate_ReflectApply()
2031 __ bind(&no_arg); in Generate_ReflectApply()
2032 __ Addu(sp, sp, Operand(scratch)); in Generate_ReflectApply()
2033 __ sw(a2, MemOperand(sp)); in Generate_ReflectApply()
2034 __ mov(a0, a3); in Generate_ReflectApply()
2045 __ JumpIfSmi(a1, &target_not_callable); in Generate_ReflectApply()
2046 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset)); in Generate_ReflectApply()
2047 __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset)); in Generate_ReflectApply()
2048 __ And(t0, t0, Operand(1 << Map::kIsCallable)); in Generate_ReflectApply()
2049 __ Branch(&target_not_callable, eq, t0, Operand(zero_reg)); in Generate_ReflectApply()
2053 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); in Generate_ReflectApply()
2054 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); in Generate_ReflectApply()
2057 __ bind(&target_not_callable); in Generate_ReflectApply()
2059 __ sw(a1, MemOperand(sp)); in Generate_ReflectApply()
2060 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); in Generate_ReflectApply()
2080 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); in Generate_ReflectConstruct()
2081 __ mov(a2, a1); in Generate_ReflectConstruct()
2083 __ sll(scratch, a0, kPointerSizeLog2); in Generate_ReflectConstruct()
2084 __ Addu(a0, sp, Operand(scratch)); in Generate_ReflectConstruct()
2085 __ sw(a2, MemOperand(a0)); // receiver in Generate_ReflectConstruct()
2086 __ Subu(a0, a0, Operand(kPointerSize)); in Generate_ReflectConstruct()
2087 __ Branch(&no_arg, lt, a0, Operand(sp)); in Generate_ReflectConstruct()
2088 __ lw(a1, MemOperand(a0)); // target in Generate_ReflectConstruct()
2089 __ mov(a3, a1); // new.target defaults to target in Generate_ReflectConstruct()
2090 __ Subu(a0, a0, Operand(kPointerSize)); in Generate_ReflectConstruct()
2091 __ Branch(&no_arg, lt, a0, Operand(sp)); in Generate_ReflectConstruct()
2092 __ lw(a2, MemOperand(a0)); // argumentsList in Generate_ReflectConstruct()
2093 __ Subu(a0, a0, Operand(kPointerSize)); in Generate_ReflectConstruct()
2094 __ Branch(&no_arg, lt, a0, Operand(sp)); in Generate_ReflectConstruct()
2095 __ lw(a3, MemOperand(a0)); // new.target in Generate_ReflectConstruct()
2096 __ bind(&no_arg); in Generate_ReflectConstruct()
2097 __ Addu(sp, sp, Operand(scratch)); in Generate_ReflectConstruct()
2098 __ mov(a0, a2); in Generate_ReflectConstruct()
2110 __ JumpIfSmi(a1, &target_not_constructor); in Generate_ReflectConstruct()
2111 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset)); in Generate_ReflectConstruct()
2112 __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset)); in Generate_ReflectConstruct()
2113 __ And(t0, t0, Operand(1 << Map::kIsConstructor)); in Generate_ReflectConstruct()
2114 __ Branch(&target_not_constructor, eq, t0, Operand(zero_reg)); in Generate_ReflectConstruct()
2118 __ JumpIfSmi(a3, &new_target_not_constructor); in Generate_ReflectConstruct()
2119 __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset)); in Generate_ReflectConstruct()
2120 __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset)); in Generate_ReflectConstruct()
2121 __ And(t0, t0, Operand(1 << Map::kIsConstructor)); in Generate_ReflectConstruct()
2122 __ Branch(&new_target_not_constructor, eq, t0, Operand(zero_reg)); in Generate_ReflectConstruct()
2125 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); in Generate_ReflectConstruct()
2128 __ bind(&target_not_constructor); in Generate_ReflectConstruct()
2130 __ sw(a1, MemOperand(sp)); in Generate_ReflectConstruct()
2131 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); in Generate_ReflectConstruct()
2135 __ bind(&new_target_not_constructor); in Generate_ReflectConstruct()
2137 __ sw(a3, MemOperand(sp)); in Generate_ReflectConstruct()
2138 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); in Generate_ReflectConstruct()
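
Generate_ReflectConstruct uses the same guarded walk with one extra wrinkle at 2089-2090: new.target is seeded from target immediately after target is loaded, so a two-argument Reflect.construct(C, args) behaves like Reflect.construct(C, args, C). A minimal sketch of that defaulting rule (illustrative values, not V8 API):

    #include <cstdio>
    #include <string>
    #include <vector>

    int main() {
      // sp[1] = target, sp[0] = argumentsList (two-argument call).
      std::vector<std::string> stack = {"argsList", "C"};
      std::string target = "undefined", args_list = "undefined",
                  new_target = "undefined";

      int slot = static_cast<int>(stack.size()) - 1;
      if (slot >= 0) {
        target = stack[slot--];  // lw(a1, ...) at 2088
        new_target = target;     // mov(a3, a1) at 2089: the default
      }
      if (slot >= 0) args_list = stack[slot--];
      if (slot >= 0) new_target = stack[slot];  // explicit third argument wins

      printf("target=%s args=%s new.target=%s\n", target.c_str(),
             args_list.c_str(), new_target.c_str());
    }
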
2143 __ sll(a0, a0, kSmiTagSize); in EnterArgumentsAdaptorFrame()
2144 __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); in EnterArgumentsAdaptorFrame()
2145 __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit()); in EnterArgumentsAdaptorFrame()
2146 __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize)); in EnterArgumentsAdaptorFrame()
2156 __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize))); in LeaveArgumentsAdaptorFrame()
2158 __ mov(sp, fp); in LeaveArgumentsAdaptorFrame()
2159 __ MultiPop(fp.bit() | ra.bit()); in LeaveArgumentsAdaptorFrame()
2160 __ Lsa(sp, sp, a1, kPointerSizeLog2 - kSmiTagSize); in LeaveArgumentsAdaptorFrame()
2162 __ Addu(sp, sp, Operand(kPointerSize)); in LeaveArgumentsAdaptorFrame()
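
Two details here are easy to misread. EnterArgumentsAdaptorFrame saves the argument count smi-tagged (2143), and LeaveArgumentsAdaptorFrame undoes that with a single Lsa at 2160: shifting the smi by kPointerSizeLog2 - kSmiTagSize converts it straight to a byte offset without untagging first. On 32-bit MIPS, with kSmiTagSize = 1 and kPointerSizeLog2 = 2, the arithmetic works out as follows (plain C++ sketch, not V8 API):

    #include <cassert>
    #include <cstdint>

    int main() {
      const int kSmiTagSize = 1;       // 32-bit V8: low bit is the smi tag
      const int kPointerSizeLog2 = 2;  // 4-byte pointers

      uint32_t argc = 5;
      uint32_t smi = argc << kSmiTagSize;  // sll(a0, a0, kSmiTagSize) at 2143

      // Lsa(sp, sp, a1, kPointerSizeLog2 - kSmiTagSize) at 2160: one shift
      // turns the tagged count directly into argc * kPointerSize.
      uint32_t byte_offset = smi << (kPointerSizeLog2 - kSmiTagSize);
      assert(byte_offset == argc * 4);

      // Plus one more slot (2162) to drop the receiver as well.
      assert(byte_offset + 4 == (argc + 1) * 4);
      return 0;
    }
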
2177 __ JumpIfSmi(a0, &create_runtime); in Generate_Apply()
2180 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset)); in Generate_Apply()
2183 __ lw(t0, NativeContextMemOperand()); in Generate_Apply()
2186 __ lw(at, ContextMemOperand(t0, Context::SLOPPY_ARGUMENTS_MAP_INDEX)); in Generate_Apply()
2187 __ Branch(&create_arguments, eq, a2, Operand(at)); in Generate_Apply()
2188 __ lw(at, ContextMemOperand(t0, Context::STRICT_ARGUMENTS_MAP_INDEX)); in Generate_Apply()
2189 __ Branch(&create_arguments, eq, a2, Operand(at)); in Generate_Apply()
2192 __ lw(v0, FieldMemOperand(a2, HeapObject::kMapOffset)); in Generate_Apply()
2193 __ lbu(v0, FieldMemOperand(v0, Map::kInstanceTypeOffset)); in Generate_Apply()
2194 __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE)); in Generate_Apply()
2197 __ bind(&create_runtime); in Generate_Apply()
2200 __ Push(a1, a3, a0); in Generate_Apply()
2201 __ CallRuntime(Runtime::kCreateListFromArrayLike); in Generate_Apply()
2202 __ mov(a0, v0); in Generate_Apply()
2203 __ Pop(a1, a3); in Generate_Apply()
2204 __ lw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); in Generate_Apply()
2205 __ SmiUntag(a2); in Generate_Apply()
2207 __ Branch(&done_create); in Generate_Apply()
2210 __ bind(&create_arguments); in Generate_Apply()
2211 __ lw(a2, FieldMemOperand(a0, JSArgumentsObject::kLengthOffset)); in Generate_Apply()
2212 __ lw(t0, FieldMemOperand(a0, JSObject::kElementsOffset)); in Generate_Apply()
2213 __ lw(at, FieldMemOperand(t0, FixedArray::kLengthOffset)); in Generate_Apply()
2214 __ Branch(&create_runtime, ne, a2, Operand(at)); in Generate_Apply()
2215 __ SmiUntag(a2); in Generate_Apply()
2216 __ mov(a0, t0); in Generate_Apply()
2217 __ Branch(&done_create); in Generate_Apply()
2220 __ bind(&create_array); in Generate_Apply()
2221 __ lw(a2, FieldMemOperand(a2, Map::kBitField2Offset)); in Generate_Apply()
2222 __ DecodeField<Map::ElementsKindBits>(a2); in Generate_Apply()
2226 __ Branch(&create_runtime, hi, a2, Operand(FAST_ELEMENTS)); in Generate_Apply()
2227 __ Branch(&create_runtime, eq, a2, Operand(FAST_HOLEY_SMI_ELEMENTS)); in Generate_Apply()
2228 __ lw(a2, FieldMemOperand(a0, JSArray::kLengthOffset)); in Generate_Apply()
2229 __ lw(a0, FieldMemOperand(a0, JSArray::kElementsOffset)); in Generate_Apply()
2230 __ SmiUntag(a2); in Generate_Apply()
2232 __ bind(&done_create); in Generate_Apply()
2240 __ LoadRoot(t0, Heap::kRealStackLimitRootIndex); in Generate_Apply()
2243 __ Subu(t0, sp, t0); in Generate_Apply()
2245 __ sll(at, a2, kPointerSizeLog2); in Generate_Apply()
2246 __ Branch(&done, gt, t0, Operand(at)); // Signed comparison. in Generate_Apply()
2247 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_Apply()
2248 __ bind(&done); in Generate_Apply()
2261 __ mov(t0, zero_reg); in Generate_Apply()
2263 __ bind(&loop); in Generate_Apply()
2264 __ Branch(&done, eq, t0, Operand(a2)); in Generate_Apply()
2265 __ Lsa(at, a0, t0, kPointerSizeLog2); in Generate_Apply()
2266 __ lw(at, FieldMemOperand(at, FixedArray::kHeaderSize)); in Generate_Apply()
2267 __ Push(at); in Generate_Apply()
2268 __ Addu(t0, t0, Operand(1)); in Generate_Apply()
2269 __ Branch(&loop); in Generate_Apply()
2270 __ bind(&done); in Generate_Apply()
2271 __ Move(a0, t0); in Generate_Apply()
2277 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); in Generate_Apply()
2278 __ Branch(&construct, ne, a3, Operand(at)); in Generate_Apply()
2279 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); in Generate_Apply()
2280 __ bind(&construct); in Generate_Apply()
2281 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate_Apply()
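
The stack check at 2240-2247 is worth spelling out: it compares the remaining headroom (sp minus the real stack limit) against len * kPointerSize as a signed comparison, so an sp that has already slid below the limit yields negative headroom and fails the check. A host-side sketch of the same guard (illustrative values, not V8 API):

    #include <cstdint>
    #include <cstdio>

    // Returns true if pushing `len` pointer-size slots would stay above the
    // real stack limit. Mirrors the signed Subu/sll/Branch at 2243-2246.
    bool StackCheck(uintptr_t sp, uintptr_t real_stack_limit, uint32_t len) {
      int32_t headroom = static_cast<int32_t>(sp - real_stack_limit);
      int32_t needed = static_cast<int32_t>(len << 2);  // kPointerSizeLog2 == 2
      return headroom > needed;  // Branch(&done, gt, t0, Operand(at))
    }

    int main() {
      printf("%d\n", StackCheck(0x1000000, 0xF00000, 16));  // 1: fits
      printf("%d\n", StackCheck(0xF00010, 0xF00000, 16));   // 0: too big
      printf("%d\n", StackCheck(0xEFFFF0, 0xF00000, 1));    // 0: already past
      return 0;
    }
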
2325 __ li(at, Operand(is_tail_call_elimination_enabled)); in PrepareForTailCall()
2326 __ lb(scratch1, MemOperand(at)); in PrepareForTailCall()
2327 __ Branch(&done, eq, scratch1, Operand(zero_reg)); in PrepareForTailCall()
2332 __ lw(scratch3, MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset)); in PrepareForTailCall()
2334 __ Branch(&no_interpreter_frame, ne, scratch3, Operand(Smi::FromInt(StackFrame::STUB))); in PrepareForTailCall()
2336 __ lw(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in PrepareForTailCall()
2337 __ bind(&no_interpreter_frame); in PrepareForTailCall()
2343 __ lw(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in PrepareForTailCall()
2344 __ lw(scratch3, MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset)); in PrepareForTailCall()
2346 __ Branch(&no_arguments_adaptor, ne, scratch3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); in PrepareForTailCall()
2350 __ mov(fp, scratch2); in PrepareForTailCall()
2351 __ lw(caller_args_count_reg, MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); in PrepareForTailCall()
2353 __ SmiUntag(caller_args_count_reg); in PrepareForTailCall()
2354 __ Branch(&formal_parameter_count_loaded); in PrepareForTailCall()
2356 __ bind(&no_arguments_adaptor); in PrepareForTailCall()
2358 __ lw(scratch1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); in PrepareForTailCall()
2360 __ lw(scratch1, FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset)); in PrepareForTailCall()
2362 __ lw(caller_args_count_reg, FieldMemOperand(scratch1, SharedFunctionInfo::kFormalParameterCountOffset)); in PrepareForTailCall()
2365 __ SmiUntag(caller_args_count_reg); in PrepareForTailCall()
2367 __ bind(&formal_parameter_count_loaded); in PrepareForTailCall()
2370 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2, scratch3); in PrepareForTailCall()
2372 __ bind(&done); in PrepareForTailCall()
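
PrepareForTailCall has to learn how many arguments the caller's frame holds before it can fold that frame away. The walk above skips an optional stub/interpreter frame first, then checks for an arguments adaptor frame: an adaptor frame records the actual count directly, otherwise the code falls back to the function's formal parameter count. A condensed model of that decision (hypothetical structs, not V8 frame types):

    #include <cstdio>

    enum class FrameType { kStub, kArgumentsAdaptor, kJavaScript };

    struct Frame {
      FrameType type;
      Frame* caller;
      int args;  // adaptor: actual count; JS frame: formal parameter count
    };

    // Mirrors 2332-2367: skip one stub frame if present, then take the count
    // from an adaptor frame or from the current function's formals.
    int CallerArgsCount(Frame* fp) {
      if (fp->type == FrameType::kStub) fp = fp->caller;     // 2336
      if (fp->caller->type == FrameType::kArgumentsAdaptor)  // 2343-2346
        return fp->caller->args;                             // 2350-2353
      return fp->args;                                       // 2358-2365
    }

    int main() {
      Frame adaptor{FrameType::kArgumentsAdaptor, nullptr, 5};
      Frame js{FrameType::kJavaScript, &adaptor, 2};
      printf("%d\n", CallerArgsCount(&js));   // 5: adaptor has the real count
      Frame plain{FrameType::kJavaScript, nullptr, 2};
      Frame js2{FrameType::kJavaScript, &plain, 3};
      printf("%d\n", CallerArgsCount(&js2));  // 3: fall back to formals
      return 0;
    }
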
2384 __ AssertFunction(a1); in Generate_CallFunction()
2389 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_CallFunction()
2390 __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFunctionKindByteOffset)); in Generate_CallFunction()
2391 __ And(at, a3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte)); in Generate_CallFunction()
2392 __ Branch(&class_constructor, ne, at, Operand(zero_reg)); in Generate_CallFunction()
2399 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate_CallFunction()
2402 __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset)); in Generate_CallFunction()
2403 __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) | (1 << SharedFunctionInfo::kStrictModeBitWithinByte))); in Generate_CallFunction()
2405 __ Branch(&done_convert, ne, at, Operand(zero_reg)); in Generate_CallFunction()
2416 __ LoadGlobalProxy(a3); in Generate_CallFunction()
2419 __ Lsa(at, sp, a0, kPointerSizeLog2); in Generate_CallFunction()
2420 __ lw(a3, MemOperand(at)); in Generate_CallFunction()
2421 __ JumpIfSmi(a3, &convert_to_object); in Generate_CallFunction()
2423 __ GetObjectType(a3, t0, t0); in Generate_CallFunction()
2424 __ Branch(&done_convert, hs, t0, Operand(FIRST_JS_RECEIVER_TYPE)); in Generate_CallFunction()
2427 __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex, &convert_global_proxy); in Generate_CallFunction()
2429 __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object); in Generate_CallFunction()
2430 __ bind(&convert_global_proxy); in Generate_CallFunction()
2433 __ LoadGlobalProxy(a3); in Generate_CallFunction()
2435 __ Branch(&convert_receiver); in Generate_CallFunction()
2437 __ bind(&convert_to_object); in Generate_CallFunction()
2443 __ sll(a0, a0, kSmiTagSize); // Smi tagged. in Generate_CallFunction()
2444 __ Push(a0, a1); in Generate_CallFunction()
2445 __ mov(a0, a3); in Generate_CallFunction()
2446 __ Push(cp); in Generate_CallFunction()
2447 __ Call(masm->isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET); in Generate_CallFunction()
2449 __ Pop(cp); in Generate_CallFunction()
2450 __ mov(a3, v0); in Generate_CallFunction()
2451 __ Pop(a0, a1); in Generate_CallFunction()
2452 __ sra(a0, a0, kSmiTagSize); // Un-tag. in Generate_CallFunction()
2454 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_CallFunction()
2455 __ bind(&convert_receiver); in Generate_CallFunction()
2457 __ Lsa(at, sp, a0, kPointerSizeLog2); in Generate_CallFunction()
2458 __ sw(a3, MemOperand(at)); in Generate_CallFunction()
2460 __ bind(&done_convert); in Generate_CallFunction()
2473 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset)); in Generate_CallFunction()
2475 __ sra(a2, a2, kSmiTagSize); // Un-tag. in Generate_CallFunction()
2478 __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION, CheckDebugStepCallWrapper()); in Generate_CallFunction()
2482 __ bind(&class_constructor); in Generate_CallFunction()
2485 __ Push(a1); in Generate_CallFunction()
2486 __ CallRuntime(Runtime::kThrowConstructorNonCallableError); in Generate_CallFunction()
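
The receiver handling at 2402-2458 implements the sloppy-mode rule: strict-mode and native functions take the receiver as-is; otherwise, a receiver that is already a JSReceiver passes through, null or undefined become the global proxy, and any remaining primitive goes through ToObject. A decision-table sketch (hypothetical enum; ToObject stands in for the builtin call at 2447):

    #include <cstdio>

    enum class Kind { kUndefined, kNull, kSmi, kString, kReceiver };

    // Mirrors the branches at 2402-2435 for a sloppy, non-native function.
    const char* ConvertReceiver(Kind r) {
      if (r == Kind::kReceiver) return "unchanged";  // 2423-2424
      if (r == Kind::kUndefined || r == Kind::kNull)
        return "global proxy";                       // 2427-2433
      return "ToObject(receiver)";                   // 2437-2449
    }

    int main() {
      printf("%s\n", ConvertReceiver(Kind::kReceiver));  // unchanged
      printf("%s\n", ConvertReceiver(Kind::kNull));      // global proxy
      printf("%s\n", ConvertReceiver(Kind::kSmi));       // ToObject(receiver)
      return 0;
    }
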
2497 __ AssertBoundFunction(a1); in Generate_CallBoundFunctionImpl()
2505 __ lw(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset)); in Generate_CallBoundFunctionImpl()
2506 __ Lsa(t0, sp, a0, kPointerSizeLog2); in Generate_CallBoundFunctionImpl()
2507 __ sw(at, MemOperand(t0)); in Generate_CallBoundFunctionImpl()
2511 __ lw(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset)); in Generate_CallBoundFunctionImpl()
2512 __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset)); in Generate_CallBoundFunctionImpl()
2513 __ SmiUntag(t0); in Generate_CallBoundFunctionImpl()
2525 __ sll(t1, t0, kPointerSizeLog2); in Generate_CallBoundFunctionImpl()
2526 __ Subu(sp, sp, Operand(t1)); in Generate_CallBoundFunctionImpl()
2529 __ LoadRoot(at, Heap::kRealStackLimitRootIndex); in Generate_CallBoundFunctionImpl()
2530 __ Branch(&done, gt, sp, Operand(at)); // Signed comparison. in Generate_CallBoundFunctionImpl()
2532 __ Addu(sp, sp, Operand(t1)); in Generate_CallBoundFunctionImpl()
2535 __ EnterFrame(StackFrame::INTERNAL); in Generate_CallBoundFunctionImpl()
2536 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_CallBoundFunctionImpl()
2538 __ bind(&done); in Generate_CallBoundFunctionImpl()
2544 __ mov(t1, zero_reg); in Generate_CallBoundFunctionImpl()
2545 __ bind(&loop); in Generate_CallBoundFunctionImpl()
2546 __ Branch(&done_loop, gt, t1, Operand(a0)); in Generate_CallBoundFunctionImpl()
2547 __ Lsa(t2, sp, t0, kPointerSizeLog2); in Generate_CallBoundFunctionImpl()
2548 __ lw(at, MemOperand(t2)); in Generate_CallBoundFunctionImpl()
2549 __ Lsa(t2, sp, t1, kPointerSizeLog2); in Generate_CallBoundFunctionImpl()
2550 __ sw(at, MemOperand(t2)); in Generate_CallBoundFunctionImpl()
2551 __ Addu(t0, t0, Operand(1)); in Generate_CallBoundFunctionImpl()
2552 __ Addu(t1, t1, Operand(1)); in Generate_CallBoundFunctionImpl()
2553 __ Branch(&loop); in Generate_CallBoundFunctionImpl()
2554 __ bind(&done_loop); in Generate_CallBoundFunctionImpl()
2560 __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset)); in Generate_CallBoundFunctionImpl()
2561 __ SmiUntag(t0); in Generate_CallBoundFunctionImpl()
2562 __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in Generate_CallBoundFunctionImpl()
2563 __ bind(&loop); in Generate_CallBoundFunctionImpl()
2564 __ Subu(t0, t0, Operand(1)); in Generate_CallBoundFunctionImpl()
2565 __ Branch(&done_loop, lt, t0, Operand(zero_reg)); in Generate_CallBoundFunctionImpl()
2566 __ Lsa(t1, a2, t0, kPointerSizeLog2); in Generate_CallBoundFunctionImpl()
2567 __ lw(at, MemOperand(t1)); in Generate_CallBoundFunctionImpl()
2568 __ Lsa(t1, sp, a0, kPointerSizeLog2); in Generate_CallBoundFunctionImpl()
2569 __ sw(at, MemOperand(t1)); in Generate_CallBoundFunctionImpl()
2570 __ Addu(a0, a0, Operand(1)); in Generate_CallBoundFunctionImpl()
2571 __ Branch(&loop); in Generate_CallBoundFunctionImpl()
2572 __ bind(&done_loop); in Generate_CallBoundFunctionImpl()
2576 __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset)); in Generate_CallBoundFunctionImpl()
2577 __ li(at, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny, masm->isolate()))); in Generate_CallBoundFunctionImpl()
2579 __ lw(at, MemOperand(at)); in Generate_CallBoundFunctionImpl()
2580 __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_CallBoundFunctionImpl()
2581 __ Jump(at); in Generate_CallBoundFunctionImpl()
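
Taken together, the two loops at 2544-2571 splice the bound arguments in below the existing ones: the first loop slides the receiver and the a0 current arguments down by the bound count, the second walks [[BoundArguments]] backwards into the freed slots while bumping a0. A vector-based model of the resulting layout (plain C++, illustrative values):

    #include <cstdio>
    #include <string>
    #include <vector>

    int main() {
      // sp[0..a0]: arguments then receiver, as for a normal call; the
      // receiver slot already holds [[BoundThis]] (patched at 2505-2507).
      std::vector<std::string> stack = {"y", "x", "boundThis"};
      int a0 = 2;
      std::vector<std::string> bound = {"b0", "b1"};  // [[BoundArguments]]
      int t0 = static_cast<int>(bound.size());

      // First loop (2544-2553): grow the stack and slide a0 + 1 slots down.
      stack.insert(stack.begin(), t0, "");
      for (int i = 0; i <= a0; ++i) stack[i] = stack[i + t0];

      // Second loop (2560-2571): copy bound args into the gap, walking the
      // FixedArray from its last element (t0 - 1) down to 0.
      for (int i = t0 - 1; i >= 0; --i) stack[a0 + (t0 - 1 - i)] = bound[i];
      a0 += t0;  // one Addu(a0, a0, 1) per copied element

      // Prints y, x, b1, b0, boundThis: the call is target(boundThis, b0, b1, x, y).
      for (int i = 0; i <= a0; ++i) printf("sp[%d] = %s\n", i, stack[i].c_str());
    }
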
2593 __ JumpIfSmi(a1, &non_callable); in Generate_Call()
2594 __ bind(&non_smi); in Generate_Call()
2595 __ GetObjectType(a1, t1, t2); in Generate_Call()
2596 __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode), RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE)); in Generate_Call()
2598 __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode), RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE)); in Generate_Call()
2602 __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset)); in Generate_Call()
2603 __ And(t1, t1, Operand(1 << Map::kIsCallable)); in Generate_Call()
2604 __ Branch(&non_callable, eq, t1, Operand(zero_reg)); in Generate_Call()
2606 __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE)); in Generate_Call()
2614 __ Push(a1); in Generate_Call()
2617 __ Addu(a0, a0, 2); in Generate_Call()
2619 __ JumpToExternalReference(ExternalReference(Runtime::kJSProxyCall, masm->isolate())); in Generate_Call()
2624 __ bind(&non_function); in Generate_Call()
2626 __ Lsa(at, sp, a0, kPointerSizeLog2); in Generate_Call()
2627 __ sw(a1, MemOperand(at)); in Generate_Call()
2629 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1); in Generate_Call()
2630 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kNotNullOrUndefined), RelocInfo::CODE_TARGET); in Generate_Call()
2635 __ bind(&non_callable); in Generate_Call()
2638 __ Push(a1); in Generate_Call()
2639 __ CallRuntime(Runtime::kThrowCalledNonCallable); in Generate_Call()
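
Generate_Call tests the instance type once and fans out: JSFunction and JSBoundFunction get dedicated builtins, JSProxy tail-calls into the runtime, and any other callable map is routed through the CALL_AS_FUNCTION_DELEGATE with the original target patched into the receiver slot (2626-2627). As a switch (hypothetical enum; the real code branches on the value loaded from the map):

    #include <cstdio>

    enum class InstanceType { kJSFunction, kJSBoundFunction, kJSProxy, kOther };

    const char* Dispatch(InstanceType t, bool callable) {
      switch (t) {
        case InstanceType::kJSFunction:      return "CallFunction";       // 2596
        case InstanceType::kJSBoundFunction: return "CallBoundFunction";  // 2598
        default:
          if (!callable) return "ThrowCalledNonCallable";          // 2635-2639
          if (t == InstanceType::kJSProxy)
            return "Runtime::kJSProxyCall";                        // 2606-2619
          return "CallFunction via CALL_AS_FUNCTION_DELEGATE";     // 2624-2630
      }
    }

    int main() {
      printf("%s\n", Dispatch(InstanceType::kJSFunction, true));
      printf("%s\n", Dispatch(InstanceType::kJSProxy, true));
      printf("%s\n", Dispatch(InstanceType::kOther, false));
      return 0;
    }
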
2650 __ AssertFunction(a1); in Generate_ConstructFunction()
2654 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); in Generate_ConstructFunction()
2658 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_ConstructFunction()
2659 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset)); in Generate_ConstructFunction()
2660 __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_ConstructFunction()
2661 __ Jump(at); in Generate_ConstructFunction()
2671 __ AssertBoundFunction(a1); in Generate_ConstructBoundFunction()
2674 __ lw(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset)); in Generate_ConstructBoundFunction()
2675 __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset)); in Generate_ConstructBoundFunction()
2676 __ SmiUntag(t0); in Generate_ConstructBoundFunction()
2689 __ sll(t1, t0, kPointerSizeLog2); in Generate_ConstructBoundFunction()
2690 __ Subu(sp, sp, Operand(t1)); in Generate_ConstructBoundFunction()
2693 __ LoadRoot(at, Heap::kRealStackLimitRootIndex); in Generate_ConstructBoundFunction()
2694 __ Branch(&done, gt, sp, Operand(at)); // Signed comparison. in Generate_ConstructBoundFunction()
2696 __ Addu(sp, sp, Operand(t1)); in Generate_ConstructBoundFunction()
2699 __ EnterFrame(StackFrame::INTERNAL); in Generate_ConstructBoundFunction()
2700 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_ConstructBoundFunction()
2702 __ bind(&done); in Generate_ConstructBoundFunction()
2708 __ mov(t1, zero_reg); in Generate_ConstructBoundFunction()
2709 __ bind(&loop); in Generate_ConstructBoundFunction()
2710 __ Branch(&done_loop, ge, t1, Operand(a0)); in Generate_ConstructBoundFunction()
2711 __ Lsa(t2, sp, t0, kPointerSizeLog2); in Generate_ConstructBoundFunction()
2712 __ lw(at, MemOperand(t2)); in Generate_ConstructBoundFunction()
2713 __ Lsa(t2, sp, t1, kPointerSizeLog2); in Generate_ConstructBoundFunction()
2714 __ sw(at, MemOperand(t2)); in Generate_ConstructBoundFunction()
2715 __ Addu(t0, t0, Operand(1)); in Generate_ConstructBoundFunction()
2716 __ Addu(t1, t1, Operand(1)); in Generate_ConstructBoundFunction()
2717 __ Branch(&loop); in Generate_ConstructBoundFunction()
2718 __ bind(&done_loop); in Generate_ConstructBoundFunction()
2724 __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset)); in Generate_ConstructBoundFunction()
2725 __ SmiUntag(t0); in Generate_ConstructBoundFunction()
2726 __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in Generate_ConstructBoundFunction()
2727 __ bind(&loop); in Generate_ConstructBoundFunction()
2728 __ Subu(t0, t0, Operand(1)); in Generate_ConstructBoundFunction()
2729 __ Branch(&done_loop, lt, t0, Operand(zero_reg)); in Generate_ConstructBoundFunction()
2730 __ Lsa(t1, a2, t0, kPointerSizeLog2); in Generate_ConstructBoundFunction()
2731 __ lw(at, MemOperand(t1)); in Generate_ConstructBoundFunction()
2732 __ Lsa(t1, sp, a0, kPointerSizeLog2); in Generate_ConstructBoundFunction()
2733 __ sw(at, MemOperand(t1)); in Generate_ConstructBoundFunction()
2734 __ Addu(a0, a0, Operand(1)); in Generate_ConstructBoundFunction()
2735 __ Branch(&loop); in Generate_ConstructBoundFunction()
2736 __ bind(&done_loop); in Generate_ConstructBoundFunction()
2742 __ Branch(&skip_load, ne, a1, Operand(a3)); in Generate_ConstructBoundFunction()
2743 __ lw(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset)); in Generate_ConstructBoundFunction()
2744 __ bind(&skip_load); in Generate_ConstructBoundFunction()
2748 __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset)); in Generate_ConstructBoundFunction()
2749 __ li(at, Operand(ExternalReference(Builtins::kConstruct, masm->isolate()))); in Generate_ConstructBoundFunction()
2750 __ lw(at, MemOperand(at)); in Generate_ConstructBoundFunction()
2751 __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_ConstructBoundFunction()
2752 __ Jump(at); in Generate_ConstructBoundFunction()
2765 __ Push(a1, a3); in Generate_ConstructProxy()
2767 __ Addu(a0, a0, Operand(3)); in Generate_ConstructProxy()
2769 __ JumpToExternalReference(ExternalReference(Runtime::kJSProxyConstruct, masm->isolate())); in Generate_ConstructProxy()
2784 __ JumpIfSmi(a1, &non_constructor); in Generate_Construct()
2787 __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset)); in Generate_Construct()
2788 __ lbu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset)); in Generate_Construct()
2789 __ Jump(masm->isolate()->builtins()->ConstructFunction(), RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE)); in Generate_Construct()
2793 __ lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset)); in Generate_Construct()
2794 __ And(t3, t3, Operand(1 << Map::kIsConstructor)); in Generate_Construct()
2795 __ Branch(&non_constructor, eq, t3, Operand(zero_reg)); in Generate_Construct()
2799 __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(), RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE)); in Generate_Construct()
2803 __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET, eq, t2, Operand(JS_PROXY_TYPE)); in Generate_Construct()
2809 __ Lsa(at, sp, a0, kPointerSizeLog2); in Generate_Construct()
2810 __ sw(a1, MemOperand(at)); in Generate_Construct()
2812 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1); in Generate_Construct()
2813 __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); in Generate_Construct()
2819 __ bind(&non_constructor); in Generate_Construct()
2820 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), RelocInfo::CODE_TARGET); in Generate_Construct()
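
Generate_Construct mirrors the Call dispatch but keys the bailout on the map's constructor bit rather than the callable bit. The test at 2793-2795 is a plain single-bit mask, for example (the bit index below is illustrative, not V8's actual value):

    #include <cstdint>
    #include <cstdio>

    int main() {
      const int kIsConstructor = 4;  // stand-in for Map::kIsConstructor
      uint8_t bit_field = 1 << kIsConstructor;

      // And(t3, t3, Operand(1 << Map::kIsConstructor)) at 2794, then a
      // branch-if-zero to the non-constructor bailout at 2795.
      bool is_constructor = (bit_field & (1 << kIsConstructor)) != 0;
      printf("%s\n", is_constructor ? "dispatch on instance type"
                                    : "ConstructedNonConstructable");
      return 0;
    }
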
2830 __ SmiTag(a0); in Generate_AllocateInNewSpace()
2831 __ Push(a0); in Generate_AllocateInNewSpace()
2832 __ Move(cp, Smi::kZero); in Generate_AllocateInNewSpace()
2833 __ TailCallRuntime(Runtime::kAllocateInNewSpace); in Generate_AllocateInNewSpace()
2842 __ SmiTag(a0); in Generate_AllocateInOldSpace()
2843 __ Move(a1, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE))); in Generate_AllocateInOldSpace()
2844 __ Push(a0, a1); in Generate_AllocateInOldSpace()
2845 __ Move(cp, Smi::kZero); in Generate_AllocateInOldSpace()
2846 __ TailCallRuntime(Runtime::kAllocateInTargetSpace); in Generate_AllocateInOldSpace()
2855 __ Push(a0); in Generate_Abort()
2856 __ Move(cp, Smi::kZero); in Generate_Abort()
2857 __ TailCallRuntime(Runtime::kAbort); in Generate_Abort()
2872 __ Branch(&dont_adapt_arguments, eq, a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel)); in Generate_ArgumentsAdaptorTrampoline()
2875 __ Branch(&too_few, Uless, a0, Operand(a2)); in Generate_ArgumentsAdaptorTrampoline()
2882 __ bind(&enough); in Generate_ArgumentsAdaptorTrampoline()
2887 __ Lsa(a0, fp, a0, kPointerSizeLog2 - kSmiTagSize); in Generate_ArgumentsAdaptorTrampoline()
2889 __ Addu(a0, a0, Operand(2 * kPointerSize)); in Generate_ArgumentsAdaptorTrampoline()
2891 __ sll(t1, a2, kPointerSizeLog2); in Generate_ArgumentsAdaptorTrampoline()
2892 __ subu(t1, a0, t1); in Generate_ArgumentsAdaptorTrampoline()
2902 __ bind(&copy); in Generate_ArgumentsAdaptorTrampoline()
2903 __ lw(t0, MemOperand(a0)); in Generate_ArgumentsAdaptorTrampoline()
2904 __ push(t0); in Generate_ArgumentsAdaptorTrampoline()
2905 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t1)); in Generate_ArgumentsAdaptorTrampoline()
2906 __ addiu(a0, a0, -kPointerSize); // In delay slot. in Generate_ArgumentsAdaptorTrampoline()
2908 __ jmp(&invoke); in Generate_ArgumentsAdaptorTrampoline()
2912 __ bind(&too_few); in Generate_ArgumentsAdaptorTrampoline()
2921 __ Lsa(a0, fp, a0, kPointerSizeLog2 - kSmiTagSize); in Generate_ArgumentsAdaptorTrampoline()
2923 __ Addu(a0, a0, Operand(2 * kPointerSize)); in Generate_ArgumentsAdaptorTrampoline()
2925 __ Addu(t3, fp, kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
2934 __ bind(&copy); in Generate_ArgumentsAdaptorTrampoline()
2935 __ lw(t0, MemOperand(a0)); // Adjusted above for return addr and receiver. in Generate_ArgumentsAdaptorTrampoline()
2936 __ Subu(sp, sp, kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
2937 __ Subu(a0, a0, kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
2938 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3)); in Generate_ArgumentsAdaptorTrampoline()
2939 __ sw(t0, MemOperand(sp)); // In the delay slot. in Generate_ArgumentsAdaptorTrampoline()
2945 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); in Generate_ArgumentsAdaptorTrampoline()
2946 __ sll(t2, a2, kPointerSizeLog2); in Generate_ArgumentsAdaptorTrampoline()
2947 __ Subu(t1, fp, Operand(t2)); in Generate_ArgumentsAdaptorTrampoline()
2949 __ Subu(t1, t1, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + 2 * kPointerSize)); in Generate_ArgumentsAdaptorTrampoline()
2953 __ bind(&fill); in Generate_ArgumentsAdaptorTrampoline()
2954 __ Subu(sp, sp, kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
2955 __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(t1)); in Generate_ArgumentsAdaptorTrampoline()
2956 __ sw(t0, MemOperand(sp)); in Generate_ArgumentsAdaptorTrampoline()
2960 __ bind(&invoke); in Generate_ArgumentsAdaptorTrampoline()
2961 __ mov(a0, a2); in Generate_ArgumentsAdaptorTrampoline()
2965 __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); in Generate_ArgumentsAdaptorTrampoline()
2966 __ Call(t0); in Generate_ArgumentsAdaptorTrampoline()
2973 __ Ret(); in Generate_ArgumentsAdaptorTrampoline()
2978 __ bind(&dont_adapt_arguments); in Generate_ArgumentsAdaptorTrampoline()
2979 __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); in Generate_ArgumentsAdaptorTrampoline()
2980 __ Jump(t0); in Generate_ArgumentsAdaptorTrampoline()
2982 __ bind(&stack_overflow); in Generate_ArgumentsAdaptorTrampoline()
2985 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_ArgumentsAdaptorTrampoline()
2986 __ break_(0xCC); in Generate_ArgumentsAdaptorTrampoline()
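
The two copy paths in the adaptor reduce to one rule: pass min(actual, expected) real arguments and pad the rest with undefined (the fill loop at 2953-2956), so the callee always sees exactly `expected` slots plus the receiver. A host-side model of the frame the adaptor builds (plain C++, illustrative; the real code works in stack slots, not vectors):

    #include <cstdio>
    #include <string>
    #include <vector>

    // Mirrors the enough/too_few paths at 2882-2956: copy what the caller
    // provided, then fill the remaining formals with undefined.
    std::vector<std::string> Adapt(const std::vector<std::string>& actual,
                                   int expected) {
      std::vector<std::string> adapted;
      for (int i = 0; i < expected; ++i)
        adapted.push_back(i < static_cast<int>(actual.size()) ? actual[i]
                                                              : "undefined");
      return adapted;
    }

    int main() {
      for (const auto& s : Adapt({"a", "b"}, 4))  // too few: pad with undefined
        printf("%s ", s.c_str());
      printf("\n");
      for (const auto& s : Adapt({"a", "b", "c"}, 2))  // enough: trailing extras
        printf("%s ", s.c_str());                      // stay in the caller's
      printf("\n");                                    // frame, unseen by callee
      return 0;
    }
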
2990 #undef __