Lines Matching full:__

23 #define __ ACCESS_MASM(masm) macro
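
Throughout this file, `__` is V8's shorthand for emitting through the current MacroAssembler: every `__ op(...)` line below expands to `masm->op(...)` (in the non-tracing build, ACCESS_MASM(masm) is simply `masm->`). A minimal illustrative sketch of the convention; the toy MacroAssembler here is a stand-in, not V8's class:

    #include <cstdio>

    // Toy stand-in for v8::internal::MacroAssembler; only the shape of
    // the ACCESS_MASM macro matches V8, the rest is illustrative.
    struct MacroAssembler {
      void Push(int reg) { std::printf("push r%d\n", reg); }
      void Ret() { std::printf("ret\n"); }
    };

    #define ACCESS_MASM(masm) masm->
    #define __ ACCESS_MASM(masm)

    // Each "__ op(...)" line is really "masm->op(...)".
    void EmitStub(MacroAssembler* masm) {
      __ Push(1);
      __ Ret();
    }
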
26 __ dsll(t9, a0, kPointerSizeLog2); in Generate()
27 __ Daddu(t9, sp, t9); in Generate()
28 __ sd(a1, MemOperand(t9, 0)); in Generate()
29 __ Push(a1); in Generate()
30 __ Push(a2); in Generate()
31 __ Daddu(a0, a0, 3); in Generate()
32 __ TailCallRuntime(Runtime::kNewArray); in Generate()
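
The dsll/Daddu pair above computes &sp[argc]: the argument count in a0 is scaled to a byte offset and added to sp, and a1 is stored into that slot before the runtime tail call. A hypothetical sketch of the same arithmetic:

    #include <cstdint>

    // Hypothetical sketch: the dsll/Daddu pair above is pointer
    // arithmetic on the stack, &sp[argc] with 8-byte slots.
    uint64_t SlotPastArguments(uint64_t sp, uint64_t argc) {
      const int kPointerSizeLog2 = 3;        // 64-bit pointers.
      return sp + (argc << kPointerSizeLog2);
    }
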
72 __ Dsubu(sp, sp, Operand(param_count * kPointerSize)); in GenerateLightweightMiss()
75 __ sd(descriptor.GetRegisterParameter(i), in GenerateLightweightMiss()
78 __ CallExternalReference(miss, param_count); in GenerateLightweightMiss()
81 __ Ret(); in GenerateLightweightMiss()
102 __ Push(scratch, scratch2, scratch3); in Generate()
105 __ ldc1(double_scratch, MemOperand(input_reg, double_offset)); in Generate()
108 __ cfc1(scratch2, FCSR); in Generate()
109 __ ctc1(zero_reg, FCSR); in Generate()
112 __ Trunc_w_d(double_scratch, double_scratch); in Generate()
114 __ mfc1(scratch3, double_scratch); in Generate()
117 __ cfc1(scratch, FCSR); in Generate()
118 __ ctc1(scratch2, FCSR); in Generate()
121 __ And( in Generate()
127 __ Branch(&error, ne, scratch, Operand(zero_reg)); in Generate()
128 __ Move(result_reg, scratch3); in Generate()
129 __ Branch(&done); in Generate()
130 __ bind(&error); in Generate()
137 __ lw(input_low, in Generate()
139 __ lw(input_high, in Generate()
144 __ Ext(result_reg, in Generate()
150 __ Subu(scratch, result_reg, HeapNumber::kExponentMask); in Generate()
151 __ Movz(result_reg, zero_reg, scratch); in Generate()
152 __ Branch(&done, eq, scratch, Operand(zero_reg)); in Generate()
155 __ Subu(result_reg, in Generate()
161 __ Branch(&normal_exponent, le, result_reg, Operand(zero_reg)); in Generate()
162 __ mov(result_reg, zero_reg); in Generate()
163 __ Branch(&done); in Generate()
165 __ bind(&normal_exponent); in Generate()
168 __ Addu(scratch, result_reg, Operand(kShiftBase + HeapNumber::kMantissaBits)); in Generate()
173 __ And(sign, input_high, Operand(HeapNumber::kSignMask)); in Generate()
178 __ Branch(&high_shift_needed, lt, scratch, Operand(32)); in Generate()
179 __ mov(input_high, zero_reg); in Generate()
180 __ Branch(&high_shift_done); in Generate()
181 __ bind(&high_shift_needed); in Generate()
184 __ Or(input_high, in Generate()
190 __ sllv(input_high, input_high, scratch); in Generate()
192 __ bind(&high_shift_done); in Generate()
196 __ li(at, 32); in Generate()
197 __ subu(scratch, at, scratch); in Generate()
198 __ Branch(&pos_shift, ge, scratch, Operand(zero_reg)); in Generate()
201 __ Subu(scratch, zero_reg, scratch); in Generate()
202 __ sllv(input_low, input_low, scratch); in Generate()
203 __ Branch(&shift_done); in Generate()
205 __ bind(&pos_shift); in Generate()
206 __ srlv(input_low, input_low, scratch); in Generate()
208 __ bind(&shift_done); in Generate()
209 __ Or(input_high, input_high, Operand(input_low)); in Generate()
211 __ mov(scratch, sign); in Generate()
214 __ Subu(result_reg, zero_reg, input_high); in Generate()
215 __ Movz(result_reg, input_high, scratch); in Generate()
217 __ bind(&done); in Generate()
219 __ Pop(scratch, scratch2, scratch3); in Generate()
220 __ Ret(); in Generate()
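
This stub first tries the FPU (Trunc_w_d), saving and restoring FCSR around the conversion and branching to &error when an exception flag is raised; it then decomposes the raw IEEE-754 words by hand, shifting mantissa bits into place according to the exponent. A hypothetical, simplified C++ sketch of that slow path; names and the exact range checks are illustrative, not a line-for-line transcription:

    #include <cstdint>
    #include <cstring>

    // Hypothetical sketch: truncate a double to the low 32 bits of its
    // integer part using only the raw IEEE-754 words. NaN, infinity and
    // huge exponents yield 0, matching the stub's behavior.
    int32_t TruncateDoubleToInt32Bits(double d) {
      uint64_t bits;
      std::memcpy(&bits, &d, sizeof bits);
      uint32_t input_high = static_cast<uint32_t>(bits >> 32);
      int32_t exponent =
          static_cast<int32_t>((input_high >> 20) & 0x7FF) - 1023;
      uint32_t sign = input_high >> 31;

      if (exponent < 0) return 0;   // |d| < 1 truncates to 0.
      if (exponent > 83) return 0;  // All integer bits land above bit 31
                                    // (also covers NaN and infinity).

      // Reassemble the 53-bit significand (implicit leading 1).
      uint64_t mantissa = (bits & 0x000FFFFFFFFFFFFFull) | (1ull << 52);
      uint64_t int_part = exponent <= 52 ? mantissa >> (52 - exponent)
                                         : mantissa << (exponent - 52);
      uint32_t result = static_cast<uint32_t>(int_part);
      return static_cast<int32_t>(sign ? 0u - result : result);
    }
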
233 __ Branch(&not_identical, ne, a0, Operand(a1)); in EmitIdenticalObjectComparison()
235 __ li(exp_mask_reg, Operand(HeapNumber::kExponentMask)); in EmitIdenticalObjectComparison()
241 __ GetObjectType(a0, t0, t0); in EmitIdenticalObjectComparison()
244 __ Branch(slow, greater, t0, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitIdenticalObjectComparison()
246 __ Branch(slow, eq, t0, Operand(SYMBOL_TYPE)); in EmitIdenticalObjectComparison()
248 __ Branch(slow, eq, t0, Operand(SIMD128_VALUE_TYPE)); in EmitIdenticalObjectComparison()
250 __ Branch(&heap_number, eq, t0, Operand(HEAP_NUMBER_TYPE)); in EmitIdenticalObjectComparison()
253 __ Branch(slow, greater, t0, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitIdenticalObjectComparison()
255 __ Branch(slow, eq, t0, Operand(SYMBOL_TYPE)); in EmitIdenticalObjectComparison()
257 __ Branch(slow, eq, t0, Operand(SIMD128_VALUE_TYPE)); in EmitIdenticalObjectComparison()
262 __ Branch(&return_equal, ne, t0, Operand(ODDBALL_TYPE)); in EmitIdenticalObjectComparison()
263 __ LoadRoot(a6, Heap::kUndefinedValueRootIndex); in EmitIdenticalObjectComparison()
264 __ Branch(&return_equal, ne, a0, Operand(a6)); in EmitIdenticalObjectComparison()
266 __ Ret(USE_DELAY_SLOT); in EmitIdenticalObjectComparison()
269 __ li(v0, Operand(GREATER)); in EmitIdenticalObjectComparison()
272 __ li(v0, Operand(LESS)); in EmitIdenticalObjectComparison()
278 __ bind(&return_equal); in EmitIdenticalObjectComparison()
280 __ Ret(USE_DELAY_SLOT); in EmitIdenticalObjectComparison()
282 __ li(v0, Operand(GREATER)); // Things aren't less than themselves. in EmitIdenticalObjectComparison()
284 __ li(v0, Operand(LESS)); // Things aren't greater than themselves. in EmitIdenticalObjectComparison()
286 __ mov(v0, zero_reg); // Things are <=, >=, ==, === themselves. in EmitIdenticalObjectComparison()
292 __ bind(&heap_number); in EmitIdenticalObjectComparison()
299 __ lwu(a6, FieldMemOperand(a0, HeapNumber::kExponentOffset)); in EmitIdenticalObjectComparison()
301 __ And(a7, a6, Operand(exp_mask_reg)); in EmitIdenticalObjectComparison()
303 __ Branch(&return_equal, ne, a7, Operand(exp_mask_reg)); in EmitIdenticalObjectComparison()
306 __ sll(a6, a6, HeapNumber::kNonMantissaBitsInTopWord); in EmitIdenticalObjectComparison()
308 __ lwu(a7, FieldMemOperand(a0, HeapNumber::kMantissaOffset)); in EmitIdenticalObjectComparison()
309 __ Or(v0, a7, Operand(a6)); in EmitIdenticalObjectComparison()
316 __ Ret(eq, v0, Operand(zero_reg)); in EmitIdenticalObjectComparison()
318 __ Ret(USE_DELAY_SLOT); in EmitIdenticalObjectComparison()
320 __ li(v0, Operand(GREATER)); // NaN <= NaN should fail. in EmitIdenticalObjectComparison()
322 __ li(v0, Operand(LESS)); // NaN >= NaN should fail. in EmitIdenticalObjectComparison()
328 __ bind(&not_identical); in EmitIdenticalObjectComparison()
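
In the &heap_number block above, x === x can only fail for NaN, so the stub inspects the raw words directly: the exponent must be all ones, and then any surviving mantissa bit distinguishes NaN from infinity. A hypothetical sketch of the second half of that test:

    #include <cstdint>

    // Hypothetical sketch of the sll/Or test above: once the exponent
    // is known to be all ones, shift the sign and exponent bits out of
    // the high word and OR in the low word; a non-zero result means
    // NaN, a zero result means infinity (which does equal itself).
    bool IsNaNGivenMaxExponent(uint32_t high_word, uint32_t low_word) {
      const int kNonMantissaBitsInTopWord = 12;  // 1 sign + 11 exponent.
      return ((high_word << kNonMantissaBitsInTopWord) | low_word) != 0;
    }
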
342 __ JumpIfSmi(lhs, &lhs_is_smi); in EmitSmiNonsmiComparison()
345 __ GetObjectType(lhs, t0, t0); in EmitSmiNonsmiComparison()
349 __ Ret(USE_DELAY_SLOT, ne, t0, Operand(HEAP_NUMBER_TYPE)); in EmitSmiNonsmiComparison()
350 __ mov(v0, lhs); in EmitSmiNonsmiComparison()
354 __ Branch(slow, ne, t0, Operand(HEAP_NUMBER_TYPE)); in EmitSmiNonsmiComparison()
358 __ SmiUntag(at, rhs); in EmitSmiNonsmiComparison()
359 __ mtc1(at, f14); in EmitSmiNonsmiComparison()
360 __ cvt_d_w(f14, f14); in EmitSmiNonsmiComparison()
361 __ ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
364 __ jmp(both_loaded_as_doubles); in EmitSmiNonsmiComparison()
366 __ bind(&lhs_is_smi); in EmitSmiNonsmiComparison()
368 __ GetObjectType(rhs, t0, t0); in EmitSmiNonsmiComparison()
372 __ Ret(USE_DELAY_SLOT, ne, t0, Operand(HEAP_NUMBER_TYPE)); in EmitSmiNonsmiComparison()
373 __ li(v0, Operand(1)); in EmitSmiNonsmiComparison()
377 __ Branch(slow, ne, t0, Operand(HEAP_NUMBER_TYPE)); in EmitSmiNonsmiComparison()
382 __ SmiUntag(at, lhs); in EmitSmiNonsmiComparison()
383 __ mtc1(at, f12); in EmitSmiNonsmiComparison()
384 __ cvt_d_w(f12, f12); in EmitSmiNonsmiComparison()
385 __ ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
400 __ GetObjectType(lhs, a2, a2); in EmitStrictTwoHeapObjectCompare()
401 __ Branch(&first_non_object, less, a2, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitStrictTwoHeapObjectCompare()
405 __ bind(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
406 __ Ret(USE_DELAY_SLOT); in EmitStrictTwoHeapObjectCompare()
407 __ li(v0, Operand(1)); in EmitStrictTwoHeapObjectCompare()
409 __ bind(&first_non_object); in EmitStrictTwoHeapObjectCompare()
411 __ Branch(&return_not_equal, eq, a2, Operand(ODDBALL_TYPE)); in EmitStrictTwoHeapObjectCompare()
413 __ GetObjectType(rhs, a3, a3); in EmitStrictTwoHeapObjectCompare()
414 __ Branch(&return_not_equal, greater, a3, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitStrictTwoHeapObjectCompare()
417 __ Branch(&return_not_equal, eq, a3, Operand(ODDBALL_TYPE)); in EmitStrictTwoHeapObjectCompare()
422 __ Or(a2, a2, Operand(a3)); in EmitStrictTwoHeapObjectCompare()
423 __ And(at, a2, Operand(kIsNotStringMask | kIsNotInternalizedMask)); in EmitStrictTwoHeapObjectCompare()
424 __ Branch(&return_not_equal, eq, at, Operand(zero_reg)); in EmitStrictTwoHeapObjectCompare()
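
The last three lines fold two type checks into one: OR the instance types together, then test the combined "not a string" and "not internalized" bits, so a single branch proves both operands are internalized strings (and distinct pointers then imply distinct strings). A hypothetical sketch, with the masks treated as opaque parameters:

    #include <cstdint>

    // Hypothetical sketch of the Or/And test above.
    bool BothInternalizedStrings(uint32_t type_a, uint32_t type_b,
                                 uint32_t not_string_mask,
                                 uint32_t not_internalized_mask) {
      return ((type_a | type_b) &
              (not_string_mask | not_internalized_mask)) == 0;
    }
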
434 __ GetObjectType(lhs, a3, a2); in EmitCheckForTwoHeapNumbers()
435 __ Branch(not_heap_numbers, ne, a2, Operand(HEAP_NUMBER_TYPE)); in EmitCheckForTwoHeapNumbers()
436 __ ld(a2, FieldMemOperand(rhs, HeapObject::kMapOffset)); in EmitCheckForTwoHeapNumbers()
438 __ Branch(slow, ne, a3, Operand(a2)); in EmitCheckForTwoHeapNumbers()
442 __ ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset)); in EmitCheckForTwoHeapNumbers()
443 __ ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in EmitCheckForTwoHeapNumbers()
445 __ jmp(both_loaded_as_doubles); in EmitCheckForTwoHeapNumbers()
460 __ And(at, a2, Operand(kIsNotStringMask)); in EmitCheckForInternalizedStringsOrObjects()
461 __ Branch(&object_test, ne, at, Operand(zero_reg)); in EmitCheckForInternalizedStringsOrObjects()
462 __ And(at, a2, Operand(kIsNotInternalizedMask)); in EmitCheckForInternalizedStringsOrObjects()
463 __ Branch(possible_strings, ne, at, Operand(zero_reg)); in EmitCheckForInternalizedStringsOrObjects()
464 __ GetObjectType(rhs, a3, a3); in EmitCheckForInternalizedStringsOrObjects()
465 __ Branch(runtime_call, ge, a3, Operand(FIRST_NONSTRING_TYPE)); in EmitCheckForInternalizedStringsOrObjects()
466 __ And(at, a3, Operand(kIsNotInternalizedMask)); in EmitCheckForInternalizedStringsOrObjects()
467 __ Branch(possible_strings, ne, at, Operand(zero_reg)); in EmitCheckForInternalizedStringsOrObjects()
472 __ Ret(USE_DELAY_SLOT); in EmitCheckForInternalizedStringsOrObjects()
473 __ mov(v0, a0); // In delay slot. in EmitCheckForInternalizedStringsOrObjects()
475 __ bind(&object_test); in EmitCheckForInternalizedStringsOrObjects()
476 __ ld(a2, FieldMemOperand(lhs, HeapObject::kMapOffset)); in EmitCheckForInternalizedStringsOrObjects()
477 __ ld(a3, FieldMemOperand(rhs, HeapObject::kMapOffset)); in EmitCheckForInternalizedStringsOrObjects()
478 __ lbu(t0, FieldMemOperand(a2, Map::kBitFieldOffset)); in EmitCheckForInternalizedStringsOrObjects()
479 __ lbu(t1, FieldMemOperand(a3, Map::kBitFieldOffset)); in EmitCheckForInternalizedStringsOrObjects()
480 __ And(at, t0, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
481 __ Branch(&undetectable, ne, at, Operand(zero_reg)); in EmitCheckForInternalizedStringsOrObjects()
482 __ And(at, t1, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
483 __ Branch(&return_unequal, ne, at, Operand(zero_reg)); in EmitCheckForInternalizedStringsOrObjects()
485 __ GetInstanceType(a2, a2); in EmitCheckForInternalizedStringsOrObjects()
486 __ Branch(runtime_call, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitCheckForInternalizedStringsOrObjects()
487 __ GetInstanceType(a3, a3); in EmitCheckForInternalizedStringsOrObjects()
488 __ Branch(runtime_call, lt, a3, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitCheckForInternalizedStringsOrObjects()
490 __ bind(&return_unequal); in EmitCheckForInternalizedStringsOrObjects()
492 __ Ret(USE_DELAY_SLOT); in EmitCheckForInternalizedStringsOrObjects()
493 __ mov(v0, a0); // In delay slot. in EmitCheckForInternalizedStringsOrObjects()
495 __ bind(&undetectable); in EmitCheckForInternalizedStringsOrObjects()
496 __ And(at, t1, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
497 __ Branch(&return_unequal, eq, at, Operand(zero_reg)); in EmitCheckForInternalizedStringsOrObjects()
502 __ GetInstanceType(a2, a2); in EmitCheckForInternalizedStringsOrObjects()
503 __ Branch(&return_equal, eq, a2, Operand(ODDBALL_TYPE)); in EmitCheckForInternalizedStringsOrObjects()
504 __ GetInstanceType(a3, a3); in EmitCheckForInternalizedStringsOrObjects()
505 __ Branch(&return_unequal, ne, a3, Operand(ODDBALL_TYPE)); in EmitCheckForInternalizedStringsOrObjects()
507 __ bind(&return_equal); in EmitCheckForInternalizedStringsOrObjects()
508 __ Ret(USE_DELAY_SLOT); in EmitCheckForInternalizedStringsOrObjects()
509 __ li(v0, Operand(EQUAL)); // In delay slot. in EmitCheckForInternalizedStringsOrObjects()
519 __ JumpIfNotSmi(input, fail); in CompareICStub_CheckInputType()
521 __ JumpIfSmi(input, &ok); in CompareICStub_CheckInputType()
522 __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail, in CompareICStub_CheckInputType()
527 __ bind(&ok); in CompareICStub_CheckInputType()
547 __ Or(a2, a1, a0); in GenerateGeneric()
548 __ JumpIfNotSmi(a2, &not_two_smis); in GenerateGeneric()
549 __ SmiUntag(a1); in GenerateGeneric()
550 __ SmiUntag(a0); in GenerateGeneric()
552 __ Ret(USE_DELAY_SLOT); in GenerateGeneric()
553 __ dsubu(v0, a1, a0); in GenerateGeneric()
554 __ bind(&not_two_smis); in GenerateGeneric()
567 __ And(a6, lhs, Operand(rhs)); in GenerateGeneric()
568 __ JumpIfNotSmi(a6, &not_smis, a4); in GenerateGeneric()
580 __ bind(&both_loaded_as_doubles); in GenerateGeneric()
586 __ li(a4, Operand(LESS)); in GenerateGeneric()
587 __ li(a5, Operand(GREATER)); in GenerateGeneric()
588 __ li(a6, Operand(EQUAL)); in GenerateGeneric()
591 __ BranchF(NULL, &nan, eq, f12, f14); in GenerateGeneric()
596 __ c(OLT, D, f12, f14); in GenerateGeneric()
597 __ Movt(v0, a4); in GenerateGeneric()
601 __ Movf(v0, a5); in GenerateGeneric()
604 __ c(EQ, D, f12, f14); in GenerateGeneric()
605 __ Movt(v0, a6); in GenerateGeneric()
608 __ BranchF(USE_DELAY_SLOT, &skip, NULL, lt, f12, f14); in GenerateGeneric()
609 __ mov(v0, a4); // Return LESS as result. in GenerateGeneric()
611 __ BranchF(USE_DELAY_SLOT, &skip, NULL, eq, f12, f14); in GenerateGeneric()
612 __ mov(v0, a6); // Return EQUAL as result. in GenerateGeneric()
614 __ mov(v0, a5); // Return GREATER as result. in GenerateGeneric()
615 __ bind(&skip); in GenerateGeneric()
617 __ Ret(); in GenerateGeneric()
619 __ bind(&nan); in GenerateGeneric()
623 __ Ret(USE_DELAY_SLOT); in GenerateGeneric()
625 __ li(v0, Operand(GREATER)); in GenerateGeneric()
627 __ li(v0, Operand(LESS)); in GenerateGeneric()
631 __ bind(&not_smis); in GenerateGeneric()
654 __ bind(&check_for_internalized_strings); in GenerateGeneric()
666 __ bind(&flat_string_check); in GenerateGeneric()
668 __ JumpIfNonSmisNotBothSequentialOneByteStrings(lhs, rhs, a2, a3, &slow); in GenerateGeneric()
670 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, a2, in GenerateGeneric()
680 __ bind(&slow); in GenerateGeneric()
684 __ Push(lhs, rhs); in GenerateGeneric()
685 __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual); in GenerateGeneric()
689 __ LoadRoot(a0, Heap::kTrueValueRootIndex); in GenerateGeneric()
690 __ Ret(USE_DELAY_SLOT); in GenerateGeneric()
691 __ subu(v0, v0, a0); // In delay slot. in GenerateGeneric()
695 __ Push(lhs, rhs); in GenerateGeneric()
703 __ li(a0, Operand(Smi::FromInt(ncr))); in GenerateGeneric()
704 __ push(a0); in GenerateGeneric()
708 __ TailCallRuntime(Runtime::kCompare); in GenerateGeneric()
711 __ bind(&miss); in GenerateGeneric()
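
The &both_loaded_as_doubles path above selects LESS, EQUAL, or GREATER into v0, and the &nan block enforces the usual NaN convention: unordered operands return GREATER when the condition being compiled was < or <=, and LESS otherwise, so every relational test on NaN fails. A hypothetical C++ rendering of that selection, assuming V8's LESS/EQUAL/GREATER values of -1/0/1:

    // Hypothetical sketch of the result selection above.
    enum CompareResult { kLess = -1, kEqual = 0, kGreater = 1 };

    CompareResult CompareDoubles(double lhs, double rhs,
                                 bool cc_is_lt_or_le) {
      if (lhs != lhs || rhs != rhs)                // Unordered: NaN present.
        return cc_is_lt_or_le ? kGreater : kLess;  // Make the test fail.
      if (lhs < rhs) return kLess;
      if (lhs > rhs) return kGreater;
      return kEqual;
    }
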
717 __ mov(t9, ra); in Generate()
718 __ pop(ra); in Generate()
719 __ PushSafepointRegisters(); in Generate()
720 __ Jump(t9); in Generate()
725 __ mov(t9, ra); in Generate()
726 __ pop(ra); in Generate()
727 __ PopSafepointRegisters(); in Generate()
728 __ Jump(t9); in Generate()
736 __ MultiPush(kJSCallerSaved | ra.bit()); in Generate()
738 __ MultiPushFPU(kCallerSavedFPU); in Generate()
745 __ PrepareCallCFunction(argument_count, fp_argument_count, scratch); in Generate()
746 __ li(a0, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
747 __ CallCFunction( in Generate()
751 __ MultiPopFPU(kCallerSavedFPU); in Generate()
754 __ MultiPop(kJSCallerSaved | ra.bit()); in Generate()
755 __ Ret(); in Generate()
773 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent); in Generate()
775 __ ldc1(double_exponent, in Generate()
782 __ EmitFPUTruncate(kRoundToMinusInf, in Generate()
790 __ Branch(&int_exponent_convert, eq, scratch2, Operand(zero_reg)); in Generate()
792 __ push(ra); in Generate()
795 __ PrepareCallCFunction(0, 2, scratch2); in Generate()
796 __ MovToFloatParameters(double_base, double_exponent); in Generate()
797 __ CallCFunction( in Generate()
801 __ pop(ra); in Generate()
802 __ MovFromFloatResult(double_result); in Generate()
803 __ jmp(&done); in Generate()
805 __ bind(&int_exponent_convert); in Generate()
809 __ bind(&int_exponent); in Generate()
813 __ mov(scratch, exponent); in Generate()
816 __ mov(exponent, scratch); in Generate()
819 __ mov_d(double_scratch, double_base); // Back up base. in Generate()
820 __ Move(double_result, 1.0); in Generate()
824 __ Branch(&positive_exponent, ge, scratch, Operand(zero_reg)); in Generate()
825 __ Dsubu(scratch, zero_reg, scratch); in Generate()
828 __ Branch(&bail_out, gt, zero_reg, Operand(scratch)); in Generate()
829 __ bind(&positive_exponent); in Generate()
830 __ Assert(ge, kUnexpectedNegativeValue, scratch, Operand(zero_reg)); in Generate()
833 __ bind(&while_true); in Generate()
835 __ And(scratch2, scratch, 1); in Generate()
837 __ Branch(&no_carry, eq, scratch2, Operand(zero_reg)); in Generate()
838 __ mul_d(double_result, double_result, double_scratch); in Generate()
839 __ bind(&no_carry); in Generate()
841 __ dsra(scratch, scratch, 1); in Generate()
843 __ Branch(&loop_end, eq, scratch, Operand(zero_reg)); in Generate()
844 __ mul_d(double_scratch, double_scratch, double_scratch); in Generate()
846 __ Branch(&while_true); in Generate()
848 __ bind(&loop_end); in Generate()
850 __ Branch(&done, ge, exponent, Operand(zero_reg)); in Generate()
851 __ Move(double_scratch, 1.0); in Generate()
852 __ div_d(double_result, double_scratch, double_result); in Generate()
855 __ BranchF(&done, NULL, ne, double_result, kDoubleRegZero); in Generate()
859 __ bind(&bail_out); in Generate()
860 __ mtc1(exponent, single_scratch); in Generate()
861 __ cvt_d_w(double_exponent, single_scratch); in Generate()
864 __ push(ra); in Generate()
867 __ PrepareCallCFunction(0, 2, scratch); in Generate()
868 __ MovToFloatParameters(double_base, double_exponent); in Generate()
869 __ CallCFunction(ExternalReference::power_double_double_function(isolate()), in Generate()
872 __ pop(ra); in Generate()
873 __ MovFromFloatResult(double_result); in Generate()
875 __ bind(&done); in Generate()
876 __ Ret(); in Generate()
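
The &while_true loop above is binary exponentiation: multiply the result in whenever the low exponent bit is set, square the running base, shift the exponent right, and take a reciprocal at the end for negative exponents (with &bail_out falling back to the C library for cases the fast path cannot handle). A hypothetical C++ equivalent of the integer-exponent path:

    // Hypothetical sketch of the exponentiation-by-squaring loop above
    // (ignores the INT_MIN overflow that the stub's &bail_out handles).
    double PowInt(double base, int exponent) {
      double result = 1.0;
      double running = base;             // double_scratch in the stub.
      int e = exponent < 0 ? -exponent : exponent;
      while (e != 0) {
        if (e & 1) result *= running;    // Low bit set: multiply in.
        e >>= 1;                         // dsra scratch, scratch, 1.
        if (e != 0) running *= running;  // Square for the next bit.
      }
      if (exponent < 0) result = 1.0 / result;  // The final div_d.
      return result;
    }
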
941 __ mov(s1, a2); in Generate()
944 __ Dlsa(s1, sp, a0, kPointerSizeLog2); in Generate()
945 __ Dsubu(s1, s1, kPointerSize); in Generate()
950 __ EnterExitFrame(save_doubles(), 0, is_builtin_exit() in Generate()
960 __ mov(s0, a0); in Generate()
961 __ mov(s2, a1); in Generate()
966 __ AssertStackIsAligned(); in Generate()
973 __ li(a2, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
974 __ mov(a1, s1); in Generate()
982 __ Dsubu(sp, sp, Operand(result_stack_size)); in Generate()
985 __ li(a3, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
986 __ mov(a2, s1); in Generate()
987 __ mov(a1, a0); in Generate()
988 __ mov(a0, sp); in Generate()
1002 __ addiupc(ra, kNumInstructionsToJump + 1); in Generate()
1006 __ bal(&find_ra); // bal exposes branch delay slot. in Generate()
1007 __ Daddu(ra, ra, kNumInstructionsToJump * Instruction::kInstrSize); in Generate()
1009 __ bind(&find_ra); in Generate()
1012 __ sd(ra, MemOperand(sp, result_stack_size)); in Generate()
1017 __ mov(t9, s2); // Function pointer to t9 to conform to ABI for PIC. in Generate()
1018 __ jalr(t9); in Generate()
1020 __ daddiu(sp, sp, -kCArgsSlotsSize); in Generate()
1028 __ ld(a0, MemOperand(v0, 2 * kPointerSize)); in Generate()
1029 __ ld(v1, MemOperand(v0, 1 * kPointerSize)); in Generate()
1030 __ ld(v0, MemOperand(v0, 0 * kPointerSize)); in Generate()
1036 __ LoadRoot(a4, Heap::kExceptionRootIndex); in Generate()
1037 __ Branch(&exception_returned, eq, a4, Operand(v0)); in Generate()
1045 __ li(a2, Operand(pending_exception_address)); in Generate()
1046 __ ld(a2, MemOperand(a2)); in Generate()
1047 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex); in Generate()
1049 __ Branch(&okay, eq, a4, Operand(a2)); in Generate()
1050 __ stop("Unexpected pending exception"); in Generate()
1051 __ bind(&okay); in Generate()
1066 __ LeaveExitFrame(save_doubles(), argc, true, EMIT_RETURN); in Generate()
1069 __ bind(&exception_returned); in Generate()
1088 __ PrepareCallCFunction(3, 0, a0); in Generate()
1089 __ mov(a0, zero_reg); in Generate()
1090 __ mov(a1, zero_reg); in Generate()
1091 __ li(a2, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
1092 __ CallCFunction(find_handler, 3); in Generate()
1096 __ li(cp, Operand(pending_handler_context_address)); in Generate()
1097 __ ld(cp, MemOperand(cp)); in Generate()
1098 __ li(sp, Operand(pending_handler_sp_address)); in Generate()
1099 __ ld(sp, MemOperand(sp)); in Generate()
1100 __ li(fp, Operand(pending_handler_fp_address)); in Generate()
1101 __ ld(fp, MemOperand(fp)); in Generate()
1106 __ Branch(&zero, eq, cp, Operand(zero_reg)); in Generate()
1107 __ sd(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); in Generate()
1108 __ bind(&zero); in Generate()
1111 __ li(a1, Operand(pending_handler_code_address)); in Generate()
1112 __ ld(a1, MemOperand(a1)); in Generate()
1113 __ li(a2, Operand(pending_handler_offset_address)); in Generate()
1114 __ ld(a2, MemOperand(a2)); in Generate()
1115 __ Daddu(a1, a1, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1116 __ Daddu(t9, a1, a2); in Generate()
1117 __ Jump(t9); in Generate()
1140 __ MultiPush(kCalleeSaved | ra.bit()); in Generate()
1143 __ MultiPushFPU(kCalleeSavedFPU); in Generate()
1145 __ Move(kDoubleRegZero, 0.0); in Generate()
1148 __ mov(s0, a4); // 5th parameter is in the a4 register on mips64. in Generate()
1150 __ InitializeRootRegister(); in Generate()
1153 __ li(a7, Operand(-1)); // Push a bad frame pointer to fail if it is used. in Generate()
1155 __ li(a6, Operand(Smi::FromInt(marker))); in Generate()
1156 __ li(a5, Operand(Smi::FromInt(marker))); in Generate()
1158 __ li(a4, Operand(c_entry_fp)); in Generate()
1159 __ ld(a4, MemOperand(a4)); in Generate()
1160 __ Push(a7, a6, a5, a4); in Generate()
1162 __ daddiu(fp, sp, -EntryFrameConstants::kCallerFPOffset); in Generate()
1183 __ li(a5, Operand(ExternalReference(js_entry_sp))); in Generate()
1184 __ ld(a6, MemOperand(a5)); in Generate()
1185 __ Branch(&non_outermost_js, ne, a6, Operand(zero_reg)); in Generate()
1186 __ sd(fp, MemOperand(a5)); in Generate()
1187 __ li(a4, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); in Generate()
1189 __ b(&cont); in Generate()
1190 __ nop(); // Branch delay slot nop. in Generate()
1191 __ bind(&non_outermost_js); in Generate()
1192 __ li(a4, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME))); in Generate()
1193 __ bind(&cont); in Generate()
1194 __ push(a4); in Generate()
1198 __ jmp(&invoke); in Generate()
1199 __ bind(&handler_entry); in Generate()
1205 __ li(a4, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in Generate()
1207 __ sd(v0, MemOperand(a4)); // We come back from 'invoke'. result is in v0. in Generate()
1208 __ LoadRoot(v0, Heap::kExceptionRootIndex); in Generate()
1209 __ b(&exit); // b exposes branch delay slot. in Generate()
1210 __ nop(); // Branch delay slot nop. in Generate()
1213 __ bind(&invoke); in Generate()
1214 __ PushStackHandler(); in Generate()
1241 __ li(a4, Operand(construct_entry)); in Generate()
1244 __ li(a4, Operand(entry)); in Generate()
1246 __ ld(t9, MemOperand(a4)); // Deref address. in Generate()
1248 __ daddiu(t9, t9, Code::kHeaderSize - kHeapObjectTag); in Generate()
1249 __ Call(t9); in Generate()
1252 __ PopStackHandler(); in Generate()
1254 __ bind(&exit); // v0 holds result in Generate()
1257 __ pop(a5); in Generate()
1258 __ Branch(&non_outermost_js_2, in Generate()
1262 __ li(a5, Operand(ExternalReference(js_entry_sp))); in Generate()
1263 __ sd(zero_reg, MemOperand(a5)); in Generate()
1264 __ bind(&non_outermost_js_2); in Generate()
1267 __ pop(a5); in Generate()
1268 __ li(a4, Operand(ExternalReference(Isolate::kCEntryFPAddress, in Generate()
1270 __ sd(a5, MemOperand(a4)); in Generate()
1273 __ daddiu(sp, sp, -EntryFrameConstants::kCallerFPOffset); in Generate()
1276 __ MultiPopFPU(kCalleeSavedFPU); in Generate()
1279 __ MultiPop(kCalleeSaved | ra.bit()); in Generate()
1281 __ Jump(ra); in Generate()
1302 __ Ret(); in Generate()
1307 __ bind(&miss); in Generate()
1323 __ bind(&miss); in Generate()
1334 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
1365 __ li(a0, Operand(address_of_regexp_stack_memory_size)); in Generate()
1366 __ ld(a0, MemOperand(a0, 0)); in Generate()
1367 __ Branch(&runtime, eq, a0, Operand(zero_reg)); in Generate()
1370 __ ld(a0, MemOperand(sp, kJSRegExpOffset)); in Generate()
1372 __ JumpIfSmi(a0, &runtime); in Generate()
1373 __ GetObjectType(a0, a1, a1); in Generate()
1374 __ Branch(&runtime, ne, a1, Operand(JS_REGEXP_TYPE)); in Generate()
1377 __ ld(regexp_data, FieldMemOperand(a0, JSRegExp::kDataOffset)); in Generate()
1379 __ SmiTst(regexp_data, a4); in Generate()
1380 __ Check(nz, in Generate()
1384 __ GetObjectType(regexp_data, a0, a0); in Generate()
1385 __ Check(eq, in Generate()
1393 __ ld(a0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset)); in Generate()
1394 __ Branch(&runtime, ne, a0, Operand(Smi::FromInt(JSRegExp::IRREGEXP))); in Generate()
1398 __ ld(a2, in Generate()
1406 __ Branch(&runtime, hi, a2, Operand(Smi::FromInt(temp))); in Generate()
1409 __ mov(t0, zero_reg); in Generate()
1410 __ ld(subject, MemOperand(sp, kSubjectOffset)); in Generate()
1411 __ JumpIfSmi(subject, &runtime); in Generate()
1412 __ mov(a3, subject); // Make a copy of the original subject string. in Generate()
1439 __ bind(&check_underlying); in Generate()
1440 __ ld(a2, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
1441 __ lbu(a0, FieldMemOperand(a2, Map::kInstanceTypeOffset)); in Generate()
1444 __ And(a1, in Generate()
1450 __ Branch(&seq_string, eq, a1, Operand(zero_reg)); // Go to (4). in Generate()
1458 __ Branch(&not_seq_nor_cons, ge, a1, Operand(kExternalStringTag)); in Generate()
1462 __ ld(a0, FieldMemOperand(subject, ConsString::kSecondOffset)); in Generate()
1463 __ LoadRoot(a1, Heap::kempty_stringRootIndex); in Generate()
1464 __ Branch(&runtime, ne, a0, Operand(a1)); in Generate()
1465 __ ld(subject, FieldMemOperand(subject, ConsString::kFirstOffset)); in Generate()
1466 __ jmp(&check_underlying); in Generate()
1469 __ bind(&seq_string); in Generate()
1475 __ ld(a1, MemOperand(sp, kPreviousIndexOffset)); in Generate()
1476 __ JumpIfNotSmi(a1, &runtime); in Generate()
1477 __ ld(a3, FieldMemOperand(a3, String::kLengthOffset)); in Generate()
1478 __ Branch(&runtime, ls, a3, Operand(a1)); in Generate()
1479 __ SmiUntag(a1); in Generate()
1484 __ And(a0, a0, Operand(kStringEncodingMask)); // Non-zero for one_byte. in Generate()
1485 __ ld(t9, FieldMemOperand(regexp_data, JSRegExp::kDataOneByteCodeOffset)); in Generate()
1486 __ dsra(a3, a0, 2); // a3 is 1 for one_byte, 0 for UC16 (used below). in Generate()
1487 __ ld(a5, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset)); in Generate()
1488 __ Movz(t9, a5, a0); // If UC16 (a0 is 0), replace t9 w/kDataUC16CodeOffset. in Generate()
1495 __ JumpIfSmi(t9, &runtime); in Generate()
1503 __ IncrementCounter(isolate()->counters()->regexp_entry_native(), in Generate()
1509 __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters); in Generate()
1530 __ li(a0, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
1531 __ sd(a0, MemOperand(sp, 1 * kPointerSize)); in Generate()
1534 __ li(a7, Operand(1)); in Generate()
1537 __ li(a0, Operand(address_of_regexp_stack_memory_address)); in Generate()
1538 __ ld(a0, MemOperand(a0, 0)); in Generate()
1539 __ li(a2, Operand(address_of_regexp_stack_memory_size)); in Generate()
1540 __ ld(a2, MemOperand(a2, 0)); in Generate()
1541 __ daddu(a6, a0, a2); in Generate()
1545 __ mov(a5, zero_reg); in Generate()
1548 __ li( in Generate()
1554 __ Daddu(t2, subject, Operand(SeqString::kHeaderSize - kHeapObjectTag)); in Generate()
1555 __ Xor(a3, a3, Operand(1)); // 1 for 2-byte str, 0 for 1-byte. in Generate()
1560 __ ld(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize)); in Generate()
1565 __ dsllv(t1, t0, a3); in Generate()
1566 __ daddu(t0, t2, t1); in Generate()
1567 __ dsllv(t1, a1, a3); in Generate()
1568 __ daddu(a2, t0, t1); in Generate()
1570 __ ld(t2, FieldMemOperand(subject, String::kLengthOffset)); in Generate()
1572 __ SmiUntag(t2); in Generate()
1573 __ dsllv(t1, t2, a3); in Generate()
1574 __ daddu(a3, t0, t1); in Generate()
1579 __ mov(a0, subject); in Generate()
1582 __ Daddu(t9, t9, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1586 __ LeaveExitFrame(false, no_reg, true); in Generate()
1594 __ Branch(&success, eq, v0, Operand(1)); in Generate()
1598 __ Branch(&failure, eq, v0, Operand(NativeRegExpMacroAssembler::FAILURE)); in Generate()
1600 __ Branch(&runtime, ne, v0, Operand(NativeRegExpMacroAssembler::EXCEPTION)); in Generate()
1605 __ li(a1, Operand(isolate()->factory()->the_hole_value())); in Generate()
1606 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in Generate()
1608 __ ld(v0, MemOperand(a2, 0)); in Generate()
1609 __ Branch(&runtime, eq, v0, Operand(a1)); in Generate()
1612 __ TailCallRuntime(Runtime::kRegExpExecReThrow); in Generate()
1614 __ bind(&failure); in Generate()
1616 __ li(v0, Operand(isolate()->factory()->null_value())); in Generate()
1617 __ DropAndRet(4); in Generate()
1620 __ bind(&success); in Generate()
1622 __ lw(a1, UntagSmiFieldMemOperand( in Generate()
1625 __ Daddu(a1, a1, Operand(1)); in Generate()
1626 __ dsll(a1, a1, 1); // Multiply by 2. in Generate()
1629 __ ld(last_match_info_elements, MemOperand(sp, kLastMatchInfoOffset)); in Generate()
1630 __ JumpIfSmi(last_match_info_elements, &runtime); in Generate()
1632 __ ld(a0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset)); in Generate()
1633 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); in Generate()
1634 __ Branch(&runtime, ne, a0, Operand(at)); in Generate()
1637 __ ld(a0, in Generate()
1639 __ Daddu(a2, a1, Operand(RegExpMatchInfo::kLastMatchOverhead)); in Generate()
1641 __ SmiUntag(at, a0); in Generate()
1642 __ Branch(&runtime, gt, a2, Operand(at)); in Generate()
1647 __ SmiTag(a2, a1); // To smi. in Generate()
1648 __ sd(a2, FieldMemOperand(last_match_info_elements, in Generate()
1651 __ sd(subject, FieldMemOperand(last_match_info_elements, in Generate()
1653 __ mov(a2, subject); in Generate()
1654 __ RecordWriteField(last_match_info_elements, in Generate()
1657 __ mov(subject, a2); in Generate()
1658 __ sd(subject, FieldMemOperand(last_match_info_elements, in Generate()
1660 __ RecordWriteField(last_match_info_elements, in Generate()
1667 __ li(a2, Operand(address_of_static_offsets_vector)); in Generate()
1674 __ Daddu(a0, last_match_info_elements, in Generate()
1676 __ bind(&next_capture); in Generate()
1677 __ Dsubu(a1, a1, Operand(1)); in Generate()
1678 __ Branch(&done, lt, a1, Operand(zero_reg)); in Generate()
1680 __ lw(a3, MemOperand(a2, 0)); in Generate()
1681 __ daddiu(a2, a2, kIntSize); in Generate()
1683 __ SmiTag(a3); in Generate()
1684 __ sd(a3, MemOperand(a0, 0)); in Generate()
1685 __ Branch(&next_capture, USE_DELAY_SLOT); in Generate()
1686 __ daddiu(a0, a0, kPointerSize); // In branch delay slot. in Generate()
1688 __ bind(&done); in Generate()
1691 __ mov(v0, last_match_info_elements); in Generate()
1692 __ DropAndRet(4); in Generate()
1695 __ bind(&runtime); in Generate()
1696 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
1700 __ bind(&not_seq_nor_cons); in Generate()
1702 __ Branch(&not_long_external, gt, a1, Operand(kExternalStringTag)); in Generate()
1705 __ bind(&external_string); in Generate()
1706 __ ld(a0, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
1707 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset)); in Generate()
1711 __ And(at, a0, Operand(kIsIndirectStringMask)); in Generate()
1712 __ Assert(eq, in Generate()
1717 __ ld(subject, in Generate()
1721 __ Dsubu(subject, in Generate()
1724 __ jmp(&seq_string); // Go to (4). in Generate()
1727 __ bind(&not_long_external); in Generate()
1729 __ And(at, a1, Operand(kIsNotStringMask | kShortExternalStringMask)); in Generate()
1730 __ Branch(&runtime, ne, at, Operand(zero_reg)); in Generate()
1734 __ ld(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset)); in Generate()
1735 __ SmiUntag(t0); in Generate()
1736 __ ld(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); in Generate()
1737 __ jmp(&check_underlying); // Go to (1). in Generate()
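
The dsllv/daddu sequence in the middle of the setup computes the three byte addresses handed to the generated regexp code: the start of the (possibly sliced) subject payload, the position to resume scanning from, and the end of the string, all scaled by the character width (shift 0 for one-byte, 1 for two-byte). A hypothetical sketch of that arithmetic:

    #include <cstddef>
    #include <cstdint>

    struct ScanRange {
      const uint8_t* base;   // Start of the subject slice (t0).
      const uint8_t* start;  // Where scanning resumes (a2).
      const uint8_t* end;    // One past the last character (a3).
    };

    // Hypothetical sketch; payload stands for the SeqString character
    // data and char_size_log2 for the shift amount kept in a3.
    ScanRange ComputeScanRange(const uint8_t* payload, size_t slice_offset,
                               size_t previous_index, size_t length,
                               int char_size_log2) {
      const uint8_t* base = payload + (slice_offset << char_size_log2);
      return {base, base + (previous_index << char_size_log2),
              base + (length << char_size_log2)};
    }
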
1755 __ SmiTag(a0); in CallStubInRecordCallTarget()
1756 __ MultiPush(kSavedRegs); in CallStubInRecordCallTarget()
1758 __ CallStub(stub); in CallStubInRecordCallTarget()
1760 __ MultiPop(kSavedRegs); in CallStubInRecordCallTarget()
1761 __ SmiUntag(a0); in CallStubInRecordCallTarget()
1781 __ dsrl(a5, a3, 32 - kPointerSizeLog2); in GenerateRecordCallTarget()
1782 __ Daddu(a5, a2, Operand(a5)); in GenerateRecordCallTarget()
1783 __ ld(a5, FieldMemOperand(a5, FixedArray::kHeaderSize)); in GenerateRecordCallTarget()
1792 __ ld(weak_value, FieldMemOperand(a5, WeakCell::kValueOffset)); in GenerateRecordCallTarget()
1793 __ Branch(&done, eq, a1, Operand(weak_value)); in GenerateRecordCallTarget()
1794 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1795 __ Branch(&done, eq, a5, Operand(at)); in GenerateRecordCallTarget()
1796 __ ld(feedback_map, FieldMemOperand(a5, HeapObject::kMapOffset)); in GenerateRecordCallTarget()
1797 __ LoadRoot(at, Heap::kWeakCellMapRootIndex); in GenerateRecordCallTarget()
1798 __ Branch(&check_allocation_site, ne, feedback_map, Operand(at)); in GenerateRecordCallTarget()
1801 __ JumpIfSmi(weak_value, &initialize); in GenerateRecordCallTarget()
1802 __ jmp(&megamorphic); in GenerateRecordCallTarget()
1804 __ bind(&check_allocation_site); in GenerateRecordCallTarget()
1809 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); in GenerateRecordCallTarget()
1810 __ Branch(&miss, ne, feedback_map, Operand(at)); in GenerateRecordCallTarget()
1813 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, a5); in GenerateRecordCallTarget()
1814 __ Branch(&megamorphic, ne, a1, Operand(a5)); in GenerateRecordCallTarget()
1815 __ jmp(&done); in GenerateRecordCallTarget()
1817 __ bind(&miss); in GenerateRecordCallTarget()
1821 __ LoadRoot(at, Heap::kuninitialized_symbolRootIndex); in GenerateRecordCallTarget()
1822 __ Branch(&initialize, eq, a5, Operand(at)); in GenerateRecordCallTarget()
1825 __ bind(&megamorphic); in GenerateRecordCallTarget()
1826 __ dsrl(a5, a3, 32 - kPointerSizeLog2); in GenerateRecordCallTarget()
1827 __ Daddu(a5, a2, Operand(a5)); in GenerateRecordCallTarget()
1828 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1829 __ sd(at, FieldMemOperand(a5, FixedArray::kHeaderSize)); in GenerateRecordCallTarget()
1830 __ jmp(&done); in GenerateRecordCallTarget()
1833 __ bind(&initialize); in GenerateRecordCallTarget()
1835 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, a5); in GenerateRecordCallTarget()
1836 __ Branch(&not_array_function, ne, a1, Operand(a5)); in GenerateRecordCallTarget()
1843 __ Branch(&done); in GenerateRecordCallTarget()
1845 __ bind(&not_array_function); in GenerateRecordCallTarget()
1850 __ bind(&done); in GenerateRecordCallTarget()
1853 __ SmiScale(a4, a3, kPointerSizeLog2); in GenerateRecordCallTarget()
1854 __ Daddu(a5, a2, Operand(a4)); in GenerateRecordCallTarget()
1855 __ ld(a4, FieldMemOperand(a5, FixedArray::kHeaderSize + kPointerSize)); in GenerateRecordCallTarget()
1856 __ Daddu(a4, a4, Operand(Smi::FromInt(1))); in GenerateRecordCallTarget()
1857 __ sd(a4, FieldMemOperand(a5, FixedArray::kHeaderSize + kPointerSize)); in GenerateRecordCallTarget()
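
The recurring dsrl(reg, slot, 32 - kPointerSizeLog2) pattern here (and in IncrementCallCount below) leans on V8's 64-bit Smi encoding: the 32-bit payload sits in the upper word, so one right shift converts a tagged slot index straight into a byte offset for 8-byte feedback-vector entries. A hypothetical sketch:

    #include <cstdint>

    // Hypothetical sketch: a Smi n is encoded as uint64_t(n) << 32, so
    // (smi >> 32) * 8 collapses into a single shift by 32 - 3.
    uint64_t SmiIndexToByteOffset(uint64_t tagged_smi) {
      const int kPointerSizeLog2 = 3;  // 8-byte slots.
      return tagged_smi >> (32 - kPointerSizeLog2);
    }
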
1869 __ JumpIfSmi(a1, &non_function); in Generate()
1871 __ GetObjectType(a1, a5, a5); in Generate()
1872 __ Branch(&non_function, ne, a5, Operand(JS_FUNCTION_TYPE)); in Generate()
1876 __ dsrl(at, a3, 32 - kPointerSizeLog2); in Generate()
1877 __ Daddu(a5, a2, at); in Generate()
1880 __ ld(a2, FieldMemOperand(a5, FixedArray::kHeaderSize)); in Generate()
1881 __ ld(a5, FieldMemOperand(a2, AllocationSite::kMapOffset)); in Generate()
1882 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); in Generate()
1883 __ Branch(&feedback_register_initialized, eq, a5, Operand(at)); in Generate()
1884 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); in Generate()
1885 __ bind(&feedback_register_initialized); in Generate()
1887 __ AssertUndefinedOrAllocationSite(a2, a5); in Generate()
1890 __ mov(a3, a1); in Generate()
1894 __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate()
1895 __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset)); in Generate()
1896 __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1897 __ Jump(at); in Generate()
1899 __ bind(&non_function); in Generate()
1900 __ mov(a3, a1); in Generate()
1901 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate()
1913 __ JumpIfSmi(object_, receiver_not_string_); in GenerateFast()
1916 __ ld(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); in GenerateFast()
1917 __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); in GenerateFast()
1919 __ And(a4, result_, Operand(kIsNotStringMask)); in GenerateFast()
1920 __ Branch(receiver_not_string_, ne, a4, Operand(zero_reg)); in GenerateFast()
1924 __ JumpIfNotSmi(index_, &index_not_smi_); in GenerateFast()
1926 __ bind(&got_smi_index_); in GenerateFast()
1929 __ ld(a4, FieldMemOperand(object_, String::kLengthOffset)); in GenerateFast()
1930 __ Branch(index_out_of_range_, ls, a4, Operand(index_)); in GenerateFast()
1932 __ SmiUntag(index_); in GenerateFast()
1940 __ SmiTag(result_); in GenerateFast()
1941 __ bind(&exit_); in GenerateFast()
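
The ls branch above does the whole bounds check in one unsigned comparison on the tagged values: length <= index fails for any valid index and also catches negative indices, whose sign bit makes them enormous as unsigned numbers. A hypothetical sketch:

    #include <cstdint>

    // Hypothetical sketch of the single-branch bounds check above.
    bool IndexOutOfRange(uint64_t tagged_length, uint64_t tagged_index) {
      return tagged_length <= tagged_index;  // ls: unsigned lower-or-same.
    }
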
1947 __ dsrl(t0, slot, 32 - kPointerSizeLog2); in IncrementCallCount()
1948 __ Daddu(slot, feedback_vector, Operand(t0)); in IncrementCallCount()
1949 __ ld(t0, FieldMemOperand(slot, FixedArray::kHeaderSize + kPointerSize)); in IncrementCallCount()
1950 __ Daddu(t0, t0, Operand(Smi::FromInt(1))); in IncrementCallCount()
1951 __ sd(t0, FieldMemOperand(slot, FixedArray::kHeaderSize + kPointerSize)); in IncrementCallCount()
1960 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, at); in HandleArrayCase()
1961 __ Branch(miss, ne, a1, Operand(at)); in HandleArrayCase()
1966 __ mov(a2, a4); in HandleArrayCase()
1967 __ mov(a3, a1); in HandleArrayCase()
1969 __ TailCallStub(&stub); in HandleArrayCase()
1981 __ dsrl(a4, a3, 32 - kPointerSizeLog2); in Generate()
1982 __ Daddu(a4, a2, Operand(a4)); in Generate()
1983 __ ld(a4, FieldMemOperand(a4, FixedArray::kHeaderSize)); in Generate()
1999 __ ld(a5, FieldMemOperand(a4, WeakCell::kValueOffset)); in Generate()
2000 __ Branch(&extra_checks_or_miss, ne, a1, Operand(a5)); in Generate()
2004 __ JumpIfSmi(a1, &extra_checks_or_miss); in Generate()
2006 __ bind(&call_function); in Generate()
2010 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), in Generate()
2014 __ bind(&extra_checks_or_miss); in Generate()
2017 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); in Generate()
2018 __ Branch(&call, eq, a4, Operand(at)); in Generate()
2021 __ ld(a5, FieldMemOperand(a4, HeapObject::kMapOffset)); in Generate()
2022 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); in Generate()
2023 __ Branch(&not_allocation_site, ne, a5, Operand(at)); in Generate()
2027 __ bind(&not_allocation_site); in Generate()
2032 __ Branch(&miss); in Generate()
2035 __ LoadRoot(at, Heap::kuninitialized_symbolRootIndex); in Generate()
2036 __ Branch(&uninitialized, eq, a4, Operand(at)); in Generate()
2040 __ AssertNotSmi(a4); in Generate()
2041 __ GetObjectType(a4, a5, a5); in Generate()
2042 __ Branch(&miss, ne, a5, Operand(JS_FUNCTION_TYPE)); in Generate()
2043 __ dsrl(a4, a3, 32 - kPointerSizeLog2); in Generate()
2044 __ Daddu(a4, a2, Operand(a4)); in Generate()
2045 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); in Generate()
2046 __ sd(at, FieldMemOperand(a4, FixedArray::kHeaderSize)); in Generate()
2048 __ bind(&call); in Generate()
2051 __ bind(&call_count_incremented); in Generate()
2053 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), in Generate()
2056 __ bind(&uninitialized); in Generate()
2059 __ JumpIfSmi(a1, &miss); in Generate()
2062 __ GetObjectType(a1, a4, a4); in Generate()
2063 __ Branch(&miss, ne, a4, Operand(JS_FUNCTION_TYPE)); in Generate()
2067 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, a4); in Generate()
2068 __ Branch(&miss, eq, a1, Operand(a4)); in Generate()
2071 __ ld(t0, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate()
2072 __ ld(t0, ContextMemOperand(t0, Context::NATIVE_CONTEXT_INDEX)); in Generate()
2073 __ ld(t1, NativeContextMemOperand()); in Generate()
2074 __ Branch(&miss, ne, t0, Operand(t1)); in Generate()
2083 __ SmiTag(a0); in Generate()
2084 __ Push(a0); in Generate()
2085 __ Push(a2, a3); in Generate()
2086 __ Push(cp, a1); in Generate()
2087 __ CallStub(&create_stub); in Generate()
2088 __ Pop(cp, a1); in Generate()
2089 __ Pop(a2, a3); in Generate()
2090 __ Pop(a0); in Generate()
2091 __ SmiUntag(a0); in Generate()
2094 __ Branch(&call_function); in Generate()
2098 __ bind(&miss); in Generate()
2101 __ Branch(&call_count_incremented); in Generate()
2109 __ SmiTag(a0); in GenerateMiss()
2110 __ Push(a0); in GenerateMiss()
2113 __ Push(a1, a2, a3); in GenerateMiss()
2116 __ CallRuntime(Runtime::kCallIC_Miss); in GenerateMiss()
2119 __ mov(a1, v0); in GenerateMiss()
2122 __ Pop(a0); in GenerateMiss()
2123 __ SmiUntag(a0); in GenerateMiss()
2130 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); in GenerateSlow()
2133 __ bind(&index_not_smi_); in GenerateSlow()
2135 __ CheckMap(index_, in GenerateSlow()
2143 __ Push(LoadWithVectorDescriptor::VectorRegister(), in GenerateSlow()
2146 __ Push(object_, index_); in GenerateSlow()
2148 __ CallRuntime(Runtime::kNumberToSmi); in GenerateSlow()
2153 __ Move(index_, v0); in GenerateSlow()
2155 __ Pop(LoadWithVectorDescriptor::VectorRegister(), in GenerateSlow()
2158 __ pop(object_); in GenerateSlow()
2161 __ ld(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); in GenerateSlow()
2162 __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); in GenerateSlow()
2165 __ JumpIfNotSmi(index_, index_out_of_range_); in GenerateSlow()
2167 __ Branch(&got_smi_index_); in GenerateSlow()
2172 __ bind(&call_runtime_); in GenerateSlow()
2174 __ SmiTag(index_); in GenerateSlow()
2175 __ Push(object_, index_); in GenerateSlow()
2176 __ CallRuntime(Runtime::kStringCharCodeAtRT); in GenerateSlow()
2178 __ Move(result_, v0); in GenerateSlow()
2181 __ jmp(&exit_); in GenerateSlow()
2183 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); in GenerateSlow()
2192 __ JumpIfNotSmi(code_, &slow_case_); in GenerateFast()
2193 __ Branch(&slow_case_, hi, code_, in GenerateFast()
2196 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); in GenerateFast()
2198 __ SmiScale(at, code_, kPointerSizeLog2); in GenerateFast()
2199 __ Daddu(result_, result_, at); in GenerateFast()
2200 __ ld(result_, FieldMemOperand(result_, FixedArray::kHeaderSize)); in GenerateFast()
2201 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); in GenerateFast()
2202 __ Branch(&slow_case_, eq, result_, Operand(at)); in GenerateFast()
2203 __ bind(&exit_); in GenerateFast()
2210 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); in GenerateSlow()
2212 __ bind(&slow_case_); in GenerateSlow()
2214 __ push(code_); in GenerateSlow()
2215 __ CallRuntime(Runtime::kStringCharFromCode); in GenerateSlow()
2216 __ Move(result_, v0); in GenerateSlow()
2219 __ Branch(&exit_); in GenerateSlow()
2221 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); in GenerateSlow()
2236 __ And(scratch, dest, Operand(kPointerAlignmentMask)); in GenerateCopyCharacters()
2237 __ Check(eq, in GenerateCopyCharacters()
2248 __ Daddu(count, count, count); in GenerateCopyCharacters()
2252 __ Daddu(limit, dest, Operand(count)); in GenerateCopyCharacters()
2256 __ Branch(&loop_entry); in GenerateCopyCharacters()
2257 __ bind(&loop); in GenerateCopyCharacters()
2258 __ lbu(scratch, MemOperand(src)); in GenerateCopyCharacters()
2259 __ daddiu(src, src, 1); in GenerateCopyCharacters()
2260 __ sb(scratch, MemOperand(dest)); in GenerateCopyCharacters()
2261 __ daddiu(dest, dest, 1); in GenerateCopyCharacters()
2262 __ bind(&loop_entry); in GenerateCopyCharacters()
2263 __ Branch(&loop, lt, dest, Operand(limit)); in GenerateCopyCharacters()
2265 __ bind(&done); in GenerateCopyCharacters()
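
The loop above is a plain byte copy: compute limit = dest + count, then lbu/sb one byte at a time until dest reaches the limit (for two-byte strings the stub doubles count first, so the body never changes). A hypothetical C++ equivalent:

    #include <cstddef>
    #include <cstdint>

    // Hypothetical sketch of the lbu/sb copy loop above.
    void CopyCharacters(uint8_t* dest, const uint8_t* src,
                        size_t count_in_bytes) {
      uint8_t* limit = dest + count_in_bytes;  // Daddu(limit, dest, count).
      while (dest < limit) *dest++ = *src++;   // One byte per iteration.
    }
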
2276 __ ld(length, FieldMemOperand(left, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
2277 __ ld(scratch2, FieldMemOperand(right, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
2278 __ Branch(&check_zero_length, eq, length, Operand(scratch2)); in GenerateFlatOneByteStringEquals()
2279 __ bind(&strings_not_equal); in GenerateFlatOneByteStringEquals()
2281 __ li(v0, Operand(Smi::FromInt(NOT_EQUAL))); in GenerateFlatOneByteStringEquals()
2282 __ Ret(); in GenerateFlatOneByteStringEquals()
2286 __ bind(&check_zero_length); in GenerateFlatOneByteStringEquals()
2288 __ Branch(&compare_chars, ne, length, Operand(zero_reg)); in GenerateFlatOneByteStringEquals()
2290 __ Ret(USE_DELAY_SLOT); in GenerateFlatOneByteStringEquals()
2291 __ li(v0, Operand(Smi::FromInt(EQUAL))); in GenerateFlatOneByteStringEquals()
2294 __ bind(&compare_chars); in GenerateFlatOneByteStringEquals()
2300 __ Ret(USE_DELAY_SLOT); in GenerateFlatOneByteStringEquals()
2301 __ li(v0, Operand(Smi::FromInt(EQUAL))); in GenerateFlatOneByteStringEquals()
2310 __ ld(scratch1, FieldMemOperand(left, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
2311 __ ld(scratch2, FieldMemOperand(right, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
2312 __ Dsubu(scratch3, scratch1, Operand(scratch2)); in GenerateCompareFlatOneByteStrings()
2314 __ slt(scratch4, scratch2, scratch1); in GenerateCompareFlatOneByteStrings()
2315 __ Movn(scratch1, scratch2, scratch4); in GenerateCompareFlatOneByteStrings()
2318 __ Branch(&compare_lengths, eq, min_length, Operand(zero_reg)); in GenerateCompareFlatOneByteStrings()
2325 __ bind(&compare_lengths); in GenerateCompareFlatOneByteStrings()
2328 __ mov(scratch2, length_delta); in GenerateCompareFlatOneByteStrings()
2329 __ mov(scratch4, zero_reg); in GenerateCompareFlatOneByteStrings()
2330 __ mov(v0, zero_reg); in GenerateCompareFlatOneByteStrings()
2332 __ bind(&result_not_equal); in GenerateCompareFlatOneByteStrings()
2336 __ Branch(&ret, eq, scratch2, Operand(scratch4)); in GenerateCompareFlatOneByteStrings()
2337 __ li(v0, Operand(Smi::FromInt(GREATER))); in GenerateCompareFlatOneByteStrings()
2338 __ Branch(&ret, gt, scratch2, Operand(scratch4)); in GenerateCompareFlatOneByteStrings()
2339 __ li(v0, Operand(Smi::FromInt(LESS))); in GenerateCompareFlatOneByteStrings()
2340 __ bind(&ret); in GenerateCompareFlatOneByteStrings()
2341 __ Ret(); in GenerateCompareFlatOneByteStrings()
2352 __ SmiUntag(length); in GenerateOneByteCharsCompareLoop()
2353 __ Daddu(scratch1, length, in GenerateOneByteCharsCompareLoop()
2355 __ Daddu(left, left, Operand(scratch1)); in GenerateOneByteCharsCompareLoop()
2356 __ Daddu(right, right, Operand(scratch1)); in GenerateOneByteCharsCompareLoop()
2357 __ Dsubu(length, zero_reg, length); in GenerateOneByteCharsCompareLoop()
2363 __ bind(&loop); in GenerateOneByteCharsCompareLoop()
2364 __ Daddu(scratch3, left, index); in GenerateOneByteCharsCompareLoop()
2365 __ lbu(scratch1, MemOperand(scratch3)); in GenerateOneByteCharsCompareLoop()
2366 __ Daddu(scratch3, right, index); in GenerateOneByteCharsCompareLoop()
2367 __ lbu(scratch2, MemOperand(scratch3)); in GenerateOneByteCharsCompareLoop()
2368 __ Branch(chars_not_equal, ne, scratch1, Operand(scratch2)); in GenerateOneByteCharsCompareLoop()
2369 __ Daddu(index, index, 1); in GenerateOneByteCharsCompareLoop()
2370 __ Branch(&loop, ne, index, Operand(zero_reg)); in GenerateOneByteCharsCompareLoop()
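
The comparison loop above uses the classic negative-index trick: point both strings one past the compared region, run an index from -length up to zero, and exit on the first mismatch, so the loop needs a single counter and a single exit branch. A hypothetical C++ rendering:

    #include <cstddef>
    #include <cstdint>

    // Hypothetical sketch of the negative-index compare loop above.
    bool HasDifferentChars(const uint8_t* left, const uint8_t* right,
                           size_t length) {
      left += length;                            // One past the region.
      right += length;
      ptrdiff_t index = -static_cast<ptrdiff_t>(length);
      while (index != 0) {
        if (left[index] != right[index]) return true;  // chars_not_equal.
        ++index;
      }
      return false;                              // All characters equal.
    }
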
2384 __ li(a2, isolate()->factory()->undefined_value()); in Generate()
2388 __ And(at, a2, Operand(kSmiTagMask)); in Generate()
2389 __ Assert(ne, kExpectedAllocationSite, at, Operand(zero_reg)); in Generate()
2390 __ ld(a4, FieldMemOperand(a2, HeapObject::kMapOffset)); in Generate()
2391 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); in Generate()
2392 __ Assert(eq, kExpectedAllocationSite, a4, Operand(at)); in Generate()
2398 __ TailCallStub(&stub); in Generate()
2406 __ CheckMap(a1, a2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); in GenerateBooleans()
2407 __ CheckMap(a0, a3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); in GenerateBooleans()
2409 __ ld(a1, FieldMemOperand(a1, Oddball::kToNumberOffset)); in GenerateBooleans()
2410 __ AssertSmi(a1); in GenerateBooleans()
2411 __ ld(a0, FieldMemOperand(a0, Oddball::kToNumberOffset)); in GenerateBooleans()
2412 __ AssertSmi(a0); in GenerateBooleans()
2414 __ Ret(USE_DELAY_SLOT); in GenerateBooleans()
2415 __ Dsubu(v0, a1, a0); in GenerateBooleans()
2417 __ bind(&miss); in GenerateBooleans()
2425 __ Or(a2, a1, a0); in GenerateSmis()
2426 __ JumpIfNotSmi(a2, &miss); in GenerateSmis()
2430 __ Ret(USE_DELAY_SLOT); in GenerateSmis()
2431 __ Dsubu(v0, a0, a1); in GenerateSmis()
2434 __ SmiUntag(a1); in GenerateSmis()
2435 __ SmiUntag(a0); in GenerateSmis()
2436 __ Ret(USE_DELAY_SLOT); in GenerateSmis()
2437 __ Dsubu(v0, a1, a0); in GenerateSmis()
2440 __ bind(&miss); in GenerateSmis()
2453 __ JumpIfNotSmi(a1, &miss); in GenerateNumbers()
2456 __ JumpIfNotSmi(a0, &miss); in GenerateNumbers()
2463 __ JumpIfSmi(a0, &right_smi); in GenerateNumbers()
2464 __ CheckMap(a0, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined1, in GenerateNumbers()
2466 __ Dsubu(a2, a0, Operand(kHeapObjectTag)); in GenerateNumbers()
2467 __ ldc1(f2, MemOperand(a2, HeapNumber::kValueOffset)); in GenerateNumbers()
2468 __ Branch(&left); in GenerateNumbers()
2469 __ bind(&right_smi); in GenerateNumbers()
2470 __ SmiUntag(a2, a0); // Can't clobber a0 yet. in GenerateNumbers()
2472 __ mtc1(a2, single_scratch); in GenerateNumbers()
2473 __ cvt_d_w(f2, single_scratch); in GenerateNumbers()
2475 __ bind(&left); in GenerateNumbers()
2476 __ JumpIfSmi(a1, &left_smi); in GenerateNumbers()
2477 __ CheckMap(a1, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined2, in GenerateNumbers()
2479 __ Dsubu(a2, a1, Operand(kHeapObjectTag)); in GenerateNumbers()
2480 __ ldc1(f0, MemOperand(a2, HeapNumber::kValueOffset)); in GenerateNumbers()
2481 __ Branch(&done); in GenerateNumbers()
2482 __ bind(&left_smi); in GenerateNumbers()
2483 __ SmiUntag(a2, a1); // Can't clobber a1 yet. in GenerateNumbers()
2485 __ mtc1(a2, single_scratch); in GenerateNumbers()
2486 __ cvt_d_w(f0, single_scratch); in GenerateNumbers()
2488 __ bind(&done); in GenerateNumbers()
2493 __ BranchF(&fpu_eq, &unordered, eq, f0, f2); in GenerateNumbers()
2496 __ BranchF(&fpu_lt, NULL, lt, f0, f2); in GenerateNumbers()
2500 __ Ret(USE_DELAY_SLOT); in GenerateNumbers()
2501 __ li(v0, Operand(GREATER)); in GenerateNumbers()
2503 __ bind(&fpu_eq); in GenerateNumbers()
2504 __ Ret(USE_DELAY_SLOT); in GenerateNumbers()
2505 __ li(v0, Operand(EQUAL)); in GenerateNumbers()
2507 __ bind(&fpu_lt); in GenerateNumbers()
2508 __ Ret(USE_DELAY_SLOT); in GenerateNumbers()
2509 __ li(v0, Operand(LESS)); in GenerateNumbers()
2511 __ bind(&unordered); in GenerateNumbers()
2512 __ bind(&generic_stub); in GenerateNumbers()
2515 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); in GenerateNumbers()
2517 __ bind(&maybe_undefined1); in GenerateNumbers()
2519 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); in GenerateNumbers()
2520 __ Branch(&miss, ne, a0, Operand(at)); in GenerateNumbers()
2521 __ JumpIfSmi(a1, &unordered); in GenerateNumbers()
2522 __ GetObjectType(a1, a2, a2); in GenerateNumbers()
2523 __ Branch(&maybe_undefined2, ne, a2, Operand(HEAP_NUMBER_TYPE)); in GenerateNumbers()
2524 __ jmp(&unordered); in GenerateNumbers()
2527 __ bind(&maybe_undefined2); in GenerateNumbers()
2529 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); in GenerateNumbers()
2530 __ Branch(&unordered, eq, a1, Operand(at)); in GenerateNumbers()
2533 __ bind(&miss); in GenerateNumbers()
2549 __ JumpIfEitherSmi(left, right, &miss); in GenerateInternalizedStrings()
2552 __ ld(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2553 __ ld(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2554 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2555 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2557 __ Or(tmp1, tmp1, Operand(tmp2)); in GenerateInternalizedStrings()
2558 __ And(at, tmp1, Operand(kIsNotStringMask | kIsNotInternalizedMask)); in GenerateInternalizedStrings()
2559 __ Branch(&miss, ne, at, Operand(zero_reg)); in GenerateInternalizedStrings()
2566 __ mov(v0, right); in GenerateInternalizedStrings()
2568 __ Ret(ne, left, Operand(right)); in GenerateInternalizedStrings()
2570 __ Ret(USE_DELAY_SLOT); in GenerateInternalizedStrings()
2571 __ li(v0, Operand(Smi::FromInt(EQUAL))); in GenerateInternalizedStrings()
2573 __ bind(&miss); in GenerateInternalizedStrings()
2590 __ JumpIfEitherSmi(left, right, &miss); in GenerateUniqueNames()
2594 __ ld(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateUniqueNames()
2595 __ ld(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateUniqueNames()
2596 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2597 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2599 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss); in GenerateUniqueNames()
2600 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss); in GenerateUniqueNames()
2603 __ mov(v0, a0); in GenerateUniqueNames()
2607 __ Branch(&done, ne, left, Operand(right)); in GenerateUniqueNames()
2613 __ li(v0, Operand(Smi::FromInt(EQUAL))); in GenerateUniqueNames()
2614 __ bind(&done); in GenerateUniqueNames()
2615 __ Ret(); in GenerateUniqueNames()
2617 __ bind(&miss); in GenerateUniqueNames()
2638 __ JumpIfEitherSmi(left, right, &miss); in GenerateStrings()
2642 __ ld(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateStrings()
2643 __ ld(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateStrings()
2644 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateStrings()
2645 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateStrings()
2647 __ Or(tmp3, tmp1, tmp2); in GenerateStrings()
2648 __ And(tmp5, tmp3, Operand(kIsNotStringMask)); in GenerateStrings()
2649 __ Branch(&miss, ne, tmp5, Operand(zero_reg)); in GenerateStrings()
2655 __ Branch(&left_ne_right, ne, left, Operand(right)); in GenerateStrings()
2656 __ Ret(USE_DELAY_SLOT); in GenerateStrings()
2657 __ mov(v0, zero_reg); // In the delay slot. in GenerateStrings()
2658 __ bind(&left_ne_right); in GenerateStrings()
2668 __ Or(tmp3, tmp1, Operand(tmp2)); in GenerateStrings()
2669 __ And(tmp5, tmp3, Operand(kIsNotInternalizedMask)); in GenerateStrings()
2671 __ Branch(&is_symbol, ne, tmp5, Operand(zero_reg)); in GenerateStrings()
2675 __ Ret(USE_DELAY_SLOT); in GenerateStrings()
2676 __ mov(v0, a0); // In the delay slot. in GenerateStrings()
2677 __ bind(&is_symbol); in GenerateStrings()
2682 __ JumpIfBothInstanceTypesAreNotSequentialOneByte(tmp1, tmp2, tmp3, tmp4, in GenerateStrings()
2695 __ bind(&runtime); in GenerateStrings()
2699 __ Push(left, right); in GenerateStrings()
2700 __ CallRuntime(Runtime::kStringEqual); in GenerateStrings()
2702 __ LoadRoot(a0, Heap::kTrueValueRootIndex); in GenerateStrings()
2703 __ Ret(USE_DELAY_SLOT); in GenerateStrings()
2704 __ Subu(v0, v0, a0); // In delay slot. in GenerateStrings()
2706 __ Push(left, right); in GenerateStrings()
2707 __ TailCallRuntime(Runtime::kStringCompare); in GenerateStrings()
2710 __ bind(&miss); in GenerateStrings()
2718 __ And(a2, a1, Operand(a0)); in GenerateReceivers()
2719 __ JumpIfSmi(a2, &miss); in GenerateReceivers()
2722 __ GetObjectType(a0, a2, a2); in GenerateReceivers()
2723 __ Branch(&miss, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE)); in GenerateReceivers()
2724 __ GetObjectType(a1, a2, a2); in GenerateReceivers()
2725 __ Branch(&miss, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE)); in GenerateReceivers()
2728 __ Ret(USE_DELAY_SLOT); in GenerateReceivers()
2729 __ dsubu(v0, a0, a1); in GenerateReceivers()
2731 __ bind(&miss); in GenerateReceivers()
2739 __ And(a2, a1, a0); in GenerateKnownReceivers()
2740 __ JumpIfSmi(a2, &miss); in GenerateKnownReceivers()
2741 __ GetWeakValue(a4, cell); in GenerateKnownReceivers()
2742 __ ld(a2, FieldMemOperand(a0, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2743 __ ld(a3, FieldMemOperand(a1, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2744 __ Branch(&miss, ne, a2, Operand(a4)); in GenerateKnownReceivers()
2745 __ Branch(&miss, ne, a3, Operand(a4)); in GenerateKnownReceivers()
2748 __ Ret(USE_DELAY_SLOT); in GenerateKnownReceivers()
2749 __ dsubu(v0, a0, a1); in GenerateKnownReceivers()
2752 __ li(a2, Operand(Smi::FromInt(GREATER))); in GenerateKnownReceivers()
2754 __ li(a2, Operand(Smi::FromInt(LESS))); in GenerateKnownReceivers()
2756 __ Push(a1, a0, a2); in GenerateKnownReceivers()
2757 __ TailCallRuntime(Runtime::kCompare); in GenerateKnownReceivers()
2760 __ bind(&miss); in GenerateKnownReceivers()
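Both receiver paths above end in `dsubu(v0, a0, a1)` from the delay slot: identical receivers yield 0 (EQUAL) and any distinct pair yields some non-zero value, which is all an equality-only comparison needs. GenerateKnownReceivers additionally guards both maps against the map held in a weak cell. An illustrative model, with the weak-cell read elided (a cleared cell is modeled as null):

    #include <cstdint>

    // Returns false on a miss (map mismatch, or the weak cell was cleared).
    bool TryKnownReceiverCompare(uintptr_t a0, uintptr_t a1,
                                 const void* map0, const void* map1,
                                 const void* expected_map, intptr_t* v0) {
      if (expected_map == nullptr) return false;  // cleared weak cell
      if (map0 != expected_map || map1 != expected_map) return false;
      *v0 = static_cast<intptr_t>(a0) - static_cast<intptr_t>(a1);
      return true;  // 0 iff the receivers are the same object
    }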
2769 __ Push(a1, a0); in GenerateMiss()
2770 __ Push(ra, a1, a0); in GenerateMiss()
2771 __ li(a4, Operand(Smi::FromInt(op()))); in GenerateMiss()
2772 __ daddiu(sp, sp, -kPointerSize); in GenerateMiss()
2773 __ CallRuntime(Runtime::kCompareIC_Miss, 3, kDontSaveFPRegs, in GenerateMiss()
2775 __ sd(a4, MemOperand(sp)); // In the delay slot. in GenerateMiss()
2777 __ Daddu(a2, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); in GenerateMiss()
2779 __ Pop(a1, a0, ra); in GenerateMiss()
2781 __ Jump(a2); in GenerateMiss()
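GenerateMiss implements the compare-IC re-entry protocol: the operands and return address are preserved across the Runtime::kCompareIC_Miss call, the op Smi is stored into a reserved stack slot from the delay slot, and the runtime returns the freshly selected IC code object, whose entry point is its tagged address plus the header size minus the heap-object tag. A sketch of that last address computation (header size and tag values assumed for illustration):

    #include <cstdint>

    constexpr int kHeaderSize = 64;    // assumed Code::kHeaderSize
    constexpr int kHeapObjectTag = 1;  // V8 heap pointers are tagged

    // Mirrors __ Daddu(a2, v0, Operand(Code::kHeaderSize - kHeapObjectTag)).
    inline void* EntryFromTaggedCode(void* tagged_code) {
      return reinterpret_cast<uint8_t*>(tagged_code) +
             (kHeaderSize - kHeapObjectTag);
    }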
2791 __ daddiu(sp, sp, -kCArgsSlotsSize); in Generate()
2794 __ sd(ra, MemOperand(sp, kCArgsSlotsSize)); in Generate()
2795 __ Call(t9); // Call the C++ function. in Generate()
2796 __ ld(t9, MemOperand(sp, kCArgsSlotsSize)); in Generate()
2802 __ Uld(a4, MemOperand(t9)); in Generate()
2803 __ Assert(ne, kReceivedInvalidReturnAddress, a4, in Generate()
2806 __ Jump(t9); in Generate()
2814 __ Move(t9, target); in GenerateCall()
2815 __ li(at, Operand(loc, RelocInfo::CODE_TARGET), CONSTANT_SIZE); in GenerateCall()
2816 __ Call(at); in GenerateCall()
2838 __ SmiLoadUntag(index, FieldMemOperand(properties, kCapacityOffset)); in GenerateNegativeLookup()
2839 __ Dsubu(index, index, Operand(1)); in GenerateNegativeLookup()
2840 __ And(index, index, in GenerateNegativeLookup()
2845 __ Dlsa(index, index, index, 1); // index *= 3. in GenerateNegativeLookup()
2852 __ Dlsa(tmp, properties, index, kPointerSizeLog2); in GenerateNegativeLookup()
2853 __ ld(entity_name, FieldMemOperand(tmp, kElementsStartOffset)); in GenerateNegativeLookup()
2856 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex); in GenerateNegativeLookup()
2857 __ Branch(done, eq, entity_name, Operand(tmp)); in GenerateNegativeLookup()
2860 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex); in GenerateNegativeLookup()
2863 __ Branch(miss, eq, entity_name, Operand(Handle<Name>(name))); in GenerateNegativeLookup()
2866 __ Branch(&good, eq, entity_name, Operand(tmp)); in GenerateNegativeLookup()
2869 __ ld(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset)); in GenerateNegativeLookup()
2870 __ lbu(entity_name, in GenerateNegativeLookup()
2872 __ JumpIfNotUniqueNameInstanceType(entity_name, miss); in GenerateNegativeLookup()
2873 __ bind(&good); in GenerateNegativeLookup()
2876 __ ld(properties, in GenerateNegativeLookup()
2884 __ MultiPush(spill_mask); in GenerateNegativeLookup()
2885 __ ld(a0, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); in GenerateNegativeLookup()
2886 __ li(a1, Operand(Handle<Name>(name))); in GenerateNegativeLookup()
2888 __ CallStub(&stub); in GenerateNegativeLookup()
2889 __ mov(at, v0); in GenerateNegativeLookup()
2890 __ MultiPop(spill_mask); in GenerateNegativeLookup()
2892 __ Branch(done, eq, at, Operand(zero_reg)); in GenerateNegativeLookup()
2893 __ Branch(miss, ne, at, Operand(zero_reg)); in GenerateNegativeLookup()
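GenerateNegativeLookup probes the property dictionary only far enough to prove the name is absent: an undefined key ends the probe chain (the name is definitely not present), a deleted-slot hole keeps probing, and either the name itself or any non-unique-name key forces a miss; if the inlined probes cannot decide, the full lookup stub is called. A C++ model of the inlined probes, with the entry layout (3-word entries) and probe-offset formula assumed from NameDictionary:

    #include <cstdint>

    // Quadratic probe offsets, as in NameDictionary::GetProbeOffset (assumed).
    inline uint32_t ProbeOffset(uint32_t i) { return (i + i * i) >> 1; }

    enum class Neg { kAbsent, kMiss, kFallback };

    // keys[] models only the key word of each 3-word dictionary entry;
    // 0 stands for undefined and 1 for the-hole (a deleted slot).
    Neg NegativeLookup(const uintptr_t* keys, uint32_t capacity, uint32_t hash,
                       uintptr_t name, uint32_t inlined_probes) {
      uint32_t mask = capacity - 1;  // capacity is a power of two
      for (uint32_t i = 0; i < inlined_probes; i++) {
        uintptr_t key = keys[((hash + ProbeOffset(i)) & mask) * 3];
        if (key == 0) return Neg::kAbsent;  // undefined: definitely absent
        if (key == name) return Neg::kMiss;
        if (key == 1) continue;             // the-hole: keep probing
        // Any other non-unique-name key is also a miss in the real stub.
      }
      return Neg::kFallback;  // call the full NameDictionaryLookupStub
    }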
2913 __ AssertName(name); in GeneratePositiveLookup()
2916 __ ld(scratch1, FieldMemOperand(elements, kCapacityOffset)); in GeneratePositiveLookup()
2917 __ SmiUntag(scratch1); in GeneratePositiveLookup()
2918 __ Dsubu(scratch1, scratch1, Operand(1)); in GeneratePositiveLookup()
2925 __ lwu(scratch2, FieldMemOperand(name, Name::kHashFieldOffset)); in GeneratePositiveLookup()
2932 __ Daddu(scratch2, scratch2, Operand( in GeneratePositiveLookup()
2935 __ dsrl(scratch2, scratch2, Name::kHashShift); in GeneratePositiveLookup()
2936 __ And(scratch2, scratch1, scratch2); in GeneratePositiveLookup()
2941 __ Dlsa(scratch2, scratch2, scratch2, 1); in GeneratePositiveLookup()
2944 __ Dlsa(scratch2, elements, scratch2, kPointerSizeLog2); in GeneratePositiveLookup()
2945 __ ld(at, FieldMemOperand(scratch2, kElementsStartOffset)); in GeneratePositiveLookup()
2946 __ Branch(done, eq, name, Operand(at)); in GeneratePositiveLookup()
2954 __ MultiPush(spill_mask); in GeneratePositiveLookup()
2957 __ Move(a1, name); in GeneratePositiveLookup()
2958 __ Move(a0, elements); in GeneratePositiveLookup()
2960 __ Move(a0, elements); in GeneratePositiveLookup()
2961 __ Move(a1, name); in GeneratePositiveLookup()
2964 __ CallStub(&stub); in GeneratePositiveLookup()
2965 __ mov(scratch2, a2); in GeneratePositiveLookup()
2966 __ mov(at, v0); in GeneratePositiveLookup()
2967 __ MultiPop(spill_mask); in GeneratePositiveLookup()
2969 __ Branch(done, ne, at, Operand(zero_reg)); in GeneratePositiveLookup()
2970 __ Branch(miss, eq, at, Operand(zero_reg)); in GeneratePositiveLookup()
2997 __ ld(mask, FieldMemOperand(dictionary, kCapacityOffset)); in Generate()
2998 __ SmiUntag(mask); in Generate()
2999 __ Dsubu(mask, mask, Operand(1)); in Generate()
3001 __ lwu(hash, FieldMemOperand(key, Name::kHashFieldOffset)); in Generate()
3003 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex); in Generate()
3014 __ Daddu(index, hash, Operand( in Generate()
3017 __ mov(index, hash); in Generate()
3019 __ dsrl(index, index, Name::kHashShift); in Generate()
3020 __ And(index, mask, index); in Generate()
3025 __ Dlsa(index, index, index, 1); in Generate()
3028 __ Dlsa(index, dictionary, index, kPointerSizeLog2); in Generate()
3029 __ ld(entry_key, FieldMemOperand(index, kElementsStartOffset)); in Generate()
3032 __ Branch(&not_in_dictionary, eq, entry_key, Operand(undefined)); in Generate()
3035 __ Branch(&in_dictionary, eq, entry_key, Operand(key)); in Generate()
3039 __ ld(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset)); in Generate()
3040 __ lbu(entry_key, in Generate()
3042 __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary); in Generate()
3046 __ bind(&maybe_in_dictionary); in Generate()
3051 __ Ret(USE_DELAY_SLOT); in Generate()
3052 __ mov(result, zero_reg); in Generate()
3055 __ bind(&in_dictionary); in Generate()
3056 __ Ret(USE_DELAY_SLOT); in Generate()
3057 __ li(result, 1); in Generate()
3059 __ bind(&not_in_dictionary); in Generate()
3060 __ Ret(USE_DELAY_SLOT); in Generate()
3061 __ mov(result, zero_reg); in Generate()
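The standalone lookup stub above runs the same probe sequence to completion and reports through `result` from the delay slot of each `Ret`: 1 when the key is found, 0 when an undefined slot proves absence, and 0 again from &maybe_in_dictionary, where a non-unique-name key means probing cannot decide (a safe answer only because the stub's callers use it for negative lookups). A compact model of the loop:

    #include <cstdint>

    inline uint32_t QuadraticOffset(uint32_t i) { return (i + i * i) >> 1; }

    // keys[] holds the key word of each 3-word entry; `undefined` is the
    // sentinel root value. Returns 1 / 0 exactly as the stub does in `result`.
    int DictionaryProbe(const uintptr_t* keys, uint32_t capacity, uint32_t hash,
                        uintptr_t key, uintptr_t undefined,
                        uint32_t total_probes) {
      uint32_t mask = capacity - 1;
      for (uint32_t i = 0; i < total_probes; i++) {
        uintptr_t entry_key = keys[((hash + QuadraticOffset(i)) & mask) * 3];
        if (entry_key == undefined) return 0;  // not in dictionary
        if (entry_key == key) return 1;        // in dictionary
      }
      return 0;  // maybe_in_dictionary: reported as "not found"
    }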
3089 __ beq(zero_reg, zero_reg, &skip_to_incremental_noncompacting); in Generate()
3090 __ nop(); in Generate()
3091 __ beq(zero_reg, zero_reg, &skip_to_incremental_compacting); in Generate()
3092 __ nop(); in Generate()
3095 __ RememberedSetHelper(object(), in Generate()
3101 __ Ret(); in Generate()
3103 __ bind(&skip_to_incremental_noncompacting); in Generate()
3106 __ bind(&skip_to_incremental_compacting); in Generate()
3123 __ ld(regs_.scratch0(), MemOperand(regs_.address(), 0)); in GenerateIncremental()
3124 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value. in GenerateIncremental()
3128 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(), in GenerateIncremental()
3137 __ RememberedSetHelper(object(), in GenerateIncremental()
3143 __ bind(&dont_need_remembered_set); in GenerateIncremental()
3150 __ Ret(); in GenerateIncremental()
3157 __ PrepareCallCFunction(argument_count, regs_.scratch0()); in InformIncrementalMarker()
3162 __ Move(address, regs_.address()); in InformIncrementalMarker()
3163 __ Move(a0, regs_.object()); in InformIncrementalMarker()
3164 __ Move(a1, address); in InformIncrementalMarker()
3165 __ li(a2, Operand(ExternalReference::isolate_address(isolate()))); in InformIncrementalMarker()
3168 __ CallCFunction( in InformIncrementalMarker()
3185 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black); in CheckNeedsToInformIncrementalMarker()
3189 __ RememberedSetHelper(object(), in CheckNeedsToInformIncrementalMarker()
3195 __ Ret(); in CheckNeedsToInformIncrementalMarker()
3198 __ bind(&on_black); in CheckNeedsToInformIncrementalMarker()
3201 __ ld(regs_.scratch0(), MemOperand(regs_.address(), 0)); in CheckNeedsToInformIncrementalMarker()
3206 __ CheckPageFlag(regs_.scratch0(), // Contains value. in CheckNeedsToInformIncrementalMarker()
3212 __ CheckPageFlag(regs_.object(), in CheckNeedsToInformIncrementalMarker()
3218 __ bind(&ensure_not_white); in CheckNeedsToInformIncrementalMarker()
3223 __ Push(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
3224 __ JumpIfWhite(regs_.scratch0(), // The value. in CheckNeedsToInformIncrementalMarker()
3229 __ Pop(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
3233 __ RememberedSetHelper(object(), in CheckNeedsToInformIncrementalMarker()
3239 __ Ret(); in CheckNeedsToInformIncrementalMarker()
3242 __ bind(&need_incremental_pop_scratch); in CheckNeedsToInformIncrementalMarker()
3243 __ Pop(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
3245 __ bind(&need_incremental); in CheckNeedsToInformIncrementalMarker()
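CheckNeedsToInformIncrementalMarker above tries to prove no marking work is needed before taking a slow path: if the object is not black the store needs at most a remembered-set update, the value's and object's page flags can rule out marking, and only a still-white value forces the push/mark/pop sequence around JumpIfWhite. A deliberately simplified decision function (the predicate names are mine, and the real page-flag checks are two separate tests):

    enum class BarrierAction { kNone, kRememberedSet, kMarkValue };

    BarrierAction DecideBarrier(bool object_is_black,
                                bool value_may_need_marking,
                                bool update_remembered_set) {
      if (!object_is_black || !value_may_need_marking)
        return update_remembered_set ? BarrierAction::kRememberedSet
                                     : BarrierAction::kNone;
      return BarrierAction::kMarkValue;  // JumpIfWhite path: mark, continue
    }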
3253 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET); in Generate()
3256 __ ld(a1, MemOperand(fp, parameter_count_offset)); in Generate()
3258 __ Daddu(a1, a1, Operand(1)); in Generate()
3261 __ dsll(a1, a1, kPointerSizeLog2); in Generate()
3262 __ Ret(USE_DELAY_SLOT); in Generate()
3263 __ Daddu(sp, sp, a1); in Generate()
3267 __ EmitLoadTypeFeedbackVector(a2); in Generate()
3269 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); in Generate()
3283 __ ld(cached_map, in HandleArrayCases()
3285 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); in HandleArrayCases()
3286 __ Branch(&start_polymorphic, ne, receiver_map, Operand(cached_map)); in HandleArrayCases()
3289 __ ld(handler, FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(1))); in HandleArrayCases()
3290 __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); in HandleArrayCases()
3291 __ Jump(t9); in HandleArrayCases()
3294 __ bind(&start_polymorphic); in HandleArrayCases()
3295 __ ld(length, FieldMemOperand(feedback, FixedArray::kLengthOffset)); in HandleArrayCases()
3299 __ Branch(miss, eq, length, Operand(Smi::FromInt(2))); in HandleArrayCases()
3315 __ SmiScale(too_far, length, kPointerSizeLog2); in HandleArrayCases()
3316 __ Daddu(too_far, feedback, Operand(too_far)); in HandleArrayCases()
3317 __ Daddu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in HandleArrayCases()
3318 __ Daddu(pointer_reg, feedback, in HandleArrayCases()
3321 __ bind(&next_loop); in HandleArrayCases()
3322 __ ld(cached_map, MemOperand(pointer_reg)); in HandleArrayCases()
3323 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); in HandleArrayCases()
3324 __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map)); in HandleArrayCases()
3325 __ ld(handler, MemOperand(pointer_reg, kPointerSize)); in HandleArrayCases()
3326 __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); in HandleArrayCases()
3327 __ Jump(t9); in HandleArrayCases()
3329 __ bind(&prepare_next); in HandleArrayCases()
3330 __ Daddu(pointer_reg, pointer_reg, Operand(kPointerSize * 2)); in HandleArrayCases()
3331 __ Branch(&next_loop, lt, pointer_reg, Operand(too_far)); in HandleArrayCases()
3334 __ Branch(miss); in HandleArrayCases()
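HandleArrayCases walks a feedback FixedArray of (weak map, handler) pairs: elements 0 and 1 form the monomorphic pair, and &start_polymorphic scans the remaining pairs two pointers at a stride until `too_far`. A model of the scan, where a cleared weak cell (which the stub would simply fail to match) is represented as null:

    #include <cstddef>

    struct FeedbackPair {
      const void* cached_map;  // weak cell's value; nullptr models "cleared"
      const void* handler;
    };

    // Returns the handler to tail-call, or nullptr for a miss.
    const void* FindLoadHandler(const FeedbackPair* pairs, size_t pair_count,
                                const void* receiver_map) {
      for (size_t i = 0; i < pair_count; i++)
        if (pairs[i].cached_map == receiver_map) return pairs[i].handler;
      return nullptr;  // __ Branch(miss)
    }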
3343 __ JumpIfSmi(receiver, load_smi_map); in HandleMonomorphicCase()
3344 __ ld(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); in HandleMonomorphicCase()
3345 __ bind(compare_map); in HandleMonomorphicCase()
3348 __ ld(cached_map, FieldMemOperand(feedback, WeakCell::kValueOffset)); in HandleMonomorphicCase()
3349 __ Branch(try_array, ne, cached_map, Operand(receiver_map)); in HandleMonomorphicCase()
3351 __ SmiScale(handler, slot, kPointerSizeLog2); in HandleMonomorphicCase()
3352 __ Daddu(handler, vector, Operand(handler)); in HandleMonomorphicCase()
3353 __ ld(handler, in HandleMonomorphicCase()
3355 __ Daddu(t9, handler, Code::kHeaderSize - kHeapObjectTag); in HandleMonomorphicCase()
3356 __ Jump(t9); in HandleMonomorphicCase()
3360 __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister()); in Generate()
3386 __ ld(too_far, FieldMemOperand(feedback, FixedArray::kLengthOffset)); in HandlePolymorphicStoreCase()
3398 __ SmiScale(too_far, too_far, kPointerSizeLog2); in HandlePolymorphicStoreCase()
3399 __ Daddu(too_far, feedback, Operand(too_far)); in HandlePolymorphicStoreCase()
3400 __ Daddu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in HandlePolymorphicStoreCase()
3401 __ Daddu(pointer_reg, feedback, in HandlePolymorphicStoreCase()
3404 __ bind(&next_loop); in HandlePolymorphicStoreCase()
3405 __ ld(cached_map, MemOperand(pointer_reg)); in HandlePolymorphicStoreCase()
3406 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); in HandlePolymorphicStoreCase()
3407 __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map)); in HandlePolymorphicStoreCase()
3409 __ ld(too_far, MemOperand(pointer_reg, kPointerSize)); in HandlePolymorphicStoreCase()
3410 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); in HandlePolymorphicStoreCase()
3411 __ Branch(&transition_call, ne, too_far, Operand(at)); in HandlePolymorphicStoreCase()
3413 __ ld(pointer_reg, MemOperand(pointer_reg, kPointerSize * 2)); in HandlePolymorphicStoreCase()
3414 __ Daddu(t9, pointer_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); in HandlePolymorphicStoreCase()
3415 __ Jump(t9); in HandlePolymorphicStoreCase()
3417 __ bind(&transition_call); in HandlePolymorphicStoreCase()
3418 __ ld(too_far, FieldMemOperand(too_far, WeakCell::kValueOffset)); in HandlePolymorphicStoreCase()
3419 __ JumpIfSmi(too_far, miss); in HandlePolymorphicStoreCase()
3421 __ ld(receiver_map, MemOperand(pointer_reg, kPointerSize * 2)); in HandlePolymorphicStoreCase()
3424 __ Move(feedback, too_far); in HandlePolymorphicStoreCase()
3425 __ Daddu(t9, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag)); in HandlePolymorphicStoreCase()
3426 __ Jump(t9); in HandlePolymorphicStoreCase()
3428 __ bind(&prepare_next); in HandlePolymorphicStoreCase()
3429 __ Daddu(pointer_reg, pointer_reg, Operand(kPointerSize * 3)); in HandlePolymorphicStoreCase()
3430 __ Branch(&next_loop, lt, pointer_reg, Operand(too_far)); in HandlePolymorphicStoreCase()
3433 __ Branch(miss); in HandlePolymorphicStoreCase()
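The store variant strides three pointers at a time: each entry is (weak receiver map, transition map or undefined, handler), and a non-undefined middle word routes through &transition_call, which hands the new map to the handler in the feedback register. A sketch of the walk under that layout:

    #include <cstddef>

    struct StoreFeedbackEntry {
      const void* cached_map;  // weak receiver map
      const void* transition;  // nullptr models "undefined": no transition
      const void* handler;
    };

    // On a match returns the handler and sets *new_map (nullptr for a
    // plain, non-transitioning store), mirroring the two jump paths above.
    const void* FindStoreHandler(const StoreFeedbackEntry* entries,
                                 size_t count, const void* receiver_map,
                                 const void** new_map) {
      for (size_t i = 0; i < count; i++) {
        if (entries[i].cached_map != receiver_map) continue;  // &prepare_next
        *new_map = entries[i].transition;
        return entries[i].handler;
      }
      return nullptr;  // __ Branch(miss)
    }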
3446 __ SmiScale(scratch1, slot, kPointerSizeLog2); in GenerateImpl()
3447 __ Daddu(feedback, vector, Operand(scratch1)); in GenerateImpl()
3448 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); in GenerateImpl()
3458 __ bind(&try_array); in GenerateImpl()
3460 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); in GenerateImpl()
3461 __ Branch(&not_array, ne, scratch1, Heap::kFixedArrayMapRootIndex); in GenerateImpl()
3471 __ bind(&not_array); in GenerateImpl()
3473 __ Branch(&try_poly_name, ne, feedback, Heap::kmegamorphic_symbolRootIndex); in GenerateImpl()
3476 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); in GenerateImpl()
3478 __ bind(&try_poly_name); in GenerateImpl()
3480 __ Branch(&miss, ne, key, Operand(feedback)); in GenerateImpl()
3483 __ SmiScale(scratch1, slot, kPointerSizeLog2); in GenerateImpl()
3484 __ Daddu(feedback, vector, Operand(scratch1)); in GenerateImpl()
3485 __ ld(feedback, in GenerateImpl()
3490 __ bind(&miss); in GenerateImpl()
3493 __ bind(&load_smi_map); in GenerateImpl()
3494 __ Branch(USE_DELAY_SLOT, &compare_map); in GenerateImpl()
3495 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); // In delay slot. in GenerateImpl()
3502 __ push(ra); in MaybeCallEntryHook()
3503 __ CallStub(&stub); in MaybeCallEntryHook()
3504 __ pop(ra); in MaybeCallEntryHook()
3524 __ MultiPush(kSavedRegs | ra.bit()); in Generate()
3527 __ Dsubu(a0, ra, Operand(kReturnAddressDistanceFromFunctionStart)); in Generate()
3531 __ Daddu(a1, sp, Operand(kNumSavedRegs * kPointerSize)); in Generate()
3536 __ mov(s5, sp); in Generate()
3538 __ And(sp, sp, Operand(-frame_alignment)); in Generate()
3541 __ Dsubu(sp, sp, kCArgsSlotsSize); in Generate()
3545 __ li(t9, Operand(entry_hook)); in Generate()
3550 __ li(a2, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
3553 __ li(t9, Operand(ExternalReference(&dispatcher, in Generate()
3558 __ Call(t9); in Generate()
3562 __ mov(sp, s5); in Generate()
3564 __ Daddu(sp, sp, kCArgsSlotsSize); in Generate()
3568 __ MultiPop(kSavedRegs | ra.bit()); in Generate()
3569 __ Ret(); in Generate()
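The entry-hook stub must call C code from an arbitrary JS stack, so it parks the incoming sp in the callee-saved s5, rounds sp down to the ABI frame alignment, reserves the MIPS argument slots, and restores sp afterwards. The alignment step is an ordinary power-of-two round-down:

    #include <cstdint>

    // Equivalent of __ And(sp, sp, Operand(-frame_alignment)) above;
    // frame_alignment must be a power of two.
    inline uintptr_t AlignStackDown(uintptr_t sp, uintptr_t frame_alignment) {
      return sp & ~(frame_alignment - 1);
    }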
3578 __ TailCallStub(&stub); in CreateArrayDispatch()
3585 __ TailCallStub(&stub, eq, a3, Operand(kind)); in CreateArrayDispatch()
3589 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatch()
3613 __ And(at, a3, Operand(1)); in CreateArrayDispatchOneArgument()
3614 __ Branch(&normal_sequence, ne, at, Operand(zero_reg)); in CreateArrayDispatchOneArgument()
3617 __ ld(a5, MemOperand(sp, 0)); in CreateArrayDispatchOneArgument()
3618 __ Branch(&normal_sequence, eq, a5, Operand(zero_reg)); in CreateArrayDispatchOneArgument()
3627 __ TailCallStub(&stub_holey); in CreateArrayDispatchOneArgument()
3629 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
3633 __ TailCallStub(&stub); in CreateArrayDispatchOneArgument()
3637 __ Daddu(a3, a3, Operand(1)); in CreateArrayDispatchOneArgument()
3640 __ ld(a5, FieldMemOperand(a2, 0)); in CreateArrayDispatchOneArgument()
3641 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); in CreateArrayDispatchOneArgument()
3642 __ Assert(eq, kExpectedAllocationSite, a5, Operand(at)); in CreateArrayDispatchOneArgument()
3649 __ ld(a4, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset)); in CreateArrayDispatchOneArgument()
3650 __ Daddu(a4, a4, Operand(Smi::FromInt(kFastElementsKindPackedToHoley))); in CreateArrayDispatchOneArgument()
3651 __ sd(a4, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset)); in CreateArrayDispatchOneArgument()
3654 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
3660 __ TailCallStub(&stub, eq, a3, Operand(kind)); in CreateArrayDispatchOneArgument()
3664 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatchOneArgument()
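CreateArrayDispatchOneArgument's transition relies on the fast ElementsKind numbering, in which each holey kind directly follows its packed kind; bumping the AllocationSite's transition info by the Smi constant kFastElementsKindPackedToHoley (== 1) therefore records the packed-to-holey transition, matching the `Daddu(a3, a3, Operand(1))` above. A sketch under that assumed numbering:

    // Assumed subset of the fast ElementsKind numbering.
    enum ElementsKind {
      FAST_SMI_ELEMENTS = 0, FAST_HOLEY_SMI_ELEMENTS = 1,
      FAST_ELEMENTS = 2,     FAST_HOLEY_ELEMENTS = 3,
    };
    constexpr int kFastElementsKindPackedToHoley = 1;

    inline ElementsKind GetHoleyElementsKind(ElementsKind packed) {
      // Packed kinds are even, so +1 lands on the holey sibling.
      return static_cast<ElementsKind>(packed + kFastElementsKindPackedToHoley);
    }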
3708 __ And(at, a0, a0); in GenerateDispatchToArrayStub()
3709 __ Branch(&not_zero_case, ne, at, Operand(zero_reg)); in GenerateDispatchToArrayStub()
3712 __ bind(&not_zero_case); in GenerateDispatchToArrayStub()
3713 __ Branch(&not_one_case, gt, a0, Operand(1)); in GenerateDispatchToArrayStub()
3716 __ bind(&not_one_case); in GenerateDispatchToArrayStub()
3718 __ TailCallStub(&stub); in GenerateDispatchToArrayStub()
3736 __ ld(a4, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3738 __ SmiTst(a4, at); in Generate()
3739 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, in Generate()
3741 __ GetObjectType(a4, a4, a5); in Generate()
3742 __ Assert(eq, kUnexpectedInitialMapForArrayFunction, in Generate()
3746 __ AssertUndefinedOrAllocationSite(a2, a4); in Generate()
3750 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate()
3753 __ Branch(&subclassing, ne, a1, Operand(a3)); in Generate()
3757 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); in Generate()
3758 __ Branch(&no_info, eq, a2, Operand(at)); in Generate()
3760 __ ld(a3, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset)); in Generate()
3761 __ SmiUntag(a3); in Generate()
3763 __ And(a3, a3, Operand(AllocationSite::ElementsKindBits::kMask)); in Generate()
3766 __ bind(&no_info); in Generate()
3770 __ bind(&subclassing); in Generate()
3771 __ Dlsa(at, sp, a0, kPointerSizeLog2); in Generate()
3772 __ sd(a1, MemOperand(at)); in Generate()
3773 __ li(at, Operand(3)); in Generate()
3774 __ Daddu(a0, a0, at); in Generate()
3775 __ Push(a3, a2); in Generate()
3776 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate())); in Generate()
3784 __ TailCallStub(&stub0, lo, a0, Operand(1)); in GenerateCase()
3787 __ TailCallStub(&stubN, hi, a0, Operand(1)); in GenerateCase()
3792 __ ld(at, MemOperand(sp, 0)); in GenerateCase()
3796 __ TailCallStub(&stub1_holey, ne, at, Operand(zero_reg)); in GenerateCase()
3800 __ TailCallStub(&stub1); in GenerateCase()
3817 __ ld(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3819 __ SmiTst(a3, at); in Generate()
3820 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, in Generate()
3822 __ GetObjectType(a3, a3, a4); in Generate()
3823 __ Assert(eq, kUnexpectedInitialMapForArrayFunction, in Generate()
3828 __ ld(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3832 __ lbu(a3, FieldMemOperand(a3, Map::kBitField2Offset)); in Generate()
3834 __ DecodeField<Map::ElementsKindBits>(a3); in Generate()
3838 __ Branch(&done, eq, a3, Operand(FAST_ELEMENTS)); in Generate()
3839 __ Assert( in Generate()
3842 __ bind(&done); in Generate()
3846 __ Branch(&fast_elements_case, eq, a3, Operand(FAST_ELEMENTS)); in Generate()
3849 __ bind(&fast_elements_case); in Generate()
3861 __ AssertFunction(a1); in Generate()
3862 __ AssertReceiver(a3); in Generate()
3866 __ GetObjectType(a3, a2, a2); in Generate()
3867 __ Branch(&new_object, ne, a2, Operand(JS_FUNCTION_TYPE)); in Generate()
3870 __ ld(a2, FieldMemOperand(a3, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3871 __ JumpIfSmi(a2, &new_object); in Generate()
3872 __ GetObjectType(a2, a0, a0); in Generate()
3873 __ Branch(&new_object, ne, a0, Operand(MAP_TYPE)); in Generate()
3877 __ ld(a0, FieldMemOperand(a2, Map::kConstructorOrBackPointerOffset)); in Generate()
3878 __ Branch(&new_object, ne, a0, Operand(a1)); in Generate()
3882 __ lbu(a4, FieldMemOperand(a2, Map::kInstanceSizeOffset)); in Generate()
3883 __ Allocate(a4, v0, a5, a0, &allocate, SIZE_IN_WORDS); in Generate()
3884 __ bind(&done_allocate); in Generate()
3887 __ sd(a2, FieldMemOperand(v0, JSObject::kMapOffset)); in Generate()
3888 __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex); in Generate()
3889 __ sd(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset)); in Generate()
3890 __ sd(a3, FieldMemOperand(v0, JSObject::kElementsOffset)); in Generate()
3892 __ Daddu(a1, v0, Operand(JSObject::kHeaderSize - kHeapObjectTag)); in Generate()
3906 __ lwu(a3, FieldMemOperand(a2, Map::kBitField3Offset)); in Generate()
3907 __ And(at, a3, Operand(Map::ConstructionCounter::kMask)); in Generate()
3908 __ Branch(USE_DELAY_SLOT, &slack_tracking, ne, at, Operand(zero_reg)); in Generate()
3909 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); // In delay slot. in Generate()
3912 __ InitializeFieldsWithFiller(a1, a5, a0); in Generate()
3913 __ Ret(); in Generate()
3915 __ bind(&slack_tracking); in Generate()
3919 __ Subu(a3, a3, Operand(1 << Map::ConstructionCounter::kShift)); in Generate()
3920 __ sw(a3, FieldMemOperand(a2, Map::kBitField3Offset)); in Generate()
3923 __ lbu(a4, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset)); in Generate()
3924 __ dsll(a4, a4, kPointerSizeLog2); in Generate()
3925 __ Dsubu(a4, a5, a4); in Generate()
3926 __ InitializeFieldsWithFiller(a1, a4, a0); in Generate()
3929 __ LoadRoot(a0, Heap::kOnePointerFillerMapRootIndex); in Generate()
3930 __ InitializeFieldsWithFiller(a1, a5, a0); in Generate()
3935 __ And(a3, a3, Operand(Map::ConstructionCounter::kMask)); in Generate()
3936 __ Branch(&finalize, eq, a3, Operand(zero_reg)); in Generate()
3937 __ Ret(); in Generate()
3940 __ bind(&finalize); in Generate()
3943 __ Push(v0, a2); in Generate()
3944 __ CallRuntime(Runtime::kFinalizeInstanceSize); in Generate()
3945 __ Pop(v0); in Generate()
3947 __ Ret(); in Generate()
3951 __ bind(&allocate); in Generate()
3956 __ dsll(a4, a4, kPointerSizeLog2 + kSmiShiftSize + kSmiTagSize); in Generate()
3957 __ SmiTag(a4); in Generate()
3958 __ Push(a2, a4); in Generate()
3959 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
3960 __ Pop(a2); in Generate()
3962 __ lbu(a5, FieldMemOperand(a2, Map::kInstanceSizeOffset)); in Generate()
3963 __ Dlsa(a5, v0, a5, kPointerSizeLog2); in Generate()
3965 __ Dsubu(a5, a5, Operand(kHeapObjectTag)); in Generate()
3966 __ jmp(&done_allocate); in Generate()
3969 __ bind(&new_object); in Generate()
3970 __ Push(a1, a3); in Generate()
3971 __ TailCallRuntime(Runtime::kNewObject); in Generate()
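The slack-tracking branch above splits the new object's body: fields up to the unused-property-fields watermark are seeded with undefined, the tail gets one-pointer filler maps, and the construction counter in bit field 3 is decremented until Runtime::kFinalizeInstanceSize can shrink the map. A behavioral model of the fill (field layout and names assumed):

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    struct MapModel {
      int unused_property_fields;
      int construction_counter;  // Map::ConstructionCounter bits
    };

    // fields[] models the object body after the three header words.
    // Returns true when the caller must take the &finalize runtime path.
    bool InitializeNewObject(std::vector<uintptr_t>* fields, MapModel* map,
                             uintptr_t undefined,
                             uintptr_t one_pointer_filler) {
      if (map->construction_counter == 0) {  // tracking already finished
        std::fill(fields->begin(), fields->end(), undefined);
        return false;
      }
      map->construction_counter--;
      auto split = fields->end() - map->unused_property_fields;
      std::fill(fields->begin(), split, undefined);
      std::fill(split, fields->end(), one_pointer_filler);
      return map->construction_counter == 0;  // Runtime::kFinalizeInstanceSize
    }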
3982 __ AssertFunction(a1); in Generate()
3985 __ mov(a2, fp); in Generate()
3989 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset)); in Generate()
3993 __ ld(a3, MemOperand(a2, StandardFrameConstants::kFunctionOffset)); in Generate()
3994 __ Branch(&ok, eq, a1, Operand(a3)); in Generate()
3995 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); in Generate()
3996 __ bind(&ok); in Generate()
4002 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset)); in Generate()
4003 __ ld(a3, MemOperand(a2, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate()
4004 __ Branch(&no_rest_parameters, ne, a3, in Generate()
4010 __ SmiLoadUntag( in Generate()
4012 __ ld(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate()
4013 __ lw(a3, in Generate()
4015 __ Dsubu(a0, a0, Operand(a3)); in Generate()
4016 __ Branch(&rest_parameters, gt, a0, Operand(zero_reg)); in Generate()
4019 __ bind(&no_rest_parameters); in Generate()
4028 __ Allocate(JSArray::kSize, v0, a0, a1, &allocate, NO_ALLOCATION_FLAGS); in Generate()
4029 __ bind(&done_allocate); in Generate()
4032 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, a1); in Generate()
4033 __ sd(a1, FieldMemOperand(v0, JSArray::kMapOffset)); in Generate()
4034 __ LoadRoot(a1, Heap::kEmptyFixedArrayRootIndex); in Generate()
4035 __ sd(a1, FieldMemOperand(v0, JSArray::kPropertiesOffset)); in Generate()
4036 __ sd(a1, FieldMemOperand(v0, JSArray::kElementsOffset)); in Generate()
4037 __ Move(a1, Smi::kZero); in Generate()
4038 __ Ret(USE_DELAY_SLOT); in Generate()
4039 __ sd(a1, FieldMemOperand(v0, JSArray::kLengthOffset)); // In delay slot. in Generate()
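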
4043 __ bind(&allocate); in Generate()
4046 __ Push(Smi::FromInt(JSArray::kSize)); in Generate()
4047 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
4049 __ jmp(&done_allocate); in Generate()
4052 __ bind(&rest_parameters); in Generate()
4055 __ Dlsa(a2, a2, a0, kPointerSizeLog2); in Generate()
4056 __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset - in Generate()
4069 __ li(a5, Operand(JSArray::kSize + FixedArray::kHeaderSize)); in Generate()
4070 __ Dlsa(a5, a5, a0, kPointerSizeLog2); in Generate()
4071 __ Allocate(a5, v0, a3, a4, &allocate, NO_ALLOCATION_FLAGS); in Generate()
4072 __ bind(&done_allocate); in Generate()
4075 __ SmiTag(a4, a0); in Generate()
4078 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); in Generate()
4079 __ sd(at, FieldMemOperand(v0, FixedArray::kMapOffset)); in Generate()
4080 __ sd(a4, FieldMemOperand(v0, FixedArray::kLengthOffset)); in Generate()
4081 __ Daddu(a3, v0, Operand(FixedArray::kHeaderSize)); in Generate()
4084 __ Dlsa(a1, a3, a0, kPointerSizeLog2); in Generate()
4085 __ bind(&loop); in Generate()
4086 __ Branch(&done_loop, eq, a1, Operand(a3)); in Generate()
4087 __ ld(at, MemOperand(a2, 0 * kPointerSize)); in Generate()
4088 __ sd(at, FieldMemOperand(a3, 0 * kPointerSize)); in Generate()
4089 __ Dsubu(a2, a2, Operand(1 * kPointerSize)); in Generate()
4090 __ Daddu(a3, a3, Operand(1 * kPointerSize)); in Generate()
4091 __ Branch(&loop); in Generate()
4092 __ bind(&done_loop); in Generate()
4096 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, at); in Generate()
4097 __ sd(at, FieldMemOperand(a3, JSArray::kMapOffset)); in Generate()
4098 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); in Generate()
4099 __ sd(at, FieldMemOperand(a3, JSArray::kPropertiesOffset)); in Generate()
4100 __ sd(v0, FieldMemOperand(a3, JSArray::kElementsOffset)); in Generate()
4101 __ sd(a4, FieldMemOperand(a3, JSArray::kLengthOffset)); in Generate()
4103 __ Ret(USE_DELAY_SLOT); in Generate()
4104 __ mov(v0, a3); // In delay slot. in Generate()
4108 __ bind(&allocate); in Generate()
4109 __ Branch(&too_big_for_new_space, gt, a5, in Generate()
4113 __ SmiTag(a0); in Generate()
4114 __ SmiTag(a5); in Generate()
4115 __ Push(a0, a2, a5); in Generate()
4116 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
4117 __ Pop(a0, a2); in Generate()
4118 __ SmiUntag(a0); in Generate()
4120 __ jmp(&done_allocate); in Generate()
4123 __ bind(&too_big_for_new_space); in Generate()
4124 __ Push(a1); in Generate()
4125 __ TailCallRuntime(Runtime::kNewStrictArguments); in Generate()
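The rest-parameter path first computes rest = actual - formal from the frame, takes the empty-JSArray fast path when that is non-positive, and otherwise allocates JSArray::kSize + FixedArray::kHeaderSize plus rest words (a5) and copies with a decrementing source pointer (a2) and an incrementing destination pointer (a3). A host-side model of the count and copy, with the exact stack order abstracted into the starting pointer:

    #include <cstdint>
    #include <vector>

    // args_top points at the stack slot the stub's a2 starts from; the copy
    // walks it downward while filling the new elements array upward.
    std::vector<uintptr_t> CollectRestArguments(const uintptr_t* args_top,
                                                int actual, int formal) {
      int rest = actual - formal;
      if (rest <= 0) return {};  // empty-JSArray fast path above
      std::vector<uintptr_t> elements(static_cast<size_t>(rest));
      for (int i = 0; i < rest; i++)
        elements[i] = args_top[-i];  // mirrors Dsubu(a2, a2, kPointerSize)
      return elements;
    }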
4137 __ AssertFunction(a1); in Generate()
4140 __ mov(t0, fp); in Generate()
4144 __ ld(t0, MemOperand(t0, StandardFrameConstants::kCallerFPOffset)); in Generate()
4148 __ ld(a3, MemOperand(t0, StandardFrameConstants::kFunctionOffset)); in Generate()
4149 __ Branch(&ok, eq, a1, Operand(a3)); in Generate()
4150 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); in Generate()
4151 __ bind(&ok); in Generate()
4155 __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate()
4156 __ lw(a2, in Generate()
4158 __ Lsa(a3, t0, a2, kPointerSizeLog2); in Generate()
4159 __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate()
4160 __ SmiTag(a2); in Generate()
4172 __ ld(a4, MemOperand(t0, StandardFrameConstants::kCallerFPOffset)); in Generate()
4173 __ ld(a0, MemOperand(a4, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate()
4174 __ Branch(&adaptor_frame, eq, a0, in Generate()
4178 __ mov(a5, a2); in Generate()
4179 __ Branch(USE_DELAY_SLOT, &try_allocate); in Generate()
4180 __ mov(a6, a2); // In delay slot. in Generate()
4183 __ bind(&adaptor_frame); in Generate()
4184 __ ld(a5, MemOperand(a4, ArgumentsAdaptorFrameConstants::kLengthOffset)); in Generate()
4185 __ SmiScale(t2, a5, kPointerSizeLog2); in Generate()
4186 __ Daddu(a4, a4, Operand(t2)); in Generate()
4187 __ Daddu(a3, a4, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate()
4192 __ mov(a6, a2); in Generate()
4193 __ Branch(&try_allocate, le, a6, Operand(a5)); in Generate()
4194 __ mov(a6, a5); in Generate()
4196 __ bind(&try_allocate); in Generate()
4205 __ Branch(USE_DELAY_SLOT, &param_map_size, eq, a6, Operand(zero_reg)); in Generate()
4206 __ mov(t1, zero_reg); // In delay slot: param map size = 0 when a6 == 0. in Generate()
4207 __ SmiScale(t1, a6, kPointerSizeLog2); in Generate()
4208 __ daddiu(t1, t1, kParameterMapHeaderSize); in Generate()
4209 __ bind(&param_map_size); in Generate()
4212 __ SmiScale(t2, a5, kPointerSizeLog2); in Generate()
4213 __ Daddu(t1, t1, Operand(t2)); in Generate()
4214 __ Daddu(t1, t1, Operand(FixedArray::kHeaderSize)); in Generate()
4217 __ Daddu(t1, t1, Operand(JSSloppyArgumentsObject::kSize)); in Generate()
4220 __ Allocate(t1, v0, t1, a4, &runtime, NO_ALLOCATION_FLAGS); in Generate()
4230 __ ld(a4, NativeContextMemOperand()); in Generate()
4232 __ Branch(&skip2_ne, ne, a6, Operand(zero_reg)); in Generate()
4233 __ ld(a4, MemOperand(a4, kNormalOffset)); in Generate()
4234 __ bind(&skip2_ne); in Generate()
4236 __ Branch(&skip2_eq, eq, a6, Operand(zero_reg)); in Generate()
4237 __ ld(a4, MemOperand(a4, kAliasedOffset)); in Generate()
4238 __ bind(&skip2_eq); in Generate()
4244 __ sd(a4, FieldMemOperand(v0, JSObject::kMapOffset)); in Generate()
4245 __ LoadRoot(t1, Heap::kEmptyFixedArrayRootIndex); in Generate()
4246 __ sd(t1, FieldMemOperand(v0, JSObject::kPropertiesOffset)); in Generate()
4247 __ sd(t1, FieldMemOperand(v0, JSObject::kElementsOffset)); in Generate()
4250 __ AssertNotSmi(a1); in Generate()
4251 __ sd(a1, FieldMemOperand(v0, JSSloppyArgumentsObject::kCalleeOffset)); in Generate()
4254 __ AssertSmi(a5); in Generate()
4255 __ sd(a5, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset)); in Generate()
4260 __ Daddu(a4, v0, Operand(JSSloppyArgumentsObject::kSize)); in Generate()
4261 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset)); in Generate()
4270 __ Branch(&skip3, ne, a6, Operand(Smi::kZero)); in Generate()
4273 __ mov(a1, a4); in Generate()
4274 __ bind(&skip3); in Generate()
4276 __ Branch(&skip_parameter_map, eq, a6, Operand(Smi::kZero)); in Generate()
4278 __ LoadRoot(a5, Heap::kSloppyArgumentsElementsMapRootIndex); in Generate()
4279 __ sd(a5, FieldMemOperand(a4, FixedArray::kMapOffset)); in Generate()
4280 __ Daddu(a5, a6, Operand(Smi::FromInt(2))); in Generate()
4281 __ sd(a5, FieldMemOperand(a4, FixedArray::kLengthOffset)); in Generate()
4282 __ sd(cp, FieldMemOperand(a4, FixedArray::kHeaderSize + 0 * kPointerSize)); in Generate()
4283 __ SmiScale(t2, a6, kPointerSizeLog2); in Generate()
4284 __ Daddu(a5, a4, Operand(t2)); in Generate()
4285 __ Daddu(a5, a5, Operand(kParameterMapHeaderSize)); in Generate()
4286 __ sd(a5, FieldMemOperand(a4, FixedArray::kHeaderSize + 1 * kPointerSize)); in Generate()
4297 __ mov(a5, a6); in Generate()
4298 __ Daddu(t1, a2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS))); in Generate()
4299 __ Dsubu(t1, t1, Operand(a6)); in Generate()
4300 __ LoadRoot(a7, Heap::kTheHoleValueRootIndex); in Generate()
4301 __ SmiScale(t2, a5, kPointerSizeLog2); in Generate()
4302 __ Daddu(a1, a4, Operand(t2)); in Generate()
4303 __ Daddu(a1, a1, Operand(kParameterMapHeaderSize)); in Generate()
4310 __ jmp(&parameters_test); in Generate()
4312 __ bind(&parameters_loop); in Generate()
4313 __ Dsubu(a5, a5, Operand(Smi::FromInt(1))); in Generate()
4314 __ SmiScale(a0, a5, kPointerSizeLog2); in Generate()
4315 __ Daddu(a0, a0, Operand(kParameterMapHeaderSize - kHeapObjectTag)); in Generate()
4316 __ Daddu(t2, a4, a0); in Generate()
4317 __ sd(t1, MemOperand(t2)); in Generate()
4318 __ Dsubu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize)); in Generate()
4319 __ Daddu(t2, a1, a0); in Generate()
4320 __ sd(a7, MemOperand(t2)); in Generate()
4321 __ Daddu(t1, t1, Operand(Smi::FromInt(1))); in Generate()
4322 __ bind(&parameters_test); in Generate()
4323 __ Branch(&parameters_loop, ne, a5, Operand(Smi::kZero)); in Generate()
4326 __ ld(a5, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset)); in Generate()
4328 __ bind(&skip_parameter_map); in Generate()
4335 __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex); in Generate()
4336 __ sd(t1, FieldMemOperand(a1, FixedArray::kMapOffset)); in Generate()
4337 __ sd(a5, FieldMemOperand(a1, FixedArray::kLengthOffset)); in Generate()
4340 __ SmiScale(t2, a6, kPointerSizeLog2); in Generate()
4341 __ Dsubu(a3, a3, Operand(t2)); in Generate()
4342 __ jmp(&arguments_test); in Generate()
4344 __ bind(&arguments_loop); in Generate()
4345 __ Dsubu(a3, a3, Operand(kPointerSize)); in Generate()
4346 __ ld(a4, MemOperand(a3, 0)); in Generate()
4347 __ SmiScale(t2, a6, kPointerSizeLog2); in Generate()
4348 __ Daddu(t1, a1, Operand(t2)); in Generate()
4349 __ sd(a4, FieldMemOperand(t1, FixedArray::kHeaderSize)); in Generate()
4350 __ Daddu(a6, a6, Operand(Smi::FromInt(1))); in Generate()
4352 __ bind(&arguments_test); in Generate()
4353 __ Branch(&arguments_loop, lt, a6, Operand(a5)); in Generate()
4356 __ Ret(); in Generate()
4360 __ bind(&runtime); in Generate()
4361 __ Push(a1, a3, a5); in Generate()
4362 __ TailCallRuntime(Runtime::kNewSloppyArguments); in Generate()
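For sloppy arguments the stub builds two stores: a parameter map whose header is [map, length = mapped + 2, context, backing store] followed by one slot per mapped parameter holding a context slot index (or the-hole), and a plain backing-store FixedArray holding the-hole for mapped slots and the actual values for the rest. A simplified model of the two fill loops; the slot-index formula is inferred from the t1/a5 counters above and should be read as an assumption:

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    constexpr intptr_t kTheHole = -1;  // stand-in for the hole sentinel

    struct SloppyArgsModel {
      std::vector<intptr_t> parameter_map;  // context slot index or kTheHole
      std::vector<intptr_t> backing_store;  // kTheHole for mapped slots
    };

    SloppyArgsModel BuildSloppyArgs(const std::vector<intptr_t>& args,
                                    int formal_count, int min_context_slots) {
      int argc = static_cast<int>(args.size());
      int mapped = std::min(argc, formal_count);
      SloppyArgsModel m{std::vector<intptr_t>(mapped),
                        std::vector<intptr_t>(argc)};
      for (int i = 0; i < mapped; i++) {
        // Parameter i aliases context slot MIN_CONTEXT_SLOTS + formal - 1 - i.
        m.parameter_map[i] = min_context_slots + formal_count - 1 - i;
        m.backing_store[i] = kTheHole;  // the real value lives in the context
      }
      for (int i = mapped; i < argc; i++) m.backing_store[i] = args[i];
      return m;
    }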
4373 __ AssertFunction(a1); in Generate()
4376 __ mov(a2, fp); in Generate()
4380 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset)); in Generate()
4384 __ ld(a3, MemOperand(a2, StandardFrameConstants::kFunctionOffset)); in Generate()
4385 __ Branch(&ok, eq, a1, Operand(a3)); in Generate()
4386 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); in Generate()
4387 __ bind(&ok); in Generate()
4392 __ ld(a3, MemOperand(a2, StandardFrameConstants::kCallerFPOffset)); in Generate()
4393 __ ld(a0, MemOperand(a3, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate()
4394 __ Branch(&arguments_adaptor, eq, a0, in Generate()
4397 __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate()
4398 __ lw(a0, in Generate()
4400 __ Dlsa(a2, a2, a0, kPointerSizeLog2); in Generate()
4401 __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset - in Generate()
4404 __ Branch(&arguments_done); in Generate()
4405 __ bind(&arguments_adaptor); in Generate()
4407 __ SmiLoadUntag( in Generate()
4409 __ Dlsa(a2, a3, a0, kPointerSizeLog2); in Generate()
4410 __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset - in Generate()
4413 __ bind(&arguments_done); in Generate()
4425 __ li(a5, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize)); in Generate()
4426 __ Dlsa(a5, a5, a0, kPointerSizeLog2); in Generate()
4427 __ Allocate(a5, v0, a3, a4, &allocate, NO_ALLOCATION_FLAGS); in Generate()
4428 __ bind(&done_allocate); in Generate()
4431 __ SmiTag(a4, a0); in Generate()
4434 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); in Generate()
4435 __ sd(at, FieldMemOperand(v0, FixedArray::kMapOffset)); in Generate()
4436 __ sd(a4, FieldMemOperand(v0, FixedArray::kLengthOffset)); in Generate()
4437 __ Daddu(a3, v0, Operand(FixedArray::kHeaderSize)); in Generate()
4440 __ Dlsa(a1, a3, a0, kPointerSizeLog2); in Generate()
4441 __ bind(&loop); in Generate()
4442 __ Branch(&done_loop, eq, a1, Operand(a3)); in Generate()
4443 __ ld(at, MemOperand(a2, 0 * kPointerSize)); in Generate()
4444 __ sd(at, FieldMemOperand(a3, 0 * kPointerSize)); in Generate()
4445 __ Dsubu(a2, a2, Operand(1 * kPointerSize)); in Generate()
4446 __ Daddu(a3, a3, Operand(1 * kPointerSize)); in Generate()
4447 __ Branch(&loop); in Generate()
4448 __ bind(&done_loop); in Generate()
4452 __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, at); in Generate()
4453 __ sd(at, FieldMemOperand(a3, JSStrictArgumentsObject::kMapOffset)); in Generate()
4454 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); in Generate()
4455 __ sd(at, FieldMemOperand(a3, JSStrictArgumentsObject::kPropertiesOffset)); in Generate()
4456 __ sd(v0, FieldMemOperand(a3, JSStrictArgumentsObject::kElementsOffset)); in Generate()
4457 __ sd(a4, FieldMemOperand(a3, JSStrictArgumentsObject::kLengthOffset)); in Generate()
4459 __ Ret(USE_DELAY_SLOT); in Generate()
4460 __ mov(v0, a3); // In delay slot. in Generate()
4464 __ bind(&allocate); in Generate()
4465 __ Branch(&too_big_for_new_space, gt, a5, Operand(kMaxRegularHeapObjectSize)); in Generate()
4468 __ SmiTag(a0); in Generate()
4469 __ SmiTag(a5); in Generate()
4470 __ Push(a0, a2, a5); in Generate()
4471 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
4472 __ Pop(a0, a2); in Generate()
4473 __ SmiUntag(a0); in Generate()
4475 __ jmp(&done_allocate); in Generate()
4478 __ bind(&too_big_for_new_space); in Generate()
4479 __ Push(a1); in Generate()
4480 __ TailCallRuntime(Runtime::kNewStrictArguments); in Generate()
4512 __ li(t9, Operand(ExternalReference::is_profiling_address(isolate))); in CallApiFunctionAndReturn()
4513 __ lb(t9, MemOperand(t9, 0)); in CallApiFunctionAndReturn()
4514 __ Branch(&profiler_disabled, eq, t9, Operand(zero_reg)); in CallApiFunctionAndReturn()
4517 __ li(t9, Operand(thunk_ref)); in CallApiFunctionAndReturn()
4518 __ jmp(&end_profiler_check); in CallApiFunctionAndReturn()
4520 __ bind(&profiler_disabled); in CallApiFunctionAndReturn()
4521 __ mov(t9, function_address); in CallApiFunctionAndReturn()
4522 __ bind(&end_profiler_check); in CallApiFunctionAndReturn()
4525 __ li(s3, Operand(next_address)); in CallApiFunctionAndReturn()
4526 __ ld(s0, MemOperand(s3, kNextOffset)); in CallApiFunctionAndReturn()
4527 __ ld(s1, MemOperand(s3, kLimitOffset)); in CallApiFunctionAndReturn()
4528 __ lw(s2, MemOperand(s3, kLevelOffset)); in CallApiFunctionAndReturn()
4529 __ Addu(s2, s2, Operand(1)); in CallApiFunctionAndReturn()
4530 __ sw(s2, MemOperand(s3, kLevelOffset)); in CallApiFunctionAndReturn()
4534 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
4535 __ PrepareCallCFunction(1, a0); in CallApiFunctionAndReturn()
4536 __ li(a0, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
4537 __ CallCFunction(ExternalReference::log_enter_external_function(isolate), in CallApiFunctionAndReturn()
4539 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
4550 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
4551 __ PrepareCallCFunction(1, a0); in CallApiFunctionAndReturn()
4552 __ li(a0, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
4553 __ CallCFunction(ExternalReference::log_leave_external_function(isolate), in CallApiFunctionAndReturn()
4555 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
4564 __ ld(v0, return_value_operand); in CallApiFunctionAndReturn()
4565 __ bind(&return_value_loaded); in CallApiFunctionAndReturn()
4569 __ sd(s0, MemOperand(s3, kNextOffset)); in CallApiFunctionAndReturn()
4570 if (__ emit_debug_code()) { in CallApiFunctionAndReturn()
4571 __ lw(a1, MemOperand(s3, kLevelOffset)); in CallApiFunctionAndReturn()
4572 __ Check(eq, kUnexpectedLevelAfterReturnFromApiCall, a1, Operand(s2)); in CallApiFunctionAndReturn()
4574 __ Subu(s2, s2, Operand(1)); in CallApiFunctionAndReturn()
4575 __ sw(s2, MemOperand(s3, kLevelOffset)); in CallApiFunctionAndReturn()
4576 __ ld(at, MemOperand(s3, kLimitOffset)); in CallApiFunctionAndReturn()
4577 __ Branch(&delete_allocated_handles, ne, s1, Operand(at)); in CallApiFunctionAndReturn()
4580 __ bind(&leave_exit_frame); in CallApiFunctionAndReturn()
4584 __ ld(cp, *context_restore_operand); in CallApiFunctionAndReturn()
4588 __ ld(s0, MemOperand(sp, stack_space_offset)); in CallApiFunctionAndReturn()
4590 __ li(s0, Operand(stack_space)); in CallApiFunctionAndReturn()
4592 __ LeaveExitFrame(false, s0, !restore_context, NO_EMIT_RETURN, in CallApiFunctionAndReturn()
4596 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex); in CallApiFunctionAndReturn()
4597 __ li(at, Operand(ExternalReference::scheduled_exception_address(isolate))); in CallApiFunctionAndReturn()
4598 __ ld(a5, MemOperand(at)); in CallApiFunctionAndReturn()
4599 __ Branch(&promote_scheduled_exception, ne, a4, Operand(a5)); in CallApiFunctionAndReturn()
4601 __ Ret(); in CallApiFunctionAndReturn()
4604 __ bind(&promote_scheduled_exception); in CallApiFunctionAndReturn()
4605 __ TailCallRuntime(Runtime::kPromoteScheduledException); in CallApiFunctionAndReturn()
4608 __ bind(&delete_allocated_handles); in CallApiFunctionAndReturn()
4609 __ sd(s1, MemOperand(s3, kLimitOffset)); in CallApiFunctionAndReturn()
4610 __ mov(s0, v0); in CallApiFunctionAndReturn()
4611 __ mov(a0, v0); in CallApiFunctionAndReturn()
4612 __ PrepareCallCFunction(1, s1); in CallApiFunctionAndReturn()
4613 __ li(a0, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
4614 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), in CallApiFunctionAndReturn()
4616 __ mov(v0, s0); in CallApiFunctionAndReturn()
4617 __ jmp(&leave_exit_frame); in CallApiFunctionAndReturn()
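CallApiFunctionAndReturn brackets the API call with HandleScope bookkeeping: next and limit are saved in callee-saved registers, level is incremented across the call, and afterwards next is restored and level checked under debug code; only when limit moved does the stub take the &delete_allocated_handles path into C++. A behavioral sketch, where DeleteExtensions is a hypothetical stand-in for the delete_handle_scope_extensions call:

    struct HandleScopeData {
      void** next;
      void** limit;
      int level;
    };

    // Hypothetical stand-in for the C++ extension deleter.
    inline void DeleteExtensions(HandleScopeData*) {}

    template <typename Fn>
    void WithHandleScope(HandleScopeData* d, Fn api_call) {
      void** saved_next = d->next;    // __ ld(s0, kNextOffset)
      void** saved_limit = d->limit;  // __ ld(s1, kLimitOffset)
      d->level++;                     // __ Addu(s2, s2, 1); __ sw(...)
      api_call();
      d->next = saved_next;           // __ sd(s0, kNextOffset)
      d->level--;                     // __ Subu(s2, s2, 1); __ sw(...)
      if (d->limit != saved_limit) {  // __ Branch(&delete_allocated_handles)
        d->limit = saved_limit;       // __ sd(s1, kLimitOffset)
        DeleteExtensions(d);
      }
    }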
4653 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate()
4656 __ Push(context, callee, call_data); in Generate()
4659 __ ld(context, FieldMemOperand(callee, JSFunction::kContextOffset)); in Generate()
4664 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); in Generate()
4667 __ Push(scratch, scratch); in Generate()
4668 __ li(scratch, Operand(ExternalReference::isolate_address(masm->isolate()))); in Generate()
4670 __ Push(scratch, holder); in Generate()
4673 __ mov(scratch, sp); in Generate()
4680 __ EnterExitFrame(false, kApiStackSpace); in Generate()
4685 __ Daddu(a0, sp, Operand(1 * kPointerSize)); in Generate()
4687 __ sd(scratch, MemOperand(a0, 0 * kPointerSize)); in Generate()
4689 __ Daddu(at, scratch, in Generate()
4691 __ sd(at, MemOperand(a0, 1 * kPointerSize)); in Generate()
4695 __ li(at, Operand(argc())); in Generate()
4696 __ sw(at, MemOperand(a0, 2 * kPointerSize)); in Generate()
4745 __ Dsubu(sp, sp, (PCA::kArgsLength + 1) * kPointerSize); in Generate()
4746 __ sd(receiver, MemOperand(sp, (PCA::kThisIndex + 1) * kPointerSize)); in Generate()
4747 __ ld(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset)); in Generate()
4748 __ sd(scratch, MemOperand(sp, (PCA::kDataIndex + 1) * kPointerSize)); in Generate()
4749 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); in Generate()
4750 __ sd(scratch, MemOperand(sp, (PCA::kReturnValueOffset + 1) * kPointerSize)); in Generate()
4751 __ sd(scratch, MemOperand(sp, (PCA::kReturnValueDefaultValueIndex + 1) * in Generate()
4753 __ li(scratch, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
4754 __ sd(scratch, MemOperand(sp, (PCA::kIsolateIndex + 1) * kPointerSize)); in Generate()
4755 __ sd(holder, MemOperand(sp, (PCA::kHolderIndex + 1) * kPointerSize)); in Generate()
4758 __ sd(zero_reg, in Generate()
4760 __ ld(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset)); in Generate()
4761 __ sd(scratch, MemOperand(sp, 0 * kPointerSize)); in Generate()
4767 __ mov(a0, sp); // a0 = Handle<Name> in Generate()
4768 __ Daddu(a1, a0, Operand(1 * kPointerSize)); // a1 = v8::PCI::args_ in Generate()
4772 __ EnterExitFrame(false, kApiStackSpace); in Generate()
4776 __ sd(a1, MemOperand(sp, 1 * kPointerSize)); in Generate()
4777 __ Daddu(a1, sp, Operand(1 * kPointerSize)); in Generate()
4783 __ ld(scratch, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset)); in Generate()
4784 __ ld(api_function_address, in Generate()
4795 #undef __