Lines matching full:__

24 #define __ ACCESS_MASM(masm) macro
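
Every match below uses this shorthand: once __ is defined as ACCESS_MASM(masm), which expands to "masm->", a line such as "__ Push(a1);" reads as "masm->Push(a1);". A minimal sketch of the convention, assuming V8's MacroAssembler type (the stub body here is hypothetical):

```cpp
// Sketch of the ACCESS_MASM convention, assuming V8's MacroAssembler
// (ACCESS_MASM(masm) expands to "masm->"). The stub body is hypothetical.
#define __ ACCESS_MASM(masm)

void ExampleStub_Generate(MacroAssembler* masm) {
  __ Push(a1);  // Expands to masm->Push(a1);
  __ Ret();     // Expands to masm->Ret();
}

#undef __  // The source file undefines __ once code generation ends.
```
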
27 __ sll(t9, a0, kPointerSizeLog2); in Generate()
28 __ Addu(t9, sp, t9); in Generate()
29 __ sw(a1, MemOperand(t9, 0)); in Generate()
30 __ Push(a1); in Generate()
31 __ Push(a2); in Generate()
32 __ Addu(a0, a0, Operand(3)); in Generate()
33 __ TailCallRuntime(Runtime::kNewArray); in Generate()
73 __ Subu(sp, sp, Operand(param_count * kPointerSize)); in GenerateLightweightMiss()
76 __ sw(descriptor.GetRegisterParameter(i), in GenerateLightweightMiss()
79 __ CallExternalReference(miss, param_count); in GenerateLightweightMiss()
82 __ Ret(); in GenerateLightweightMiss()
103 __ Push(scratch, scratch2, scratch3); in Generate()
107 __ ldc1(double_scratch, MemOperand(input_reg, double_offset)); in Generate()
110 __ cfc1(scratch2, FCSR); in Generate()
111 __ ctc1(zero_reg, FCSR); in Generate()
114 __ Trunc_w_d(double_scratch, double_scratch); in Generate()
116 __ mfc1(scratch3, double_scratch); in Generate()
119 __ cfc1(scratch, FCSR); in Generate()
120 __ ctc1(scratch2, FCSR); in Generate()
123 __ And( in Generate()
129 __ Branch(&error, ne, scratch, Operand(zero_reg)); in Generate()
130 __ Move(result_reg, scratch3); in Generate()
131 __ Branch(&done); in Generate()
132 __ bind(&error); in Generate()
139 __ lw(input_low, in Generate()
141 __ lw(input_high, in Generate()
146 __ Ext(result_reg, in Generate()
152 __ Subu(scratch, result_reg, HeapNumber::kExponentMask); in Generate()
153 __ Movz(result_reg, zero_reg, scratch); in Generate()
154 __ Branch(&done, eq, scratch, Operand(zero_reg)); in Generate()
157 __ Subu(result_reg, in Generate()
163 __ Branch(&normal_exponent, le, result_reg, Operand(zero_reg)); in Generate()
164 __ mov(result_reg, zero_reg); in Generate()
165 __ Branch(&done); in Generate()
167 __ bind(&normal_exponent); in Generate()
170 __ Addu(scratch, result_reg, Operand(kShiftBase + HeapNumber::kMantissaBits)); in Generate()
175 __ And(sign, input_high, Operand(HeapNumber::kSignMask)); in Generate()
180 __ Branch(&high_shift_needed, lt, scratch, Operand(32)); in Generate()
181 __ mov(input_high, zero_reg); in Generate()
182 __ Branch(&high_shift_done); in Generate()
183 __ bind(&high_shift_needed); in Generate()
186 __ Or(input_high, in Generate()
192 __ sllv(input_high, input_high, scratch); in Generate()
194 __ bind(&high_shift_done); in Generate()
198 __ li(at, 32); in Generate()
199 __ subu(scratch, at, scratch); in Generate()
200 __ Branch(&pos_shift, ge, scratch, Operand(zero_reg)); in Generate()
203 __ Subu(scratch, zero_reg, scratch); in Generate()
204 __ sllv(input_low, input_low, scratch); in Generate()
205 __ Branch(&shift_done); in Generate()
207 __ bind(&pos_shift); in Generate()
208 __ srlv(input_low, input_low, scratch); in Generate()
210 __ bind(&shift_done); in Generate()
211 __ Or(input_high, input_high, Operand(input_low)); in Generate()
213 __ mov(scratch, sign); in Generate()
216 __ Subu(result_reg, zero_reg, input_high); in Generate()
217 __ Movz(result_reg, input_high, scratch); in Generate()
219 __ bind(&done); in Generate()
221 __ Pop(scratch, scratch2, scratch3); in Generate()
222 __ Ret(); in Generate()
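
Lines 103–222 are the double-to-int32 truncation stub: it first tries the FPU truncation (Trunc_w_d) while checking FCSR for overflow (lines 107–131), and on failure decomposes the double into sign, exponent, and mantissa by hand (lines 139–217). A portable model of the semantics being implemented, ECMA-262 ToInt32, as a sketch rather than the stub's bit manipulation:

```cpp
#include <cmath>
#include <cstdint>

// Portable model of ECMA-262 ToInt32, the semantics the slow path above
// implements with manual exponent/mantissa shifting (a sketch, not V8 code).
int32_t ToInt32(double value) {
  if (!std::isfinite(value)) return 0;                 // NaN, +/-Inf -> 0.
  double truncated = std::trunc(value);                // Round toward zero.
  double modulo = std::fmod(truncated, 4294967296.0);  // mod 2^32.
  if (modulo < 0) modulo += 4294967296.0;
  return static_cast<int32_t>(static_cast<uint32_t>(modulo));
}
```
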
235 __ Branch(&not_identical, ne, a0, Operand(a1)); in EmitIdenticalObjectComparison()
237 __ li(exp_mask_reg, Operand(HeapNumber::kExponentMask)); in EmitIdenticalObjectComparison()
243 __ GetObjectType(a0, t4, t4); in EmitIdenticalObjectComparison()
246 __ Branch(slow, greater, t4, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitIdenticalObjectComparison()
248 __ Branch(slow, eq, t4, Operand(SYMBOL_TYPE)); in EmitIdenticalObjectComparison()
250 __ Branch(slow, eq, t4, Operand(SIMD128_VALUE_TYPE)); in EmitIdenticalObjectComparison()
252 __ Branch(&heap_number, eq, t4, Operand(HEAP_NUMBER_TYPE)); in EmitIdenticalObjectComparison()
255 __ Branch(slow, greater, t4, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitIdenticalObjectComparison()
257 __ Branch(slow, eq, t4, Operand(SYMBOL_TYPE)); in EmitIdenticalObjectComparison()
259 __ Branch(slow, eq, t4, Operand(SIMD128_VALUE_TYPE)); in EmitIdenticalObjectComparison()
264 __ Branch(&return_equal, ne, t4, Operand(ODDBALL_TYPE)); in EmitIdenticalObjectComparison()
265 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex); in EmitIdenticalObjectComparison()
266 __ Branch(&return_equal, ne, a0, Operand(t2)); in EmitIdenticalObjectComparison()
268 __ Ret(USE_DELAY_SLOT); in EmitIdenticalObjectComparison()
271 __ li(v0, Operand(GREATER)); in EmitIdenticalObjectComparison()
274 __ li(v0, Operand(LESS)); in EmitIdenticalObjectComparison()
280 __ bind(&return_equal); in EmitIdenticalObjectComparison()
282 __ Ret(USE_DELAY_SLOT); in EmitIdenticalObjectComparison()
284 __ li(v0, Operand(GREATER)); // Things aren't less than themselves. in EmitIdenticalObjectComparison()
286 __ li(v0, Operand(LESS)); // Things aren't greater than themselves. in EmitIdenticalObjectComparison()
288 __ mov(v0, zero_reg); // Things are <=, >=, ==, === themselves. in EmitIdenticalObjectComparison()
295 __ bind(&heap_number); in EmitIdenticalObjectComparison()
302 __ lw(t2, FieldMemOperand(a0, HeapNumber::kExponentOffset)); in EmitIdenticalObjectComparison()
304 __ And(t3, t2, Operand(exp_mask_reg)); in EmitIdenticalObjectComparison()
306 __ Branch(&return_equal, ne, t3, Operand(exp_mask_reg)); in EmitIdenticalObjectComparison()
309 __ sll(t2, t2, HeapNumber::kNonMantissaBitsInTopWord); in EmitIdenticalObjectComparison()
311 __ lw(t3, FieldMemOperand(a0, HeapNumber::kMantissaOffset)); in EmitIdenticalObjectComparison()
312 __ Or(v0, t3, Operand(t2)); in EmitIdenticalObjectComparison()
319 __ Ret(eq, v0, Operand(zero_reg)); in EmitIdenticalObjectComparison()
321 __ Ret(USE_DELAY_SLOT); in EmitIdenticalObjectComparison()
323 __ li(v0, Operand(GREATER)); // NaN <= NaN should fail. in EmitIdenticalObjectComparison()
325 __ li(v0, Operand(LESS)); // NaN >= NaN should fail. in EmitIdenticalObjectComparison()
331 __ bind(&not_identical); in EmitIdenticalObjectComparison()
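
Within EmitIdenticalObjectComparison, identical heap numbers still need the NaN check at lines 302–325: NaN is the one value not equal to itself, and the stub returns GREATER under <= and LESS under >= so that both orderings fail. A standalone sketch of that convention (enum values illustrative):

```cpp
#include <cmath>

// Sketch of the result convention (LESS, EQUAL and GREATER are smi
// constants in V8; the values here are illustrative).
enum CompareResult { kLess = -1, kEqual = 0, kGreater = 1 };

// Only NaN is not equal to itself. Returning kGreater makes "x <= x"
// fail and kLess makes "x >= x" fail, matching the two li(v0, ...) arms
// after the NaN check.
CompareResult CompareIdentical(double x, bool cc_is_less_or_equal) {
  if (std::isnan(x)) return cc_is_less_or_equal ? kGreater : kLess;
  return kEqual;
}
```
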
345 __ JumpIfSmi(lhs, &lhs_is_smi); in EmitSmiNonsmiComparison()
348 __ GetObjectType(lhs, t4, t4); in EmitSmiNonsmiComparison()
352 __ Ret(USE_DELAY_SLOT, ne, t4, Operand(HEAP_NUMBER_TYPE)); in EmitSmiNonsmiComparison()
353 __ mov(v0, lhs); in EmitSmiNonsmiComparison()
357 __ Branch(slow, ne, t4, Operand(HEAP_NUMBER_TYPE)); in EmitSmiNonsmiComparison()
362 __ sra(at, rhs, kSmiTagSize); in EmitSmiNonsmiComparison()
363 __ mtc1(at, f14); in EmitSmiNonsmiComparison()
364 __ cvt_d_w(f14, f14); in EmitSmiNonsmiComparison()
365 __ ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
368 __ jmp(both_loaded_as_doubles); in EmitSmiNonsmiComparison()
370 __ bind(&lhs_is_smi); in EmitSmiNonsmiComparison()
372 __ GetObjectType(rhs, t4, t4); in EmitSmiNonsmiComparison()
376 __ Ret(USE_DELAY_SLOT, ne, t4, Operand(HEAP_NUMBER_TYPE)); in EmitSmiNonsmiComparison()
377 __ li(v0, Operand(1)); in EmitSmiNonsmiComparison()
381 __ Branch(slow, ne, t4, Operand(HEAP_NUMBER_TYPE)); in EmitSmiNonsmiComparison()
386 __ sra(at, lhs, kSmiTagSize); in EmitSmiNonsmiComparison()
387 __ mtc1(at, f12); in EmitSmiNonsmiComparison()
388 __ cvt_d_w(f12, f12); in EmitSmiNonsmiComparison()
389 __ ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
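
Both arms of EmitSmiNonsmiComparison convert the smi operand with the same three instructions: sra to untag, mtc1 to move into the FPU, cvt_d_w to widen to double (lines 362–364 and 386–388). A standalone equivalent:

```cpp
#include <cstdint>

// Standalone equivalent of the sra/mtc1/cvt_d_w sequence above
// (kSmiTagSize == 1 on 32-bit V8, so smis carry a one-bit tag).
double SmiToDouble(int32_t tagged_smi) {
  int32_t untagged = tagged_smi >> 1;    // sra: arithmetic shift untags.
  return static_cast<double>(untagged);  // mtc1 + cvt_d_w.
}
```
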
404 __ GetObjectType(lhs, a2, a2); in EmitStrictTwoHeapObjectCompare()
405 __ Branch(&first_non_object, less, a2, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitStrictTwoHeapObjectCompare()
409 __ bind(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
410 __ Ret(USE_DELAY_SLOT); in EmitStrictTwoHeapObjectCompare()
411 __ li(v0, Operand(1)); in EmitStrictTwoHeapObjectCompare()
413 __ bind(&first_non_object); in EmitStrictTwoHeapObjectCompare()
415 __ Branch(&return_not_equal, eq, a2, Operand(ODDBALL_TYPE)); in EmitStrictTwoHeapObjectCompare()
417 __ GetObjectType(rhs, a3, a3); in EmitStrictTwoHeapObjectCompare()
418 __ Branch(&return_not_equal, greater, a3, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitStrictTwoHeapObjectCompare()
421 __ Branch(&return_not_equal, eq, a3, Operand(ODDBALL_TYPE)); in EmitStrictTwoHeapObjectCompare()
426 __ Or(a2, a2, Operand(a3)); in EmitStrictTwoHeapObjectCompare()
427 __ And(at, a2, Operand(kIsNotStringMask | kIsNotInternalizedMask)); in EmitStrictTwoHeapObjectCompare()
428 __ Branch(&return_not_equal, eq, at, Operand(zero_reg)); in EmitStrictTwoHeapObjectCompare()
438 __ GetObjectType(lhs, a3, a2); in EmitCheckForTwoHeapNumbers()
439 __ Branch(not_heap_numbers, ne, a2, Operand(HEAP_NUMBER_TYPE)); in EmitCheckForTwoHeapNumbers()
440 __ lw(a2, FieldMemOperand(rhs, HeapObject::kMapOffset)); in EmitCheckForTwoHeapNumbers()
442 __ Branch(slow, ne, a3, Operand(a2)); in EmitCheckForTwoHeapNumbers()
446 __ ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset)); in EmitCheckForTwoHeapNumbers()
447 __ ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in EmitCheckForTwoHeapNumbers()
449 __ jmp(both_loaded_as_doubles); in EmitCheckForTwoHeapNumbers()
464 __ And(at, a2, Operand(kIsNotStringMask)); in EmitCheckForInternalizedStringsOrObjects()
465 __ Branch(&object_test, ne, at, Operand(zero_reg)); in EmitCheckForInternalizedStringsOrObjects()
466 __ And(at, a2, Operand(kIsNotInternalizedMask)); in EmitCheckForInternalizedStringsOrObjects()
467 __ Branch(possible_strings, ne, at, Operand(zero_reg)); in EmitCheckForInternalizedStringsOrObjects()
468 __ GetObjectType(rhs, a3, a3); in EmitCheckForInternalizedStringsOrObjects()
469 __ Branch(runtime_call, ge, a3, Operand(FIRST_NONSTRING_TYPE)); in EmitCheckForInternalizedStringsOrObjects()
470 __ And(at, a3, Operand(kIsNotInternalizedMask)); in EmitCheckForInternalizedStringsOrObjects()
471 __ Branch(possible_strings, ne, at, Operand(zero_reg)); in EmitCheckForInternalizedStringsOrObjects()
476 __ Ret(USE_DELAY_SLOT); in EmitCheckForInternalizedStringsOrObjects()
477 __ mov(v0, a0); // In delay slot. in EmitCheckForInternalizedStringsOrObjects()
479 __ bind(&object_test); in EmitCheckForInternalizedStringsOrObjects()
480 __ lw(a2, FieldMemOperand(lhs, HeapObject::kMapOffset)); in EmitCheckForInternalizedStringsOrObjects()
481 __ lw(a3, FieldMemOperand(rhs, HeapObject::kMapOffset)); in EmitCheckForInternalizedStringsOrObjects()
482 __ lbu(t0, FieldMemOperand(a2, Map::kBitFieldOffset)); in EmitCheckForInternalizedStringsOrObjects()
483 __ lbu(t1, FieldMemOperand(a3, Map::kBitFieldOffset)); in EmitCheckForInternalizedStringsOrObjects()
484 __ And(at, t0, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
485 __ Branch(&undetectable, ne, at, Operand(zero_reg)); in EmitCheckForInternalizedStringsOrObjects()
486 __ And(at, t1, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
487 __ Branch(&return_unequal, ne, at, Operand(zero_reg)); in EmitCheckForInternalizedStringsOrObjects()
489 __ GetInstanceType(a2, a2); in EmitCheckForInternalizedStringsOrObjects()
490 __ Branch(runtime_call, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitCheckForInternalizedStringsOrObjects()
491 __ GetInstanceType(a3, a3); in EmitCheckForInternalizedStringsOrObjects()
492 __ Branch(runtime_call, lt, a3, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitCheckForInternalizedStringsOrObjects()
494 __ bind(&return_unequal); in EmitCheckForInternalizedStringsOrObjects()
496 __ Ret(USE_DELAY_SLOT); in EmitCheckForInternalizedStringsOrObjects()
497 __ mov(v0, a0); // In delay slot. in EmitCheckForInternalizedStringsOrObjects()
499 __ bind(&undetectable); in EmitCheckForInternalizedStringsOrObjects()
500 __ And(at, t1, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
501 __ Branch(&return_unequal, eq, at, Operand(zero_reg)); in EmitCheckForInternalizedStringsOrObjects()
506 __ GetInstanceType(a2, a2); in EmitCheckForInternalizedStringsOrObjects()
507 __ Branch(&return_equal, eq, a2, Operand(ODDBALL_TYPE)); in EmitCheckForInternalizedStringsOrObjects()
508 __ GetInstanceType(a3, a3); in EmitCheckForInternalizedStringsOrObjects()
509 __ Branch(&return_unequal, ne, a3, Operand(ODDBALL_TYPE)); in EmitCheckForInternalizedStringsOrObjects()
511 __ bind(&return_equal); in EmitCheckForInternalizedStringsOrObjects()
512 __ Ret(USE_DELAY_SLOT); in EmitCheckForInternalizedStringsOrObjects()
513 __ li(v0, Operand(EQUAL)); // In delay slot. in EmitCheckForInternalizedStringsOrObjects()
523 __ JumpIfNotSmi(input, fail); in CompareICStub_CheckInputType()
525 __ JumpIfSmi(input, &ok); in CompareICStub_CheckInputType()
526 __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail, in CompareICStub_CheckInputType()
531 __ bind(&ok); in CompareICStub_CheckInputType()
551 __ Or(a2, a1, a0); in GenerateGeneric()
552 __ JumpIfNotSmi(a2, &not_two_smis); in GenerateGeneric()
553 __ sra(a1, a1, 1); in GenerateGeneric()
554 __ sra(a0, a0, 1); in GenerateGeneric()
555 __ Ret(USE_DELAY_SLOT); in GenerateGeneric()
556 __ subu(v0, a1, a0); in GenerateGeneric()
557 __ bind(&not_two_smis); in GenerateGeneric()
570 __ And(t2, lhs, Operand(rhs)); in GenerateGeneric()
571 __ JumpIfNotSmi(t2, &not_smis, t0); in GenerateGeneric()
583 __ bind(&both_loaded_as_doubles); in GenerateGeneric()
588 __ li(t0, Operand(LESS)); in GenerateGeneric()
589 __ li(t1, Operand(GREATER)); in GenerateGeneric()
590 __ li(t2, Operand(EQUAL)); in GenerateGeneric()
593 __ BranchF(NULL, &nan, eq, f12, f14); in GenerateGeneric()
598 __ c(OLT, D, f12, f14); in GenerateGeneric()
599 __ Movt(v0, t0); in GenerateGeneric()
603 __ Movf(v0, t1); in GenerateGeneric()
606 __ c(EQ, D, f12, f14); in GenerateGeneric()
607 __ Movt(v0, t2); in GenerateGeneric()
610 __ BranchF(USE_DELAY_SLOT, &skip, NULL, lt, f12, f14); in GenerateGeneric()
611 __ mov(v0, t0); // Return LESS as result. in GenerateGeneric()
613 __ BranchF(USE_DELAY_SLOT, &skip, NULL, eq, f12, f14); in GenerateGeneric()
614 __ mov(v0, t2); // Return EQUAL as result. in GenerateGeneric()
616 __ mov(v0, t1); // Return GREATER as result. in GenerateGeneric()
617 __ bind(&skip); in GenerateGeneric()
620 __ Ret(); in GenerateGeneric()
622 __ bind(&nan); in GenerateGeneric()
626 __ Ret(USE_DELAY_SLOT); in GenerateGeneric()
628 __ li(v0, Operand(GREATER)); in GenerateGeneric()
630 __ li(v0, Operand(LESS)); in GenerateGeneric()
634 __ bind(&not_smis); in GenerateGeneric()
657 __ bind(&check_for_internalized_strings); in GenerateGeneric()
669 __ bind(&flat_string_check); in GenerateGeneric()
671 __ JumpIfNonSmisNotBothSequentialOneByteStrings(lhs, rhs, a2, a3, &slow); in GenerateGeneric()
673 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, a2, in GenerateGeneric()
683 __ bind(&slow); in GenerateGeneric()
687 __ Push(lhs, rhs); in GenerateGeneric()
688 __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual); in GenerateGeneric()
692 __ LoadRoot(a0, Heap::kTrueValueRootIndex); in GenerateGeneric()
693 __ Ret(USE_DELAY_SLOT); in GenerateGeneric()
694 __ subu(v0, v0, a0); // In delay slot. in GenerateGeneric()
698 __ Push(lhs, rhs); in GenerateGeneric()
706 __ li(a0, Operand(Smi::FromInt(ncr))); in GenerateGeneric()
707 __ push(a0); in GenerateGeneric()
711 __ TailCallRuntime(Runtime::kCompare); in GenerateGeneric()
714 __ bind(&miss); in GenerateGeneric()
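
The two-smi fast path at lines 551–556 in GenerateGeneric needs no comparison at all: once both operands are untagged to 31-bit values, their difference fits in 32 bits and its sign is the answer. A sketch:

```cpp
#include <cstdint>

// Sketch of the two-smi fast path: untagged smis are 31-bit, so the
// subtraction cannot overflow, and its sign encodes less/equal/greater.
int32_t CompareTwoSmis(int32_t lhs_tagged, int32_t rhs_tagged) {
  return (lhs_tagged >> 1) - (rhs_tagged >> 1);  // sra, sra, subu.
}
```
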
720 __ mov(t9, ra); in Generate()
721 __ pop(ra); in Generate()
722 __ PushSafepointRegisters(); in Generate()
723 __ Jump(t9); in Generate()
728 __ mov(t9, ra); in Generate()
729 __ pop(ra); in Generate()
730 __ PopSafepointRegisters(); in Generate()
731 __ Jump(t9); in Generate()
739 __ MultiPush(kJSCallerSaved | ra.bit()); in Generate()
741 __ MultiPushFPU(kCallerSavedFPU); in Generate()
748 __ PrepareCallCFunction(argument_count, fp_argument_count, scratch); in Generate()
749 __ li(a0, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
750 __ CallCFunction( in Generate()
754 __ MultiPopFPU(kCallerSavedFPU); in Generate()
757 __ MultiPop(kJSCallerSaved | ra.bit()); in Generate()
758 __ Ret(); in Generate()
776 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent); in Generate()
778 __ ldc1(double_exponent, in Generate()
785 __ EmitFPUTruncate(kRoundToMinusInf, in Generate()
793 __ Branch(&int_exponent_convert, eq, scratch2, Operand(zero_reg)); in Generate()
795 __ push(ra); in Generate()
798 __ PrepareCallCFunction(0, 2, scratch2); in Generate()
799 __ MovToFloatParameters(double_base, double_exponent); in Generate()
800 __ CallCFunction( in Generate()
804 __ pop(ra); in Generate()
805 __ MovFromFloatResult(double_result); in Generate()
806 __ jmp(&done); in Generate()
808 __ bind(&int_exponent_convert); in Generate()
812 __ bind(&int_exponent); in Generate()
816 __ mov(scratch, exponent); in Generate()
819 __ mov(exponent, scratch); in Generate()
822 __ mov_d(double_scratch, double_base); // Back up base. in Generate()
823 __ Move(double_result, 1.0); in Generate()
827 __ Branch(&positive_exponent, ge, scratch, Operand(zero_reg)); in Generate()
828 __ Subu(scratch, zero_reg, scratch); in Generate()
831 __ Branch(&bail_out, gt, zero_reg, Operand(scratch)); in Generate()
832 __ bind(&positive_exponent); in Generate()
833 __ Assert(ge, kUnexpectedNegativeValue, scratch, Operand(zero_reg)); in Generate()
836 __ bind(&while_true); in Generate()
838 __ And(scratch2, scratch, 1); in Generate()
840 __ Branch(&no_carry, eq, scratch2, Operand(zero_reg)); in Generate()
841 __ mul_d(double_result, double_result, double_scratch); in Generate()
842 __ bind(&no_carry); in Generate()
844 __ sra(scratch, scratch, 1); in Generate()
846 __ Branch(&loop_end, eq, scratch, Operand(zero_reg)); in Generate()
847 __ mul_d(double_scratch, double_scratch, double_scratch); in Generate()
849 __ Branch(&while_true); in Generate()
851 __ bind(&loop_end); in Generate()
853 __ Branch(&done, ge, exponent, Operand(zero_reg)); in Generate()
854 __ Move(double_scratch, 1.0); in Generate()
855 __ div_d(double_result, double_scratch, double_result); in Generate()
858 __ BranchF(&done, NULL, ne, double_result, kDoubleRegZero); in Generate()
862 __ bind(&bail_out); in Generate()
863 __ mtc1(exponent, single_scratch); in Generate()
864 __ cvt_d_w(double_exponent, single_scratch); in Generate()
867 __ push(ra); in Generate()
870 __ PrepareCallCFunction(0, 2, scratch); in Generate()
871 __ MovToFloatParameters(double_base, double_exponent); in Generate()
872 __ CallCFunction(ExternalReference::power_double_double_function(isolate()), in Generate()
875 __ pop(ra); in Generate()
876 __ MovFromFloatResult(double_result); in Generate()
878 __ bind(&done); in Generate()
879 __ Ret(); in Generate()
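
The integer-exponent loop at lines 836–849 is classic square-and-multiply: multiply the result in on each set exponent bit, square the base between bits, and take a reciprocal at the end for negative exponents (line 831 bails out to the C pow function when negating the exponent overflows). A standalone sketch:

```cpp
#include <cstdint>

// Standalone sketch of the square-and-multiply loop. The stub bails out
// to the C library pow when negating the exponent overflows (INT_MIN);
// the unsigned negation here sidesteps that case.
double PowIntExponent(double base, int32_t exponent) {
  uint32_t e = exponent < 0 ? 0u - static_cast<uint32_t>(exponent)
                            : static_cast<uint32_t>(exponent);
  double scratch = base;  // mov_d(double_scratch, double_base).
  double result = 1.0;    // Move(double_result, 1.0).
  while (true) {
    if (e & 1) result *= scratch;  // Skipped via the no_carry branch.
    e >>= 1;                       // sra(scratch, scratch, 1).
    if (e == 0) break;             // Branch(&loop_end, eq, ...).
    scratch *= scratch;            // mul_d(double_scratch, ...).
  }
  if (exponent < 0) result = 1.0 / result;  // div_d after &loop_end.
  return result;
}
```
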
944 __ mov(s1, a2); in Generate()
947 __ Lsa(s1, sp, a0, kPointerSizeLog2); in Generate()
948 __ Subu(s1, s1, kPointerSize); in Generate()
953 __ EnterExitFrame(save_doubles(), 0, is_builtin_exit() in Generate()
963 __ mov(s0, a0); in Generate()
964 __ mov(s2, a1); in Generate()
969 __ AssertStackIsAligned(); in Generate()
976 __ li(a2, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
977 __ mov(a1, s1); in Generate()
985 __ Subu(sp, sp, Operand(result_stack_size)); in Generate()
988 __ li(a3, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
989 __ mov(a2, s1); in Generate()
990 __ mov(a1, a0); in Generate()
991 __ mov(a0, sp); in Generate()
1005 __ addiupc(ra, kNumInstructionsToJump + 1); in Generate()
1009 __ bal(&find_ra); // bal exposes branch delay slot. in Generate()
1010 __ Addu(ra, ra, kNumInstructionsToJump * Instruction::kInstrSize); in Generate()
1012 __ bind(&find_ra); in Generate()
1015 __ sw(ra, MemOperand(sp, result_stack_size)); in Generate()
1020 __ mov(t9, s2); // Function pointer to t9 to conform to ABI for PIC. in Generate()
1021 __ jalr(t9); in Generate()
1023 __ addiu(sp, sp, -kCArgsSlotsSize); in Generate()
1031 __ lw(a0, MemOperand(v0, 2 * kPointerSize)); in Generate()
1032 __ lw(v1, MemOperand(v0, 1 * kPointerSize)); in Generate()
1033 __ lw(v0, MemOperand(v0, 0 * kPointerSize)); in Generate()
1039 __ LoadRoot(t0, Heap::kExceptionRootIndex); in Generate()
1040 __ Branch(&exception_returned, eq, t0, Operand(v0)); in Generate()
1048 __ li(a2, Operand(pending_exception_address)); in Generate()
1049 __ lw(a2, MemOperand(a2)); in Generate()
1050 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); in Generate()
1052 __ Branch(&okay, eq, t0, Operand(a2)); in Generate()
1053 __ stop("Unexpected pending exception"); in Generate()
1054 __ bind(&okay); in Generate()
1069 __ LeaveExitFrame(save_doubles(), argc, true, EMIT_RETURN); in Generate()
1072 __ bind(&exception_returned); in Generate()
1091 __ PrepareCallCFunction(3, 0, a0); in Generate()
1092 __ mov(a0, zero_reg); in Generate()
1093 __ mov(a1, zero_reg); in Generate()
1094 __ li(a2, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
1095 __ CallCFunction(find_handler, 3); in Generate()
1099 __ li(cp, Operand(pending_handler_context_address)); in Generate()
1100 __ lw(cp, MemOperand(cp)); in Generate()
1101 __ li(sp, Operand(pending_handler_sp_address)); in Generate()
1102 __ lw(sp, MemOperand(sp)); in Generate()
1103 __ li(fp, Operand(pending_handler_fp_address)); in Generate()
1104 __ lw(fp, MemOperand(fp)); in Generate()
1109 __ Branch(&zero, eq, cp, Operand(zero_reg)); in Generate()
1110 __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); in Generate()
1111 __ bind(&zero); in Generate()
1114 __ li(a1, Operand(pending_handler_code_address)); in Generate()
1115 __ lw(a1, MemOperand(a1)); in Generate()
1116 __ li(a2, Operand(pending_handler_offset_address)); in Generate()
1117 __ lw(a2, MemOperand(a2)); in Generate()
1118 __ Addu(a1, a1, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1119 __ Addu(t9, a1, a2); in Generate()
1120 __ Jump(t9); in Generate()
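
The call protocol around lines 1031–1040: the C function's return value lands in v0 and is compared against a dedicated exception sentinel; only on a match does the stub unwind to a handler. A sketch of that protocol with illustrative types:

```cpp
#include <cstdint>

// Sketch of the C-entry result protocol (types and sentinel value are
// illustrative, not V8's): the runtime function returns a tagged word,
// and one distinguished sentinel means "exception pending".
using TaggedWord = uintptr_t;
constexpr TaggedWord kExceptionSentinel = 0xDECADE00;  // Illustrative only.

bool ReturnedException(TaggedWord result) {
  // Mirrors: LoadRoot(t0, Heap::kExceptionRootIndex);
  //          Branch(&exception_returned, eq, t0, Operand(v0));
  return result == kExceptionSentinel;
}
```
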
1141 __ MultiPush(kCalleeSaved | ra.bit()); in Generate()
1144 __ MultiPushFPU(kCalleeSavedFPU); in Generate()
1146 __ Move(kDoubleRegZero, 0.0); in Generate()
1153 __ InitializeRootRegister(); in Generate()
1154 __ lw(s0, MemOperand(sp, offset_to_argv + kCArgsSlotsSize)); in Generate()
1157 __ li(t3, Operand(-1)); // Push a bad frame pointer to fail if it is used. in Generate()
1159 __ li(t2, Operand(Smi::FromInt(marker))); in Generate()
1160 __ li(t1, Operand(Smi::FromInt(marker))); in Generate()
1161 __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress, in Generate()
1163 __ lw(t0, MemOperand(t0)); in Generate()
1164 __ Push(t3, t2, t1, t0); in Generate()
1166 __ addiu(fp, sp, -EntryFrameConstants::kCallerFPOffset); in Generate()
1187 __ li(t1, Operand(ExternalReference(js_entry_sp))); in Generate()
1188 __ lw(t2, MemOperand(t1)); in Generate()
1189 __ Branch(&non_outermost_js, ne, t2, Operand(zero_reg)); in Generate()
1190 __ sw(fp, MemOperand(t1)); in Generate()
1191 __ li(t0, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); in Generate()
1193 __ b(&cont); in Generate()
1194 __ nop(); // Branch delay slot nop. in Generate()
1195 __ bind(&non_outermost_js); in Generate()
1196 __ li(t0, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME))); in Generate()
1197 __ bind(&cont); in Generate()
1198 __ push(t0); in Generate()
1202 __ jmp(&invoke); in Generate()
1203 __ bind(&handler_entry); in Generate()
1209 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in Generate()
1211 __ sw(v0, MemOperand(t0)); // We come back from 'invoke'. result is in v0. in Generate()
1212 __ LoadRoot(v0, Heap::kExceptionRootIndex); in Generate()
1213 __ b(&exit); // b exposes branch delay slot. in Generate()
1214 __ nop(); // Branch delay slot nop. in Generate()
1217 __ bind(&invoke); in Generate()
1218 __ PushStackHandler(); in Generate()
1245 __ li(t0, Operand(construct_entry)); in Generate()
1248 __ li(t0, Operand(entry)); in Generate()
1250 __ lw(t9, MemOperand(t0)); // Deref address. in Generate()
1253 __ addiu(t9, t9, Code::kHeaderSize - kHeapObjectTag); in Generate()
1254 __ Call(t9); in Generate()
1257 __ PopStackHandler(); in Generate()
1259 __ bind(&exit); // v0 holds result in Generate()
1262 __ pop(t1); in Generate()
1263 __ Branch(&non_outermost_js_2, in Generate()
1267 __ li(t1, Operand(ExternalReference(js_entry_sp))); in Generate()
1268 __ sw(zero_reg, MemOperand(t1)); in Generate()
1269 __ bind(&non_outermost_js_2); in Generate()
1272 __ pop(t1); in Generate()
1273 __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress, in Generate()
1275 __ sw(t1, MemOperand(t0)); in Generate()
1278 __ addiu(sp, sp, -EntryFrameConstants::kCallerFPOffset); in Generate()
1281 __ MultiPopFPU(kCalleeSavedFPU); in Generate()
1284 __ MultiPop(kCalleeSaved | ra.bit()); in Generate()
1286 __ Jump(ra); in Generate()
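
The entry stub tracks whether this is the outermost JS entry (lines 1187–1198): js_entry_sp is zero until the first entry, which records its fp there and pushes an OUTERMOST marker; nested entries push INNER. A sketch (marker values illustrative):

```cpp
#include <cstdint>

// Sketch of the outermost-entry bookkeeping (marker values illustrative).
enum JsEntryMarker { kInnerJsEntryFrame, kOutermostJsEntryFrame };

JsEntryMarker EnterJs(uintptr_t* js_entry_sp, uintptr_t fp) {
  if (*js_entry_sp == 0) {  // First JS entry on this thread.
    *js_entry_sp = fp;      // sw(fp, MemOperand(t1)).
    return kOutermostJsEntryFrame;
  }
  return kInnerJsEntryFrame;  // Nested entry.
}
```
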
1307 __ Ret(); in Generate()
1312 __ bind(&miss); in Generate()
1328 __ bind(&miss); in Generate()
1338 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
1368 __ li(a0, Operand(address_of_regexp_stack_memory_size)); in Generate()
1369 __ lw(a0, MemOperand(a0, 0)); in Generate()
1370 __ Branch(&runtime, eq, a0, Operand(zero_reg)); in Generate()
1373 __ lw(a0, MemOperand(sp, kJSRegExpOffset)); in Generate()
1375 __ JumpIfSmi(a0, &runtime); in Generate()
1376 __ GetObjectType(a0, a1, a1); in Generate()
1377 __ Branch(&runtime, ne, a1, Operand(JS_REGEXP_TYPE)); in Generate()
1380 __ lw(regexp_data, FieldMemOperand(a0, JSRegExp::kDataOffset)); in Generate()
1382 __ SmiTst(regexp_data, t0); in Generate()
1383 __ Check(nz, in Generate()
1387 __ GetObjectType(regexp_data, a0, a0); in Generate()
1388 __ Check(eq, in Generate()
1396 __ lw(a0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset)); in Generate()
1397 __ Branch(&runtime, ne, a0, Operand(Smi::FromInt(JSRegExp::IRREGEXP))); in Generate()
1401 __ lw(a2, in Generate()
1409 __ Branch( in Generate()
1413 __ mov(t0, zero_reg); in Generate()
1414 __ lw(subject, MemOperand(sp, kSubjectOffset)); in Generate()
1415 __ JumpIfSmi(subject, &runtime); in Generate()
1416 __ mov(a3, subject); // Make a copy of the original subject string. in Generate()
1439 __ bind(&check_underlying); in Generate()
1440 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
1441 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset)); in Generate()
1444 __ And(a1, in Generate()
1450 __ Branch(&seq_string, eq, a1, Operand(zero_reg)); // Go to (5). in Generate()
1458 __ Branch(&not_seq_nor_cons, ge, a1, Operand(kExternalStringTag)); in Generate()
1462 __ lw(a0, FieldMemOperand(subject, ConsString::kSecondOffset)); in Generate()
1463 __ LoadRoot(a1, Heap::kempty_stringRootIndex); in Generate()
1464 __ Branch(&runtime, ne, a0, Operand(a1)); in Generate()
1465 __ lw(subject, FieldMemOperand(subject, ConsString::kFirstOffset)); in Generate()
1466 __ jmp(&check_underlying); in Generate()
1469 __ bind(&seq_string); in Generate()
1475 __ lw(a1, MemOperand(sp, kPreviousIndexOffset)); in Generate()
1476 __ JumpIfNotSmi(a1, &runtime); in Generate()
1477 __ lw(a3, FieldMemOperand(a3, String::kLengthOffset)); in Generate()
1478 __ Branch(&runtime, ls, a3, Operand(a1)); in Generate()
1479 __ sra(a1, a1, kSmiTagSize); // Untag the Smi. in Generate()
1484 __ And(a0, a0, Operand(kStringEncodingMask)); // Non-zero for one-byte. in Generate()
1485 __ lw(t9, FieldMemOperand(regexp_data, JSRegExp::kDataOneByteCodeOffset)); in Generate()
1486 __ sra(a3, a0, 2); // a3 is 1 for ASCII, 0 for UC16 (used below). in Generate()
1487 __ lw(t1, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset)); in Generate()
1488 __ Movz(t9, t1, a0); // If UC16 (a0 is 0), replace t9 w/kDataUC16CodeOffset. in Generate()
1495 __ JumpIfSmi(t9, &runtime); in Generate()
1503 __ IncrementCounter(isolate()->counters()->regexp_entry_native(), in Generate()
1509 __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters); in Generate()
1527 __ li(a0, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
1528 __ sw(a0, MemOperand(sp, 5 * kPointerSize)); in Generate()
1531 __ li(a0, Operand(1)); in Generate()
1532 __ sw(a0, MemOperand(sp, 4 * kPointerSize)); in Generate()
1535 __ li(a0, Operand(address_of_regexp_stack_memory_address)); in Generate()
1536 __ lw(a0, MemOperand(a0, 0)); in Generate()
1537 __ li(a2, Operand(address_of_regexp_stack_memory_size)); in Generate()
1538 __ lw(a2, MemOperand(a2, 0)); in Generate()
1539 __ addu(a0, a0, a2); in Generate()
1540 __ sw(a0, MemOperand(sp, 3 * kPointerSize)); in Generate()
1544 __ mov(a0, zero_reg); in Generate()
1545 __ sw(a0, MemOperand(sp, 2 * kPointerSize)); in Generate()
1548 __ li(a0, Operand( in Generate()
1550 __ sw(a0, MemOperand(sp, 1 * kPointerSize)); in Generate()
1554 __ Addu(t2, subject, Operand(SeqString::kHeaderSize - kHeapObjectTag)); in Generate()
1555 __ Xor(a3, a3, Operand(1)); // 1 for 2-byte str, 0 for 1-byte. in Generate()
1560 __ lw(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize)); in Generate()
1565 __ sllv(t1, t0, a3); in Generate()
1566 __ addu(t0, t2, t1); in Generate()
1567 __ sllv(t1, a1, a3); in Generate()
1568 __ addu(a2, t0, t1); in Generate()
1570 __ lw(t2, FieldMemOperand(subject, String::kLengthOffset)); in Generate()
1571 __ sra(t2, t2, kSmiTagSize); in Generate()
1572 __ sllv(t1, t2, a3); in Generate()
1573 __ addu(a3, t0, t1); in Generate()
1578 __ mov(a0, subject); in Generate()
1581 __ Addu(t9, t9, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1585 __ LeaveExitFrame(false, no_reg, true); in Generate()
1593 __ Branch(&success, eq, v0, Operand(1)); in Generate()
1597 __ Branch(&failure, eq, v0, Operand(NativeRegExpMacroAssembler::FAILURE)); in Generate()
1599 __ Branch(&runtime, ne, v0, Operand(NativeRegExpMacroAssembler::EXCEPTION)); in Generate()
1604 __ li(a1, Operand(isolate()->factory()->the_hole_value())); in Generate()
1605 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in Generate()
1607 __ lw(v0, MemOperand(a2, 0)); in Generate()
1608 __ Branch(&runtime, eq, v0, Operand(a1)); in Generate()
1611 __ TailCallRuntime(Runtime::kRegExpExecReThrow); in Generate()
1613 __ bind(&failure); in Generate()
1615 __ li(v0, Operand(isolate()->factory()->null_value())); in Generate()
1616 __ DropAndRet(4); in Generate()
1619 __ bind(&success); in Generate()
1620 __ lw(a1, in Generate()
1626 __ Addu(a1, a1, Operand(2)); // a1 was a smi. in Generate()
1629 __ lw(last_match_info_elements, MemOperand(sp, kLastMatchInfoOffset)); in Generate()
1630 __ JumpIfSmi(last_match_info_elements, &runtime); in Generate()
1632 __ lw(a0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset)); in Generate()
1633 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); in Generate()
1634 __ Branch(&runtime, ne, a0, Operand(at)); in Generate()
1637 __ lw(a0, in Generate()
1639 __ Addu(a2, a1, Operand(RegExpMatchInfo::kLastMatchOverhead)); in Generate()
1640 __ sra(at, a0, kSmiTagSize); in Generate()
1641 __ Branch(&runtime, gt, a2, Operand(at)); in Generate()
1646 __ sll(a2, a1, kSmiTagSize + kSmiShiftSize); // To smi. in Generate()
1647 __ sw(a2, FieldMemOperand(last_match_info_elements, in Generate()
1650 __ sw(subject, FieldMemOperand(last_match_info_elements, in Generate()
1652 __ mov(a2, subject); in Generate()
1653 __ RecordWriteField(last_match_info_elements, in Generate()
1656 __ mov(subject, a2); in Generate()
1657 __ sw(subject, FieldMemOperand(last_match_info_elements, in Generate()
1659 __ RecordWriteField(last_match_info_elements, in Generate()
1666 __ li(a2, Operand(address_of_static_offsets_vector)); in Generate()
1673 __ Addu(a0, last_match_info_elements, in Generate()
1675 __ bind(&next_capture); in Generate()
1676 __ Subu(a1, a1, Operand(1)); in Generate()
1677 __ Branch(&done, lt, a1, Operand(zero_reg)); in Generate()
1679 __ lw(a3, MemOperand(a2, 0)); in Generate()
1680 __ addiu(a2, a2, kPointerSize); in Generate()
1682 __ sll(a3, a3, kSmiTagSize); // Convert to Smi. in Generate()
1683 __ sw(a3, MemOperand(a0, 0)); in Generate()
1684 __ Branch(&next_capture, USE_DELAY_SLOT); in Generate()
1685 __ addiu(a0, a0, kPointerSize); // In branch delay slot. in Generate()
1687 __ bind(&done); in Generate()
1690 __ mov(v0, last_match_info_elements); in Generate()
1691 __ DropAndRet(4); in Generate()
1694 __ bind(&runtime); in Generate()
1695 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
1699 __ bind(&not_seq_nor_cons); in Generate()
1701 __ Branch(&not_long_external, gt, a1, Operand(kExternalStringTag)); in Generate()
1704 __ bind(&external_string); in Generate()
1705 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
1706 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset)); in Generate()
1710 __ And(at, a0, Operand(kIsIndirectStringMask)); in Generate()
1711 __ Assert(eq, in Generate()
1716 __ lw(subject, in Generate()
1720 __ Subu(subject, in Generate()
1723 __ jmp(&seq_string); // Go to (5). in Generate()
1726 __ bind(&not_long_external); in Generate()
1728 __ And(at, a1, Operand(kIsNotStringMask | kShortExternalStringMask)); in Generate()
1729 __ Branch(&runtime, ne, at, Operand(zero_reg)); in Generate()
1733 __ lw(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset)); in Generate()
1734 __ sra(t0, t0, kSmiTagSize); in Generate()
1735 __ lw(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); in Generate()
1736 __ jmp(&check_underlying); // Go to (4). in Generate()
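
Deep inside the RegExp stub, the capture-copy loop at lines 1675–1685 reads each int32 capture boundary from the static offsets vector and stores it into the match-info array smi-tagged. Standalone form:

```cpp
#include <cstdint>

// Standalone form of the capture-copy loop: each int32 capture offset is
// smi-tagged (shifted left by kSmiTagSize == 1) before being stored.
void CopyCaptures(const int32_t* offsets_vector, int32_t* match_info,
                  int32_t count) {
  while (count-- > 0) {
    *match_info++ = *offsets_vector++ << 1;  // sll(a3, a3, kSmiTagSize).
  }
}
```
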
1754 __ SmiTag(a0); in CallStubInRecordCallTarget()
1755 __ MultiPush(kSavedRegs); in CallStubInRecordCallTarget()
1757 __ CallStub(stub); in CallStubInRecordCallTarget()
1759 __ MultiPop(kSavedRegs); in CallStubInRecordCallTarget()
1760 __ SmiUntag(a0); in CallStubInRecordCallTarget()
1780 __ Lsa(t2, a2, a3, kPointerSizeLog2 - kSmiTagSize); in GenerateRecordCallTarget()
1781 __ lw(t2, FieldMemOperand(t2, FixedArray::kHeaderSize)); in GenerateRecordCallTarget()
1790 __ lw(weak_value, FieldMemOperand(t2, WeakCell::kValueOffset)); in GenerateRecordCallTarget()
1791 __ Branch(&done, eq, a1, Operand(weak_value)); in GenerateRecordCallTarget()
1792 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1793 __ Branch(&done, eq, t2, Operand(at)); in GenerateRecordCallTarget()
1794 __ lw(feedback_map, FieldMemOperand(t2, HeapObject::kMapOffset)); in GenerateRecordCallTarget()
1795 __ LoadRoot(at, Heap::kWeakCellMapRootIndex); in GenerateRecordCallTarget()
1796 __ Branch(&check_allocation_site, ne, feedback_map, Operand(at)); in GenerateRecordCallTarget()
1799 __ JumpIfSmi(weak_value, &initialize); in GenerateRecordCallTarget()
1800 __ jmp(&megamorphic); in GenerateRecordCallTarget()
1802 __ bind(&check_allocation_site); in GenerateRecordCallTarget()
1807 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); in GenerateRecordCallTarget()
1808 __ Branch(&miss, ne, feedback_map, Operand(at)); in GenerateRecordCallTarget()
1811 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, t2); in GenerateRecordCallTarget()
1812 __ Branch(&megamorphic, ne, a1, Operand(t2)); in GenerateRecordCallTarget()
1813 __ jmp(&done); in GenerateRecordCallTarget()
1815 __ bind(&miss); in GenerateRecordCallTarget()
1819 __ LoadRoot(at, Heap::kuninitialized_symbolRootIndex); in GenerateRecordCallTarget()
1820 __ Branch(&initialize, eq, t2, Operand(at)); in GenerateRecordCallTarget()
1823 __ bind(&megamorphic); in GenerateRecordCallTarget()
1824 __ Lsa(t2, a2, a3, kPointerSizeLog2 - kSmiTagSize); in GenerateRecordCallTarget()
1825 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1826 __ sw(at, FieldMemOperand(t2, FixedArray::kHeaderSize)); in GenerateRecordCallTarget()
1827 __ jmp(&done); in GenerateRecordCallTarget()
1830 __ bind(&initialize); in GenerateRecordCallTarget()
1832 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, t2); in GenerateRecordCallTarget()
1833 __ Branch(&not_array_function, ne, a1, Operand(t2)); in GenerateRecordCallTarget()
1840 __ Branch(&done); in GenerateRecordCallTarget()
1842 __ bind(&not_array_function); in GenerateRecordCallTarget()
1846 __ bind(&done); in GenerateRecordCallTarget()
1849 __ Lsa(at, a2, a3, kPointerSizeLog2 - kSmiTagSize); in GenerateRecordCallTarget()
1850 __ lw(t0, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize)); in GenerateRecordCallTarget()
1851 __ Addu(t0, t0, Operand(Smi::FromInt(1))); in GenerateRecordCallTarget()
1852 __ sw(t0, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize)); in GenerateRecordCallTarget()
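
GenerateRecordCallTarget implements a one-way state machine over the feedback slot: uninitialized becomes monomorphic (a weak cell, or an allocation site for the Array function), and any later mismatch degrades it to megamorphic for good (lines 1790–1827). A sketch of the transitions, with illustrative names:

```cpp
// One-way feedback-slot transitions implemented above (names illustrative).
enum class FeedbackState { kUninitialized, kMonomorphic, kMegamorphic };

FeedbackState RecordCallTarget(FeedbackState state, bool same_target) {
  switch (state) {
    case FeedbackState::kUninitialized:
      return FeedbackState::kMonomorphic;  // The initialize path.
    case FeedbackState::kMonomorphic:
      return same_target ? state
                         : FeedbackState::kMegamorphic;  // The miss path.
    case FeedbackState::kMegamorphic:
      return state;  // Terminal state.
  }
  return state;  // Unreachable; keeps compilers quiet.
}
```
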
1864 __ JumpIfSmi(a1, &non_function); in Generate()
1866 __ GetObjectType(a1, t1, t1); in Generate()
1867 __ Branch(&non_function, ne, t1, Operand(JS_FUNCTION_TYPE)); in Generate()
1871 __ Lsa(t1, a2, a3, kPointerSizeLog2 - kSmiTagSize); in Generate()
1874 __ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize)); in Generate()
1875 __ lw(t1, FieldMemOperand(a2, AllocationSite::kMapOffset)); in Generate()
1876 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); in Generate()
1877 __ Branch(&feedback_register_initialized, eq, t1, Operand(at)); in Generate()
1878 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); in Generate()
1879 __ bind(&feedback_register_initialized); in Generate()
1881 __ AssertUndefinedOrAllocationSite(a2, t1); in Generate()
1884 __ mov(a3, a1); in Generate()
1888 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate()
1889 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset)); in Generate()
1890 __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1891 __ Jump(at); in Generate()
1893 __ bind(&non_function); in Generate()
1894 __ mov(a3, a1); in Generate()
1895 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate()
1901 __ Lsa(at, feedback_vector, slot, kPointerSizeLog2 - kSmiTagSize); in IncrementCallCount()
1902 __ lw(slot, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize)); in IncrementCallCount()
1903 __ Addu(slot, slot, Operand(Smi::FromInt(1))); in IncrementCallCount()
1904 __ sw(slot, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize)); in IncrementCallCount()
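
IncrementCallCount (lines 1901–1904) never untags the count: adding Smi::FromInt(1) to a smi is a plain add of 2 on the tagged word, since both tag bits are zero. Sketch:

```cpp
#include <cstdint>

// Sketch: smi arithmetic without untagging. On 32-bit V8, Smi::FromInt(1)
// is the word 2 (1 << kSmiTagSize), so the Addu above bumps the count by
// one while keeping the tag bit clear.
int32_t IncrementSmiCount(int32_t tagged_count) {
  return tagged_count + (1 << 1);
}
```
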
1913 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, at); in HandleArrayCase()
1914 __ Branch(miss, ne, a1, Operand(at)); in HandleArrayCase()
1919 __ mov(a2, t0); in HandleArrayCase()
1920 __ mov(a3, a1); in HandleArrayCase()
1922 __ TailCallStub(&stub); in HandleArrayCase()
1934 __ Lsa(t0, a2, a3, kPointerSizeLog2 - kSmiTagSize); in Generate()
1935 __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize)); in Generate()
1951 __ lw(t1, FieldMemOperand(t0, WeakCell::kValueOffset)); in Generate()
1952 __ Branch(&extra_checks_or_miss, ne, a1, Operand(t1)); in Generate()
1956 __ JumpIfSmi(a1, &extra_checks_or_miss); in Generate()
1958 __ bind(&call_function); in Generate()
1963 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), in Generate()
1967 __ bind(&extra_checks_or_miss); in Generate()
1970 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); in Generate()
1971 __ Branch(&call, eq, t0, Operand(at)); in Generate()
1974 __ lw(t1, FieldMemOperand(t0, HeapObject::kMapOffset)); in Generate()
1975 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); in Generate()
1976 __ Branch(&not_allocation_site, ne, t1, Operand(at)); in Generate()
1980 __ bind(&not_allocation_site); in Generate()
1985 __ Branch(&miss); in Generate()
1988 __ LoadRoot(at, Heap::kuninitialized_symbolRootIndex); in Generate()
1989 __ Branch(&uninitialized, eq, t0, Operand(at)); in Generate()
1993 __ AssertNotSmi(t0); in Generate()
1994 __ GetObjectType(t0, t1, t1); in Generate()
1995 __ Branch(&miss, ne, t1, Operand(JS_FUNCTION_TYPE)); in Generate()
1996 __ Lsa(t0, a2, a3, kPointerSizeLog2 - kSmiTagSize); in Generate()
1997 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); in Generate()
1998 __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize)); in Generate()
2000 __ bind(&call); in Generate()
2003 __ bind(&call_count_incremented); in Generate()
2005 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), in Generate()
2008 __ bind(&uninitialized); in Generate()
2011 __ JumpIfSmi(a1, &miss); in Generate()
2014 __ GetObjectType(a1, t0, t0); in Generate()
2015 __ Branch(&miss, ne, t0, Operand(JS_FUNCTION_TYPE)); in Generate()
2019 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, t0); in Generate()
2020 __ Branch(&miss, eq, a1, Operand(t0)); in Generate()
2023 __ lw(t0, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate()
2024 __ lw(t0, ContextMemOperand(t0, Context::NATIVE_CONTEXT_INDEX)); in Generate()
2025 __ lw(t1, NativeContextMemOperand()); in Generate()
2026 __ Branch(&miss, ne, t0, Operand(t1)); in Generate()
2035 __ SmiTag(a0); in Generate()
2036 __ Push(a0); in Generate()
2037 __ Push(a2, a3); in Generate()
2038 __ Push(cp, a1); in Generate()
2039 __ CallStub(&create_stub); in Generate()
2040 __ Pop(cp, a1); in Generate()
2041 __ Pop(a2, a3); in Generate()
2042 __ Pop(a0); in Generate()
2043 __ SmiUntag(a0); in Generate()
2046 __ Branch(&call_function); in Generate()
2050 __ bind(&miss); in Generate()
2053 __ Branch(&call_count_incremented); in Generate()
2061 __ SmiTag(a0); in GenerateMiss()
2062 __ Push(a0); in GenerateMiss()
2065 __ Push(a1, a2, a3); in GenerateMiss()
2068 __ CallRuntime(Runtime::kCallIC_Miss); in GenerateMiss()
2071 __ mov(a1, v0); in GenerateMiss()
2074 __ Pop(a0); in GenerateMiss()
2075 __ SmiUntag(a0); in GenerateMiss()
2086 __ JumpIfSmi(object_, receiver_not_string_); in GenerateFast()
2089 __ lw(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); in GenerateFast()
2090 __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); in GenerateFast()
2092 __ And(t0, result_, Operand(kIsNotStringMask)); in GenerateFast()
2093 __ Branch(receiver_not_string_, ne, t0, Operand(zero_reg)); in GenerateFast()
2097 __ JumpIfNotSmi(index_, &index_not_smi_); in GenerateFast()
2099 __ bind(&got_smi_index_); in GenerateFast()
2102 __ lw(t0, FieldMemOperand(object_, String::kLengthOffset)); in GenerateFast()
2103 __ Branch(index_out_of_range_, ls, t0, Operand(index_)); in GenerateFast()
2105 __ sra(index_, index_, kSmiTagSize); in GenerateFast()
2113 __ sll(result_, result_, kSmiTagSize); in GenerateFast()
2114 __ bind(&exit_); in GenerateFast()
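
The bounds check at line 2103 uses ls (unsigned lower-or-same) on the still smi-tagged length and index: tagging preserves order, and a negative index becomes a huge unsigned value, so a single comparison rejects both too-large and negative indices. Sketch:

```cpp
#include <cstdint>

// Sketch of the unsigned bounds check on smi-tagged operands: out of
// range when length <= index as unsigned values, which also catches
// negative indices.
bool SmiIndexOutOfRange(int32_t tagged_length, int32_t tagged_index) {
  return static_cast<uint32_t>(tagged_length) <=
         static_cast<uint32_t>(tagged_index);
}
```
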
2121 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); in GenerateSlow()
2124 __ bind(&index_not_smi_); in GenerateSlow()
2126 __ CheckMap(index_, in GenerateSlow()
2134 __ Push(LoadWithVectorDescriptor::VectorRegister(), in GenerateSlow()
2137 __ Push(object_, index_); in GenerateSlow()
2139 __ CallRuntime(Runtime::kNumberToSmi); in GenerateSlow()
2143 __ Move(index_, v0); in GenerateSlow()
2145 __ Pop(LoadWithVectorDescriptor::VectorRegister(), in GenerateSlow()
2148 __ pop(object_); in GenerateSlow()
2151 __ lw(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); in GenerateSlow()
2152 __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); in GenerateSlow()
2155 __ JumpIfNotSmi(index_, index_out_of_range_); in GenerateSlow()
2157 __ Branch(&got_smi_index_); in GenerateSlow()
2162 __ bind(&call_runtime_); in GenerateSlow()
2164 __ sll(index_, index_, kSmiTagSize); in GenerateSlow()
2165 __ Push(object_, index_); in GenerateSlow()
2166 __ CallRuntime(Runtime::kStringCharCodeAtRT); in GenerateSlow()
2168 __ Move(result_, v0); in GenerateSlow()
2171 __ jmp(&exit_); in GenerateSlow()
2173 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); in GenerateSlow()
2189 __ And(t0, code_, Operand(kSmiTagMask | in GenerateFast()
2191 __ Branch(&slow_case_, ne, t0, Operand(zero_reg)); in GenerateFast()
2193 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); in GenerateFast()
2196 __ Lsa(result_, result_, code_, kPointerSizeLog2 - kSmiTagSize); in GenerateFast()
2197 __ lw(result_, FieldMemOperand(result_, FixedArray::kHeaderSize)); in GenerateFast()
2198 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); in GenerateFast()
2199 __ Branch(&slow_case_, eq, result_, Operand(t0)); in GenerateFast()
2200 __ bind(&exit_); in GenerateFast()
2207 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); in GenerateSlow()
2209 __ bind(&slow_case_); in GenerateSlow()
2211 __ push(code_); in GenerateSlow()
2212 __ CallRuntime(Runtime::kStringCharFromCode); in GenerateSlow()
2213 __ Move(result_, v0); in GenerateSlow()
2216 __ Branch(&exit_); in GenerateSlow()
2218 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); in GenerateSlow()
2233 __ And(scratch, dest, Operand(kPointerAlignmentMask)); in GenerateCopyCharacters()
2234 __ Check(eq, in GenerateCopyCharacters()
2245 __ Addu(count, count, count); in GenerateCopyCharacters()
2249 __ Addu(limit, dest, Operand(count)); in GenerateCopyCharacters()
2253 __ Branch(&loop_entry); in GenerateCopyCharacters()
2254 __ bind(&loop); in GenerateCopyCharacters()
2255 __ lbu(scratch, MemOperand(src)); in GenerateCopyCharacters()
2256 __ Addu(src, src, Operand(1)); in GenerateCopyCharacters()
2257 __ sb(scratch, MemOperand(dest)); in GenerateCopyCharacters()
2258 __ Addu(dest, dest, Operand(1)); in GenerateCopyCharacters()
2259 __ bind(&loop_entry); in GenerateCopyCharacters()
2260 __ Branch(&loop, lt, dest, Operand(limit)); in GenerateCopyCharacters()
2262 __ bind(&done); in GenerateCopyCharacters()
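
GenerateCopyCharacters copies bytes, not characters: for two-byte strings the count is doubled first (line 2245), and the loop at lines 2253–2260 runs lbu/sb against a precomputed limit pointer. Standalone equivalent:

```cpp
#include <cstdint>

// Standalone equivalent of the copy loop. For two-byte strings the
// count has already been doubled into a byte count (line 2245).
void CopyCharacterBytes(const uint8_t* src, uint8_t* dest, int byte_count) {
  const uint8_t* limit = dest + byte_count;  // Addu(limit, dest, count).
  while (dest < limit) {                     // Branch(&loop, lt, dest, limit).
    *dest++ = *src++;                        // lbu + sb, post-incremented.
  }
}
```
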
2273 __ lw(length, FieldMemOperand(left, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
2274 __ lw(scratch2, FieldMemOperand(right, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
2275 __ Branch(&check_zero_length, eq, length, Operand(scratch2)); in GenerateFlatOneByteStringEquals()
2276 __ bind(&strings_not_equal); in GenerateFlatOneByteStringEquals()
2278 __ Ret(USE_DELAY_SLOT); in GenerateFlatOneByteStringEquals()
2279 __ li(v0, Operand(Smi::FromInt(NOT_EQUAL))); in GenerateFlatOneByteStringEquals()
2283 __ bind(&check_zero_length); in GenerateFlatOneByteStringEquals()
2285 __ Branch(&compare_chars, ne, length, Operand(zero_reg)); in GenerateFlatOneByteStringEquals()
2287 __ Ret(USE_DELAY_SLOT); in GenerateFlatOneByteStringEquals()
2288 __ li(v0, Operand(Smi::FromInt(EQUAL))); in GenerateFlatOneByteStringEquals()
2291 __ bind(&compare_chars); in GenerateFlatOneByteStringEquals()
2297 __ Ret(USE_DELAY_SLOT); in GenerateFlatOneByteStringEquals()
2298 __ li(v0, Operand(Smi::FromInt(EQUAL))); in GenerateFlatOneByteStringEquals()
2307 __ lw(scratch1, FieldMemOperand(left, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
2308 __ lw(scratch2, FieldMemOperand(right, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
2309 __ Subu(scratch3, scratch1, Operand(scratch2)); in GenerateCompareFlatOneByteStrings()
2311 __ slt(scratch4, scratch2, scratch1); in GenerateCompareFlatOneByteStrings()
2312 __ Movn(scratch1, scratch2, scratch4); in GenerateCompareFlatOneByteStrings()
2315 __ Branch(&compare_lengths, eq, min_length, Operand(zero_reg)); in GenerateCompareFlatOneByteStrings()
2322 __ bind(&compare_lengths); in GenerateCompareFlatOneByteStrings()
2325 __ mov(scratch2, length_delta); in GenerateCompareFlatOneByteStrings()
2326 __ mov(scratch4, zero_reg); in GenerateCompareFlatOneByteStrings()
2327 __ mov(v0, zero_reg); in GenerateCompareFlatOneByteStrings()
2329 __ bind(&result_not_equal); in GenerateCompareFlatOneByteStrings()
2333 __ Branch(&ret, eq, scratch2, Operand(scratch4)); in GenerateCompareFlatOneByteStrings()
2334 __ li(v0, Operand(Smi::FromInt(GREATER))); in GenerateCompareFlatOneByteStrings()
2335 __ Branch(&ret, gt, scratch2, Operand(scratch4)); in GenerateCompareFlatOneByteStrings()
2336 __ li(v0, Operand(Smi::FromInt(LESS))); in GenerateCompareFlatOneByteStrings()
2337 __ bind(&ret); in GenerateCompareFlatOneByteStrings()
2338 __ Ret(); in GenerateCompareFlatOneByteStrings()
2349 __ SmiUntag(length); in GenerateOneByteCharsCompareLoop()
2350 __ Addu(scratch1, length, in GenerateOneByteCharsCompareLoop()
2352 __ Addu(left, left, Operand(scratch1)); in GenerateOneByteCharsCompareLoop()
2353 __ Addu(right, right, Operand(scratch1)); in GenerateOneByteCharsCompareLoop()
2354 __ Subu(length, zero_reg, length); in GenerateOneByteCharsCompareLoop()
2360 __ bind(&loop); in GenerateOneByteCharsCompareLoop()
2361 __ Addu(scratch3, left, index); in GenerateOneByteCharsCompareLoop()
2362 __ lbu(scratch1, MemOperand(scratch3)); in GenerateOneByteCharsCompareLoop()
2363 __ Addu(scratch3, right, index); in GenerateOneByteCharsCompareLoop()
2364 __ lbu(scratch2, MemOperand(scratch3)); in GenerateOneByteCharsCompareLoop()
2365 __ Branch(chars_not_equal, ne, scratch1, Operand(scratch2)); in GenerateOneByteCharsCompareLoop()
2366 __ Addu(index, index, 1); in GenerateOneByteCharsCompareLoop()
2367 __ Branch(&loop, ne, index, Operand(zero_reg)); in GenerateOneByteCharsCompareLoop()
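
GenerateCompareFlatOneByteStrings compares only min(left.length, right.length) characters (the slt/Movn pair at lines 2311–2312 selects the minimum), falling back to the length difference when the common prefix matches; the character loop itself (lines 2349–2367) counts a negative index up to zero. A straightforward model:

```cpp
#include <cstdint>

// Model of the flat one-byte comparison: compare the common prefix, then
// order by length. The sign of the result matches LESS/EQUAL/GREATER.
int CompareFlatOneByte(const uint8_t* left, int left_len,
                       const uint8_t* right, int right_len) {
  int min_length = left_len < right_len ? left_len : right_len;
  for (int i = 0; i < min_length; ++i) {
    if (left[i] != right[i]) {
      return left[i] < right[i] ? -1 : 1;  // chars_not_equal path.
    }
  }
  return left_len - right_len;  // length_delta path.
}
```
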
2381 __ li(a2, isolate()->factory()->undefined_value()); in Generate()
2385 __ And(at, a2, Operand(kSmiTagMask)); in Generate()
2386 __ Assert(ne, kExpectedAllocationSite, at, Operand(zero_reg)); in Generate()
2387 __ lw(t0, FieldMemOperand(a2, HeapObject::kMapOffset)); in Generate()
2388 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); in Generate()
2389 __ Assert(eq, kExpectedAllocationSite, t0, Operand(at)); in Generate()
2395 __ TailCallStub(&stub); in Generate()
2403 __ CheckMap(a1, a2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); in GenerateBooleans()
2404 __ CheckMap(a0, a3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); in GenerateBooleans()
2406 __ lw(a1, FieldMemOperand(a1, Oddball::kToNumberOffset)); in GenerateBooleans()
2407 __ AssertSmi(a1); in GenerateBooleans()
2408 __ lw(a0, FieldMemOperand(a0, Oddball::kToNumberOffset)); in GenerateBooleans()
2409 __ AssertSmi(a0); in GenerateBooleans()
2411 __ Ret(USE_DELAY_SLOT); in GenerateBooleans()
2412 __ Subu(v0, a1, a0); in GenerateBooleans()
2414 __ bind(&miss); in GenerateBooleans()
2422 __ Or(a2, a1, a0); in GenerateSmis()
2423 __ JumpIfNotSmi(a2, &miss); in GenerateSmis()
2427 __ Ret(USE_DELAY_SLOT); in GenerateSmis()
2428 __ Subu(v0, a0, a1); in GenerateSmis()
2431 __ SmiUntag(a1); in GenerateSmis()
2432 __ SmiUntag(a0); in GenerateSmis()
2433 __ Ret(USE_DELAY_SLOT); in GenerateSmis()
2434 __ Subu(v0, a1, a0); in GenerateSmis()
2437 __ bind(&miss); in GenerateSmis()
2450 __ JumpIfNotSmi(a1, &miss); in GenerateNumbers()
2453 __ JumpIfNotSmi(a0, &miss); in GenerateNumbers()
2460 __ JumpIfSmi(a0, &right_smi); in GenerateNumbers()
2461 __ CheckMap(a0, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined1, in GenerateNumbers()
2463 __ Subu(a2, a0, Operand(kHeapObjectTag)); in GenerateNumbers()
2464 __ ldc1(f2, MemOperand(a2, HeapNumber::kValueOffset)); in GenerateNumbers()
2465 __ Branch(&left); in GenerateNumbers()
2466 __ bind(&right_smi); in GenerateNumbers()
2467 __ SmiUntag(a2, a0); // Can't clobber a0 yet. in GenerateNumbers()
2469 __ mtc1(a2, single_scratch); in GenerateNumbers()
2470 __ cvt_d_w(f2, single_scratch); in GenerateNumbers()
2472 __ bind(&left); in GenerateNumbers()
2473 __ JumpIfSmi(a1, &left_smi); in GenerateNumbers()
2474 __ CheckMap(a1, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined2, in GenerateNumbers()
2476 __ Subu(a2, a1, Operand(kHeapObjectTag)); in GenerateNumbers()
2477 __ ldc1(f0, MemOperand(a2, HeapNumber::kValueOffset)); in GenerateNumbers()
2478 __ Branch(&done); in GenerateNumbers()
2479 __ bind(&left_smi); in GenerateNumbers()
2480 __ SmiUntag(a2, a1); // Can't clobber a1 yet. in GenerateNumbers()
2482 __ mtc1(a2, single_scratch); in GenerateNumbers()
2483 __ cvt_d_w(f0, single_scratch); in GenerateNumbers()
2485 __ bind(&done); in GenerateNumbers()
2490 __ BranchF(&fpu_eq, &unordered, eq, f0, f2); in GenerateNumbers()
2493 __ BranchF(&fpu_lt, NULL, lt, f0, f2); in GenerateNumbers()
2497 __ Ret(USE_DELAY_SLOT); in GenerateNumbers()
2498 __ li(v0, Operand(GREATER)); in GenerateNumbers()
2500 __ bind(&fpu_eq); in GenerateNumbers()
2501 __ Ret(USE_DELAY_SLOT); in GenerateNumbers()
2502 __ li(v0, Operand(EQUAL)); in GenerateNumbers()
2504 __ bind(&fpu_lt); in GenerateNumbers()
2505 __ Ret(USE_DELAY_SLOT); in GenerateNumbers()
2506 __ li(v0, Operand(LESS)); in GenerateNumbers()
2508 __ bind(&unordered); in GenerateNumbers()
2509 __ bind(&generic_stub); in GenerateNumbers()
2512 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); in GenerateNumbers()
2514 __ bind(&maybe_undefined1); in GenerateNumbers()
2516 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); in GenerateNumbers()
2517 __ Branch(&miss, ne, a0, Operand(at)); in GenerateNumbers()
2518 __ JumpIfSmi(a1, &unordered); in GenerateNumbers()
2519 __ GetObjectType(a1, a2, a2); in GenerateNumbers()
2520 __ Branch(&maybe_undefined2, ne, a2, Operand(HEAP_NUMBER_TYPE)); in GenerateNumbers()
2521 __ jmp(&unordered); in GenerateNumbers()
2524 __ bind(&maybe_undefined2); in GenerateNumbers()
2526 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); in GenerateNumbers()
2527 __ Branch(&unordered, eq, a1, Operand(at)); in GenerateNumbers()
2530 __ bind(&miss); in GenerateNumbers()
2546 __ JumpIfEitherSmi(left, right, &miss); in GenerateInternalizedStrings()
2549 __ lw(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2550 __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2551 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2552 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2554 __ Or(tmp1, tmp1, Operand(tmp2)); in GenerateInternalizedStrings()
2555 __ And(at, tmp1, Operand(kIsNotStringMask | kIsNotInternalizedMask)); in GenerateInternalizedStrings()
2556 __ Branch(&miss, ne, at, Operand(zero_reg)); in GenerateInternalizedStrings()
2563 __ mov(v0, right); in GenerateInternalizedStrings()
2565 __ Ret(ne, left, Operand(right)); in GenerateInternalizedStrings()
2567 __ Ret(USE_DELAY_SLOT); in GenerateInternalizedStrings()
2568 __ li(v0, Operand(Smi::FromInt(EQUAL))); in GenerateInternalizedStrings()
2570 __ bind(&miss); in GenerateInternalizedStrings()
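
GenerateInternalizedStrings can decide by pointer alone: internalized strings are deduplicated in the string table, so two of them are equal exactly when they are the same object, hence the single Ret comparing left to right at line 2565. The invariant, stated as code:

```cpp
// The invariant behind the pointer-compare fast path (a sketch of the
// reasoning, not V8 code): for internalized strings, identity and
// content equality coincide.
bool InternalizedStringsEqual(const void* left, const void* right) {
  return left == right;
}
```
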
2587 __ JumpIfEitherSmi(left, right, &miss); in GenerateUniqueNames()
2591 __ lw(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateUniqueNames()
2592 __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateUniqueNames()
2593 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2594 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2596 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss); in GenerateUniqueNames()
2597 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss); in GenerateUniqueNames()
2600 __ mov(v0, a0); in GenerateUniqueNames()
2604 __ Branch(&done, ne, left, Operand(right)); in GenerateUniqueNames()
2610 __ li(v0, Operand(Smi::FromInt(EQUAL))); in GenerateUniqueNames()
2611 __ bind(&done); in GenerateUniqueNames()
2612 __ Ret(); in GenerateUniqueNames()
2614 __ bind(&miss); in GenerateUniqueNames()
2635 __ JumpIfEitherSmi(left, right, &miss); in GenerateStrings()
2639 __ lw(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateStrings()
2640 __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateStrings()
2641 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateStrings()
2642 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateStrings()
2644 __ Or(tmp3, tmp1, tmp2); in GenerateStrings()
2645 __ And(tmp5, tmp3, Operand(kIsNotStringMask)); in GenerateStrings()
2646 __ Branch(&miss, ne, tmp5, Operand(zero_reg)); in GenerateStrings()
2652 __ Branch(&left_ne_right, ne, left, Operand(right)); in GenerateStrings()
2653 __ Ret(USE_DELAY_SLOT); in GenerateStrings()
2654 __ mov(v0, zero_reg); // In the delay slot. in GenerateStrings()
2655 __ bind(&left_ne_right); in GenerateStrings()
2665 __ Or(tmp3, tmp1, Operand(tmp2)); in GenerateStrings()
2666 __ And(tmp5, tmp3, Operand(kIsNotInternalizedMask)); in GenerateStrings()
2668 __ Branch(&is_symbol, ne, tmp5, Operand(zero_reg)); in GenerateStrings()
2672 __ Ret(USE_DELAY_SLOT); in GenerateStrings()
2673 __ mov(v0, a0); // In the delay slot. in GenerateStrings()
2674 __ bind(&is_symbol); in GenerateStrings()
2679 __ JumpIfBothInstanceTypesAreNotSequentialOneByte(tmp1, tmp2, tmp3, tmp4, in GenerateStrings()
2692 __ bind(&runtime); in GenerateStrings()
2696 __ Push(left, right); in GenerateStrings()
2697 __ CallRuntime(Runtime::kStringEqual); in GenerateStrings()
2699 __ LoadRoot(a0, Heap::kTrueValueRootIndex); in GenerateStrings()
2700 __ Ret(USE_DELAY_SLOT); in GenerateStrings()
2701 __ Subu(v0, v0, a0); // In delay slot. in GenerateStrings()
2703 __ Push(left, right); in GenerateStrings()
2704 __ TailCallRuntime(Runtime::kStringCompare); in GenerateStrings()
2707 __ bind(&miss); in GenerateStrings()
2715 __ And(a2, a1, Operand(a0)); in GenerateReceivers()
2716 __ JumpIfSmi(a2, &miss); in GenerateReceivers()
2719 __ GetObjectType(a0, a2, a2); in GenerateReceivers()
2720 __ Branch(&miss, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE)); in GenerateReceivers()
2721 __ GetObjectType(a1, a2, a2); in GenerateReceivers()
2722 __ Branch(&miss, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE)); in GenerateReceivers()
2725 __ Ret(USE_DELAY_SLOT); in GenerateReceivers()
2726 __ subu(v0, a0, a1); in GenerateReceivers()
2728 __ bind(&miss); in GenerateReceivers()
2736 __ And(a2, a1, a0); in GenerateKnownReceivers()
2737 __ JumpIfSmi(a2, &miss); in GenerateKnownReceivers()
2738 __ GetWeakValue(t0, cell); in GenerateKnownReceivers()
2739 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2740 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2741 __ Branch(&miss, ne, a2, Operand(t0)); in GenerateKnownReceivers()
2742 __ Branch(&miss, ne, a3, Operand(t0)); in GenerateKnownReceivers()
2745 __ Ret(USE_DELAY_SLOT); in GenerateKnownReceivers()
2746 __ subu(v0, a0, a1); in GenerateKnownReceivers()
2749 __ li(a2, Operand(Smi::FromInt(GREATER))); in GenerateKnownReceivers()
2751 __ li(a2, Operand(Smi::FromInt(LESS))); in GenerateKnownReceivers()
2753 __ Push(a1, a0, a2); in GenerateKnownReceivers()
2754 __ TailCallRuntime(Runtime::kCompare); in GenerateKnownReceivers()
2757 __ bind(&miss); in GenerateKnownReceivers()
2766 __ Push(a1, a0); in GenerateMiss()
2767 __ Push(ra, a1, a0); in GenerateMiss()
2768 __ li(t0, Operand(Smi::FromInt(op()))); in GenerateMiss()
2769 __ addiu(sp, sp, -kPointerSize); in GenerateMiss()
2770 __ CallRuntime(Runtime::kCompareIC_Miss, 3, kDontSaveFPRegs, in GenerateMiss()
2772 __ sw(t0, MemOperand(sp)); // In the delay slot. in GenerateMiss()
2774 __ Addu(a2, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); in GenerateMiss()
2776 __ Pop(a1, a0, ra); in GenerateMiss()
2778 __ Jump(a2); in GenerateMiss()
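
GenerateMiss calls Runtime::kCompareIC_Miss, which hands back a Code object as a tagged heap pointer; the Addu of Code::kHeaderSize - kHeapObjectTag turns that tagged pointer into the address of the first instruction, folding the untagging and the header skip into one addition. A sketch with stand-in constants (values assumed for illustration):

#include <cassert>
#include <cstdint>

// Stand-in constants in the spirit of the stub: heap pointers carry a
// low tag bit, and a Code object starts with a fixed-size header.
constexpr uintptr_t kHeapObjectTag = 1;
constexpr uintptr_t kCodeHeaderSize = 32;

// Address of the first instruction of a tagged Code pointer: a single
// addition both strips the tag and skips the header, mirroring the Addu.
uintptr_t EntryAddress(uintptr_t tagged_code) {
  return tagged_code + kCodeHeaderSize - kHeapObjectTag;
}

int main() {
  uintptr_t object_start = 0x1000;
  uintptr_t tagged = object_start + kHeapObjectTag;
  assert(EntryAddress(tagged) == object_start + kCodeHeaderSize);
  return 0;
}
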
2788 __ Subu(sp, sp, Operand(kCArgsSlotsSize)); in Generate()
2791 __ sw(ra, MemOperand(sp, kCArgsSlotsSize)); in Generate()
2792 __ Call(t9); // Call the C++ function. in Generate()
2793 __ lw(t9, MemOperand(sp, kCArgsSlotsSize)); in Generate()
2799 __ lw(t0, MemOperand(t9)); in Generate()
2800 __ Assert(ne, kReceivedInvalidReturnAddress, t0, in Generate()
2803 __ Jump(t9); in Generate()
2811 __ Move(t9, target); in GenerateCall()
2812 __ li(at, Operand(loc, RelocInfo::CODE_TARGET), CONSTANT_SIZE); in GenerateCall()
2813 __ Call(at); in GenerateCall()
2835 __ lw(index, FieldMemOperand(properties, kCapacityOffset)); in GenerateNegativeLookup()
2836 __ Subu(index, index, Operand(1)); in GenerateNegativeLookup()
2837 __ And(index, index, Operand( in GenerateNegativeLookup()
2842 __ Lsa(index, index, index, 1); in GenerateNegativeLookup()
2848 __ Lsa(tmp, properties, index, 1); in GenerateNegativeLookup()
2849 __ lw(entity_name, FieldMemOperand(tmp, kElementsStartOffset)); in GenerateNegativeLookup()
2852 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex); in GenerateNegativeLookup()
2853 __ Branch(done, eq, entity_name, Operand(tmp)); in GenerateNegativeLookup()
2856 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex); in GenerateNegativeLookup()
2859 __ Branch(miss, eq, entity_name, Operand(Handle<Name>(name))); in GenerateNegativeLookup()
2862 __ Branch(&good, eq, entity_name, Operand(tmp)); in GenerateNegativeLookup()
2865 __ lw(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset)); in GenerateNegativeLookup()
2866 __ lbu(entity_name, in GenerateNegativeLookup()
2868 __ JumpIfNotUniqueNameInstanceType(entity_name, miss); in GenerateNegativeLookup()
2869 __ bind(&good); in GenerateNegativeLookup()
2872 __ lw(properties, in GenerateNegativeLookup()
2880 __ MultiPush(spill_mask); in GenerateNegativeLookup()
2881 __ lw(a0, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); in GenerateNegativeLookup()
2882 __ li(a1, Operand(Handle<Name>(name))); in GenerateNegativeLookup()
2884 __ CallStub(&stub); in GenerateNegativeLookup()
2885 __ mov(at, v0); in GenerateNegativeLookup()
2886 __ MultiPop(spill_mask); in GenerateNegativeLookup()
2888 __ Branch(done, eq, at, Operand(zero_reg)); in GenerateNegativeLookup()
2889 __ Branch(miss, ne, at, Operand(zero_reg)); in GenerateNegativeLookup()
2909 __ AssertName(name); in GeneratePositiveLookup()
2912 __ lw(scratch1, FieldMemOperand(elements, kCapacityOffset)); in GeneratePositiveLookup()
2913 __ sra(scratch1, scratch1, kSmiTagSize); // convert smi to int in GeneratePositiveLookup()
2914 __ Subu(scratch1, scratch1, Operand(1)); in GeneratePositiveLookup()
2921 __ lw(scratch2, FieldMemOperand(name, Name::kHashFieldOffset)); in GeneratePositiveLookup()
2928 __ Addu(scratch2, scratch2, Operand( in GeneratePositiveLookup()
2931 __ srl(scratch2, scratch2, Name::kHashShift); in GeneratePositiveLookup()
2932 __ And(scratch2, scratch1, scratch2); in GeneratePositiveLookup()
2938 __ Lsa(scratch2, scratch2, scratch2, 1); in GeneratePositiveLookup()
2941 __ Lsa(scratch2, elements, scratch2, 2); in GeneratePositiveLookup()
2942 __ lw(at, FieldMemOperand(scratch2, kElementsStartOffset)); in GeneratePositiveLookup()
2943 __ Branch(done, eq, name, Operand(at)); in GeneratePositiveLookup()
2951 __ MultiPush(spill_mask); in GeneratePositiveLookup()
2954 __ Move(a1, name); in GeneratePositiveLookup()
2955 __ Move(a0, elements); in GeneratePositiveLookup()
2957 __ Move(a0, elements); in GeneratePositiveLookup()
2958 __ Move(a1, name); in GeneratePositiveLookup()
2961 __ CallStub(&stub); in GeneratePositiveLookup()
2962 __ mov(scratch2, a2); in GeneratePositiveLookup()
2963 __ mov(at, v0); in GeneratePositiveLookup()
2964 __ MultiPop(spill_mask); in GeneratePositiveLookup()
2966 __ Branch(done, ne, at, Operand(zero_reg)); in GeneratePositiveLookup()
2967 __ Branch(miss, eq, at, Operand(zero_reg)); in GeneratePositiveLookup()
2994 __ lw(mask, FieldMemOperand(dictionary, kCapacityOffset)); in Generate()
2995 __ sra(mask, mask, kSmiTagSize); in Generate()
2996 __ Subu(mask, mask, Operand(1)); in Generate()
2998 __ lw(hash, FieldMemOperand(key, Name::kHashFieldOffset)); in Generate()
3000 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex); in Generate()
3011 __ Addu(index, hash, Operand( in Generate()
3014 __ mov(index, hash); in Generate()
3016 __ srl(index, index, Name::kHashShift); in Generate()
3017 __ And(index, mask, index); in Generate()
3022 __ Lsa(index, index, index, 1); in Generate()
3025 __ Lsa(index, dictionary, index, 2); in Generate()
3026 __ lw(entry_key, FieldMemOperand(index, kElementsStartOffset)); in Generate()
3029 __ Branch(&not_in_dictionary, eq, entry_key, Operand(undefined)); in Generate()
3032 __ Branch(&in_dictionary, eq, entry_key, Operand(key)); in Generate()
3036 __ lw(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset)); in Generate()
3037 __ lbu(entry_key, in Generate()
3039 __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary); in Generate()
3043 __ bind(&maybe_in_dictionary); in Generate()
3048 __ Ret(USE_DELAY_SLOT); in Generate()
3049 __ mov(result, zero_reg); in Generate()
3052 __ bind(&in_dictionary); in Generate()
3053 __ Ret(USE_DELAY_SLOT); in Generate()
3054 __ li(result, 1); in Generate()
3056 __ bind(&not_in_dictionary); in Generate()
3057 __ Ret(USE_DELAY_SLOT); in Generate()
3058 __ mov(result, zero_reg); in Generate()
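
The NameDictionaryLookupStub matches above probe an open-addressed hash table: capacity is a power of two (so the capacity-minus-one mask replaces a modulo), each entry spans three slots (the Lsa(index, index, index, 1) computes index * 3), and collisions advance by growing triangular offsets so every slot is eventually visited. A self-contained sketch under those assumptions (the slot layout and the 0-as-undefined sentinel are simplifications):

#include <cassert>
#include <cstdint>
#include <vector>

// Three slots per entry (key, value, details), as in the stub's scaling.
// The loop assumes the key is present or a free slot exists, which the
// real table guarantees by keeping spare capacity.
constexpr size_t kEntrySize = 3;

int FindEntry(const std::vector<uintptr_t>& elements, size_t capacity,
              uint32_t hash, uintptr_t key) {
  size_t mask = capacity - 1;
  size_t probe = hash & mask;
  for (size_t i = 1;; probe = (probe + i++) & mask) {  // offsets 0,1,3,6,...
    uintptr_t candidate = elements[probe * kEntrySize];  // key slot
    if (candidate == key) return static_cast<int>(probe);
    if (candidate == 0) return -1;  // hit undefined: the name is absent
  }
}

int main() {
  const size_t capacity = 8;
  std::vector<uintptr_t> elements(capacity * kEntrySize, 0);
  const uintptr_t key = 0x1234;
  const uint32_t hash = 5;
  elements[(hash & (capacity - 1)) * kEntrySize] = key;  // home slot
  assert(FindEntry(elements, capacity, hash, key) == 5);
  assert(FindEntry(elements, capacity, hash, 0x9999) == -1);
  return 0;
}
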
3086 __ beq(zero_reg, zero_reg, &skip_to_incremental_noncompacting); in Generate()
3087 __ nop(); in Generate()
3088 __ beq(zero_reg, zero_reg, &skip_to_incremental_compacting); in Generate()
3089 __ nop(); in Generate()
3092 __ RememberedSetHelper(object(), in Generate()
3098 __ Ret(); in Generate()
3100 __ bind(&skip_to_incremental_noncompacting); in Generate()
3103 __ bind(&skip_to_incremental_compacting); in Generate()
3120 __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0)); in GenerateIncremental()
3121 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value. in GenerateIncremental()
3125 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(), in GenerateIncremental()
3134 __ RememberedSetHelper(object(), in GenerateIncremental()
3140 __ bind(&dont_need_remembered_set); in GenerateIncremental()
3147 __ Ret(); in GenerateIncremental()
3154 __ PrepareCallCFunction(argument_count, regs_.scratch0()); in InformIncrementalMarker()
3159 __ Move(address, regs_.address()); in InformIncrementalMarker()
3160 __ Move(a0, regs_.object()); in InformIncrementalMarker()
3161 __ Move(a1, address); in InformIncrementalMarker()
3162 __ li(a2, Operand(ExternalReference::isolate_address(isolate()))); in InformIncrementalMarker()
3165 __ CallCFunction( in InformIncrementalMarker()
3182 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black); in CheckNeedsToInformIncrementalMarker()
3186 __ RememberedSetHelper(object(), in CheckNeedsToInformIncrementalMarker()
3192 __ Ret(); in CheckNeedsToInformIncrementalMarker()
3195 __ bind(&on_black); in CheckNeedsToInformIncrementalMarker()
3198 __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0)); in CheckNeedsToInformIncrementalMarker()
3203 __ CheckPageFlag(regs_.scratch0(), // Contains value. in CheckNeedsToInformIncrementalMarker()
3209 __ CheckPageFlag(regs_.object(), in CheckNeedsToInformIncrementalMarker()
3215 __ bind(&ensure_not_white); in CheckNeedsToInformIncrementalMarker()
3220 __ Push(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
3221 __ JumpIfWhite(regs_.scratch0(), // The value. in CheckNeedsToInformIncrementalMarker()
3226 __ Pop(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
3230 __ RememberedSetHelper(object(), in CheckNeedsToInformIncrementalMarker()
3236 __ Ret(); in CheckNeedsToInformIncrementalMarker()
3239 __ bind(&need_incremental_pop_scratch); in CheckNeedsToInformIncrementalMarker()
3240 __ Pop(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
3242 __ bind(&need_incremental); in CheckNeedsToInformIncrementalMarker()
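
The RecordWriteStub matches implement the generational write-barrier slow path: after a store, if the object lies outside new space but the stored value lies inside it, the slot is recorded via RememberedSetHelper so the next scavenge can find the old-to-new pointer; the incremental-marking variants additionally inform the marker. A conceptual sketch, with the space predicate and set representation invented (V8 checks page flags, not an address threshold):

#include <cassert>
#include <cstdint>
#include <set>

constexpr uintptr_t kNewSpaceStart = 0x80000000u;  // stand-in boundary

struct Heap {
  std::set<void**> remembered_set;

  static bool InNewSpace(const void* p) {
    return reinterpret_cast<uintptr_t>(p) >= kNewSpaceStart;
  }

  void RecordWrite(void* object, void** slot, void* value) {
    *slot = value;
    if (!InNewSpace(object) && InNewSpace(value)) {
      remembered_set.insert(slot);  // old -> new: remember this slot
    }
  }
};

int main() {
  Heap heap;
  void* old_object = reinterpret_cast<void*>(0x1000);       // old space
  void* young_value = reinterpret_cast<void*>(0x80001000);  // new space
  void* slot = nullptr;
  heap.RecordWrite(old_object, &slot, young_value);
  assert(heap.remembered_set.count(&slot) == 1);
  return 0;
}
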
3250 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET); in Generate()
3253 __ lw(a1, MemOperand(fp, parameter_count_offset)); in Generate()
3255 __ Addu(a1, a1, Operand(1)); in Generate()
3258 __ sll(a1, a1, kPointerSizeLog2); in Generate()
3259 __ Ret(USE_DELAY_SLOT); in Generate()
3260 __ Addu(sp, sp, a1); in Generate()
3264 __ EmitLoadTypeFeedbackVector(a2); in Generate()
3266 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); in Generate()
3280 __ lw(cached_map, in HandleArrayCases()
3282 __ lw(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); in HandleArrayCases()
3283 __ Branch(&start_polymorphic, ne, receiver_map, Operand(cached_map)); in HandleArrayCases()
3286 __ lw(handler, FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(1))); in HandleArrayCases()
3287 __ Addu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); in HandleArrayCases()
3288 __ Jump(t9); in HandleArrayCases()
3292 __ bind(&start_polymorphic); in HandleArrayCases()
3293 __ lw(length, FieldMemOperand(feedback, FixedArray::kLengthOffset)); in HandleArrayCases()
3297 __ Branch(miss, eq, length, Operand(Smi::FromInt(2))); in HandleArrayCases()
3313 __ Lsa(too_far, feedback, length, kPointerSizeLog2 - kSmiTagSize); in HandleArrayCases()
3314 __ Addu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in HandleArrayCases()
3315 __ Addu(pointer_reg, feedback, in HandleArrayCases()
3318 __ bind(&next_loop); in HandleArrayCases()
3319 __ lw(cached_map, MemOperand(pointer_reg)); in HandleArrayCases()
3320 __ lw(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); in HandleArrayCases()
3321 __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map)); in HandleArrayCases()
3322 __ lw(handler, MemOperand(pointer_reg, kPointerSize)); in HandleArrayCases()
3323 __ Addu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); in HandleArrayCases()
3324 __ Jump(t9); in HandleArrayCases()
3326 __ bind(&prepare_next); in HandleArrayCases()
3327 __ Addu(pointer_reg, pointer_reg, Operand(kPointerSize * 2)); in HandleArrayCases()
3328 __ Branch(&next_loop, lt, pointer_reg, Operand(too_far)); in HandleArrayCases()
3331 __ jmp(miss); in HandleArrayCases()
3340 __ JumpIfSmi(receiver, load_smi_map); in HandleMonomorphicCase()
3341 __ lw(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); in HandleMonomorphicCase()
3342 __ bind(compare_map); in HandleMonomorphicCase()
3345 __ lw(cached_map, FieldMemOperand(feedback, WeakCell::kValueOffset)); in HandleMonomorphicCase()
3346 __ Branch(try_array, ne, cached_map, Operand(receiver_map)); in HandleMonomorphicCase()
3349 __ Lsa(handler, vector, slot, kPointerSizeLog2 - kSmiTagSize); in HandleMonomorphicCase()
3350 __ lw(handler, in HandleMonomorphicCase()
3352 __ Addu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); in HandleMonomorphicCase()
3353 __ Jump(t9); in HandleMonomorphicCase()
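
HandleMonomorphicCase and HandleArrayCases dispatch through the type-feedback vector: the monomorphic case compares the receiver's map against one cached map (held through a WeakCell) and jumps straight to its handler, while the polymorphic case walks a flat array of (map, handler) pairs, falling through to the miss label when nothing matches. A sketch of that dispatch with invented types:

#include <cassert>
#include <utility>
#include <vector>

using Map = const void*;
using Handler = int;  // stands in for a code entry address

// Feedback is a list of cached (map, handler) pairs; a matching receiver
// map selects its handler, anything else is a miss.
Handler Dispatch(const std::vector<std::pair<Map, Handler>>& feedback,
                 Map receiver_map) {
  for (const auto& entry : feedback) {
    if (entry.first == receiver_map) return entry.second;  // cache hit
  }
  return -1;  // miss: the stub branches to the miss label here
}

int main() {
  int map_a = 0, map_b = 0;  // addresses serve as distinct map identities
  std::vector<std::pair<Map, Handler>> feedback = {{&map_a, 10},
                                                   {&map_b, 20}};
  assert(Dispatch(feedback, &map_b) == 20);
  assert(Dispatch(feedback, nullptr) == -1);
  return 0;
}
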
3357 __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister()); in Generate()
3382 __ lw(too_far, FieldMemOperand(feedback, FixedArray::kLengthOffset)); in HandlePolymorphicStoreCase()
3394 __ Lsa(too_far, feedback, too_far, kPointerSizeLog2 - kSmiTagSize); in HandlePolymorphicStoreCase()
3395 __ Addu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in HandlePolymorphicStoreCase()
3396 __ Addu(pointer_reg, feedback, in HandlePolymorphicStoreCase()
3399 __ bind(&next_loop); in HandlePolymorphicStoreCase()
3400 __ lw(cached_map, MemOperand(pointer_reg)); in HandlePolymorphicStoreCase()
3401 __ lw(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); in HandlePolymorphicStoreCase()
3402 __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map)); in HandlePolymorphicStoreCase()
3404 __ lw(too_far, MemOperand(pointer_reg, kPointerSize)); in HandlePolymorphicStoreCase()
3405 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); in HandlePolymorphicStoreCase()
3406 __ Branch(&transition_call, ne, too_far, Operand(at)); in HandlePolymorphicStoreCase()
3407 __ lw(pointer_reg, MemOperand(pointer_reg, kPointerSize * 2)); in HandlePolymorphicStoreCase()
3408 __ Addu(t9, pointer_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); in HandlePolymorphicStoreCase()
3409 __ Jump(t9); in HandlePolymorphicStoreCase()
3411 __ bind(&transition_call); in HandlePolymorphicStoreCase()
3412 __ lw(too_far, FieldMemOperand(too_far, WeakCell::kValueOffset)); in HandlePolymorphicStoreCase()
3413 __ JumpIfSmi(too_far, miss); in HandlePolymorphicStoreCase()
3415 __ lw(receiver_map, MemOperand(pointer_reg, kPointerSize * 2)); in HandlePolymorphicStoreCase()
3419 __ mov(feedback, too_far); in HandlePolymorphicStoreCase()
3421 __ Addu(t9, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag)); in HandlePolymorphicStoreCase()
3422 __ Jump(t9); in HandlePolymorphicStoreCase()
3424 __ bind(&prepare_next); in HandlePolymorphicStoreCase()
3425 __ Addu(pointer_reg, pointer_reg, Operand(kPointerSize * 3)); in HandlePolymorphicStoreCase()
3426 __ Branch(&next_loop, lt, pointer_reg, Operand(too_far)); in HandlePolymorphicStoreCase()
3429 __ jmp(miss); in HandlePolymorphicStoreCase()
3442 __ Lsa(feedback, vector, slot, kPointerSizeLog2 - kSmiTagSize); in GenerateImpl()
3443 __ lw(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); in GenerateImpl()
3453 __ bind(&try_array); in GenerateImpl()
3455 __ lw(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); in GenerateImpl()
3456 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); in GenerateImpl()
3457 __ Branch(&not_array, ne, scratch1, Operand(at)); in GenerateImpl()
3461 __ bind(&polymorphic); in GenerateImpl()
3468 __ bind(&not_array); in GenerateImpl()
3470 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); in GenerateImpl()
3471 __ Branch(&try_poly_name, ne, feedback, Operand(at)); in GenerateImpl()
3474 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); in GenerateImpl()
3476 __ bind(&try_poly_name); in GenerateImpl()
3478 __ Branch(&miss, ne, key, Operand(feedback)); in GenerateImpl()
3481 __ Lsa(feedback, vector, slot, kPointerSizeLog2 - kSmiTagSize); in GenerateImpl()
3482 __ lw(feedback, in GenerateImpl()
3487 __ bind(&miss); in GenerateImpl()
3490 __ bind(&load_smi_map); in GenerateImpl()
3491 __ Branch(USE_DELAY_SLOT, &compare_map); in GenerateImpl()
3492 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); // In delay slot. in GenerateImpl()
3499 __ push(ra); in MaybeCallEntryHook()
3500 __ CallStub(&stub); in MaybeCallEntryHook()
3501 __ pop(ra); in MaybeCallEntryHook()
3521 __ MultiPush(kSavedRegs | ra.bit()); in Generate()
3524 __ Subu(a0, ra, Operand(kReturnAddressDistanceFromFunctionStart)); in Generate()
3528 __ Addu(a1, sp, Operand(kNumSavedRegs * kPointerSize)); in Generate()
3533 __ mov(s5, sp); in Generate()
3535 __ And(sp, sp, Operand(-frame_alignment)); in Generate()
3537 __ Subu(sp, sp, kCArgsSlotsSize); in Generate()
3541 __ li(t9, Operand(entry_hook)); in Generate()
3546 __ li(a2, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
3549 __ li(t9, Operand(ExternalReference(&dispatcher, in Generate()
3554 __ Call(t9); in Generate()
3558 __ mov(sp, s5); in Generate()
3560 __ Addu(sp, sp, kCArgsSlotsSize); in Generate()
3564 __ MultiPop(kSavedRegs | ra.bit()); in Generate()
3565 __ Ret(); in Generate()
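
ProfileEntryHookStub::Generate shows the standard MIPS stack-alignment dance: the original sp is saved in s5, sp is rounded down with a single And against -frame_alignment (valid because the alignment is a power of two), and the saved value is restored after the C call. The same rounding in plain C++:

#include <cassert>
#include <cstdint>

// Round an address down to a power-of-two alignment; ~(alignment - 1)
// is the two's-complement -alignment the stub ANDs against sp.
uintptr_t AlignDown(uintptr_t sp, uintptr_t alignment) {
  return sp & ~(alignment - 1);
}

int main() {
  assert(AlignDown(0x1007, 8) == 0x1000);
  assert(AlignDown(0x1010, 16) == 0x1010);  // already aligned: unchanged
  return 0;
}
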
3574 __ TailCallStub(&stub); in CreateArrayDispatch()
3581 __ TailCallStub(&stub, eq, a3, Operand(kind)); in CreateArrayDispatch()
3585 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatch()
3609 __ And(at, a3, Operand(1)); in CreateArrayDispatchOneArgument()
3610 __ Branch(&normal_sequence, ne, at, Operand(zero_reg)); in CreateArrayDispatchOneArgument()
3614 __ lw(t1, MemOperand(sp, 0)); in CreateArrayDispatchOneArgument()
3615 __ Branch(&normal_sequence, eq, t1, Operand(zero_reg)); in CreateArrayDispatchOneArgument()
3624 __ TailCallStub(&stub_holey); in CreateArrayDispatchOneArgument()
3626 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
3630 __ TailCallStub(&stub); in CreateArrayDispatchOneArgument()
3634 __ Addu(a3, a3, Operand(1)); in CreateArrayDispatchOneArgument()
3637 __ lw(t1, FieldMemOperand(a2, 0)); in CreateArrayDispatchOneArgument()
3638 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); in CreateArrayDispatchOneArgument()
3639 __ Assert(eq, kExpectedAllocationSite, t1, Operand(at)); in CreateArrayDispatchOneArgument()
3646 __ lw(t0, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset)); in CreateArrayDispatchOneArgument()
3647 __ Addu(t0, t0, Operand(Smi::FromInt(kFastElementsKindPackedToHoley))); in CreateArrayDispatchOneArgument()
3648 __ sw(t0, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset)); in CreateArrayDispatchOneArgument()
3651 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
3657 __ TailCallStub(&stub, eq, a3, Operand(kind)); in CreateArrayDispatchOneArgument()
3661 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatchOneArgument()
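
CreateArrayDispatchOneArgument upgrades a packed elements kind to its holey variant by adding the Smi constant kFastElementsKindPackedToHoley to the AllocationSite's transition info; this works because the kinds are ordered so each holey kind sits at a fixed offset from its packed counterpart. A sketch (enum values illustrative, not taken from V8's headers):

#include <cassert>

enum ElementsKind {
  FAST_SMI_ELEMENTS = 0,
  FAST_HOLEY_SMI_ELEMENTS = 1,
  FAST_ELEMENTS = 2,
  FAST_HOLEY_ELEMENTS = 3,
};
constexpr int kPackedToHoley = 1;  // fixed packed -> holey distance

// "Go holey" is a single addition thanks to the ordering above.
ElementsKind GoHoley(ElementsKind kind) {
  return static_cast<ElementsKind>(kind + kPackedToHoley);
}

int main() {
  assert(GoHoley(FAST_SMI_ELEMENTS) == FAST_HOLEY_SMI_ELEMENTS);
  assert(GoHoley(FAST_ELEMENTS) == FAST_HOLEY_ELEMENTS);
  return 0;
}
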
3705 __ And(at, a0, a0); in GenerateDispatchToArrayStub()
3706 __ Branch(&not_zero_case, ne, at, Operand(zero_reg)); in GenerateDispatchToArrayStub()
3709 __ bind(&not_zero_case); in GenerateDispatchToArrayStub()
3710 __ Branch(&not_one_case, gt, a0, Operand(1)); in GenerateDispatchToArrayStub()
3713 __ bind(&not_one_case); in GenerateDispatchToArrayStub()
3715 __ TailCallStub(&stub); in GenerateDispatchToArrayStub()
3733 __ lw(t0, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3735 __ SmiTst(t0, at); in Generate()
3736 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, in Generate()
3738 __ GetObjectType(t0, t0, t1); in Generate()
3739 __ Assert(eq, kUnexpectedInitialMapForArrayFunction, in Generate()
3743 __ AssertUndefinedOrAllocationSite(a2, t0); in Generate()
3747 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate()
3750 __ Branch(&subclassing, ne, a1, Operand(a3)); in Generate()
3754 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); in Generate()
3755 __ Branch(&no_info, eq, a2, Operand(at)); in Generate()
3757 __ lw(a3, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset)); in Generate()
3758 __ SmiUntag(a3); in Generate()
3760 __ And(a3, a3, Operand(AllocationSite::ElementsKindBits::kMask)); in Generate()
3763 __ bind(&no_info); in Generate()
3767 __ bind(&subclassing); in Generate()
3768 __ Lsa(at, sp, a0, kPointerSizeLog2); in Generate()
3769 __ sw(a1, MemOperand(at)); in Generate()
3770 __ li(at, Operand(3)); in Generate()
3771 __ addu(a0, a0, at); in Generate()
3772 __ Push(a3, a2); in Generate()
3773 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate())); in Generate()
3781 __ TailCallStub(&stub0, lo, a0, Operand(1)); in GenerateCase()
3784 __ TailCallStub(&stubN, hi, a0, Operand(1)); in GenerateCase()
3789 __ lw(at, MemOperand(sp, 0)); in GenerateCase()
3793 __ TailCallStub(&stub1_holey, ne, at, Operand(zero_reg)); in GenerateCase()
3797 __ TailCallStub(&stub1); in GenerateCase()
3814 __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3816 __ SmiTst(a3, at); in Generate()
3817 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, in Generate()
3819 __ GetObjectType(a3, a3, t0); in Generate()
3820 __ Assert(eq, kUnexpectedInitialMapForArrayFunction, in Generate()
3825 __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3829 __ lbu(a3, FieldMemOperand(a3, Map::kBitField2Offset)); in Generate()
3831 __ DecodeField<Map::ElementsKindBits>(a3); in Generate()
3835 __ Branch(&done, eq, a3, Operand(FAST_ELEMENTS)); in Generate()
3836 __ Assert( in Generate()
3839 __ bind(&done); in Generate()
3843 __ Branch(&fast_elements_case, eq, a3, Operand(FAST_ELEMENTS)); in Generate()
3846 __ bind(&fast_elements_case); in Generate()
3858 __ AssertFunction(a1); in Generate()
3859 __ AssertReceiver(a3); in Generate()
3863 __ GetObjectType(a3, a2, a2); in Generate()
3864 __ Branch(&new_object, ne, a2, Operand(JS_FUNCTION_TYPE)); in Generate()
3867 __ lw(a2, FieldMemOperand(a3, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3868 __ JumpIfSmi(a2, &new_object); in Generate()
3869 __ GetObjectType(a2, a0, a0); in Generate()
3870 __ Branch(&new_object, ne, a0, Operand(MAP_TYPE)); in Generate()
3874 __ lw(a0, FieldMemOperand(a2, Map::kConstructorOrBackPointerOffset)); in Generate()
3875 __ Branch(&new_object, ne, a0, Operand(a1)); in Generate()
3879 __ lbu(t0, FieldMemOperand(a2, Map::kInstanceSizeOffset)); in Generate()
3880 __ Allocate(t0, v0, t1, a0, &allocate, SIZE_IN_WORDS); in Generate()
3881 __ bind(&done_allocate); in Generate()
3884 __ sw(a2, FieldMemOperand(v0, JSObject::kMapOffset)); in Generate()
3885 __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex); in Generate()
3886 __ sw(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset)); in Generate()
3887 __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset)); in Generate()
3889 __ Addu(a1, v0, Operand(JSObject::kHeaderSize - kHeapObjectTag)); in Generate()
3903 __ lw(a3, FieldMemOperand(a2, Map::kBitField3Offset)); in Generate()
3904 __ And(at, a3, Operand(Map::ConstructionCounter::kMask)); in Generate()
3905 __ Branch(USE_DELAY_SLOT, &slack_tracking, ne, at, Operand(0)); in Generate()
3906 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); // In delay slot. in Generate()
3909 __ InitializeFieldsWithFiller(a1, t1, a0); in Generate()
3910 __ Ret(); in Generate()
3912 __ bind(&slack_tracking); in Generate()
3916 __ Subu(a3, a3, Operand(1 << Map::ConstructionCounter::kShift)); in Generate()
3917 __ sw(a3, FieldMemOperand(a2, Map::kBitField3Offset)); in Generate()
3920 __ lbu(t0, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset)); in Generate()
3921 __ sll(t0, t0, kPointerSizeLog2); in Generate()
3922 __ subu(t0, t1, t0); in Generate()
3923 __ InitializeFieldsWithFiller(a1, t0, a0); in Generate()
3926 __ LoadRoot(a0, Heap::kOnePointerFillerMapRootIndex); in Generate()
3927 __ InitializeFieldsWithFiller(a1, t1, a0); in Generate()
3932 __ And(a3, a3, Operand(Map::ConstructionCounter::kMask)); in Generate()
3933 __ Branch(&finalize, eq, a3, Operand(zero_reg)); in Generate()
3934 __ Ret(); in Generate()
3937 __ bind(&finalize); in Generate()
3940 __ Push(v0, a2); in Generate()
3941 __ CallRuntime(Runtime::kFinalizeInstanceSize); in Generate()
3942 __ Pop(v0); in Generate()
3944 __ Ret(); in Generate()
3948 __ bind(&allocate); in Generate()
3953 __ sll(t0, t0, kPointerSizeLog2 + kSmiTagSize); in Generate()
3954 __ Push(a2, t0); in Generate()
3955 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
3956 __ Pop(a2); in Generate()
3958 __ lbu(t1, FieldMemOperand(a2, Map::kInstanceSizeOffset)); in Generate()
3959 __ Lsa(t1, v0, t1, kPointerSizeLog2); in Generate()
3961 __ Subu(t1, t1, Operand(kHeapObjectTag)); in Generate()
3962 __ jmp(&done_allocate); in Generate()
3965 __ bind(&new_object); in Generate()
3966 __ Push(a1, a3); in Generate()
3967 __ TailCallRuntime(Runtime::kNewObject); in Generate()
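
The FastNewObjectStub matches implement in-object slack tracking: while the map's ConstructionCounter is non-zero, the fields expected to be used are seeded with undefined and the unused tail is filled with the one-pointer filler map, and each allocation decrements the counter until Runtime::kFinalizeInstanceSize can shrink the instance size. A conceptual sketch with an invented representation:

#include <assert.h>
#include <vector>

enum Field { kFiller, kUndefined };

// While tracking is active, seed used fields with undefined and mark the
// unused tail with filler the GC can later reclaim; each allocation
// decrements the map's construction counter.
std::vector<Field> AllocateWithSlackTracking(int instance_size,
                                             int unused_fields,
                                             int* construction_counter) {
  std::vector<Field> object(instance_size, kUndefined);
  for (int i = instance_size - unused_fields; i < instance_size; ++i) {
    object[i] = kFiller;
  }
  if (*construction_counter > 0) --*construction_counter;
  return object;
}

int main() {
  int counter = 2;
  std::vector<Field> obj = AllocateWithSlackTracking(8, 3, &counter);
  assert(obj[4] == kUndefined && obj[5] == kFiller);
  assert(counter == 1);  // finalize once it reaches zero
  return 0;
}
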
3978 __ AssertFunction(a1); in Generate()
3981 __ mov(a2, fp); in Generate()
3985 __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset)); in Generate()
3989 __ lw(a3, MemOperand(a2, StandardFrameConstants::kFunctionOffset)); in Generate()
3990 __ Branch(&ok, eq, a1, Operand(a3)); in Generate()
3991 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); in Generate()
3992 __ bind(&ok); in Generate()
3998 __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset)); in Generate()
3999 __ lw(a3, MemOperand(a2, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate()
4000 __ Branch(&no_rest_parameters, ne, a3, in Generate()
4006 __ lw(a0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset)); in Generate()
4007 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate()
4008 __ lw(a3, in Generate()
4010 __ Subu(a0, a0, Operand(a3)); in Generate()
4011 __ Branch(&rest_parameters, gt, a0, Operand(zero_reg)); in Generate()
4014 __ bind(&no_rest_parameters); in Generate()
4023 __ Allocate(JSArray::kSize, v0, a0, a1, &allocate, NO_ALLOCATION_FLAGS); in Generate()
4024 __ bind(&done_allocate); in Generate()
4027 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, a1); in Generate()
4028 __ sw(a1, FieldMemOperand(v0, JSArray::kMapOffset)); in Generate()
4029 __ LoadRoot(a1, Heap::kEmptyFixedArrayRootIndex); in Generate()
4030 __ sw(a1, FieldMemOperand(v0, JSArray::kPropertiesOffset)); in Generate()
4031 __ sw(a1, FieldMemOperand(v0, JSArray::kElementsOffset)); in Generate()
4032 __ Move(a1, Smi::kZero); in Generate()
4033 __ Ret(USE_DELAY_SLOT); in Generate()
4034 __ sw(a1, FieldMemOperand(v0, JSArray::kLengthOffset)); // In delay slot. in Generate()
4038 __ bind(&allocate); in Generate()
4041 __ Push(Smi::FromInt(JSArray::kSize)); in Generate()
4042 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
4044 __ jmp(&done_allocate); in Generate()
4047 __ bind(&rest_parameters); in Generate()
4050 __ Lsa(a2, a2, a0, kPointerSizeLog2 - 1); in Generate()
4051 __ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset - in Generate()
4064 __ li(t0, Operand(JSArray::kSize + FixedArray::kHeaderSize)); in Generate()
4065 __ Lsa(t0, t0, a0, kPointerSizeLog2 - 1); in Generate()
4066 __ Allocate(t0, v0, a3, t1, &allocate, NO_ALLOCATION_FLAGS); in Generate()
4067 __ bind(&done_allocate); in Generate()
4070 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); in Generate()
4071 __ sw(at, FieldMemOperand(v0, FixedArray::kMapOffset)); in Generate()
4072 __ sw(a0, FieldMemOperand(v0, FixedArray::kLengthOffset)); in Generate()
4073 __ Addu(a3, v0, Operand(FixedArray::kHeaderSize)); in Generate()
4076 __ sll(at, a0, kPointerSizeLog2 - 1); in Generate()
4077 __ Addu(a1, a3, at); in Generate()
4078 __ bind(&loop); in Generate()
4079 __ Branch(&done_loop, eq, a1, Operand(a3)); in Generate()
4080 __ lw(at, MemOperand(a2, 0 * kPointerSize)); in Generate()
4081 __ sw(at, FieldMemOperand(a3, 0 * kPointerSize)); in Generate()
4082 __ Subu(a2, a2, Operand(1 * kPointerSize)); in Generate()
4083 __ Addu(a3, a3, Operand(1 * kPointerSize)); in Generate()
4084 __ jmp(&loop); in Generate()
4085 __ bind(&done_loop); in Generate()
4089 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, at); in Generate()
4090 __ sw(at, FieldMemOperand(a3, JSArray::kMapOffset)); in Generate()
4091 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); in Generate()
4092 __ sw(at, FieldMemOperand(a3, JSArray::kPropertiesOffset)); in Generate()
4093 __ sw(v0, FieldMemOperand(a3, JSArray::kElementsOffset)); in Generate()
4094 __ sw(a0, FieldMemOperand(a3, JSArray::kLengthOffset)); in Generate()
4096 __ Ret(USE_DELAY_SLOT); in Generate()
4097 __ mov(v0, a3); // In delay slot. in Generate()
4101 __ bind(&allocate); in Generate()
4102 __ Branch(&too_big_for_new_space, gt, t0, in Generate()
4106 __ SmiTag(t0); in Generate()
4107 __ Push(a0, a2, t0); in Generate()
4108 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
4109 __ Pop(a0, a2); in Generate()
4111 __ jmp(&done_allocate); in Generate()
4114 __ bind(&too_big_for_new_space); in Generate()
4115 __ Push(a1); in Generate()
4116 __ TailCallRuntime(Runtime::kNewStrictArguments); in Generate()
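
FastNewRestParameter computes the rest count as the actual argument count (taken from the arguments-adaptor frame) minus the formal parameter count from the SharedFunctionInfo; only a positive difference produces a non-empty rest array, built from the trailing actuals. The same computation in plain C++:

#include <assert.h>
#include <vector>

// Number of rest arguments = actuals - formals, clamped at zero; the
// rest array holds the trailing actuals. (The stub copies them from
// high to low stack addresses; order is the same either way.)
std::vector<int> CollectRest(const std::vector<int>& actuals,
                             size_t formal_count) {
  if (actuals.size() <= formal_count) return {};  // no rest parameters
  return std::vector<int>(actuals.begin() + formal_count, actuals.end());
}

int main() {
  // function f(a, ...rest) called as f(1, 2, 3): formal_count == 1.
  std::vector<int> rest = CollectRest({1, 2, 3}, 1);
  assert(rest.size() == 2 && rest[0] == 2 && rest[1] == 3);
  assert(CollectRest({1}, 1).empty());
  return 0;
}
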
4128 __ AssertFunction(a1); in Generate()
4131 __ mov(t0, fp); in Generate()
4135 __ lw(t0, MemOperand(t0, StandardFrameConstants::kCallerFPOffset)); in Generate()
4139 __ lw(a3, MemOperand(t0, StandardFrameConstants::kFunctionOffset)); in Generate()
4140 __ Branch(&ok, eq, a1, Operand(a3)); in Generate()
4141 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); in Generate()
4142 __ bind(&ok); in Generate()
4146 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate()
4147 __ lw(a2, in Generate()
4149 __ Lsa(a3, t0, a2, kPointerSizeLog2 - 1); in Generate()
4150 __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate()
4162 __ lw(t0, MemOperand(t0, StandardFrameConstants::kCallerFPOffset)); in Generate()
4163 __ lw(a0, MemOperand(t0, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate()
4164 __ Branch(&adaptor_frame, eq, a0, in Generate()
4168 __ mov(t1, a2); in Generate()
4169 __ Branch(USE_DELAY_SLOT, &try_allocate); in Generate()
4170 __ mov(t2, a2); // In delay slot. in Generate()
4173 __ bind(&adaptor_frame); in Generate()
4174 __ lw(t1, MemOperand(t0, ArgumentsAdaptorFrameConstants::kLengthOffset)); in Generate()
4175 __ Lsa(t0, t0, t1, 1); in Generate()
4176 __ Addu(a3, t0, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate()
4181 __ mov(t2, a2); in Generate()
4182 __ Branch(&try_allocate, le, t2, Operand(t1)); in Generate()
4183 __ mov(t2, t1); in Generate()
4185 __ bind(&try_allocate); in Generate()
4194 __ Branch(USE_DELAY_SLOT, &param_map_size, eq, t2, Operand(zero_reg)); in Generate()
4195 __ mov(t5, zero_reg); // In delay slot: param map size = 0 when t2 == 0. in Generate()
4196 __ sll(t5, t2, 1); in Generate()
4197 __ addiu(t5, t5, kParameterMapHeaderSize); in Generate()
4198 __ bind(&param_map_size); in Generate()
4201 __ Lsa(t5, t5, t1, 1); in Generate()
4202 __ Addu(t5, t5, Operand(FixedArray::kHeaderSize)); in Generate()
4205 __ Addu(t5, t5, Operand(JSSloppyArgumentsObject::kSize)); in Generate()
4208 __ Allocate(t5, v0, t5, t0, &runtime, NO_ALLOCATION_FLAGS); in Generate()
4218 __ lw(t0, NativeContextMemOperand()); in Generate()
4220 __ Branch(&skip2_ne, ne, t2, Operand(zero_reg)); in Generate()
4221 __ lw(t0, MemOperand(t0, kNormalOffset)); in Generate()
4222 __ bind(&skip2_ne); in Generate()
4224 __ Branch(&skip2_eq, eq, t2, Operand(zero_reg)); in Generate()
4225 __ lw(t0, MemOperand(t0, kAliasedOffset)); in Generate()
4226 __ bind(&skip2_eq); in Generate()
4232 __ sw(t0, FieldMemOperand(v0, JSObject::kMapOffset)); in Generate()
4233 __ LoadRoot(t5, Heap::kEmptyFixedArrayRootIndex); in Generate()
4234 __ sw(t5, FieldMemOperand(v0, JSObject::kPropertiesOffset)); in Generate()
4235 __ sw(t5, FieldMemOperand(v0, JSObject::kElementsOffset)); in Generate()
4238 __ AssertNotSmi(a1); in Generate()
4239 __ sw(a1, FieldMemOperand(v0, JSSloppyArgumentsObject::kCalleeOffset)); in Generate()
4242 __ AssertSmi(t1); in Generate()
4243 __ sw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset)); in Generate()
4248 __ Addu(t0, v0, Operand(JSSloppyArgumentsObject::kSize)); in Generate()
4249 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); in Generate()
4258 __ Branch(&skip3, ne, t2, Operand(Smi::kZero)); in Generate()
4261 __ mov(a1, t0); in Generate()
4262 __ bind(&skip3); in Generate()
4264 __ Branch(&skip_parameter_map, eq, t2, Operand(Smi::kZero)); in Generate()
4266 __ LoadRoot(t1, Heap::kSloppyArgumentsElementsMapRootIndex); in Generate()
4267 __ sw(t1, FieldMemOperand(t0, FixedArray::kMapOffset)); in Generate()
4268 __ Addu(t1, t2, Operand(Smi::FromInt(2))); in Generate()
4269 __ sw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset)); in Generate()
4270 __ sw(cp, FieldMemOperand(t0, FixedArray::kHeaderSize + 0 * kPointerSize)); in Generate()
4271 __ Lsa(t1, t0, t2, 1); in Generate()
4272 __ Addu(t1, t1, Operand(kParameterMapHeaderSize)); in Generate()
4273 __ sw(t1, FieldMemOperand(t0, FixedArray::kHeaderSize + 1 * kPointerSize)); in Generate()
4284 __ mov(t1, t2); in Generate()
4285 __ Addu(t5, a2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS))); in Generate()
4286 __ Subu(t5, t5, Operand(t2)); in Generate()
4287 __ LoadRoot(t3, Heap::kTheHoleValueRootIndex); in Generate()
4288 __ Lsa(a1, t0, t1, 1); in Generate()
4289 __ Addu(a1, a1, Operand(kParameterMapHeaderSize)); in Generate()
4296 __ jmp(&parameters_test); in Generate()
4298 __ bind(&parameters_loop); in Generate()
4299 __ Subu(t1, t1, Operand(Smi::FromInt(1))); in Generate()
4300 __ sll(a0, t1, 1); in Generate()
4301 __ Addu(a0, a0, Operand(kParameterMapHeaderSize - kHeapObjectTag)); in Generate()
4302 __ Addu(t6, t0, a0); in Generate()
4303 __ sw(t5, MemOperand(t6)); in Generate()
4304 __ Subu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize)); in Generate()
4305 __ Addu(t6, a1, a0); in Generate()
4306 __ sw(t3, MemOperand(t6)); in Generate()
4307 __ Addu(t5, t5, Operand(Smi::FromInt(1))); in Generate()
4308 __ bind(&parameters_test); in Generate()
4309 __ Branch(&parameters_loop, ne, t1, Operand(Smi::kZero)); in Generate()
4312 __ lw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset)); in Generate()
4314 __ bind(&skip_parameter_map); in Generate()
4321 __ LoadRoot(t5, Heap::kFixedArrayMapRootIndex); in Generate()
4322 __ sw(t5, FieldMemOperand(a1, FixedArray::kMapOffset)); in Generate()
4323 __ sw(t1, FieldMemOperand(a1, FixedArray::kLengthOffset)); in Generate()
4326 __ sll(t6, t2, 1); in Generate()
4327 __ Subu(a3, a3, Operand(t6)); in Generate()
4328 __ jmp(&arguments_test); in Generate()
4330 __ bind(&arguments_loop); in Generate()
4331 __ Subu(a3, a3, Operand(kPointerSize)); in Generate()
4332 __ lw(t0, MemOperand(a3, 0)); in Generate()
4333 __ Lsa(t5, a1, t2, 1); in Generate()
4334 __ sw(t0, FieldMemOperand(t5, FixedArray::kHeaderSize)); in Generate()
4335 __ Addu(t2, t2, Operand(Smi::FromInt(1))); in Generate()
4337 __ bind(&arguments_test); in Generate()
4338 __ Branch(&arguments_loop, lt, t2, Operand(t1)); in Generate()
4341 __ Ret(); in Generate()
4345 __ bind(&runtime); in Generate()
4346 __ Push(a1, a3, t1); in Generate()
4347 __ TailCallRuntime(Runtime::kNewSloppyArguments); in Generate()
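
The FastNewSloppyArguments matches build the aliased arguments object: the first mapped entries of the parameter map point at context slots (so the parameter and arguments[i] stay in sync), the hole marks unmapped entries, and the remaining values live in a plain backing FixedArray. A conceptual sketch of the aliasing, with the layout invented for illustration:

#include <assert.h>
#include <vector>

struct SloppyArguments {
  std::vector<int>* context;     // shared slots for mapped parameters
  std::vector<int> mapped_slot;  // arguments index -> context slot, -1 = unmapped
  std::vector<int> backing;      // values for unmapped indices

  int Get(size_t i) const {
    int slot = mapped_slot[i];
    return slot >= 0 ? (*context)[slot] : backing[i];
  }
  void Set(size_t i, int value) {
    int slot = mapped_slot[i];
    if (slot >= 0) (*context)[slot] = value; else backing[i] = value;
  }
};

int main() {
  std::vector<int> context = {42};  // parameter `a` lives in the context
  SloppyArguments args{&context, {0, -1}, {0, 7}};
  assert(args.Get(0) == 42 && args.Get(1) == 7);
  args.Set(0, 99);                  // writes through to the context slot
  assert(context[0] == 99);
  return 0;
}
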
4358 __ AssertFunction(a1); in Generate()
4361 __ mov(a2, fp); in Generate()
4365 __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset)); in Generate()
4369 __ lw(a3, MemOperand(a2, StandardFrameConstants::kFunctionOffset)); in Generate()
4370 __ Branch(&ok, eq, a1, Operand(a3)); in Generate()
4371 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); in Generate()
4372 __ bind(&ok); in Generate()
4377 __ lw(a3, MemOperand(a2, StandardFrameConstants::kCallerFPOffset)); in Generate()
4378 __ lw(a0, MemOperand(a3, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate()
4379 __ Branch(&arguments_adaptor, eq, a0, in Generate()
4382 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate()
4383 __ lw(a0, in Generate()
4385 __ Lsa(a2, a2, a0, kPointerSizeLog2 - 1); in Generate()
4386 __ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset - in Generate()
4389 __ Branch(&arguments_done); in Generate()
4390 __ bind(&arguments_adaptor); in Generate()
4392 __ lw(a0, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset)); in Generate()
4393 __ Lsa(a2, a3, a0, kPointerSizeLog2 - 1); in Generate()
4394 __ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset - in Generate()
4397 __ bind(&arguments_done); in Generate()
4409 __ li(t0, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize)); in Generate()
4410 __ Lsa(t0, t0, a0, kPointerSizeLog2 - 1); in Generate()
4411 __ Allocate(t0, v0, a3, t1, &allocate, NO_ALLOCATION_FLAGS); in Generate()
4412 __ bind(&done_allocate); in Generate()
4415 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); in Generate()
4416 __ sw(at, FieldMemOperand(v0, FixedArray::kMapOffset)); in Generate()
4417 __ sw(a0, FieldMemOperand(v0, FixedArray::kLengthOffset)); in Generate()
4418 __ Addu(a3, v0, Operand(FixedArray::kHeaderSize)); in Generate()
4421 __ sll(at, a0, kPointerSizeLog2 - 1); in Generate()
4422 __ Addu(a1, a3, at); in Generate()
4423 __ bind(&loop); in Generate()
4424 __ Branch(&done_loop, eq, a1, Operand(a3)); in Generate()
4425 __ lw(at, MemOperand(a2, 0 * kPointerSize)); in Generate()
4426 __ sw(at, FieldMemOperand(a3, 0 * kPointerSize)); in Generate()
4427 __ Subu(a2, a2, Operand(1 * kPointerSize)); in Generate()
4428 __ Addu(a3, a3, Operand(1 * kPointerSize)); in Generate()
4429 __ Branch(&loop); in Generate()
4430 __ bind(&done_loop); in Generate()
4434 __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, at); in Generate()
4435 __ sw(at, FieldMemOperand(a3, JSStrictArgumentsObject::kMapOffset)); in Generate()
4436 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); in Generate()
4437 __ sw(at, FieldMemOperand(a3, JSStrictArgumentsObject::kPropertiesOffset)); in Generate()
4438 __ sw(v0, FieldMemOperand(a3, JSStrictArgumentsObject::kElementsOffset)); in Generate()
4439 __ sw(a0, FieldMemOperand(a3, JSStrictArgumentsObject::kLengthOffset)); in Generate()
4441 __ Ret(USE_DELAY_SLOT); in Generate()
4442 __ mov(v0, a3); // In delay slot. in Generate()
4446 __ bind(&allocate); in Generate()
4447 __ Branch(&too_big_for_new_space, gt, t0, Operand(kMaxRegularHeapObjectSize)); in Generate()
4450 __ SmiTag(t0); in Generate()
4451 __ Push(a0, a2, t0); in Generate()
4452 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
4453 __ Pop(a0, a2); in Generate()
4455 __ jmp(&done_allocate); in Generate()
4458 __ bind(&too_big_for_new_space); in Generate()
4459 __ Push(a1); in Generate()
4460 __ TailCallRuntime(Runtime::kNewStrictArguments); in Generate()
4490 __ li(t9, Operand(ExternalReference::is_profiling_address(isolate))); in CallApiFunctionAndReturn()
4491 __ lb(t9, MemOperand(t9, 0)); in CallApiFunctionAndReturn()
4492 __ Branch(&profiler_disabled, eq, t9, Operand(zero_reg)); in CallApiFunctionAndReturn()
4495 __ li(t9, Operand(thunk_ref)); in CallApiFunctionAndReturn()
4496 __ jmp(&end_profiler_check); in CallApiFunctionAndReturn()
4498 __ bind(&profiler_disabled); in CallApiFunctionAndReturn()
4499 __ mov(t9, function_address); in CallApiFunctionAndReturn()
4500 __ bind(&end_profiler_check); in CallApiFunctionAndReturn()
4503 __ li(s3, Operand(next_address)); in CallApiFunctionAndReturn()
4504 __ lw(s0, MemOperand(s3, kNextOffset)); in CallApiFunctionAndReturn()
4505 __ lw(s1, MemOperand(s3, kLimitOffset)); in CallApiFunctionAndReturn()
4506 __ lw(s2, MemOperand(s3, kLevelOffset)); in CallApiFunctionAndReturn()
4507 __ Addu(s2, s2, Operand(1)); in CallApiFunctionAndReturn()
4508 __ sw(s2, MemOperand(s3, kLevelOffset)); in CallApiFunctionAndReturn()
4512 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
4513 __ PrepareCallCFunction(1, a0); in CallApiFunctionAndReturn()
4514 __ li(a0, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
4515 __ CallCFunction(ExternalReference::log_enter_external_function(isolate), in CallApiFunctionAndReturn()
4517 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
4528 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
4529 __ PrepareCallCFunction(1, a0); in CallApiFunctionAndReturn()
4530 __ li(a0, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
4531 __ CallCFunction(ExternalReference::log_leave_external_function(isolate), in CallApiFunctionAndReturn()
4533 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
4542 __ lw(v0, return_value_operand); in CallApiFunctionAndReturn()
4543 __ bind(&return_value_loaded); in CallApiFunctionAndReturn()
4547 __ sw(s0, MemOperand(s3, kNextOffset)); in CallApiFunctionAndReturn()
4548 if (__ emit_debug_code()) { in CallApiFunctionAndReturn()
4549 __ lw(a1, MemOperand(s3, kLevelOffset)); in CallApiFunctionAndReturn()
4550 __ Check(eq, kUnexpectedLevelAfterReturnFromApiCall, a1, Operand(s2)); in CallApiFunctionAndReturn()
4552 __ Subu(s2, s2, Operand(1)); in CallApiFunctionAndReturn()
4553 __ sw(s2, MemOperand(s3, kLevelOffset)); in CallApiFunctionAndReturn()
4554 __ lw(at, MemOperand(s3, kLimitOffset)); in CallApiFunctionAndReturn()
4555 __ Branch(&delete_allocated_handles, ne, s1, Operand(at)); in CallApiFunctionAndReturn()
4558 __ bind(&leave_exit_frame); in CallApiFunctionAndReturn()
4562 __ lw(cp, *context_restore_operand); in CallApiFunctionAndReturn()
4567 __ lw(s0, MemOperand(sp, stack_space_offset + kCArgsSlotsSize)); in CallApiFunctionAndReturn()
4569 __ li(s0, Operand(stack_space)); in CallApiFunctionAndReturn()
4571 __ LeaveExitFrame(false, s0, !restore_context, NO_EMIT_RETURN, in CallApiFunctionAndReturn()
4575 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); in CallApiFunctionAndReturn()
4576 __ li(at, Operand(ExternalReference::scheduled_exception_address(isolate))); in CallApiFunctionAndReturn()
4577 __ lw(t1, MemOperand(at)); in CallApiFunctionAndReturn()
4578 __ Branch(&promote_scheduled_exception, ne, t0, Operand(t1)); in CallApiFunctionAndReturn()
4580 __ Ret(); in CallApiFunctionAndReturn()
4583 __ bind(&promote_scheduled_exception); in CallApiFunctionAndReturn()
4584 __ TailCallRuntime(Runtime::kPromoteScheduledException); in CallApiFunctionAndReturn()
4587 __ bind(&delete_allocated_handles); in CallApiFunctionAndReturn()
4588 __ sw(s1, MemOperand(s3, kLimitOffset)); in CallApiFunctionAndReturn()
4589 __ mov(s0, v0); in CallApiFunctionAndReturn()
4590 __ mov(a0, v0); in CallApiFunctionAndReturn()
4591 __ PrepareCallCFunction(1, s1); in CallApiFunctionAndReturn()
4592 __ li(a0, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
4593 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), in CallApiFunctionAndReturn()
4595 __ mov(v0, s0); in CallApiFunctionAndReturn()
4596 __ jmp(&leave_exit_frame); in CallApiFunctionAndReturn()
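
CallApiFunctionAndReturn brackets the API call with HandleScope bookkeeping: next, limit, and level are loaded from the isolate's handle-scope data (the kNextOffset/kLimitOffset/kLevelOffset operands above), level is bumped, and after the call next is restored, level is checked and decremented, and delete_allocated_handles runs only when the callee grew the limit. A simplified sketch (field names mirror those offsets; everything else is invented):

#include <cassert>

struct HandleScopeData {
  void** next = nullptr;
  void** limit = nullptr;
  int level = 0;
};

void DeleteExtensions(HandleScopeData* data, void** saved_limit) {
  data->limit = saved_limit;  // stand-in for freeing extension blocks
}

template <typename Fn>
void CallWithHandleScope(HandleScopeData* data, Fn api_call) {
  void** saved_next = data->next;
  void** saved_limit = data->limit;
  int entry_level = ++data->level;
  api_call(data);
  data->next = saved_next;             // drop handles the callee created
  assert(data->level == entry_level);  // the stub's Check(eq, ...)
  --data->level;
  if (data->limit != saved_limit) DeleteExtensions(data, saved_limit);
}

int main() {
  void* block[4] = {nullptr, nullptr, nullptr, nullptr};
  HandleScopeData data;
  data.next = &block[0];
  data.limit = &block[4];
  CallWithHandleScope(&data, [](HandleScopeData* d) {
    d->next += 2;  // the callee allocates two handles
  });
  assert(data.next == &block[0]);  // restored after the call
  assert(data.level == 0);
  return 0;
}
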
4632 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate()
4635 __ Push(context, callee, call_data); in Generate()
4638 __ lw(context, FieldMemOperand(callee, JSFunction::kContextOffset)); in Generate()
4643 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); in Generate()
4646 __ Push(scratch, scratch); in Generate()
4647 __ li(scratch, Operand(ExternalReference::isolate_address(masm->isolate()))); in Generate()
4649 __ Push(scratch, holder); in Generate()
4652 __ mov(scratch, sp); in Generate()
4659 __ EnterExitFrame(false, kApiStackSpace); in Generate()
4664 __ Addu(a0, sp, Operand(1 * kPointerSize)); in Generate()
4666 __ sw(scratch, MemOperand(a0, 0 * kPointerSize)); in Generate()
4668 __ Addu(at, scratch, Operand((FCA::kArgsLength - 1 + argc()) * kPointerSize)); in Generate()
4669 __ sw(at, MemOperand(a0, 1 * kPointerSize)); in Generate()
4671 __ li(at, Operand(argc())); in Generate()
4672 __ sw(at, MemOperand(a0, 2 * kPointerSize)); in Generate()
4721 __ Subu(sp, sp, (PCA::kArgsLength + 1) * kPointerSize); in Generate()
4722 __ sw(receiver, MemOperand(sp, (PCA::kThisIndex + 1) * kPointerSize)); in Generate()
4723 __ lw(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset)); in Generate()
4724 __ sw(scratch, MemOperand(sp, (PCA::kDataIndex + 1) * kPointerSize)); in Generate()
4725 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); in Generate()
4726 __ sw(scratch, MemOperand(sp, (PCA::kReturnValueOffset + 1) * kPointerSize)); in Generate()
4727 __ sw(scratch, MemOperand(sp, (PCA::kReturnValueDefaultValueIndex + 1) * in Generate()
4729 __ li(scratch, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
4730 __ sw(scratch, MemOperand(sp, (PCA::kIsolateIndex + 1) * kPointerSize)); in Generate()
4731 __ sw(holder, MemOperand(sp, (PCA::kHolderIndex + 1) * kPointerSize)); in Generate()
4734 __ sw(zero_reg, in Generate()
4736 __ lw(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset)); in Generate()
4737 __ sw(scratch, MemOperand(sp, 0 * kPointerSize)); in Generate()
4743 __ mov(a0, sp); // a0 = Handle<Name> in Generate()
4744 __ Addu(a1, a0, Operand(1 * kPointerSize)); // a1 = v8::PCI::args_ in Generate()
4748 __ EnterExitFrame(false, kApiStackSpace); in Generate()
4752 __ sw(a1, MemOperand(sp, 1 * kPointerSize)); in Generate()
4753 __ Addu(a1, sp, Operand(1 * kPointerSize)); // a1 = v8::PropertyCallbackInfo& in Generate()
4758 __ lw(scratch, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset)); in Generate()
4759 __ lw(api_function_address, in Generate()
4770 #undef __