Lines matching "full:__"

Each entry below gives the source line number, the matching statement, and its enclosing context.

24 #define __ ACCESS_MASM(masm)  macro
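
Every entry below uses the "__" shorthand defined here: inside a stub generator, "__ op(...)" expands to "masm->op(...)" via ACCESS_MASM. A minimal sketch of the convention (GenerateExample is a hypothetical stand-in for the stub methods listed below):

    #include "src/macro-assembler.h"  // defines ACCESS_MASM(masm) as masm->

    #define __ ACCESS_MASM(masm)

    // Hypothetical generator; in the real file these are methods on stub
    // classes, and r2/r3 are s390 registers known to the MacroAssembler.
    void GenerateExample(MacroAssembler* masm) {
      __ push(r3);                  // expands to masm->push(r3);
      __ AddP(r2, r2, Operand(3));  // expands to masm->AddP(r2, r2, Operand(3));
      __ Ret();                     // expands to masm->Ret();
    }

    #undef __
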
27 __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2)); in Generate()
28 __ StoreP(r3, MemOperand(sp, r1)); in Generate()
29 __ push(r3); in Generate()
30 __ push(r4); in Generate()
31 __ AddP(r2, r2, Operand(3)); in Generate()
32 __ TailCallRuntime(Runtime::kNewArray); in Generate()
68 __ push(descriptor.GetRegisterParameter(i)); in GenerateLightweightMiss()
70 __ CallExternalReference(miss, param_count); in GenerateLightweightMiss()
73 __ Ret(); in GenerateLightweightMiss()
92 __ push(scratch); in Generate()
98 __ LoadDouble(double_scratch, MemOperand(input_reg, double_offset)); in Generate()
101 __ ConvertDoubleToInt64(double_scratch, in Generate()
109 __ TestIfInt32(result_reg, r0); in Generate()
111 __ TestIfInt32(scratch, result_reg, r0); in Generate()
113 __ beq(&fastpath_done, Label::kNear); in Generate()
116 __ Push(scratch_high, scratch_low); in Generate()
120 __ LoadlW(scratch_high, in Generate()
122 __ LoadlW(scratch_low, in Generate()
125 __ ExtractBitMask(scratch, scratch_high, HeapNumber::kExponentMask); in Generate()
129 __ SubP(scratch, Operand(HeapNumber::kExponentBias + 1)); in Generate()
134 __ CmpP(scratch, Operand(83)); in Generate()
135 __ bge(&out_of_range, Label::kNear); in Generate()
142 __ Load(r0, Operand(51)); in Generate()
143 __ SubP(scratch, r0, scratch); in Generate()
144 __ CmpP(scratch, Operand::Zero()); in Generate()
145 __ ble(&only_low, Label::kNear); in Generate()
148 __ ShiftRight(scratch_low, scratch_low, scratch); in Generate()
152 __ Load(r0, Operand(32)); in Generate()
153 __ SubP(scratch, r0, scratch); in Generate()
154 __ ExtractBitMask(result_reg, scratch_high, HeapNumber::kMantissaMask); in Generate()
157 __ Load(r0, Operand(1 << ((HeapNumber::kMantissaBitsInTopWord)-16))); in Generate()
158 __ ShiftLeftP(r0, r0, Operand(16)); in Generate()
159 __ OrP(result_reg, result_reg, r0); in Generate()
160 __ ShiftLeft(r0, result_reg, scratch); in Generate()
161 __ OrP(result_reg, scratch_low, r0); in Generate()
162 __ b(&negate, Label::kNear); in Generate()
164 __ bind(&out_of_range); in Generate()
165 __ mov(result_reg, Operand::Zero()); in Generate()
166 __ b(&done, Label::kNear); in Generate()
168 __ bind(&only_low); in Generate()
171 __ LoadComplementRR(scratch, scratch); in Generate()
172 __ ShiftLeft(result_reg, scratch_low, scratch); in Generate()
174 __ bind(&negate); in Generate()
181 __ ShiftRightArith(r0, scratch_high, Operand(31)); in Generate()
183 __ lgfr(r0, r0); in Generate()
184 __ ShiftRightP(r0, r0, Operand(32)); in Generate()
186 __ XorP(result_reg, r0); in Generate()
187 __ ShiftRight(r0, scratch_high, Operand(31)); in Generate()
188 __ AddP(result_reg, r0); in Generate()
190 __ bind(&done); in Generate()
191 __ Pop(scratch_high, scratch_low); in Generate()
193 __ bind(&fastpath_done); in Generate()
194 __ pop(scratch); in Generate()
196 __ Ret(); in Generate()
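
This Generate() block is a DoubleToI-style slow path: it truncates a double to an int32 by operating directly on the IEEE-754 words, extracting the exponent, deciding whether the 32 result bits come from the high mantissa word, the low word, or a combination, shifting them into place, and applying the sign last. A scalar C++ sketch of the same idea, not a transcription of the register sequence:

    #include <cstdint>
    #include <cstring>

    // ECMAScript-style ToInt32: take the integer part of the value modulo
    // 2^32, then reinterpret as signed. NaN and infinities map to 0.
    int32_t DoubleToInt32Bits(double value) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof bits);
      bool negative = (bits >> 63) != 0;
      int exponent = static_cast<int>((bits >> 52) & 0x7ff);
      if (exponent == 0x7ff) return 0;  // NaN or +/-infinity
      uint64_t mantissa = bits & ((uint64_t{1} << 52) - 1);
      if (exponent != 0) mantissa |= uint64_t{1} << 52;  // implicit leading 1
      int shift = exponent - 1075;  // |value| == mantissa * 2^shift
      uint32_t low32;
      if (shift >= 32 || shift <= -53) {
        low32 = 0;  // out of range: the low 32 integer bits are all zero
      } else if (shift >= 0) {
        low32 = static_cast<uint32_t>(mantissa << shift);
      } else {
        low32 = static_cast<uint32_t>(mantissa >> -shift);
      }
      uint32_t result = negative ? uint32_t{0} - low32 : low32;
      return static_cast<int32_t>(result);
    }
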
206 __ CmpP(r2, r3); in EmitIdenticalObjectComparison()
207 __ bne(&not_identical); in EmitIdenticalObjectComparison()
215 __ CompareObjectType(r2, r6, r6, FIRST_JS_RECEIVER_TYPE); in EmitIdenticalObjectComparison()
216 __ bge(slow); in EmitIdenticalObjectComparison()
218 __ CmpP(r6, Operand(SYMBOL_TYPE)); in EmitIdenticalObjectComparison()
219 __ beq(slow); in EmitIdenticalObjectComparison()
221 __ CmpP(r6, Operand(SIMD128_VALUE_TYPE)); in EmitIdenticalObjectComparison()
222 __ beq(slow); in EmitIdenticalObjectComparison()
224 __ CompareObjectType(r2, r6, r6, HEAP_NUMBER_TYPE); in EmitIdenticalObjectComparison()
225 __ beq(&heap_number); in EmitIdenticalObjectComparison()
228 __ CmpP(r6, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitIdenticalObjectComparison()
229 __ bge(slow); in EmitIdenticalObjectComparison()
231 __ CmpP(r6, Operand(SYMBOL_TYPE)); in EmitIdenticalObjectComparison()
232 __ beq(slow); in EmitIdenticalObjectComparison()
234 __ CmpP(r6, Operand(SIMD128_VALUE_TYPE)); in EmitIdenticalObjectComparison()
235 __ beq(slow); in EmitIdenticalObjectComparison()
240 __ CmpP(r6, Operand(ODDBALL_TYPE)); in EmitIdenticalObjectComparison()
241 __ bne(&return_equal); in EmitIdenticalObjectComparison()
242 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); in EmitIdenticalObjectComparison()
243 __ bne(&return_equal); in EmitIdenticalObjectComparison()
246 __ LoadImmP(r2, Operand(GREATER)); in EmitIdenticalObjectComparison()
249 __ LoadImmP(r2, Operand(LESS)); in EmitIdenticalObjectComparison()
251 __ Ret(); in EmitIdenticalObjectComparison()
256 __ bind(&return_equal); in EmitIdenticalObjectComparison()
258 __ LoadImmP(r2, Operand(GREATER)); // Things aren't less than themselves. in EmitIdenticalObjectComparison()
260 __ LoadImmP(r2, Operand(LESS)); // Things aren't greater than themselves. in EmitIdenticalObjectComparison()
262 __ LoadImmP(r2, Operand(EQUAL)); // Things are <=, >=, ==, === themselves in EmitIdenticalObjectComparison()
264 __ Ret(); in EmitIdenticalObjectComparison()
270 __ bind(&heap_number); in EmitIdenticalObjectComparison()
277 __ LoadlW(r4, FieldMemOperand(r2, HeapNumber::kExponentOffset)); in EmitIdenticalObjectComparison()
280 __ ExtractBitMask(r5, r4, HeapNumber::kExponentMask); in EmitIdenticalObjectComparison()
281 __ CmpLogicalP(r5, Operand(0x7ff)); in EmitIdenticalObjectComparison()
282 __ bne(&return_equal); in EmitIdenticalObjectComparison()
285 __ sll(r4, Operand(HeapNumber::kNonMantissaBitsInTopWord)); in EmitIdenticalObjectComparison()
287 __ LoadlW(r5, FieldMemOperand(r2, HeapNumber::kMantissaOffset)); in EmitIdenticalObjectComparison()
288 __ OrP(r2, r5, r4); in EmitIdenticalObjectComparison()
289 __ CmpP(r2, Operand::Zero()); in EmitIdenticalObjectComparison()
296 __ bne(&not_equal, Label::kNear); in EmitIdenticalObjectComparison()
298 __ Ret(); in EmitIdenticalObjectComparison()
299 __ bind(&not_equal); in EmitIdenticalObjectComparison()
301 __ LoadImmP(r2, Operand(GREATER)); // NaN <= NaN should fail. in EmitIdenticalObjectComparison()
303 __ LoadImmP(r2, Operand(LESS)); // NaN >= NaN should fail. in EmitIdenticalObjectComparison()
306 __ Ret(); in EmitIdenticalObjectComparison()
310 __ bind(&not_identical); in EmitIdenticalObjectComparison()
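
In the heap-number arm above, x === x can only fail for NaN, so the code inspects the raw bits: an exponent field of all ones (0x7ff) with a nonzero mantissa means NaN and even an identical comparison must report not-equal; an all-zero mantissa would be infinity, which is equal to itself. The bit test, sketched:

    #include <cstdint>

    // NaN detection on raw IEEE-754 double bits.
    bool IsNaNBits(uint64_t bits) {
      uint32_t exponent = static_cast<uint32_t>((bits >> 52) & 0x7ff);
      uint64_t mantissa = bits & ((uint64_t{1} << 52) - 1);
      return exponent == 0x7ff && mantissa != 0;
    }
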
320 __ JumpIfSmi(rhs, &rhs_is_smi); in EmitSmiNonsmiComparison()
323 __ CompareObjectType(rhs, r5, r6, HEAP_NUMBER_TYPE); in EmitSmiNonsmiComparison()
329 __ beq(&skip, Label::kNear); in EmitSmiNonsmiComparison()
331 __ mov(r2, Operand(NOT_EQUAL)); in EmitSmiNonsmiComparison()
333 __ Ret(); in EmitSmiNonsmiComparison()
334 __ bind(&skip); in EmitSmiNonsmiComparison()
338 __ bne(slow); in EmitSmiNonsmiComparison()
343 __ SmiToDouble(d7, lhs); in EmitSmiNonsmiComparison()
345 __ LoadDouble(d6, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
349 __ b(lhs_not_nan); in EmitSmiNonsmiComparison()
351 __ bind(&rhs_is_smi); in EmitSmiNonsmiComparison()
353 __ CompareObjectType(lhs, r6, r6, HEAP_NUMBER_TYPE); in EmitSmiNonsmiComparison()
359 __ beq(&skip, Label::kNear); in EmitSmiNonsmiComparison()
361 __ mov(r2, Operand(NOT_EQUAL)); in EmitSmiNonsmiComparison()
363 __ Ret(); in EmitSmiNonsmiComparison()
364 __ bind(&skip); in EmitSmiNonsmiComparison()
368 __ bne(slow); in EmitSmiNonsmiComparison()
373 __ LoadDouble(d7, FieldMemOperand(lhs, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
375 __ SmiToDouble(d6, rhs); in EmitSmiNonsmiComparison()
391 __ CompareObjectType(rhs, r4, r4, FIRST_JS_RECEIVER_TYPE); in EmitStrictTwoHeapObjectCompare()
392 __ blt(&first_non_object, Label::kNear); in EmitStrictTwoHeapObjectCompare()
396 __ bind(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
397 __ Ret(); in EmitStrictTwoHeapObjectCompare()
399 __ bind(&first_non_object); in EmitStrictTwoHeapObjectCompare()
401 __ CmpP(r4, Operand(ODDBALL_TYPE)); in EmitStrictTwoHeapObjectCompare()
402 __ beq(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
404 __ CompareObjectType(lhs, r5, r5, FIRST_JS_RECEIVER_TYPE); in EmitStrictTwoHeapObjectCompare()
405 __ bge(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
408 __ CmpP(r5, Operand(ODDBALL_TYPE)); in EmitStrictTwoHeapObjectCompare()
409 __ beq(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
414 __ OrP(r4, r4, r5); in EmitStrictTwoHeapObjectCompare()
415 __ AndP(r0, r4, Operand(kIsNotStringMask | kIsNotInternalizedMask)); in EmitStrictTwoHeapObjectCompare()
416 __ beq(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
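
The closing OrP/AndP pair tests both instance types in one step: kIsNotStringMask and kIsNotInternalizedMask are "not" flags, so OR-ing the two type bytes and masking yields zero exactly when both operands are internalized strings, and two pointer-distinct internalized strings can then be declared unequal without comparing characters. Sketch (the mask values are illustrative stand-ins for V8's instance-type constants):

    #include <cstdint>

    constexpr uint32_t kIsNotStringMask = 0x80;        // assumed layout
    constexpr uint32_t kIsNotInternalizedMask = 0x40;  // assumed layout

    bool BothInternalizedStrings(uint32_t type1, uint32_t type2) {
      return ((type1 | type2) &
              (kIsNotStringMask | kIsNotInternalizedMask)) == 0;
    }
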
426 __ CompareObjectType(rhs, r5, r4, HEAP_NUMBER_TYPE); in EmitCheckForTwoHeapNumbers()
427 __ bne(not_heap_numbers); in EmitCheckForTwoHeapNumbers()
428 __ LoadP(r4, FieldMemOperand(lhs, HeapObject::kMapOffset)); in EmitCheckForTwoHeapNumbers()
429 __ CmpP(r4, r5); in EmitCheckForTwoHeapNumbers()
430 __ bne(slow); // First was a heap number, second wasn't. Go slow case. in EmitCheckForTwoHeapNumbers()
434 __ LoadDouble(d6, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in EmitCheckForTwoHeapNumbers()
435 __ LoadDouble(d7, FieldMemOperand(lhs, HeapNumber::kValueOffset)); in EmitCheckForTwoHeapNumbers()
437 __ b(both_loaded_as_doubles); in EmitCheckForTwoHeapNumbers()
452 __ mov(r0, Operand(kIsNotStringMask)); in EmitCheckForInternalizedStringsOrObjects()
453 __ AndP(r0, r4); in EmitCheckForInternalizedStringsOrObjects()
454 __ bne(&object_test, Label::kNear); in EmitCheckForInternalizedStringsOrObjects()
455 __ mov(r0, Operand(kIsNotInternalizedMask)); in EmitCheckForInternalizedStringsOrObjects()
456 __ AndP(r0, r4); in EmitCheckForInternalizedStringsOrObjects()
457 __ bne(possible_strings); in EmitCheckForInternalizedStringsOrObjects()
458 __ CompareObjectType(lhs, r5, r5, FIRST_NONSTRING_TYPE); in EmitCheckForInternalizedStringsOrObjects()
459 __ bge(runtime_call); in EmitCheckForInternalizedStringsOrObjects()
460 __ mov(r0, Operand(kIsNotInternalizedMask)); in EmitCheckForInternalizedStringsOrObjects()
461 __ AndP(r0, r5); in EmitCheckForInternalizedStringsOrObjects()
462 __ bne(possible_strings); in EmitCheckForInternalizedStringsOrObjects()
467 __ Ret(); in EmitCheckForInternalizedStringsOrObjects()
469 __ bind(&object_test); in EmitCheckForInternalizedStringsOrObjects()
470 __ LoadP(r4, FieldMemOperand(lhs, HeapObject::kMapOffset)); in EmitCheckForInternalizedStringsOrObjects()
471 __ LoadP(r5, FieldMemOperand(rhs, HeapObject::kMapOffset)); in EmitCheckForInternalizedStringsOrObjects()
472 __ LoadlB(r6, FieldMemOperand(r4, Map::kBitFieldOffset)); in EmitCheckForInternalizedStringsOrObjects()
473 __ LoadlB(r7, FieldMemOperand(r5, Map::kBitFieldOffset)); in EmitCheckForInternalizedStringsOrObjects()
474 __ AndP(r0, r6, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
475 __ bne(&undetectable); in EmitCheckForInternalizedStringsOrObjects()
476 __ AndP(r0, r7, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
477 __ bne(&return_unequal); in EmitCheckForInternalizedStringsOrObjects()
479 __ CompareInstanceType(r4, r4, FIRST_JS_RECEIVER_TYPE); in EmitCheckForInternalizedStringsOrObjects()
480 __ blt(runtime_call); in EmitCheckForInternalizedStringsOrObjects()
481 __ CompareInstanceType(r5, r5, FIRST_JS_RECEIVER_TYPE); in EmitCheckForInternalizedStringsOrObjects()
482 __ blt(runtime_call); in EmitCheckForInternalizedStringsOrObjects()
484 __ bind(&return_unequal); in EmitCheckForInternalizedStringsOrObjects()
486 __ Ret(); in EmitCheckForInternalizedStringsOrObjects()
488 __ bind(&undetectable); in EmitCheckForInternalizedStringsOrObjects()
489 __ AndP(r0, r7, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
490 __ beq(&return_unequal); in EmitCheckForInternalizedStringsOrObjects()
495 __ CompareInstanceType(r4, r4, ODDBALL_TYPE); in EmitCheckForInternalizedStringsOrObjects()
496 __ beq(&return_equal); in EmitCheckForInternalizedStringsOrObjects()
497 __ CompareInstanceType(r5, r5, ODDBALL_TYPE); in EmitCheckForInternalizedStringsOrObjects()
498 __ bne(&return_unequal); in EmitCheckForInternalizedStringsOrObjects()
500 __ bind(&return_equal); in EmitCheckForInternalizedStringsOrObjects()
501 __ LoadImmP(r2, Operand(EQUAL)); in EmitCheckForInternalizedStringsOrObjects()
502 __ Ret(); in EmitCheckForInternalizedStringsOrObjects()
511 __ JumpIfNotSmi(input, fail); in CompareICStub_CheckInputType()
513 __ JumpIfSmi(input, &ok); in CompareICStub_CheckInputType()
514 __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail, in CompareICStub_CheckInputType()
519 __ bind(&ok); in CompareICStub_CheckInputType()
538 __ OrP(r4, r3, r2); in GenerateGeneric()
539 __ JumpIfNotSmi(r4, &not_two_smis); in GenerateGeneric()
540 __ SmiUntag(r3); in GenerateGeneric()
541 __ SmiUntag(r2); in GenerateGeneric()
542 __ SubP(r2, r3, r2); in GenerateGeneric()
543 __ Ret(); in GenerateGeneric()
544 __ bind(&not_two_smis); in GenerateGeneric()
557 __ AndP(r4, lhs, rhs); in GenerateGeneric()
558 __ JumpIfNotSmi(r4, &not_smis); in GenerateGeneric()
569 __ bind(&both_loaded_as_doubles); in GenerateGeneric()
571 __ bind(&lhs_not_nan); in GenerateGeneric()
573 __ cdbr(d7, d6); in GenerateGeneric()
576 __ bunordered(&nan); in GenerateGeneric()
577 __ beq(&equal, Label::kNear); in GenerateGeneric()
578 __ blt(&less_than, Label::kNear); in GenerateGeneric()
579 __ LoadImmP(r2, Operand(GREATER)); in GenerateGeneric()
580 __ Ret(); in GenerateGeneric()
581 __ bind(&equal); in GenerateGeneric()
582 __ LoadImmP(r2, Operand(EQUAL)); in GenerateGeneric()
583 __ Ret(); in GenerateGeneric()
584 __ bind(&less_than); in GenerateGeneric()
585 __ LoadImmP(r2, Operand(LESS)); in GenerateGeneric()
586 __ Ret(); in GenerateGeneric()
588 __ bind(&nan); in GenerateGeneric()
593 __ LoadImmP(r2, Operand(GREATER)); in GenerateGeneric()
595 __ LoadImmP(r2, Operand(LESS)); in GenerateGeneric()
597 __ Ret(); in GenerateGeneric()
599 __ bind(&not_smis); in GenerateGeneric()
619 __ bind(&check_for_internalized_strings); in GenerateGeneric()
632 __ bind(&flat_string_check); in GenerateGeneric()
634 __ JumpIfNonSmisNotBothSequentialOneByteStrings(lhs, rhs, r4, r5, &slow); in GenerateGeneric()
636 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, r4, in GenerateGeneric()
645 __ bind(&slow); in GenerateGeneric()
650 __ Push(lhs, rhs); in GenerateGeneric()
651 __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual); in GenerateGeneric()
655 __ LoadRoot(r3, Heap::kTrueValueRootIndex); in GenerateGeneric()
656 __ SubP(r2, r2, r3); in GenerateGeneric()
657 __ Ret(); in GenerateGeneric()
659 __ Push(lhs, rhs); in GenerateGeneric()
667 __ LoadSmiLiteral(r2, Smi::FromInt(ncr)); in GenerateGeneric()
668 __ push(r2); in GenerateGeneric()
672 __ TailCallRuntime(Runtime::kCompare); in GenerateGeneric()
675 __ bind(&miss); in GenerateGeneric()
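
The fast path at the top of GenerateGeneric (the OrP / JumpIfNotSmi / SmiUntag / SubP run) compares two smis without touching the heap: OR-ing the operands leaves the tag bit clear only when both are smis, and the comparison result is simply the difference of the untagged values. Sketch, assuming a 1-bit smi tag in the low bit (64-bit V8 actually keeps the payload in the upper 32 bits):

    #include <cstdint>

    bool BothSmis(intptr_t a, intptr_t b) { return ((a | b) & 1) == 0; }

    // Negative: less, zero: equal, positive: greater.
    intptr_t CompareTwoSmis(intptr_t lhs, intptr_t rhs) {
      return (lhs >> 1) - (rhs >> 1);  // SmiUntag both, then subtract
    }
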
683 __ MultiPush(kJSCallerSaved | r14.bit()); in Generate()
685 __ MultiPushDoubles(kCallerSavedDoubles); in Generate()
692 __ PrepareCallCFunction(argument_count, fp_argument_count, scratch); in Generate()
693 __ mov(r2, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
694 __ CallCFunction(ExternalReference::store_buffer_overflow_function(isolate()), in Generate()
697 __ MultiPopDoubles(kCallerSavedDoubles); in Generate()
699 __ MultiPop(kJSCallerSaved | r14.bit()); in Generate()
700 __ Ret(); in Generate()
704 __ PushSafepointRegisters(); in Generate()
705 __ b(r14); in Generate()
709 __ PopSafepointRegisters(); in Generate()
710 __ b(r14); in Generate()
726 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent); in Generate()
728 __ LoadDouble(double_exponent, in Generate()
734 __ TryDoubleToInt32Exact(scratch, double_exponent, scratch2, in Generate()
736 __ beq(&int_exponent, Label::kNear); in Generate()
738 __ push(r14); in Generate()
741 __ PrepareCallCFunction(0, 2, scratch); in Generate()
742 __ MovToFloatParameters(double_base, double_exponent); in Generate()
743 __ CallCFunction( in Generate()
746 __ pop(r14); in Generate()
747 __ MovFromFloatResult(double_result); in Generate()
748 __ b(&done); in Generate()
752 __ bind(&int_exponent); in Generate()
756 __ LoadRR(scratch, exponent); in Generate()
759 __ LoadRR(exponent, scratch); in Generate()
761 __ ldr(double_scratch, double_base); // Back up base. in Generate()
762 __ LoadImmP(scratch2, Operand(1)); in Generate()
763 __ ConvertIntToDouble(scratch2, double_result); in Generate()
767 __ CmpP(scratch, Operand::Zero()); in Generate()
768 __ bge(&positive_exponent, Label::kNear); in Generate()
769 __ LoadComplementRR(scratch, scratch); in Generate()
770 __ bind(&positive_exponent); in Generate()
773 __ bind(&while_true); in Generate()
774 __ mov(scratch2, Operand(1)); in Generate()
775 __ AndP(scratch2, scratch); in Generate()
776 __ beq(&no_carry, Label::kNear); in Generate()
777 __ mdbr(double_result, double_scratch); in Generate()
778 __ bind(&no_carry); in Generate()
779 __ ShiftRightP(scratch, scratch, Operand(1)); in Generate()
780 __ LoadAndTestP(scratch, scratch); in Generate()
781 __ beq(&loop_end, Label::kNear); in Generate()
782 __ mdbr(double_scratch, double_scratch); in Generate()
783 __ b(&while_true); in Generate()
784 __ bind(&loop_end); in Generate()
786 __ CmpP(exponent, Operand::Zero()); in Generate()
787 __ bge(&done); in Generate()
790 __ ldr(double_scratch, double_result); in Generate()
791 __ LoadImmP(scratch2, Operand(1)); in Generate()
792 __ ConvertIntToDouble(scratch2, double_result); in Generate()
793 __ ddbr(double_result, double_scratch); in Generate()
797 __ lzdr(kDoubleRegZero); in Generate()
798 __ cdbr(double_result, kDoubleRegZero); in Generate()
799 __ bne(&done, Label::kNear); in Generate()
802 __ ConvertIntToDouble(exponent, double_exponent); in Generate()
805 __ push(r14); in Generate()
808 __ PrepareCallCFunction(0, 2, scratch); in Generate()
809 __ MovToFloatParameters(double_base, double_exponent); in Generate()
810 __ CallCFunction( in Generate()
813 __ pop(r14); in Generate()
814 __ MovFromFloatResult(double_result); in Generate()
816 __ bind(&done); in Generate()
817 __ Ret(); in Generate()
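
The integer-exponent loop in this MathPowStub body (&while_true / &no_carry / &loop_end) is binary exponentiation: for each exponent bit, multiply the running result in when the bit is set and square the base; a negative exponent exponentiates the absolute value and takes the reciprocal, bailing out to the C library when the reciprocal is exactly zero because the true answer may be subnormal. A sketch:

    #include <cmath>

    double PowInt(double base, int exponent) {
      unsigned n = exponent < 0 ? 0u - static_cast<unsigned>(exponent)
                                : static_cast<unsigned>(exponent);
      double result = 1.0;
      double b = base;
      while (n != 0) {
        if (n & 1) result *= b;  // this exponent bit is set
        n >>= 1;
        if (n != 0) b *= b;      // square the base for the next bit
      }
      if (exponent < 0) {
        result = 1.0 / result;
        // Stand-in for the stub's CallCFunction slow path: recompute
        // precisely when the reciprocal underflowed to zero.
        if (result == 0.0) result = std::pow(base, exponent);
      }
      return result;
    }
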
870 __ LoadRR(r7, r3); in Generate()
874 __ LoadRR(r3, r4); in Generate()
877 __ ShiftLeftP(r3, r2, Operand(kPointerSizeLog2)); in Generate()
878 __ lay(r3, MemOperand(r3, sp, -kPointerSize)); in Generate()
900 __ EnterExitFrame(save_doubles(), arg_stack_space, is_builtin_exit() in Generate()
905 __ LoadRR(r6, r2); in Generate()
906 __ LoadRR(r8, r3); in Generate()
919 __ LoadRR(r4, r3); in Generate()
920 __ LoadRR(r3, r2); in Generate()
921 __ la(r2, MemOperand(sp, (kStackFrameExtraParamSlot + 1) * kPointerSize)); in Generate()
925 __ mov(isolate_reg, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
935 __ larl(r14, &return_label); // Generate the return addr of call later. in Generate()
936 __ StoreP(r14, MemOperand(sp, kStackFrameRASlot * kPointerSize)); in Generate()
940 // __ lay(sp, MemOperand(sp, -kCalleeRegisterSaveAreaSize)); in Generate()
941 __ b(target); in Generate()
942 __ bind(&return_label); in Generate()
943 // __ la(sp, MemOperand(sp, +kCalleeRegisterSaveAreaSize)); in Generate()
948 if (result_size() > 2) __ LoadP(r4, MemOperand(r2, 2 * kPointerSize)); in Generate()
949 __ LoadP(r3, MemOperand(r2, kPointerSize)); in Generate()
950 __ LoadP(r2, MemOperand(r2)); in Generate()
955 __ CompareRoot(r2, Heap::kExceptionRootIndex); in Generate()
956 __ beq(&exception_returned, Label::kNear); in Generate()
964 __ mov(r1, Operand(pending_exception_address)); in Generate()
965 __ LoadP(r1, MemOperand(r1)); in Generate()
966 __ CompareRoot(r1, Heap::kTheHoleValueRootIndex); in Generate()
968 __ beq(&okay, Label::kNear); in Generate()
969 __ stop("Unexpected pending exception"); in Generate()
970 __ bind(&okay); in Generate()
985 __ LeaveExitFrame(save_doubles(), argc, true); in Generate()
986 __ b(r14); in Generate()
989 __ bind(&exception_returned); in Generate()
1008 __ PrepareCallCFunction(3, 0, r2); in Generate()
1009 __ LoadImmP(r2, Operand::Zero()); in Generate()
1010 __ LoadImmP(r3, Operand::Zero()); in Generate()
1011 __ mov(r4, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
1012 __ CallCFunction(find_handler, 3); in Generate()
1016 __ mov(cp, Operand(pending_handler_context_address)); in Generate()
1017 __ LoadP(cp, MemOperand(cp)); in Generate()
1018 __ mov(sp, Operand(pending_handler_sp_address)); in Generate()
1019 __ LoadP(sp, MemOperand(sp)); in Generate()
1020 __ mov(fp, Operand(pending_handler_fp_address)); in Generate()
1021 __ LoadP(fp, MemOperand(fp)); in Generate()
1026 __ CmpP(cp, Operand::Zero()); in Generate()
1027 __ beq(&skip, Label::kNear); in Generate()
1028 __ StoreP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); in Generate()
1029 __ bind(&skip); in Generate()
1032 __ mov(r3, Operand(pending_handler_code_address)); in Generate()
1033 __ LoadP(r3, MemOperand(r3)); in Generate()
1034 __ mov(r4, Operand(pending_handler_offset_address)); in Generate()
1035 __ LoadP(r4, MemOperand(r4)); in Generate()
1036 __ AddP(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start in Generate()
1037 __ AddP(ip, r3, r4); in Generate()
1038 __ Jump(ip); in Generate()
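
The exception tail above restores cp, sp and fp from the isolate's pending-handler slots and resumes at "code start + handler offset"; since Code is a tagged heap pointer, the first instruction lives Code::kHeaderSize - kHeapObjectTag bytes past the raw pointer, which is what the two AddP lines compute before Jump(ip). The arithmetic, sketched with symbolic stand-ins for V8's constants:

    #include <cstdint>

    uintptr_t HandlerResumeAddress(uintptr_t tagged_code,
                                   uintptr_t handler_offset,
                                   uintptr_t code_header_size,
                                   uintptr_t heap_object_tag) {
      uintptr_t code_entry = tagged_code + code_header_size - heap_object_tag;
      return code_entry + handler_offset;
    }
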
1055 __ lay(sp, MemOperand(sp, -8 * kDoubleSize)); in Generate()
1056 __ std(d8, MemOperand(sp)); in Generate()
1057 __ std(d9, MemOperand(sp, 1 * kDoubleSize)); in Generate()
1058 __ std(d10, MemOperand(sp, 2 * kDoubleSize)); in Generate()
1059 __ std(d11, MemOperand(sp, 3 * kDoubleSize)); in Generate()
1060 __ std(d12, MemOperand(sp, 4 * kDoubleSize)); in Generate()
1061 __ std(d13, MemOperand(sp, 5 * kDoubleSize)); in Generate()
1062 __ std(d14, MemOperand(sp, 6 * kDoubleSize)); in Generate()
1063 __ std(d15, MemOperand(sp, 7 * kDoubleSize)); in Generate()
1067 __ lay(sp, MemOperand(sp, -2 * kDoubleSize)); in Generate()
1068 __ std(d4, MemOperand(sp)); in Generate()
1069 __ std(d6, MemOperand(sp, kDoubleSize)); in Generate()
1082 __ lay(sp, MemOperand(sp, -10 * kPointerSize)); in Generate()
1083 __ StoreMultipleP(r6, sp, MemOperand(sp, 0)); in Generate()
1086 // __ LoadDoubleLiteral(kDoubleRegZero, 0.0, r0); in Generate()
1094 __ lay(sp, MemOperand(sp, -5 * kPointerSize)); in Generate()
1096 __ LoadImmP(r10, Operand(-1)); in Generate()
1099 __ LoadSmiLiteral(r9, Smi::FromInt(marker)); in Generate()
1100 __ LoadSmiLiteral(r8, Smi::FromInt(marker)); in Generate()
1102 __ mov(r7, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); in Generate()
1103 __ LoadP(r7, MemOperand(r7)); in Generate()
1104 __ StoreMultipleP(r7, r10, MemOperand(sp, kPointerSize)); in Generate()
1108 __ lay(fp, in Generate()
1114 __ mov(r7, Operand(ExternalReference(js_entry_sp))); in Generate()
1115 __ LoadAndTestP(r8, MemOperand(r7)); in Generate()
1116 __ bne(&non_outermost_js, Label::kNear); in Generate()
1117 __ StoreP(fp, MemOperand(r7)); in Generate()
1118 __ LoadSmiLiteral(ip, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); in Generate()
1120 __ b(&cont, Label::kNear); in Generate()
1121 __ bind(&non_outermost_js); in Generate()
1122 __ LoadSmiLiteral(ip, Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)); in Generate()
1124 __ bind(&cont); in Generate()
1125 __ StoreP(ip, MemOperand(sp)); // frame-type in Generate()
1129 __ b(&invoke, Label::kNear); in Generate()
1131 __ bind(&handler_entry); in Generate()
1137 __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in Generate()
1140 __ StoreP(r2, MemOperand(ip)); in Generate()
1141 __ LoadRoot(r2, Heap::kExceptionRootIndex); in Generate()
1142 __ b(&exit, Label::kNear); in Generate()
1145 __ bind(&invoke); in Generate()
1147 __ PushStackHandler(); in Generate()
1166 __ mov(ip, Operand(construct_entry)); in Generate()
1169 __ mov(ip, Operand(entry)); in Generate()
1171 __ LoadP(ip, MemOperand(ip)); // deref address in Generate()
1175 __ AddP(ip, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1177 // __ basr(r14, ip); in Generate()
1178 __ larl(r14, &return_addr); in Generate()
1179 __ b(ip); in Generate()
1180 __ bind(&return_addr); in Generate()
1183 __ PopStackHandler(); in Generate()
1185 __ bind(&exit); // r2 holds result in Generate()
1188 __ pop(r7); in Generate()
1189 __ CmpSmiLiteral(r7, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME), r0); in Generate()
1190 __ bne(&non_outermost_js_2, Label::kNear); in Generate()
1191 __ mov(r8, Operand::Zero()); in Generate()
1192 __ mov(r7, Operand(ExternalReference(js_entry_sp))); in Generate()
1193 __ StoreP(r8, MemOperand(r7)); in Generate()
1194 __ bind(&non_outermost_js_2); in Generate()
1197 __ pop(r5); in Generate()
1198 __ mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); in Generate()
1199 __ StoreP(r5, MemOperand(ip)); in Generate()
1202 __ lay(sp, MemOperand(sp, -EntryFrameConstants::kCallerFPOffset)); in Generate()
1205 __ LoadMultipleP(r6, sp, MemOperand(sp, 0)); in Generate()
1206 __ la(sp, MemOperand(sp, 10 * kPointerSize)); in Generate()
1211 __ ld(d8, MemOperand(sp)); in Generate()
1212 __ ld(d9, MemOperand(sp, 1 * kDoubleSize)); in Generate()
1213 __ ld(d10, MemOperand(sp, 2 * kDoubleSize)); in Generate()
1214 __ ld(d11, MemOperand(sp, 3 * kDoubleSize)); in Generate()
1215 __ ld(d12, MemOperand(sp, 4 * kDoubleSize)); in Generate()
1216 __ ld(d13, MemOperand(sp, 5 * kDoubleSize)); in Generate()
1217 __ ld(d14, MemOperand(sp, 6 * kDoubleSize)); in Generate()
1218 __ ld(d15, MemOperand(sp, 7 * kDoubleSize)); in Generate()
1219 __ la(sp, MemOperand(sp, 8 * kDoubleSize)); in Generate()
1223 __ ld(d4, MemOperand(sp)); in Generate()
1224 __ ld(d6, MemOperand(sp, kDoubleSize)); in Generate()
1225 __ la(sp, MemOperand(sp, 2 * kDoubleSize)); in Generate()
1228 __ b(r14); in Generate()
1241 __ bind(&miss); in Generate()
1267 __ Ret(); in Generate()
1272 __ bind(&miss); in Generate()
1282 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
1308 __ CleanseP(r14); in Generate()
1321 __ mov(r2, Operand(address_of_regexp_stack_memory_size)); in Generate()
1322 __ LoadAndTestP(r2, MemOperand(r2)); in Generate()
1323 __ beq(&runtime); in Generate()
1326 __ LoadP(r2, MemOperand(sp, kJSRegExpOffset)); in Generate()
1327 __ JumpIfSmi(r2, &runtime); in Generate()
1328 __ CompareObjectType(r2, r3, r3, JS_REGEXP_TYPE); in Generate()
1329 __ bne(&runtime); in Generate()
1332 __ LoadP(regexp_data, FieldMemOperand(r2, JSRegExp::kDataOffset)); in Generate()
1334 __ TestIfSmi(regexp_data); in Generate()
1335 __ Check(ne, kUnexpectedTypeForRegExpDataFixedArrayExpected, cr0); in Generate()
1336 __ CompareObjectType(regexp_data, r2, r2, FIXED_ARRAY_TYPE); in Generate()
1337 __ Check(eq, kUnexpectedTypeForRegExpDataFixedArrayExpected); in Generate()
1342 __ LoadP(r2, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset)); in Generate()
1344 __ CmpSmiLiteral(r2, Smi::FromInt(JSRegExp::IRREGEXP), r0); in Generate()
1345 __ bne(&runtime); in Generate()
1349 __ LoadP(r4, in Generate()
1355 __ SmiToShortArrayOffset(r4, r4); in Generate()
1357 __ CmpLogicalP(r4, Operand(Isolate::kJSRegexpStaticOffsetsVectorSize - 2)); in Generate()
1358 __ bgt(&runtime); in Generate()
1361 __ LoadImmP(ip, Operand::Zero()); in Generate()
1362 __ LoadP(subject, MemOperand(sp, kSubjectOffset)); in Generate()
1363 __ JumpIfSmi(subject, &runtime); in Generate()
1364 __ LoadRR(r5, subject); // Make a copy of the original subject string. in Generate()
1387 __ bind(&check_underlying); in Generate()
1388 __ LoadP(r2, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
1389 __ LoadlB(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset)); in Generate()
1395 __ mov(r3, Operand(kIsNotStringMask | kStringRepresentationMask | in Generate()
1397 __ AndP(r3, r2); in Generate()
1399 __ beq(&seq_string, Label::kNear); // Go to (4). in Generate()
1407 __ CmpP(r3, Operand(kExternalStringTag)); in Generate()
1408 __ bge(&not_seq_nor_cons); // Go to (5). in Generate()
1412 __ LoadP(r2, FieldMemOperand(subject, ConsString::kSecondOffset)); in Generate()
1413 __ CompareRoot(r2, Heap::kempty_stringRootIndex); in Generate()
1414 __ bne(&runtime); in Generate()
1415 __ LoadP(subject, FieldMemOperand(subject, ConsString::kFirstOffset)); in Generate()
1416 __ b(&check_underlying); in Generate()
1419 __ bind(&seq_string); in Generate()
1425 __ LoadP(r3, MemOperand(sp, kPreviousIndexOffset)); in Generate()
1426 __ JumpIfNotSmi(r3, &runtime); in Generate()
1427 __ LoadP(r5, FieldMemOperand(r5, String::kLengthOffset)); in Generate()
1428 __ CmpLogicalP(r5, r3); in Generate()
1429 __ ble(&runtime); in Generate()
1430 __ SmiUntag(r3); in Generate()
1435 __ ExtractBitMask(r5, r2, kStringEncodingMask, SetRC); in Generate()
1436 __ beq(&encoding_type_UC16, Label::kNear); in Generate()
1437 __ LoadP(code, in Generate()
1439 __ b(&br_over, Label::kNear); in Generate()
1440 __ bind(&encoding_type_UC16); in Generate()
1441 __ LoadP(code, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset)); in Generate()
1442 __ bind(&br_over); in Generate()
1449 __ JumpIfSmi(code, &runtime); in Generate()
1457 __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1, r2, r4); in Generate()
1462 __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters); in Generate()
1468 __ mov(r2, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
1469 __ StoreP(r2, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize + in Generate()
1474 __ mov(r2, Operand::Zero()); in Generate()
1475 __ StoreP(r2, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize + in Generate()
1479 __ mov(r2, Operand(1)); in Generate()
1480 __ StoreP(r2, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize + in Generate()
1484 __ mov(r2, Operand(address_of_regexp_stack_memory_address)); in Generate()
1485 __ LoadP(r2, MemOperand(r2, 0)); in Generate()
1486 __ mov(r1, Operand(address_of_regexp_stack_memory_size)); in Generate()
1487 __ LoadP(r1, MemOperand(r1, 0)); in Generate()
1488 __ AddP(r2, r1); in Generate()
1489 __ StoreP(r2, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize + in Generate()
1495 __ mov(r2, Operand::Zero()); in Generate()
1496 __ StoreP(r2, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize + in Generate()
1505 __ LoadP(r2, MemOperand(fp, kSubjectOffset + 2 * kPointerSize)); in Generate()
1509 __ AddP(r1, subject, Operand(SeqString::kHeaderSize - kHeapObjectTag)); in Generate()
1512 __ mov( in Generate()
1518 __ XorP(r5, Operand(1)); in Generate()
1522 __ ShiftLeftP(ip, ip, r5); in Generate()
1523 __ AddP(ip, r1, ip); in Generate()
1524 __ ShiftLeftP(r4, r3, r5); in Generate()
1525 __ AddP(r4, ip, r4); in Generate()
1528 __ LoadP(r1, FieldMemOperand(r2, String::kLengthOffset)); in Generate()
1529 __ SmiUntag(r1); in Generate()
1530 __ ShiftLeftP(r0, r1, r5); in Generate()
1531 __ AddP(r5, ip, r0); in Generate()
1534 __ AddP(code, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1539 __ LeaveExitFrame(false, no_reg, true); in Generate()
1543 __ LoadP(subject, MemOperand(sp, kSubjectOffset)); in Generate()
1549 __ Cmp32(r2, Operand(1)); in Generate()
1552 __ beq(&success); in Generate()
1554 __ Cmp32(r2, Operand(NativeRegExpMacroAssembler::FAILURE)); in Generate()
1555 __ beq(&failure); in Generate()
1556 __ Cmp32(r2, Operand(NativeRegExpMacroAssembler::EXCEPTION)); in Generate()
1558 __ bne(&runtime); in Generate()
1563 __ mov(r3, Operand(isolate()->factory()->the_hole_value())); in Generate()
1564 __ mov(r4, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in Generate()
1566 __ LoadP(r2, MemOperand(r4, 0)); in Generate()
1567 __ CmpP(r2, r3); in Generate()
1568 __ beq(&runtime); in Generate()
1571 __ TailCallRuntime(Runtime::kRegExpExecReThrow); in Generate()
1573 __ bind(&failure); in Generate()
1575 __ mov(r2, Operand(isolate()->factory()->null_value())); in Generate()
1576 __ la(sp, MemOperand(sp, (4 * kPointerSize))); in Generate()
1577 __ Ret(); in Generate()
1580 __ bind(&success); in Generate()
1581 __ LoadP(r3, in Generate()
1586 __ SmiToShortArrayOffset(r3, r3); in Generate()
1587 __ AddP(r3, Operand(2)); in Generate()
1590 __ LoadP(last_match_info_elements, MemOperand(sp, kLastMatchInfoOffset)); in Generate()
1591 __ JumpIfSmi(last_match_info_elements, &runtime); in Generate()
1593 __ LoadP(r2, in Generate()
1595 __ CompareRoot(r2, Heap::kFixedArrayMapRootIndex); in Generate()
1596 __ bne(&runtime); in Generate()
1599 __ LoadP( in Generate()
1601 __ AddP(r4, r3, Operand(RegExpMatchInfo::kLastMatchOverhead)); in Generate()
1602 __ SmiUntag(r0, r2); in Generate()
1603 __ CmpP(r4, r0); in Generate()
1604 __ bgt(&runtime); in Generate()
1609 __ SmiTag(r4, r3); in Generate()
1610 __ StoreP(r4, FieldMemOperand(last_match_info_elements, in Generate()
1613 __ StoreP(subject, FieldMemOperand(last_match_info_elements, in Generate()
1615 __ LoadRR(r4, subject); in Generate()
1616 __ RecordWriteField(last_match_info_elements, in Generate()
1619 __ LoadRR(subject, r4); in Generate()
1620 __ StoreP(subject, FieldMemOperand(last_match_info_elements, in Generate()
1622 __ RecordWriteField(last_match_info_elements, in Generate()
1629 __ mov(r4, Operand(address_of_static_offsets_vector)); in Generate()
1636 __ AddP(r2, last_match_info_elements, in Generate()
1639 __ AddP(r4, Operand(-kIntSize)); // bias down for lwzu in Generate()
1640 __ bind(&next_capture); in Generate()
1642 __ ly(r5, MemOperand(r4, kIntSize)); in Generate()
1643 __ lay(r4, MemOperand(r4, kIntSize)); in Generate()
1645 __ SmiTag(r5); in Generate()
1646 __ StoreP(r5, MemOperand(r2, kPointerSize)); in Generate()
1647 __ lay(r2, MemOperand(r2, kPointerSize)); in Generate()
1648 __ BranchOnCount(r3, &next_capture); in Generate()
1651 __ LoadRR(r2, last_match_info_elements); in Generate()
1652 __ la(sp, MemOperand(sp, (4 * kPointerSize))); in Generate()
1653 __ Ret(); in Generate()
1656 __ bind(&runtime); in Generate()
1657 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
1661 __ bind(&not_seq_nor_cons); in Generate()
1663 __ bgt(&not_long_external, Label::kNear); // Go to (7). in Generate()
1666 __ bind(&external_string); in Generate()
1667 __ LoadP(r2, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
1668 __ LoadlB(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset)); in Generate()
1673 __ tmll(r2, Operand(kIsIndirectStringMask)); in Generate()
1674 __ Assert(eq, kExternalStringExpectedButNotFound, cr0); in Generate()
1676 __ LoadP(subject, in Generate()
1680 __ SubP(subject, subject, in Generate()
1682 __ b(&seq_string); // Go to (4). in Generate()
1685 __ bind(&not_long_external); in Generate()
1687 __ mov(r0, Operand(kIsNotStringMask | kShortExternalStringMask)); in Generate()
1688 __ AndP(r0, r3); in Generate()
1689 __ bne(&runtime); in Generate()
1693 __ LoadP(ip, FieldMemOperand(subject, SlicedString::kOffsetOffset)); in Generate()
1694 __ SmiUntag(ip); in Generate()
1695 __ LoadP(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); in Generate()
1696 __ b(&check_underlying); // Go to (4). in Generate()
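
On a successful match, the &next_capture loop above walks the static offsets vector that the irregexp code filled with raw int32 capture positions, smi-tags each value, and stores it into the last-match-info array. Sketch, again with an illustrative 1-bit smi tag:

    #include <cstdint>

    void CopyCaptures(intptr_t* match_info, const int32_t* offsets, int count) {
      for (int i = 0; i < count; i++) {
        match_info[i] = static_cast<intptr_t>(offsets[i]) * 2;  // SmiTag
      }
    }
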
1708 __ SmiTag(r2); in CallStubInRecordCallTarget()
1709 __ Push(r5, r4, r3, r2); in CallStubInRecordCallTarget()
1710 __ Push(cp); in CallStubInRecordCallTarget()
1712 __ CallStub(stub); in CallStubInRecordCallTarget()
1714 __ Pop(cp); in CallStubInRecordCallTarget()
1715 __ Pop(r5, r4, r3, r2); in CallStubInRecordCallTarget()
1716 __ SmiUntag(r2); in CallStubInRecordCallTarget()
1737 __ SmiToPtrArrayOffset(r7, r5); in GenerateRecordCallTarget()
1738 __ AddP(r7, r4, r7); in GenerateRecordCallTarget()
1739 __ LoadP(r7, FieldMemOperand(r7, FixedArray::kHeaderSize)); in GenerateRecordCallTarget()
1748 __ LoadP(weak_value, FieldMemOperand(r7, WeakCell::kValueOffset)); in GenerateRecordCallTarget()
1749 __ CmpP(r3, weak_value); in GenerateRecordCallTarget()
1750 __ beq(&done, Label::kNear); in GenerateRecordCallTarget()
1751 __ CompareRoot(r7, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1752 __ beq(&done, Label::kNear); in GenerateRecordCallTarget()
1753 __ LoadP(feedback_map, FieldMemOperand(r7, HeapObject::kMapOffset)); in GenerateRecordCallTarget()
1754 __ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex); in GenerateRecordCallTarget()
1755 __ bne(&check_allocation_site); in GenerateRecordCallTarget()
1758 __ JumpIfSmi(weak_value, &initialize); in GenerateRecordCallTarget()
1759 __ b(&megamorphic); in GenerateRecordCallTarget()
1761 __ bind(&check_allocation_site); in GenerateRecordCallTarget()
1766 __ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex); in GenerateRecordCallTarget()
1767 __ bne(&miss); in GenerateRecordCallTarget()
1770 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r7); in GenerateRecordCallTarget()
1771 __ CmpP(r3, r7); in GenerateRecordCallTarget()
1772 __ bne(&megamorphic); in GenerateRecordCallTarget()
1773 __ b(&done, Label::kNear); in GenerateRecordCallTarget()
1775 __ bind(&miss); in GenerateRecordCallTarget()
1779 __ CompareRoot(r7, Heap::kuninitialized_symbolRootIndex); in GenerateRecordCallTarget()
1780 __ beq(&initialize); in GenerateRecordCallTarget()
1783 __ bind(&megamorphic); in GenerateRecordCallTarget()
1784 __ SmiToPtrArrayOffset(r7, r5); in GenerateRecordCallTarget()
1785 __ AddP(r7, r4, r7); in GenerateRecordCallTarget()
1786 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1787 __ StoreP(ip, FieldMemOperand(r7, FixedArray::kHeaderSize), r0); in GenerateRecordCallTarget()
1788 __ jmp(&done); in GenerateRecordCallTarget()
1791 __ bind(&initialize); in GenerateRecordCallTarget()
1794 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r7); in GenerateRecordCallTarget()
1795 __ CmpP(r3, r7); in GenerateRecordCallTarget()
1796 __ bne(&not_array_function); in GenerateRecordCallTarget()
1803 __ b(&done, Label::kNear); in GenerateRecordCallTarget()
1805 __ bind(&not_array_function); in GenerateRecordCallTarget()
1810 __ bind(&done); in GenerateRecordCallTarget()
1813 __ SmiToPtrArrayOffset(r7, r5); in GenerateRecordCallTarget()
1814 __ AddP(r7, r4, r7); in GenerateRecordCallTarget()
1816 __ LoadP(r6, FieldMemOperand(r7, count_offset)); in GenerateRecordCallTarget()
1817 __ AddSmiLiteral(r6, r6, Smi::FromInt(1), r0); in GenerateRecordCallTarget()
1818 __ StoreP(r6, FieldMemOperand(r7, count_offset), r0); in GenerateRecordCallTarget()
1829 __ JumpIfSmi(r3, &non_function); in Generate()
1831 __ CompareObjectType(r3, r7, r7, JS_FUNCTION_TYPE); in Generate()
1832 __ bne(&non_function); in Generate()
1836 __ SmiToPtrArrayOffset(r7, r5); in Generate()
1837 __ AddP(r7, r4, r7); in Generate()
1839 __ LoadP(r4, FieldMemOperand(r7, FixedArray::kHeaderSize)); in Generate()
1840 __ LoadP(r7, FieldMemOperand(r4, AllocationSite::kMapOffset)); in Generate()
1841 __ CompareRoot(r7, Heap::kAllocationSiteMapRootIndex); in Generate()
1843 __ beq(&feedback_register_initialized); in Generate()
1844 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); in Generate()
1845 __ bind(&feedback_register_initialized); in Generate()
1847 __ AssertUndefinedOrAllocationSite(r4, r7); in Generate()
1850 __ LoadRR(r5, r3); in Generate()
1854 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); in Generate()
1855 __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kConstructStubOffset)); in Generate()
1856 __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1857 __ JumpToJSEntry(ip); in Generate()
1859 __ bind(&non_function); in Generate()
1860 __ LoadRR(r5, r3); in Generate()
1861 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate()
1868 __ SmiToPtrArrayOffset(temp, slot); in IncrementCallCount()
1869 __ AddP(feedback_vector, feedback_vector, temp); in IncrementCallCount()
1870 __ LoadP(slot, FieldMemOperand(feedback_vector, count_offset)); in IncrementCallCount()
1871 __ AddSmiLiteral(slot, slot, Smi::FromInt(1), temp); in IncrementCallCount()
1872 __ StoreP(slot, FieldMemOperand(feedback_vector, count_offset), temp); in IncrementCallCount()
1881 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r7); in HandleArrayCase()
1882 __ CmpP(r3, r7); in HandleArrayCase()
1883 __ bne(miss); in HandleArrayCase()
1888 __ LoadRR(r4, r6); in HandleArrayCase()
1889 __ LoadRR(r5, r3); in HandleArrayCase()
1891 __ TailCallStub(&stub); in HandleArrayCase()
1902 __ SmiToPtrArrayOffset(r8, r5); in Generate()
1903 __ AddP(r8, r4, r8); in Generate()
1904 __ LoadP(r6, FieldMemOperand(r8, FixedArray::kHeaderSize)); in Generate()
1920 __ LoadP(r7, FieldMemOperand(r6, WeakCell::kValueOffset)); in Generate()
1921 __ CmpP(r3, r7); in Generate()
1922 __ bne(&extra_checks_or_miss, Label::kNear); in Generate()
1926 __ JumpIfSmi(r3, &extra_checks_or_miss); in Generate()
1928 __ bind(&call_function); in Generate()
1933 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), in Generate()
1937 __ bind(&extra_checks_or_miss); in Generate()
1940 __ CompareRoot(r6, Heap::kmegamorphic_symbolRootIndex); in Generate()
1941 __ beq(&call); in Generate()
1944 __ LoadP(r7, FieldMemOperand(r6, HeapObject::kMapOffset)); in Generate()
1945 __ CompareRoot(r7, Heap::kAllocationSiteMapRootIndex); in Generate()
1946 __ bne(&not_allocation_site); in Generate()
1951 __ bind(&not_allocation_site); in Generate()
1956 __ b(&miss); in Generate()
1959 __ CompareRoot(r6, Heap::kuninitialized_symbolRootIndex); in Generate()
1960 __ beq(&uninitialized); in Generate()
1964 __ AssertNotSmi(r6); in Generate()
1965 __ CompareObjectType(r6, r7, r7, JS_FUNCTION_TYPE); in Generate()
1966 __ bne(&miss); in Generate()
1967 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); in Generate()
1968 __ StoreP(ip, FieldMemOperand(r8, FixedArray::kHeaderSize), r0); in Generate()
1970 __ bind(&call); in Generate()
1975 __ bind(&call_count_incremented); in Generate()
1976 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), in Generate()
1979 __ bind(&uninitialized); in Generate()
1982 __ JumpIfSmi(r3, &miss); in Generate()
1985 __ CompareObjectType(r3, r6, r6, JS_FUNCTION_TYPE); in Generate()
1986 __ bne(&miss); in Generate()
1990 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r6); in Generate()
1991 __ CmpP(r3, r6); in Generate()
1992 __ beq(&miss); in Generate()
1995 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kContextOffset)); in Generate()
1996 __ LoadP(r6, ContextMemOperand(r6, Context::NATIVE_CONTEXT_INDEX)); in Generate()
1997 __ LoadP(ip, NativeContextMemOperand()); in Generate()
1998 __ CmpP(r6, ip); in Generate()
1999 __ bne(&miss); in Generate()
2008 __ SmiTag(r2); in Generate()
2009 __ Push(r2, r4, r5, cp, r3); in Generate()
2010 __ CallStub(&create_stub); in Generate()
2011 __ Pop(r4, r5, cp, r3); in Generate()
2012 __ Pop(r2); in Generate()
2013 __ SmiUntag(r2); in Generate()
2016 __ b(&call_function); in Generate()
2020 __ bind(&miss); in Generate()
2023 __ b(&call_count_incremented); in Generate()
2030 __ SmiTag(r2); in GenerateMiss()
2033 __ Push(r2, r3, r4, r5); in GenerateMiss()
2036 __ CallRuntime(Runtime::kCallIC_Miss); in GenerateMiss()
2039 __ LoadRR(r3, r2); in GenerateMiss()
2042 __ Pop(r2); in GenerateMiss()
2043 __ SmiUntag(r2); in GenerateMiss()
2050 __ JumpIfSmi(object_, receiver_not_string_); in GenerateFast()
2053 __ LoadP(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); in GenerateFast()
2054 __ LoadlB(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); in GenerateFast()
2056 __ mov(r0, Operand(kIsNotStringMask)); in GenerateFast()
2057 __ AndP(r0, result_); in GenerateFast()
2058 __ bne(receiver_not_string_); in GenerateFast()
2062 __ JumpIfNotSmi(index_, &index_not_smi_); in GenerateFast()
2063 __ bind(&got_smi_index_); in GenerateFast()
2066 __ LoadP(ip, FieldMemOperand(object_, String::kLengthOffset)); in GenerateFast()
2067 __ CmpLogicalP(ip, index_); in GenerateFast()
2068 __ ble(index_out_of_range_); in GenerateFast()
2070 __ SmiUntag(index_); in GenerateFast()
2075 __ SmiTag(result_); in GenerateFast()
2076 __ bind(&exit_); in GenerateFast()
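
The CmpLogicalP/ble pair above is an unsigned bounds check on two tagged smis: one compare rejects both index >= length and a negative index, since a negative smi looks enormous when read as unsigned, and smi tagging preserves order so the tagged words can be compared directly. Sketch:

    #include <cstdint>

    bool SmiIndexInBounds(uintptr_t tagged_length, uintptr_t tagged_index) {
      return tagged_index < tagged_length;  // unsigned compare, as in the stub
    }
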
2082 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); in GenerateSlow()
2085 __ bind(&index_not_smi_); in GenerateSlow()
2087 __ CheckMap(index_, result_, Heap::kHeapNumberMapRootIndex, index_not_number_, in GenerateSlow()
2091 __ Push(LoadWithVectorDescriptor::VectorRegister(), in GenerateSlow()
2095 __ Push(object_, index_); in GenerateSlow()
2097 __ CallRuntime(Runtime::kNumberToSmi); in GenerateSlow()
2100 __ Move(index_, r2); in GenerateSlow()
2102 __ Pop(LoadWithVectorDescriptor::VectorRegister(), in GenerateSlow()
2105 __ pop(object_); in GenerateSlow()
2108 __ LoadP(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); in GenerateSlow()
2109 __ LoadlB(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); in GenerateSlow()
2112 __ JumpIfNotSmi(index_, index_out_of_range_); in GenerateSlow()
2114 __ b(&got_smi_index_); in GenerateSlow()
2119 __ bind(&call_runtime_); in GenerateSlow()
2121 __ SmiTag(index_); in GenerateSlow()
2122 __ Push(object_, index_); in GenerateSlow()
2123 __ CallRuntime(Runtime::kStringCharCodeAtRT); in GenerateSlow()
2124 __ Move(result_, r2); in GenerateSlow()
2126 __ b(&exit_); in GenerateSlow()
2128 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); in GenerateSlow()
2137 __ LoadSmiLiteral(r0, Smi::FromInt(~String::kMaxOneByteCharCodeU)); in GenerateFast()
2138 __ OrP(r0, r0, Operand(kSmiTagMask)); in GenerateFast()
2139 __ AndP(r0, code_, r0); in GenerateFast()
2140 __ bne(&slow_case_); in GenerateFast()
2142 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); in GenerateFast()
2144 __ LoadRR(r0, code_); in GenerateFast()
2145 __ SmiToPtrArrayOffset(code_, code_); in GenerateFast()
2146 __ AddP(result_, code_); in GenerateFast()
2147 __ LoadRR(code_, r0); in GenerateFast()
2148 __ LoadP(result_, FieldMemOperand(result_, FixedArray::kHeaderSize)); in GenerateFast()
2149 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex); in GenerateFast()
2150 __ beq(&slow_case_); in GenerateFast()
2151 __ bind(&exit_); in GenerateFast()
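
The three-instruction test opening this block folds two checks into one mask: code_ must be a smi (tag bit clear) and its payload must not exceed String::kMaxOneByteCharCode, so AND-ing with the smi-tagged complement of that maximum OR kSmiTagMask yields zero exactly for valid inputs. Sketch, assuming a 1-bit smi tag and a 0xff maximum:

    #include <cstdint>

    bool IsOneByteCharCodeSmi(intptr_t value) {
      const intptr_t mask = (~intptr_t{0xff} << 1) | 1;  // tagged ~max | tag bit
      return (value & mask) == 0;
    }
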
2156 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); in GenerateSlow()
2158 __ bind(&slow_case_); in GenerateSlow()
2160 __ push(code_); in GenerateSlow()
2161 __ CallRuntime(Runtime::kStringCharFromCode); in GenerateSlow()
2162 __ Move(result_, r2); in GenerateSlow()
2164 __ b(&exit_); in GenerateSlow()
2166 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); in GenerateSlow()
2177 __ mov(r0, Operand(kPointerAlignmentMask)); in GenerateCopyCharacters()
2178 __ AndP(r0, dest); in GenerateCopyCharacters()
2179 __ Check(eq, kDestinationOfCopyNotAligned, cr0); in GenerateCopyCharacters()
2186 __ AddP(count, count, count); in GenerateCopyCharacters()
2187 __ beq(&done, Label::kNear); in GenerateCopyCharacters()
2189 __ CmpP(count, Operand::Zero()); in GenerateCopyCharacters()
2190 __ beq(&done, Label::kNear); in GenerateCopyCharacters()
2196 __ bind(&byte_loop); in GenerateCopyCharacters()
2197 __ LoadlB(scratch, MemOperand(src)); in GenerateCopyCharacters()
2198 __ la(src, MemOperand(src, 1)); in GenerateCopyCharacters()
2199 __ stc(scratch, MemOperand(dest)); in GenerateCopyCharacters()
2200 __ la(dest, MemOperand(dest, 1)); in GenerateCopyCharacters()
2201 __ BranchOnCount(count, &byte_loop); in GenerateCopyCharacters()
2203 __ bind(&done); in GenerateCopyCharacters()
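
GenerateCopyCharacters is a plain byte loop driven by BranchOnCount (decrement and branch while nonzero); the AddP(count, count, count) above is the two-byte-encoding case, doubling the count so the same byte loop copies UC16 strings too. Equivalent C++:

    #include <cstddef>
    #include <cstdint>

    // Byte-wise copy; for two-byte strings the caller doubles the count.
    void CopyCharacters(uint8_t* dest, const uint8_t* src, size_t byte_count) {
      while (byte_count-- != 0) *dest++ = *src++;
    }
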
2216 __ LoadP(length, FieldMemOperand(left, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
2217 __ LoadP(scratch2, FieldMemOperand(right, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
2218 __ CmpP(length, scratch2); in GenerateFlatOneByteStringEquals()
2219 __ beq(&check_zero_length); in GenerateFlatOneByteStringEquals()
2220 __ bind(&strings_not_equal); in GenerateFlatOneByteStringEquals()
2221 __ LoadSmiLiteral(r2, Smi::FromInt(NOT_EQUAL)); in GenerateFlatOneByteStringEquals()
2222 __ Ret(); in GenerateFlatOneByteStringEquals()
2226 __ bind(&check_zero_length); in GenerateFlatOneByteStringEquals()
2228 __ CmpP(length, Operand::Zero()); in GenerateFlatOneByteStringEquals()
2229 __ bne(&compare_chars); in GenerateFlatOneByteStringEquals()
2230 __ LoadSmiLiteral(r2, Smi::FromInt(EQUAL)); in GenerateFlatOneByteStringEquals()
2231 __ Ret(); in GenerateFlatOneByteStringEquals()
2234 __ bind(&compare_chars); in GenerateFlatOneByteStringEquals()
2239 __ LoadSmiLiteral(r2, Smi::FromInt(EQUAL)); in GenerateFlatOneByteStringEquals()
2240 __ Ret(); in GenerateFlatOneByteStringEquals()
2248 __ LoadP(scratch1, FieldMemOperand(left, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
2249 __ LoadP(scratch2, FieldMemOperand(right, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
2250 __ SubP(scratch3, scratch1, scratch2 /*, LeaveOE, SetRC*/); in GenerateCompareFlatOneByteStrings()
2253 __ ble(&skip, Label::kNear); in GenerateCompareFlatOneByteStrings()
2254 __ LoadRR(scratch1, scratch2); in GenerateCompareFlatOneByteStrings()
2255 __ bind(&skip); in GenerateCompareFlatOneByteStrings()
2258 __ CmpP(min_length, Operand::Zero()); in GenerateCompareFlatOneByteStrings()
2259 __ beq(&compare_lengths); in GenerateCompareFlatOneByteStrings()
2266 __ bind(&compare_lengths); in GenerateCompareFlatOneByteStrings()
2269 __ LoadRR(r2, length_delta); in GenerateCompareFlatOneByteStrings()
2270 __ CmpP(length_delta, Operand::Zero()); in GenerateCompareFlatOneByteStrings()
2271 __ bind(&result_not_equal); in GenerateCompareFlatOneByteStrings()
2275 __ ble(&less_equal); in GenerateCompareFlatOneByteStrings()
2276 __ LoadSmiLiteral(r2, Smi::FromInt(GREATER)); in GenerateCompareFlatOneByteStrings()
2277 __ Ret(); in GenerateCompareFlatOneByteStrings()
2278 __ bind(&less_equal); in GenerateCompareFlatOneByteStrings()
2279 __ beq(&equal); in GenerateCompareFlatOneByteStrings()
2280 __ LoadSmiLiteral(r2, Smi::FromInt(LESS)); in GenerateCompareFlatOneByteStrings()
2281 __ bind(&equal); in GenerateCompareFlatOneByteStrings()
2282 __ Ret(); in GenerateCompareFlatOneByteStrings()
2291 __ SmiUntag(length); in GenerateOneByteCharsCompareLoop()
2292 __ AddP(scratch1, length, in GenerateOneByteCharsCompareLoop()
2294 __ AddP(left, scratch1); in GenerateOneByteCharsCompareLoop()
2295 __ AddP(right, scratch1); in GenerateOneByteCharsCompareLoop()
2296 __ LoadComplementRR(length, length); in GenerateOneByteCharsCompareLoop()
2301 __ bind(&loop); in GenerateOneByteCharsCompareLoop()
2302 __ LoadlB(scratch1, MemOperand(left, index)); in GenerateOneByteCharsCompareLoop()
2303 __ LoadlB(r0, MemOperand(right, index)); in GenerateOneByteCharsCompareLoop()
2304 __ CmpP(scratch1, r0); in GenerateOneByteCharsCompareLoop()
2305 __ bne(chars_not_equal); in GenerateOneByteCharsCompareLoop()
2306 __ AddP(index, Operand(1)); in GenerateOneByteCharsCompareLoop()
2307 __ CmpP(index, Operand::Zero()); in GenerateOneByteCharsCompareLoop()
2308 __ bne(&loop); in GenerateOneByteCharsCompareLoop()
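
GenerateCompareFlatOneByteStrings and its helper loop implement plain lexicographic order: compare the first min(left_len, right_len) bytes, and if they all match let the length difference decide; the helper biases left/right past the end and counts a negated index up to zero, so the loop-end test is a simple compare against zero. A sketch of the overall comparison:

    #include <cstddef>
    #include <cstdint>

    // Returns -1 (LESS), 0 (EQUAL) or 1 (GREATER), like the stub's smi results.
    int CompareFlatOneByte(const uint8_t* left, size_t left_len,
                           const uint8_t* right, size_t right_len) {
      size_t min_len = left_len < right_len ? left_len : right_len;
      for (size_t i = 0; i < min_len; i++) {
        if (left[i] != right[i]) return left[i] < right[i] ? -1 : 1;
      }
      if (left_len == right_len) return 0;
      return left_len < right_len ? -1 : 1;
    }
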
2321 __ Move(r4, isolate()->factory()->undefined_value()); in Generate()
2325 __ TestIfSmi(r4); in Generate()
2326 __ Assert(ne, kExpectedAllocationSite, cr0); in Generate()
2327 __ push(r4); in Generate()
2328 __ LoadP(r4, FieldMemOperand(r4, HeapObject::kMapOffset)); in Generate()
2329 __ CompareRoot(r4, Heap::kAllocationSiteMapRootIndex); in Generate()
2330 __ pop(r4); in Generate()
2331 __ Assert(eq, kExpectedAllocationSite); in Generate()
2337 __ TailCallStub(&stub); in Generate()
2344 __ CheckMap(r3, r4, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); in GenerateBooleans()
2345 __ CheckMap(r2, r5, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); in GenerateBooleans()
2347 __ LoadP(r3, FieldMemOperand(r3, Oddball::kToNumberOffset)); in GenerateBooleans()
2348 __ AssertSmi(r3); in GenerateBooleans()
2349 __ LoadP(r2, FieldMemOperand(r2, Oddball::kToNumberOffset)); in GenerateBooleans()
2350 __ AssertSmi(r2); in GenerateBooleans()
2352 __ SubP(r2, r3, r2); in GenerateBooleans()
2353 __ Ret(); in GenerateBooleans()
2355 __ bind(&miss); in GenerateBooleans()
2362 __ OrP(r4, r3, r2); in GenerateSmis()
2363 __ JumpIfNotSmi(r4, &miss); in GenerateSmis()
2367 // __ sub(r2, r2, r3, SetCC); in GenerateSmis()
2368 __ SubP(r2, r2, r3); in GenerateSmis()
2371 __ SmiUntag(r3); in GenerateSmis()
2372 __ SmiUntag(r2); in GenerateSmis()
2373 __ SubP(r2, r3, r2); in GenerateSmis()
2375 __ Ret(); in GenerateSmis()
2377 __ bind(&miss); in GenerateSmis()
2390 __ JumpIfNotSmi(r3, &miss); in GenerateNumbers()
2393 __ JumpIfNotSmi(r2, &miss); in GenerateNumbers()
2400 __ JumpIfSmi(r2, &right_smi); in GenerateNumbers()
2401 __ CheckMap(r2, r4, Heap::kHeapNumberMapRootIndex, &maybe_undefined1, in GenerateNumbers()
2403 __ LoadDouble(d1, FieldMemOperand(r2, HeapNumber::kValueOffset)); in GenerateNumbers()
2404 __ b(&left); in GenerateNumbers()
2405 __ bind(&right_smi); in GenerateNumbers()
2406 __ SmiToDouble(d1, r2); in GenerateNumbers()
2408 __ bind(&left); in GenerateNumbers()
2409 __ JumpIfSmi(r3, &left_smi); in GenerateNumbers()
2410 __ CheckMap(r3, r4, Heap::kHeapNumberMapRootIndex, &maybe_undefined2, in GenerateNumbers()
2412 __ LoadDouble(d0, FieldMemOperand(r3, HeapNumber::kValueOffset)); in GenerateNumbers()
2413 __ b(&done); in GenerateNumbers()
2414 __ bind(&left_smi); in GenerateNumbers()
2415 __ SmiToDouble(d0, r3); in GenerateNumbers()
2417 __ bind(&done); in GenerateNumbers()
2420 __ cdbr(d0, d1); in GenerateNumbers()
2423 __ bunordered(&unordered); in GenerateNumbers()
2426 __ beq(&equal); in GenerateNumbers()
2427 __ blt(&less_than); in GenerateNumbers()
2429 __ LoadImmP(r2, Operand(GREATER)); in GenerateNumbers()
2430 __ Ret(); in GenerateNumbers()
2431 __ bind(&equal); in GenerateNumbers()
2432 __ LoadImmP(r2, Operand(EQUAL)); in GenerateNumbers()
2433 __ Ret(); in GenerateNumbers()
2434 __ bind(&less_than); in GenerateNumbers()
2435 __ LoadImmP(r2, Operand(LESS)); in GenerateNumbers()
2436 __ Ret(); in GenerateNumbers()
2438 __ bind(&unordered); in GenerateNumbers()
2439 __ bind(&generic_stub); in GenerateNumbers()
2442 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); in GenerateNumbers()
2444 __ bind(&maybe_undefined1); in GenerateNumbers()
2446 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); in GenerateNumbers()
2447 __ bne(&miss); in GenerateNumbers()
2448 __ JumpIfSmi(r3, &unordered); in GenerateNumbers()
2449 __ CompareObjectType(r3, r4, r4, HEAP_NUMBER_TYPE); in GenerateNumbers()
2450 __ bne(&maybe_undefined2); in GenerateNumbers()
2451 __ b(&unordered); in GenerateNumbers()
2454 __ bind(&maybe_undefined2); in GenerateNumbers()
2456 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); in GenerateNumbers()
2457 __ beq(&unordered); in GenerateNumbers()
2460 __ bind(&miss); in GenerateNumbers()
2475 __ JumpIfEitherSmi(left, right, &miss); in GenerateInternalizedStrings()
2478 __ LoadP(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2479 __ LoadP(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2480 __ LoadlB(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2481 __ LoadlB(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2483 __ OrP(tmp1, tmp1, tmp2); in GenerateInternalizedStrings()
2484 __ AndP(r0, tmp1, Operand(kIsNotStringMask | kIsNotInternalizedMask)); in GenerateInternalizedStrings()
2485 __ bne(&miss); in GenerateInternalizedStrings()
2488 __ CmpP(left, right); in GenerateInternalizedStrings()
2489 __ bne(&not_equal); in GenerateInternalizedStrings()
2495 __ LoadSmiLiteral(r2, Smi::FromInt(EQUAL)); in GenerateInternalizedStrings()
2496 __ bind(&not_equal); in GenerateInternalizedStrings()
2497 __ Ret(); in GenerateInternalizedStrings()
2499 __ bind(&miss); in GenerateInternalizedStrings()
2515 __ JumpIfEitherSmi(left, right, &miss); in GenerateUniqueNames()
2519 __ LoadP(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateUniqueNames()
2520 __ LoadP(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateUniqueNames()
2521 __ LoadlB(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2522 __ LoadlB(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2524 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss); in GenerateUniqueNames()
2525 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss); in GenerateUniqueNames()
2528 __ CmpP(left, right); in GenerateUniqueNames()
2529 __ bne(&miss); in GenerateUniqueNames()
2535 __ LoadSmiLiteral(r2, Smi::FromInt(EQUAL)); in GenerateUniqueNames()
2536 __ Ret(); in GenerateUniqueNames()
2538 __ bind(&miss); in GenerateUniqueNames()
2557 __ JumpIfEitherSmi(left, right, &miss); in GenerateStrings()
2561 __ LoadP(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateStrings()
2562 __ LoadP(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateStrings()
2563 __ LoadlB(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateStrings()
2564 __ LoadlB(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateStrings()
2566 __ OrP(tmp3, tmp1, tmp2); in GenerateStrings()
2567 __ AndP(r0, tmp3, Operand(kIsNotStringMask)); in GenerateStrings()
2568 __ bne(&miss); in GenerateStrings()
2571 __ CmpP(left, right); in GenerateStrings()
2574 __ bne(&not_identical); in GenerateStrings()
2575 __ LoadSmiLiteral(r2, Smi::FromInt(EQUAL)); in GenerateStrings()
2576 __ Ret(); in GenerateStrings()
2577 __ bind(&not_identical); in GenerateStrings()
2587 __ OrP(tmp3, tmp1, tmp2); in GenerateStrings()
2588 __ AndP(r0, tmp3, Operand(kIsNotInternalizedMask)); in GenerateStrings()
2589 __ bne(&is_symbol); in GenerateStrings()
2593 __ Ret(); in GenerateStrings()
2594 __ bind(&is_symbol); in GenerateStrings()
2599 __ JumpIfBothInstanceTypesAreNotSequentialOneByte(tmp1, tmp2, tmp3, tmp4, in GenerateStrings()
2612 __ bind(&runtime); in GenerateStrings()
2616 __ Push(left, right); in GenerateStrings()
2617 __ CallRuntime(Runtime::kStringEqual); in GenerateStrings()
2619 __ LoadRoot(r3, Heap::kTrueValueRootIndex); in GenerateStrings()
2620 __ SubP(r2, r2, r3); in GenerateStrings()
2621 __ Ret(); in GenerateStrings()
2623 __ Push(left, right); in GenerateStrings()
2624 __ TailCallRuntime(Runtime::kStringCompare); in GenerateStrings()
2627 __ bind(&miss); in GenerateStrings()
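// GenerateReceivers (below): ANDing the two tagged values before JumpIfSmi
// is a cheap "either operand is a smi" test, because smis carry a 0 tag bit
// the AND looks like a heap object only when both operands are heap objects.
// Receiver equality is identity, so SubP(r2, r2, r3) leaves 0 exactly when
// the two are equal.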
2634 __ AndP(r4, r3, r2); in GenerateReceivers()
2635 __ JumpIfSmi(r4, &miss); in GenerateReceivers()
2638 __ CompareObjectType(r2, r4, r4, FIRST_JS_RECEIVER_TYPE); in GenerateReceivers()
2639 __ blt(&miss); in GenerateReceivers()
2640 __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE); in GenerateReceivers()
2641 __ blt(&miss); in GenerateReceivers()
2644 __ SubP(r2, r2, r3); in GenerateReceivers()
2645 __ Ret(); in GenerateReceivers()
2647 __ bind(&miss); in GenerateReceivers()
2654 __ AndP(r4, r3, r2); in GenerateKnownReceivers()
2655 __ JumpIfSmi(r4, &miss); in GenerateKnownReceivers()
2656 __ GetWeakValue(r6, cell); in GenerateKnownReceivers()
2657 __ LoadP(r4, FieldMemOperand(r2, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2658 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2659 __ CmpP(r4, r6); in GenerateKnownReceivers()
2660 __ bne(&miss); in GenerateKnownReceivers()
2661 __ CmpP(r5, r6); in GenerateKnownReceivers()
2662 __ bne(&miss); in GenerateKnownReceivers()
2665 __ SubP(r2, r2, r3); in GenerateKnownReceivers()
2666 __ Ret(); in GenerateKnownReceivers()
2669 __ LoadSmiLiteral(r4, Smi::FromInt(GREATER)); in GenerateKnownReceivers()
2671 __ LoadSmiLiteral(r4, Smi::FromInt(LESS)); in GenerateKnownReceivers()
2673 __ Push(r3, r2, r4); in GenerateKnownReceivers()
2674 __ TailCallRuntime(Runtime::kCompare); in GenerateKnownReceivers()
2677 __ bind(&miss); in GenerateKnownReceivers()
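// GenerateMiss (below) pushes the operand pair twice: once to survive the
// call and once as arguments to Runtime::kCompareIC_Miss, together with the
// current op as a smi. The runtime returns a re-specialized IC code object,
// so execution restarts at its instruction start with the operands restored.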
2685 __ Push(r3, r2); in GenerateMiss()
2686 __ Push(r3, r2); in GenerateMiss()
2687 __ LoadSmiLiteral(r0, Smi::FromInt(op())); in GenerateMiss()
2688 __ push(r0); in GenerateMiss()
2689 __ CallRuntime(Runtime::kCompareIC_Miss); in GenerateMiss()
2691 __ AddP(r4, r2, Operand(Code::kHeaderSize - kHeapObjectTag)); in GenerateMiss()
2693 __ Pop(r3, r2); in GenerateMiss()
2696 __ JumpToJSEntry(r4); in GenerateMiss()
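// DirectCEntryStub (below) makes a GC-safe direct call into C code: r14 is
// cleansed so the return address left on the stack is a clean pointer for
// the GC to inspect. The LoadP pair in GenerateCall looks like
// function-descriptor handling (TOC pointer plus real entry address) for the
// ABIs that need it.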
2701 __ CleanseP(r14); in Generate()
2703 __ b(ip); // Callee will return to R14 directly in Generate()
2709 __ LoadP(ToRegister(ABI_TOC_REGISTER), MemOperand(target, kPointerSize)); in GenerateCall()
2710 __ LoadP(target, MemOperand(target, 0)); // Instruction address in GenerateCall()
2714 __ Move(ip, target); in GenerateCall()
2717 __ call(GetCode(), RelocInfo::CODE_TARGET); // Call the stub. in GenerateCall()
2734 __ LoadP(index, FieldMemOperand(properties, kCapacityOffset)); in GenerateNegativeLookup()
2735 __ SubP(index, Operand(1)); in GenerateNegativeLookup()
2736 __ LoadSmiLiteral( in GenerateNegativeLookup()
2738 __ AndP(index, ip); in GenerateNegativeLookup()
2742 __ ShiftLeftP(ip, index, Operand(1)); in GenerateNegativeLookup()
2743 __ AddP(index, ip); // index *= 3. in GenerateNegativeLookup()
2748 __ SmiToPtrArrayOffset(ip, index); in GenerateNegativeLookup()
2749 __ AddP(tmp, properties, ip); in GenerateNegativeLookup()
2750 __ LoadP(entity_name, FieldMemOperand(tmp, kElementsStartOffset)); in GenerateNegativeLookup()
2753 __ CompareRoot(entity_name, Heap::kUndefinedValueRootIndex); in GenerateNegativeLookup()
2754 __ beq(done); in GenerateNegativeLookup()
2757 __ CmpP(entity_name, Operand(Handle<Name>(name))); in GenerateNegativeLookup()
2758 __ beq(miss); in GenerateNegativeLookup()
2761 __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex); in GenerateNegativeLookup()
2762 __ beq(&good); in GenerateNegativeLookup()
2765 __ LoadP(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset)); in GenerateNegativeLookup()
2766 __ LoadlB(entity_name, in GenerateNegativeLookup()
2768 __ JumpIfNotUniqueNameInstanceType(entity_name, miss); in GenerateNegativeLookup()
2769 __ bind(&good); in GenerateNegativeLookup()
2772 __ LoadP(properties, in GenerateNegativeLookup()
2779 __ LoadRR(r0, r14); in GenerateNegativeLookup()
2780 __ MultiPush(spill_mask); in GenerateNegativeLookup()
2782 __ LoadP(r2, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); in GenerateNegativeLookup()
2783 __ mov(r3, Operand(Handle<Name>(name))); in GenerateNegativeLookup()
2785 __ CallStub(&stub); in GenerateNegativeLookup()
2786 __ CmpP(r2, Operand::Zero()); in GenerateNegativeLookup()
2788 __ MultiPop(spill_mask); // MultiPop does not touch condition flags in GenerateNegativeLookup()
2789 __ LoadRR(r14, r0); in GenerateNegativeLookup()
2791 __ beq(done); in GenerateNegativeLookup()
2792 __ bne(miss); in GenerateNegativeLookup()
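// GenerateNegativeLookup (above) proves a name is absent from a property
// dictionary: hitting an undefined slot ends the probe successfully, an
// exact key match jumps to miss, the-hole entries are skipped, and any other
// occupant must still have a unique-name instance type for the negative
// answer to be trusted. The inline probes are backed by a stub call whose
// zero result means "not found".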
2807 __ AssertName(name); in GeneratePositiveLookup()
2810 __ LoadP(scratch1, FieldMemOperand(elements, kCapacityOffset)); in GeneratePositiveLookup()
2811 __ SmiUntag(scratch1); // convert smi to int in GeneratePositiveLookup()
2812 __ SubP(scratch1, Operand(1)); in GeneratePositiveLookup()
2819 __ LoadlW(scratch2, FieldMemOperand(name, String::kHashFieldOffset)); in GeneratePositiveLookup()
2826 __ AddP(scratch2, in GeneratePositiveLookup()
2829 __ srl(scratch2, Operand(String::kHashShift)); in GeneratePositiveLookup()
2830 __ AndP(scratch2, scratch1); in GeneratePositiveLookup()
2835 __ ShiftLeftP(ip, scratch2, Operand(1)); in GeneratePositiveLookup()
2836 __ AddP(scratch2, ip); in GeneratePositiveLookup()
2839 __ ShiftLeftP(ip, scratch2, Operand(kPointerSizeLog2)); in GeneratePositiveLookup()
2840 __ AddP(scratch2, elements, ip); in GeneratePositiveLookup()
2841 __ LoadP(ip, FieldMemOperand(scratch2, kElementsStartOffset)); in GeneratePositiveLookup()
2842 __ CmpP(name, ip); in GeneratePositiveLookup()
2843 __ beq(done); in GeneratePositiveLookup()
2850 __ LoadRR(r0, r14); in GeneratePositiveLookup()
2851 __ MultiPush(spill_mask); in GeneratePositiveLookup()
2854 __ LoadRR(r3, name); in GeneratePositiveLookup()
2855 __ LoadRR(r2, elements); in GeneratePositiveLookup()
2857 __ LoadRR(r2, elements); in GeneratePositiveLookup()
2858 __ LoadRR(r3, name); in GeneratePositiveLookup()
2861 __ CallStub(&stub); in GeneratePositiveLookup()
2862 __ LoadRR(r1, r2); in GeneratePositiveLookup()
2863 __ LoadRR(scratch2, r4); in GeneratePositiveLookup()
2864 __ MultiPop(spill_mask); in GeneratePositiveLookup()
2865 __ LoadRR(r14, r0); in GeneratePositiveLookup()
2867 __ CmpP(r1, Operand::Zero()); in GeneratePositiveLookup()
2868 __ bne(done); in GeneratePositiveLookup()
2869 __ beq(miss); in GeneratePositiveLookup()
2896 __ LoadP(mask, FieldMemOperand(dictionary, kCapacityOffset)); in Generate()
2897 __ SmiUntag(mask); in Generate()
2898 __ SubP(mask, Operand(1)); in Generate()
2900 __ LoadlW(hash, FieldMemOperand(key, String::kHashFieldOffset)); in Generate()
2902 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex); in Generate()
2913 __ AddP(index, hash, in Generate()
2916 __ LoadRR(index, hash); in Generate()
2918 __ ShiftRight(r0, index, Operand(String::kHashShift)); in Generate()
2919 __ AndP(index, r0, mask); in Generate()
2923 __ ShiftLeftP(scratch, index, Operand(1)); in Generate()
2924 __ AddP(index, scratch); // index *= 3. in Generate()
2926 __ ShiftLeftP(scratch, index, Operand(kPointerSizeLog2)); in Generate()
2927 __ AddP(index, dictionary, scratch); in Generate()
2928 __ LoadP(entry_key, FieldMemOperand(index, kElementsStartOffset)); in Generate()
2931 __ CmpP(entry_key, undefined); in Generate()
2932 __ beq(&not_in_dictionary); in Generate()
2935 __ CmpP(entry_key, key); in Generate()
2936 __ beq(&in_dictionary); in Generate()
2940 __ LoadP(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset)); in Generate()
2941 __ LoadlB(entry_key, in Generate()
2943 __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary); in Generate()
2947 __ bind(&maybe_in_dictionary); in Generate()
2952 __ LoadImmP(result, Operand::Zero()); in Generate()
2953 __ Ret(); in Generate()
2956 __ bind(&in_dictionary); in Generate()
2957 __ LoadImmP(result, Operand(1)); in Generate()
2958 __ Ret(); in Generate()
2960 __ bind(&not_in_dictionary); in Generate()
2961 __ LoadImmP(result, Operand::Zero()); in Generate()
2962 __ Ret(); in Generate()
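// A minimal standalone C++ sketch (illustrative, not V8 code) of the probe
// sequence the three lookup routines above implement: open addressing over a
// power-of-two table with kEntrySize pointer slots per entry (hence the
// "index *= 3" shifts) and an undefined sentinel marking free slots. The
// triangular-number probe offsets are an assumption based on V8's
// NameDictionary; the stubs use precomputed per-probe offsets.
static const unsigned kEntrySize = 3;  // key, value, property details

// |slots| is the flattened elements array; the key of entry e lives at
// slots[e * kEntrySize]. Returns the entry index, or -1 once a free slot
// proves the key is absent.
static int Probe(const void* const* slots, unsigned capacity, unsigned hash,
                 const void* key, const void* undefined_sentinel) {
  const unsigned mask = capacity - 1;  // capacity is a power of two
  for (unsigned i = 0; i < capacity; i++) {
    const unsigned entry = (hash + (i * i + i) / 2) & mask;  // quadratic probe
    const void* candidate = slots[entry * kEntrySize];
    if (candidate == key) return static_cast<int>(entry);
    if (candidate == undefined_sentinel) return -1;  // definitely absent
  }
  return -1;  // table exhausted without finding the key
}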
2989 __ b(CC_NOP, &skip_to_incremental_noncompacting); in Generate()
2990 __ b(CC_NOP, &skip_to_incremental_compacting); in Generate()
2993 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in Generate()
2996 __ Ret(); in Generate()
2998 __ bind(&skip_to_incremental_noncompacting); in Generate()
3001 __ bind(&skip_to_incremental_compacting); in Generate()
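// RecordWriteStub::Generate (above) begins with two branches assembled as
// CC_NOP, i.e. never taken: the unpatched fast path just updates the
// remembered set and returns. When incremental marking starts, the GC
// patches these instructions in place so stores take the non-compacting or
// compacting marking path instead.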
3015 __ LoadP(regs_.scratch0(), MemOperand(regs_.address(), 0)); in GenerateIncremental()
3016 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value. in GenerateIncremental()
3019 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(), in GenerateIncremental()
3028 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in GenerateIncremental()
3031 __ bind(&dont_need_remembered_set); in GenerateIncremental()
3038 __ Ret(); in GenerateIncremental()
3044 __ PrepareCallCFunction(argument_count, regs_.scratch0()); in InformIncrementalMarker()
3049 __ LoadRR(address, regs_.address()); in InformIncrementalMarker()
3050 __ LoadRR(r2, regs_.object()); in InformIncrementalMarker()
3051 __ LoadRR(r3, address); in InformIncrementalMarker()
3052 __ mov(r4, Operand(ExternalReference::isolate_address(isolate()))); in InformIncrementalMarker()
3055 __ CallCFunction( in InformIncrementalMarker()
3070 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black); in CheckNeedsToInformIncrementalMarker()
3074 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in CheckNeedsToInformIncrementalMarker()
3077 __ Ret(); in CheckNeedsToInformIncrementalMarker()
3080 __ bind(&on_black); in CheckNeedsToInformIncrementalMarker()
3083 __ LoadP(regs_.scratch0(), MemOperand(regs_.address(), 0)); in CheckNeedsToInformIncrementalMarker()
3088 __ CheckPageFlag(regs_.scratch0(), // Contains value. in CheckNeedsToInformIncrementalMarker()
3093 __ CheckPageFlag(regs_.object(), in CheckNeedsToInformIncrementalMarker()
3098 __ bind(&ensure_not_white); in CheckNeedsToInformIncrementalMarker()
3103 __ Push(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
3104 __ JumpIfWhite(regs_.scratch0(), // The value. in CheckNeedsToInformIncrementalMarker()
3109 __ Pop(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
3113 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in CheckNeedsToInformIncrementalMarker()
3116 __ Ret(); in CheckNeedsToInformIncrementalMarker()
3119 __ bind(&need_incremental_pop_scratch); in CheckNeedsToInformIncrementalMarker()
3120 __ Pop(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
3122 __ bind(&need_incremental); in CheckNeedsToInformIncrementalMarker()
3129 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET); in Generate()
3132 __ LoadP(r3, MemOperand(fp, parameter_count_offset)); in Generate()
3134 __ AddP(r3, Operand(1)); in Generate()
3137 __ ShiftLeftP(r3, r3, Operand(kPointerSizeLog2)); in Generate()
3138 __ la(sp, MemOperand(r3, sp)); in Generate()
3139 __ Ret(); in Generate()
3143 __ EmitLoadTypeFeedbackVector(r4); in Generate()
3145 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); in Generate()
3158 __ LoadP(cached_map, in HandleArrayCases()
3160 __ LoadP(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); in HandleArrayCases()
3161 __ CmpP(receiver_map, cached_map); in HandleArrayCases()
3162 __ bne(&start_polymorphic, Label::kNear); in HandleArrayCases()
3165 __ LoadP(handler, in HandleArrayCases()
3167 __ AddP(ip, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); in HandleArrayCases()
3168 __ Jump(ip); in HandleArrayCases()
3171 __ bind(&start_polymorphic); in HandleArrayCases()
3172 __ LoadP(length, FieldMemOperand(feedback, FixedArray::kLengthOffset)); in HandleArrayCases()
3176 __ CmpSmiLiteral(length, Smi::FromInt(2), r0); in HandleArrayCases()
3177 __ beq(miss); in HandleArrayCases()
3193 __ SmiToPtrArrayOffset(r0, length); in HandleArrayCases()
3194 __ AddP(too_far, feedback, r0); in HandleArrayCases()
3195 __ AddP(too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in HandleArrayCases()
3196 __ AddP(pointer_reg, feedback, in HandleArrayCases()
3199 __ bind(&next_loop); in HandleArrayCases()
3200 __ LoadP(cached_map, MemOperand(pointer_reg)); in HandleArrayCases()
3201 __ LoadP(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); in HandleArrayCases()
3202 __ CmpP(receiver_map, cached_map); in HandleArrayCases()
3203 __ bne(&prepare_next, Label::kNear); in HandleArrayCases()
3204 __ LoadP(handler, MemOperand(pointer_reg, kPointerSize)); in HandleArrayCases()
3205 __ AddP(ip, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); in HandleArrayCases()
3206 __ Jump(ip); in HandleArrayCases()
3208 __ bind(&prepare_next); in HandleArrayCases()
3209 __ AddP(pointer_reg, Operand(kPointerSize * 2)); in HandleArrayCases()
3210 __ CmpP(pointer_reg, too_far); in HandleArrayCases()
3211 __ blt(&next_loop, Label::kNear); in HandleArrayCases()
3214 __ b(miss); in HandleArrayCases()
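// A compact sketch (assumed layout, not V8 code) of the dispatch loop above:
// the feedback FixedArray holds (weak map, handler) pairs, and the stub
// enters the handler whose map matches the receiver map, falling through to
// miss when none does. Function pointers stand in for Code objects here.
struct FeedbackPair {
  const void* map;    // weak map reference in the real feedback array
  void (*handler)();  // handler Code object in the real feedback array
};

static void DispatchPolymorphic(const FeedbackPair* pairs, unsigned count,
                                const void* receiver_map, void (*miss)()) {
  for (unsigned i = 0; i < count; i++) {
    if (pairs[i].map == receiver_map) {
      pairs[i].handler();  // the stub tail-jumps past the Code header instead
      return;
    }
  }
  miss();  // no cached map matched
}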
3222 __ JumpIfSmi(receiver, load_smi_map); in HandleMonomorphicCase()
3223 __ LoadP(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); in HandleMonomorphicCase()
3224 __ bind(compare_map); in HandleMonomorphicCase()
3227 __ LoadP(cached_map, FieldMemOperand(feedback, WeakCell::kValueOffset)); in HandleMonomorphicCase()
3228 __ CmpP(cached_map, receiver_map); in HandleMonomorphicCase()
3229 __ bne(try_array); in HandleMonomorphicCase()
3231 __ SmiToPtrArrayOffset(r1, slot); in HandleMonomorphicCase()
3232 __ LoadP(handler, in HandleMonomorphicCase()
3234 __ AddP(ip, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); in HandleMonomorphicCase()
3235 __ Jump(ip); in HandleMonomorphicCase()
3239 __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister()); in Generate()
3263 __ LoadP(too_far, FieldMemOperand(feedback, FixedArray::kLengthOffset)); in HandlePolymorphicStoreCase()
3275 __ SmiToPtrArrayOffset(r0, too_far); in HandlePolymorphicStoreCase()
3276 __ AddP(too_far, feedback, r0); in HandlePolymorphicStoreCase()
3277 __ AddP(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in HandlePolymorphicStoreCase()
3278 __ AddP(pointer_reg, feedback, in HandlePolymorphicStoreCase()
3281 __ bind(&next_loop); in HandlePolymorphicStoreCase()
3282 __ LoadP(cached_map, MemOperand(pointer_reg)); in HandlePolymorphicStoreCase()
3283 __ LoadP(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); in HandlePolymorphicStoreCase()
3284 __ CmpP(receiver_map, cached_map); in HandlePolymorphicStoreCase()
3285 __ bne(&prepare_next); in HandlePolymorphicStoreCase()
3287 __ LoadP(too_far, MemOperand(pointer_reg, kPointerSize)); in HandlePolymorphicStoreCase()
3288 __ CompareRoot(too_far, Heap::kUndefinedValueRootIndex); in HandlePolymorphicStoreCase()
3289 __ bne(&transition_call); in HandlePolymorphicStoreCase()
3290 __ LoadP(pointer_reg, MemOperand(pointer_reg, kPointerSize * 2)); in HandlePolymorphicStoreCase()
3291 __ AddP(ip, pointer_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); in HandlePolymorphicStoreCase()
3292 __ Jump(ip); in HandlePolymorphicStoreCase()
3294 __ bind(&transition_call); in HandlePolymorphicStoreCase()
3295 __ LoadP(too_far, FieldMemOperand(too_far, WeakCell::kValueOffset)); in HandlePolymorphicStoreCase()
3296 __ JumpIfSmi(too_far, miss); in HandlePolymorphicStoreCase()
3298 __ LoadP(receiver_map, MemOperand(pointer_reg, kPointerSize * 2)); in HandlePolymorphicStoreCase()
3302 __ LoadRR(feedback, too_far); in HandlePolymorphicStoreCase()
3304 __ AddP(ip, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag)); in HandlePolymorphicStoreCase()
3305 __ Jump(ip); in HandlePolymorphicStoreCase()
3307 __ bind(&prepare_next); in HandlePolymorphicStoreCase()
3308 __ AddP(pointer_reg, pointer_reg, Operand(kPointerSize * 3)); in HandlePolymorphicStoreCase()
3309 __ CmpLogicalP(pointer_reg, too_far); in HandlePolymorphicStoreCase()
3310 __ blt(&next_loop); in HandlePolymorphicStoreCase()
3313 __ b(miss); in HandlePolymorphicStoreCase()
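// The store-IC loop above scans triples rather than pairs: (weak map,
// transition-or-undefined, handler). An undefined middle slot means a plain
// jump to the handler; otherwise the weak transition map is unwrapped (a smi
// means the cell was cleared, hence the miss), handed over in the feedback
// register, and the handler loaded from the third slot is entered.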
3326 __ SmiToPtrArrayOffset(r0, slot); in GenerateImpl()
3327 __ AddP(feedback, vector, r0); in GenerateImpl()
3328 __ LoadP(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); in GenerateImpl()
3338 __ bind(&try_array); in GenerateImpl()
3340 __ LoadP(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); in GenerateImpl()
3341 __ CompareRoot(scratch1, Heap::kFixedArrayMapRootIndex); in GenerateImpl()
3342 __ bne(&not_array); in GenerateImpl()
3346 __ bind(&polymorphic); in GenerateImpl()
3353 __ bind(&not_array); in GenerateImpl()
3355 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex); in GenerateImpl()
3356 __ bne(&try_poly_name); in GenerateImpl()
3359 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); in GenerateImpl()
3361 __ bind(&try_poly_name); in GenerateImpl()
3363 __ CmpP(key, feedback); in GenerateImpl()
3364 __ bne(&miss); in GenerateImpl()
3367 __ SmiToPtrArrayOffset(r0, slot); in GenerateImpl()
3368 __ AddP(feedback, vector, r0); in GenerateImpl()
3369 __ LoadP(feedback, in GenerateImpl()
3374 __ bind(&miss); in GenerateImpl()
3377 __ bind(&load_smi_map); in GenerateImpl()
3378 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); in GenerateImpl()
3379 __ b(&compare_map); in GenerateImpl()
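// GenerateImpl (above) tries the cases from cheapest to most generic:
// monomorphic (a single weak cell), polymorphic (a fixed array of map and
// handler entries), megamorphic (the megamorphic sentinel symbol), then a
// polymorphic-name fast path keyed on the cached name in the feedback slot.
// Smi receivers borrow the heap-number map, so numbers share the
// monomorphic machinery.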
3393 __ CleanseP(r14); in MaybeCallEntryHook()
3394 __ Push(r14, ip); in MaybeCallEntryHook()
3395 __ CallStub(&stub); // BRASL in MaybeCallEntryHook()
3396 __ Pop(r14, ip); in MaybeCallEntryHook()
3421 __ CleanseP(r14); in Generate()
3422 __ LoadRR(ip, r14); in Generate()
3423 __ MultiPush(kSavedRegs | ip.bit()); in Generate()
3427 __ SubP(r2, ip, Operand(kReturnAddressDistanceFromFunctionStart)); in Generate()
3431 __ lay(r3, MemOperand(sp, kNumSavedRegs * kPointerSize)); in Generate()
3436 __ LoadRR(r7, sp); in Generate()
3438 __ ClearRightImm(sp, sp, Operand(WhichPowerOf2(frame_alignment))); in Generate()
3444 __ mov(ip, Operand(entry_hook)); in Generate()
3448 __ LoadP(ToRegister(ABI_TOC_REGISTER), MemOperand(ip, kPointerSize)); in Generate()
3449 __ LoadP(ip, MemOperand(ip, 0)); in Generate()
3456 __ LoadImmP(r0, Operand::Zero()); in Generate()
3457 __ lay(sp, MemOperand(sp, -kCalleeRegisterSaveAreaSize - in Generate()
3459 __ StoreP(r0, MemOperand(sp)); in Generate()
3464 __ mov(r4, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
3467 __ mov(ip, Operand(ExternalReference( in Generate()
3470 __ Call(ip); in Generate()
3474 __ la(sp, MemOperand(sp, kCalleeRegisterSaveAreaSize + in Generate()
3479 __ LoadRR(sp, r7); in Generate()
3483 __ MultiPop(kSavedRegs | ip.bit()); in Generate()
3484 __ LoadRR(r14, ip); in Generate()
3485 __ Ret(); in Generate()
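// ProfileEntryHookStub::Generate (above) saves the caller's registers,
// passes the instrumented function's address (the return address minus
// kReturnAddressDistanceFromFunctionStart) and the caller's stack pointer to
// the entry hook, and aligns sp around the C call, so the hooked code
// observes no register or stack changes.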
3493 __ TailCallStub(&stub); in CreateArrayDispatch()
3499 __ CmpP(r5, Operand(kind)); in CreateArrayDispatch()
3501 __ TailCallStub(&stub, eq); in CreateArrayDispatch()
3505 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatch()
3528 __ AndP(r0, r5, Operand(1)); in CreateArrayDispatchOneArgument()
3529 __ bne(&normal_sequence); in CreateArrayDispatchOneArgument()
3533 __ LoadP(r7, MemOperand(sp, 0)); in CreateArrayDispatchOneArgument()
3534 __ CmpP(r7, Operand::Zero()); in CreateArrayDispatchOneArgument()
3535 __ beq(&normal_sequence); in CreateArrayDispatchOneArgument()
3543 __ TailCallStub(&stub_holey); in CreateArrayDispatchOneArgument()
3545 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
3548 __ TailCallStub(&stub); in CreateArrayDispatchOneArgument()
3552 __ AddP(r5, r5, Operand(1)); in CreateArrayDispatchOneArgument()
3554 __ LoadP(r7, FieldMemOperand(r4, 0)); in CreateArrayDispatchOneArgument()
3555 __ CompareRoot(r7, Heap::kAllocationSiteMapRootIndex); in CreateArrayDispatchOneArgument()
3556 __ Assert(eq, kExpectedAllocationSite); in CreateArrayDispatchOneArgument()
3563 __ LoadP(r6, FieldMemOperand(r4, AllocationSite::kTransitionInfoOffset)); in CreateArrayDispatchOneArgument()
3564 __ AddSmiLiteral(r6, r6, Smi::FromInt(kFastElementsKindPackedToHoley), r0); in CreateArrayDispatchOneArgument()
3565 __ StoreP(r6, FieldMemOperand(r4, AllocationSite::kTransitionInfoOffset)); in CreateArrayDispatchOneArgument()
3567 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
3572 __ CmpP(r5, Operand(kind)); in CreateArrayDispatchOneArgument()
3574 __ TailCallStub(&stub, eq); in CreateArrayDispatchOneArgument()
3578 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatchOneArgument()
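// CreateArrayDispatchOneArgument (above): a set low bit in the elements kind
// means it is already holey, and a zero array-length argument cannot create
// holes, so both cases skip straight to dispatch. Otherwise the kind is
// bumped to its holey sibling (packed and holey kinds are adjacent, hence
// the AddP of 1) and the AllocationSite transition info is updated so future
// allocations start out holey.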
3617 __ CmpP(r2, Operand::Zero()); in GenerateDispatchToArrayStub()
3618 __ bne(&not_zero_case); in GenerateDispatchToArrayStub()
3621 __ bind(&not_zero_case); in GenerateDispatchToArrayStub()
3622 __ CmpP(r2, Operand(1)); in GenerateDispatchToArrayStub()
3623 __ bgt(&not_one_case); in GenerateDispatchToArrayStub()
3626 __ bind(&not_one_case); in GenerateDispatchToArrayStub()
3628 __ TailCallStub(&stub); in GenerateDispatchToArrayStub()
3646 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3648 __ TestIfSmi(r6); in Generate()
3649 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0); in Generate()
3650 __ CompareObjectType(r6, r6, r7, MAP_TYPE); in Generate()
3651 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); in Generate()
3654 __ AssertUndefinedOrAllocationSite(r4, r6); in Generate()
3658 __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset)); in Generate()
3661 __ CmpP(r5, r3); in Generate()
3662 __ bne(&subclassing, Label::kNear); in Generate()
3666 __ CompareRoot(r4, Heap::kUndefinedValueRootIndex); in Generate()
3667 __ beq(&no_info); in Generate()
3669 __ LoadP(r5, FieldMemOperand(r4, AllocationSite::kTransitionInfoOffset)); in Generate()
3670 __ SmiUntag(r5); in Generate()
3672 __ AndP(r5, Operand(AllocationSite::ElementsKindBits::kMask)); in Generate()
3675 __ bind(&no_info); in Generate()
3678 __ bind(&subclassing); in Generate()
3679 __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2)); in Generate()
3680 __ StoreP(r3, MemOperand(sp, r1)); in Generate()
3681 __ AddP(r2, r2, Operand(3)); in Generate()
3682 __ Push(r5, r4); in Generate()
3683 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate())); in Generate()
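// The subclassing path above stores the constructor over the receiver slot
// and tail-calls Runtime::kNewArray with argc adjusted by 3 to cover the
// constructor, new.target, and allocation-site values. GenerateCase (below)
// instead dispatches purely on argument count: zero arguments, one argument
// (choosing a holey variant when the length is nonzero), or the general
// N-argument stub.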
3688 __ CmpLogicalP(r2, Operand(1)); in GenerateCase()
3691 __ TailCallStub(&stub0, lt); in GenerateCase()
3694 __ TailCallStub(&stubN, gt); in GenerateCase()
3699 __ LoadP(r5, MemOperand(sp, 0)); in GenerateCase()
3700 __ CmpP(r5, Operand::Zero()); in GenerateCase()
3704 __ TailCallStub(&stub1_holey, ne); in GenerateCase()
3708 __ TailCallStub(&stub1); in GenerateCase()
3724 __ LoadP(r5, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3726 __ TestIfSmi(r5); in Generate()
3727 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0); in Generate()
3728 __ CompareObjectType(r5, r5, r6, MAP_TYPE); in Generate()
3729 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); in Generate()
3733 __ LoadP(r5, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3735 __ LoadlB(r5, FieldMemOperand(r5, Map::kBitField2Offset)); in Generate()
3737 __ DecodeField<Map::ElementsKindBits>(r5); in Generate()
3741 __ CmpP(r5, Operand(FAST_ELEMENTS)); in Generate()
3742 __ beq(&done); in Generate()
3743 __ CmpP(r5, Operand(FAST_HOLEY_ELEMENTS)); in Generate()
3744 __ Assert(eq, kInvalidElementsKindForInternalArrayOrInternalPackedArray); in Generate()
3745 __ bind(&done); in Generate()
3749 __ CmpP(r5, Operand(FAST_ELEMENTS)); in Generate()
3750 __ beq(&fast_elements_case); in Generate()
3753 __ bind(&fast_elements_case); in Generate()
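// FastNewObjectStub (below) inlines the common new-target path: check that
// new.target is a JSFunction whose initial map's constructor points back at
// the target, allocate instance_size words in new space, install the map
// plus empty fixed arrays for properties and elements, then fill the
// in-object fields, honoring slack tracking by filling the still-unused tail
// with the one-pointer filler map instead of undefined.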
3764 __ AssertFunction(r3); in Generate()
3765 __ AssertReceiver(r5); in Generate()
3769 __ CompareObjectType(r5, r4, r4, JS_FUNCTION_TYPE); in Generate()
3770 __ bne(&new_object); in Generate()
3773 __ LoadP(r4, FieldMemOperand(r5, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3774 __ JumpIfSmi(r4, &new_object); in Generate()
3775 __ CompareObjectType(r4, r2, r2, MAP_TYPE); in Generate()
3776 __ bne(&new_object); in Generate()
3780 __ LoadP(r2, FieldMemOperand(r4, Map::kConstructorOrBackPointerOffset)); in Generate()
3781 __ CmpP(r2, r3); in Generate()
3782 __ bne(&new_object); in Generate()
3786 __ LoadlB(r6, FieldMemOperand(r4, Map::kInstanceSizeOffset)); in Generate()
3787 __ Allocate(r6, r2, r7, r8, &allocate, SIZE_IN_WORDS); in Generate()
3788 __ bind(&done_allocate); in Generate()
3791 __ StoreP(r4, FieldMemOperand(r2, JSObject::kMapOffset)); in Generate()
3792 __ LoadRoot(r5, Heap::kEmptyFixedArrayRootIndex); in Generate()
3793 __ StoreP(r5, FieldMemOperand(r2, JSObject::kPropertiesOffset)); in Generate()
3794 __ StoreP(r5, FieldMemOperand(r2, JSObject::kElementsOffset)); in Generate()
3796 __ AddP(r3, r2, Operand(JSObject::kHeaderSize - kHeapObjectTag)); in Generate()
3810 __ LoadRoot(r8, Heap::kUndefinedValueRootIndex); in Generate()
3811 __ LoadlW(r5, FieldMemOperand(r4, Map::kBitField3Offset)); in Generate()
3812 __ DecodeField<Map::ConstructionCounter>(r9, r5); in Generate()
3813 __ LoadAndTestP(r9, r9); in Generate()
3814 __ bne(&slack_tracking); in Generate()
3817 __ InitializeFieldsWithFiller(r3, r7, r8); in Generate()
3819 __ Ret(); in Generate()
3821 __ bind(&slack_tracking); in Generate()
3825 __ Add32(r5, r5, Operand(-(1 << Map::ConstructionCounter::kShift))); in Generate()
3826 __ StoreW(r5, FieldMemOperand(r4, Map::kBitField3Offset)); in Generate()
3829 __ LoadlB(r6, FieldMemOperand(r4, Map::kUnusedPropertyFieldsOffset)); in Generate()
3830 __ ShiftLeftP(r6, r6, Operand(kPointerSizeLog2)); in Generate()
3831 __ SubP(r6, r7, r6); in Generate()
3832 __ InitializeFieldsWithFiller(r3, r6, r8); in Generate()
3835 __ LoadRoot(r8, Heap::kOnePointerFillerMapRootIndex); in Generate()
3836 __ InitializeFieldsWithFiller(r3, r7, r8); in Generate()
3839 __ CmpP(r9, Operand(Map::kSlackTrackingCounterEnd)); in Generate()
3840 __ Ret(ne); in Generate()
3845 __ Push(r2, r4); in Generate()
3846 __ CallRuntime(Runtime::kFinalizeInstanceSize); in Generate()
3847 __ Pop(r2); in Generate()
3849 __ Ret(); in Generate()
3853 __ bind(&allocate); in Generate()
3857 __ ShiftLeftP(r6, r6, in Generate()
3859 __ Push(r4, r6); in Generate()
3860 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
3861 __ Pop(r4); in Generate()
3863 __ LoadlB(r7, FieldMemOperand(r4, Map::kInstanceSizeOffset)); in Generate()
3864 __ ShiftLeftP(r7, r7, Operand(kPointerSizeLog2)); in Generate()
3865 __ AddP(r7, r2, r7); in Generate()
3866 __ SubP(r7, r7, Operand(kHeapObjectTag)); in Generate()
3867 __ b(&done_allocate); in Generate()
3870 __ bind(&new_object); in Generate()
3871 __ Push(r3, r5); in Generate()
3872 __ TailCallRuntime(Runtime::kNewObject); in Generate()
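// FastNewRestParameterStub (below) walks up past any arguments adaptor
// frame, computes how many actual arguments exceed the formal count, and
// either returns an empty JSArray (no rest arguments) or allocates the
// FixedArray and JSArray in one chunk and copies the excess arguments from
// the adaptor frame.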
3882 __ AssertFunction(r3); in Generate()
3885 __ LoadRR(r4, fp); in Generate()
3889 __ LoadP(r4, MemOperand(r4, StandardFrameConstants::kCallerFPOffset)); in Generate()
3893 __ LoadP(ip, MemOperand(r4, StandardFrameConstants::kFunctionOffset)); in Generate()
3894 __ CmpP(ip, r3); in Generate()
3895 __ b(&ok, Label::kNear); in Generate()
3896 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); in Generate()
3897 __ bind(&ok); in Generate()
3903 __ LoadP(r4, MemOperand(r4, StandardFrameConstants::kCallerFPOffset)); in Generate()
3904 __ LoadP(ip, MemOperand(r4, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate()
3905 __ CmpSmiLiteral(ip, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0); in Generate()
3906 __ bne(&no_rest_parameters); in Generate()
3911 __ LoadP(r2, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset)); in Generate()
3912 __ LoadP(r5, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); in Generate()
3913 __ LoadW( in Generate()
3916 __ SmiTag(r5); in Generate()
3918 __ SubP(r2, r2, r5); in Generate()
3919 __ bgt(&rest_parameters); in Generate()
3922 __ bind(&no_rest_parameters); in Generate()
3931 __ Allocate(JSArray::kSize, r2, r3, r4, &allocate, NO_ALLOCATION_FLAGS); in Generate()
3932 __ bind(&done_allocate); in Generate()
3935 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, r3); in Generate()
3936 __ StoreP(r3, FieldMemOperand(r2, JSArray::kMapOffset), r0); in Generate()
3937 __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex); in Generate()
3938 __ StoreP(r3, FieldMemOperand(r2, JSArray::kPropertiesOffset), r0); in Generate()
3939 __ StoreP(r3, FieldMemOperand(r2, JSArray::kElementsOffset), r0); in Generate()
3940 __ LoadImmP(r3, Operand::Zero()); in Generate()
3941 __ StoreP(r3, FieldMemOperand(r2, JSArray::kLengthOffset), r0); in Generate()
3943 __ Ret(); in Generate()
3946 __ bind(&allocate); in Generate()
3949 __ Push(Smi::FromInt(JSArray::kSize)); in Generate()
3950 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
3952 __ b(&done_allocate); in Generate()
3955 __ bind(&rest_parameters); in Generate()
3958 __ SmiToPtrArrayOffset(r8, r2); in Generate()
3959 __ AddP(r4, r4, r8); in Generate()
3960 __ AddP(r4, r4, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate()
3973 __ mov(r9, Operand(JSArray::kSize + FixedArray::kHeaderSize)); in Generate()
3974 __ AddP(r9, r9, r8); in Generate()
3975 __ Allocate(r9, r5, r6, r7, &allocate, NO_ALLOCATION_FLAGS); in Generate()
3976 __ bind(&done_allocate); in Generate()
3979 __ LoadRoot(r3, Heap::kFixedArrayMapRootIndex); in Generate()
3980 __ StoreP(r3, FieldMemOperand(r5, FixedArray::kMapOffset), r0); in Generate()
3981 __ StoreP(r2, FieldMemOperand(r5, FixedArray::kLengthOffset), r0); in Generate()
3982 __ AddP(r6, r5, in Generate()
3986 __ SmiUntag(r1, r2); in Generate()
3987 // __ mtctr(r0); in Generate()
3988 __ bind(&loop); in Generate()
3989 __ lay(r4, MemOperand(r4, -kPointerSize)); in Generate()
3990 __ LoadP(ip, MemOperand(r4)); in Generate()
3991 __ la(r6, MemOperand(r6, kPointerSize)); in Generate()
3992 __ StoreP(ip, MemOperand(r6)); in Generate()
3993 // __ bdnz(&loop); in Generate()
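// (The commented-out mtctr/bdnz above are PowerPC count-register idioms left
// from the port; BranchOnCount on the next line is the s390 equivalent.)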
3994 __ BranchOnCount(r1, &loop); in Generate()
3995 __ AddP(r6, r6, Operand(kPointerSize)); in Generate()
3999 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, r3); in Generate()
4000 __ StoreP(r3, MemOperand(r6, JSArray::kMapOffset)); in Generate()
4001 __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex); in Generate()
4002 __ StoreP(r3, MemOperand(r6, JSArray::kPropertiesOffset)); in Generate()
4003 __ StoreP(r5, MemOperand(r6, JSArray::kElementsOffset)); in Generate()
4004 __ StoreP(r2, MemOperand(r6, JSArray::kLengthOffset)); in Generate()
4006 __ AddP(r2, r6, Operand(kHeapObjectTag)); in Generate()
4007 __ Ret(); in Generate()
4011 __ bind(&allocate); in Generate()
4012 __ CmpP(r9, Operand(kMaxRegularHeapObjectSize)); in Generate()
4013 __ bgt(&too_big_for_new_space); in Generate()
4016 __ SmiTag(r9); in Generate()
4017 __ Push(r2, r4, r9); in Generate()
4018 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
4019 __ LoadRR(r5, r2); in Generate()
4020 __ Pop(r2, r4); in Generate()
4022 __ b(&done_allocate); in Generate()
4025 __ bind(&too_big_for_new_space); in Generate()
4026 __ push(r3); in Generate()
4027 __ TailCallRuntime(Runtime::kNewRestParameter); in Generate()
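// FastNewSloppyArgumentsStub (below) builds up to three objects in one
// allocation: the JSSloppyArgumentsObject itself, an optional parameter map
// whose entries hold context slot indices for mapped parameters (the backing
// store gets the-hole for those), and a FixedArray backing store for the
// remaining arguments; the skip2..skip6 labels are just inline if/else
// selections of sizes and maps.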
4038 __ AssertFunction(r3); in Generate()
4041 __ LoadRR(r9, fp); in Generate()
4045 __ LoadP(r9, MemOperand(r9, StandardFrameConstants::kCallerFPOffset)); in Generate()
4049 __ LoadP(ip, MemOperand(r9, StandardFrameConstants::kFunctionOffset)); in Generate()
4050 __ CmpP(ip, r3); in Generate()
4051 __ beq(&ok, Label::kNear); in Generate()
4052 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); in Generate()
4053 __ bind(&ok); in Generate()
4057 __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); in Generate()
4058 __ LoadW( in Generate()
4061 __ SmiTag(r4); in Generate()
4063 __ SmiToPtrArrayOffset(r5, r4); in Generate()
4064 __ AddP(r5, r9, r5); in Generate()
4065 __ AddP(r5, r5, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate()
4077 __ LoadP(r6, MemOperand(r9, StandardFrameConstants::kCallerFPOffset)); in Generate()
4078 __ LoadP(r2, MemOperand(r6, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate()
4079 __ CmpSmiLiteral(r2, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0); in Generate()
4080 __ beq(&adaptor_frame); in Generate()
4083 __ LoadRR(r7, r4); in Generate()
4084 __ LoadRR(r8, r4); in Generate()
4085 __ b(&try_allocate); in Generate()
4088 __ bind(&adaptor_frame); in Generate()
4089 __ LoadP(r7, MemOperand(r6, ArgumentsAdaptorFrameConstants::kLengthOffset)); in Generate()
4090 __ SmiToPtrArrayOffset(r5, r7); in Generate()
4091 __ AddP(r5, r5, r6); in Generate()
4092 __ AddP(r5, r5, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate()
4097 __ CmpP(r4, r7); in Generate()
4099 __ LoadRR(r8, r4); in Generate()
4100 __ blt(&skip); in Generate()
4101 __ LoadRR(r8, r7); in Generate()
4102 __ bind(&skip); in Generate()
4104 __ bind(&try_allocate); in Generate()
4111 __ CmpSmiLiteral(r8, Smi::kZero, r0); in Generate()
4113 __ bne(&skip2); in Generate()
4114 __ LoadImmP(r1, Operand::Zero()); in Generate()
4115 __ b(&skip3); in Generate()
4116 __ bind(&skip2); in Generate()
4117 __ SmiToPtrArrayOffset(r1, r8); in Generate()
4118 __ AddP(r1, r1, Operand(kParameterMapHeaderSize)); in Generate()
4119 __ bind(&skip3); in Generate()
4122 __ SmiToPtrArrayOffset(r6, r7); in Generate()
4123 __ AddP(r1, r1, r6); in Generate()
4124 __ AddP(r1, r1, Operand(FixedArray::kHeaderSize)); in Generate()
4127 __ AddP(r1, r1, Operand(JSSloppyArgumentsObject::kSize)); in Generate()
4130 __ Allocate(r1, r2, r1, r6, &runtime, NO_ALLOCATION_FLAGS); in Generate()
4140 __ LoadP(r6, NativeContextMemOperand()); in Generate()
4141 __ CmpP(r8, Operand::Zero()); in Generate()
4143 __ bne(&skip4); in Generate()
4144 __ LoadP(r6, MemOperand(r6, kNormalOffset)); in Generate()
4145 __ b(&skip5); in Generate()
4146 __ bind(&skip4); in Generate()
4147 __ LoadP(r6, MemOperand(r6, kAliasedOffset)); in Generate()
4148 __ bind(&skip5); in Generate()
4154 __ StoreP(r6, FieldMemOperand(r2, JSObject::kMapOffset), r0); in Generate()
4155 __ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex); in Generate()
4156 __ StoreP(r1, FieldMemOperand(r2, JSObject::kPropertiesOffset), r0); in Generate()
4157 __ StoreP(r1, FieldMemOperand(r2, JSObject::kElementsOffset), r0); in Generate()
4160 __ AssertNotSmi(r3); in Generate()
4161 __ StoreP(r3, FieldMemOperand(r2, JSSloppyArgumentsObject::kCalleeOffset), in Generate()
4165 __ AssertSmi(r7); in Generate()
4166 __ StoreP(r7, FieldMemOperand(r2, JSSloppyArgumentsObject::kLengthOffset), in Generate()
4172 __ AddP(r6, r2, Operand(JSSloppyArgumentsObject::kSize)); in Generate()
4173 __ StoreP(r6, FieldMemOperand(r2, JSObject::kElementsOffset), r0); in Generate()
4181 __ CmpSmiLiteral(r8, Smi::kZero, r0); in Generate()
4183 __ bne(&skip6); in Generate()
4186 __ LoadRR(r3, r6); in Generate()
4187 __ b(&skip_parameter_map); in Generate()
4188 __ bind(&skip6); in Generate()
4190 __ LoadRoot(r7, Heap::kSloppyArgumentsElementsMapRootIndex); in Generate()
4191 __ StoreP(r7, FieldMemOperand(r6, FixedArray::kMapOffset), r0); in Generate()
4192 __ AddSmiLiteral(r7, r8, Smi::FromInt(2), r0); in Generate()
4193 __ StoreP(r7, FieldMemOperand(r6, FixedArray::kLengthOffset), r0); in Generate()
4194 __ StoreP(cp, FieldMemOperand(r6, FixedArray::kHeaderSize + 0 * kPointerSize), in Generate()
4196 __ SmiToPtrArrayOffset(r7, r8); in Generate()
4197 __ AddP(r7, r7, r6); in Generate()
4198 __ AddP(r7, r7, Operand(kParameterMapHeaderSize)); in Generate()
4199 __ StoreP(r7, FieldMemOperand(r6, FixedArray::kHeaderSize + 1 * kPointerSize), in Generate()
4211 __ LoadRR(r7, r8); in Generate()
4212 __ AddSmiLiteral(r1, r4, Smi::FromInt(Context::MIN_CONTEXT_SLOTS), r0); in Generate()
4213 __ SubP(r1, r1, r8); in Generate()
4214 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); in Generate()
4215 __ SmiToPtrArrayOffset(r3, r7); in Generate()
4216 __ AddP(r3, r3, r6); in Generate()
4217 __ AddP(r3, r3, Operand(kParameterMapHeaderSize)); in Generate()
4224 __ SmiUntag(r7); in Generate()
4225 __ push(r4); in Generate()
4226 __ LoadRR(r4, r7); in Generate()
4227 __ ShiftLeftP(r7, r7, Operand(kPointerSizeLog2)); in Generate()
4228 __ AddP(r9, r3, r7); in Generate()
4229 __ AddP(r7, r6, r7); in Generate()
4230 __ AddP(r9, r9, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in Generate()
4231 __ AddP(r7, r7, Operand(kParameterMapHeaderSize - kHeapObjectTag)); in Generate()
4233 __ bind(&parameters_loop); in Generate()
4234 __ StoreP(r1, MemOperand(r7, -kPointerSize)); in Generate()
4235 __ lay(r7, MemOperand(r7, -kPointerSize)); in Generate()
4236 __ StoreP(ip, MemOperand(r9, -kPointerSize)); in Generate()
4237 __ lay(r9, MemOperand(r9, -kPointerSize)); in Generate()
4238 __ AddSmiLiteral(r1, r1, Smi::FromInt(1), r0); in Generate()
4239 __ BranchOnCount(r4, &parameters_loop); in Generate()
4240 __ pop(r4); in Generate()
4243 __ LoadP(r7, FieldMemOperand(r2, JSSloppyArgumentsObject::kLengthOffset)); in Generate()
4245 __ bind(&skip_parameter_map); in Generate()
4252 __ LoadRoot(r1, Heap::kFixedArrayMapRootIndex); in Generate()
4253 __ StoreP(r1, FieldMemOperand(r3, FixedArray::kMapOffset), r0); in Generate()
4254 __ StoreP(r7, FieldMemOperand(r3, FixedArray::kLengthOffset), r0); in Generate()
4255 __ SubP(r1, r7, r8); in Generate()
4256 __ Ret(eq); in Generate()
4259 __ SmiUntag(r1); in Generate()
4260 __ LoadRR(r4, r1); in Generate()
4262 __ SmiToPtrArrayOffset(r0, r8); in Generate()
4263 __ SubP(r5, r5, r0); in Generate()
4264 __ AddP(r1, r3, r0); in Generate()
4265 __ AddP(r1, r1, in Generate()
4268 __ bind(&arguments_loop); in Generate()
4269 __ LoadP(r6, MemOperand(r5, -kPointerSize)); in Generate()
4270 __ lay(r5, MemOperand(r5, -kPointerSize)); in Generate()
4271 __ StoreP(r6, MemOperand(r1, kPointerSize)); in Generate()
4272 __ la(r1, MemOperand(r1, kPointerSize)); in Generate()
4273 __ BranchOnCount(r4, &arguments_loop); in Generate()
4276 __ Ret(); in Generate()
4280 __ bind(&runtime); in Generate()
4281 __ Push(r3, r5, r7); in Generate()
4282 __ TailCallRuntime(Runtime::kNewSloppyArguments); in Generate()
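// FastNewStrictArgumentsStub (below) is the simpler sibling: no parameter
// map and no aliasing, just the actual arguments copied into a FixedArray
// and wrapped in a JSStrictArgumentsObject, with the same split between
// new-space allocation and the runtime keyed on kMaxRegularHeapObjectSize.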
4292 __ AssertFunction(r3); in Generate()
4295 __ LoadRR(r4, fp); in Generate()
4299 __ LoadP(r4, MemOperand(r4, StandardFrameConstants::kCallerFPOffset)); in Generate()
4303 __ LoadP(ip, MemOperand(r4, StandardFrameConstants::kFunctionOffset)); in Generate()
4304 __ CmpP(ip, r3); in Generate()
4305 __ beq(&ok, Label::kNear); in Generate()
4306 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); in Generate()
4307 __ bind(&ok); in Generate()
4312 __ LoadP(r5, MemOperand(r4, StandardFrameConstants::kCallerFPOffset)); in Generate()
4313 __ LoadP(ip, MemOperand(r5, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate()
4314 __ CmpSmiLiteral(ip, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0); in Generate()
4315 __ beq(&arguments_adaptor); in Generate()
4317 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); in Generate()
4318 __ LoadW(r2, FieldMemOperand( in Generate()
4321 __ SmiTag(r2); in Generate()
4323 __ SmiToPtrArrayOffset(r8, r2); in Generate()
4324 __ AddP(r4, r4, r8); in Generate()
4326 __ b(&arguments_done); in Generate()
4327 __ bind(&arguments_adaptor); in Generate()
4329 __ LoadP(r2, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset)); in Generate()
4330 __ SmiToPtrArrayOffset(r8, r2); in Generate()
4331 __ AddP(r4, r5, r8); in Generate()
4333 __ bind(&arguments_done); in Generate()
4334 __ AddP(r4, r4, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate()
4347 __ mov(r9, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize)); in Generate()
4348 __ AddP(r9, r9, r8); in Generate()
4349 __ Allocate(r9, r5, r6, r7, &allocate, NO_ALLOCATION_FLAGS); in Generate()
4350 __ bind(&done_allocate); in Generate()
4353 __ LoadRoot(r3, Heap::kFixedArrayMapRootIndex); in Generate()
4354 __ StoreP(r3, FieldMemOperand(r5, FixedArray::kMapOffset), r0); in Generate()
4355 __ StoreP(r2, FieldMemOperand(r5, FixedArray::kLengthOffset), r0); in Generate()
4356 __ AddP(r6, r5, in Generate()
4360 __ SmiUntag(r1, r2); in Generate()
4361 __ LoadAndTestP(r1, r1); in Generate()
4362 __ beq(&done_loop); in Generate()
4363 __ bind(&loop); in Generate()
4364 __ lay(r4, MemOperand(r4, -kPointerSize)); in Generate()
4365 __ LoadP(ip, MemOperand(r4)); in Generate()
4366 __ la(r6, MemOperand(r6, kPointerSize)); in Generate()
4367 __ StoreP(ip, MemOperand(r6)); in Generate()
4368 __ BranchOnCount(r1, &loop); in Generate()
4369 __ bind(&done_loop); in Generate()
4370 __ AddP(r6, r6, Operand(kPointerSize)); in Generate()
4374 __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, r3); in Generate()
4375 __ StoreP(r3, MemOperand(r6, JSStrictArgumentsObject::kMapOffset)); in Generate()
4376 __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex); in Generate()
4377 __ StoreP(r3, MemOperand(r6, JSStrictArgumentsObject::kPropertiesOffset)); in Generate()
4378 __ StoreP(r5, MemOperand(r6, JSStrictArgumentsObject::kElementsOffset)); in Generate()
4379 __ StoreP(r2, MemOperand(r6, JSStrictArgumentsObject::kLengthOffset)); in Generate()
4381 __ AddP(r2, r6, Operand(kHeapObjectTag)); in Generate()
4382 __ Ret(); in Generate()
4386 __ bind(&allocate); in Generate()
4387 __ CmpP(r9, Operand(kMaxRegularHeapObjectSize)); in Generate()
4388 __ bgt(&too_big_for_new_space); in Generate()
4391 __ SmiTag(r9); in Generate()
4392 __ Push(r2, r4, r9); in Generate()
4393 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
4394 __ LoadRR(r5, r2); in Generate()
4395 __ Pop(r2, r4); in Generate()
4397 __ b(&done_allocate); in Generate()
4400 __ bind(&too_big_for_new_space); in Generate()
4401 __ push(r3); in Generate()
4402 __ TailCallRuntime(Runtime::kNewStrictArguments); in Generate()
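// A rough C++ sketch (assumed shapes, not the V8 API) of the HandleScope
// bookkeeping CallApiFunctionAndReturn performs around the API call below:
// save next/limit, bump level, call, restore next, drop level, and take the
// slow path to free extension blocks only when limit moved during the call.
struct HandleScopeData {
  void** next;   // kNextOffset in the code below
  void** limit;  // kLimitOffset
  int level;     // kLevelOffset
};

template <typename ApiFn>
void* CallWithHandleScope(HandleScopeData* d, ApiFn api_call,
                          void (*delete_extensions)(HandleScopeData*)) {
  void** saved_next = d->next;
  void** saved_limit = d->limit;
  d->level++;
  void* result = api_call();
  d->next = saved_next;
  d->level--;  // the stub verifies the level in debug builds first
  if (d->limit != saved_limit) {  // the callee allocated extension blocks
    d->limit = saved_limit;       // mirrors the StoreP on the slow path
    delete_extensions(d);
  }
  return result;
}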
4433 __ mov(scratch, Operand(ExternalReference::is_profiling_address(isolate))); in CallApiFunctionAndReturn()
4434 __ LoadlB(scratch, MemOperand(scratch, 0)); in CallApiFunctionAndReturn()
4435 __ CmpP(scratch, Operand::Zero()); in CallApiFunctionAndReturn()
4439 __ beq(&profiler_disabled, Label::kNear); in CallApiFunctionAndReturn()
4440 __ mov(scratch, Operand(thunk_ref)); in CallApiFunctionAndReturn()
4441 __ b(&end_profiler_check, Label::kNear); in CallApiFunctionAndReturn()
4442 __ bind(&profiler_disabled); in CallApiFunctionAndReturn()
4443 __ LoadRR(scratch, function_address); in CallApiFunctionAndReturn()
4444 __ bind(&end_profiler_check); in CallApiFunctionAndReturn()
4451 __ mov(r9, Operand(next_address)); in CallApiFunctionAndReturn()
4452 __ LoadP(r6, MemOperand(r9, kNextOffset)); in CallApiFunctionAndReturn()
4453 __ LoadP(r7, MemOperand(r9, kLimitOffset)); in CallApiFunctionAndReturn()
4454 __ LoadlW(r8, MemOperand(r9, kLevelOffset)); in CallApiFunctionAndReturn()
4455 __ AddP(r8, Operand(1)); in CallApiFunctionAndReturn()
4456 __ StoreW(r8, MemOperand(r9, kLevelOffset)); in CallApiFunctionAndReturn()
4460 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
4461 __ PrepareCallCFunction(1, r2); in CallApiFunctionAndReturn()
4462 __ mov(r2, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
4463 __ CallCFunction(ExternalReference::log_enter_external_function(isolate), in CallApiFunctionAndReturn()
4465 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
4476 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
4477 __ PrepareCallCFunction(1, r2); in CallApiFunctionAndReturn()
4478 __ mov(r2, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
4479 __ CallCFunction(ExternalReference::log_leave_external_function(isolate), in CallApiFunctionAndReturn()
4481 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
4490 __ LoadP(r2, return_value_operand); in CallApiFunctionAndReturn()
4491 __ bind(&return_value_loaded); in CallApiFunctionAndReturn()
4494 __ StoreP(r6, MemOperand(r9, kNextOffset)); in CallApiFunctionAndReturn()
4495 if (__ emit_debug_code()) { in CallApiFunctionAndReturn()
4496 __ LoadlW(r3, MemOperand(r9, kLevelOffset)); in CallApiFunctionAndReturn()
4497 __ CmpP(r3, r8); in CallApiFunctionAndReturn()
4498 __ Check(eq, kUnexpectedLevelAfterReturnFromApiCall); in CallApiFunctionAndReturn()
4500 __ SubP(r8, Operand(1)); in CallApiFunctionAndReturn()
4501 __ StoreW(r8, MemOperand(r9, kLevelOffset)); in CallApiFunctionAndReturn()
4502 __ CmpP(r7, MemOperand(r9, kLimitOffset)); in CallApiFunctionAndReturn()
4503 __ bne(&delete_allocated_handles, Label::kNear); in CallApiFunctionAndReturn()
4506 __ bind(&leave_exit_frame); in CallApiFunctionAndReturn()
4509 __ LoadP(cp, *context_restore_operand); in CallApiFunctionAndReturn()
4513 __ l(r6, *stack_space_operand); in CallApiFunctionAndReturn()
4515 __ mov(r6, Operand(stack_space)); in CallApiFunctionAndReturn()
4517 __ LeaveExitFrame(false, r6, !restore_context, stack_space_operand != NULL); in CallApiFunctionAndReturn()
4520 __ mov(r7, Operand(ExternalReference::scheduled_exception_address(isolate))); in CallApiFunctionAndReturn()
4521 __ LoadP(r7, MemOperand(r7)); in CallApiFunctionAndReturn()
4522 __ CompareRoot(r7, Heap::kTheHoleValueRootIndex); in CallApiFunctionAndReturn()
4523 __ bne(&promote_scheduled_exception, Label::kNear); in CallApiFunctionAndReturn()
4525 __ b(r14); in CallApiFunctionAndReturn()
4528 __ bind(&promote_scheduled_exception); in CallApiFunctionAndReturn()
4529 __ TailCallRuntime(Runtime::kPromoteScheduledException); in CallApiFunctionAndReturn()
4532 __ bind(&delete_allocated_handles); in CallApiFunctionAndReturn()
4533 __ StoreP(r7, MemOperand(r9, kLimitOffset)); in CallApiFunctionAndReturn()
4534 __ LoadRR(r6, r2); in CallApiFunctionAndReturn()
4535 __ PrepareCallCFunction(1, r7); in CallApiFunctionAndReturn()
4536 __ mov(r2, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
4537 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), in CallApiFunctionAndReturn()
4539 __ LoadRR(r2, r6); in CallApiFunctionAndReturn()
4540 __ b(&leave_exit_frame, Label::kNear); in CallApiFunctionAndReturn()
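// CallApiCallbackStub (below) materializes a v8::FunctionCallbackInfo on the
// stack: it pushes the implicit slots (context, callee, call data, the
// return-value pair, isolate, holder), then records the implicit-args
// pointer, the address of the first argument, and the argument count in the
// exit frame before making the thunked API call.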
4576 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate()
4579 __ push(context); in Generate()
4582 __ LoadP(context, FieldMemOperand(callee, JSFunction::kContextOffset)); in Generate()
4586 __ push(callee); in Generate()
4589 __ push(call_data); in Generate()
4593 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); in Generate()
4596 __ push(scratch); in Generate()
4598 __ push(scratch); in Generate()
4600 __ mov(scratch, Operand(ExternalReference::isolate_address(masm->isolate()))); in Generate()
4601 __ push(scratch); in Generate()
4603 __ push(holder); in Generate()
4606 __ LoadRR(scratch, sp); in Generate()
4620 __ EnterExitFrame(false, kApiStackSpace); in Generate()
4625 __ AddP(r2, sp, Operand(kFunctionCallbackInfoOffset)); in Generate()
4627 __ StoreP(scratch, MemOperand(r2, 0 * kPointerSize)); in Generate()
4629 __ AddP(ip, scratch, Operand((FCA::kArgsLength - 1 + argc()) * kPointerSize)); in Generate()
4630 __ StoreP(ip, MemOperand(r2, 1 * kPointerSize)); in Generate()
4632 __ LoadImmP(ip, Operand(argc())); in Generate()
4633 __ StoreW(ip, MemOperand(r2, 2 * kPointerSize)); in Generate()
4683 __ push(receiver); in Generate()
4685 __ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset)); in Generate()
4686 __ push(scratch); in Generate()
4687 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); in Generate()
4688 __ Push(scratch, scratch); in Generate()
4689 __ mov(scratch, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
4690 __ Push(scratch, holder); in Generate()
4691 __ Push(Smi::kZero); // should_throw_on_error -> false in Generate()
4692 __ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset)); in Generate()
4693 __ push(scratch); in Generate()
4699 __ LoadRR(r2, sp); // r2 = Handle<Name> in Generate()
4700 __ AddP(r3, r2, Operand(1 * kPointerSize)); // r3 = v8::PCI::args_ in Generate()
4724 __ EnterExitFrame(false, apiStackSpace); in Generate()
4728 __ StoreP(r2, MemOperand(sp, arg0Slot * kPointerSize)); in Generate()
4729 __ AddP(r2, sp, Operand(arg0Slot * kPointerSize)); in Generate()
4734 __ StoreP(r3, MemOperand(sp, accessorInfoSlot * kPointerSize)); in Generate()
4735 __ AddP(r3, sp, Operand(accessorInfoSlot * kPointerSize)); in Generate()
4741 __ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset)); in Generate()
4742 __ LoadP(api_function_address, in Generate()
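// CallApiGetterStub (above) builds a v8::PropertyCallbackInfo the same way:
// receiver, data, the return-value pair, isolate, holder, and a false
// should_throw_on_error, with the property name handle left at sp so that r2
// is the Handle<Name> and r3 points at the PCI args_ block passed to the
// AccessorInfo getter.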
4752 #undef __