Lines Matching full:__
24 #define __ ACCESS_MASM(masm) macro
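
Note: throughout this listing, __ is V8's shorthand for ACCESS_MASM(masm), which expands to masm->, so every "__ foo(...)" line is a call on the current MacroAssembler. A minimal self-contained sketch of the convention (MockAssembler is a stand-in for illustration, not V8's real MacroAssembler):

#include <cstdio>

struct MockAssembler {
  void push(int reg) { std::printf("push r%d\n", reg); }
  void Ret() { std::printf("blr\n"); }
};

#define ACCESS_MASM(masm) masm->
#define __ ACCESS_MASM(masm)

void GenerateDemo(MockAssembler* masm) {
  const int r4 = 4;
  __ push(r4);  // preprocesses to: masm->push(r4);
  __ Ret();     // preprocesses to: masm->Ret();
}

int main() {
  MockAssembler masm;
  GenerateDemo(&masm);
}
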
27 __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2)); in Generate()
28 __ StorePX(r4, MemOperand(sp, r0)); in Generate()
29 __ push(r4); in Generate()
30 __ push(r5); in Generate()
31 __ addi(r3, r3, Operand(3)); in Generate()
32 __ TailCallRuntime(Runtime::kNewArray); in Generate()
69 __ push(descriptor.GetRegisterParameter(i)); in GenerateLightweightMiss()
71 __ CallExternalReference(miss, param_count); in GenerateLightweightMiss()
74 __ Ret(); in GenerateLightweightMiss()
94 __ push(scratch); in Generate()
100 __ lfd(double_scratch, MemOperand(input_reg, double_offset)); in Generate()
103 __ ConvertDoubleToInt64(double_scratch, in Generate()
111 __ TestIfInt32(result_reg, r0); in Generate()
113 __ TestIfInt32(scratch, result_reg, r0); in Generate()
115 __ beq(&fastpath_done); in Generate()
118 __ Push(scratch_high, scratch_low); in Generate()
122 __ lwz(scratch_high, in Generate()
124 __ lwz(scratch_low, in Generate()
127 __ ExtractBitMask(scratch, scratch_high, HeapNumber::kExponentMask); in Generate()
131 __ subi(scratch, scratch, Operand(HeapNumber::kExponentBias + 1)); in Generate()
136 __ cmpi(scratch, Operand(83)); in Generate()
137 __ bge(&out_of_range); in Generate()
144 __ subfic(scratch, scratch, Operand(51)); in Generate()
145 __ cmpi(scratch, Operand::Zero()); in Generate()
146 __ ble(&only_low); in Generate()
149 __ srw(scratch_low, scratch_low, scratch); in Generate()
153 __ subfic(scratch, scratch, Operand(32)); in Generate()
154 __ ExtractBitMask(result_reg, scratch_high, HeapNumber::kMantissaMask); in Generate()
157 __ oris(result_reg, result_reg, in Generate()
159 __ slw(r0, result_reg, scratch); in Generate()
160 __ orx(result_reg, scratch_low, r0); in Generate()
161 __ b(&negate); in Generate()
163 __ bind(&out_of_range); in Generate()
164 __ mov(result_reg, Operand::Zero()); in Generate()
165 __ b(&done); in Generate()
167 __ bind(&only_low); in Generate()
170 __ neg(scratch, scratch); in Generate()
171 __ slw(result_reg, scratch_low, scratch); in Generate()
173 __ bind(&negate); in Generate()
180 __ srawi(r0, scratch_high, 31); in Generate()
182 __ srdi(r0, r0, Operand(32)); in Generate()
184 __ xor_(result_reg, result_reg, r0); in Generate()
185 __ srwi(r0, scratch_high, Operand(31)); in Generate()
186 __ add(result_reg, result_reg, r0); in Generate()
188 __ bind(&done); in Generate()
189 __ Pop(scratch_high, scratch_low); in Generate()
191 __ bind(&fastpath_done); in Generate()
192 __ pop(scratch); in Generate()
194 __ Ret(); in Generate()
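
The slow path above (lines 118-189) truncates an out-of-range double to its low 32 integer bits by hand: it extracts the biased exponent, returns zero when the cutoff at line 136 says the low word must be zero, reassembles the low word from shifted mantissa bits, and applies the sign with the srawi/xor/add sequence at lines 180-186. A portable C++ reconstruction of that computation (my sketch, not V8 code; it implements ECMAScript-style ToInt32 truncation):

#include <cstdint>
#include <cstdio>
#include <cstring>

int32_t DoubleToInt32(double input) {
  uint64_t bits;
  std::memcpy(&bits, &input, sizeof(bits));
  int exponent = static_cast<int>((bits >> 52) & 0x7FF) - 1023;
  // Either |input| < 1, or the value is so large (or NaN/Infinity) that
  // the low 32 bits of its integer part are zero; mirrors the line-136 cutoff.
  if (exponent < 0 || exponent > 83) return 0;
  // 52-bit stored mantissa with the implicit leading 1 restored.
  uint64_t mantissa = (bits & ((uint64_t{1} << 52) - 1)) | (uint64_t{1} << 52);
  // The integer part is mantissa * 2^(exponent - 52); keep its low 32 bits.
  uint32_t result =
      exponent <= 52 ? static_cast<uint32_t>(mantissa >> (52 - exponent))
                     : static_cast<uint32_t>(mantissa << (exponent - 52));
  // Apply the sign, as the srawi/xor/add sequence at lines 180-186 does.
  if (bits >> 63) result = ~result + 1;
  return static_cast<int32_t>(result);
}

int main() {
  std::printf("%d %d %d\n", DoubleToInt32(3.9), DoubleToInt32(-2.5),
              DoubleToInt32(4294967298.0));  // 3 -2 2
}
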
205 __ cmp(r3, r4); in EmitIdenticalObjectComparison()
206 __ bne(&not_identical); in EmitIdenticalObjectComparison()
214 __ CompareObjectType(r3, r7, r7, FIRST_JS_RECEIVER_TYPE); in EmitIdenticalObjectComparison()
215 __ bge(slow); in EmitIdenticalObjectComparison()
217 __ cmpi(r7, Operand(SYMBOL_TYPE)); in EmitIdenticalObjectComparison()
218 __ beq(slow); in EmitIdenticalObjectComparison()
220 __ cmpi(r7, Operand(SIMD128_VALUE_TYPE)); in EmitIdenticalObjectComparison()
221 __ beq(slow); in EmitIdenticalObjectComparison()
223 __ CompareObjectType(r3, r7, r7, HEAP_NUMBER_TYPE); in EmitIdenticalObjectComparison()
224 __ beq(&heap_number); in EmitIdenticalObjectComparison()
227 __ cmpi(r7, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitIdenticalObjectComparison()
228 __ bge(slow); in EmitIdenticalObjectComparison()
230 __ cmpi(r7, Operand(SYMBOL_TYPE)); in EmitIdenticalObjectComparison()
231 __ beq(slow); in EmitIdenticalObjectComparison()
233 __ cmpi(r7, Operand(SIMD128_VALUE_TYPE)); in EmitIdenticalObjectComparison()
234 __ beq(slow); in EmitIdenticalObjectComparison()
239 __ cmpi(r7, Operand(ODDBALL_TYPE)); in EmitIdenticalObjectComparison()
240 __ bne(&return_equal); in EmitIdenticalObjectComparison()
241 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); in EmitIdenticalObjectComparison()
242 __ cmp(r3, r5); in EmitIdenticalObjectComparison()
243 __ bne(&return_equal); in EmitIdenticalObjectComparison()
246 __ li(r3, Operand(GREATER)); in EmitIdenticalObjectComparison()
249 __ li(r3, Operand(LESS)); in EmitIdenticalObjectComparison()
251 __ Ret(); in EmitIdenticalObjectComparison()
256 __ bind(&return_equal); in EmitIdenticalObjectComparison()
258 __ li(r3, Operand(GREATER)); // Things aren't less than themselves. in EmitIdenticalObjectComparison()
260 __ li(r3, Operand(LESS)); // Things aren't greater than themselves. in EmitIdenticalObjectComparison()
262 __ li(r3, Operand(EQUAL)); // Things are <=, >=, ==, === themselves. in EmitIdenticalObjectComparison()
264 __ Ret(); in EmitIdenticalObjectComparison()
270 __ bind(&heap_number); in EmitIdenticalObjectComparison()
277 __ lwz(r5, FieldMemOperand(r3, HeapNumber::kExponentOffset)); in EmitIdenticalObjectComparison()
280 __ ExtractBitMask(r6, r5, HeapNumber::kExponentMask); in EmitIdenticalObjectComparison()
281 __ cmpli(r6, Operand(0x7ff)); in EmitIdenticalObjectComparison()
282 __ bne(&return_equal); in EmitIdenticalObjectComparison()
285 __ slwi(r5, r5, Operand(HeapNumber::kNonMantissaBitsInTopWord)); in EmitIdenticalObjectComparison()
287 __ lwz(r6, FieldMemOperand(r3, HeapNumber::kMantissaOffset)); in EmitIdenticalObjectComparison()
288 __ orx(r3, r6, r5); in EmitIdenticalObjectComparison()
289 __ cmpi(r3, Operand::Zero()); in EmitIdenticalObjectComparison()
296 __ li(r4, Operand((cond == le) ? GREATER : LESS)); in EmitIdenticalObjectComparison()
297 __ isel(eq, r3, r3, r4); in EmitIdenticalObjectComparison()
300 __ Ret(eq); in EmitIdenticalObjectComparison()
302 __ li(r3, Operand(GREATER)); // NaN <= NaN should fail. in EmitIdenticalObjectComparison()
304 __ li(r3, Operand(LESS)); // NaN >= NaN should fail. in EmitIdenticalObjectComparison()
308 __ Ret(); in EmitIdenticalObjectComparison()
312 __ bind(&not_identical); in EmitIdenticalObjectComparison()
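
EmitIdenticalObjectComparison has one numeric subtlety: even for the very same heap number, equality must fail when the value is NaN, because NaN compares unequal to itself. The bit test at lines 277-289 checks for NaN directly on the heap number's raw words: biased exponent all ones (the 0x7ff compare at line 281) and a non-zero mantissa. A self-contained sketch of the same test (not V8 code):

#include <cstdint>
#include <cstdio>
#include <cstring>
#include <limits>

bool IsNaNBits(double x) {
  uint64_t bits;
  std::memcpy(&bits, &x, sizeof(bits));
  uint32_t exponent = static_cast<uint32_t>(bits >> 52) & 0x7FF;
  uint64_t mantissa = bits & ((uint64_t{1} << 52) - 1);
  return exponent == 0x7FF && mantissa != 0;  // all-ones exponent, non-zero mantissa
}

int main() {
  std::printf("%d %d\n", IsNaNBits(std::numeric_limits<double>::quiet_NaN()),
              IsNaNBits(1.0));  // 1 0
}
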
323 __ JumpIfSmi(rhs, &rhs_is_smi); in EmitSmiNonsmiComparison()
326 __ CompareObjectType(rhs, r6, r7, HEAP_NUMBER_TYPE); in EmitSmiNonsmiComparison()
333 __ beq(&skip); in EmitSmiNonsmiComparison()
334 __ mov(r3, Operand(NOT_EQUAL)); in EmitSmiNonsmiComparison()
335 __ Ret(); in EmitSmiNonsmiComparison()
336 __ bind(&skip); in EmitSmiNonsmiComparison()
338 __ Ret(ne); in EmitSmiNonsmiComparison()
343 __ bne(slow); in EmitSmiNonsmiComparison()
348 __ SmiToDouble(d7, lhs); in EmitSmiNonsmiComparison()
350 __ lfd(d6, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
354 __ b(lhs_not_nan); in EmitSmiNonsmiComparison()
356 __ bind(&rhs_is_smi); in EmitSmiNonsmiComparison()
358 __ CompareObjectType(lhs, r7, r7, HEAP_NUMBER_TYPE); in EmitSmiNonsmiComparison()
365 __ beq(&skip); in EmitSmiNonsmiComparison()
366 __ mov(r3, Operand(NOT_EQUAL)); in EmitSmiNonsmiComparison()
367 __ Ret(); in EmitSmiNonsmiComparison()
368 __ bind(&skip); in EmitSmiNonsmiComparison()
370 __ Ret(ne); in EmitSmiNonsmiComparison()
375 __ bne(slow); in EmitSmiNonsmiComparison()
380 __ lfd(d7, FieldMemOperand(lhs, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
382 __ SmiToDouble(d6, rhs); in EmitSmiNonsmiComparison()
399 __ CompareObjectType(rhs, r5, r5, FIRST_JS_RECEIVER_TYPE); in EmitStrictTwoHeapObjectCompare()
400 __ blt(&first_non_object); in EmitStrictTwoHeapObjectCompare()
404 __ bind(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
405 __ Ret(); in EmitStrictTwoHeapObjectCompare()
407 __ bind(&first_non_object); in EmitStrictTwoHeapObjectCompare()
409 __ cmpi(r5, Operand(ODDBALL_TYPE)); in EmitStrictTwoHeapObjectCompare()
410 __ beq(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
412 __ CompareObjectType(lhs, r6, r6, FIRST_JS_RECEIVER_TYPE); in EmitStrictTwoHeapObjectCompare()
413 __ bge(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
416 __ cmpi(r6, Operand(ODDBALL_TYPE)); in EmitStrictTwoHeapObjectCompare()
417 __ beq(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
422 __ orx(r5, r5, r6); in EmitStrictTwoHeapObjectCompare()
423 __ andi(r0, r5, Operand(kIsNotStringMask | kIsNotInternalizedMask)); in EmitStrictTwoHeapObjectCompare()
424 __ beq(&return_not_equal, cr0); in EmitStrictTwoHeapObjectCompare()
435 __ CompareObjectType(rhs, r6, r5, HEAP_NUMBER_TYPE); in EmitCheckForTwoHeapNumbers()
436 __ bne(not_heap_numbers); in EmitCheckForTwoHeapNumbers()
437 __ LoadP(r5, FieldMemOperand(lhs, HeapObject::kMapOffset)); in EmitCheckForTwoHeapNumbers()
438 __ cmp(r5, r6); in EmitCheckForTwoHeapNumbers()
439 __ bne(slow); // First was a heap number, second wasn't. Go slow case. in EmitCheckForTwoHeapNumbers()
443 __ lfd(d6, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in EmitCheckForTwoHeapNumbers()
444 __ lfd(d7, FieldMemOperand(lhs, HeapNumber::kValueOffset)); in EmitCheckForTwoHeapNumbers()
446 __ b(both_loaded_as_doubles); in EmitCheckForTwoHeapNumbers()
461 __ andi(r0, r5, Operand(kIsNotStringMask)); in EmitCheckForInternalizedStringsOrObjects()
462 __ bne(&object_test, cr0); in EmitCheckForInternalizedStringsOrObjects()
463 __ andi(r0, r5, Operand(kIsNotInternalizedMask)); in EmitCheckForInternalizedStringsOrObjects()
464 __ bne(possible_strings, cr0); in EmitCheckForInternalizedStringsOrObjects()
465 __ CompareObjectType(lhs, r6, r6, FIRST_NONSTRING_TYPE); in EmitCheckForInternalizedStringsOrObjects()
466 __ bge(runtime_call); in EmitCheckForInternalizedStringsOrObjects()
467 __ andi(r0, r6, Operand(kIsNotInternalizedMask)); in EmitCheckForInternalizedStringsOrObjects()
468 __ bne(possible_strings, cr0); in EmitCheckForInternalizedStringsOrObjects()
473 __ Ret(); in EmitCheckForInternalizedStringsOrObjects()
475 __ bind(&object_test); in EmitCheckForInternalizedStringsOrObjects()
476 __ LoadP(r5, FieldMemOperand(lhs, HeapObject::kMapOffset)); in EmitCheckForInternalizedStringsOrObjects()
477 __ LoadP(r6, FieldMemOperand(rhs, HeapObject::kMapOffset)); in EmitCheckForInternalizedStringsOrObjects()
478 __ lbz(r7, FieldMemOperand(r5, Map::kBitFieldOffset)); in EmitCheckForInternalizedStringsOrObjects()
479 __ lbz(r8, FieldMemOperand(r6, Map::kBitFieldOffset)); in EmitCheckForInternalizedStringsOrObjects()
480 __ andi(r0, r7, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
481 __ bne(&undetectable, cr0); in EmitCheckForInternalizedStringsOrObjects()
482 __ andi(r0, r8, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
483 __ bne(&return_unequal, cr0); in EmitCheckForInternalizedStringsOrObjects()
485 __ CompareInstanceType(r5, r5, FIRST_JS_RECEIVER_TYPE); in EmitCheckForInternalizedStringsOrObjects()
486 __ blt(runtime_call); in EmitCheckForInternalizedStringsOrObjects()
487 __ CompareInstanceType(r6, r6, FIRST_JS_RECEIVER_TYPE); in EmitCheckForInternalizedStringsOrObjects()
488 __ blt(runtime_call); in EmitCheckForInternalizedStringsOrObjects()
490 __ bind(&return_unequal); in EmitCheckForInternalizedStringsOrObjects()
492 __ Ret(); in EmitCheckForInternalizedStringsOrObjects()
494 __ bind(&undetectable); in EmitCheckForInternalizedStringsOrObjects()
495 __ andi(r0, r8, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
496 __ beq(&return_unequal, cr0); in EmitCheckForInternalizedStringsOrObjects()
501 __ CompareInstanceType(r5, r5, ODDBALL_TYPE); in EmitCheckForInternalizedStringsOrObjects()
502 __ beq(&return_equal); in EmitCheckForInternalizedStringsOrObjects()
503 __ CompareInstanceType(r6, r6, ODDBALL_TYPE); in EmitCheckForInternalizedStringsOrObjects()
504 __ bne(&return_unequal); in EmitCheckForInternalizedStringsOrObjects()
506 __ bind(&return_equal); in EmitCheckForInternalizedStringsOrObjects()
507 __ li(r3, Operand(EQUAL)); in EmitCheckForInternalizedStringsOrObjects()
508 __ Ret(); in EmitCheckForInternalizedStringsOrObjects()
518 __ JumpIfNotSmi(input, fail); in CompareICStub_CheckInputType()
520 __ JumpIfSmi(input, &ok); in CompareICStub_CheckInputType()
521 __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail, in CompareICStub_CheckInputType()
526 __ bind(&ok); in CompareICStub_CheckInputType()
546 __ orx(r5, r4, r3); in GenerateGeneric()
547 __ JumpIfNotSmi(r5, &not_two_smis); in GenerateGeneric()
548 __ SmiUntag(r4); in GenerateGeneric()
549 __ SmiUntag(r3); in GenerateGeneric()
550 __ sub(r3, r4, r3); in GenerateGeneric()
551 __ Ret(); in GenerateGeneric()
552 __ bind(&not_two_smis); in GenerateGeneric()
565 __ and_(r5, lhs, rhs); in GenerateGeneric()
566 __ JumpIfNotSmi(r5, &not_smis); in GenerateGeneric()
577 __ bind(&both_loaded_as_doubles); in GenerateGeneric()
579 __ bind(&lhs_not_nan); in GenerateGeneric()
581 __ fcmpu(d7, d6); in GenerateGeneric()
584 __ bunordered(&nan); in GenerateGeneric()
587 __ li(r4, Operand(GREATER)); in GenerateGeneric()
588 __ li(r5, Operand(LESS)); in GenerateGeneric()
589 __ isel(eq, r3, r0, r4); in GenerateGeneric()
590 __ isel(lt, r3, r5, r3); in GenerateGeneric()
591 __ Ret(); in GenerateGeneric()
593 __ beq(&equal); in GenerateGeneric()
594 __ blt(&less_than); in GenerateGeneric()
595 __ li(r3, Operand(GREATER)); in GenerateGeneric()
596 __ Ret(); in GenerateGeneric()
597 __ bind(&equal); in GenerateGeneric()
598 __ li(r3, Operand(EQUAL)); in GenerateGeneric()
599 __ Ret(); in GenerateGeneric()
600 __ bind(&less_than); in GenerateGeneric()
601 __ li(r3, Operand(LESS)); in GenerateGeneric()
602 __ Ret(); in GenerateGeneric()
605 __ bind(&nan); in GenerateGeneric()
610 __ li(r3, Operand(GREATER)); in GenerateGeneric()
612 __ li(r3, Operand(LESS)); in GenerateGeneric()
614 __ Ret(); in GenerateGeneric()
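
On processors with the isel instruction, the stub materializes the comparison result without branches (lines 587-590): r3 starts as GREATER, is replaced by 0 (from r0) when the compare set eq, and by LESS when it set lt; the branchy sequence at lines 593-602 is the fallback for older cores. NaN is peeled off first by bunordered at line 584. Roughly equivalent C++ (a sketch; V8's result constants are LESS = -1, EQUAL = 0, GREATER = 1):

#include <cstdio>

enum CompareResult { LESS = -1, EQUAL = 0, GREATER = 1 };

// Assumes the operands are already known to be ordered (non-NaN); the
// stub reaches the isel pair only after bunordered has taken the NaN exit.
CompareResult SelectResult(double lhs, double rhs) {
  CompareResult r = (lhs == rhs) ? EQUAL : GREATER;  // isel eq, r3, r0, r4
  return (lhs < rhs) ? LESS : r;                     // isel lt, r3, r5, r3
}

int main() {
  std::printf("%d %d %d\n", SelectResult(1, 2), SelectResult(2, 2),
              SelectResult(3, 2));  // -1 0 1
}
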
616 __ bind(&not_smis); in GenerateGeneric()
636 __ bind(&check_for_internalized_strings); in GenerateGeneric()
649 __ bind(&flat_string_check); in GenerateGeneric()
651 __ JumpIfNonSmisNotBothSequentialOneByteStrings(lhs, rhs, r5, r6, &slow); in GenerateGeneric()
653 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, r5, in GenerateGeneric()
662 __ bind(&slow); in GenerateGeneric()
667 __ Push(lhs, rhs); in GenerateGeneric()
668 __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual); in GenerateGeneric()
672 __ LoadRoot(r4, Heap::kTrueValueRootIndex); in GenerateGeneric()
673 __ sub(r3, r3, r4); in GenerateGeneric()
674 __ Ret(); in GenerateGeneric()
676 __ Push(lhs, rhs); in GenerateGeneric()
684 __ LoadSmiLiteral(r3, Smi::FromInt(ncr)); in GenerateGeneric()
685 __ push(r3); in GenerateGeneric()
689 __ TailCallRuntime(Runtime::kCompare); in GenerateGeneric()
692 __ bind(&miss); in GenerateGeneric()
701 __ mflr(r0); in Generate()
702 __ MultiPush(kJSCallerSaved | r0.bit()); in Generate()
704 __ MultiPushDoubles(kCallerSavedDoubles); in Generate()
711 __ PrepareCallCFunction(argument_count, fp_argument_count, scratch); in Generate()
712 __ mov(r3, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
713 __ CallCFunction(ExternalReference::store_buffer_overflow_function(isolate()), in Generate()
716 __ MultiPopDoubles(kCallerSavedDoubles); in Generate()
718 __ MultiPop(kJSCallerSaved | r0.bit()); in Generate()
719 __ mtlr(r0); in Generate()
720 __ Ret(); in Generate()
725 __ PushSafepointRegisters(); in Generate()
726 __ blr(); in Generate()
731 __ PopSafepointRegisters(); in Generate()
732 __ blr(); in Generate()
749 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent); in Generate()
751 __ lfd(double_exponent, in Generate()
757 __ TryDoubleToInt32Exact(scratch, double_exponent, scratch2, in Generate()
759 __ beq(&int_exponent); in Generate()
761 __ mflr(r0); in Generate()
762 __ push(r0); in Generate()
765 __ PrepareCallCFunction(0, 2, scratch); in Generate()
766 __ MovToFloatParameters(double_base, double_exponent); in Generate()
767 __ CallCFunction( in Generate()
770 __ pop(r0); in Generate()
771 __ mtlr(r0); in Generate()
772 __ MovFromFloatResult(double_result); in Generate()
773 __ b(&done); in Generate()
777 __ bind(&int_exponent); in Generate()
781 __ mr(scratch, exponent); in Generate()
784 __ mr(exponent, scratch); in Generate()
786 __ fmr(double_scratch, double_base); // Back up base. in Generate()
787 __ li(scratch2, Operand(1)); in Generate()
788 __ ConvertIntToDouble(scratch2, double_result); in Generate()
791 __ cmpi(scratch, Operand::Zero()); in Generate()
793 __ neg(scratch2, scratch); in Generate()
794 __ isel(lt, scratch, scratch2, scratch); in Generate()
797 __ bge(&positive_exponent); in Generate()
798 __ neg(scratch, scratch); in Generate()
799 __ bind(&positive_exponent); in Generate()
803 __ bind(&while_true); in Generate()
804 __ andi(scratch2, scratch, Operand(1)); in Generate()
805 __ beq(&no_carry, cr0); in Generate()
806 __ fmul(double_result, double_result, double_scratch); in Generate()
807 __ bind(&no_carry); in Generate()
808 __ ShiftRightImm(scratch, scratch, Operand(1), SetRC); in Generate()
809 __ beq(&loop_end, cr0); in Generate()
810 __ fmul(double_scratch, double_scratch, double_scratch); in Generate()
811 __ b(&while_true); in Generate()
812 __ bind(&loop_end); in Generate()
814 __ cmpi(exponent, Operand::Zero()); in Generate()
815 __ bge(&done); in Generate()
817 __ li(scratch2, Operand(1)); in Generate()
818 __ ConvertIntToDouble(scratch2, double_scratch); in Generate()
819 __ fdiv(double_result, double_scratch, double_result); in Generate()
822 __ fcmpu(double_result, kDoubleRegZero); in Generate()
823 __ bne(&done); in Generate()
826 __ ConvertIntToDouble(exponent, double_exponent); in Generate()
829 __ mflr(r0); in Generate()
830 __ push(r0); in Generate()
833 __ PrepareCallCFunction(0, 2, scratch); in Generate()
834 __ MovToFloatParameters(double_base, double_exponent); in Generate()
835 __ CallCFunction( in Generate()
838 __ pop(r0); in Generate()
839 __ mtlr(r0); in Generate()
840 __ MovFromFloatResult(double_result); in Generate()
842 __ bind(&done); in Generate()
843 __ Ret(); in Generate()
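
The integer-exponent path (lines 777-815) is classic exponentiation by squaring: test the low bit of the exponent, multiply the running result in when it is set, square the base, shift, repeat. A negative exponent is handled by computing base^|n| and taking the reciprocal at line 819, with a further fallback to the C library pow for the remaining edge cases (lines 822-840). A compact sketch of the loop (my reconstruction, not V8 code):

#include <cstdio>

double PowInt(double base, int exponent) {
  double running = base;  // double_scratch: repeatedly squared (line 786)
  double result = 1.0;    // double_result, seeded with 1.0 (lines 787-788)
  int n = exponent < 0 ? -exponent : exponent;
  while (n != 0) {
    if (n & 1) result *= running;  // fmul double_result, double_scratch (806)
    n >>= 1;                       // ShiftRightImm ..., SetRC (808)
    running *= running;            // fmul double_scratch, double_scratch (810)
  }
  return exponent < 0 ? 1.0 / result : result;  // fdiv at line 819
}

int main() {
  std::printf("%g %g\n", PowInt(2.0, 10), PowInt(2.0, -2));  // 1024 0.25
}
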
904 __ mr(r15, r4); in Generate()
908 __ mr(r4, r5); in Generate()
911 __ ShiftLeftImm(r4, r3, Operand(kPointerSizeLog2)); in Generate()
912 __ add(r4, r4, sp); in Generate()
913 __ subi(r4, r4, Operand(kPointerSize)); in Generate()
930 __ EnterExitFrame(save_doubles(), arg_stack_space, is_builtin_exit() in Generate()
935 __ mr(r14, r3); in Generate()
948 __ mr(r5, r4); in Generate()
949 __ mr(r4, r3); in Generate()
950 __ addi(r3, sp, Operand((kStackFrameExtraParamSlot + 1) * kPointerSize)); in Generate()
955 __ mov(isolate_reg, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
960 __ LoadP(ToRegister(ABI_TOC_REGISTER), MemOperand(r15, kPointerSize)); in Generate()
961 __ LoadP(ip, MemOperand(r15, 0)); // Instruction address in Generate()
964 __ Move(ip, r15); in Generate()
973 __ mov_label_addr(r0, &after_call); in Generate()
974 __ StoreP(r0, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize)); in Generate()
975 __ Call(target); in Generate()
976 __ bind(&after_call); in Generate()
980 if (result_size() > 2) __ LoadP(r5, MemOperand(r3, 2 * kPointerSize)); in Generate()
981 __ LoadP(r4, MemOperand(r3, kPointerSize)); in Generate()
982 __ LoadP(r3, MemOperand(r3)); in Generate()
987 __ CompareRoot(r3, Heap::kExceptionRootIndex); in Generate()
988 __ beq(&exception_returned); in Generate()
997 __ mov(r6, Operand(pending_exception_address)); in Generate()
998 __ LoadP(r6, MemOperand(r6)); in Generate()
999 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex); in Generate()
1001 __ beq(&okay); in Generate()
1002 __ stop("Unexpected pending exception"); in Generate()
1003 __ bind(&okay); in Generate()
1018 __ LeaveExitFrame(save_doubles(), argc, true); in Generate()
1019 __ blr(); in Generate()
1022 __ bind(&exception_returned); in Generate()
1041 __ PrepareCallCFunction(3, 0, r3); in Generate()
1042 __ li(r3, Operand::Zero()); in Generate()
1043 __ li(r4, Operand::Zero()); in Generate()
1044 __ mov(r5, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
1045 __ CallCFunction(find_handler, 3); in Generate()
1049 __ mov(cp, Operand(pending_handler_context_address)); in Generate()
1050 __ LoadP(cp, MemOperand(cp)); in Generate()
1051 __ mov(sp, Operand(pending_handler_sp_address)); in Generate()
1052 __ LoadP(sp, MemOperand(sp)); in Generate()
1053 __ mov(fp, Operand(pending_handler_fp_address)); in Generate()
1054 __ LoadP(fp, MemOperand(fp)); in Generate()
1059 __ cmpi(cp, Operand::Zero()); in Generate()
1060 __ beq(&skip); in Generate()
1061 __ StoreP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); in Generate()
1062 __ bind(&skip); in Generate()
1066 __ mov(r4, Operand(pending_handler_code_address)); in Generate()
1067 __ LoadP(r4, MemOperand(r4)); in Generate()
1068 __ mov(r5, Operand(pending_handler_offset_address)); in Generate()
1069 __ LoadP(r5, MemOperand(r5)); in Generate()
1070 __ addi(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start in Generate()
1072 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r4); in Generate()
1074 __ add(ip, r4, r5); in Generate()
1075 __ Jump(ip); in Generate()
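
The final jump of the exception path (lines 1066-1075) computes the handler's machine address from two loads: the Code heap object plus (Code::kHeaderSize - kHeapObjectTag) yields the first instruction, and the handler offset is added on top. As plain arithmetic (a sketch with the operands passed in explicitly; the constants named in the comments are V8's):

#include <cstdint>
#include <cstdio>

uintptr_t HandlerEntry(uintptr_t code_object, uintptr_t handler_offset,
                       uintptr_t header_size, uintptr_t heap_object_tag) {
  // addi r4, r4, Code::kHeaderSize - kHeapObjectTag  (line 1070)
  uintptr_t instruction_start = code_object + header_size - heap_object_tag;
  return instruction_start + handler_offset;  // add ip, r4, r5 (line 1074)
}

int main() {
  // Illustrative numbers only: tagged code object at 0x1001, 64-byte
  // header, tag 1, handler 0x40 into the instruction stream.
  std::printf("%#lx\n",
              static_cast<unsigned long>(HandlerEntry(0x1001, 0x40, 64, 1)));
}
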
1089 __ function_descriptor(); in Generate()
1095 __ mflr(r0); in Generate()
1096 __ StoreP(r0, MemOperand(sp, kStackFrameLRSlot * kPointerSize)); in Generate()
1099 __ MultiPush(kCalleeSaved); in Generate()
1102 __ MultiPushDoubles(kCalleeSavedDoubles); in Generate()
1104 __ LoadDoubleLiteral(kDoubleRegZero, 0.0, r0); in Generate()
1112 __ li(r0, Operand(-1)); // Push a bad frame pointer to fail if it is used. in Generate()
1113 __ push(r0); in Generate()
1115 __ li(kConstantPoolRegister, Operand::Zero()); in Generate()
1116 __ push(kConstantPoolRegister); in Generate()
1119 __ LoadSmiLiteral(r0, Smi::FromInt(marker)); in Generate()
1120 __ push(r0); in Generate()
1121 __ push(r0); in Generate()
1123 __ mov(r8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); in Generate()
1124 __ LoadP(r0, MemOperand(r8)); in Generate()
1125 __ push(r0); in Generate()
1128 __ addi(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); in Generate()
1133 __ mov(r8, Operand(ExternalReference(js_entry_sp))); in Generate()
1134 __ LoadP(r9, MemOperand(r8)); in Generate()
1135 __ cmpi(r9, Operand::Zero()); in Generate()
1136 __ bne(&non_outermost_js); in Generate()
1137 __ StoreP(fp, MemOperand(r8)); in Generate()
1138 __ LoadSmiLiteral(ip, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); in Generate()
1140 __ b(&cont); in Generate()
1141 __ bind(&non_outermost_js); in Generate()
1142 __ LoadSmiLiteral(ip, Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)); in Generate()
1143 __ bind(&cont); in Generate()
1144 __ push(ip); // frame-type in Generate()
1148 __ b(&invoke); in Generate()
1150 __ bind(&handler_entry); in Generate()
1156 __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in Generate()
1159 __ StoreP(r3, MemOperand(ip)); in Generate()
1160 __ LoadRoot(r3, Heap::kExceptionRootIndex); in Generate()
1161 __ b(&exit); in Generate()
1164 __ bind(&invoke); in Generate()
1166 __ PushStackHandler(); in Generate()
1185 __ mov(ip, Operand(construct_entry)); in Generate()
1188 __ mov(ip, Operand(entry)); in Generate()
1190 __ LoadP(ip, MemOperand(ip)); // deref address in Generate()
1194 __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1195 __ mtctr(ip); in Generate()
1196 __ bctrl(); // make the call in Generate()
1199 __ PopStackHandler(); in Generate()
1201 __ bind(&exit); // r3 holds result in Generate()
1204 __ pop(r8); in Generate()
1205 __ CmpSmiLiteral(r8, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME), r0); in Generate()
1206 __ bne(&non_outermost_js_2); in Generate()
1207 __ mov(r9, Operand::Zero()); in Generate()
1208 __ mov(r8, Operand(ExternalReference(js_entry_sp))); in Generate()
1209 __ StoreP(r9, MemOperand(r8)); in Generate()
1210 __ bind(&non_outermost_js_2); in Generate()
1213 __ pop(r6); in Generate()
1214 __ mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); in Generate()
1215 __ StoreP(r6, MemOperand(ip)); in Generate()
1218 __ addi(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); in Generate()
1221 __ MultiPopDoubles(kCalleeSavedDoubles); in Generate()
1224 __ MultiPop(kCalleeSaved); in Generate()
1227 __ LoadP(r0, MemOperand(sp, kStackFrameLRSlot * kPointerSize)); in Generate()
1228 __ mtlr(r0); in Generate()
1229 __ blr(); in Generate()
1243 __ bind(&miss); in Generate()
1270 __ Ret(); in Generate()
1275 __ bind(&miss); in Generate()
1286 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
1323 __ mov(r3, Operand(address_of_regexp_stack_memory_size)); in Generate()
1324 __ LoadP(r3, MemOperand(r3, 0)); in Generate()
1325 __ cmpi(r3, Operand::Zero()); in Generate()
1326 __ beq(&runtime); in Generate()
1329 __ LoadP(r3, MemOperand(sp, kJSRegExpOffset)); in Generate()
1330 __ JumpIfSmi(r3, &runtime); in Generate()
1331 __ CompareObjectType(r3, r4, r4, JS_REGEXP_TYPE); in Generate()
1332 __ bne(&runtime); in Generate()
1335 __ LoadP(regexp_data, FieldMemOperand(r3, JSRegExp::kDataOffset)); in Generate()
1337 __ TestIfSmi(regexp_data, r0); in Generate()
1338 __ Check(ne, kUnexpectedTypeForRegExpDataFixedArrayExpected, cr0); in Generate()
1339 __ CompareObjectType(regexp_data, r3, r3, FIXED_ARRAY_TYPE); in Generate()
1340 __ Check(eq, kUnexpectedTypeForRegExpDataFixedArrayExpected); in Generate()
1345 __ LoadP(r3, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset)); in Generate()
1347 __ CmpSmiLiteral(r3, Smi::FromInt(JSRegExp::IRREGEXP), r0); in Generate()
1348 __ bne(&runtime); in Generate()
1352 __ LoadP(r5, in Generate()
1358 __ SmiToShortArrayOffset(r5, r5); in Generate()
1360 __ cmpli(r5, Operand(Isolate::kJSRegexpStaticOffsetsVectorSize - 2)); in Generate()
1361 __ bgt(&runtime); in Generate()
1364 __ li(r11, Operand::Zero()); in Generate()
1365 __ LoadP(subject, MemOperand(sp, kSubjectOffset)); in Generate()
1366 __ JumpIfSmi(subject, &runtime); in Generate()
1367 __ mr(r6, subject); // Make a copy of the original subject string. in Generate()
1390 __ bind(&check_underlying); in Generate()
1391 __ LoadP(r3, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
1392 __ lbz(r3, FieldMemOperand(r3, Map::kInstanceTypeOffset)); in Generate()
1398 __ andi(r4, r3, Operand(kIsNotStringMask | kStringRepresentationMask | in Generate()
1401 __ beq(&seq_string, cr0); // Go to (4). in Generate()
1409 __ cmpi(r4, Operand(kExternalStringTag)); in Generate()
1410 __ bge(&not_seq_nor_cons); // Go to (5). in Generate()
1414 __ LoadP(r3, FieldMemOperand(subject, ConsString::kSecondOffset)); in Generate()
1415 __ CompareRoot(r3, Heap::kempty_stringRootIndex); in Generate()
1416 __ bne(&runtime); in Generate()
1417 __ LoadP(subject, FieldMemOperand(subject, ConsString::kFirstOffset)); in Generate()
1418 __ b(&check_underlying); in Generate()
1421 __ bind(&seq_string); in Generate()
1427 __ LoadP(r4, MemOperand(sp, kPreviousIndexOffset)); in Generate()
1428 __ JumpIfNotSmi(r4, &runtime); in Generate()
1429 __ LoadP(r6, FieldMemOperand(r6, String::kLengthOffset)); in Generate()
1430 __ cmpl(r6, r4); in Generate()
1431 __ ble(&runtime); in Generate()
1432 __ SmiUntag(r4); in Generate()
1437 __ ExtractBitMask(r6, r3, kStringEncodingMask, SetRC); in Generate()
1438 __ beq(&encoding_type_UC16, cr0); in Generate()
1439 __ LoadP(code, in Generate()
1441 __ b(&br_over); in Generate()
1442 __ bind(&encoding_type_UC16); in Generate()
1443 __ LoadP(code, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset)); in Generate()
1444 __ bind(&br_over); in Generate()
1451 __ JumpIfSmi(code, &runtime); in Generate()
1459 __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1, r3, r5); in Generate()
1464 __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters); in Generate()
1470 __ mov(r3, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
1471 __ StoreP(r3, MemOperand(sp, (kStackFrameExtraParamSlot + 1) * kPointerSize)); in Generate()
1477 __ li(r10, Operand(1)); in Generate()
1480 __ mov(r3, Operand(address_of_regexp_stack_memory_address)); in Generate()
1481 __ LoadP(r3, MemOperand(r3, 0)); in Generate()
1482 __ mov(r5, Operand(address_of_regexp_stack_memory_size)); in Generate()
1483 __ LoadP(r5, MemOperand(r5, 0)); in Generate()
1484 __ add(r9, r3, r5); in Generate()
1489 __ li(r8, Operand::Zero()); in Generate()
1492 __ mov( in Generate()
1498 __ addi(r18, subject, Operand(SeqString::kHeaderSize - kHeapObjectTag)); in Generate()
1499 __ xori(r6, r6, Operand(1)); in Generate()
1504 __ LoadP(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize)); in Generate()
1509 __ ShiftLeft_(r11, r11, r6); in Generate()
1510 __ add(r11, r18, r11); in Generate()
1511 __ ShiftLeft_(r5, r4, r6); in Generate()
1512 __ add(r5, r11, r5); in Generate()
1514 __ LoadP(r18, FieldMemOperand(subject, String::kLengthOffset)); in Generate()
1515 __ SmiUntag(r18); in Generate()
1516 __ ShiftLeft_(r6, r18, r6); in Generate()
1517 __ add(r6, r11, r6); in Generate()
1523 __ mr(r3, subject); in Generate()
1526 __ addi(code, code, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1531 __ LeaveExitFrame(false, no_reg, true); in Generate()
1539 __ cmpwi(r3, Operand(1)); in Generate()
1542 __ beq(&success); in Generate()
1544 __ cmpwi(r3, Operand(NativeRegExpMacroAssembler::FAILURE)); in Generate()
1545 __ beq(&failure); in Generate()
1546 __ cmpwi(r3, Operand(NativeRegExpMacroAssembler::EXCEPTION)); in Generate()
1548 __ bne(&runtime); in Generate()
1553 __ mov(r4, Operand(isolate()->factory()->the_hole_value())); in Generate()
1554 __ mov(r5, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in Generate()
1556 __ LoadP(r3, MemOperand(r5, 0)); in Generate()
1557 __ cmp(r3, r4); in Generate()
1558 __ beq(&runtime); in Generate()
1561 __ TailCallRuntime(Runtime::kRegExpExecReThrow); in Generate()
1563 __ bind(&failure); in Generate()
1565 __ mov(r3, Operand(isolate()->factory()->null_value())); in Generate()
1566 __ addi(sp, sp, Operand(4 * kPointerSize)); in Generate()
1567 __ Ret(); in Generate()
1570 __ bind(&success); in Generate()
1571 __ LoadP(r4, in Generate()
1576 __ SmiToShortArrayOffset(r4, r4); in Generate()
1577 __ addi(r4, r4, Operand(2)); in Generate()
1580 __ LoadP(last_match_info_elements, MemOperand(sp, kLastMatchInfoOffset)); in Generate()
1581 __ JumpIfSmi(last_match_info_elements, &runtime); in Generate()
1583 __ LoadP(r3, in Generate()
1585 __ CompareRoot(r3, Heap::kFixedArrayMapRootIndex); in Generate()
1586 __ bne(&runtime); in Generate()
1589 __ LoadP( in Generate()
1591 __ addi(r5, r4, Operand(RegExpMatchInfo::kLastMatchOverhead)); in Generate()
1592 __ SmiUntag(r0, r3); in Generate()
1593 __ cmp(r5, r0); in Generate()
1594 __ bgt(&runtime); in Generate()
1599 __ SmiTag(r5, r4); in Generate()
1600 __ StoreP(r5, FieldMemOperand(last_match_info_elements, in Generate()
1604 __ StoreP(subject, FieldMemOperand(last_match_info_elements, in Generate()
1607 __ mr(r5, subject); in Generate()
1608 __ RecordWriteField(last_match_info_elements, in Generate()
1611 __ mr(subject, r5); in Generate()
1612 __ StoreP(subject, FieldMemOperand(last_match_info_elements, in Generate()
1615 __ RecordWriteField(last_match_info_elements, in Generate()
1622 __ mov(r5, Operand(address_of_static_offsets_vector)); in Generate()
1629 __ addi(r3, last_match_info_elements, in Generate()
1632 __ addi(r5, r5, Operand(-kIntSize)); // bias down for lwzu in Generate()
1633 __ mtctr(r4); in Generate()
1634 __ bind(&next_capture); in Generate()
1636 __ lwzu(r6, MemOperand(r5, kIntSize)); in Generate()
1638 __ SmiTag(r6); in Generate()
1639 __ StorePU(r6, MemOperand(r3, kPointerSize)); in Generate()
1640 __ bdnz(&next_capture); in Generate()
1643 __ mr(r3, last_match_info_elements); in Generate()
1644 __ addi(sp, sp, Operand(4 * kPointerSize)); in Generate()
1645 __ Ret(); in Generate()
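
The capture-copy loop (lines 1629-1640) walks the static offsets vector with load-with-update (lwzu) and stores each int32 capture offset into the match info as a Smi with store-with-update (StorePU), using the CTR register as the loop counter. The same transfer in plain C++ (a sketch; it assumes a 1-bit Smi tag for simplicity, whereas PPC64 V8 actually keeps Smis in the upper 32 bits of the word):

#include <cstdint>
#include <cstdio>

void CopyCaptures(intptr_t* dest, const int32_t* src, int count) {
  for (int i = 0; i < count; i++) {
    // SmiTag + StorePU: tag the raw offset, store it (assumed 1-bit tag).
    dest[i] = static_cast<intptr_t>(src[i]) << 1;
  }
}

int main() {
  const int32_t offsets[] = {0, 5};
  intptr_t match_info[2];
  CopyCaptures(match_info, offsets, 2);
  std::printf("%ld %ld\n", static_cast<long>(match_info[0]),
              static_cast<long>(match_info[1]));  // 0 10
}
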
1648 __ bind(&runtime); in Generate()
1649 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
1653 __ bind(&not_seq_nor_cons); in Generate()
1655 __ bgt(&not_long_external); // Go to (7). in Generate()
1658 __ bind(&external_string); in Generate()
1659 __ LoadP(r3, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
1660 __ lbz(r3, FieldMemOperand(r3, Map::kInstanceTypeOffset)); in Generate()
1665 __ andi(r0, r3, Operand(kIsIndirectStringMask)); in Generate()
1666 __ Assert(eq, kExternalStringExpectedButNotFound, cr0); in Generate()
1668 __ LoadP(subject, in Generate()
1672 __ subi(subject, subject, in Generate()
1674 __ b(&seq_string); // Go to (4). in Generate()
1677 __ bind(&not_long_external); in Generate()
1679 __ andi(r0, r4, Operand(kIsNotStringMask | kShortExternalStringMask)); in Generate()
1680 __ bne(&runtime, cr0); in Generate()
1684 __ LoadP(r11, FieldMemOperand(subject, SlicedString::kOffsetOffset)); in Generate()
1685 __ SmiUntag(r11); in Generate()
1686 __ LoadP(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); in Generate()
1687 __ b(&check_underlying); // Go to (4). in Generate()
1700 __ SmiTag(r3); in CallStubInRecordCallTarget()
1701 __ Push(r6, r5, r4, r3); in CallStubInRecordCallTarget()
1702 __ Push(cp); in CallStubInRecordCallTarget()
1704 __ CallStub(stub); in CallStubInRecordCallTarget()
1706 __ Pop(cp); in CallStubInRecordCallTarget()
1707 __ Pop(r6, r5, r4, r3); in CallStubInRecordCallTarget()
1708 __ SmiUntag(r3); in CallStubInRecordCallTarget()
1730 __ SmiToPtrArrayOffset(r8, r6); in GenerateRecordCallTarget()
1731 __ add(r8, r5, r8); in GenerateRecordCallTarget()
1732 __ LoadP(r8, FieldMemOperand(r8, FixedArray::kHeaderSize)); in GenerateRecordCallTarget()
1741 __ LoadP(weak_value, FieldMemOperand(r8, WeakCell::kValueOffset)); in GenerateRecordCallTarget()
1742 __ cmp(r4, weak_value); in GenerateRecordCallTarget()
1743 __ beq(&done); in GenerateRecordCallTarget()
1744 __ CompareRoot(r8, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1745 __ beq(&done); in GenerateRecordCallTarget()
1746 __ LoadP(feedback_map, FieldMemOperand(r8, HeapObject::kMapOffset)); in GenerateRecordCallTarget()
1747 __ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex); in GenerateRecordCallTarget()
1748 __ bne(&check_allocation_site); in GenerateRecordCallTarget()
1751 __ JumpIfSmi(weak_value, &initialize); in GenerateRecordCallTarget()
1752 __ b(&megamorphic); in GenerateRecordCallTarget()
1754 __ bind(&check_allocation_site); in GenerateRecordCallTarget()
1759 __ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex); in GenerateRecordCallTarget()
1760 __ bne(&miss); in GenerateRecordCallTarget()
1763 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8); in GenerateRecordCallTarget()
1764 __ cmp(r4, r8); in GenerateRecordCallTarget()
1765 __ bne(&megamorphic); in GenerateRecordCallTarget()
1766 __ b(&done); in GenerateRecordCallTarget()
1768 __ bind(&miss); in GenerateRecordCallTarget()
1772 __ CompareRoot(r8, Heap::kuninitialized_symbolRootIndex); in GenerateRecordCallTarget()
1773 __ beq(&initialize); in GenerateRecordCallTarget()
1776 __ bind(&megamorphic); in GenerateRecordCallTarget()
1777 __ SmiToPtrArrayOffset(r8, r6); in GenerateRecordCallTarget()
1778 __ add(r8, r5, r8); in GenerateRecordCallTarget()
1779 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1780 __ StoreP(ip, FieldMemOperand(r8, FixedArray::kHeaderSize), r0); in GenerateRecordCallTarget()
1781 __ jmp(&done); in GenerateRecordCallTarget()
1784 __ bind(&initialize); in GenerateRecordCallTarget()
1787 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8); in GenerateRecordCallTarget()
1788 __ cmp(r4, r8); in GenerateRecordCallTarget()
1789 __ bne(&not_array_function); in GenerateRecordCallTarget()
1796 __ b(&done); in GenerateRecordCallTarget()
1798 __ bind(&not_array_function); in GenerateRecordCallTarget()
1803 __ bind(&done); in GenerateRecordCallTarget()
1806 __ SmiToPtrArrayOffset(r8, r6); in GenerateRecordCallTarget()
1807 __ add(r8, r5, r8); in GenerateRecordCallTarget()
1809 __ LoadP(r7, FieldMemOperand(r8, count_offset)); in GenerateRecordCallTarget()
1810 __ AddSmiLiteral(r7, r7, Smi::FromInt(1), r0); in GenerateRecordCallTarget()
1811 __ StoreP(r7, FieldMemOperand(r8, count_offset), r0); in GenerateRecordCallTarget()
1823 __ JumpIfSmi(r4, &non_function); in Generate()
1825 __ CompareObjectType(r4, r8, r8, JS_FUNCTION_TYPE); in Generate()
1826 __ bne(&non_function); in Generate()
1830 __ SmiToPtrArrayOffset(r8, r6); in Generate()
1831 __ add(r8, r5, r8); in Generate()
1833 __ LoadP(r5, FieldMemOperand(r8, FixedArray::kHeaderSize)); in Generate()
1834 __ LoadP(r8, FieldMemOperand(r5, AllocationSite::kMapOffset)); in Generate()
1835 __ CompareRoot(r8, Heap::kAllocationSiteMapRootIndex); in Generate()
1837 __ LoadRoot(r8, Heap::kUndefinedValueRootIndex); in Generate()
1838 __ isel(eq, r5, r5, r8); in Generate()
1841 __ beq(&feedback_register_initialized); in Generate()
1842 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); in Generate()
1843 __ bind(&feedback_register_initialized); in Generate()
1846 __ AssertUndefinedOrAllocationSite(r5, r8); in Generate()
1849 __ mr(r6, r4); in Generate()
1853 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); in Generate()
1854 __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset)); in Generate()
1855 __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1856 __ JumpToJSEntry(ip); in Generate()
1858 __ bind(&non_function); in Generate()
1859 __ mr(r6, r4); in Generate()
1860 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate()
1867 __ SmiToPtrArrayOffset(temp, slot); in IncrementCallCount()
1868 __ add(feedback_vector, feedback_vector, temp); in IncrementCallCount()
1869 __ LoadP(slot, FieldMemOperand(feedback_vector, count_offset)); in IncrementCallCount()
1870 __ AddSmiLiteral(slot, slot, Smi::FromInt(1), temp); in IncrementCallCount()
1871 __ StoreP(slot, FieldMemOperand(feedback_vector, count_offset), temp); in IncrementCallCount()
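
IncrementCallCount (lines 1867-1871) bumps the call count stored in the feedback-vector slot. The count is a Smi, so AddSmiLiteral is ordinary integer addition on the tagged word; no untag/retag round trip is needed. Sketch (assuming a 1-bit Smi tag; PPC64 V8 really uses a 32-bit shift, but the identity is the same):

#include <cstdint>
#include <cstdio>

constexpr int kSmiShift = 1;  // assumption: 1-bit Smi tag
constexpr intptr_t SmiFromInt(intptr_t v) { return v << kSmiShift; }

void IncrementCallCount(intptr_t* slot) {
  // AddSmiLiteral(slot, slot, Smi::FromInt(1), ...): tagged words add directly.
  *slot += SmiFromInt(1);
}

int main() {
  intptr_t count = SmiFromInt(7);
  IncrementCallCount(&count);
  std::printf("%ld\n", static_cast<long>(count >> kSmiShift));  // 8
}
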
1880 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8); in HandleArrayCase()
1881 __ cmp(r4, r8); in HandleArrayCase()
1882 __ bne(miss); in HandleArrayCase()
1887 __ mr(r5, r7); in HandleArrayCase()
1888 __ mr(r6, r4); in HandleArrayCase()
1890 __ TailCallStub(&stub); in HandleArrayCase()
1902 __ SmiToPtrArrayOffset(r9, r6); in Generate()
1903 __ add(r9, r5, r9); in Generate()
1904 __ LoadP(r7, FieldMemOperand(r9, FixedArray::kHeaderSize)); in Generate()
1920 __ LoadP(r8, FieldMemOperand(r7, WeakCell::kValueOffset)); in Generate()
1921 __ cmp(r4, r8); in Generate()
1922 __ bne(&extra_checks_or_miss); in Generate()
1926 __ JumpIfSmi(r4, &extra_checks_or_miss); in Generate()
1928 __ bind(&call_function); in Generate()
1933 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), in Generate()
1937 __ bind(&extra_checks_or_miss); in Generate()
1940 __ CompareRoot(r7, Heap::kmegamorphic_symbolRootIndex); in Generate()
1941 __ beq(&call); in Generate()
1944 __ LoadP(r8, FieldMemOperand(r7, HeapObject::kMapOffset)); in Generate()
1945 __ CompareRoot(r8, Heap::kAllocationSiteMapRootIndex); in Generate()
1946 __ bne(&not_allocation_site); in Generate()
1951 __ bind(&not_allocation_site); in Generate()
1956 __ b(&miss); in Generate()
1959 __ CompareRoot(r7, Heap::kuninitialized_symbolRootIndex); in Generate()
1960 __ beq(&uninitialized); in Generate()
1964 __ AssertNotSmi(r7); in Generate()
1965 __ CompareObjectType(r7, r8, r8, JS_FUNCTION_TYPE); in Generate()
1966 __ bne(&miss); in Generate()
1967 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); in Generate()
1968 __ StoreP(ip, FieldMemOperand(r9, FixedArray::kHeaderSize), r0); in Generate()
1970 __ bind(&call); in Generate()
1975 __ bind(&call_count_incremented); in Generate()
1976 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), in Generate()
1979 __ bind(&uninitialized); in Generate()
1982 __ JumpIfSmi(r4, &miss); in Generate()
1985 __ CompareObjectType(r4, r7, r7, JS_FUNCTION_TYPE); in Generate()
1986 __ bne(&miss); in Generate()
1990 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r7); in Generate()
1991 __ cmp(r4, r7); in Generate()
1992 __ beq(&miss); in Generate()
1995 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kContextOffset)); in Generate()
1996 __ LoadP(r7, ContextMemOperand(r7, Context::NATIVE_CONTEXT_INDEX)); in Generate()
1997 __ LoadP(ip, NativeContextMemOperand()); in Generate()
1998 __ cmp(r7, ip); in Generate()
1999 __ bne(&miss); in Generate()
2008 __ SmiTag(r3); in Generate()
2009 __ Push(r3, r5, r6, cp, r4); in Generate()
2010 __ CallStub(&create_stub); in Generate()
2011 __ Pop(r5, r6, cp, r4); in Generate()
2012 __ Pop(r3); in Generate()
2013 __ SmiUntag(r3); in Generate()
2016 __ b(&call_function); in Generate()
2020 __ bind(&miss); in Generate()
2023 __ b(&call_count_incremented); in Generate()
2031 __ SmiTag(r3); in GenerateMiss()
2034 __ Push(r3, r4, r5, r6); in GenerateMiss()
2037 __ CallRuntime(Runtime::kCallIC_Miss); in GenerateMiss()
2040 __ mr(r4, r3); in GenerateMiss()
2043 __ Pop(r3); in GenerateMiss()
2044 __ SmiUntag(r3); in GenerateMiss()
2052 __ JumpIfSmi(object_, receiver_not_string_); in GenerateFast()
2055 __ LoadP(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); in GenerateFast()
2056 __ lbz(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); in GenerateFast()
2058 __ andi(r0, result_, Operand(kIsNotStringMask)); in GenerateFast()
2059 __ bne(receiver_not_string_, cr0); in GenerateFast()
2063 __ JumpIfNotSmi(index_, &index_not_smi_); in GenerateFast()
2064 __ bind(&got_smi_index_); in GenerateFast()
2067 __ LoadP(ip, FieldMemOperand(object_, String::kLengthOffset)); in GenerateFast()
2068 __ cmpl(ip, index_); in GenerateFast()
2069 __ ble(index_out_of_range_); in GenerateFast()
2071 __ SmiUntag(index_); in GenerateFast()
2076 __ SmiTag(result_); in GenerateFast()
2077 __ bind(&exit_); in GenerateFast()
2084 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); in GenerateSlow()
2087 __ bind(&index_not_smi_); in GenerateSlow()
2089 __ CheckMap(index_, result_, Heap::kHeapNumberMapRootIndex, index_not_number_, in GenerateSlow()
2093 __ Push(LoadWithVectorDescriptor::VectorRegister(), in GenerateSlow()
2097 __ Push(object_, index_); in GenerateSlow()
2099 __ CallRuntime(Runtime::kNumberToSmi); in GenerateSlow()
2102 __ Move(index_, r3); in GenerateSlow()
2104 __ Pop(LoadWithVectorDescriptor::VectorRegister(), in GenerateSlow()
2107 __ pop(object_); in GenerateSlow()
2110 __ LoadP(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); in GenerateSlow()
2111 __ lbz(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); in GenerateSlow()
2114 __ JumpIfNotSmi(index_, index_out_of_range_); in GenerateSlow()
2116 __ b(&got_smi_index_); in GenerateSlow()
2121 __ bind(&call_runtime_); in GenerateSlow()
2123 __ SmiTag(index_); in GenerateSlow()
2124 __ Push(object_, index_); in GenerateSlow()
2125 __ CallRuntime(Runtime::kStringCharCodeAtRT); in GenerateSlow()
2126 __ Move(result_, r3); in GenerateSlow()
2128 __ b(&exit_); in GenerateSlow()
2130 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); in GenerateSlow()
2140 __ LoadSmiLiteral(r0, Smi::FromInt(~String::kMaxOneByteCharCodeU)); in GenerateFast()
2141 __ ori(r0, r0, Operand(kSmiTagMask)); in GenerateFast()
2142 __ and_(r0, code_, r0, SetRC); in GenerateFast()
2143 __ bne(&slow_case_, cr0); in GenerateFast()
2145 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); in GenerateFast()
2147 __ mr(r0, code_); in GenerateFast()
2148 __ SmiToPtrArrayOffset(code_, code_); in GenerateFast()
2149 __ add(result_, result_, code_); in GenerateFast()
2150 __ mr(code_, r0); in GenerateFast()
2151 __ LoadP(result_, FieldMemOperand(result_, FixedArray::kHeaderSize)); in GenerateFast()
2152 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex); in GenerateFast()
2153 __ beq(&slow_case_); in GenerateFast()
2154 __ bind(&exit_); in GenerateFast()
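
The fast path above (lines 2140-2143) folds two checks into a single AND: the mask built from Smi::FromInt(~String::kMaxOneByteCharCodeU) OR'd with kSmiTagMask has a bit set wherever either "not a Smi" or "char code above 0xFF" would show, so one bne covers both slow-path conditions. A sketch of the same trick with a 1-bit Smi tag (an assumption for illustration):

#include <cstdint>
#include <cstdio>

constexpr uint32_t kSmiTagMask = 1;  // assumption: 1-bit tag, tag 0 = Smi
constexpr uint32_t kMaxOneByteCharCode = 0xFF;

bool IsOneByteCharCodeSmi(uint32_t tagged) {
  // Smi::FromInt(~kMaxOneByteCharCode) | kSmiTagMask, built at lines 2140-2141.
  uint32_t mask = ~(kMaxOneByteCharCode << 1) | kSmiTagMask;
  return (tagged & mask) == 0;  // and_ ..., SetRC; bne &slow_case_ (2142-2143)
}

int main() {
  std::printf("%d %d %d\n",
              IsOneByteCharCodeSmi(65u << 1),          // 'A' as a Smi: 1
              IsOneByteCharCodeSmi(300u << 1),         // above 0xFF: 0
              IsOneByteCharCodeSmi((65u << 1) | 1));   // not a Smi: 0
}
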
2160 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); in GenerateSlow()
2162 __ bind(&slow_case_); in GenerateSlow()
2164 __ push(code_); in GenerateSlow()
2165 __ CallRuntime(Runtime::kStringCharFromCode); in GenerateSlow()
2166 __ Move(result_, r3); in GenerateSlow()
2168 __ b(&exit_); in GenerateSlow()
2170 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); in GenerateSlow()
2183 __ andi(r0, dest, Operand(kPointerAlignmentMask)); in GenerateCopyCharacters()
2184 __ Check(eq, kDestinationOfCopyNotAligned, cr0); in GenerateCopyCharacters()
2191 __ add(count, count, count, LeaveOE, SetRC); in GenerateCopyCharacters()
2192 __ beq(&done, cr0); in GenerateCopyCharacters()
2194 __ cmpi(count, Operand::Zero()); in GenerateCopyCharacters()
2195 __ beq(&done); in GenerateCopyCharacters()
2200 __ mtctr(count); in GenerateCopyCharacters()
2201 __ bind(&byte_loop); in GenerateCopyCharacters()
2202 __ lbz(scratch, MemOperand(src)); in GenerateCopyCharacters()
2203 __ addi(src, src, Operand(1)); in GenerateCopyCharacters()
2204 __ stb(scratch, MemOperand(dest)); in GenerateCopyCharacters()
2205 __ addi(dest, dest, Operand(1)); in GenerateCopyCharacters()
2206 __ bdnz(&byte_loop); in GenerateCopyCharacters()
2208 __ bind(&done); in GenerateCopyCharacters()
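
GenerateCopyCharacters (lines 2183-2208) is a straight byte loop: the count (doubled at line 2191 when copying two-byte characters) goes into the CTR via mtctr, and bdnz decrements and branches in one instruction. The C++ equivalent needs an explicit counter (sketch, not V8 code):

#include <cstddef>
#include <cstdio>

void CopyCharacters(unsigned char* dest, const unsigned char* src,
                    std::size_t count) {
  while (count-- != 0) {  // mtctr count ... bdnz &byte_loop
    *dest++ = *src++;     // lbz/addi/stb/addi (lines 2202-2205)
  }
}

int main() {
  const unsigned char src[] = "hello";
  unsigned char dest[6] = {0};
  CopyCharacters(dest, src, 5);
  std::printf("%s\n", dest);
}
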
2221 __ LoadP(length, FieldMemOperand(left, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
2222 __ LoadP(scratch2, FieldMemOperand(right, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
2223 __ cmp(length, scratch2); in GenerateFlatOneByteStringEquals()
2224 __ beq(&check_zero_length); in GenerateFlatOneByteStringEquals()
2225 __ bind(&strings_not_equal); in GenerateFlatOneByteStringEquals()
2226 __ LoadSmiLiteral(r3, Smi::FromInt(NOT_EQUAL)); in GenerateFlatOneByteStringEquals()
2227 __ Ret(); in GenerateFlatOneByteStringEquals()
2231 __ bind(&check_zero_length); in GenerateFlatOneByteStringEquals()
2233 __ cmpi(length, Operand::Zero()); in GenerateFlatOneByteStringEquals()
2234 __ bne(&compare_chars); in GenerateFlatOneByteStringEquals()
2235 __ LoadSmiLiteral(r3, Smi::FromInt(EQUAL)); in GenerateFlatOneByteStringEquals()
2236 __ Ret(); in GenerateFlatOneByteStringEquals()
2239 __ bind(&compare_chars); in GenerateFlatOneByteStringEquals()
2244 __ LoadSmiLiteral(r3, Smi::FromInt(EQUAL)); in GenerateFlatOneByteStringEquals()
2245 __ Ret(); in GenerateFlatOneByteStringEquals()
2254 __ LoadP(scratch1, FieldMemOperand(left, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
2255 __ LoadP(scratch2, FieldMemOperand(right, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
2256 __ sub(scratch3, scratch1, scratch2, LeaveOE, SetRC); in GenerateCompareFlatOneByteStrings()
2259 __ isel(gt, scratch1, scratch2, scratch1, cr0); in GenerateCompareFlatOneByteStrings()
2262 __ ble(&skip, cr0); in GenerateCompareFlatOneByteStrings()
2263 __ mr(scratch1, scratch2); in GenerateCompareFlatOneByteStrings()
2264 __ bind(&skip); in GenerateCompareFlatOneByteStrings()
2268 __ cmpi(min_length, Operand::Zero()); in GenerateCompareFlatOneByteStrings()
2269 __ beq(&compare_lengths); in GenerateCompareFlatOneByteStrings()
2276 __ bind(&compare_lengths); in GenerateCompareFlatOneByteStrings()
2279 __ mr(r3, length_delta); in GenerateCompareFlatOneByteStrings()
2280 __ cmpi(r3, Operand::Zero()); in GenerateCompareFlatOneByteStrings()
2281 __ bind(&result_not_equal); in GenerateCompareFlatOneByteStrings()
2285 __ LoadSmiLiteral(r4, Smi::FromInt(GREATER)); in GenerateCompareFlatOneByteStrings()
2286 __ LoadSmiLiteral(r5, Smi::FromInt(LESS)); in GenerateCompareFlatOneByteStrings()
2287 __ isel(eq, r3, r0, r4); in GenerateCompareFlatOneByteStrings()
2288 __ isel(lt, r3, r5, r3); in GenerateCompareFlatOneByteStrings()
2289 __ Ret(); in GenerateCompareFlatOneByteStrings()
2292 __ ble(&less_equal); in GenerateCompareFlatOneByteStrings()
2293 __ LoadSmiLiteral(r3, Smi::FromInt(GREATER)); in GenerateCompareFlatOneByteStrings()
2294 __ Ret(); in GenerateCompareFlatOneByteStrings()
2295 __ bind(&less_equal); in GenerateCompareFlatOneByteStrings()
2296 __ beq(&equal); in GenerateCompareFlatOneByteStrings()
2297 __ LoadSmiLiteral(r3, Smi::FromInt(LESS)); in GenerateCompareFlatOneByteStrings()
2298 __ bind(&equal); in GenerateCompareFlatOneByteStrings()
2299 __ Ret(); in GenerateCompareFlatOneByteStrings()
2310 __ SmiUntag(length); in GenerateOneByteCharsCompareLoop()
2311 __ addi(scratch1, length, in GenerateOneByteCharsCompareLoop()
2313 __ add(left, left, scratch1); in GenerateOneByteCharsCompareLoop()
2314 __ add(right, right, scratch1); in GenerateOneByteCharsCompareLoop()
2315 __ subfic(length, length, Operand::Zero()); in GenerateOneByteCharsCompareLoop()
2320 __ bind(&loop); in GenerateOneByteCharsCompareLoop()
2321 __ lbzx(scratch1, MemOperand(left, index)); in GenerateOneByteCharsCompareLoop()
2322 __ lbzx(r0, MemOperand(right, index)); in GenerateOneByteCharsCompareLoop()
2323 __ cmp(scratch1, r0); in GenerateOneByteCharsCompareLoop()
2324 __ bne(chars_not_equal); in GenerateOneByteCharsCompareLoop()
2325 __ addi(index, index, Operand(1)); in GenerateOneByteCharsCompareLoop()
2326 __ cmpi(index, Operand::Zero()); in GenerateOneByteCharsCompareLoop()
2327 __ bne(&loop); in GenerateOneByteCharsCompareLoop()
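
The one-byte compare loop (lines 2310-2327) uses a common negative-index idiom: both string pointers are advanced past the last character and the index starts at -length (the subfic at line 2315), so a single compare-against-zero terminates the loop. Sketch (my reconstruction, ignoring the tagged-pointer header bias added at line 2311):

#include <cstddef>
#include <cstdio>

bool EqualChars(const unsigned char* left, const unsigned char* right,
                std::size_t length) {
  left += length;   // add left, left, scratch1 (line 2313)
  right += length;  // add right, right, scratch1 (line 2314)
  std::ptrdiff_t index = -static_cast<std::ptrdiff_t>(length);  // subfic (2315)
  while (index != 0) {
    if (left[index] != right[index]) return false;  // lbzx/lbzx/cmp/bne
    ++index;  // addi index, index, 1; cmpi; bne &loop
  }
  return true;
}

int main() {
  const unsigned char a[] = "abc", b[] = "abd";
  std::printf("%d %d\n", EqualChars(a, a, 3), EqualChars(a, b, 3));  // 1 0
}
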
2341 __ Move(r5, isolate()->factory()->undefined_value()); in Generate()
2345 __ TestIfSmi(r5, r0); in Generate()
2346 __ Assert(ne, kExpectedAllocationSite, cr0); in Generate()
2347 __ push(r5); in Generate()
2348 __ LoadP(r5, FieldMemOperand(r5, HeapObject::kMapOffset)); in Generate()
2349 __ LoadRoot(ip, Heap::kAllocationSiteMapRootIndex); in Generate()
2350 __ cmp(r5, ip); in Generate()
2351 __ pop(r5); in Generate()
2352 __ Assert(eq, kExpectedAllocationSite); in Generate()
2358 __ TailCallStub(&stub); in Generate()
2366 __ CheckMap(r4, r5, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); in GenerateBooleans()
2367 __ CheckMap(r3, r6, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); in GenerateBooleans()
2369 __ LoadP(r4, FieldMemOperand(r4, Oddball::kToNumberOffset)); in GenerateBooleans()
2370 __ AssertSmi(r4); in GenerateBooleans()
2371 __ LoadP(r3, FieldMemOperand(r3, Oddball::kToNumberOffset)); in GenerateBooleans()
2372 __ AssertSmi(r3); in GenerateBooleans()
2374 __ sub(r3, r4, r3); in GenerateBooleans()
2375 __ Ret(); in GenerateBooleans()
2377 __ bind(&miss); in GenerateBooleans()
2385 __ orx(r5, r4, r3); in GenerateSmis()
2386 __ JumpIfNotSmi(r5, &miss); in GenerateSmis()
2390 // __ sub(r3, r3, r4, SetCC); in GenerateSmis()
2391 __ sub(r3, r3, r4); in GenerateSmis()
2394 __ SmiUntag(r4); in GenerateSmis()
2395 __ SmiUntag(r3); in GenerateSmis()
2396 __ sub(r3, r4, r3); in GenerateSmis()
2398 __ Ret(); in GenerateSmis()
2400 __ bind(&miss); in GenerateSmis()
2414 __ JumpIfNotSmi(r4, &miss); in GenerateNumbers()
2417 __ JumpIfNotSmi(r3, &miss); in GenerateNumbers()
2424 __ JumpIfSmi(r3, &right_smi); in GenerateNumbers()
2425 __ CheckMap(r3, r5, Heap::kHeapNumberMapRootIndex, &maybe_undefined1, in GenerateNumbers()
2427 __ lfd(d1, FieldMemOperand(r3, HeapNumber::kValueOffset)); in GenerateNumbers()
2428 __ b(&left); in GenerateNumbers()
2429 __ bind(&right_smi); in GenerateNumbers()
2430 __ SmiToDouble(d1, r3); in GenerateNumbers()
2432 __ bind(&left); in GenerateNumbers()
2433 __ JumpIfSmi(r4, &left_smi); in GenerateNumbers()
2434 __ CheckMap(r4, r5, Heap::kHeapNumberMapRootIndex, &maybe_undefined2, in GenerateNumbers()
2436 __ lfd(d0, FieldMemOperand(r4, HeapNumber::kValueOffset)); in GenerateNumbers()
2437 __ b(&done); in GenerateNumbers()
2438 __ bind(&left_smi); in GenerateNumbers()
2439 __ SmiToDouble(d0, r4); in GenerateNumbers()
2441 __ bind(&done); in GenerateNumbers()
2444 __ fcmpu(d0, d1); in GenerateNumbers()
2447 __ bunordered(&unordered); in GenerateNumbers()
2452 __ li(r4, Operand(GREATER)); in GenerateNumbers()
2453 __ li(r5, Operand(LESS)); in GenerateNumbers()
2454 __ isel(eq, r3, r0, r4); in GenerateNumbers()
2455 __ isel(lt, r3, r5, r3); in GenerateNumbers()
2456 __ Ret(); in GenerateNumbers()
2458 __ beq(&equal); in GenerateNumbers()
2459 __ blt(&less_than); in GenerateNumbers()
2461 __ li(r3, Operand(GREATER)); in GenerateNumbers()
2462 __ Ret(); in GenerateNumbers()
2463 __ bind(&equal); in GenerateNumbers()
2464 __ li(r3, Operand(EQUAL)); in GenerateNumbers()
2465 __ Ret(); in GenerateNumbers()
2466 __ bind(&less_than); in GenerateNumbers()
2467 __ li(r3, Operand(LESS)); in GenerateNumbers()
2468 __ Ret(); in GenerateNumbers()
2471 __ bind(&unordered); in GenerateNumbers()
2472 __ bind(&generic_stub); in GenerateNumbers()
2475 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); in GenerateNumbers()
2477 __ bind(&maybe_undefined1); in GenerateNumbers()
2479 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); in GenerateNumbers()
2480 __ bne(&miss); in GenerateNumbers()
2481 __ JumpIfSmi(r4, &unordered); in GenerateNumbers()
2482 __ CompareObjectType(r4, r5, r5, HEAP_NUMBER_TYPE); in GenerateNumbers()
2483 __ bne(&maybe_undefined2); in GenerateNumbers()
2484 __ b(&unordered); in GenerateNumbers()
2487 __ bind(&maybe_undefined2); in GenerateNumbers()
2489 __ CompareRoot(r4, Heap::kUndefinedValueRootIndex); in GenerateNumbers()
2490 __ beq(&unordered); in GenerateNumbers()
2493 __ bind(&miss); in GenerateNumbers()
2509 __ JumpIfEitherSmi(left, right, &miss); in GenerateInternalizedStrings()
2512 __ LoadP(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2513 __ LoadP(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2514 __ lbz(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2515 __ lbz(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2517 __ orx(tmp1, tmp1, tmp2); in GenerateInternalizedStrings()
2518 __ andi(r0, tmp1, Operand(kIsNotStringMask | kIsNotInternalizedMask)); in GenerateInternalizedStrings()
2519 __ bne(&miss, cr0); in GenerateInternalizedStrings()
2522 __ cmp(left, right); in GenerateInternalizedStrings()
2523 __ bne(&not_equal); in GenerateInternalizedStrings()
2529 __ LoadSmiLiteral(r3, Smi::FromInt(EQUAL)); in GenerateInternalizedStrings()
2530 __ bind(&not_equal); in GenerateInternalizedStrings()
2531 __ Ret(); in GenerateInternalizedStrings()
2533 __ bind(&miss); in GenerateInternalizedStrings()
2550 __ JumpIfEitherSmi(left, right, &miss); in GenerateUniqueNames()
2554 __ LoadP(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateUniqueNames()
2555 __ LoadP(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateUniqueNames()
2556 __ lbz(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2557 __ lbz(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2559 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss); in GenerateUniqueNames()
2560 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss); in GenerateUniqueNames()
2563 __ cmp(left, right); in GenerateUniqueNames()
2564 __ bne(&miss); in GenerateUniqueNames()
2570 __ LoadSmiLiteral(r3, Smi::FromInt(EQUAL)); in GenerateUniqueNames()
2571 __ Ret(); in GenerateUniqueNames()
2573 __ bind(&miss); in GenerateUniqueNames()
2593 __ JumpIfEitherSmi(left, right, &miss); in GenerateStrings()
2597 __ LoadP(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateStrings()
2598 __ LoadP(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateStrings()
2599 __ lbz(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateStrings()
2600 __ lbz(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateStrings()
2602 __ orx(tmp3, tmp1, tmp2); in GenerateStrings()
2603 __ andi(r0, tmp3, Operand(kIsNotStringMask)); in GenerateStrings()
2604 __ bne(&miss, cr0); in GenerateStrings()
2607 __ cmp(left, right); in GenerateStrings()
2610 __ bne(&not_identical); in GenerateStrings()
2611 __ LoadSmiLiteral(r3, Smi::FromInt(EQUAL)); in GenerateStrings()
2612 __ Ret(); in GenerateStrings()
2613 __ bind(&not_identical); in GenerateStrings()
2623 __ orx(tmp3, tmp1, tmp2); in GenerateStrings()
2624 __ andi(r0, tmp3, Operand(kIsNotInternalizedMask)); in GenerateStrings()
2628 __ Ret(eq, cr0); in GenerateStrings()
2633 __ JumpIfBothInstanceTypesAreNotSequentialOneByte(tmp1, tmp2, tmp3, tmp4, in GenerateStrings()
2646 __ bind(&runtime); in GenerateStrings()
2650 __ Push(left, right); in GenerateStrings()
2651 __ CallRuntime(Runtime::kStringEqual); in GenerateStrings()
2653 __ LoadRoot(r4, Heap::kTrueValueRootIndex); in GenerateStrings()
2654 __ sub(r3, r3, r4); in GenerateStrings()
2655 __ Ret(); in GenerateStrings()
2657 __ Push(left, right); in GenerateStrings()
2658 __ TailCallRuntime(Runtime::kStringCompare); in GenerateStrings()
2661 __ bind(&miss); in GenerateStrings()
2669 __ and_(r5, r4, r3); in GenerateReceivers()
2670 __ JumpIfSmi(r5, &miss); in GenerateReceivers()
2673 __ CompareObjectType(r3, r5, r5, FIRST_JS_RECEIVER_TYPE); in GenerateReceivers()
2674 __ blt(&miss); in GenerateReceivers()
2675 __ CompareObjectType(r4, r5, r5, FIRST_JS_RECEIVER_TYPE); in GenerateReceivers()
2676 __ blt(&miss); in GenerateReceivers()
2679 __ sub(r3, r3, r4); in GenerateReceivers()
2680 __ Ret(); in GenerateReceivers()
2682 __ bind(&miss); in GenerateReceivers()
2690 __ and_(r5, r4, r3); in GenerateKnownReceivers()
2691 __ JumpIfSmi(r5, &miss); in GenerateKnownReceivers()
2692 __ GetWeakValue(r7, cell); in GenerateKnownReceivers()
2693 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2694 __ LoadP(r6, FieldMemOperand(r4, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2695 __ cmp(r5, r7); in GenerateKnownReceivers()
2696 __ bne(&miss); in GenerateKnownReceivers()
2697 __ cmp(r6, r7); in GenerateKnownReceivers()
2698 __ bne(&miss); in GenerateKnownReceivers()
2701 __ sub(r3, r3, r4); in GenerateKnownReceivers()
2702 __ Ret(); in GenerateKnownReceivers()
2705 __ LoadSmiLiteral(r5, Smi::FromInt(GREATER)); in GenerateKnownReceivers()
2707 __ LoadSmiLiteral(r5, Smi::FromInt(LESS)); in GenerateKnownReceivers()
2709 __ Push(r4, r3, r5); in GenerateKnownReceivers()
2710 __ TailCallRuntime(Runtime::kCompare); in GenerateKnownReceivers()
2713 __ bind(&miss); in GenerateKnownReceivers()
2722 __ Push(r4, r3); in GenerateMiss()
2723 __ Push(r4, r3); in GenerateMiss()
2724 __ LoadSmiLiteral(r0, Smi::FromInt(op())); in GenerateMiss()
2725 __ push(r0); in GenerateMiss()
2726 __ CallRuntime(Runtime::kCompareIC_Miss); in GenerateMiss()
2728 __ addi(r5, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); in GenerateMiss()
2730 __ Pop(r4, r3); in GenerateMiss()
2733 __ JumpToJSEntry(r5); in GenerateMiss()
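The addi before JumpToJSEntry turns the tagged Code object returned by the miss handler into a raw entry address: drop the heap-object tag, skip the Code header. A sketch with stand-in constants (the real ones are kHeapObjectTag and Code::kHeaderSize):

    #include <cstdint>

    constexpr uintptr_t kHeapObjectTag = 1;    // assumed tag size
    constexpr uintptr_t kCodeHeaderSize = 32;  // stand-in for Code::kHeaderSize

    uintptr_t CodeEntryAddress(uintptr_t tagged_code) {
      return tagged_code + kCodeHeaderSize - kHeapObjectTag;
    }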
2741 __ mflr(r0); in Generate()
2742 __ StoreP(r0, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize)); in Generate()
2743 __ Call(ip); // Call the C++ function. in Generate()
2744 __ LoadP(r0, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize)); in Generate()
2745 __ mtlr(r0); in Generate()
2746 __ blr(); in Generate()
2753 __ LoadP(ToRegister(ABI_TOC_REGISTER), MemOperand(target, kPointerSize)); in GenerateCall()
2754 __ LoadP(ip, MemOperand(target, 0)); // Instruction address in GenerateCall()
2758 __ Move(ip, target); in GenerateCall()
2762 __ mov(r0, Operand(code, RelocInfo::CODE_TARGET)); in GenerateCall()
2763 __ Call(r0); // Call the stub. in GenerateCall()
2781 __ LoadP(index, FieldMemOperand(properties, kCapacityOffset)); in GenerateNegativeLookup()
2782 __ subi(index, index, Operand(1)); in GenerateNegativeLookup()
2783 __ LoadSmiLiteral( in GenerateNegativeLookup()
2785 __ and_(index, index, ip); in GenerateNegativeLookup()
2789 __ ShiftLeftImm(ip, index, Operand(1)); in GenerateNegativeLookup()
2790 __ add(index, index, ip); // index *= 3. in GenerateNegativeLookup()
2795 __ SmiToPtrArrayOffset(ip, index); in GenerateNegativeLookup()
2796 __ add(tmp, properties, ip); in GenerateNegativeLookup()
2797 __ LoadP(entity_name, FieldMemOperand(tmp, kElementsStartOffset)); in GenerateNegativeLookup()
2800 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex); in GenerateNegativeLookup()
2801 __ cmp(entity_name, tmp); in GenerateNegativeLookup()
2802 __ beq(done); in GenerateNegativeLookup()
2805 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex); in GenerateNegativeLookup()
2808 __ Cmpi(entity_name, Operand(Handle<Name>(name)), r0); in GenerateNegativeLookup()
2809 __ beq(miss); in GenerateNegativeLookup()
2812 __ cmp(entity_name, tmp); in GenerateNegativeLookup()
2813 __ beq(&good); in GenerateNegativeLookup()
2816 __ LoadP(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset)); in GenerateNegativeLookup()
2817 __ lbz(entity_name, FieldMemOperand(entity_name, Map::kInstanceTypeOffset)); in GenerateNegativeLookup()
2818 __ JumpIfNotUniqueNameInstanceType(entity_name, miss); in GenerateNegativeLookup()
2819 __ bind(&good); in GenerateNegativeLookup()
2822 __ LoadP(properties, in GenerateNegativeLookup()
2829 __ mflr(r0); in GenerateNegativeLookup()
2830 __ MultiPush(spill_mask); in GenerateNegativeLookup()
2832 __ LoadP(r3, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); in GenerateNegativeLookup()
2833 __ mov(r4, Operand(Handle<Name>(name))); in GenerateNegativeLookup()
2835 __ CallStub(&stub); in GenerateNegativeLookup()
2836 __ cmpi(r3, Operand::Zero()); in GenerateNegativeLookup()
2838 __ MultiPop(spill_mask); // MultiPop does not touch condition flags in GenerateNegativeLookup()
2839 __ mtlr(r0); in GenerateNegativeLookup()
2841 __ beq(done); in GenerateNegativeLookup()
2842 __ bne(miss); in GenerateNegativeLookup()
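Dictionary entries are (key, value, details) triples, which is why GenerateNegativeLookup scales the probe index by 3; the ShiftLeftImm/add pair does the scaling without a multiply:

    #include <cstdint>

    // index * 3 as index * 2 + index, mirroring the shift-add pair above.
    uint64_t ScaleByEntrySize(uint64_t index) {
      return index + (index << 1);
    }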
2858 __ AssertName(name); in GeneratePositiveLookup()
2861 __ LoadP(scratch1, FieldMemOperand(elements, kCapacityOffset)); in GeneratePositiveLookup()
2862 __ SmiUntag(scratch1); // convert smi to int in GeneratePositiveLookup()
2863 __ subi(scratch1, scratch1, Operand(1)); in GeneratePositiveLookup()
2870 __ lwz(scratch2, FieldMemOperand(name, Name::kHashFieldOffset)); in GeneratePositiveLookup()
2877 __ addi(scratch2, scratch2, in GeneratePositiveLookup()
2880 __ srwi(scratch2, scratch2, Operand(Name::kHashShift)); in GeneratePositiveLookup()
2881 __ and_(scratch2, scratch1, scratch2); in GeneratePositiveLookup()
2886 __ ShiftLeftImm(ip, scratch2, Operand(1)); in GeneratePositiveLookup()
2887 __ add(scratch2, scratch2, ip); in GeneratePositiveLookup()
2890 __ ShiftLeftImm(ip, scratch2, Operand(kPointerSizeLog2)); in GeneratePositiveLookup()
2891 __ add(scratch2, elements, ip); in GeneratePositiveLookup()
2892 __ LoadP(ip, FieldMemOperand(scratch2, kElementsStartOffset)); in GeneratePositiveLookup()
2893 __ cmp(name, ip); in GeneratePositiveLookup()
2894 __ beq(done); in GeneratePositiveLookup()
2901 __ mflr(r0); in GeneratePositiveLookup()
2902 __ MultiPush(spill_mask); in GeneratePositiveLookup()
2905 __ mr(r4, name); in GeneratePositiveLookup()
2906 __ mr(r3, elements); in GeneratePositiveLookup()
2908 __ mr(r3, elements); in GeneratePositiveLookup()
2909 __ mr(r4, name); in GeneratePositiveLookup()
2912 __ CallStub(&stub); in GeneratePositiveLookup()
2913 __ cmpi(r3, Operand::Zero()); in GeneratePositiveLookup()
2914 __ mr(scratch2, r5); in GeneratePositiveLookup()
2915 __ MultiPop(spill_mask); in GeneratePositiveLookup()
2916 __ mtlr(r0); in GeneratePositiveLookup()
2918 __ bne(done); in GeneratePositiveLookup()
2919 __ beq(miss); in GeneratePositiveLookup()
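GeneratePositiveLookup unrolls a fixed number of probes over a power-of-two table, so "& mask" stands in for a modulo and the probe offsets grow as triangular numbers (what V8 exposes as NameDictionary::GetProbeOffset). A sketch of one probe, assuming that offset formula:

    #include <cstdint>

    uint32_t ProbeIndex(uint32_t hash, uint32_t mask, uint32_t probe) {
      // Offsets 0, 1, 3, 6, ... for probe = 0, 1, 2, 3, ...
      return (hash + probe * (probe + 1) / 2) & mask;
    }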
2947 __ LoadP(mask, FieldMemOperand(dictionary, kCapacityOffset)); in Generate()
2948 __ SmiUntag(mask); in Generate()
2949 __ subi(mask, mask, Operand(1)); in Generate()
2951 __ lwz(hash, FieldMemOperand(key, Name::kHashFieldOffset)); in Generate()
2953 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex); in Generate()
2964 __ addi(index, hash, in Generate()
2967 __ mr(index, hash); in Generate()
2969 __ srwi(r0, index, Operand(Name::kHashShift)); in Generate()
2970 __ and_(index, mask, r0); in Generate()
2974 __ ShiftLeftImm(scratch, index, Operand(1)); in Generate()
2975 __ add(index, index, scratch); // index *= 3. in Generate()
2977 __ ShiftLeftImm(scratch, index, Operand(kPointerSizeLog2)); in Generate()
2978 __ add(index, dictionary, scratch); in Generate()
2979 __ LoadP(entry_key, FieldMemOperand(index, kElementsStartOffset)); in Generate()
2982 __ cmp(entry_key, undefined); in Generate()
2983 __ beq(&not_in_dictionary); in Generate()
2986 __ cmp(entry_key, key); in Generate()
2987 __ beq(&in_dictionary); in Generate()
2991 __ LoadP(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset)); in Generate()
2992 __ lbz(entry_key, FieldMemOperand(entry_key, Map::kInstanceTypeOffset)); in Generate()
2993 __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary); in Generate()
2997 __ bind(&maybe_in_dictionary); in Generate()
3002 __ li(result, Operand::Zero()); in Generate()
3003 __ Ret(); in Generate()
3006 __ bind(&in_dictionary); in Generate()
3007 __ li(result, Operand(1)); in Generate()
3008 __ Ret(); in Generate()
3010 __ bind(&not_in_dictionary); in Generate()
3011 __ li(result, Operand::Zero()); in Generate()
3012 __ Ret(); in Generate()
3041 __ crclr(Assembler::encode_crbit(cr2, CR_LT)); in Generate()
3042 __ blt(&skip_to_incremental_noncompacting, cr2); in Generate()
3043 __ blt(&skip_to_incremental_compacting, cr2); in Generate()
3046 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in Generate()
3049 __ Ret(); in Generate()
3051 __ bind(&skip_to_incremental_noncompacting); in Generate()
3054 __ bind(&skip_to_incremental_compacting); in Generate()
3069 __ LoadP(regs_.scratch0(), MemOperand(regs_.address(), 0)); in GenerateIncremental()
3070 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value. in GenerateIncremental()
3073 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(), in GenerateIncremental()
3082 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in GenerateIncremental()
3085 __ bind(&dont_need_remembered_set); in GenerateIncremental()
3092 __ Ret(); in GenerateIncremental()
3099 __ PrepareCallCFunction(argument_count, regs_.scratch0()); in InformIncrementalMarker()
3104 __ mr(address, regs_.address()); in InformIncrementalMarker()
3105 __ mr(r3, regs_.object()); in InformIncrementalMarker()
3106 __ mr(r4, address); in InformIncrementalMarker()
3107 __ mov(r5, Operand(ExternalReference::isolate_address(isolate()))); in InformIncrementalMarker()
3110 __ CallCFunction( in InformIncrementalMarker()
3126 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black); in CheckNeedsToInformIncrementalMarker()
3130 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in CheckNeedsToInformIncrementalMarker()
3133 __ Ret(); in CheckNeedsToInformIncrementalMarker()
3136 __ bind(&on_black); in CheckNeedsToInformIncrementalMarker()
3139 __ LoadP(regs_.scratch0(), MemOperand(regs_.address(), 0)); in CheckNeedsToInformIncrementalMarker()
3144 __ CheckPageFlag(regs_.scratch0(), // Contains value. in CheckNeedsToInformIncrementalMarker()
3149 __ CheckPageFlag(regs_.object(), in CheckNeedsToInformIncrementalMarker()
3154 __ bind(&ensure_not_white); in CheckNeedsToInformIncrementalMarker()
3159 __ Push(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
3160 __ JumpIfWhite(regs_.scratch0(), // The value. in CheckNeedsToInformIncrementalMarker()
3165 __ Pop(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
3169 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in CheckNeedsToInformIncrementalMarker()
3172 __ Ret(); in CheckNeedsToInformIncrementalMarker()
3175 __ bind(&need_incremental_pop_scratch); in CheckNeedsToInformIncrementalMarker()
3176 __ Pop(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
3178 __ bind(&need_incremental); in CheckNeedsToInformIncrementalMarker()
3186 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET); in Generate()
3189 __ LoadP(r4, MemOperand(fp, parameter_count_offset)); in Generate()
3191 __ addi(r4, r4, Operand(1)); in Generate()
3194 __ slwi(r4, r4, Operand(kPointerSizeLog2)); in Generate()
3195 __ add(sp, sp, r4); in Generate()
3196 __ Ret(); in Generate()
3200 __ EmitLoadTypeFeedbackVector(r5); in Generate()
3202 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); in Generate()
3216 __ LoadP(cached_map, in HandleArrayCases()
3218 __ LoadP(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); in HandleArrayCases()
3219 __ cmp(receiver_map, cached_map); in HandleArrayCases()
3220 __ bne(&start_polymorphic); in HandleArrayCases()
3223 __ LoadP(handler, in HandleArrayCases()
3225 __ addi(ip, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); in HandleArrayCases()
3226 __ Jump(ip); in HandleArrayCases()
3230 __ bind(&start_polymorphic); in HandleArrayCases()
3231 __ LoadP(length, FieldMemOperand(feedback, FixedArray::kLengthOffset)); in HandleArrayCases()
3235 __ CmpSmiLiteral(length, Smi::FromInt(2), r0); in HandleArrayCases()
3236 __ beq(miss); in HandleArrayCases()
3252 __ SmiToPtrArrayOffset(r0, length); in HandleArrayCases()
3253 __ add(too_far, feedback, r0); in HandleArrayCases()
3254 __ addi(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in HandleArrayCases()
3255 __ addi(pointer_reg, feedback, in HandleArrayCases()
3258 __ bind(&next_loop); in HandleArrayCases()
3259 __ LoadP(cached_map, MemOperand(pointer_reg)); in HandleArrayCases()
3260 __ LoadP(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); in HandleArrayCases()
3261 __ cmp(receiver_map, cached_map); in HandleArrayCases()
3262 __ bne(&prepare_next); in HandleArrayCases()
3263 __ LoadP(handler, MemOperand(pointer_reg, kPointerSize)); in HandleArrayCases()
3264 __ addi(ip, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); in HandleArrayCases()
3265 __ Jump(ip); in HandleArrayCases()
3267 __ bind(&prepare_next); in HandleArrayCases()
3268 __ addi(pointer_reg, pointer_reg, Operand(kPointerSize * 2)); in HandleArrayCases()
3269 __ cmp(pointer_reg, too_far); in HandleArrayCases()
3270 __ blt(&next_loop); in HandleArrayCases()
3273 __ b(miss); in HandleArrayCases()
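HandleArrayCases scans the polymorphic feedback array as (weak map cell, handler) pairs, hence the 2 * kPointerSize stride and the handler load at offset kPointerSize. An illustrative model of the loop, with stand-in types:

    #include <cstddef>

    struct MapHandlerPair {
      const void* cached_map;  // weak cell value; a cleared cell never matches
      const void* handler;
    };

    const void* FindHandler(const MapHandlerPair* pairs, size_t count,
                            const void* receiver_map) {
      for (size_t i = 0; i < count; ++i) {
        if (pairs[i].cached_map == receiver_map) return pairs[i].handler;
      }
      return nullptr;  // corresponds to the final branch to miss
    }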
3282 __ JumpIfSmi(receiver, load_smi_map); in HandleMonomorphicCase()
3283 __ LoadP(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); in HandleMonomorphicCase()
3284 __ bind(compare_map); in HandleMonomorphicCase()
3287 __ LoadP(cached_map, FieldMemOperand(feedback, WeakCell::kValueOffset)); in HandleMonomorphicCase()
3288 __ cmp(cached_map, receiver_map); in HandleMonomorphicCase()
3289 __ bne(try_array); in HandleMonomorphicCase()
3291 __ SmiToPtrArrayOffset(r0, slot); in HandleMonomorphicCase()
3292 __ add(handler, vector, r0); in HandleMonomorphicCase()
3293 __ LoadP(handler, in HandleMonomorphicCase()
3295 __ addi(ip, handler, Operand(Code::kHeaderSize - kHeapObjectTag)); in HandleMonomorphicCase()
3296 __ Jump(ip); in HandleMonomorphicCase()
3300 __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister()); in Generate()
3325 __ LoadP(too_far, FieldMemOperand(feedback, FixedArray::kLengthOffset)); in HandlePolymorphicStoreCase()
3337 __ SmiToPtrArrayOffset(r0, too_far); in HandlePolymorphicStoreCase()
3338 __ add(too_far, feedback, r0); in HandlePolymorphicStoreCase()
3339 __ addi(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in HandlePolymorphicStoreCase()
3340 __ addi(pointer_reg, feedback, in HandlePolymorphicStoreCase()
3343 __ bind(&next_loop); in HandlePolymorphicStoreCase()
3344 __ LoadP(cached_map, MemOperand(pointer_reg)); in HandlePolymorphicStoreCase()
3345 __ LoadP(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); in HandlePolymorphicStoreCase()
3346 __ cmp(receiver_map, cached_map); in HandlePolymorphicStoreCase()
3347 __ bne(&prepare_next); in HandlePolymorphicStoreCase()
3349 __ LoadP(too_far, MemOperand(pointer_reg, kPointerSize)); in HandlePolymorphicStoreCase()
3350 __ CompareRoot(too_far, Heap::kUndefinedValueRootIndex); in HandlePolymorphicStoreCase()
3351 __ bne(&transition_call); in HandlePolymorphicStoreCase()
3352 __ LoadP(pointer_reg, MemOperand(pointer_reg, kPointerSize * 2)); in HandlePolymorphicStoreCase()
3353 __ addi(ip, pointer_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); in HandlePolymorphicStoreCase()
3354 __ Jump(ip); in HandlePolymorphicStoreCase()
3356 __ bind(&transition_call); in HandlePolymorphicStoreCase()
3357 __ LoadP(too_far, FieldMemOperand(too_far, WeakCell::kValueOffset)); in HandlePolymorphicStoreCase()
3358 __ JumpIfSmi(too_far, miss); in HandlePolymorphicStoreCase()
3360 __ LoadP(receiver_map, MemOperand(pointer_reg, kPointerSize * 2)); in HandlePolymorphicStoreCase()
3364 __ mr(feedback, too_far); in HandlePolymorphicStoreCase()
3366 __ addi(ip, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag)); in HandlePolymorphicStoreCase()
3367 __ Jump(ip); in HandlePolymorphicStoreCase()
3369 __ bind(&prepare_next); in HandlePolymorphicStoreCase()
3370 __ addi(pointer_reg, pointer_reg, Operand(kPointerSize * 3)); in HandlePolymorphicStoreCase()
3371 __ cmpl(pointer_reg, too_far); in HandlePolymorphicStoreCase()
3372 __ blt(&next_loop); in HandlePolymorphicStoreCase()
3375 __ b(miss); in HandlePolymorphicStoreCase()
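HandlePolymorphicStoreCase walks the same kind of feedback array but as (map, transition-or-undefined, handler) triples, hence the 3 * kPointerSize stride; an undefined middle slot means a plain handler, anything else is a transitioning store that installs the new map first. A stand-in model of the lookup:

    #include <cstddef>

    struct StoreEntry {
      const void* cached_map;
      const void* transition;  // nullptr here models the undefined root
      const void* handler;
    };

    const StoreEntry* FindStoreEntry(const StoreEntry* entries, size_t count,
                                     const void* receiver_map) {
      for (size_t i = 0; i < count; ++i) {
        if (entries[i].cached_map == receiver_map) return &entries[i];
      }
      return nullptr;  // miss
    }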
3388 __ SmiToPtrArrayOffset(r0, slot); in GenerateImpl()
3389 __ add(feedback, vector, r0); in GenerateImpl()
3390 __ LoadP(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); in GenerateImpl()
3400 __ bind(&try_array); in GenerateImpl()
3402 __ LoadP(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); in GenerateImpl()
3403 __ CompareRoot(scratch1, Heap::kFixedArrayMapRootIndex); in GenerateImpl()
3404 __ bne(&not_array); in GenerateImpl()
3408 __ bind(&polymorphic); in GenerateImpl()
3415 __ bind(&not_array); in GenerateImpl()
3417 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex); in GenerateImpl()
3418 __ bne(&try_poly_name); in GenerateImpl()
3421 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); in GenerateImpl()
3423 __ bind(&try_poly_name); in GenerateImpl()
3425 __ cmp(key, feedback); in GenerateImpl()
3426 __ bne(&miss); in GenerateImpl()
3429 __ SmiToPtrArrayOffset(r0, slot); in GenerateImpl()
3430 __ add(feedback, vector, r0); in GenerateImpl()
3431 __ LoadP(feedback, in GenerateImpl()
3436 __ bind(&miss); in GenerateImpl()
3439 __ bind(&load_smi_map); in GenerateImpl()
3440 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); in GenerateImpl()
3441 __ b(&compare_map); in GenerateImpl()
3454 __ mflr(r0); in MaybeCallEntryHook()
3455 __ Push(r0, ip); in MaybeCallEntryHook()
3456 __ CallStub(&stub); in MaybeCallEntryHook()
3457 __ Pop(r0, ip); in MaybeCallEntryHook()
3458 __ mtlr(r0); in MaybeCallEntryHook()
3476 __ mflr(ip); in Generate()
3477 __ MultiPush(kSavedRegs | ip.bit()); in Generate()
3480 __ subi(r3, ip, Operand(kReturnAddressDistanceFromFunctionStart)); in Generate()
3484 __ addi(r4, sp, Operand((kNumSavedRegs + 1) * kPointerSize)); in Generate()
3489 __ mr(r15, sp); in Generate()
3491 __ ClearRightImm(sp, sp, Operand(WhichPowerOf2(frame_alignment))); in Generate()
3505 __ mov(r5, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
3508 __ mov(ip, Operand(entry_hook)); in Generate()
3511 __ LoadP(ToRegister(ABI_TOC_REGISTER), MemOperand(ip, kPointerSize)); in Generate()
3512 __ LoadP(ip, MemOperand(ip, 0)); in Generate()
3517 __ li(r0, Operand::Zero()); in Generate()
3518 __ StorePU(r0, MemOperand(sp, -kNumRequiredStackFrameSlots * kPointerSize)); in Generate()
3520 __ Call(ip); in Generate()
3522 __ addi(sp, sp, Operand(kNumRequiredStackFrameSlots * kPointerSize)); in Generate()
3526 __ mr(sp, r15); in Generate()
3530 __ MultiPop(kSavedRegs | ip.bit()); in Generate()
3531 __ mtlr(ip); in Generate()
3532 __ Ret(); in Generate()
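ProfileEntryHookStub saves the original sp in r15, then ClearRightImm rounds the stack pointer down to the ABI frame alignment by clearing its low bits, the usual power-of-two trick:

    #include <cstdint>

    uintptr_t AlignDown(uintptr_t sp, uintptr_t alignment /* power of two */) {
      return sp & ~(alignment - 1);
    }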
3541 __ TailCallStub(&stub); in CreateArrayDispatch()
3547 __ Cmpi(r6, Operand(kind), r0); in CreateArrayDispatch()
3549 __ TailCallStub(&stub, eq); in CreateArrayDispatch()
3553 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatch()
3577 __ andi(r0, r6, Operand(1)); in CreateArrayDispatchOneArgument()
3578 __ bne(&normal_sequence, cr0); in CreateArrayDispatchOneArgument()
3582 __ LoadP(r8, MemOperand(sp, 0)); in CreateArrayDispatchOneArgument()
3583 __ cmpi(r8, Operand::Zero()); in CreateArrayDispatchOneArgument()
3584 __ beq(&normal_sequence); in CreateArrayDispatchOneArgument()
3592 __ TailCallStub(&stub_holey); in CreateArrayDispatchOneArgument()
3594 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
3597 __ TailCallStub(&stub); in CreateArrayDispatchOneArgument()
3601 __ addi(r6, r6, Operand(1)); in CreateArrayDispatchOneArgument()
3604 __ LoadP(r8, FieldMemOperand(r5, 0)); in CreateArrayDispatchOneArgument()
3605 __ CompareRoot(r8, Heap::kAllocationSiteMapRootIndex); in CreateArrayDispatchOneArgument()
3606 __ Assert(eq, kExpectedAllocationSite); in CreateArrayDispatchOneArgument()
3613 __ LoadP(r7, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset)); in CreateArrayDispatchOneArgument()
3614 __ AddSmiLiteral(r7, r7, Smi::FromInt(kFastElementsKindPackedToHoley), r0); in CreateArrayDispatchOneArgument()
3615 __ StoreP(r7, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset), in CreateArrayDispatchOneArgument()
3618 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
3623 __ mov(r0, Operand(kind)); in CreateArrayDispatchOneArgument()
3624 __ cmp(r6, r0); in CreateArrayDispatchOneArgument()
3626 __ TailCallStub(&stub, eq); in CreateArrayDispatchOneArgument()
3630 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatchOneArgument()
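The andi test at the top of CreateArrayDispatchOneArgument works because, in V8's fast elements-kind numbering, each holey kind directly follows its packed counterpart, so bit 0 distinguishes packed from holey. A sketch under that assumption, with illustrative enum values:

    enum FastKind { PACKED_SMI = 0, HOLEY_SMI = 1, PACKED = 2, HOLEY = 3 };

    // kind | 1 is the holey sibling; odd kinds are already holey, which is
    // what the bit-0 test above checks before skipping the transition.
    inline FastKind ToHoley(FastKind kind) {
      return static_cast<FastKind>(kind | 1);
    }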
3672 __ cmpi(r3, Operand::Zero()); in GenerateDispatchToArrayStub()
3673 __ bne(&not_zero_case); in GenerateDispatchToArrayStub()
3676 __ bind(&not_zero_case); in GenerateDispatchToArrayStub()
3677 __ cmpi(r3, Operand(1)); in GenerateDispatchToArrayStub()
3678 __ bgt(&not_one_case); in GenerateDispatchToArrayStub()
3681 __ bind(&not_one_case); in GenerateDispatchToArrayStub()
3683 __ TailCallStub(&stub); in GenerateDispatchToArrayStub()
3702 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3704 __ TestIfSmi(r7, r0); in Generate()
3705 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0); in Generate()
3706 __ CompareObjectType(r7, r7, r8, MAP_TYPE); in Generate()
3707 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); in Generate()
3710 __ AssertUndefinedOrAllocationSite(r5, r7); in Generate()
3714 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset)); in Generate()
3717 __ cmp(r6, r4); in Generate()
3718 __ bne(&subclassing); in Generate()
3722 __ CompareRoot(r5, Heap::kUndefinedValueRootIndex); in Generate()
3723 __ beq(&no_info); in Generate()
3725 __ LoadP(r6, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset)); in Generate()
3726 __ SmiUntag(r6); in Generate()
3728 __ And(r6, r6, Operand(AllocationSite::ElementsKindBits::kMask)); in Generate()
3731 __ bind(&no_info); in Generate()
3734 __ bind(&subclassing); in Generate()
3735 __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2)); in Generate()
3736 __ StorePX(r4, MemOperand(sp, r0)); in Generate()
3737 __ addi(r3, r3, Operand(3)); in Generate()
3738 __ Push(r6, r5); in Generate()
3739 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate())); in Generate()
3745 __ cmpli(r3, Operand(1)); in GenerateCase()
3748 __ TailCallStub(&stub0, lt); in GenerateCase()
3751 __ TailCallStub(&stubN, gt); in GenerateCase()
3756 __ LoadP(r6, MemOperand(sp, 0)); in GenerateCase()
3757 __ cmpi(r6, Operand::Zero()); in GenerateCase()
3761 __ TailCallStub(&stub1_holey, ne); in GenerateCase()
3765 __ TailCallStub(&stub1); in GenerateCase()
3782 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3784 __ TestIfSmi(r6, r0); in Generate()
3785 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0); in Generate()
3786 __ CompareObjectType(r6, r6, r7, MAP_TYPE); in Generate()
3787 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); in Generate()
3791 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3793 __ lbz(r6, FieldMemOperand(r6, Map::kBitField2Offset)); in Generate()
3795 __ DecodeField<Map::ElementsKindBits>(r6); in Generate()
3799 __ cmpi(r6, Operand(FAST_ELEMENTS)); in Generate()
3800 __ beq(&done); in Generate()
3801 __ cmpi(r6, Operand(FAST_HOLEY_ELEMENTS)); in Generate()
3802 __ Assert(eq, kInvalidElementsKindForInternalArrayOrInternalPackedArray); in Generate()
3803 __ bind(&done); in Generate()
3807 __ cmpi(r6, Operand(FAST_ELEMENTS)); in Generate()
3808 __ beq(&fast_elements_case); in Generate()
3811 __ bind(&fast_elements_case); in Generate()
3822 __ AssertFunction(r4); in Generate()
3823 __ AssertReceiver(r6); in Generate()
3827 __ CompareObjectType(r6, r5, r5, JS_FUNCTION_TYPE); in Generate()
3828 __ bne(&new_object); in Generate()
3831 __ LoadP(r5, FieldMemOperand(r6, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3832 __ JumpIfSmi(r5, &new_object); in Generate()
3833 __ CompareObjectType(r5, r3, r3, MAP_TYPE); in Generate()
3834 __ bne(&new_object); in Generate()
3838 __ LoadP(r3, FieldMemOperand(r5, Map::kConstructorOrBackPointerOffset)); in Generate()
3839 __ cmp(r3, r4); in Generate()
3840 __ bne(&new_object); in Generate()
3844 __ lbz(r7, FieldMemOperand(r5, Map::kInstanceSizeOffset)); in Generate()
3845 __ Allocate(r7, r3, r8, r9, &allocate, SIZE_IN_WORDS); in Generate()
3846 __ bind(&done_allocate); in Generate()
3849 __ StoreP(r5, FieldMemOperand(r3, JSObject::kMapOffset), r0); in Generate()
3850 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex); in Generate()
3851 __ StoreP(r6, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0); in Generate()
3852 __ StoreP(r6, FieldMemOperand(r3, JSObject::kElementsOffset), r0); in Generate()
3854 __ addi(r4, r3, Operand(JSObject::kHeaderSize - kHeapObjectTag)); in Generate()
3868 __ LoadRoot(r9, Heap::kUndefinedValueRootIndex); in Generate()
3869 __ lwz(r6, FieldMemOperand(r5, Map::kBitField3Offset)); in Generate()
3870 __ DecodeField<Map::ConstructionCounter>(r10, r6, SetRC); in Generate()
3871 __ bne(&slack_tracking, cr0); in Generate()
3874 __ InitializeFieldsWithFiller(r4, r8, r9); in Generate()
3875 __ Ret(); in Generate()
3877 __ bind(&slack_tracking); in Generate()
3881 __ Add(r6, r6, -(1 << Map::ConstructionCounter::kShift), r0); in Generate()
3882 __ stw(r6, FieldMemOperand(r5, Map::kBitField3Offset)); in Generate()
3885 __ lbz(r7, FieldMemOperand(r5, Map::kUnusedPropertyFieldsOffset)); in Generate()
3886 __ ShiftLeftImm(r7, r7, Operand(kPointerSizeLog2)); in Generate()
3887 __ sub(r7, r8, r7); in Generate()
3888 __ InitializeFieldsWithFiller(r4, r7, r9); in Generate()
3891 __ LoadRoot(r9, Heap::kOnePointerFillerMapRootIndex); in Generate()
3892 __ InitializeFieldsWithFiller(r4, r8, r9); in Generate()
3895 __ cmpi(r10, Operand(Map::kSlackTrackingCounterEnd)); in Generate()
3896 __ Ret(ne); in Generate()
3901 __ Push(r3, r5); in Generate()
3902 __ CallRuntime(Runtime::kFinalizeInstanceSize); in Generate()
3903 __ Pop(r3); in Generate()
3905 __ Ret(); in Generate()
3909 __ bind(&allocate); in Generate()
3913 __ ShiftLeftImm(r7, r7, in Generate()
3915 __ Push(r5, r7); in Generate()
3916 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
3917 __ Pop(r5); in Generate()
3919 __ lbz(r8, FieldMemOperand(r5, Map::kInstanceSizeOffset)); in Generate()
3920 __ ShiftLeftImm(r8, r8, Operand(kPointerSizeLog2)); in Generate()
3921 __ add(r8, r3, r8); in Generate()
3922 __ subi(r8, r8, Operand(kHeapObjectTag)); in Generate()
3923 __ b(&done_allocate); in Generate()
3926 __ bind(&new_object); in Generate()
3927 __ Push(r4, r6); in Generate()
3928 __ TailCallRuntime(Runtime::kNewObject); in Generate()
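During slack tracking, FastNewObjectStub splits initialization in two: fields up to the unused-property boundary get undefined, and the still-unused tail gets the one-pointer filler map so the heap stays iterable until the construction counter runs out. A sketch of what the two InitializeFieldsWithFiller calls amount to:

    // start/end_of_used/end model r4/r7/r8 in the fragment above.
    void InitializeFields(void** start, void** end_of_used, void** end,
                          void* undefined, void* one_pointer_filler) {
      for (void** p = start; p < end_of_used; ++p) *p = undefined;
      for (void** p = end_of_used; p < end; ++p) *p = one_pointer_filler;
    }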
3938 __ AssertFunction(r4); in Generate()
3941 __ mr(r5, fp); in Generate()
3945 __ LoadP(r5, MemOperand(r5, StandardFrameConstants::kCallerFPOffset)); in Generate()
3949 __ LoadP(ip, MemOperand(r5, StandardFrameConstants::kFunctionOffset)); in Generate()
3950 __ cmp(ip, r4); in Generate()
3951 __ beq(&ok); in Generate()
3952 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); in Generate()
3953 __ bind(&ok); in Generate()
3959 __ LoadP(r5, MemOperand(r5, StandardFrameConstants::kCallerFPOffset)); in Generate()
3960 __ LoadP(ip, MemOperand(r5, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate()
3961 __ CmpSmiLiteral(ip, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0); in Generate()
3962 __ bne(&no_rest_parameters); in Generate()
3967 __ LoadP(r3, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset)); in Generate()
3968 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); in Generate()
3969 __ LoadWordArith( in Generate()
3972 __ SmiTag(r6); in Generate()
3974 __ sub(r3, r3, r6, LeaveOE, SetRC); in Generate()
3975 __ bgt(&rest_parameters, cr0); in Generate()
3978 __ bind(&no_rest_parameters); in Generate()
3987 __ Allocate(JSArray::kSize, r3, r4, r5, &allocate, NO_ALLOCATION_FLAGS); in Generate()
3988 __ bind(&done_allocate); in Generate()
3991 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, r4); in Generate()
3992 __ StoreP(r4, FieldMemOperand(r3, JSArray::kMapOffset), r0); in Generate()
3993 __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex); in Generate()
3994 __ StoreP(r4, FieldMemOperand(r3, JSArray::kPropertiesOffset), r0); in Generate()
3995 __ StoreP(r4, FieldMemOperand(r3, JSArray::kElementsOffset), r0); in Generate()
3996 __ li(r4, Operand::Zero()); in Generate()
3997 __ StoreP(r4, FieldMemOperand(r3, JSArray::kLengthOffset), r0); in Generate()
3999 __ Ret(); in Generate()
4002 __ bind(&allocate); in Generate()
4005 __ Push(Smi::FromInt(JSArray::kSize)); in Generate()
4006 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
4008 __ b(&done_allocate); in Generate()
4011 __ bind(&rest_parameters); in Generate()
4014 __ SmiToPtrArrayOffset(r9, r3); in Generate()
4015 __ add(r5, r5, r9); in Generate()
4016 __ addi(r5, r5, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate()
4029 __ mov(r10, Operand(JSArray::kSize + FixedArray::kHeaderSize)); in Generate()
4030 __ add(r10, r10, r9); in Generate()
4031 __ Allocate(r10, r6, r7, r8, &allocate, NO_ALLOCATION_FLAGS); in Generate()
4032 __ bind(&done_allocate); in Generate()
4035 __ LoadRoot(r4, Heap::kFixedArrayMapRootIndex); in Generate()
4036 __ StoreP(r4, FieldMemOperand(r6, FixedArray::kMapOffset), r0); in Generate()
4037 __ StoreP(r3, FieldMemOperand(r6, FixedArray::kLengthOffset), r0); in Generate()
4038 __ addi(r7, r6, in Generate()
4042 __ SmiUntag(r0, r3); in Generate()
4043 __ mtctr(r0); in Generate()
4044 __ bind(&loop); in Generate()
4045 __ LoadPU(ip, MemOperand(r5, -kPointerSize)); in Generate()
4046 __ StorePU(ip, MemOperand(r7, kPointerSize)); in Generate()
4047 __ bdnz(&loop); in Generate()
4048 __ addi(r7, r7, Operand(kPointerSize)); in Generate()
4052 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, r4); in Generate()
4053 __ StoreP(r4, MemOperand(r7, JSArray::kMapOffset)); in Generate()
4054 __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex); in Generate()
4055 __ StoreP(r4, MemOperand(r7, JSArray::kPropertiesOffset)); in Generate()
4056 __ StoreP(r6, MemOperand(r7, JSArray::kElementsOffset)); in Generate()
4057 __ StoreP(r3, MemOperand(r7, JSArray::kLengthOffset)); in Generate()
4059 __ addi(r3, r7, Operand(kHeapObjectTag)); in Generate()
4060 __ Ret(); in Generate()
4064 __ bind(&allocate); in Generate()
4065 __ Cmpi(r10, Operand(kMaxRegularHeapObjectSize), r0); in Generate()
4066 __ bgt(&too_big_for_new_space); in Generate()
4069 __ SmiTag(r10); in Generate()
4070 __ Push(r3, r5, r10); in Generate()
4071 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
4072 __ mr(r6, r3); in Generate()
4073 __ Pop(r3, r5); in Generate()
4075 __ b(&done_allocate); in Generate()
4078 __ bind(&too_big_for_new_space); in Generate()
4079 __ push(r4); in Generate()
4080 __ TailCallRuntime(Runtime::kNewRestParameter); in Generate()
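The sub/bgt pair near the top of the rest-parameter stub computes the rest count as actual arguments minus formal parameters, taking the non-empty path only when it is positive:

    #include <algorithm>

    int RestParameterCount(int actual_args, int formal_params) {
      return std::max(0, actual_args - formal_params);
    }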
4091 __ AssertFunction(r4); in Generate()
4094 __ mr(r10, fp); in Generate()
4098 __ LoadP(r10, MemOperand(r10, StandardFrameConstants::kCallerFPOffset)); in Generate()
4102 __ LoadP(ip, MemOperand(r10, StandardFrameConstants::kFunctionOffset)); in Generate()
4103 __ cmp(ip, r4); in Generate()
4104 __ beq(&ok); in Generate()
4105 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); in Generate()
4106 __ bind(&ok); in Generate()
4111 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); in Generate()
4112 __ LoadWordArith( in Generate()
4115 __ SmiTag(r5); in Generate()
4117 __ SmiToPtrArrayOffset(r6, r5); in Generate()
4118 __ add(r6, r10, r6); in Generate()
4119 __ addi(r6, r6, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate()
4131 __ LoadP(r7, MemOperand(r10, StandardFrameConstants::kCallerFPOffset)); in Generate()
4132 __ LoadP(r3, MemOperand(r7, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate()
4133 __ CmpSmiLiteral(r3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0); in Generate()
4134 __ beq(&adaptor_frame); in Generate()
4137 __ mr(r8, r5); in Generate()
4138 __ mr(r9, r5); in Generate()
4139 __ b(&try_allocate); in Generate()
4142 __ bind(&adaptor_frame); in Generate()
4143 __ LoadP(r8, MemOperand(r7, ArgumentsAdaptorFrameConstants::kLengthOffset)); in Generate()
4144 __ SmiToPtrArrayOffset(r6, r8); in Generate()
4145 __ add(r6, r6, r7); in Generate()
4146 __ addi(r6, r6, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate()
4151 __ cmp(r5, r8); in Generate()
4153 __ isel(lt, r9, r5, r8); in Generate()
4156 __ mr(r9, r5); in Generate()
4157 __ blt(&skip); in Generate()
4158 __ mr(r9, r8); in Generate()
4159 __ bind(&skip); in Generate()
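Both paths above compute r9 = min(formal parameter count, adaptor argument count): isel does it branchlessly on cores that support it, and the mr/blt/mr sequence is the fallback. In plain C++ terms:

    #include <algorithm>

    int MappedParameterCount(int formal_count, int adaptor_count) {
      return std::min(formal_count, adaptor_count);
    }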
4162 __ bind(&try_allocate); in Generate()
4169 __ CmpSmiLiteral(r9, Smi::kZero, r0); in Generate()
4171 __ SmiToPtrArrayOffset(r11, r9); in Generate()
4172 __ addi(r11, r11, Operand(kParameterMapHeaderSize)); in Generate()
4173 __ isel(eq, r11, r0, r11); in Generate()
4176 __ bne(&skip2); in Generate()
4177 __ li(r11, Operand::Zero()); in Generate()
4178 __ b(&skip3); in Generate()
4179 __ bind(&skip2); in Generate()
4180 __ SmiToPtrArrayOffset(r11, r9); in Generate()
4181 __ addi(r11, r11, Operand(kParameterMapHeaderSize)); in Generate()
4182 __ bind(&skip3); in Generate()
4186 __ SmiToPtrArrayOffset(r7, r8); in Generate()
4187 __ add(r11, r11, r7); in Generate()
4188 __ addi(r11, r11, Operand(FixedArray::kHeaderSize)); in Generate()
4191 __ addi(r11, r11, Operand(JSSloppyArgumentsObject::kSize)); in Generate()
4194 __ Allocate(r11, r3, r11, r7, &runtime, NO_ALLOCATION_FLAGS); in Generate()
4204 __ LoadP(r7, NativeContextMemOperand()); in Generate()
4205 __ cmpi(r9, Operand::Zero()); in Generate()
4207 __ LoadP(r11, MemOperand(r7, kNormalOffset)); in Generate()
4208 __ LoadP(r7, MemOperand(r7, kAliasedOffset)); in Generate()
4209 __ isel(eq, r7, r11, r7); in Generate()
4212 __ bne(&skip4); in Generate()
4213 __ LoadP(r7, MemOperand(r7, kNormalOffset)); in Generate()
4214 __ b(&skip5); in Generate()
4215 __ bind(&skip4); in Generate()
4216 __ LoadP(r7, MemOperand(r7, kAliasedOffset)); in Generate()
4217 __ bind(&skip5); in Generate()
4224 __ StoreP(r7, FieldMemOperand(r3, JSObject::kMapOffset), r0); in Generate()
4225 __ LoadRoot(r11, Heap::kEmptyFixedArrayRootIndex); in Generate()
4226 __ StoreP(r11, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0); in Generate()
4227 __ StoreP(r11, FieldMemOperand(r3, JSObject::kElementsOffset), r0); in Generate()
4230 __ AssertNotSmi(r4); in Generate()
4231 __ StoreP(r4, FieldMemOperand(r3, JSSloppyArgumentsObject::kCalleeOffset), in Generate()
4235 __ AssertSmi(r8); in Generate()
4236 __ StoreP(r8, FieldMemOperand(r3, JSSloppyArgumentsObject::kLengthOffset), in Generate()
4242 __ addi(r7, r3, Operand(JSSloppyArgumentsObject::kSize)); in Generate()
4243 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0); in Generate()
4251 __ CmpSmiLiteral(r9, Smi::kZero, r0); in Generate()
4253 __ isel(eq, r4, r7, r4); in Generate()
4254 __ beq(&skip_parameter_map); in Generate()
4257 __ bne(&skip6); in Generate()
4260 __ mr(r4, r7); in Generate()
4261 __ b(&skip_parameter_map); in Generate()
4262 __ bind(&skip6); in Generate()
4265 __ LoadRoot(r8, Heap::kSloppyArgumentsElementsMapRootIndex); in Generate()
4266 __ StoreP(r8, FieldMemOperand(r7, FixedArray::kMapOffset), r0); in Generate()
4267 __ AddSmiLiteral(r8, r9, Smi::FromInt(2), r0); in Generate()
4268 __ StoreP(r8, FieldMemOperand(r7, FixedArray::kLengthOffset), r0); in Generate()
4269 __ StoreP(cp, FieldMemOperand(r7, FixedArray::kHeaderSize + 0 * kPointerSize), in Generate()
4271 __ SmiToPtrArrayOffset(r8, r9); in Generate()
4272 __ add(r8, r8, r7); in Generate()
4273 __ addi(r8, r8, Operand(kParameterMapHeaderSize)); in Generate()
4274 __ StoreP(r8, FieldMemOperand(r7, FixedArray::kHeaderSize + 1 * kPointerSize), in Generate()
4286 __ mr(r8, r9); in Generate()
4287 __ AddSmiLiteral(r11, r5, Smi::FromInt(Context::MIN_CONTEXT_SLOTS), r0); in Generate()
4288 __ sub(r11, r11, r9); in Generate()
4289 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); in Generate()
4290 __ SmiToPtrArrayOffset(r4, r8); in Generate()
4291 __ add(r4, r4, r7); in Generate()
4292 __ addi(r4, r4, Operand(kParameterMapHeaderSize)); in Generate()
4299 __ SmiUntag(r8); in Generate()
4300 __ mtctr(r8); in Generate()
4301 __ ShiftLeftImm(r8, r8, Operand(kPointerSizeLog2)); in Generate()
4302 __ add(r10, r4, r8); in Generate()
4303 __ add(r8, r7, r8); in Generate()
4304 __ addi(r10, r10, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in Generate()
4305 __ addi(r8, r8, Operand(kParameterMapHeaderSize - kHeapObjectTag)); in Generate()
4307 __ bind(&parameters_loop); in Generate()
4308 __ StorePU(r11, MemOperand(r8, -kPointerSize)); in Generate()
4309 __ StorePU(ip, MemOperand(r10, -kPointerSize)); in Generate()
4310 __ AddSmiLiteral(r11, r11, Smi::FromInt(1), r0); in Generate()
4311 __ bdnz(&parameters_loop); in Generate()
4314 __ LoadP(r8, FieldMemOperand(r3, JSSloppyArgumentsObject::kLengthOffset)); in Generate()
4316 __ bind(&skip_parameter_map); in Generate()
4323 __ LoadRoot(r11, Heap::kFixedArrayMapRootIndex); in Generate()
4324 __ StoreP(r11, FieldMemOperand(r4, FixedArray::kMapOffset), r0); in Generate()
4325 __ StoreP(r8, FieldMemOperand(r4, FixedArray::kLengthOffset), r0); in Generate()
4326 __ sub(r11, r8, r9, LeaveOE, SetRC); in Generate()
4327 __ Ret(eq, cr0); in Generate()
4330 __ SmiUntag(r11); in Generate()
4331 __ mtctr(r11); in Generate()
4333 __ SmiToPtrArrayOffset(r0, r9); in Generate()
4334 __ sub(r6, r6, r0); in Generate()
4335 __ add(r11, r4, r0); in Generate()
4336 __ addi(r11, r11, in Generate()
4339 __ bind(&arguments_loop); in Generate()
4340 __ LoadPU(r7, MemOperand(r6, -kPointerSize)); in Generate()
4341 __ StorePU(r7, MemOperand(r11, kPointerSize)); in Generate()
4342 __ bdnz(&arguments_loop); in Generate()
4345 __ Ret(); in Generate()
4349 __ bind(&runtime); in Generate()
4350 __ Push(r4, r6, r8); in Generate()
4351 __ TailCallRuntime(Runtime::kNewSloppyArguments); in Generate()
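The r11 arithmetic before the Allocate call sizes three consecutive pieces: an optional parameter map (only when some arguments are mapped), the backing FixedArray for all arguments, and the JSSloppyArgumentsObject itself. A sketch with stand-in constants for the header sizes:

    #include <cstddef>

    constexpr size_t kPointerSize = 8;                               // 64-bit PPC
    constexpr size_t kParameterMapHeaderSize = 4 * kPointerSize;     // assumed
    constexpr size_t kFixedArrayHeaderSize = 2 * kPointerSize;       // assumed
    constexpr size_t kSloppyArgumentsObjectSize = 5 * kPointerSize;  // assumed

    size_t ArgumentsAllocationSize(size_t mapped_count, size_t arg_count) {
      size_t size = (mapped_count == 0)
          ? 0
          : mapped_count * kPointerSize + kParameterMapHeaderSize;
      size += arg_count * kPointerSize + kFixedArrayHeaderSize;
      return size + kSloppyArgumentsObjectSize;
    }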
4361 __ AssertFunction(r4); in Generate()
4364 __ mr(r5, fp); in Generate()
4368 __ LoadP(r5, MemOperand(r5, StandardFrameConstants::kCallerFPOffset)); in Generate()
4372 __ LoadP(ip, MemOperand(r5, StandardFrameConstants::kFunctionOffset)); in Generate()
4373 __ cmp(ip, r4); in Generate()
4374 __ beq(&ok); in Generate()
4375 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); in Generate()
4376 __ bind(&ok); in Generate()
4381 __ LoadP(r6, MemOperand(r5, StandardFrameConstants::kCallerFPOffset)); in Generate()
4382 __ LoadP(ip, MemOperand(r6, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate()
4383 __ CmpSmiLiteral(ip, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0); in Generate()
4384 __ beq(&arguments_adaptor); in Generate()
4386 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); in Generate()
4387 __ LoadWordArith( in Generate()
4391 __ SmiTag(r3); in Generate()
4393 __ SmiToPtrArrayOffset(r9, r3); in Generate()
4394 __ add(r5, r5, r9); in Generate()
4396 __ b(&arguments_done); in Generate()
4397 __ bind(&arguments_adaptor); in Generate()
4399 __ LoadP(r3, MemOperand(r6, ArgumentsAdaptorFrameConstants::kLengthOffset)); in Generate()
4400 __ SmiToPtrArrayOffset(r9, r3); in Generate()
4401 __ add(r5, r6, r9); in Generate()
4403 __ bind(&arguments_done); in Generate()
4404 __ addi(r5, r5, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate()
4417 __ mov(r10, in Generate()
4419 __ add(r10, r10, r9); in Generate()
4420 __ Allocate(r10, r6, r7, r8, &allocate, NO_ALLOCATION_FLAGS); in Generate()
4421 __ bind(&done_allocate); in Generate()
4424 __ LoadRoot(r4, Heap::kFixedArrayMapRootIndex); in Generate()
4425 __ StoreP(r4, FieldMemOperand(r6, FixedArray::kMapOffset), r0); in Generate()
4426 __ StoreP(r3, FieldMemOperand(r6, FixedArray::kLengthOffset), r0); in Generate()
4427 __ addi(r7, r6, in Generate()
4431 __ SmiUntag(r0, r3, SetRC); in Generate()
4432 __ beq(&done_loop, cr0); in Generate()
4433 __ mtctr(r0); in Generate()
4434 __ bind(&loop); in Generate()
4435 __ LoadPU(ip, MemOperand(r5, -kPointerSize)); in Generate()
4436 __ StorePU(ip, MemOperand(r7, kPointerSize)); in Generate()
4437 __ bdnz(&loop); in Generate()
4438 __ bind(&done_loop); in Generate()
4439 __ addi(r7, r7, Operand(kPointerSize)); in Generate()
4443 __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, r4); in Generate()
4444 __ StoreP(r4, MemOperand(r7, JSStrictArgumentsObject::kMapOffset)); in Generate()
4445 __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex); in Generate()
4446 __ StoreP(r4, MemOperand(r7, JSStrictArgumentsObject::kPropertiesOffset)); in Generate()
4447 __ StoreP(r6, MemOperand(r7, JSStrictArgumentsObject::kElementsOffset)); in Generate()
4448 __ StoreP(r3, MemOperand(r7, JSStrictArgumentsObject::kLengthOffset)); in Generate()
4450 __ addi(r3, r7, Operand(kHeapObjectTag)); in Generate()
4451 __ Ret(); in Generate()
4455 __ bind(&allocate); in Generate()
4456 __ Cmpi(r10, Operand(kMaxRegularHeapObjectSize), r0); in Generate()
4457 __ bgt(&too_big_for_new_space); in Generate()
4460 __ SmiTag(r10); in Generate()
4461 __ Push(r3, r5, r10); in Generate()
4462 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
4463 __ mr(r6, r3); in Generate()
4464 __ Pop(r3, r5); in Generate()
4466 __ b(&done_allocate); in Generate()
4469 __ bind(&too_big_for_new_space); in Generate()
4470 __ push(r4); in Generate()
4471 __ TailCallRuntime(Runtime::kNewStrictArguments); in Generate()
4503 __ mov(scratch, Operand(ExternalReference::is_profiling_address(isolate))); in CallApiFunctionAndReturn()
4504 __ lbz(scratch, MemOperand(scratch, 0)); in CallApiFunctionAndReturn()
4505 __ cmpi(scratch, Operand::Zero()); in CallApiFunctionAndReturn()
4508 __ mov(scratch, Operand(thunk_ref)); in CallApiFunctionAndReturn()
4509 __ isel(eq, scratch, function_address, scratch); in CallApiFunctionAndReturn()
4513 __ beq(&profiler_disabled); in CallApiFunctionAndReturn()
4514 __ mov(scratch, Operand(thunk_ref)); in CallApiFunctionAndReturn()
4515 __ b(&end_profiler_check); in CallApiFunctionAndReturn()
4516 __ bind(&profiler_disabled); in CallApiFunctionAndReturn()
4517 __ mr(scratch, function_address); in CallApiFunctionAndReturn()
4518 __ bind(&end_profiler_check); in CallApiFunctionAndReturn()
4526 __ mov(r17, Operand(next_address)); in CallApiFunctionAndReturn()
4527 __ LoadP(r14, MemOperand(r17, kNextOffset)); in CallApiFunctionAndReturn()
4528 __ LoadP(r15, MemOperand(r17, kLimitOffset)); in CallApiFunctionAndReturn()
4529 __ lwz(r16, MemOperand(r17, kLevelOffset)); in CallApiFunctionAndReturn()
4530 __ addi(r16, r16, Operand(1)); in CallApiFunctionAndReturn()
4531 __ stw(r16, MemOperand(r17, kLevelOffset)); in CallApiFunctionAndReturn()
4535 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
4536 __ PrepareCallCFunction(1, r3); in CallApiFunctionAndReturn()
4537 __ mov(r3, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
4538 __ CallCFunction(ExternalReference::log_enter_external_function(isolate), in CallApiFunctionAndReturn()
4540 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
4551 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
4552 __ PrepareCallCFunction(1, r3); in CallApiFunctionAndReturn()
4553 __ mov(r3, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
4554 __ CallCFunction(ExternalReference::log_leave_external_function(isolate), in CallApiFunctionAndReturn()
4556 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
4565 __ LoadP(r3, return_value_operand); in CallApiFunctionAndReturn()
4566 __ bind(&return_value_loaded); in CallApiFunctionAndReturn()
4569 __ StoreP(r14, MemOperand(r17, kNextOffset)); in CallApiFunctionAndReturn()
4570 if (__ emit_debug_code()) { in CallApiFunctionAndReturn()
4571 __ lwz(r4, MemOperand(r17, kLevelOffset)); in CallApiFunctionAndReturn()
4572 __ cmp(r4, r16); in CallApiFunctionAndReturn()
4573 __ Check(eq, kUnexpectedLevelAfterReturnFromApiCall); in CallApiFunctionAndReturn()
4575 __ subi(r16, r16, Operand(1)); in CallApiFunctionAndReturn()
4576 __ stw(r16, MemOperand(r17, kLevelOffset)); in CallApiFunctionAndReturn()
4577 __ LoadP(r0, MemOperand(r17, kLimitOffset)); in CallApiFunctionAndReturn()
4578 __ cmp(r15, r0); in CallApiFunctionAndReturn()
4579 __ bne(&delete_allocated_handles); in CallApiFunctionAndReturn()
4582 __ bind(&leave_exit_frame); in CallApiFunctionAndReturn()
4585 __ LoadP(cp, *context_restore_operand); in CallApiFunctionAndReturn()
4589 __ lwz(r14, *stack_space_operand); in CallApiFunctionAndReturn()
4591 __ mov(r14, Operand(stack_space)); in CallApiFunctionAndReturn()
4593 __ LeaveExitFrame(false, r14, !restore_context, stack_space_operand != NULL); in CallApiFunctionAndReturn()
4596 __ LoadRoot(r14, Heap::kTheHoleValueRootIndex); in CallApiFunctionAndReturn()
4597 __ mov(r15, Operand(ExternalReference::scheduled_exception_address(isolate))); in CallApiFunctionAndReturn()
4598 __ LoadP(r15, MemOperand(r15)); in CallApiFunctionAndReturn()
4599 __ cmp(r14, r15); in CallApiFunctionAndReturn()
4600 __ bne(&promote_scheduled_exception); in CallApiFunctionAndReturn()
4602 __ blr(); in CallApiFunctionAndReturn()
4605 __ bind(&promote_scheduled_exception); in CallApiFunctionAndReturn()
4606 __ TailCallRuntime(Runtime::kPromoteScheduledException); in CallApiFunctionAndReturn()
4609 __ bind(&delete_allocated_handles); in CallApiFunctionAndReturn()
4610 __ StoreP(r15, MemOperand(r17, kLimitOffset)); in CallApiFunctionAndReturn()
4611 __ mr(r14, r3); in CallApiFunctionAndReturn()
4612 __ PrepareCallCFunction(1, r15); in CallApiFunctionAndReturn()
4613 __ mov(r3, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
4614 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), in CallApiFunctionAndReturn()
4616 __ mr(r3, r14); in CallApiFunctionAndReturn()
4617 __ b(&leave_exit_frame); in CallApiFunctionAndReturn()
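CallApiFunctionAndReturn brackets the C++ callback with HandleScope bookkeeping: save next/limit/level into r14/r15/r16 and bump the level before the call; afterwards restore next, drop the level, and take the slow delete-extensions path only if the limit moved. An illustrative model (names are assumptions, not V8 declarations):

    struct HandleScopeData {
      void** next;
      void** limit;
      int level;
    };

    void EnterApiCall(HandleScopeData* d, HandleScopeData* saved) {
      *saved = *d;   // what the three loads into r14/r15/r16 capture
      d->level += 1;
    }

    // Returns true when extensions were allocated during the call, i.e. the
    // limit moved and delete_handle_scope_extensions must run.
    bool LeaveApiCall(HandleScopeData* d, const HandleScopeData* saved) {
      d->next = saved->next;
      d->level -= 1;
      return d->limit != saved->limit;
    }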
4653 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate()
4656 __ push(context); in Generate()
4659 __ LoadP(context, FieldMemOperand(callee, JSFunction::kContextOffset)); in Generate()
4663 __ push(callee); in Generate()
4666 __ push(call_data); in Generate()
4670 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); in Generate()
4673 __ push(scratch); in Generate()
4675 __ push(scratch); in Generate()
4677 __ mov(scratch, Operand(ExternalReference::isolate_address(masm->isolate()))); in Generate()
4678 __ push(scratch); in Generate()
4680 __ push(holder); in Generate()
4683 __ mr(scratch, sp); in Generate()
4697 __ EnterExitFrame(false, kApiStackSpace); in Generate()
4702 __ addi(r3, sp, Operand(kFunctionCallbackInfoOffset)); in Generate()
4704 __ StoreP(scratch, MemOperand(r3, 0 * kPointerSize)); in Generate()
4706 __ addi(ip, scratch, Operand((FCA::kArgsLength - 1 + argc()) * kPointerSize)); in Generate()
4707 __ StoreP(ip, MemOperand(r3, 1 * kPointerSize)); in Generate()
4709 __ li(ip, Operand(argc())); in Generate()
4710 __ stw(ip, MemOperand(r3, 2 * kPointerSize)); in Generate()
4761 __ push(receiver); in Generate()
4763 __ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset)); in Generate()
4764 __ push(scratch); in Generate()
4765 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); in Generate()
4766 __ Push(scratch, scratch); in Generate()
4767 __ mov(scratch, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
4768 __ Push(scratch, holder); in Generate()
4769 __ Push(Smi::kZero); // should_throw_on_error -> false in Generate()
4770 __ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset)); in Generate()
4771 __ push(scratch); in Generate()
4777 __ mr(r3, sp); // r3 = Handle<Name> in Generate()
4778 __ addi(r4, r3, Operand(1 * kPointerSize)); // r4 = v8::PCI::args_ in Generate()
4802 __ EnterExitFrame(false, apiStackSpace); in Generate()
4806 __ StoreP(r3, MemOperand(sp, arg0Slot * kPointerSize)); in Generate()
4807 __ addi(r3, sp, Operand(arg0Slot * kPointerSize)); in Generate()
4812 __ StoreP(r4, MemOperand(sp, accessorInfoSlot * kPointerSize)); in Generate()
4813 __ addi(r4, sp, Operand(accessorInfoSlot * kPointerSize)); in Generate()
4819 __ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset)); in Generate()
4820 __ LoadP(api_function_address, in Generate()
4830 #undef __