Lines Matching full:__ (every occurrence of the __ shorthand for ACCESS_MASM in this file)

25 #define __ ACCESS_MASM(masm) macro
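
A note on the definition above: every hit below expands through ACCESS_MASM into a call on the masm pointer, which is why each listed line starts with __. A minimal, self-contained sketch of the convention; the MacroAssembler here is a stand-in that prints text, while the real class lives in macro-assembler.h and emits machine code:

    #include <cstdio>

    // Stand-in for V8's MacroAssembler; illustrative only.
    struct MacroAssembler {
      void Mov(int reg, int imm) { std::printf("mov x%d, #%d\n", reg, imm); }
    };

    #define ACCESS_MASM(masm) masm->
    #define __ ACCESS_MASM(masm)

    void Generate(MacroAssembler* masm) {
      __ Mov(0, 42);  // expands to masm->Mov(0, 42)
    }

    int main() {
      MacroAssembler m;
      Generate(&m);
    }
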
28 __ Mov(x5, Operand(x0, LSL, kPointerSizeLog2)); in Generate()
29 __ Str(x1, MemOperand(jssp, x5)); in Generate()
30 __ Push(x1); in Generate()
31 __ Push(x2); in Generate()
32 __ Add(x0, x0, Operand(3)); in Generate()
33 __ TailCallRuntime(Runtime::kNewArray); in Generate()
67 __ CallExternalReference(miss, param_count); in GenerateLightweightMiss()
70 __ Ret(); in GenerateLightweightMiss()
90 __ Push(scratch1, scratch2); in Generate()
95 __ Push(double_scratch); in Generate()
97 __ Ldr(double_scratch, MemOperand(input, double_offset)); in Generate()
100 __ TryConvertDoubleToInt64(result, double_scratch, &done); in Generate()
101 __ Fmov(result, double_scratch); in Generate()
103 __ Ldr(result, MemOperand(input, double_offset)); in Generate()
110 __ Ubfx(exponent, result, HeapNumber::kMantissaBits, in Generate()
115 __ Cmp(exponent, HeapNumber::kExponentBias + HeapNumber::kMantissaBits + 32); in Generate()
116 __ CzeroX(result, ge); in Generate()
117 __ B(ge, &done); in Generate()
124 __ Cmp(exponent, HeapNumber::kExponentBias + 63); in Generate()
126 __ Check(ge, kUnexpectedValue); in Generate()
131 __ Ubfx(mantissa, result, 0, HeapNumber::kMantissaBits); in Generate()
132 __ Orr(mantissa, mantissa, 1UL << HeapNumber::kMantissaBits); in Generate()
135 __ Tst(result, kXSignMask); in Generate()
136 __ Cneg(mantissa, mantissa, ne); in Generate()
140 __ Sub(exponent, exponent, in Generate()
142 __ Lsl(result, mantissa, exponent); in Generate()
144 __ Bind(&done); in Generate()
146 __ Pop(double_scratch); in Generate()
148 __ Pop(scratch2, scratch1); in Generate()
149 __ Ret(); in Generate()
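
The hits at lines 90-149 are the slow path of a double-to-integer truncation: when TryConvertDoubleToInt64 at line 100 fails, the stub decodes the IEEE-754 fields by hand (Ubfx and Orr at lines 110-132) and shifts the mantissa into place. A hedged C++ sketch of the same bit manipulation, generalized to all inputs rather than only the out-of-int64-range slice the stub actually sees; the constants mirror HeapNumber::kMantissaBits and kExponentBias:

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    int32_t TruncateToInt32(double input) {
      uint64_t bits;
      std::memcpy(&bits, &input, sizeof bits);  // reload raw bits, as on line 103

      const int kMantissaBits = 52;    // HeapNumber::kMantissaBits
      const int kExponentBias = 1023;  // HeapNumber::kExponentBias

      int exponent =
          static_cast<int>((bits >> kMantissaBits) & 0x7ff) - kExponentBias;  // Ubfx
      if (exponent >= kMantissaBits + 32) return 0;  // lines 115-117: low 32 bits zero
      if (exponent < 0) return 0;  // |input| < 1 (handled by the fast path in the stub)

      uint64_t mantissa = (bits & ((uint64_t{1} << kMantissaBits) - 1)) |
                          (uint64_t{1} << kMantissaBits);  // Orr in the implicit 1, line 132
      int shift = exponent - kMantissaBits;                // lines 140-142
      uint64_t magnitude = shift >= 0 ? mantissa << shift : mantissa >> -shift;
      uint64_t result = (bits >> 63) ? 0 - magnitude : magnitude;  // Cneg, line 136
      return static_cast<int32_t>(result);
    }

    int main() {
      assert(TruncateToInt32(3.7) == 3);
      assert(TruncateToInt32(-2147483648.0) == INT32_MIN);
    }
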
162 __ Cmp(right, left); in EmitIdenticalObjectComparison()
163 __ B(ne, &not_identical); in EmitIdenticalObjectComparison()
172 __ JumpIfObjectType(right, right_type, right_type, FIRST_JS_RECEIVER_TYPE, in EmitIdenticalObjectComparison()
175 __ Cmp(right_type, SYMBOL_TYPE); in EmitIdenticalObjectComparison()
176 __ B(eq, slow); in EmitIdenticalObjectComparison()
178 __ Cmp(right_type, SIMD128_VALUE_TYPE); in EmitIdenticalObjectComparison()
179 __ B(eq, slow); in EmitIdenticalObjectComparison()
181 __ JumpIfHeapNumber(right, &heap_number); in EmitIdenticalObjectComparison()
183 __ JumpIfObjectType(right, right_type, right_type, HEAP_NUMBER_TYPE, in EmitIdenticalObjectComparison()
186 __ Cmp(right_type, FIRST_JS_RECEIVER_TYPE); in EmitIdenticalObjectComparison()
187 __ B(ge, slow); in EmitIdenticalObjectComparison()
189 __ Cmp(right_type, SYMBOL_TYPE); in EmitIdenticalObjectComparison()
190 __ B(eq, slow); in EmitIdenticalObjectComparison()
192 __ Cmp(right_type, SIMD128_VALUE_TYPE); in EmitIdenticalObjectComparison()
193 __ B(eq, slow); in EmitIdenticalObjectComparison()
198 __ Cmp(right_type, ODDBALL_TYPE); in EmitIdenticalObjectComparison()
199 __ B(ne, &return_equal); in EmitIdenticalObjectComparison()
200 __ JumpIfNotRoot(right, Heap::kUndefinedValueRootIndex, &return_equal); in EmitIdenticalObjectComparison()
203 __ Mov(result, GREATER); in EmitIdenticalObjectComparison()
206 __ Mov(result, LESS); in EmitIdenticalObjectComparison()
208 __ Ret(); in EmitIdenticalObjectComparison()
212 __ Bind(&return_equal); in EmitIdenticalObjectComparison()
214 __ Mov(result, GREATER); // Things aren't less than themselves. in EmitIdenticalObjectComparison()
216 __ Mov(result, LESS); // Things aren't greater than themselves. in EmitIdenticalObjectComparison()
218 __ Mov(result, EQUAL); // Things are <=, >=, ==, === themselves. in EmitIdenticalObjectComparison()
220 __ Ret(); in EmitIdenticalObjectComparison()
227 __ Bind(&heap_number); in EmitIdenticalObjectComparison()
231 __ Ldr(double_scratch, FieldMemOperand(right, HeapNumber::kValueOffset)); in EmitIdenticalObjectComparison()
232 __ Fcmp(double_scratch, double_scratch); in EmitIdenticalObjectComparison()
233 __ B(vc, &return_equal); // Not NaN, so treat as normal heap number. in EmitIdenticalObjectComparison()
236 __ Mov(result, GREATER); in EmitIdenticalObjectComparison()
238 __ Mov(result, LESS); in EmitIdenticalObjectComparison()
240 __ Ret(); in EmitIdenticalObjectComparison()
245 __ Unreachable(); in EmitIdenticalObjectComparison()
248 __ Bind(&not_identical); in EmitIdenticalObjectComparison()
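
The heap_number block above (lines 227-240) exists because identity is not enough for numbers: NaN compares unequal even to itself, and the stub returns GREATER for less-than-style operators and LESS for greater-than-style ones so that the relation in question comes out false. A small standard-C++ demonstration of the semantics being implemented:

    #include <cassert>
    #include <cmath>

    int main() {
      double nan = std::nan("");
      assert(nan != nan);    // identical value, still unequal
      assert(!(nan < nan));  // returning GREATER makes "<" come out false
      assert(!(nan > nan));  // returning LESS makes ">" come out false
    }
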
263 __ Cmp(left, right); in EmitStrictTwoHeapObjectCompare()
264 __ Assert(ne, kExpectedNonIdenticalObjects); in EmitStrictTwoHeapObjectCompare()
273 __ Cmp(right_type, FIRST_JS_RECEIVER_TYPE); in EmitStrictTwoHeapObjectCompare()
274 __ B(lt, &right_non_object); in EmitStrictTwoHeapObjectCompare()
279 __ Bind(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
280 __ Ret(); in EmitStrictTwoHeapObjectCompare()
282 __ Bind(&right_non_object); in EmitStrictTwoHeapObjectCompare()
285 __ Cmp(right_type, ODDBALL_TYPE); in EmitStrictTwoHeapObjectCompare()
288 __ Ccmp(left_type, ODDBALL_TYPE, ZFlag, ne); in EmitStrictTwoHeapObjectCompare()
292 __ Ccmp(left_type, FIRST_JS_RECEIVER_TYPE, NVFlag, ne); in EmitStrictTwoHeapObjectCompare()
294 __ B(ge, &return_not_equal); in EmitStrictTwoHeapObjectCompare()
300 __ Orr(scratch, left_type, right_type); in EmitStrictTwoHeapObjectCompare()
301 __ TestAndBranchIfAllClear( in EmitStrictTwoHeapObjectCompare()
320 __ JumpIfSmi(right, &right_is_smi); in EmitSmiNonsmiComparison()
327 __ JumpIfHeapNumber(right, &is_heap_number); in EmitSmiNonsmiComparison()
330 __ Mov(result, NOT_EQUAL); in EmitSmiNonsmiComparison()
332 __ Ret(); in EmitSmiNonsmiComparison()
333 __ Bind(&is_heap_number); in EmitSmiNonsmiComparison()
337 __ JumpIfNotHeapNumber(right, slow); in EmitSmiNonsmiComparison()
342 __ Ldr(right_d, FieldMemOperand(right, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
343 __ SmiUntagToDouble(left_d, left); in EmitSmiNonsmiComparison()
344 __ B(&done); in EmitSmiNonsmiComparison()
346 __ Bind(&right_is_smi); in EmitSmiNonsmiComparison()
352 __ JumpIfHeapNumber(left, &is_heap_number); in EmitSmiNonsmiComparison()
355 __ Mov(result, NOT_EQUAL); in EmitSmiNonsmiComparison()
357 __ Ret(); in EmitSmiNonsmiComparison()
358 __ Bind(&is_heap_number); in EmitSmiNonsmiComparison()
362 __ JumpIfNotHeapNumber(left, slow); in EmitSmiNonsmiComparison()
367 __ Ldr(left_d, FieldMemOperand(left, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
368 __ SmiUntagToDouble(right_d, right); in EmitSmiNonsmiComparison()
371 __ Bind(&done); in EmitSmiNonsmiComparison()
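
SmiUntagToDouble (lines 343 and 368) converts the Smi operand so both sides can go through Fcmp as doubles. A sketch of the untag-then-convert effect, assuming arm64's 64-bit Smi layout where the payload sits in the upper 32 bits of the tagged word; the shift amount is an assumption drawn from that layout, not from this listing:

    #include <cassert>
    #include <cstdint>

    double SmiUntagToDouble(int64_t tagged_smi) {
      return static_cast<double>(tagged_smi >> 32);  // arithmetic untag, then convert
    }

    int main() {
      int64_t smi = int64_t{-7} * (int64_t{1} << 32);  // tag -7
      assert(SmiUntagToDouble(smi) == -7.0);
    }
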
391 __ Tbnz(right_type, MaskToBit(kIsNotStringMask), &object_test); in EmitCheckForInternalizedStringsOrObjects()
392 __ Tbnz(right_type, MaskToBit(kIsNotInternalizedMask), possible_strings); in EmitCheckForInternalizedStringsOrObjects()
393 __ Tbnz(left_type, MaskToBit(kIsNotStringMask), runtime_call); in EmitCheckForInternalizedStringsOrObjects()
394 __ Tbnz(left_type, MaskToBit(kIsNotInternalizedMask), possible_strings); in EmitCheckForInternalizedStringsOrObjects()
399 __ Ret(); in EmitCheckForInternalizedStringsOrObjects()
401 __ Bind(&object_test); in EmitCheckForInternalizedStringsOrObjects()
405 __ Ldrb(right_bitfield, FieldMemOperand(right_map, Map::kBitFieldOffset)); in EmitCheckForInternalizedStringsOrObjects()
406 __ Ldrb(left_bitfield, FieldMemOperand(left_map, Map::kBitFieldOffset)); in EmitCheckForInternalizedStringsOrObjects()
407 __ Tbnz(right_bitfield, MaskToBit(1 << Map::kIsUndetectable), &undetectable); in EmitCheckForInternalizedStringsOrObjects()
408 __ Tbnz(left_bitfield, MaskToBit(1 << Map::kIsUndetectable), &return_unequal); in EmitCheckForInternalizedStringsOrObjects()
410 __ CompareInstanceType(right_map, right_type, FIRST_JS_RECEIVER_TYPE); in EmitCheckForInternalizedStringsOrObjects()
411 __ B(lt, runtime_call); in EmitCheckForInternalizedStringsOrObjects()
412 __ CompareInstanceType(left_map, left_type, FIRST_JS_RECEIVER_TYPE); in EmitCheckForInternalizedStringsOrObjects()
413 __ B(lt, runtime_call); in EmitCheckForInternalizedStringsOrObjects()
415 __ Bind(&return_unequal); in EmitCheckForInternalizedStringsOrObjects()
417 __ Ret(); in EmitCheckForInternalizedStringsOrObjects()
419 __ Bind(&undetectable); in EmitCheckForInternalizedStringsOrObjects()
420 __ Tbz(left_bitfield, MaskToBit(1 << Map::kIsUndetectable), &return_unequal); in EmitCheckForInternalizedStringsOrObjects()
425 __ CompareInstanceType(right_map, right_type, ODDBALL_TYPE); in EmitCheckForInternalizedStringsOrObjects()
426 __ B(eq, &return_equal); in EmitCheckForInternalizedStringsOrObjects()
427 __ CompareInstanceType(left_map, left_type, ODDBALL_TYPE); in EmitCheckForInternalizedStringsOrObjects()
428 __ B(ne, &return_unequal); in EmitCheckForInternalizedStringsOrObjects()
430 __ Bind(&return_equal); in EmitCheckForInternalizedStringsOrObjects()
431 __ Mov(result, EQUAL); in EmitCheckForInternalizedStringsOrObjects()
432 __ Ret(); in EmitCheckForInternalizedStringsOrObjects()
441 __ JumpIfNotSmi(input, fail); in CompareICStub_CheckInputType()
443 __ JumpIfSmi(input, &ok); in CompareICStub_CheckInputType()
444 __ JumpIfNotHeapNumber(input, fail); in CompareICStub_CheckInputType()
448 __ Bind(&ok); in CompareICStub_CheckInputType()
465 __ JumpIfEitherNotSmi(lhs, rhs, &not_two_smis); in GenerateGeneric()
466 __ SmiUntag(lhs); in GenerateGeneric()
467 __ Sub(result, lhs, Operand::UntagSmi(rhs)); in GenerateGeneric()
468 __ Ret(); in GenerateGeneric()
470 __ Bind(&not_two_smis); in GenerateGeneric()
481 __ JumpIfBothNotSmi(lhs, rhs, &not_smis); in GenerateGeneric()
495 __ Bind(&both_loaded_as_doubles); in GenerateGeneric()
499 __ Fcmp(lhs_d, rhs_d); in GenerateGeneric()
500 __ B(vs, &nan); // Overflow flag set if either is NaN. in GenerateGeneric()
502 __ Cset(result, gt); // gt => 1, otherwise (lt, eq) => 0 (EQUAL). in GenerateGeneric()
503 __ Csinv(result, result, xzr, ge); // lt => -1, gt => 1, eq => 0. in GenerateGeneric()
504 __ Ret(); in GenerateGeneric()
506 __ Bind(&nan); in GenerateGeneric()
512 __ Mov(result, GREATER); in GenerateGeneric()
514 __ Mov(result, LESS); in GenerateGeneric()
516 __ Ret(); in GenerateGeneric()
518 __ Bind(&not_smis); in GenerateGeneric()
527 __ Ldr(rhs_map, FieldMemOperand(rhs, HeapObject::kMapOffset)); in GenerateGeneric()
528 __ Ldr(lhs_map, FieldMemOperand(lhs, HeapObject::kMapOffset)); in GenerateGeneric()
529 __ Ldrb(rhs_type, FieldMemOperand(rhs_map, Map::kInstanceTypeOffset)); in GenerateGeneric()
530 __ Ldrb(lhs_type, FieldMemOperand(lhs_map, Map::kInstanceTypeOffset)); in GenerateGeneric()
542 __ Cmp(rhs_type, HEAP_NUMBER_TYPE); in GenerateGeneric()
543 __ B(ne, &check_for_internalized_strings); in GenerateGeneric()
544 __ Cmp(lhs_map, rhs_map); in GenerateGeneric()
548 __ B(ne, &flat_string_check); in GenerateGeneric()
552 __ Ldr(lhs_d, FieldMemOperand(lhs, HeapNumber::kValueOffset)); in GenerateGeneric()
553 __ Ldr(rhs_d, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in GenerateGeneric()
554 __ B(&both_loaded_as_doubles); in GenerateGeneric()
556 __ Bind(&check_for_internalized_strings); in GenerateGeneric()
569 __ Bind(&flat_string_check); in GenerateGeneric()
570 __ JumpIfBothInstanceTypesAreNotSequentialOneByte(lhs_type, rhs_type, x14, in GenerateGeneric()
573 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, x10, in GenerateGeneric()
585 __ Unreachable(); in GenerateGeneric()
588 __ Bind(&slow); in GenerateGeneric()
593 __ Push(lhs, rhs); in GenerateGeneric()
594 __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual); in GenerateGeneric()
598 __ LoadRoot(x1, Heap::kTrueValueRootIndex); in GenerateGeneric()
599 __ Sub(x0, x0, x1); in GenerateGeneric()
600 __ Ret(); in GenerateGeneric()
602 __ Push(lhs, rhs); in GenerateGeneric()
610 __ Mov(x10, Smi::FromInt(ncr)); in GenerateGeneric()
611 __ Push(x10); in GenerateGeneric()
615 __ TailCallRuntime(Runtime::kCompare); in GenerateGeneric()
618 __ Bind(&miss); in GenerateGeneric()
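
The Cset/Csinv pair at lines 502-503 is a branch-free three-way compare; the comments in the listing spell out the mapping. An equivalent C++ version, valid once NaN has been excluded, as the vs branch at line 500 does:

    #include <cassert>

    // gt => 1, eq => 0, lt => -1.
    int ThreeWayCompare(double lhs, double rhs) {
      int result = (lhs > rhs) ? 1 : 0;     // Cset(result, gt)
      result = (lhs >= rhs) ? result : -1;  // Csinv(result, result, xzr, ge)
      return result;
    }

    int main() {
      assert(ThreeWayCompare(1.0, 2.0) == -1);
      assert(ThreeWayCompare(2.0, 1.0) == 1);
      assert(ThreeWayCompare(2.0, 2.0) == 0);
    }
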
635 __ PushCPURegList(saved_regs); in Generate()
637 __ PushCPURegList(saved_fp_regs); in Generate()
641 __ Mov(x0, ExternalReference::isolate_address(isolate())); in Generate()
642 __ CallCFunction( in Generate()
646 __ PopCPURegList(saved_fp_regs); in Generate()
648 __ PopCPURegList(saved_regs); in Generate()
649 __ Ret(); in Generate()
667 __ Mov(return_address, lr); in Generate()
670 __ Mov(lr, saved_lr); in Generate()
671 __ PushSafepointRegisters(); in Generate()
672 __ Ret(return_address); in Generate()
681 __ Mov(return_address, lr); in Generate()
682 __ PopSafepointRegisters(); in Generate()
683 __ Ret(return_address); in Generate()
712 __ JumpIfSmi(exponent_tagged, &exponent_is_smi); in Generate()
713 __ Ldr(exponent_double, in Generate()
721 __ TryRepresentDoubleAsInt64(exponent_integer, exponent_double, in Generate()
726 __ Mov(saved_lr, lr); in Generate()
727 __ CallCFunction( in Generate()
729 __ Mov(lr, saved_lr); in Generate()
730 __ B(&done); in Generate()
734 __ Bind(&exponent_is_smi); in Generate()
738 __ SmiUntag(exponent_integer, exponent_tagged); in Generate()
741 __ Bind(&exponent_is_integer); in Generate()
749 __ Cmp(exponent_integer, 0); in Generate()
750 __ Cneg(exponent_abs, exponent_integer, mi); in Generate()
765 __ Fmov(scratch1_double, base_double); in Generate()
766 __ Fmov(base_double_copy, base_double); in Generate()
767 __ Fmov(result_double, 1.0); in Generate()
768 __ B(&power_loop_entry); in Generate()
770 __ Bind(&power_loop); in Generate()
771 __ Fmul(scratch1_double, scratch1_double, scratch1_double); in Generate()
772 __ Lsr(exponent_abs, exponent_abs, 1); in Generate()
773 __ Cbz(exponent_abs, &power_loop_exit); in Generate()
775 __ Bind(&power_loop_entry); in Generate()
776 __ Tbz(exponent_abs, 0, &power_loop); in Generate()
777 __ Fmul(result_double, result_double, scratch1_double); in Generate()
778 __ B(&power_loop); in Generate()
780 __ Bind(&power_loop_exit); in Generate()
783 __ Tbz(exponent_integer, kXSignBit, &done); in Generate()
786 __ Fmov(scratch0_double, 1.0); in Generate()
787 __ Fdiv(result_double, scratch0_double, result_double); in Generate()
794 __ Fcmp(result_double, 0.0); in Generate()
795 __ B(&done, ne); in Generate()
798 __ Mov(saved_lr, lr); in Generate()
799 __ Fmov(base_double, base_double_copy); in Generate()
800 __ Scvtf(exponent_double, exponent_integer); in Generate()
801 __ CallCFunction(ExternalReference::power_double_double_function(isolate()), in Generate()
803 __ Mov(lr, saved_lr); in Generate()
804 __ Bind(&done); in Generate()
805 __ Ret(); in Generate()
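
The power_loop at lines 765-780 is exponentiation by squaring over the absolute exponent, with the Fdiv at line 787 supplying the reciprocal for negative exponents. A C++ rendering of the same loop; this is a sketch of the algorithm, not of the stub's register allocation:

    #include <cassert>
    #include <cstdint>

    double PowInt(double base, int64_t exponent) {
      uint64_t abs_exp =
          static_cast<uint64_t>(exponent < 0 ? -exponent : exponent);  // Cneg, line 750
      double scratch = base;  // scratch1_double
      double result = 1.0;    // result_double
      while (abs_exp != 0) {
        if (abs_exp & 1) result *= scratch;  // Tbz(exponent_abs, 0, ...) not taken
        scratch *= scratch;                  // Fmul(scratch1, scratch1, scratch1)
        abs_exp >>= 1;                       // Lsr(exponent_abs, exponent_abs, 1)
      }
      return exponent < 0 ? 1.0 / result : result;  // Fdiv on line 787
    }

    int main() {
      assert(PowInt(2.0, 10) == 1024.0);
      assert(PowInt(2.0, -2) == 0.25);
    }
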
895 DCHECK(jssp.Is(__ StackPointer())); in Generate()
915 __ Add(temp_argv, jssp, Operand(x0, LSL, kPointerSizeLog2)); in Generate()
917 __ Sub(temp_argv, temp_argv, 1 * kPointerSize); in Generate()
926 __ EnterExitFrame( in Generate()
929 DCHECK(csp.Is(__ StackPointer())); in Generate()
932 __ Poke(argv, 1 * kPointerSize); in Generate()
933 __ Poke(argc, 2 * kPointerSize); in Generate()
934 __ Poke(target, 3 * kPointerSize); in Generate()
938 __ Add(x8, __ StackPointer(), Operand(4 * kPointerSize)); in Generate()
946 __ Mov(argc, argc_input); in Generate()
947 __ Mov(target, target_input); in Generate()
948 __ Mov(argv, temp_argv); in Generate()
984 DCHECK(csp.Is(__ StackPointer())); in Generate()
987 __ Mov(x0, argc); in Generate()
988 __ Mov(x1, argv); in Generate()
989 __ Mov(x2, ExternalReference::isolate_address(isolate())); in Generate()
992 __ Adr(x12, &return_location); in Generate()
993 __ Poke(x12, 0); in Generate()
995 if (__ emit_debug_code()) { in Generate()
1000 __ Ldr(temp, MemOperand(fp, ExitFrameConstants::kSPOffset)); in Generate()
1001 __ Ldr(temp, MemOperand(temp, -static_cast<int64_t>(kXRegSize))); in Generate()
1002 __ Cmp(temp, x12); in Generate()
1003 __ Check(eq, kReturnAddressNotFoundInFrame); in Generate()
1007 __ Blr(target); in Generate()
1008 __ Bind(&return_location); in Generate()
1013 __ Ldr(x0, MemOperand(__ StackPointer(), 4 * kPointerSize)); in Generate()
1014 __ Ldr(x1, MemOperand(__ StackPointer(), 5 * kPointerSize)); in Generate()
1015 __ Ldr(x2, MemOperand(__ StackPointer(), 6 * kPointerSize)); in Generate()
1029 __ CompareRoot(result, Heap::kExceptionRootIndex); in Generate()
1030 __ B(eq, &exception_returned); in Generate()
1035 __ Mov(x11, argc); in Generate()
1037 __ Peek(argv, 1 * kPointerSize); in Generate()
1038 __ Peek(argc, 2 * kPointerSize); in Generate()
1039 __ Peek(target, 3 * kPointerSize); in Generate()
1041 __ LeaveExitFrame(save_doubles(), x10, true); in Generate()
1042 DCHECK(jssp.Is(__ StackPointer())); in Generate()
1045 __ Drop(x11); in Generate()
1047 __ AssertFPCRState(); in Generate()
1048 __ Ret(); in Generate()
1052 __ SetStackPointer(csp); in Generate()
1055 __ Bind(&exception_returned); in Generate()
1075 __ Mov(x0, 0); // argc. in Generate()
1076 __ Mov(x1, 0); // argv. in Generate()
1077 __ Mov(x2, ExternalReference::isolate_address(isolate())); in Generate()
1078 __ CallCFunction(find_handler, 3); in Generate()
1085 __ SetStackPointer(jssp); in Generate()
1088 __ Mov(cp, Operand(pending_handler_context_address)); in Generate()
1089 __ Ldr(cp, MemOperand(cp)); in Generate()
1090 __ Mov(jssp, Operand(pending_handler_sp_address)); in Generate()
1091 __ Ldr(jssp, MemOperand(jssp)); in Generate()
1092 __ Mov(csp, jssp); in Generate()
1093 __ Mov(fp, Operand(pending_handler_fp_address)); in Generate()
1094 __ Ldr(fp, MemOperand(fp)); in Generate()
1099 __ Cbz(cp, &not_js_frame); in Generate()
1100 __ Str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); in Generate()
1101 __ Bind(&not_js_frame); in Generate()
1104 __ Mov(x10, Operand(pending_handler_code_address)); in Generate()
1105 __ Ldr(x10, MemOperand(x10)); in Generate()
1106 __ Mov(x11, Operand(pending_handler_offset_address)); in Generate()
1107 __ Ldr(x11, MemOperand(x11)); in Generate()
1108 __ Add(x10, x10, Code::kHeaderSize - kHeapObjectTag); in Generate()
1109 __ Add(x10, x10, x11); in Generate()
1110 __ Br(x10); in Generate()
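
The Add at line 1108 subtracts the heap-object tag while adding the Code header size; the same idiom recurs throughout this listing (for example lines 320, 417, and 543). A sketch of the arithmetic, with kHeapObjectTag = 1 as in V8 and an illustrative, not authoritative, header size:

    #include <cassert>
    #include <cstdint>

    constexpr intptr_t kHeapObjectTag = 1;    // heap pointers carry a 1 tag
    constexpr intptr_t kCodeHeaderSize = 96;  // illustrative value only

    // Jump target = first instruction of the Code object + handler offset,
    // as computed by the Add instructions on lines 1108-1109.
    intptr_t ResumeAddress(intptr_t tagged_code, intptr_t handler_offset) {
      return tagged_code + kCodeHeaderSize - kHeapObjectTag + handler_offset;
    }

    int main() { assert(ResumeAddress(1, 0) == kCodeHeaderSize); }
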
1125 DCHECK(jssp.Is(__ StackPointer())); in Generate()
1130 __ EnableInstrumentation(); in Generate()
1139 __ SetStackPointer(csp); in Generate()
1140 __ PushCalleeSavedRegisters(); in Generate()
1141 __ Mov(jssp, csp); in Generate()
1142 __ SetStackPointer(jssp); in Generate()
1147 __ Fmov(fp_zero, 0.0); in Generate()
1152 __ Mov(x13, bad_frame_pointer); in Generate()
1153 __ Mov(x12, Smi::FromInt(marker)); in Generate()
1154 __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate())); in Generate()
1155 __ Ldr(x10, MemOperand(x11)); in Generate()
1157 __ Push(x13, x12, xzr, x10); in Generate()
1159 __ Sub(fp, jssp, EntryFrameConstants::kCallerFPOffset); in Generate()
1165 __ Mov(x10, ExternalReference(js_entry_sp)); in Generate()
1166 __ Ldr(x11, MemOperand(x10)); in Generate()
1167 __ Cbnz(x11, &non_outermost_js); in Generate()
1168 __ Str(fp, MemOperand(x10)); in Generate()
1169 __ Mov(x12, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); in Generate()
1170 __ Push(x12); in Generate()
1171 __ B(&done); in Generate()
1172 __ Bind(&non_outermost_js); in Generate()
1175 __ Push(xzr); in Generate()
1176 __ Bind(&done); in Generate()
1188 __ B(&invoke); in Generate()
1197 __ bind(&handler_entry); in Generate()
1203 __ Mov(x10, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in Generate()
1206 __ Str(code_entry, MemOperand(x10)); in Generate()
1207 __ LoadRoot(x0, Heap::kExceptionRootIndex); in Generate()
1208 __ B(&exit); in Generate()
1211 __ Bind(&invoke); in Generate()
1212 __ PushStackHandler(); in Generate()
1232 __ Mov(x10, entry); in Generate()
1235 __ Ldr(x11, MemOperand(x10)); // Dereference the address. in Generate()
1236 __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag); in Generate()
1237 __ Blr(x12); in Generate()
1240 __ PopStackHandler(); in Generate()
1243 __ Bind(&exit); in Generate()
1255 __ Pop(x10); in Generate()
1256 __ Cmp(x10, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); in Generate()
1257 __ B(ne, &non_outermost_js_2); in Generate()
1258 __ Mov(x11, ExternalReference(js_entry_sp)); in Generate()
1259 __ Str(xzr, MemOperand(x11)); in Generate()
1260 __ Bind(&non_outermost_js_2); in Generate()
1263 __ Pop(x10); in Generate()
1264 __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate())); in Generate()
1265 __ Str(x10, MemOperand(x11)); in Generate()
1268 __ Drop(-EntryFrameConstants::kCallerFPOffset, kByteSizeInBytes); in Generate()
1270 DCHECK(jssp.Is(__ StackPointer())); in Generate()
1271 __ Mov(csp, jssp); in Generate()
1272 __ SetStackPointer(csp); in Generate()
1273 __ PopCalleeSavedRegisters(); in Generate()
1276 __ Ret(); in Generate()
1291 __ Bind(&miss); in Generate()
1318 __ Ret(); in Generate()
1323 __ Bind(&miss); in Generate()
1331 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
1394 __ Mov(x10, address_of_regexp_stack_memory_size); in Generate()
1395 __ Ldr(x10, MemOperand(x10)); in Generate()
1396 __ Cbz(x10, &runtime); in Generate()
1399 DCHECK(jssp.Is(__ StackPointer())); in Generate()
1400 __ Peek(jsregexp_object, kJSRegExpOffset); in Generate()
1401 __ JumpIfSmi(jsregexp_object, &runtime); in Generate()
1402 __ JumpIfNotObjectType(jsregexp_object, x10, x10, JS_REGEXP_TYPE, &runtime); in Generate()
1405 __ Ldr(regexp_data, FieldMemOperand(jsregexp_object, JSRegExp::kDataOffset)); in Generate()
1408 __ Tst(regexp_data, kSmiTagMask); in Generate()
1409 __ Check(ne, kUnexpectedTypeForRegExpDataFixedArrayExpected); in Generate()
1410 __ CompareObjectType(regexp_data, x10, x10, FIXED_ARRAY_TYPE); in Generate()
1411 __ Check(eq, kUnexpectedTypeForRegExpDataFixedArrayExpected); in Generate()
1415 __ Ldr(x10, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset)); in Generate()
1416 __ Cmp(x10, Smi::FromInt(JSRegExp::IRREGEXP)); in Generate()
1417 __ B(ne, &runtime); in Generate()
1423 __ Ldrsw(x10, in Generate()
1429 __ Add(x10, x10, x10); in Generate()
1430 __ Cmp(x10, Isolate::kJSRegexpStaticOffsetsVectorSize - 2); in Generate()
1431 __ B(hi, &runtime); in Generate()
1434 __ Mov(sliced_string_offset, 0); in Generate()
1436 DCHECK(jssp.Is(__ StackPointer())); in Generate()
1437 __ Peek(subject, kSubjectOffset); in Generate()
1438 __ JumpIfSmi(subject, &runtime); in Generate()
1440 __ Ldr(jsstring_length, FieldMemOperand(subject, String::kLengthOffset)); in Generate()
1464 __ Bind(&check_underlying); in Generate()
1465 __ Ldr(x10, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
1466 __ Ldrb(string_type, FieldMemOperand(x10, Map::kInstanceTypeOffset)); in Generate()
1469 __ And(string_representation, in Generate()
1485 __ Cbz(string_representation, &seq_string); // Go to (4). in Generate()
1492 __ Cmp(string_representation, kExternalStringTag); in Generate()
1493 __ B(ge, &not_seq_nor_cons); // Go to (5). in Generate()
1496 __ Ldr(x10, FieldMemOperand(subject, ConsString::kSecondOffset)); in Generate()
1497 __ JumpIfNotRoot(x10, Heap::kempty_stringRootIndex, &runtime); in Generate()
1499 __ Ldr(subject, FieldMemOperand(subject, ConsString::kFirstOffset)); in Generate()
1500 __ B(&check_underlying); in Generate()
1503 __ Bind(&seq_string); in Generate()
1507 DCHECK(jssp.Is(__ StackPointer())); in Generate()
1508 __ Peek(x10, kPreviousIndexOffset); in Generate()
1509 __ JumpIfNotSmi(x10, &runtime); in Generate()
1510 __ Cmp(jsstring_length, x10); in Generate()
1511 __ B(ls, &runtime); in Generate()
1515 __ SmiUntag(x1, x10); in Generate()
1527 __ Mov(x10, kPointerSize); in Generate()
1530 __ Ands(string_encoding, string_type, kStringEncodingMask); in Generate()
1531 __ CzeroX(x10, ne); in Generate()
1532 __ Add(x10, regexp_data, x10); in Generate()
1533 __ Ldr(code_object, FieldMemOperand(x10, JSRegExp::kDataOneByteCodeOffset)); in Generate()
1540 __ JumpIfSmi(code_object, &runtime); in Generate()
1543 __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1, in Generate()
1548 __ EnterExitFrame(false, x10, 1); in Generate()
1549 DCHECK(csp.Is(__ StackPointer())); in Generate()
1559 __ Mov(x10, ExternalReference::isolate_address(isolate())); in Generate()
1560 __ Poke(x10, kPointerSize); in Generate()
1567 __ Add(start, subject, SeqString::kHeaderSize - kHeapObjectTag); in Generate()
1572 __ Ldr(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize)); in Generate()
1573 __ Ldr(length, UntagSmiFieldMemOperand(subject, String::kLengthOffset)); in Generate()
1579 __ Ubfx(string_encoding, string_encoding, 2, 1); in Generate()
1580 __ Eor(string_encoding, string_encoding, 1); in Generate()
1586 __ Lsl(previous_index_in_bytes, w1, string_encoding); in Generate()
1587 __ Lsl(length, length, string_encoding); in Generate()
1588 __ Lsl(sliced_string_offset, sliced_string_offset, string_encoding); in Generate()
1591 __ Mov(x0, subject); in Generate()
1599 __ Add(w10, previous_index_in_bytes, sliced_string_offset); in Generate()
1600 __ Add(x2, start, Operand(w10, UXTW)); in Generate()
1604 __ Sub(w10, length, previous_index_in_bytes); in Generate()
1605 __ Add(x3, x2, Operand(w10, UXTW)); in Generate()
1608 __ Mov(x4, ExternalReference::address_of_static_offsets_vector(isolate())); in Generate()
1613 __ Mov(x5, 0); in Generate()
1616 __ Mov(x10, address_of_regexp_stack_memory_address); in Generate()
1617 __ Ldr(x10, MemOperand(x10)); in Generate()
1618 __ Mov(x11, address_of_regexp_stack_memory_size); in Generate()
1619 __ Ldr(x11, MemOperand(x11)); in Generate()
1620 __ Add(x6, x10, x11); in Generate()
1623 __ Mov(x7, 1); in Generate()
1626 __ Add(code_object, code_object, Code::kHeaderSize - kHeapObjectTag); in Generate()
1630 __ LeaveExitFrame(false, x10, true); in Generate()
1634 __ CompareAndBranch(w0, NativeRegExpMacroAssembler::FAILURE, eq, &failure); in Generate()
1635 __ CompareAndBranch(w0, in Generate()
1639 __ CompareAndBranch(w0, NativeRegExpMacroAssembler::RETRY, eq, &runtime); in Generate()
1646 __ Ldrsw(x10, in Generate()
1649 __ Add(x10, x10, x10); in Generate()
1650 __ Add(number_of_capture_registers, x10, 2); in Generate()
1653 DCHECK(jssp.Is(__ StackPointer())); in Generate()
1654 __ Peek(last_match_info_elements, kLastMatchInfoOffset); in Generate()
1655 __ JumpIfSmi(last_match_info_elements, &runtime); in Generate()
1658 __ Ldr(x10, in Generate()
1660 __ JumpIfNotRoot(x10, Heap::kFixedArrayMapRootIndex, &runtime); in Generate()
1667 __ Ldrsw(x10, in Generate()
1670 __ Add(x11, number_of_capture_registers, RegExpMatchInfo::kLastMatchOverhead); in Generate()
1671 __ Cmp(x11, x10); in Generate()
1672 __ B(gt, &runtime); in Generate()
1675 __ SmiTag(x10, number_of_capture_registers); in Generate()
1676 __ Str(x10, FieldMemOperand(last_match_info_elements, in Generate()
1679 __ Str(subject, FieldMemOperand(last_match_info_elements, in Generate()
1683 __ Mov(x10, subject); in Generate()
1684 __ RecordWriteField(last_match_info_elements, in Generate()
1687 __ Str(subject, FieldMemOperand(last_match_info_elements, in Generate()
1689 __ Mov(x10, subject); in Generate()
1690 __ RecordWriteField(last_match_info_elements, in Generate()
1702 __ Mov(offsets_vector_index, address_of_static_offsets_vector); in Generate()
1707 __ Add(last_match_offsets, last_match_info_elements, in Generate()
1709 __ Bind(&next_capture); in Generate()
1710 __ Subs(number_of_capture_registers, number_of_capture_registers, 2); in Generate()
1711 __ B(mi, &done); in Generate()
1714 __ Ldr(current_offset, in Generate()
1717 __ SmiTag(x10, current_offset); in Generate()
1720 __ Bic(x11, current_offset, kSmiShiftMask); in Generate()
1721 __ Stp(x10, in Generate()
1724 __ B(&next_capture); in Generate()
1725 __ Bind(&done); in Generate()
1728 __ Mov(x0, last_match_info_elements); in Generate()
1730 __ Drop(4); in Generate()
1731 __ Ret(); in Generate()
1733 __ Bind(&exception); in Generate()
1738 __ Mov(x10, Operand(isolate()->factory()->the_hole_value())); in Generate()
1739 __ Mov(x11, in Generate()
1742 __ Ldr(exception_value, MemOperand(x11)); in Generate()
1743 __ Cmp(x10, exception_value); in Generate()
1744 __ B(eq, &runtime); in Generate()
1747 __ TailCallRuntime(Runtime::kRegExpExecReThrow); in Generate()
1749 __ Bind(&failure); in Generate()
1750 __ Mov(x0, Operand(isolate()->factory()->null_value())); in Generate()
1752 __ Drop(4); in Generate()
1753 __ Ret(); in Generate()
1755 __ Bind(&runtime); in Generate()
1756 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
1760 __ Bind(&not_seq_nor_cons); in Generate()
1762 __ B(ne, &not_long_external); // Go to (7). in Generate()
1765 __ Bind(&external_string); in Generate()
1769 __ Ldr(x10, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
1770 __ Ldrb(x10, FieldMemOperand(x10, Map::kInstanceTypeOffset)); in Generate()
1771 __ Tst(x10, kIsIndirectStringMask); in Generate()
1772 __ Check(eq, kExternalStringExpectedButNotFound); in Generate()
1773 __ And(x10, x10, kStringRepresentationMask); in Generate()
1774 __ Cmp(x10, 0); in Generate()
1775 __ Check(ne, kExternalStringExpectedButNotFound); in Generate()
1777 __ Ldr(subject, in Generate()
1781 __ Sub(subject, subject, SeqTwoByteString::kHeaderSize - kHeapObjectTag); in Generate()
1782 __ B(&seq_string); // Go to (4). in Generate()
1786 __ Bind(&not_long_external); in Generate()
1788 __ TestAndBranchIfAnySet(string_representation, in Generate()
1793 __ Ldr(sliced_string_offset, in Generate()
1795 __ Ldr(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); in Generate()
1796 __ B(&check_underlying); // Go to (1). in Generate()
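
The loop labelled check_underlying (entered at lines 1464 and 1796) peels wrappers off the subject string: a cons string whose second half is empty collapses to its first half (lines 1496-1500), a sliced string records its offset and continues with its parent (lines 1793-1795), and a sequential string proceeds to (4). A loose sketch with a hypothetical Str type; the external-string case is omitted (the real stub redirects it to the out-of-line character data at lines 1777-1781):

    #include <cassert>

    enum Representation { kSeq, kCons, kSliced };

    struct Str {
      Representation rep;
      Str* first = nullptr;   // cons: first half
      Str* second = nullptr;  // cons: second half (nullptr stands in for "")
      Str* parent = nullptr;  // sliced: underlying string
      int offset = 0;         // sliced: start offset
    };

    // Returns the sequential string to scan, or nullptr for the runtime path.
    Str* UnwrapSubject(Str* subject, int* sliced_offset) {
      for (;;) {
        switch (subject->rep) {
          case kSeq:
            return subject;  // go to (4) &seq_string
          case kCons:
            if (subject->second != nullptr) return nullptr;  // non-flat: runtime
            subject = subject->first;  // line 1499, back to &check_underlying
            break;
          case kSliced:
            *sliced_offset = subject->offset;  // remember the slice start
            subject = subject->parent;         // line 1795, go to (1)
            break;
        }
      }
    }

    int main() {
      Str seq;
      seq.rep = kSeq;
      Str sliced;
      sliced.rep = kSliced;
      sliced.parent = &seq;
      sliced.offset = 3;
      int offset = 0;
      assert(UnwrapSubject(&sliced, &offset) == &seq);
      assert(offset == 3);
    }
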
1808 __ SmiTag(argc); in CallStubInRecordCallTarget()
1809 __ Push(argc, function, feedback_vector, index); in CallStubInRecordCallTarget()
1810 __ Push(cp); in CallStubInRecordCallTarget()
1813 __ CallStub(stub); in CallStubInRecordCallTarget()
1815 __ Pop(cp); in CallStubInRecordCallTarget()
1816 __ Pop(index, feedback_vector, function, argc); in CallStubInRecordCallTarget()
1817 __ SmiUntag(argc); in CallStubInRecordCallTarget()
1846 __ Add(feedback, feedback_vector, in GenerateRecordCallTarget()
1848 __ Ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); in GenerateRecordCallTarget()
1856 __ Ldr(feedback_value, FieldMemOperand(feedback, WeakCell::kValueOffset)); in GenerateRecordCallTarget()
1857 __ Cmp(function, feedback_value); in GenerateRecordCallTarget()
1858 __ B(eq, &done); in GenerateRecordCallTarget()
1859 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1860 __ B(eq, &done); in GenerateRecordCallTarget()
1861 __ Ldr(feedback_map, FieldMemOperand(feedback, HeapObject::kMapOffset)); in GenerateRecordCallTarget()
1862 __ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex); in GenerateRecordCallTarget()
1863 __ B(ne, &check_allocation_site); in GenerateRecordCallTarget()
1866 __ JumpIfSmi(feedback_value, &initialize); in GenerateRecordCallTarget()
1867 __ B(&megamorphic); in GenerateRecordCallTarget()
1869 __ bind(&check_allocation_site); in GenerateRecordCallTarget()
1874 __ JumpIfNotRoot(feedback_map, Heap::kAllocationSiteMapRootIndex, &miss); in GenerateRecordCallTarget()
1877 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, scratch1); in GenerateRecordCallTarget()
1878 __ Cmp(function, scratch1); in GenerateRecordCallTarget()
1879 __ B(ne, &megamorphic); in GenerateRecordCallTarget()
1880 __ B(&done); in GenerateRecordCallTarget()
1882 __ Bind(&miss); in GenerateRecordCallTarget()
1886 __ JumpIfRoot(scratch1, Heap::kuninitialized_symbolRootIndex, &initialize); in GenerateRecordCallTarget()
1889 __ Bind(&megamorphic); in GenerateRecordCallTarget()
1890 __ Add(scratch1, feedback_vector, in GenerateRecordCallTarget()
1892 __ LoadRoot(scratch2, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1893 __ Str(scratch2, FieldMemOperand(scratch1, FixedArray::kHeaderSize)); in GenerateRecordCallTarget()
1894 __ B(&done); in GenerateRecordCallTarget()
1898 __ Bind(&initialize); in GenerateRecordCallTarget()
1901 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, scratch1); in GenerateRecordCallTarget()
1902 __ Cmp(function, scratch1); in GenerateRecordCallTarget()
1903 __ B(ne, &not_array_function); in GenerateRecordCallTarget()
1911 __ B(&done); in GenerateRecordCallTarget()
1913 __ Bind(&not_array_function); in GenerateRecordCallTarget()
1918 __ Bind(&done); in GenerateRecordCallTarget()
1921 __ Add(scratch1, feedback_vector, in GenerateRecordCallTarget()
1923 __ Add(scratch1, scratch1, Operand(FixedArray::kHeaderSize + kPointerSize)); in GenerateRecordCallTarget()
1924 __ Ldr(scratch2, FieldMemOperand(scratch1, 0)); in GenerateRecordCallTarget()
1925 __ Add(scratch2, scratch2, Operand(Smi::FromInt(1))); in GenerateRecordCallTarget()
1926 __ Str(scratch2, FieldMemOperand(scratch1, 0)); in GenerateRecordCallTarget()
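
GenerateRecordCallTarget (lines 1846-1926) is a small state machine over the call's feedback slot: an uninitialized slot becomes monomorphic, a mismatch degrades it to megamorphic, and the Array function gets an AllocationSite instead of a WeakCell. A sketch of the transitions, omitting the AllocationSite special case:

    #include <cassert>

    enum class Feedback { kUninitialized, kMonomorphic, kMegamorphic };

    struct Slot {
      Feedback state = Feedback::kUninitialized;
      const void* target = nullptr;  // stands in for the WeakCell's value
    };

    void RecordCallTarget(Slot* slot, const void* function) {
      switch (slot->state) {
        case Feedback::kUninitialized:  // &initialize, line 1898
          slot->state = Feedback::kMonomorphic;
          slot->target = function;
          break;
        case Feedback::kMonomorphic:
          if (slot->target != function) {  // mismatch: &megamorphic, line 1889
            slot->state = Feedback::kMegamorphic;
            slot->target = nullptr;
          }
          break;
        case Feedback::kMegamorphic:  // already generic; line 1859 exits early
          break;
      }
    }

    int main() {
      Slot slot;
      int f1 = 0, f2 = 0;
      RecordCallTarget(&slot, &f1);
      assert(slot.state == Feedback::kMonomorphic);
      RecordCallTarget(&slot, &f2);
      assert(slot.state == Feedback::kMegamorphic);
    }
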
1940 __ JumpIfSmi(function, &non_function); in Generate()
1943 __ JumpIfNotObjectType(function, object_type, object_type, JS_FUNCTION_TYPE, in Generate()
1948 __ Add(x5, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); in Generate()
1951 __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize)); in Generate()
1952 __ Ldr(x5, FieldMemOperand(x2, AllocationSite::kMapOffset)); in Generate()
1953 __ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex, in Generate()
1955 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); in Generate()
1956 __ bind(&feedback_register_initialized); in Generate()
1958 __ AssertUndefinedOrAllocationSite(x2, x5); in Generate()
1960 __ Mov(x3, function); in Generate()
1964 __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); in Generate()
1965 __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset)); in Generate()
1966 __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag); in Generate()
1967 __ Br(x4); in Generate()
1969 __ Bind(&non_function); in Generate()
1970 __ Mov(x3, function); in Generate()
1971 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate()
1977 __ Add(feedback_vector, feedback_vector, in IncrementCallCount()
1979 __ Add(feedback_vector, feedback_vector, in IncrementCallCount()
1981 __ Ldr(slot, FieldMemOperand(feedback_vector, 0)); in IncrementCallCount()
1982 __ Add(slot, slot, Operand(Smi::FromInt(1))); in IncrementCallCount()
1983 __ Str(slot, FieldMemOperand(feedback_vector, 0)); in IncrementCallCount()
1998 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, scratch); in HandleArrayCase()
1999 __ Cmp(function, scratch); in HandleArrayCase()
2000 __ B(ne, miss); in HandleArrayCase()
2008 __ Mov(allocation_site_arg, allocation_site); in HandleArrayCase()
2009 __ Mov(new_target_arg, function); in HandleArrayCase()
2011 __ TailCallStub(&stub); in HandleArrayCase()
2029 __ Add(x4, feedback_vector, in Generate()
2031 __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize)); in Generate()
2047 __ Ldr(x5, FieldMemOperand(x4, WeakCell::kValueOffset)); in Generate()
2048 __ Cmp(x5, function); in Generate()
2049 __ B(ne, &extra_checks_or_miss); in Generate()
2053 __ JumpIfSmi(function, &extra_checks_or_miss); in Generate()
2055 __ Bind(&call_function); in Generate()
2060 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), in Generate()
2064 __ bind(&extra_checks_or_miss); in Generate()
2067 __ JumpIfRoot(x4, Heap::kmegamorphic_symbolRootIndex, &call); in Generate()
2069 __ Ldr(x5, FieldMemOperand(x4, HeapObject::kMapOffset)); in Generate()
2070 __ JumpIfNotRoot(x5, Heap::kAllocationSiteMapRootIndex, &not_allocation_site); in Generate()
2074 __ bind(&not_allocation_site); in Generate()
2079 __ jmp(&miss); in Generate()
2083 __ JumpIfRoot(x4, Heap::kuninitialized_symbolRootIndex, &miss); in Generate()
2087 __ AssertNotSmi(x4); in Generate()
2088 __ JumpIfNotObjectType(x4, x5, x5, JS_FUNCTION_TYPE, &miss); in Generate()
2089 __ Add(x4, feedback_vector, in Generate()
2091 __ LoadRoot(x5, Heap::kmegamorphic_symbolRootIndex); in Generate()
2092 __ Str(x5, FieldMemOperand(x4, FixedArray::kHeaderSize)); in Generate()
2094 __ Bind(&call); in Generate()
2099 __ Bind(&call_count_incremented); in Generate()
2100 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), in Generate()
2103 __ bind(&uninitialized); in Generate()
2106 __ JumpIfSmi(function, &miss); in Generate()
2109 __ JumpIfNotObjectType(function, x5, x5, JS_FUNCTION_TYPE, &miss); in Generate()
2113 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, x5); in Generate()
2114 __ Cmp(function, x5); in Generate()
2115 __ B(eq, &miss); in Generate()
2118 __ Ldr(x4, FieldMemOperand(function, JSFunction::kContextOffset)); in Generate()
2119 __ Ldr(x4, ContextMemOperand(x4, Context::NATIVE_CONTEXT_INDEX)); in Generate()
2120 __ Ldr(x5, NativeContextMemOperand()); in Generate()
2121 __ Cmp(x4, x5); in Generate()
2122 __ B(ne, &miss); in Generate()
2132 __ SmiTag(x0); in Generate()
2133 __ Push(x0); in Generate()
2134 __ Push(feedback_vector, index); in Generate()
2136 __ Push(cp, function); in Generate()
2137 __ CallStub(&create_stub); in Generate()
2138 __ Pop(cp, function); in Generate()
2140 __ Pop(feedback_vector, index); in Generate()
2141 __ Pop(x0); in Generate()
2142 __ SmiUntag(x0); in Generate()
2145 __ B(&call_function); in Generate()
2149 __ bind(&miss); in Generate()
2153 __ B(&call_count_incremented); in Generate()
2163 __ SmiTag(x0); in GenerateMiss()
2166 __ Push(x0, x1, x2, x3); in GenerateMiss()
2169 __ CallRuntime(Runtime::kCallIC_Miss); in GenerateMiss()
2172 __ Mov(x1, x0); in GenerateMiss()
2175 __ Pop(x0); in GenerateMiss()
2176 __ SmiUntag(x0); in GenerateMiss()
2183 __ JumpIfSmi(object_, receiver_not_string_); in GenerateFast()
2186 __ Ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); in GenerateFast()
2187 __ Ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); in GenerateFast()
2190 __ TestAndBranchIfAnySet(result_, kIsNotStringMask, receiver_not_string_); in GenerateFast()
2194 __ JumpIfNotSmi(index_, &index_not_smi_); in GenerateFast()
2196 __ Bind(&got_smi_index_); in GenerateFast()
2198 __ Ldrsw(result_, UntagSmiFieldMemOperand(object_, String::kLengthOffset)); in GenerateFast()
2199 __ Cmp(result_, Operand::UntagSmi(index_)); in GenerateFast()
2200 __ B(ls, index_out_of_range_); in GenerateFast()
2202 __ SmiUntag(index_); in GenerateFast()
2209 __ SmiTag(result_); in GenerateFast()
2210 __ Bind(&exit_); in GenerateFast()
2217 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); in GenerateSlow()
2219 __ Bind(&index_not_smi_); in GenerateSlow()
2221 __ JumpIfNotHeapNumber(index_, index_not_number_); in GenerateSlow()
2224 __ Push(LoadWithVectorDescriptor::VectorRegister(), in GenerateSlow()
2228 __ Push(object_, index_); in GenerateSlow()
2230 __ CallRuntime(Runtime::kNumberToSmi); in GenerateSlow()
2233 __ Mov(index_, x0); in GenerateSlow()
2235 __ Pop(object_, LoadWithVectorDescriptor::SlotRegister(), in GenerateSlow()
2238 __ Pop(object_); in GenerateSlow()
2241 __ Ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); in GenerateSlow()
2242 __ Ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); in GenerateSlow()
2246 __ JumpIfNotSmi(index_, index_out_of_range_); in GenerateSlow()
2248 __ B(&got_smi_index_); in GenerateSlow()
2253 __ Bind(&call_runtime_); in GenerateSlow()
2255 __ SmiTag(index_); in GenerateSlow()
2256 __ Push(object_, index_); in GenerateSlow()
2257 __ CallRuntime(Runtime::kStringCharCodeAtRT); in GenerateSlow()
2258 __ Mov(result_, x0); in GenerateSlow()
2260 __ B(&exit_); in GenerateSlow()
2262 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); in GenerateSlow()
2267 __ JumpIfNotSmi(code_, &slow_case_); in GenerateFast()
2268 __ Cmp(code_, Smi::FromInt(String::kMaxOneByteCharCode)); in GenerateFast()
2269 __ B(hi, &slow_case_); in GenerateFast()
2271 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); in GenerateFast()
2273 __ Add(result_, result_, Operand::UntagSmiAndScale(code_, kPointerSizeLog2)); in GenerateFast()
2274 __ Ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize)); in GenerateFast()
2275 __ JumpIfRoot(result_, Heap::kUndefinedValueRootIndex, &slow_case_); in GenerateFast()
2276 __ Bind(&exit_); in GenerateFast()
2283 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); in GenerateSlow()
2285 __ Bind(&slow_case_); in GenerateSlow()
2287 __ Push(code_); in GenerateSlow()
2288 __ CallRuntime(Runtime::kStringCharFromCode); in GenerateSlow()
2289 __ Mov(result_, x0); in GenerateSlow()
2291 __ B(&exit_); in GenerateSlow()
2293 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); in GenerateSlow()
2303 __ CheckMap(x1, x2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); in GenerateBooleans()
2304 __ CheckMap(x0, x3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); in GenerateBooleans()
2306 __ Ldr(x1, FieldMemOperand(x1, Oddball::kToNumberOffset)); in GenerateBooleans()
2307 __ AssertSmi(x1); in GenerateBooleans()
2308 __ Ldr(x0, FieldMemOperand(x0, Oddball::kToNumberOffset)); in GenerateBooleans()
2309 __ AssertSmi(x0); in GenerateBooleans()
2311 __ Sub(x0, x1, x0); in GenerateBooleans()
2312 __ Ret(); in GenerateBooleans()
2314 __ Bind(&miss); in GenerateBooleans()
2325 __ JumpIfEitherNotSmi(x0, x1, &miss); in GenerateSmis()
2329 __ Sub(x0, x0, x1); in GenerateSmis()
2332 __ SmiUntag(x1); in GenerateSmis()
2333 __ Sub(x0, x1, Operand::UntagSmi(x0)); in GenerateSmis()
2335 __ Ret(); in GenerateSmis()
2337 __ Bind(&miss); in GenerateSmis()
2357 __ JumpIfNotSmi(lhs, &miss); in GenerateNumbers()
2360 __ JumpIfNotSmi(rhs, &miss); in GenerateNumbers()
2363 __ SmiUntagToDouble(rhs_d, rhs, kSpeculativeUntag); in GenerateNumbers()
2364 __ SmiUntagToDouble(lhs_d, lhs, kSpeculativeUntag); in GenerateNumbers()
2367 __ JumpIfSmi(rhs, &handle_lhs); in GenerateNumbers()
2368 __ JumpIfNotHeapNumber(rhs, &maybe_undefined1); in GenerateNumbers()
2369 __ Ldr(rhs_d, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in GenerateNumbers()
2372 __ Bind(&handle_lhs); in GenerateNumbers()
2373 __ JumpIfSmi(lhs, &values_in_d_regs); in GenerateNumbers()
2374 __ JumpIfNotHeapNumber(lhs, &maybe_undefined2); in GenerateNumbers()
2375 __ Ldr(lhs_d, FieldMemOperand(lhs, HeapNumber::kValueOffset)); in GenerateNumbers()
2377 __ Bind(&values_in_d_regs); in GenerateNumbers()
2378 __ Fcmp(lhs_d, rhs_d); in GenerateNumbers()
2379 __ B(vs, &unordered); // Overflow flag set if either is NaN. in GenerateNumbers()
2381 __ Cset(result, gt); // gt => 1, otherwise (lt, eq) => 0 (EQUAL). in GenerateNumbers()
2382 __ Csinv(result, result, xzr, ge); // lt => -1, gt => 1, eq => 0. in GenerateNumbers()
2383 __ Ret(); in GenerateNumbers()
2385 __ Bind(&unordered); in GenerateNumbers()
2388 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); in GenerateNumbers()
2390 __ Bind(&maybe_undefined1); in GenerateNumbers()
2392 __ JumpIfNotRoot(rhs, Heap::kUndefinedValueRootIndex, &miss); in GenerateNumbers()
2393 __ JumpIfSmi(lhs, &unordered); in GenerateNumbers()
2394 __ JumpIfNotHeapNumber(lhs, &maybe_undefined2); in GenerateNumbers()
2395 __ B(&unordered); in GenerateNumbers()
2398 __ Bind(&maybe_undefined2); in GenerateNumbers()
2400 __ JumpIfRoot(lhs, Heap::kUndefinedValueRootIndex, &unordered); in GenerateNumbers()
2403 __ Bind(&miss); in GenerateNumbers()
2418 __ JumpIfEitherSmi(lhs, rhs, &miss); in GenerateInternalizedStrings()
2425 __ Ldr(lhs_map, FieldMemOperand(lhs, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2426 __ Ldr(rhs_map, FieldMemOperand(rhs, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2427 __ Ldrb(lhs_type, FieldMemOperand(lhs_map, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2428 __ Ldrb(rhs_type, FieldMemOperand(rhs_map, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2431 __ Orr(x12, lhs_type, rhs_type); in GenerateInternalizedStrings()
2432 __ TestAndBranchIfAnySet( in GenerateInternalizedStrings()
2437 __ Cmp(lhs, rhs); in GenerateInternalizedStrings()
2438 __ Cset(result, ne); in GenerateInternalizedStrings()
2439 __ Ret(); in GenerateInternalizedStrings()
2441 __ Bind(&miss); in GenerateInternalizedStrings()
2460 __ JumpIfEitherSmi(lhs, rhs, &miss); in GenerateUniqueNames()
2464 __ Ldr(x10, FieldMemOperand(lhs, HeapObject::kMapOffset)); in GenerateUniqueNames()
2465 __ Ldr(x11, FieldMemOperand(rhs, HeapObject::kMapOffset)); in GenerateUniqueNames()
2466 __ Ldrb(lhs_instance_type, FieldMemOperand(x10, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2467 __ Ldrb(rhs_instance_type, FieldMemOperand(x11, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2471 __ JumpIfNotUniqueNameInstanceType(lhs_instance_type, &miss); in GenerateUniqueNames()
2472 __ JumpIfNotUniqueNameInstanceType(rhs_instance_type, &miss); in GenerateUniqueNames()
2476 __ Cmp(lhs, rhs); in GenerateUniqueNames()
2477 __ Cset(result, ne); in GenerateUniqueNames()
2478 __ Ret(); in GenerateUniqueNames()
2480 __ Bind(&miss); in GenerateUniqueNames()
2498 __ JumpIfEitherSmi(rhs, lhs, &miss); in GenerateStrings()
2505 __ Ldr(lhs_map, FieldMemOperand(lhs, HeapObject::kMapOffset)); in GenerateStrings()
2506 __ Ldr(rhs_map, FieldMemOperand(rhs, HeapObject::kMapOffset)); in GenerateStrings()
2507 __ Ldrb(lhs_type, FieldMemOperand(lhs_map, Map::kInstanceTypeOffset)); in GenerateStrings()
2508 __ Ldrb(rhs_type, FieldMemOperand(rhs_map, Map::kInstanceTypeOffset)); in GenerateStrings()
2510 __ Orr(x12, lhs_type, rhs_type); in GenerateStrings()
2511 __ Tbnz(x12, MaskToBit(kIsNotStringMask), &miss); in GenerateStrings()
2515 __ Cmp(lhs, rhs); in GenerateStrings()
2516 __ B(ne, &not_equal); in GenerateStrings()
2517 __ Mov(result, EQUAL); in GenerateStrings()
2518 __ Ret(); in GenerateStrings()
2520 __ Bind(&not_equal); in GenerateStrings()
2530 __ Orr(x12, lhs_type, rhs_type); in GenerateStrings()
2531 __ TestAndBranchIfAnySet( in GenerateStrings()
2534 __ Ret(); in GenerateStrings()
2535 __ Bind(&not_internalized_strings); in GenerateStrings()
2540 __ JumpIfBothInstanceTypesAreNotSequentialOneByte(lhs_type, rhs_type, x12, in GenerateStrings()
2553 __ Bind(&runtime); in GenerateStrings()
2557 __ Push(lhs, rhs); in GenerateStrings()
2558 __ CallRuntime(Runtime::kStringEqual); in GenerateStrings()
2560 __ LoadRoot(x1, Heap::kTrueValueRootIndex); in GenerateStrings()
2561 __ Sub(x0, x0, x1); in GenerateStrings()
2562 __ Ret(); in GenerateStrings()
2564 __ Push(lhs, rhs); in GenerateStrings()
2565 __ TailCallRuntime(Runtime::kStringCompare); in GenerateStrings()
2568 __ Bind(&miss); in GenerateStrings()
2583 __ JumpIfEitherSmi(rhs, lhs, &miss); in GenerateReceivers()
2586 __ JumpIfObjectType(rhs, x10, x10, FIRST_JS_RECEIVER_TYPE, &miss, lt); in GenerateReceivers()
2587 __ JumpIfObjectType(lhs, x10, x10, FIRST_JS_RECEIVER_TYPE, &miss, lt); in GenerateReceivers()
2590 __ Sub(result, rhs, lhs); in GenerateReceivers()
2591 __ Ret(); in GenerateReceivers()
2593 __ Bind(&miss); in GenerateReceivers()
2608 __ JumpIfEitherSmi(rhs, lhs, &miss); in GenerateKnownReceivers()
2613 __ GetWeakValue(map, cell); in GenerateKnownReceivers()
2614 __ Ldr(rhs_map, FieldMemOperand(rhs, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2615 __ Ldr(lhs_map, FieldMemOperand(lhs, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2616 __ Cmp(rhs_map, map); in GenerateKnownReceivers()
2617 __ B(ne, &miss); in GenerateKnownReceivers()
2618 __ Cmp(lhs_map, map); in GenerateKnownReceivers()
2619 __ B(ne, &miss); in GenerateKnownReceivers()
2622 __ Sub(result, rhs, lhs); in GenerateKnownReceivers()
2623 __ Ret(); in GenerateKnownReceivers()
2627 __ Mov(ncr, Smi::FromInt(GREATER)); in GenerateKnownReceivers()
2629 __ Mov(ncr, Smi::FromInt(LESS)); in GenerateKnownReceivers()
2631 __ Push(lhs, rhs, ncr); in GenerateKnownReceivers()
2632 __ TailCallRuntime(Runtime::kCompare); in GenerateKnownReceivers()
2635 __ Bind(&miss); in GenerateKnownReceivers()
2654 __ Push(x1, x0, lr); in GenerateMiss()
2656 __ Mov(op, Smi::FromInt(this->op())); in GenerateMiss()
2657 __ Push(left, right, op); in GenerateMiss()
2660 __ CallRuntime(Runtime::kCompareIC_Miss); in GenerateMiss()
2663 __ Add(stub_entry, x0, Code::kHeaderSize - kHeapObjectTag); in GenerateMiss()
2665 __ Pop(lr, x0, x1); in GenerateMiss()
2669 __ Jump(stub_entry); in GenerateMiss()
2684 __ Ldr(left_length, FieldMemOperand(left, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
2685 __ Ldr(right_length, FieldMemOperand(right, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
2686 __ Cmp(left_length, right_length); in GenerateFlatOneByteStringEquals()
2687 __ B(eq, &check_zero_length); in GenerateFlatOneByteStringEquals()
2689 __ Bind(&strings_not_equal); in GenerateFlatOneByteStringEquals()
2690 __ Mov(result, Smi::FromInt(NOT_EQUAL)); in GenerateFlatOneByteStringEquals()
2691 __ Ret(); in GenerateFlatOneByteStringEquals()
2695 __ Bind(&check_zero_length); in GenerateFlatOneByteStringEquals()
2697 __ Cbnz(left_length, &compare_chars); in GenerateFlatOneByteStringEquals()
2698 __ Mov(result, Smi::FromInt(EQUAL)); in GenerateFlatOneByteStringEquals()
2699 __ Ret(); in GenerateFlatOneByteStringEquals()
2702 __ Bind(&compare_chars); in GenerateFlatOneByteStringEquals()
2707 __ Mov(result, Smi::FromInt(EQUAL)); in GenerateFlatOneByteStringEquals()
2708 __ Ret(); in GenerateFlatOneByteStringEquals()
2720 __ Ldr(scratch1, FieldMemOperand(left, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
2721 __ Ldr(scratch2, FieldMemOperand(right, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
2722 __ Subs(length_delta, scratch1, scratch2); in GenerateCompareFlatOneByteStrings()
2725 __ Csel(min_length, scratch2, scratch1, gt); in GenerateCompareFlatOneByteStrings()
2726 __ Cbz(min_length, &compare_lengths); in GenerateCompareFlatOneByteStrings()
2733 __ Bind(&compare_lengths); in GenerateCompareFlatOneByteStrings()
2739 __ Subs(result, length_delta, 0); in GenerateCompareFlatOneByteStrings()
2741 __ Bind(&result_not_equal); in GenerateCompareFlatOneByteStrings()
2744 __ Mov(greater, Smi::FromInt(GREATER)); in GenerateCompareFlatOneByteStrings()
2745 __ Mov(less, Smi::FromInt(LESS)); in GenerateCompareFlatOneByteStrings()
2746 __ CmovX(result, greater, gt); in GenerateCompareFlatOneByteStrings()
2747 __ CmovX(result, less, lt); in GenerateCompareFlatOneByteStrings()
2748 __ Ret(); in GenerateCompareFlatOneByteStrings()
2760 __ SmiUntag(length); in GenerateOneByteCharsCompareLoop()
2761 __ Add(scratch1, length, SeqOneByteString::kHeaderSize - kHeapObjectTag); in GenerateOneByteCharsCompareLoop()
2762 __ Add(left, left, scratch1); in GenerateOneByteCharsCompareLoop()
2763 __ Add(right, right, scratch1); in GenerateOneByteCharsCompareLoop()
2766 __ Neg(index, length); // index = -length; in GenerateOneByteCharsCompareLoop()
2770 __ Bind(&loop); in GenerateOneByteCharsCompareLoop()
2771 __ Ldrb(scratch1, MemOperand(left, index)); in GenerateOneByteCharsCompareLoop()
2772 __ Ldrb(scratch2, MemOperand(right, index)); in GenerateOneByteCharsCompareLoop()
2773 __ Cmp(scratch1, scratch2); in GenerateOneByteCharsCompareLoop()
2774 __ B(ne, chars_not_equal); in GenerateOneByteCharsCompareLoop()
2775 __ Add(index, index, 1); in GenerateOneByteCharsCompareLoop()
2776 __ Cbnz(index, &loop); in GenerateOneByteCharsCompareLoop()
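
The character loop at lines 2770-2776 uses a single negative index that counts up toward zero: both pointers were pre-advanced past the end of the compared region (lines 2762-2763), so Cbnz on the index is the only loop-control instruction needed. A C-style equivalent:

    #include <cassert>
    #include <cstdint>

    bool RangesEqual(const char* left, const char* right, intptr_t length) {
      left += length;  // both pointers point just past the region
      right += length;
      for (intptr_t index = -length; index != 0; ++index) {  // Neg(index, length)
        if (left[index] != right[index]) return false;  // B(ne, chars_not_equal)
      }
      return true;  // fall through: equal over this range
    }

    int main() {
      assert(RangesEqual("abcde", "abcde", 5));
      assert(!RangesEqual("abcde", "abcxe", 5));
    }
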
2790 __ LoadObject(x2, handle(isolate()->heap()->undefined_value())); in Generate()
2794 __ AssertNotSmi(x2, kExpectedAllocationSite); in Generate()
2795 __ Ldr(x10, FieldMemOperand(x2, HeapObject::kMapOffset)); in Generate()
2796 __ AssertRegisterIsRoot(x10, Heap::kAllocationSiteMapRootIndex, in Generate()
2803 __ TailCallStub(&stub); in Generate()
2816 __ Ldr(val, MemOperand(regs_.address())); in GenerateIncremental()
2817 __ JumpIfNotInNewSpace(val, &dont_need_remembered_set); in GenerateIncremental()
2819 __ JumpIfInNewSpace(regs_.object(), &dont_need_remembered_set); in GenerateIncremental()
2828 __ RememberedSetHelper(object(), address(), in GenerateIncremental()
2832 __ Bind(&dont_need_remembered_set); in GenerateIncremental()
2839 __ Ret(); in GenerateIncremental()
2849 __ Mov(address, regs_.address()); in InformIncrementalMarker()
2850 __ Mov(x0, regs_.object()); in InformIncrementalMarker()
2851 __ Mov(x1, address); in InformIncrementalMarker()
2852 __ Mov(x2, ExternalReference::isolate_address(isolate())); in InformIncrementalMarker()
2858 __ CallCFunction(function, 3, 0); in InformIncrementalMarker()
2873 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black); in CheckNeedsToInformIncrementalMarker()
2877 __ RememberedSetHelper(object(), address(), in CheckNeedsToInformIncrementalMarker()
2881 __ Ret(); in CheckNeedsToInformIncrementalMarker()
2884 __ Bind(&on_black); in CheckNeedsToInformIncrementalMarker()
2887 __ Ldr(val, MemOperand(regs_.address())); in CheckNeedsToInformIncrementalMarker()
2892 __ CheckPageFlagClear(val, regs_.scratch1(), in CheckNeedsToInformIncrementalMarker()
2896 __ CheckPageFlagClear(regs_.object(), in CheckNeedsToInformIncrementalMarker()
2901 __ Bind(&ensure_not_white); in CheckNeedsToInformIncrementalMarker()
2906 __ Push(regs_.address(), regs_.object()); in CheckNeedsToInformIncrementalMarker()
2907 __ JumpIfWhite(val, in CheckNeedsToInformIncrementalMarker()
2913 __ Pop(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
2917 __ RememberedSetHelper(object(), address(), in CheckNeedsToInformIncrementalMarker()
2921 __ Ret(); in CheckNeedsToInformIncrementalMarker()
2924 __ Bind(&need_incremental_pop_scratch); in CheckNeedsToInformIncrementalMarker()
2925 __ Pop(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
2927 __ Bind(&need_incremental); in CheckNeedsToInformIncrementalMarker()
2943 __ adr(xzr, &skip_to_incremental_noncompacting); in Generate()
2944 __ adr(xzr, &skip_to_incremental_compacting); in Generate()
2948 __ RememberedSetHelper(object(), address(), in Generate()
2952 __ Ret(); in Generate()
2954 __ Bind(&skip_to_incremental_noncompacting); in Generate()
2957 __ Bind(&skip_to_incremental_compacting); in Generate()
2964 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET); in Generate()
2967 __ Ldr(x1, MemOperand(fp, parameter_count_offset)); in Generate()
2969 __ Add(x1, x1, 1); in Generate()
2972 __ Drop(x1); in Generate()
2974 __ Ret(); in Generate()
2978 __ EmitLoadTypeFeedbackVector(x2); in Generate()
2980 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); in Generate()
2995 __ Ldr(cached_map, in HandleArrayCases()
2997 __ Ldr(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); in HandleArrayCases()
2998 __ Cmp(receiver_map, cached_map); in HandleArrayCases()
2999 __ B(ne, &start_polymorphic); in HandleArrayCases()
3002 __ Ldr(handler, FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(1))); in HandleArrayCases()
3003 __ Add(handler, handler, Code::kHeaderSize - kHeapObjectTag); in HandleArrayCases()
3004 __ Jump(feedback); in HandleArrayCases()
3007 __ Bind(&start_polymorphic); in HandleArrayCases()
3008 __ Ldr(length, FieldMemOperand(feedback, FixedArray::kLengthOffset)); in HandleArrayCases()
3010 __ Cmp(length, Operand(Smi::FromInt(2))); in HandleArrayCases()
3011 __ B(eq, miss); in HandleArrayCases()
3027 __ Add(too_far, feedback, in HandleArrayCases()
3029 __ Add(too_far, too_far, FixedArray::kHeaderSize - kHeapObjectTag); in HandleArrayCases()
3030 __ Add(pointer_reg, feedback, in HandleArrayCases()
3033 __ Bind(&next_loop); in HandleArrayCases()
3034 __ Ldr(cached_map, MemOperand(pointer_reg)); in HandleArrayCases()
3035 __ Ldr(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); in HandleArrayCases()
3036 __ Cmp(receiver_map, cached_map); in HandleArrayCases()
3037 __ B(ne, &prepare_next); in HandleArrayCases()
3038 __ Ldr(handler, MemOperand(pointer_reg, kPointerSize)); in HandleArrayCases()
3039 __ Add(handler, handler, Code::kHeaderSize - kHeapObjectTag); in HandleArrayCases()
3040 __ Jump(handler); in HandleArrayCases()
3042 __ Bind(&prepare_next); in HandleArrayCases()
3043 __ Add(pointer_reg, pointer_reg, kPointerSize * 2); in HandleArrayCases()
3044 __ Cmp(pointer_reg, too_far); in HandleArrayCases()
3045 __ B(lt, &next_loop); in HandleArrayCases()
3048 __ jmp(miss); in HandleArrayCases()
3057 __ JumpIfSmi(receiver, load_smi_map); in HandleMonomorphicCase()
3058 __ Ldr(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); in HandleMonomorphicCase()
3059 __ bind(compare_map); in HandleMonomorphicCase()
3062 __ Ldr(cached_map, FieldMemOperand(feedback, WeakCell::kValueOffset)); in HandleMonomorphicCase()
3063 __ Cmp(cached_map, receiver_map); in HandleMonomorphicCase()
3064 __ B(ne, try_array); in HandleMonomorphicCase()
3067 __ Add(handler, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2)); in HandleMonomorphicCase()
3068 __ Ldr(handler, in HandleMonomorphicCase()
3070 __ Add(handler, handler, Code::kHeaderSize - kHeapObjectTag); in HandleMonomorphicCase()
3071 __ Jump(handler); in HandleMonomorphicCase()
3075 __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister()); in Generate()
3101 __ Ldr(too_far, FieldMemOperand(feedback, FixedArray::kLengthOffset)); in HandlePolymorphicStoreCase()
3113 __ Add(too_far, feedback, in HandlePolymorphicStoreCase()
3115 __ Add(too_far, too_far, FixedArray::kHeaderSize - kHeapObjectTag); in HandlePolymorphicStoreCase()
3116 __ Add(pointer_reg, feedback, in HandlePolymorphicStoreCase()
3119 __ Bind(&next_loop); in HandlePolymorphicStoreCase()
3120 __ Ldr(cached_map, MemOperand(pointer_reg)); in HandlePolymorphicStoreCase()
3121 __ Ldr(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset)); in HandlePolymorphicStoreCase()
3122 __ Cmp(receiver_map, cached_map); in HandlePolymorphicStoreCase()
3123 __ B(ne, &prepare_next); in HandlePolymorphicStoreCase()
3125 __ Ldr(too_far, MemOperand(pointer_reg, kPointerSize)); in HandlePolymorphicStoreCase()
3126 __ CompareRoot(too_far, Heap::kUndefinedValueRootIndex); in HandlePolymorphicStoreCase()
3127 __ B(ne, &transition_call); in HandlePolymorphicStoreCase()
3129 __ Ldr(pointer_reg, MemOperand(pointer_reg, kPointerSize * 2)); in HandlePolymorphicStoreCase()
3130 __ Add(pointer_reg, pointer_reg, Code::kHeaderSize - kHeapObjectTag); in HandlePolymorphicStoreCase()
3131 __ Jump(pointer_reg); in HandlePolymorphicStoreCase()
3133 __ Bind(&transition_call); in HandlePolymorphicStoreCase()
3134 __ Ldr(too_far, FieldMemOperand(too_far, WeakCell::kValueOffset)); in HandlePolymorphicStoreCase()
3135 __ JumpIfSmi(too_far, miss); in HandlePolymorphicStoreCase()
3137 __ Ldr(receiver_map, MemOperand(pointer_reg, kPointerSize * 2)); in HandlePolymorphicStoreCase()
3140 __ mov(feedback, too_far); in HandlePolymorphicStoreCase()
3141 __ Add(receiver_map, receiver_map, Code::kHeaderSize - kHeapObjectTag); in HandlePolymorphicStoreCase()
3142 __ Jump(receiver_map); in HandlePolymorphicStoreCase()
3144 __ Bind(&prepare_next); in HandlePolymorphicStoreCase()
3145 __ Add(pointer_reg, pointer_reg, kPointerSize * 3); in HandlePolymorphicStoreCase()
3146 __ Cmp(pointer_reg, too_far); in HandlePolymorphicStoreCase()
3147 __ B(lt, &next_loop); in HandlePolymorphicStoreCase()
3150 __ jmp(miss); in HandlePolymorphicStoreCase()
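
The store variant walks triples instead of pairs, hence the kPointerSize * 3 stride: each entry is {cached map, transition map or undefined, handler}, and when the middle slot holds a map the stub installs it as the receiver's new map before tail-calling the handler. A sketch under the same flattened-array assumption as above:

    #include <cstddef>

    struct Map;
    typedef void (*Handler)();

    Handler FindStoreHandler(void* const* feedback, size_t length,
                             const Map* receiver_map, const void* undefined,
                             const Map** transition_out) {
      for (size_t i = 0; i + 2 < length; i += 3) {
        if (feedback[i] != receiver_map) continue;
        // Middle slot: undefined means "store in place"; anything else is
        // the map to transition the receiver to before running the handler.
        *transition_out = feedback[i + 1] == undefined
                              ? nullptr
                              : static_cast<const Map*>(feedback[i + 1]);
        return reinterpret_cast<Handler>(feedback[i + 2]);
      }
      return nullptr;  // miss
    }
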
3163 __ Add(feedback, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2)); in GenerateImpl()
3164 __ Ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); in GenerateImpl()
3174 __ Bind(&try_array); in GenerateImpl()
3176 __ Ldr(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset)); in GenerateImpl()
3177 __ JumpIfNotRoot(scratch1, Heap::kFixedArrayMapRootIndex, &not_array); in GenerateImpl()
3183 __ Bind(&not_array); in GenerateImpl()
3185 __ JumpIfNotRoot(feedback, Heap::kmegamorphic_symbolRootIndex, in GenerateImpl()
3189 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET); in GenerateImpl()
3191 __ Bind(&try_poly_name); in GenerateImpl()
3193 __ Cmp(key, feedback); in GenerateImpl()
3194 __ B(ne, &miss); in GenerateImpl()
3197 __ Add(feedback, vector, Operand::UntagSmiAndScale(slot, kPointerSizeLog2)); in GenerateImpl()
3198 __ Ldr(feedback, in GenerateImpl()
3202 __ Bind(&miss); in GenerateImpl()
3205 __ Bind(&load_smi_map); in GenerateImpl()
3206 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); in GenerateImpl()
3207 __ jmp(&compare_map); in GenerateImpl()
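
GenerateImpl tries the feedback states cheapest-first: the monomorphic weak-cell compare, then the FixedArray polymorphic scan, then the megamorphic sentinel symbol, and finally a name-keyed polymorphic entry; a Smi receiver takes the load_smi_map path and borrows the heap-number map so the same map comparisons apply. A sketch of that ordering, where the predicates are stand-ins for the checks above, not V8 functions:

    enum class FeedbackState { MONOMORPHIC, POLYMORPHIC, MEGAMORPHIC, MISS };

    FeedbackState Classify(bool weak_cell_matches, bool feedback_is_array,
                           bool feedback_is_megamorphic_symbol) {
      if (weak_cell_matches) return FeedbackState::MONOMORPHIC;  // cheapest
      if (feedback_is_array) return FeedbackState::POLYMORPHIC;  // scan
      if (feedback_is_megamorphic_symbol) return FeedbackState::MEGAMORPHIC;
      return FeedbackState::MISS;  // fall through to the runtime IC miss
    }
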
3223 __ Bind(&entry_hook_call_start); in MaybeCallEntryHook()
3224 __ Push(lr); in MaybeCallEntryHook()
3225 __ CallStub(&stub); in MaybeCallEntryHook()
3229 __ Pop(lr); in MaybeCallEntryHook()
3240 __ PushCPURegList(kCallerSaved); in Generate()
3245 __ Sub(x0, lr, kProfileEntryHookCallSize); in Generate()
3250 __ Mov(x10, entry_hook); in Generate()
3255 __ Mov(x10, Operand(ExternalReference(&dispatcher, in Generate()
3259 __ Mov(x2, ExternalReference::isolate_address(isolate())); in Generate()
3264 __ Add(x1, __ StackPointer(), kNumSavedRegs * kPointerSize); in Generate()
3269 __ CallCFunction(x10, 2, 0); in Generate()
3272 __ PopCPURegList(kCallerSaved); in Generate()
3273 __ Ret(); in Generate()
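
ProfileEntryHookStub saves the caller-saved registers, reconstructs the entry point of the instrumented function (lr minus the fixed size of the hook call sequence) and the address of the stack slot holding the saved return address, then calls the embedder's hook through a C dispatcher. The hook itself has the public signature from include/v8.h:

    #include <stdint.h>

    // First argument: entry point of the function being profiled.
    // Second: address of the stack slot where its return address lives.
    typedef void (*FunctionEntryHook)(uintptr_t function,
                                      uintptr_t return_addr_location);
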
3283 const Register old_stack_pointer = __ StackPointer(); in Generate()
3284 __ SetStackPointer(csp); in Generate()
3287 __ Poke(lr, 0); in Generate()
3289 __ Blr(x10); in Generate()
3291 __ Peek(lr, 0); in Generate()
3292 __ AssertFPCRState(); in Generate()
3293 __ Ret(); in Generate()
3295 __ SetStackPointer(old_stack_pointer); in Generate()
3302 DCHECK(csp.Is(__ StackPointer())); in GenerateCall()
3306 __ Mov(lr, Operand(code, RelocInfo::CODE_TARGET)); in GenerateCall()
3307 __ Mov(x10, target); in GenerateCall()
3309 __ Blr(lr); in GenerateCall()
3330 __ AssertName(name); in GeneratePositiveLookup()
3333 __ Ldrsw(scratch1, UntagSmiFieldMemOperand(elements, kCapacityOffset)); in GeneratePositiveLookup()
3334 __ Sub(scratch1, scratch1, 1); in GeneratePositiveLookup()
3339 __ Ldr(scratch2, FieldMemOperand(name, Name::kHashFieldOffset)); in GeneratePositiveLookup()
3346 __ Add(scratch2, scratch2, Operand( in GeneratePositiveLookup()
3349 __ And(scratch2, scratch1, Operand(scratch2, LSR, Name::kHashShift)); in GeneratePositiveLookup()
3353 __ Add(scratch2, scratch2, Operand(scratch2, LSL, 1)); in GeneratePositiveLookup()
3358 __ Add(scratch2, elements, Operand(scratch2, LSL, kPointerSizeLog2)); in GeneratePositiveLookup()
3359 __ Ldr(scratch3, FieldMemOperand(scratch2, kElementsStartOffset)); in GeneratePositiveLookup()
3360 __ Cmp(name, scratch3); in GeneratePositiveLookup()
3361 __ B(eq, done); in GeneratePositiveLookup()
3372 __ PushCPURegList(spill_list); in GeneratePositiveLookup()
3376 __ Mov(x1, name); in GeneratePositiveLookup()
3377 __ Mov(x0, elements); in GeneratePositiveLookup()
3379 __ Mov(x0, elements); in GeneratePositiveLookup()
3380 __ Mov(x1, name); in GeneratePositiveLookup()
3385 __ CallStub(&stub); in GeneratePositiveLookup()
3386 __ Cbz(x0, &not_found); in GeneratePositiveLookup()
3387 __ Mov(scratch2, x2); // Move entry index into scratch2. in GeneratePositiveLookup()
3388 __ PopCPURegList(spill_list); in GeneratePositiveLookup()
3389 __ B(done); in GeneratePositiveLookup()
3391 __ Bind(&not_found); in GeneratePositiveLookup()
3392 __ PopCPURegList(spill_list); in GeneratePositiveLookup()
3393 __ B(miss); in GeneratePositiveLookup()
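
The probe sequence above implements open addressing over a NameDictionary whose capacity is a power of two: each probe adds a precomputed offset to the name's hash, masks by capacity - 1, then multiplies by 3 because every entry spans three slots (key, value, details). A minimal sketch of one probe, assuming the hash has already been decoded from the hash field (the stub folds the Name::kHashShift shift into the And):

    #include <cstdint>

    inline uint32_t ProbeIndex(uint32_t hash, uint32_t probe_offset,
                               uint32_t capacity) {
      uint32_t mask = capacity - 1;             // capacity is a power of two
      uint32_t bucket = (hash + probe_offset) & mask;
      return bucket * 3;                        // entries are 3 slots wide
    }
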
3416 __ Ldrsw(index, UntagSmiFieldMemOperand(properties, kCapacityOffset)); in GenerateNegativeLookup()
3417 __ Sub(index, index, 1); in GenerateNegativeLookup()
3418 __ And(index, index, name->Hash() + NameDictionary::GetProbeOffset(i)); in GenerateNegativeLookup()
3422 __ Add(index, index, Operand(index, LSL, 1)); // index *= 3. in GenerateNegativeLookup()
3427 __ Add(tmp, properties, Operand(index, LSL, kPointerSizeLog2)); in GenerateNegativeLookup()
3428 __ Ldr(entity_name, FieldMemOperand(tmp, kElementsStartOffset)); in GenerateNegativeLookup()
3430 __ JumpIfRoot(entity_name, Heap::kUndefinedValueRootIndex, done); in GenerateNegativeLookup()
3433 __ Cmp(entity_name, Operand(name)); in GenerateNegativeLookup()
3434 __ B(eq, miss); in GenerateNegativeLookup()
3437 __ JumpIfRoot(entity_name, Heap::kTheHoleValueRootIndex, &good); in GenerateNegativeLookup()
3440 __ Ldr(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset)); in GenerateNegativeLookup()
3441 __ Ldrb(entity_name, in GenerateNegativeLookup()
3443 __ JumpIfNotUniqueNameInstanceType(entity_name, miss); in GenerateNegativeLookup()
3444 __ Bind(&good); in GenerateNegativeLookup()
3451 __ PushCPURegList(spill_list); in GenerateNegativeLookup()
3453 __ Ldr(x0, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); in GenerateNegativeLookup()
3454 __ Mov(x1, Operand(name)); in GenerateNegativeLookup()
3456 __ CallStub(&stub); in GenerateNegativeLookup()
3459 __ Mov(scratch0, x0); in GenerateNegativeLookup()
3460 __ PopCPURegList(spill_list); in GenerateNegativeLookup()
3462 __ Cbz(scratch0, done); in GenerateNegativeLookup()
3463 __ B(miss); in GenerateNegativeLookup()
3489 __ Ldrsw(mask, UntagSmiFieldMemOperand(dictionary, kCapacityOffset)); in Generate()
3490 __ Sub(mask, mask, 1); in Generate()
3492 __ Ldr(hash, FieldMemOperand(key, Name::kHashFieldOffset)); in Generate()
3493 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex); in Generate()
3504 __ Add(index, hash, in Generate()
3507 __ Mov(index, hash); in Generate()
3509 __ And(index, mask, Operand(index, LSR, Name::kHashShift)); in Generate()
3513 __ Add(index, index, Operand(index, LSL, 1)); // index *= 3. in Generate()
3515 __ Add(index, dictionary, Operand(index, LSL, kPointerSizeLog2)); in Generate()
3516 __ Ldr(entry_key, FieldMemOperand(index, kElementsStartOffset)); in Generate()
3519 __ Cmp(entry_key, undefined); in Generate()
3520 __ B(eq, &not_in_dictionary); in Generate()
3523 __ Cmp(entry_key, key); in Generate()
3524 __ B(eq, &in_dictionary); in Generate()
3528 __ Ldr(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset)); in Generate()
3529 __ Ldrb(entry_key, FieldMemOperand(entry_key, Map::kInstanceTypeOffset)); in Generate()
3530 __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary); in Generate()
3534 __ Bind(&maybe_in_dictionary); in Generate()
3539 __ Mov(result, 0); in Generate()
3540 __ Ret(); in Generate()
3543 __ Bind(&in_dictionary); in Generate()
3544 __ Mov(result, 1); in Generate()
3545 __ Ret(); in Generate()
3547 __ Bind(&not_in_dictionary); in Generate()
3548 __ Mov(result, 0); in Generate()
3549 __ Ret(); in Generate()
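
The out-of-line stub runs a fixed number of probes with three exits per iteration: an undefined key proves absence, key equality proves presence, and any other unique name keeps probing; a non-unique-name key bails out to maybe_in_dictionary, where a positive lookup reports failure (0) while a negative lookup stays conservative. A sketch of that loop; probe_offsets, total_probes, and the flat entry layout are assumptions standing in for the real kTotalProbes unrolling:

    #include <cstdint>

    enum LookupResult { ABSENT = 0, PRESENT = 1 };

    // Bounded probing over {key, value, details} triples.
    LookupResult Lookup(void* const* elements, uint32_t capacity,
                        const void* key, const void* undefined, uint32_t hash,
                        const uint32_t* probe_offsets, int total_probes,
                        bool positive_lookup) {
      uint32_t mask = capacity - 1;
      for (int i = 0; i < total_probes; i++) {
        uint32_t entry = ((hash + probe_offsets[i]) & mask) * 3;
        const void* entry_key = elements[entry];
        if (entry_key == undefined) return ABSENT;  // empty slot
        if (entry_key == key) return PRESENT;       // exact match
        // Any other unique name: keep probing. (The real stub instead
        // exits early if the slot holds a non-unique name.)
      }
      // Probe budget exhausted: fail a positive lookup, but assume
      // "present" for a negative lookup so it cannot miss a real entry.
      return positive_lookup ? ABSENT : PRESENT;
    }
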
3559 __ TailCallStub(&stub); in CreateArrayDispatch()
3570 __ CompareAndBranch(kind, candidate_kind, ne, &next); in CreateArrayDispatch()
3572 __ TailCallStub(&stub); in CreateArrayDispatch()
3573 __ Bind(&next); in CreateArrayDispatch()
3577 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatch()
3609 __ Tbnz(kind, 0, &normal_sequence); in CreateArrayDispatchOneArgument()
3614 __ Peek(x10, 0); in CreateArrayDispatchOneArgument()
3615 __ Cbz(x10, &normal_sequence); in CreateArrayDispatchOneArgument()
3624 __ TailCallStub(&stub_holey); in CreateArrayDispatchOneArgument()
3626 __ Bind(&normal_sequence); in CreateArrayDispatchOneArgument()
3630 __ TailCallStub(&stub); in CreateArrayDispatchOneArgument()
3634 __ Orr(kind, kind, 1); in CreateArrayDispatchOneArgument()
3637 __ Ldr(x10, FieldMemOperand(allocation_site, 0)); in CreateArrayDispatchOneArgument()
3638 __ JumpIfNotRoot(x10, Heap::kAllocationSiteMapRootIndex, in CreateArrayDispatchOneArgument()
3640 __ Assert(eq, kExpectedAllocationSite); in CreateArrayDispatchOneArgument()
3647 __ Ldr(x11, FieldMemOperand(allocation_site, in CreateArrayDispatchOneArgument()
3649 __ Add(x11, x11, Smi::FromInt(kFastElementsKindPackedToHoley)); in CreateArrayDispatchOneArgument()
3650 __ Str(x11, FieldMemOperand(allocation_site, in CreateArrayDispatchOneArgument()
3653 __ Bind(&normal_sequence); in CreateArrayDispatchOneArgument()
3659 __ CompareAndBranch(kind, candidate_kind, ne, &next); in CreateArrayDispatchOneArgument()
3661 __ TailCallStub(&stub); in CreateArrayDispatchOneArgument()
3662 __ Bind(&next); in CreateArrayDispatchOneArgument()
3666 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatchOneArgument()
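
The Orr(kind, kind, 1) transition works because the fast elements kinds are ordered so each holey variant sits exactly one above its packed counterpart; the stub also bumps the AllocationSite's transition info by kFastElementsKindPackedToHoley so future arrays from that site start out holey. A sketch of the numbering convention (the enum mirrors V8's ordering of the fast kinds; it is written out here purely for illustration):

    enum ElementsKind {
      FAST_SMI_ELEMENTS = 0,
      FAST_HOLEY_SMI_ELEMENTS = 1,
      FAST_ELEMENTS = 2,
      FAST_HOLEY_ELEMENTS = 3,
    };

    inline ElementsKind GetHoleyElementsKind(ElementsKind packed) {
      // Holey variants sit at packed | 1, so a single Orr suffices.
      return static_cast<ElementsKind>(packed | 1);
    }
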
3711 __ Cbz(argc, &zero_case); in GenerateDispatchToArrayStub()
3712 __ Cmp(argc, 1); in GenerateDispatchToArrayStub()
3713 __ B(ne, &n_case); in GenerateDispatchToArrayStub()
3718 __ Bind(&zero_case); in GenerateDispatchToArrayStub()
3722 __ Bind(&n_case); in GenerateDispatchToArrayStub()
3725 __ TailCallStub(&stub); in GenerateDispatchToArrayStub()
3748 __ Ldr(x10, FieldMemOperand(constructor, in Generate()
3751 __ JumpIfSmi(x10, &unexpected_map); in Generate()
3752 __ JumpIfObjectType(x10, x10, x11, MAP_TYPE, &map_ok); in Generate()
3753 __ Bind(&unexpected_map); in Generate()
3754 __ Abort(kUnexpectedInitialMapForArrayFunction); in Generate()
3755 __ Bind(&map_ok); in Generate()
3759 __ AssertUndefinedOrAllocationSite(allocation_site, x10); in Generate()
3763 __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset)); in Generate()
3766 __ Cmp(new_target, constructor); in Generate()
3767 __ B(ne, &subclassing); in Generate()
3772 __ JumpIfRoot(allocation_site, Heap::kUndefinedValueRootIndex, &no_info); in Generate()
3774 __ Ldrsw(kind, in Generate()
3777 __ And(kind, kind, AllocationSite::ElementsKindBits::kMask); in Generate()
3780 __ Bind(&no_info); in Generate()
3784 __ Bind(&subclassing); in Generate()
3785 __ Poke(constructor, Operand(x0, LSL, kPointerSizeLog2)); in Generate()
3786 __ Add(x0, x0, Operand(3)); in Generate()
3787 __ Push(new_target, allocation_site); in Generate()
3788 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate())); in Generate()
3797 __ Cbz(argc, &zero_case); in GenerateCase()
3798 __ CompareAndBranch(argc, 1, ne, &n_case); in GenerateCase()
3805 __ Peek(x10, 0); in GenerateCase()
3806 __ Cbz(x10, &packed_case); in GenerateCase()
3810 __ TailCallStub(&stub1_holey); in GenerateCase()
3812 __ Bind(&packed_case); in GenerateCase()
3815 __ TailCallStub(&stub1); in GenerateCase()
3817 __ Bind(&zero_case); in GenerateCase()
3820 __ TailCallStub(&stub0); in GenerateCase()
3822 __ Bind(&n_case); in GenerateCase()
3825 __ TailCallStub(&stubN); in GenerateCase()
3845 __ Ldr(x10, FieldMemOperand(constructor, in Generate()
3848 __ JumpIfSmi(x10, &unexpected_map); in Generate()
3849 __ JumpIfObjectType(x10, x10, x11, MAP_TYPE, &map_ok); in Generate()
3850 __ Bind(&unexpected_map); in Generate()
3851 __ Abort(kUnexpectedInitialMapForArrayFunction); in Generate()
3852 __ Bind(&map_ok); in Generate()
3857 __ Ldr(x10, FieldMemOperand(constructor, in Generate()
3861 __ LoadElementsKindFromMap(kind, x10); in Generate()
3865 __ Cmp(x3, FAST_ELEMENTS); in Generate()
3866 __ Ccmp(x3, FAST_HOLEY_ELEMENTS, ZFlag, ne); in Generate()
3867 __ Assert(eq, kInvalidElementsKindForInternalArrayOrInternalPackedArray); in Generate()
3871 __ CompareAndBranch(kind, FAST_ELEMENTS, eq, &fast_elements_case); in Generate()
3874 __ Bind(&fast_elements_case); in Generate()
3886 __ AssertFunction(x1); in Generate()
3887 __ AssertReceiver(x3); in Generate()
3891 __ JumpIfNotObjectType(x3, x2, x2, JS_FUNCTION_TYPE, &new_object); in Generate()
3894 __ Ldr(x2, FieldMemOperand(x3, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3895 __ JumpIfSmi(x2, &new_object); in Generate()
3896 __ JumpIfNotObjectType(x2, x0, x0, MAP_TYPE, &new_object); in Generate()
3900 __ Ldr(x0, FieldMemOperand(x2, Map::kConstructorOrBackPointerOffset)); in Generate()
3901 __ CompareAndBranch(x0, x1, ne, &new_object); in Generate()
3905 __ Ldrb(x4, FieldMemOperand(x2, Map::kInstanceSizeOffset)); in Generate()
3906 __ Allocate(x4, x0, x5, x6, &allocate, SIZE_IN_WORDS); in Generate()
3907 __ Bind(&done_allocate); in Generate()
3911 __ Str(x2, FieldMemOperand(x0, JSObject::kMapOffset)); in Generate()
3912 __ LoadRoot(x3, Heap::kEmptyFixedArrayRootIndex); in Generate()
3915 __ Str(x3, FieldMemOperand(x0, JSObject::kPropertiesOffset)); in Generate()
3916 __ Str(x3, FieldMemOperand(x0, JSObject::kElementsOffset)); in Generate()
3918 __ Add(x1, x0, Operand(JSObject::kHeaderSize - kHeapObjectTag)); in Generate()
3932 __ LoadRoot(x6, Heap::kUndefinedValueRootIndex); in Generate()
3933 __ Ldr(w3, FieldMemOperand(x2, Map::kBitField3Offset)); in Generate()
3934 __ TestAndBranchIfAnySet(w3, Map::ConstructionCounter::kMask, in Generate()
3938 __ InitializeFieldsWithFiller(x1, x5, x6); in Generate()
3939 __ Ret(); in Generate()
3941 __ Bind(&slack_tracking); in Generate()
3945 __ Sub(w3, w3, 1 << Map::ConstructionCounter::kShift); in Generate()
3946 __ Str(w3, FieldMemOperand(x2, Map::kBitField3Offset)); in Generate()
3949 __ Ldrb(x4, FieldMemOperand(x2, Map::kUnusedPropertyFieldsOffset)); in Generate()
3950 __ Sub(x4, x5, Operand(x4, LSL, kPointerSizeLog2)); in Generate()
3951 __ InitializeFieldsWithFiller(x1, x4, x6); in Generate()
3954 __ LoadRoot(x6, Heap::kOnePointerFillerMapRootIndex); in Generate()
3955 __ InitializeFieldsWithFiller(x1, x5, x6); in Generate()
3960 __ TestAndBranchIfAllClear(w3, Map::ConstructionCounter::kMask, &finalize); in Generate()
3961 __ Ret(); in Generate()
3964 __ Bind(&finalize); in Generate()
3967 __ Push(x0, x2); in Generate()
3968 __ CallRuntime(Runtime::kFinalizeInstanceSize); in Generate()
3969 __ Pop(x0); in Generate()
3971 __ Ret(); in Generate()
3975 __ Bind(&allocate); in Generate()
3980 __ Mov(x4, in Generate()
3982 __ Push(x2, x4); in Generate()
3983 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
3984 __ Pop(x2); in Generate()
3986 __ Ldrb(x5, FieldMemOperand(x2, Map::kInstanceSizeOffset)); in Generate()
3987 __ Add(x5, x0, Operand(x5, LSL, kPointerSizeLog2)); in Generate()
3989 __ Sub(x5, x5, kHeapObjectTag); // Subtract the tag from end. in Generate()
3990 __ B(&done_allocate); in Generate()
3993 __ Bind(&new_object); in Generate()
3994 __ Push(x1, x3); in Generate()
3995 __ TailCallRuntime(Runtime::kNewObject); in Generate()
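
The slack-tracking split above: while the map's construction counter (kept in bit field 3) is nonzero, only the used prefix of the new object is initialized with undefined and the unused tail is covered with one-pointer filler maps, which keeps the heap iterable and lets the map shrink its instance size later; once the counter reaches zero the stub calls Runtime::kFinalizeInstanceSize. A sketch of the initialization split, with illustrative stand-ins for the tagged values:

    #include <cstddef>

    void InitializeObject(void** fields, size_t instance_slots,
                          size_t unused_slots, bool slack_tracking_active,
                          void* undefined, void* one_pointer_filler) {
      if (!slack_tracking_active) {
        for (size_t i = 0; i < instance_slots; i++) fields[i] = undefined;
        return;
      }
      size_t used = instance_slots - unused_slots;
      for (size_t i = 0; i < used; i++) fields[i] = undefined;
      // The unused tail is filler, not undefined, so the map can later
      // reclaim this slack by shrinking the instance size.
      for (size_t i = used; i < instance_slots; i++)
        fields[i] = one_pointer_filler;
    }
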
4006 __ AssertFunction(x1); in Generate()
4009 __ Mov(x2, fp); in Generate()
4013 __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset)); in Generate()
4017 __ Ldr(x3, MemOperand(x2, StandardFrameConstants::kFunctionOffset)); in Generate()
4018 __ Cmp(x3, x1); in Generate()
4019 __ B(eq, &ok); in Generate()
4020 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); in Generate()
4021 __ Bind(&ok); in Generate()
4027 __ Ldr(x2, MemOperand(x2, CommonFrameConstants::kCallerFPOffset)); in Generate()
4028 __ Ldr(x3, MemOperand(x2, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate()
4029 __ Cmp(x3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); in Generate()
4030 __ B(ne, &no_rest_parameters); in Generate()
4035 __ Ldrsw(x0, UntagSmiMemOperand( in Generate()
4037 __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); in Generate()
4038 __ Ldrsw( in Generate()
4040 __ Subs(x0, x0, x3); in Generate()
4041 __ B(gt, &rest_parameters); in Generate()
4044 __ Bind(&no_rest_parameters); in Generate()
4053 __ Allocate(JSArray::kSize, x0, x1, x2, &allocate, NO_ALLOCATION_FLAGS); in Generate()
4054 __ Bind(&done_allocate); in Generate()
4057 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, x1); in Generate()
4058 __ Str(x1, FieldMemOperand(x0, JSArray::kMapOffset)); in Generate()
4059 __ LoadRoot(x1, Heap::kEmptyFixedArrayRootIndex); in Generate()
4060 __ Str(x1, FieldMemOperand(x0, JSArray::kPropertiesOffset)); in Generate()
4061 __ Str(x1, FieldMemOperand(x0, JSArray::kElementsOffset)); in Generate()
4062 __ Mov(x1, Smi::kZero); in Generate()
4063 __ Str(x1, FieldMemOperand(x0, JSArray::kLengthOffset)); in Generate()
4065 __ Ret(); in Generate()
4068 __ Bind(&allocate); in Generate()
4071 __ Push(Smi::FromInt(JSArray::kSize)); in Generate()
4072 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
4074 __ B(&done_allocate); in Generate()
4077 __ Bind(&rest_parameters); in Generate()
4080 __ Add(x2, x2, Operand(x0, LSL, kPointerSizeLog2)); in Generate()
4081 __ Add(x2, x2, StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize); in Generate()
4093 __ Mov(x6, JSArray::kSize + FixedArray::kHeaderSize); in Generate()
4094 __ Add(x6, x6, Operand(x0, LSL, kPointerSizeLog2)); in Generate()
4095 __ Allocate(x6, x3, x4, x5, &allocate, NO_ALLOCATION_FLAGS); in Generate()
4096 __ Bind(&done_allocate); in Generate()
4099 __ SmiTag(x6, x0); in Generate()
4102 __ LoadRoot(x1, Heap::kFixedArrayMapRootIndex); in Generate()
4103 __ Str(x1, FieldMemOperand(x3, FixedArray::kMapOffset)); in Generate()
4104 __ Str(x6, FieldMemOperand(x3, FixedArray::kLengthOffset)); in Generate()
4105 __ Add(x4, x3, FixedArray::kHeaderSize); in Generate()
4108 __ Add(x0, x4, Operand(x0, LSL, kPointerSizeLog2)); in Generate()
4109 __ Bind(&loop); in Generate()
4110 __ Cmp(x4, x0); in Generate()
4111 __ B(eq, &done_loop); in Generate()
4112 __ Ldr(x5, MemOperand(x2, 0 * kPointerSize)); in Generate()
4113 __ Str(x5, FieldMemOperand(x4, 0 * kPointerSize)); in Generate()
4114 __ Sub(x2, x2, Operand(1 * kPointerSize)); in Generate()
4115 __ Add(x4, x4, Operand(1 * kPointerSize)); in Generate()
4116 __ B(&loop); in Generate()
4117 __ Bind(&done_loop); in Generate()
4121 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, x1); in Generate()
4122 __ Str(x1, FieldMemOperand(x0, JSArray::kMapOffset)); in Generate()
4123 __ LoadRoot(x1, Heap::kEmptyFixedArrayRootIndex); in Generate()
4124 __ Str(x1, FieldMemOperand(x0, JSArray::kPropertiesOffset)); in Generate()
4125 __ Str(x3, FieldMemOperand(x0, JSArray::kElementsOffset)); in Generate()
4126 __ Str(x6, FieldMemOperand(x0, JSArray::kLengthOffset)); in Generate()
4128 __ Ret(); in Generate()
4132 __ Bind(&allocate); in Generate()
4133 __ Cmp(x6, Operand(kMaxRegularHeapObjectSize)); in Generate()
4134 __ B(gt, &too_big_for_new_space); in Generate()
4137 __ SmiTag(x0); in Generate()
4138 __ SmiTag(x6); in Generate()
4139 __ Push(x0, x2, x6); in Generate()
4140 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
4141 __ Mov(x3, x0); in Generate()
4142 __ Pop(x2, x0); in Generate()
4143 __ SmiUntag(x0); in Generate()
4145 __ B(&done_allocate); in Generate()
4148 __ Bind(&too_big_for_new_space); in Generate()
4149 __ Push(x1); in Generate()
4150 __ TailCallRuntime(Runtime::kNewRestParameter); in Generate()
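
The rest-argument count is the actual argument count minus the formal parameter count (the Subs/B(gt) pair above skips straight to the empty-array path when that difference is not positive), and the copy loop walks the caller frame downward while filling the new FixedArray upward, because arguments are stored in reverse order above the frame pointer. A sketch of that copy direction:

    #include <cstddef>

    // dst is the first element slot of the new FixedArray; highest_arg
    // points at the rest argument with the highest address in the frame.
    void CopyRestArguments(void** dst, void* const* highest_arg,
                           size_t count) {
      void* const* src = highest_arg;
      for (size_t i = 0; i < count; i++) {
        dst[i] = *src;  // Ldr x5, [x2]; Str x5, [x4]
        src--;          // Sub x2, x2, kPointerSize
      }
    }
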
4162 __ AssertFunction(x1); in Generate()
4165 __ Mov(x6, fp); in Generate()
4169 __ Ldr(x6, MemOperand(x6, StandardFrameConstants::kCallerFPOffset)); in Generate()
4173 __ Ldr(x3, MemOperand(x6, StandardFrameConstants::kFunctionOffset)); in Generate()
4174 __ Cmp(x3, x1); in Generate()
4175 __ B(eq, &ok); in Generate()
4176 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); in Generate()
4177 __ Bind(&ok); in Generate()
4181 __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); in Generate()
4182 __ Ldrsw( in Generate()
4184 __ Add(x3, x6, Operand(x2, LSL, kPointerSizeLog2)); in Generate()
4185 __ Add(x3, x3, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate()
4186 __ SmiTag(x2); in Generate()
4202 __ SmiUntag(param_count, param_count_smi); in Generate()
4209 __ Ldr(caller_fp, MemOperand(x6, StandardFrameConstants::kCallerFPOffset)); in Generate()
4210 __ Ldr( in Generate()
4213 __ Cmp(caller_ctx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); in Generate()
4214 __ B(eq, &adaptor_frame); in Generate()
4228 __ Mov(arg_count, param_count); in Generate()
4229 __ Mov(mapped_params, param_count); in Generate()
4230 __ B(&try_allocate); in Generate()
4233 __ Bind(&adaptor_frame); in Generate()
4234 __ Ldr(arg_count_smi, in Generate()
4237 __ SmiUntag(arg_count, arg_count_smi); in Generate()
4238 __ Add(x10, caller_fp, Operand(arg_count, LSL, kPointerSizeLog2)); in Generate()
4239 __ Add(recv_arg, x10, StandardFrameConstants::kCallerSPOffset); in Generate()
4242 __ Cmp(param_count, arg_count); in Generate()
4243 __ Csel(mapped_params, param_count, arg_count, lt); in Generate()
4245 __ Bind(&try_allocate); in Generate()
4265 __ Mov(size, Operand(mapped_params, LSL, kPointerSizeLog2)); in Generate()
4266 __ Add(size, size, kParameterMapHeaderSize); in Generate()
4270 __ Cmp(mapped_params, 0); in Generate()
4271 __ CzeroX(size, eq); in Generate()
4274 __ Add(size, size, Operand(arg_count, LSL, kPointerSizeLog2)); in Generate()
4275 __ Add(size, size, FixedArray::kHeaderSize + JSSloppyArgumentsObject::kSize); in Generate()
4280 __ Allocate(size, alloc_obj, x11, x12, &runtime, NO_ALLOCATION_FLAGS); in Generate()
4297 __ Ldr(global_ctx, NativeContextMemOperand()); in Generate()
4299 __ Ldr(sloppy_args_map, in Generate()
4301 __ Ldr( in Generate()
4304 __ Cmp(mapped_params, 0); in Generate()
4305 __ CmovX(sloppy_args_map, aliased_args_map, ne); in Generate()
4308 __ Str(sloppy_args_map, FieldMemOperand(alloc_obj, JSObject::kMapOffset)); in Generate()
4309 __ LoadRoot(x10, Heap::kEmptyFixedArrayRootIndex); in Generate()
4310 __ Str(x10, FieldMemOperand(alloc_obj, JSObject::kPropertiesOffset)); in Generate()
4311 __ Str(x10, FieldMemOperand(alloc_obj, JSObject::kElementsOffset)); in Generate()
4314 __ AssertNotSmi(function); in Generate()
4315 __ Str(function, in Generate()
4319 __ Str(arg_count_smi, in Generate()
4338 __ Add(elements, alloc_obj, JSSloppyArgumentsObject::kSize); in Generate()
4339 __ Str(elements, FieldMemOperand(alloc_obj, JSObject::kElementsOffset)); in Generate()
4343 __ Cmp(mapped_params, 0); in Generate()
4347 __ CmovX(backing_store, elements, eq); in Generate()
4348 __ B(eq, &skip_parameter_map); in Generate()
4350 __ LoadRoot(x10, Heap::kSloppyArgumentsElementsMapRootIndex); in Generate()
4351 __ Str(x10, FieldMemOperand(elements, FixedArray::kMapOffset)); in Generate()
4352 __ Add(x10, mapped_params, 2); in Generate()
4353 __ SmiTag(x10); in Generate()
4354 __ Str(x10, FieldMemOperand(elements, FixedArray::kLengthOffset)); in Generate()
4355 __ Str(cp, FieldMemOperand(elements, in Generate()
4357 __ Add(x10, elements, Operand(mapped_params, LSL, kPointerSizeLog2)); in Generate()
4358 __ Add(x10, x10, kParameterMapHeaderSize); in Generate()
4359 __ Str(x10, FieldMemOperand(elements, in Generate()
4393 __ Mov(loop_count, mapped_params); in Generate()
4394 __ Add(index, param_count, static_cast<int>(Context::MIN_CONTEXT_SLOTS)); in Generate()
4395 __ Sub(index, index, mapped_params); in Generate()
4396 __ SmiTag(index); in Generate()
4397 __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex); in Generate()
4398 __ Add(backing_store, elements, Operand(loop_count, LSL, kPointerSizeLog2)); in Generate()
4399 __ Add(backing_store, backing_store, kParameterMapHeaderSize); in Generate()
4401 __ B(&parameters_test); in Generate()
4403 __ Bind(&parameters_loop); in Generate()
4404 __ Sub(loop_count, loop_count, 1); in Generate()
4405 __ Mov(x10, Operand(loop_count, LSL, kPointerSizeLog2)); in Generate()
4406 __ Add(x10, x10, kParameterMapHeaderSize - kHeapObjectTag); in Generate()
4407 __ Str(index, MemOperand(elements, x10)); in Generate()
4408 __ Sub(x10, x10, kParameterMapHeaderSize - FixedArray::kHeaderSize); in Generate()
4409 __ Str(the_hole, MemOperand(backing_store, x10)); in Generate()
4410 __ Add(index, index, Smi::FromInt(1)); in Generate()
4411 __ Bind(&parameters_test); in Generate()
4412 __ Cbnz(loop_count, &parameters_loop); in Generate()
4414 __ Bind(&skip_parameter_map); in Generate()
4416 __ LoadRoot(x10, Heap::kFixedArrayMapRootIndex); in Generate()
4417 __ Str(x10, FieldMemOperand(backing_store, FixedArray::kMapOffset)); in Generate()
4418 __ Str(arg_count_smi, FieldMemOperand(backing_store, in Generate()
4431 __ Mov(x10, mapped_params); in Generate()
4432 __ Sub(recv_arg, recv_arg, Operand(x10, LSL, kPointerSizeLog2)); in Generate()
4433 __ B(&arguments_test); in Generate()
4435 __ Bind(&arguments_loop); in Generate()
4436 __ Sub(recv_arg, recv_arg, kPointerSize); in Generate()
4437 __ Ldr(x11, MemOperand(recv_arg)); in Generate()
4438 __ Add(x12, backing_store, Operand(x10, LSL, kPointerSizeLog2)); in Generate()
4439 __ Str(x11, FieldMemOperand(x12, FixedArray::kHeaderSize)); in Generate()
4440 __ Add(x10, x10, 1); in Generate()
4442 __ Bind(&arguments_test); in Generate()
4443 __ Cmp(x10, arg_count); in Generate()
4444 __ B(lt, &arguments_loop); in Generate()
4446 __ Ret(); in Generate()
4449 __ Bind(&runtime); in Generate()
4450 __ Push(function, recv_arg, arg_count_smi); in Generate()
4451 __ TailCallRuntime(Runtime::kNewSloppyArguments); in Generate()
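
Sloppy (mapped) arguments use two stores: a parameter map whose header slots hold the context and the real backing store, followed by one context-slot index per mapped parameter, plus a plain FixedArray of the actual argument values. Mapped slots keep the live value in the context so parameter writes stay visible through the arguments object. The sketch below shows the read path such a layout supports, with the Smi-encoded slot index simplified to a raw integer (an assumption for illustration):

    #include <cstddef>
    #include <cstdint>

    void* LoadSloppyArgument(void* const* parameter_map, size_t mapped_count,
                             void* const* context, void* const* backing_store,
                             const void* the_hole, size_t i) {
      if (i < mapped_count && parameter_map[i] != the_hole) {
        // Mapped: the slot stores the index of the parameter's context
        // slot (a Smi in the real layout), so reads track variable writes.
        size_t slot = static_cast<size_t>(
            reinterpret_cast<uintptr_t>(parameter_map[i]));
        return context[slot];
      }
      return backing_store[i];  // unmapped: plain element
    }
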
4462 __ AssertFunction(x1); in Generate()
4465 __ Mov(x2, fp); in Generate()
4469 __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset)); in Generate()
4473 __ Ldr(x3, MemOperand(x2, StandardFrameConstants::kFunctionOffset)); in Generate()
4474 __ Cmp(x3, x1); in Generate()
4475 __ B(eq, &ok); in Generate()
4476 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); in Generate()
4477 __ Bind(&ok); in Generate()
4482 __ Ldr(x3, MemOperand(x2, StandardFrameConstants::kCallerFPOffset)); in Generate()
4483 __ Ldr(x4, MemOperand(x3, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate()
4484 __ Cmp(x4, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); in Generate()
4485 __ B(eq, &arguments_adaptor); in Generate()
4487 __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); in Generate()
4488 __ Ldrsw(x0, FieldMemOperand( in Generate()
4490 __ Add(x2, x2, Operand(x0, LSL, kPointerSizeLog2)); in Generate()
4491 __ Add(x2, x2, StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize); in Generate()
4493 __ B(&arguments_done); in Generate()
4494 __ Bind(&arguments_adaptor); in Generate()
4496 __ Ldrsw(x0, UntagSmiMemOperand( in Generate()
4498 __ Add(x2, x3, Operand(x0, LSL, kPointerSizeLog2)); in Generate()
4499 __ Add(x2, x2, StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize); in Generate()
4501 __ Bind(&arguments_done); in Generate()
4513 __ Mov(x6, JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize); in Generate()
4514 __ Add(x6, x6, Operand(x0, LSL, kPointerSizeLog2)); in Generate()
4515 __ Allocate(x6, x3, x4, x5, &allocate, NO_ALLOCATION_FLAGS); in Generate()
4516 __ Bind(&done_allocate); in Generate()
4519 __ SmiTag(x6, x0); in Generate()
4522 __ LoadRoot(x1, Heap::kFixedArrayMapRootIndex); in Generate()
4523 __ Str(x1, FieldMemOperand(x3, FixedArray::kMapOffset)); in Generate()
4524 __ Str(x6, FieldMemOperand(x3, FixedArray::kLengthOffset)); in Generate()
4525 __ Add(x4, x3, FixedArray::kHeaderSize); in Generate()
4528 __ Add(x0, x4, Operand(x0, LSL, kPointerSizeLog2)); in Generate()
4529 __ Bind(&loop); in Generate()
4530 __ Cmp(x4, x0); in Generate()
4531 __ B(eq, &done_loop); in Generate()
4532 __ Ldr(x5, MemOperand(x2, 0 * kPointerSize)); in Generate()
4533 __ Str(x5, FieldMemOperand(x4, 0 * kPointerSize)); in Generate()
4534 __ Sub(x2, x2, Operand(1 * kPointerSize)); in Generate()
4535 __ Add(x4, x4, Operand(1 * kPointerSize)); in Generate()
4536 __ B(&loop); in Generate()
4537 __ Bind(&done_loop); in Generate()
4541 __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, x1); in Generate()
4542 __ Str(x1, FieldMemOperand(x0, JSStrictArgumentsObject::kMapOffset)); in Generate()
4543 __ LoadRoot(x1, Heap::kEmptyFixedArrayRootIndex); in Generate()
4544 __ Str(x1, FieldMemOperand(x0, JSStrictArgumentsObject::kPropertiesOffset)); in Generate()
4545 __ Str(x3, FieldMemOperand(x0, JSStrictArgumentsObject::kElementsOffset)); in Generate()
4546 __ Str(x6, FieldMemOperand(x0, JSStrictArgumentsObject::kLengthOffset)); in Generate()
4548 __ Ret(); in Generate()
4552 __ Bind(&allocate); in Generate()
4553 __ Cmp(x6, Operand(kMaxRegularHeapObjectSize)); in Generate()
4554 __ B(gt, &too_big_for_new_space); in Generate()
4557 __ SmiTag(x0); in Generate()
4558 __ SmiTag(x6); in Generate()
4559 __ Push(x0, x2, x6); in Generate()
4560 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
4561 __ Mov(x3, x0); in Generate()
4562 __ Pop(x2, x0); in Generate()
4563 __ SmiUntag(x0); in Generate()
4565 __ B(&done_allocate); in Generate()
4568 __ Bind(&too_big_for_new_space); in Generate()
4569 __ Push(x1); in Generate()
4570 __ TailCallRuntime(Runtime::kNewStrictArguments); in Generate()
4610 __ Mov(x10, ExternalReference::is_profiling_address(isolate)); in CallApiFunctionAndReturn()
4611 __ Ldrb(w10, MemOperand(x10)); in CallApiFunctionAndReturn()
4612 __ Cbz(w10, &profiler_disabled); in CallApiFunctionAndReturn()
4613 __ Mov(x3, thunk_ref); in CallApiFunctionAndReturn()
4614 __ B(&end_profiler_check); in CallApiFunctionAndReturn()
4616 __ Bind(&profiler_disabled); in CallApiFunctionAndReturn()
4617 __ Mov(x3, function_address); in CallApiFunctionAndReturn()
4618 __ Bind(&end_profiler_check); in CallApiFunctionAndReturn()
4623 __ Poke(x19, (spill_offset + 0) * kXRegSize); in CallApiFunctionAndReturn()
4624 __ Poke(x20, (spill_offset + 1) * kXRegSize); in CallApiFunctionAndReturn()
4625 __ Poke(x21, (spill_offset + 2) * kXRegSize); in CallApiFunctionAndReturn()
4626 __ Poke(x22, (spill_offset + 3) * kXRegSize); in CallApiFunctionAndReturn()
4636 __ Mov(handle_scope_base, next_address); in CallApiFunctionAndReturn()
4637 __ Ldr(next_address_reg, MemOperand(handle_scope_base, kNextOffset)); in CallApiFunctionAndReturn()
4638 __ Ldr(limit_reg, MemOperand(handle_scope_base, kLimitOffset)); in CallApiFunctionAndReturn()
4639 __ Ldr(level_reg, MemOperand(handle_scope_base, kLevelOffset)); in CallApiFunctionAndReturn()
4640 __ Add(level_reg, level_reg, 1); in CallApiFunctionAndReturn()
4641 __ Str(level_reg, MemOperand(handle_scope_base, kLevelOffset)); in CallApiFunctionAndReturn()
4645 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
4646 __ Mov(x0, ExternalReference::isolate_address(isolate)); in CallApiFunctionAndReturn()
4647 __ CallCFunction(ExternalReference::log_enter_external_function(isolate), in CallApiFunctionAndReturn()
4649 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
4660 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
4661 __ Mov(x0, ExternalReference::isolate_address(isolate)); in CallApiFunctionAndReturn()
4662 __ CallCFunction(ExternalReference::log_leave_external_function(isolate), in CallApiFunctionAndReturn()
4664 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
4673 __ Ldr(x0, return_value_operand); in CallApiFunctionAndReturn()
4674 __ Bind(&return_value_loaded); in CallApiFunctionAndReturn()
4677 __ Str(next_address_reg, MemOperand(handle_scope_base, kNextOffset)); in CallApiFunctionAndReturn()
4678 if (__ emit_debug_code()) { in CallApiFunctionAndReturn()
4679 __ Ldr(w1, MemOperand(handle_scope_base, kLevelOffset)); in CallApiFunctionAndReturn()
4680 __ Cmp(w1, level_reg); in CallApiFunctionAndReturn()
4681 __ Check(eq, kUnexpectedLevelAfterReturnFromApiCall); in CallApiFunctionAndReturn()
4683 __ Sub(level_reg, level_reg, 1); in CallApiFunctionAndReturn()
4684 __ Str(level_reg, MemOperand(handle_scope_base, kLevelOffset)); in CallApiFunctionAndReturn()
4685 __ Ldr(x1, MemOperand(handle_scope_base, kLimitOffset)); in CallApiFunctionAndReturn()
4686 __ Cmp(limit_reg, x1); in CallApiFunctionAndReturn()
4687 __ B(ne, &delete_allocated_handles); in CallApiFunctionAndReturn()
4690 __ Bind(&leave_exit_frame); in CallApiFunctionAndReturn()
4692 __ Peek(x19, (spill_offset + 0) * kXRegSize); in CallApiFunctionAndReturn()
4693 __ Peek(x20, (spill_offset + 1) * kXRegSize); in CallApiFunctionAndReturn()
4694 __ Peek(x21, (spill_offset + 2) * kXRegSize); in CallApiFunctionAndReturn()
4695 __ Peek(x22, (spill_offset + 3) * kXRegSize); in CallApiFunctionAndReturn()
4699 __ Ldr(cp, *context_restore_operand); in CallApiFunctionAndReturn()
4703 __ Ldr(w2, *stack_space_operand); in CallApiFunctionAndReturn()
4706 __ LeaveExitFrame(false, x1, !restore_context); in CallApiFunctionAndReturn()
4709 __ Mov(x5, ExternalReference::scheduled_exception_address(isolate)); in CallApiFunctionAndReturn()
4710 __ Ldr(x5, MemOperand(x5)); in CallApiFunctionAndReturn()
4711 __ JumpIfNotRoot(x5, Heap::kTheHoleValueRootIndex, in CallApiFunctionAndReturn()
4715 __ Drop(x2, 1); in CallApiFunctionAndReturn()
4717 __ Drop(stack_space); in CallApiFunctionAndReturn()
4719 __ Ret(); in CallApiFunctionAndReturn()
4722 __ Bind(&promote_scheduled_exception); in CallApiFunctionAndReturn()
4723 __ TailCallRuntime(Runtime::kPromoteScheduledException); in CallApiFunctionAndReturn()
4726 __ Bind(&delete_allocated_handles); in CallApiFunctionAndReturn()
4727 __ Str(limit_reg, MemOperand(handle_scope_base, kLimitOffset)); in CallApiFunctionAndReturn()
4730 __ Mov(saved_result, x0); in CallApiFunctionAndReturn()
4731 __ Mov(x0, ExternalReference::isolate_address(isolate)); in CallApiFunctionAndReturn()
4732 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), in CallApiFunctionAndReturn()
4734 __ Mov(x0, saved_result); in CallApiFunctionAndReturn()
4735 __ B(&leave_exit_frame); in CallApiFunctionAndReturn()
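
The bookkeeping around the API call follows the HandleScope protocol: save next and limit and bump level before the call; afterwards restore next, check and decrement level, and only if limit moved (the callback grew the scope into a new block) call delete_handle_scope_extensions, preserving the result register across that call. A compact sketch of the protocol; HandleScopeData and DeleteExtensions are illustrative stand-ins:

    struct HandleScopeData {
      void** next;
      void** limit;
      int level;
    };

    // Frees any blocks the callback allocated beyond the saved limit.
    void DeleteExtensions(HandleScopeData* /*data*/) { /* elided */ }

    template <typename Fn>
    void* CallApi(HandleScopeData* data, Fn api_call) {
      void** saved_next = data->next;
      void** saved_limit = data->limit;
      data->level++;                 // enter
      void* result = api_call();
      data->next = saved_next;       // drop handles the callback created
      data->level--;                 // leave (the stub Checks it first)
      if (data->limit != saved_limit) {
        data->limit = saved_limit;   // scope grew: release the extra blocks
        DeleteExtensions(data);
      }
      return result;
    }
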
4773 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate()
4776 __ Push(context, callee, call_data); in Generate()
4780 __ Ldr(context, FieldMemOperand(callee, JSFunction::kContextOffset)); in Generate()
4784 __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex); in Generate()
4787 __ Mov(isolate_reg, ExternalReference::isolate_address(masm->isolate())); in Generate()
4791 __ Push(call_data, call_data, isolate_reg, holder); in Generate()
4795 __ Mov(args, masm->StackPointer()); in Generate()
4806 __ EnterExitFrame(false, x10, kApiStackSpace + kCallApiFunctionSpillSpace); in Generate()
4811 __ Add(x0, masm->StackPointer(), 1 * kPointerSize); in Generate()
4813 __ Add(x10, args, Operand((FCA::kArgsLength - 1 + argc()) * kPointerSize)); in Generate()
4814 __ Stp(args, x10, MemOperand(x0, 0 * kPointerSize)); in Generate()
4816 __ Mov(x10, argc()); in Generate()
4817 __ Str(x10, MemOperand(x0, 2 * kPointerSize)); in Generate()
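
The Stp/Str pair above stores the three words a v8::FunctionCallbackInfo is built from: the base of the implicit slots pushed earlier, a pointer to the last explicit argument (FCA::kArgsLength - 1 + argc slots above that base, since arguments sit above the implicit block on a downward-growing stack), and the argument count. A sketch of that layout as a plain struct; the field names and slot arithmetic are assumptions for illustration:

    #include <cstdint>

    struct FunctionCallbackInfoData {
      void** implicit_args;  // base of holder/isolate/return-value slots
      void** values;         // address of the last explicit argument
      int64_t length;        // argc, as the stub stores it
    };

    void InitCallbackInfo(FunctionCallbackInfoData* info,
                          void** implicit_args, int argc,
                          int implicit_slot_count) {
      info->implicit_args = implicit_args;
      info->values = implicit_args + (implicit_slot_count - 1 + argc);
      info->length = argc;
    }
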
4867 __ Push(receiver); in Generate()
4869 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); in Generate()
4870 __ Mov(scratch2, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
4871 __ Ldr(scratch3, FieldMemOperand(callback, AccessorInfo::kDataOffset)); in Generate()
4872 __ Push(scratch3, scratch, scratch, scratch2, holder); in Generate()
4873 __ Push(Smi::kZero); // should_throw_on_error -> false in Generate()
4874 __ Ldr(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset)); in Generate()
4875 __ Push(scratch); in Generate()
4881 __ Mov(x0, masm->StackPointer()); // x0 = Handle<Name> in Generate()
4882 __ Add(x1, x0, 1 * kPointerSize); // x1 = v8::PCI::args_ in Generate()
4891 __ EnterExitFrame(false, x10, kApiStackSpace + kCallApiFunctionSpillSpace); in Generate()
4895 __ Poke(x1, 1 * kPointerSize); in Generate()
4896 __ Add(x1, masm->StackPointer(), 1 * kPointerSize); in Generate()
4903 __ Ldr(scratch, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset)); in Generate()
4904 __ Ldr(api_function_address, in Generate()
4916 #undef __