Lines matching full:__ (all uses of the __ (ACCESS_MASM) assembler macro in this file)

23 #define __ ACCESS_MASM(masm)  macro
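Every entry below is a use of this shorthand. ACCESS_MASM conventionally expands to a masm-> prefix, so each __ line is a method call on the current MacroAssembler; illustratively:

    #define ACCESS_MASM(masm) masm->   // V8's usual definition
    __ movq(rax, rbx);                 // expands to: masm->movq(rax, rbx);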
26 __ popq(rcx); in Generate()
27 __ movq(MemOperand(rsp, rax, times_8, 0), rdi); in Generate()
28 __ pushq(rdi); in Generate()
29 __ pushq(rbx); in Generate()
30 __ pushq(rcx); in Generate()
31 __ addq(rax, Immediate(3)); in Generate()
32 __ TailCallRuntime(Runtime::kNewArray); in Generate()
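A hedged gloss of the shuffle above, with the register roles as I read them from V8's usual convention for this stub (rax = argc, rdi = constructor, rbx = feedback/AllocationSite):

    __ popq(rcx);                                    // pull the return address off the stack
    __ movq(MemOperand(rsp, rax, times_8, 0), rdi);  // overwrite the receiver slot
    __ pushq(rdi);                                   // extra runtime arg: the constructor
    __ pushq(rbx);                                   // extra runtime arg: the feedback value
    __ pushq(rcx);                                   // put the return address back on top
    __ addq(rax, Immediate(3));                      // argc += the three values added above
    __ TailCallRuntime(Runtime::kNewArray);          // tail call into the runtime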
60 __ Push(descriptor.GetRegisterParameter(i)); in GenerateLightweightMiss()
62 __ CallExternalReference(miss, param_count); in GenerateLightweightMiss()
65 __ Ret(); in GenerateLightweightMiss()
70 __ PushCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs); in Generate()
72 __ PrepareCallCFunction(argument_count); in Generate()
73 __ LoadAddress(arg_reg_1, in Generate()
77 __ CallCFunction( in Generate()
80 __ PopCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs); in Generate()
81 __ ret(0); in Generate()
130 __ pushq(scratch1); in Generate()
131 __ pushq(save_reg); in Generate()
134 __ movl(scratch1, mantissa_operand); in Generate()
135 __ Movsd(kScratchDoubleReg, mantissa_operand); in Generate()
136 __ movl(rcx, exponent_operand); in Generate()
137 if (stash_exponent_copy) __ pushq(rcx); in Generate()
139 __ andl(rcx, Immediate(HeapNumber::kExponentMask)); in Generate()
140 __ shrl(rcx, Immediate(HeapNumber::kExponentShift)); in Generate()
141 __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias)); in Generate()
142 __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits)); in Generate()
143 __ j(below, &process_64_bits); in Generate()
147 __ subl(rcx, Immediate(delta)); in Generate()
148 __ xorl(result_reg, result_reg); in Generate()
149 __ cmpl(rcx, Immediate(31)); in Generate()
150 __ j(above, &done); in Generate()
151 __ shll_cl(scratch1); in Generate()
152 __ jmp(&check_negative); in Generate()
154 __ bind(&process_64_bits); in Generate()
155 __ Cvttsd2siq(result_reg, kScratchDoubleReg); in Generate()
156 __ jmp(&done, Label::kNear); in Generate()
159 __ bind(&check_negative); in Generate()
160 __ movl(result_reg, scratch1); in Generate()
161 __ negl(result_reg); in Generate()
163 __ cmpl(MemOperand(rsp, 0), Immediate(0)); in Generate()
165 __ cmpl(exponent_operand, Immediate(0)); in Generate()
167 __ cmovl(greater, result_reg, scratch1); in Generate()
170 __ bind(&done); in Generate()
172 __ addp(rsp, Immediate(kDoubleSize)); in Generate()
176 __ movl(final_result_reg, result_reg); in Generate()
178 __ popq(save_reg); in Generate()
179 __ popq(scratch1); in Generate()
180 __ ret(0); in Generate()
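The stub above converts a double to an int32 with ECMAScript truncation semantics: exponents small enough for an exact int64 go through cvttsd2si (the process_64_bits path), while larger ones are decomposed into sign, exponent, and mantissa and shifted by hand. A hedged C++ sketch of the same bit manipulation, assuming IEEE-754 doubles and two's-complement wrap:

    #include <cstdint>
    #include <cstring>

    int32_t DoubleToInt32(double value) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof bits);
      int biased = static_cast<int>((bits >> 52) & 0x7FF);
      if (biased == 0x7FF) return 0;                 // NaN / Inf -> 0
      if (biased == 0) return 0;                     // zero or denormal: |x| < 1
      // 53-bit significand with the implicit leading 1 restored.
      uint64_t significand = (bits & 0xFFFFFFFFFFFFFull) | (1ull << 52);
      int shift = biased - 1075;                     // value = significand * 2^shift
      uint32_t magnitude;
      if (shift >= 32) {
        magnitude = 0;                               // low 32 bits all shifted out
      } else if (shift >= 0) {
        magnitude = static_cast<uint32_t>(significand << shift);   // wraps mod 2^32
      } else if (shift > -53) {
        magnitude = static_cast<uint32_t>(significand >> -shift);  // truncates toward zero
      } else {
        magnitude = 0;                               // |x| < 1
      }
      // Negate in the unsigned domain so the INT32_MIN case stays well defined.
      uint32_t wrapped = (bits >> 63) ? (0u - magnitude) : magnitude;
      return static_cast<int32_t>(wrapped);          // two's-complement reinterpretation
    }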
188 __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex); in LoadSSE2UnknownOperands()
189 __ JumpIfSmi(rdx, &load_smi_rdx); in LoadSSE2UnknownOperands()
190 __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rcx); in LoadSSE2UnknownOperands()
191 __ j(not_equal, not_numbers); // Argument in rdx is not a number. in LoadSSE2UnknownOperands()
192 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); in LoadSSE2UnknownOperands()
194 __ JumpIfSmi(rax, &load_smi_rax); in LoadSSE2UnknownOperands()
196 __ bind(&load_nonsmi_rax); in LoadSSE2UnknownOperands()
197 __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rcx); in LoadSSE2UnknownOperands()
198 __ j(not_equal, not_numbers); in LoadSSE2UnknownOperands()
199 __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); in LoadSSE2UnknownOperands()
200 __ jmp(&done); in LoadSSE2UnknownOperands()
202 __ bind(&load_smi_rdx); in LoadSSE2UnknownOperands()
203 __ SmiToInteger32(kScratchRegister, rdx); in LoadSSE2UnknownOperands()
204 __ Cvtlsi2sd(xmm0, kScratchRegister); in LoadSSE2UnknownOperands()
205 __ JumpIfNotSmi(rax, &load_nonsmi_rax); in LoadSSE2UnknownOperands()
207 __ bind(&load_smi_rax); in LoadSSE2UnknownOperands()
208 __ SmiToInteger32(kScratchRegister, rax); in LoadSSE2UnknownOperands()
209 __ Cvtlsi2sd(xmm1, kScratchRegister); in LoadSSE2UnknownOperands()
210 __ bind(&done); in LoadSSE2UnknownOperands()
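A hedged C++ sketch of the operand dispatch above (helper names are illustrative, not V8's API): each operand is either a Smi, converted with an integer-to-double move (the Cvtlsi2sd path), or a HeapNumber whose map is checked before its double payload is read; anything else takes the not_numbers exit.

    // Sketch only: TaggedValue, IsSmi, SmiToInt, MapOf, etc. are assumed helpers.
    bool LoadOperandAsDouble(TaggedValue v, double* out) {
      if (IsSmi(v)) {
        *out = static_cast<double>(SmiToInt(v));    // Cvtlsi2sd path
        return true;
      }
      if (MapOf(v) != kHeapNumberMap) return false; // jump to not_numbers
      *out = HeapNumberValue(v);                    // Movsd from kValueOffset
      return true;
    }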
226 __ movp(scratch, Immediate(1)); in Generate()
227 __ Cvtlsi2sd(double_result, scratch); in Generate()
230 __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear); in Generate()
231 __ SmiToInteger32(exponent, exponent); in Generate()
232 __ jmp(&int_exponent); in Generate()
234 __ bind(&exponent_not_smi); in Generate()
235 __ Movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset)); in Generate()
241 __ DoubleToI(exponent, double_exponent, double_scratch, in Generate()
245 __ jmp(&int_exponent); in Generate()
247 __ bind(&try_arithmetic_simplification); in Generate()
248 __ Cvttsd2si(exponent, double_exponent); in Generate()
250 __ cmpl(exponent, Immediate(0x1)); in Generate()
251 __ j(overflow, &call_runtime); in Generate()
255 __ bind(&fast_power); in Generate()
256 __ fnclex(); // Clear flags to catch exceptions later. in Generate()
258 __ subp(rsp, Immediate(kDoubleSize)); in Generate()
259 __ Movsd(Operand(rsp, 0), double_exponent); in Generate()
260 __ fld_d(Operand(rsp, 0)); // E in Generate()
261 __ Movsd(Operand(rsp, 0), double_base); in Generate()
262 __ fld_d(Operand(rsp, 0)); // B, E in Generate()
267 __ fyl2x(); // X in Generate()
268 __ fld(0); // X, X in Generate()
269 __ frndint(); // rnd(X), X in Generate()
270 __ fsub(1); // rnd(X), X-rnd(X) in Generate()
271 __ fxch(1); // X - rnd(X), rnd(X) in Generate()
273 __ f2xm1(); // 2^(X-rnd(X)) - 1, rnd(X) in Generate()
274 __ fld1(); // 1, 2^(X-rnd(X)) - 1, rnd(X) in Generate()
275 __ faddp(1); // 2^(X-rnd(X)), rnd(X) in Generate()
277 __ fscale(); // 2^X, rnd(X) in Generate()
278 __ fstp(1); in Generate()
280 __ fnstsw_ax(); in Generate()
281 __ testb(rax, Immediate(0x5F)); // Check for all but precision exception. in Generate()
282 __ j(not_zero, &fast_power_failed, Label::kNear); in Generate()
283 __ fstp_d(Operand(rsp, 0)); in Generate()
284 __ Movsd(double_result, Operand(rsp, 0)); in Generate()
285 __ addp(rsp, Immediate(kDoubleSize)); in Generate()
286 __ jmp(&done); in Generate()
288 __ bind(&fast_power_failed); in Generate()
289 __ fninit(); in Generate()
290 __ addp(rsp, Immediate(kDoubleSize)); in Generate()
291 __ jmp(&call_runtime); in Generate()
295 __ bind(&int_exponent); in Generate()
298 __ movp(scratch, exponent); // Back up exponent. in Generate()
299 __ Movsd(double_scratch, double_base); // Back up base. in Generate()
300 __ Movsd(double_scratch2, double_result); // Load double_exponent with 1. in Generate()
304 __ testl(scratch, scratch); in Generate()
305 __ j(positive, &no_neg, Label::kNear); in Generate()
306 __ negl(scratch); in Generate()
307 __ bind(&no_neg); in Generate()
309 __ j(zero, &while_false, Label::kNear); in Generate()
310 __ shrl(scratch, Immediate(1)); in Generate()
313 __ j(above, &while_true, Label::kNear); in Generate()
314 __ Movsd(double_result, double_scratch); in Generate()
315 __ j(zero, &while_false, Label::kNear); in Generate()
317 __ bind(&while_true); in Generate()
318 __ shrl(scratch, Immediate(1)); in Generate()
319 __ Mulsd(double_scratch, double_scratch); in Generate()
320 __ j(above, &while_true, Label::kNear); in Generate()
321 __ Mulsd(double_result, double_scratch); in Generate()
322 __ j(not_zero, &while_true); in Generate()
324 __ bind(&while_false); in Generate()
326 __ testl(exponent, exponent); in Generate()
327 __ j(greater, &done); in Generate()
328 __ Divsd(double_scratch2, double_result); in Generate()
329 __ Movsd(double_result, double_scratch2); in Generate()
332 __ Xorpd(double_scratch2, double_scratch2); in Generate()
333 __ Ucomisd(double_scratch2, double_result); in Generate()
337 __ j(not_equal, &done); in Generate()
338 __ Cvtlsi2sd(double_exponent, exponent); in Generate()
341 __ bind(&call_runtime); in Generate()
343 __ Movsd(xmm0, double_base); in Generate()
347 __ PrepareCallCFunction(2); in Generate()
348 __ CallCFunction(ExternalReference::power_double_double_function(isolate()), in Generate()
352 __ Movsd(double_result, xmm0); in Generate()
354 __ bind(&done); in Generate()
355 __ ret(0); in Generate()
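Two strategies appear above: the x87 fast path computes pow(B, E) as 2^(E*log2(B)) using fyl2x/f2xm1/fscale and bails out if any FPU exception other than precision fires, and the int_exponent path runs exponentiation by squaring. A hedged C++ equivalent of the integer loop (the stub additionally falls back to the C runtime when the result is 0, to get signed-zero and infinity cases right):

    double PowIntExponent(double base, int exponent) {
      double result = 1.0;                 // double_result starts at 1
      double scratch = base;               // double_scratch backs up the base
      unsigned e = exponent < 0 ? 0u - static_cast<unsigned>(exponent)
                                : static_cast<unsigned>(exponent);
      while (e != 0) {                     // square-and-multiply
        if (e & 1) result *= scratch;
        scratch *= scratch;
        e >>= 1;
      }
      if (exponent < 0) result = 1.0 / result;  // the Divsd(1.0, result) above
      return result;
    }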
369 __ bind(&miss); in Generate()
396 __ ret(0); in Generate()
401 __ bind(&miss); in Generate()
412 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
438 __ Load(kScratchRegister, address_of_regexp_stack_memory_size); in Generate()
439 __ testp(kScratchRegister, kScratchRegister); in Generate()
440 __ j(zero, &runtime); in Generate()
443 __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX)); in Generate()
444 __ JumpIfSmi(rax, &runtime); in Generate()
445 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister); in Generate()
446 __ j(not_equal, &runtime); in Generate()
449 __ movp(rax, FieldOperand(rax, JSRegExp::kDataOffset)); in Generate()
452 __ Check(NegateCondition(is_smi), in Generate()
454 __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister); in Generate()
455 __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected); in Generate()
460 __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset)); in Generate()
461 __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP)); in Generate()
462 __ j(not_equal, &runtime); in Generate()
466 __ SmiToInteger32(rdx, in Generate()
471 __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1)); in Generate()
472 __ j(above, &runtime); in Generate()
475 __ Set(r14, 0); in Generate()
476 __ movp(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX)); in Generate()
477 __ JumpIfSmi(rdi, &runtime); in Generate()
478 __ movp(r15, rdi); // Make a copy of the original subject string. in Generate()
504 __ bind(&check_underlying); in Generate()
505 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); in Generate()
506 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); in Generate()
509 __ andb(rbx, Immediate(kIsNotStringMask | in Generate()
514 __ j(zero, &seq_two_byte_string); // Go to (9). in Generate()
518 __ andb(rbx, Immediate(kIsNotStringMask | in Generate()
521 __ j(zero, &seq_one_byte_string, Label::kNear); // Go to (5). in Generate()
530 __ cmpp(rbx, Immediate(kExternalStringTag)); in Generate()
531 __ j(greater_equal, &not_seq_nor_cons); // Go to (6). in Generate()
535 __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset), in Generate()
537 __ j(not_equal, &runtime); in Generate()
538 __ movp(rdi, FieldOperand(rdi, ConsString::kFirstOffset)); in Generate()
539 __ jmp(&check_underlying); in Generate()
542 __ bind(&seq_one_byte_string); in Generate()
544 __ movp(r11, FieldOperand(rax, JSRegExp::kDataOneByteCodeOffset)); in Generate()
545 __ Set(rcx, 1); // Type is one byte. in Generate()
548 __ bind(&check_code); in Generate()
553 __ JumpIfSmi(r11, &runtime); in Generate()
564 __ movp(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX)); in Generate()
565 __ JumpIfNotSmi(rbx, &runtime); in Generate()
566 __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset)); in Generate()
567 __ j(above_equal, &runtime); in Generate()
568 __ SmiToInteger64(rbx, rbx); in Generate()
576 __ IncrementCounter(counters->regexp_entry_native(), 1); in Generate()
582 __ EnterApiExitFrame(argument_slots_on_stack); in Generate()
585 __ LoadAddress(kScratchRegister, in Generate()
587 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize), in Generate()
591 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize), in Generate()
595 __ Move(kScratchRegister, address_of_regexp_stack_memory_address); in Generate()
596 __ movp(r9, Operand(kScratchRegister, 0)); in Generate()
597 __ Move(kScratchRegister, address_of_regexp_stack_memory_size); in Generate()
598 __ addp(r9, Operand(kScratchRegister, 0)); in Generate()
599 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9); in Generate()
605 __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize), in Generate()
608 __ Set(r9, 0); in Generate()
612 __ LoadAddress( in Generate()
616 __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8); in Generate()
627 __ movp(arg_reg_2, rbx); in Generate()
634 __ addp(rbx, r14); in Generate()
635 __ SmiToInteger32(arg_reg_3, FieldOperand(r15, String::kLengthOffset)); in Generate()
636 __ addp(r14, arg_reg_3); // Using arg3 as scratch. in Generate()
641 __ testb(rcx, rcx); // Last use of rcx as encoding of subject string. in Generate()
642 __ j(zero, &setup_two_byte, Label::kNear); in Generate()
643 __ leap(arg_reg_4, in Generate()
645 __ leap(arg_reg_3, in Generate()
647 __ jmp(&setup_rest, Label::kNear); in Generate()
648 __ bind(&setup_two_byte); in Generate()
649 __ leap(arg_reg_4, in Generate()
651 __ leap(arg_reg_3, in Generate()
653 __ bind(&setup_rest); in Generate()
660 __ movp(arg_reg_1, r15); in Generate()
663 __ addp(r11, Immediate(Code::kHeaderSize - kHeapObjectTag)); in Generate()
664 __ call(r11); in Generate()
666 __ LeaveApiExitFrame(true); in Generate()
671 __ cmpl(rax, Immediate(1)); in Generate()
674 __ j(equal, &success, Label::kNear); in Generate()
675 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION)); in Generate()
676 __ j(equal, &exception); in Generate()
677 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE)); in Generate()
680 __ j(not_equal, &runtime); in Generate()
683 __ LoadRoot(rax, Heap::kNullValueRootIndex); in Generate()
684 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize); in Generate()
687 __ bind(&success); in Generate()
688 __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX)); in Generate()
689 __ movp(rcx, FieldOperand(rax, JSRegExp::kDataOffset)); in Generate()
690 __ SmiToInteger32(rax, in Generate()
693 __ leal(rdx, Operand(rax, rax, times_1, 2)); in Generate()
697 __ movp(rbx, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX)); in Generate()
698 __ JumpIfSmi(rbx, &runtime); in Generate()
700 __ movp(rax, FieldOperand(rbx, HeapObject::kMapOffset)); in Generate()
701 __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex); in Generate()
702 __ j(not_equal, &runtime); in Generate()
706 __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset)); in Generate()
707 __ subl(rax, Immediate(RegExpMatchInfo::kLastMatchOverhead)); in Generate()
708 __ cmpl(rdx, rax); in Generate()
709 __ j(greater, &runtime); in Generate()
714 __ Integer32ToSmi(kScratchRegister, rdx); in Generate()
715 __ movp(FieldOperand(rbx, RegExpMatchInfo::kNumberOfCapturesOffset), in Generate()
718 __ movp(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX)); in Generate()
719 __ movp(FieldOperand(rbx, RegExpMatchInfo::kLastSubjectOffset), rax); in Generate()
720 __ movp(rcx, rax); in Generate()
721 __ RecordWriteField(rbx, RegExpMatchInfo::kLastSubjectOffset, rax, rdi, in Generate()
723 __ movp(rax, rcx); in Generate()
724 __ movp(FieldOperand(rbx, RegExpMatchInfo::kLastInputOffset), rax); in Generate()
725 __ RecordWriteField(rbx, RegExpMatchInfo::kLastInputOffset, rax, rdi, in Generate()
729 __ LoadAddress( in Generate()
738 __ bind(&next_capture); in Generate()
739 __ subp(rdx, Immediate(1)); in Generate()
740 __ j(negative, &done, Label::kNear); in Generate()
742 __ movl(rdi, Operand(rcx, rdx, times_int_size, 0)); in Generate()
743 __ Integer32ToSmi(rdi, rdi); in Generate()
745 __ movp(FieldOperand(rbx, rdx, times_pointer_size, in Generate()
748 __ jmp(&next_capture); in Generate()
749 __ bind(&done); in Generate()
752 __ movp(rax, rbx); in Generate()
753 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize); in Generate()
755 __ bind(&exception); in Generate()
764 __ movp(rax, pending_exception_operand); in Generate()
765 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex); in Generate()
766 __ cmpp(rax, rdx); in Generate()
767 __ j(equal, &runtime); in Generate()
770 __ TailCallRuntime(Runtime::kRegExpExecReThrow); in Generate()
773 __ bind(&runtime); in Generate()
774 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
778 __ bind(&not_seq_nor_cons); in Generate()
780 __ j(greater, &not_long_external, Label::kNear); // Go to (10). in Generate()
783 __ bind(&external_string); in Generate()
784 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); in Generate()
785 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); in Generate()
789 __ testb(rbx, Immediate(kIsIndirectStringMask)); in Generate()
790 __ Assert(zero, kExternalStringExpectedButNotFound); in Generate()
792 __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset)); in Generate()
795 __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); in Generate()
798 __ testb(rbx, Immediate(kStringEncodingMask)); in Generate()
799 __ j(not_zero, &seq_one_byte_string); // Go to (5). in Generate()
804 __ bind(&seq_two_byte_string); in Generate()
805 __ movp(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset)); in Generate()
806 __ Set(rcx, 0); // Type is two byte. in Generate()
807 __ jmp(&check_code); // Go to (E). in Generate()
810 __ bind(&not_long_external); in Generate()
813 __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask)); in Generate()
814 __ j(not_zero, &runtime); in Generate()
818 __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset)); in Generate()
819 __ movp(rdi, FieldOperand(rdi, SlicedString::kParentOffset)); in Generate()
820 __ jmp(&check_underlying); in Generate()
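A hedged sketch of the check_underlying unwrapping that the numbered jumps above implement (names illustrative, not V8's API): the subject string is repeatedly peeled until it is flat sequential or external storage, and anything irregular goes to the runtime.

    // Sketch only. A slice's parent is never itself a slice, so the offset
    // is recorded once (r14 above) rather than accumulated.
    for (;;) {
      if (IsSequentialString(s)) break;                 // ready to match
      if (IsConsString(s)) {
        if (Second(s) != empty_string) return Runtime(); // non-flat cons
        s = First(s);                                    // continue with first half
      } else if (IsSlicedString(s)) {
        offset = SliceOffset(s);                         // remember the slice start
        s = Parent(s);
      } else if (IsLongExternalString(s)) {
        data = ResourceData(s) - kSeqHeaderAdjust;       // fake a sequential layout
        break;                                           // (the subp above)
      } else {
        return Runtime();                                // short external, etc.
      }
    }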
837 __ JumpIfNotSmi(input, fail); in CheckInputType()
839 __ JumpIfSmi(input, &ok); in CheckInputType()
840 __ CompareMap(input, masm->isolate()->factory()->heap_number_map()); in CheckInputType()
841 __ j(not_equal, fail); in CheckInputType()
845 __ bind(&ok); in CheckInputType()
853 __ JumpIfSmi(object, label); in BranchIfNotInternalizedString()
854 __ movp(scratch, FieldOperand(object, HeapObject::kMapOffset)); in BranchIfNotInternalizedString()
855 __ movzxbp(scratch, in BranchIfNotInternalizedString()
858 __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); in BranchIfNotInternalizedString()
859 __ j(not_zero, label); in BranchIfNotInternalizedString()
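The single testb above works because of how V8 encodes instance types: being a non-string and being non-internalized are both "bit set" conditions, so internalized strings are exactly the types where the combined mask comes out zero. A hedged one-liner using V8's constants:

    bool IsInternalizedString(uint32_t instance_type) {
      // Both bits must be clear: it is a string AND it is internalized.
      return (instance_type & (kIsNotStringMask | kIsNotInternalizedMask)) == 0;
    }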
874 __ JumpIfNotBothSmi(rax, rdx, &non_smi); in GenerateGeneric()
875 __ subp(rdx, rax); in GenerateGeneric()
876 __ j(no_overflow, &smi_done); in GenerateGeneric()
877 __ notp(rdx); // Correct sign in case of overflow. rdx cannot be 0 here. in GenerateGeneric()
878 __ bind(&smi_done); in GenerateGeneric()
879 __ movp(rax, rdx); in GenerateGeneric()
880 __ ret(0); in GenerateGeneric()
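The Smi fast path above compares by subtraction and repairs the sign with a NOT when the subtraction overflows. A hedged C++ equivalent (on x64 a Smi carries its payload in the upper 32 bits, so the overflowing difference is a nonzero multiple of 2^32 and ~diff cannot collapse to zero):

    #include <cstdint>

    int64_t SmiCompare(int64_t lhs, int64_t rhs) {
      int64_t diff;
      if (!__builtin_sub_overflow(lhs, rhs, &diff)) return diff;  // GCC/Clang builtin
      return ~diff;  // overflow: flipped sign is correct, value stays nonzero
    }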
881 __ bind(&non_smi); in GenerateGeneric()
891 __ cmpp(rax, rdx); in GenerateGeneric()
892 __ j(not_equal, &not_identical, Label::kNear); in GenerateGeneric()
897 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex); in GenerateGeneric()
899 __ j(not_equal, &check_for_nan, Label::kNear); in GenerateGeneric()
900 __ Set(rax, NegativeComparisonResult(cc)); in GenerateGeneric()
901 __ ret(0); in GenerateGeneric()
902 __ bind(&check_for_nan); in GenerateGeneric()
909 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), in GenerateGeneric()
911 __ j(equal, &heap_number, Label::kNear); in GenerateGeneric()
913 __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset)); in GenerateGeneric()
914 __ movzxbl(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset)); in GenerateGeneric()
916 __ cmpb(rcx, Immediate(static_cast<uint8_t>(FIRST_JS_RECEIVER_TYPE))); in GenerateGeneric()
917 __ j(above_equal, &runtime_call, Label::kFar); in GenerateGeneric()
919 __ cmpb(rcx, Immediate(static_cast<uint8_t>(SYMBOL_TYPE))); in GenerateGeneric()
920 __ j(equal, &runtime_call, Label::kFar); in GenerateGeneric()
922 __ cmpb(rcx, Immediate(static_cast<uint8_t>(SIMD128_VALUE_TYPE))); in GenerateGeneric()
923 __ j(equal, &runtime_call, Label::kFar); in GenerateGeneric()
925 __ Set(rax, EQUAL); in GenerateGeneric()
926 __ ret(0); in GenerateGeneric()
928 __ bind(&heap_number); in GenerateGeneric()
933 __ Set(rax, EQUAL); in GenerateGeneric()
934 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); in GenerateGeneric()
935 __ Ucomisd(xmm0, xmm0); in GenerateGeneric()
936 __ setcc(parity_even, rax); in GenerateGeneric()
939 __ negp(rax); in GenerateGeneric()
941 __ ret(0); in GenerateGeneric()
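For identical heap numbers the only hazard is NaN: ucomisd of a value against itself raises the parity flag exactly when the value is unordered, and setcc turns that into a 0/1 on top of the preloaded EQUAL; the stub then negates the result for one comparison direction so NaN fails both orderings. A hedged restatement of the NaN test itself:

    bool IsNaNSelfCompare(double v) {
      return v != v;   // mirrors setcc(parity_even) after ucomisd(v, v)
    }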
943 __ bind(&not_identical); in GenerateGeneric()
958 __ SelectNonSmi(rbx, rax, rdx, &not_smis); in GenerateGeneric()
961 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), in GenerateGeneric()
964 __ j(equal, &slow); in GenerateGeneric()
966 __ movp(rax, rbx); in GenerateGeneric()
967 __ ret(0); in GenerateGeneric()
969 __ bind(&not_smis); in GenerateGeneric()
979 __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx); in GenerateGeneric()
980 __ j(below, &first_non_object, Label::kNear); in GenerateGeneric()
984 __ bind(&return_not_equal); in GenerateGeneric()
985 __ ret(0); in GenerateGeneric()
987 __ bind(&first_non_object); in GenerateGeneric()
989 __ CmpInstanceType(rcx, ODDBALL_TYPE); in GenerateGeneric()
990 __ j(equal, &return_not_equal); in GenerateGeneric()
992 __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx); in GenerateGeneric()
993 __ j(above_equal, &return_not_equal); in GenerateGeneric()
996 __ CmpInstanceType(rcx, ODDBALL_TYPE); in GenerateGeneric()
997 __ j(equal, &return_not_equal); in GenerateGeneric()
1001 __ bind(&slow); in GenerateGeneric()
1008 __ xorl(rax, rax); in GenerateGeneric()
1009 __ xorl(rcx, rcx); in GenerateGeneric()
1010 __ Ucomisd(xmm0, xmm1); in GenerateGeneric()
1013 __ j(parity_even, &unordered, Label::kNear); in GenerateGeneric()
1015 __ setcc(above, rax); in GenerateGeneric()
1016 __ setcc(below, rcx); in GenerateGeneric()
1017 __ subp(rax, rcx); in GenerateGeneric()
1018 __ ret(0); in GenerateGeneric()
1022 __ bind(&unordered); in GenerateGeneric()
1025 __ Set(rax, 1); in GenerateGeneric()
1027 __ Set(rax, -1); in GenerateGeneric()
1029 __ ret(0); in GenerateGeneric()
1032 __ bind(&non_number_comparison); in GenerateGeneric()
1045 __ ret(0); in GenerateGeneric()
1048 __ bind(&check_for_strings); in GenerateGeneric()
1050 __ JumpIfNotBothSequentialOneByteStrings(rdx, rax, rcx, rbx, in GenerateGeneric()
1062 __ Abort(kUnexpectedFallThroughFromStringComparison); in GenerateGeneric()
1065 __ bind(&check_unequal_objects); in GenerateGeneric()
1076 __ leap(rcx, Operand(rax, rdx, times_1, 0)); in GenerateGeneric()
1077 __ testb(rcx, Immediate(kSmiTagMask)); in GenerateGeneric()
1078 __ j(not_zero, &runtime_call, Label::kNear); in GenerateGeneric()
1080 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset)); in GenerateGeneric()
1081 __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset)); in GenerateGeneric()
1082 __ testb(FieldOperand(rbx, Map::kBitFieldOffset), in GenerateGeneric()
1084 __ j(not_zero, &undetectable, Label::kNear); in GenerateGeneric()
1085 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), in GenerateGeneric()
1087 __ j(not_zero, &return_unequal, Label::kNear); in GenerateGeneric()
1089 __ CmpInstanceType(rbx, FIRST_JS_RECEIVER_TYPE); in GenerateGeneric()
1090 __ j(below, &runtime_call, Label::kNear); in GenerateGeneric()
1091 __ CmpInstanceType(rcx, FIRST_JS_RECEIVER_TYPE); in GenerateGeneric()
1092 __ j(below, &runtime_call, Label::kNear); in GenerateGeneric()
1094 __ bind(&return_unequal); in GenerateGeneric()
1096 __ ret(0); in GenerateGeneric()
1098 __ bind(&undetectable); in GenerateGeneric()
1099 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), in GenerateGeneric()
1101 __ j(zero, &return_unequal, Label::kNear); in GenerateGeneric()
1106 __ CmpInstanceType(rbx, ODDBALL_TYPE); in GenerateGeneric()
1107 __ j(zero, &return_equal, Label::kNear); in GenerateGeneric()
1108 __ CmpInstanceType(rcx, ODDBALL_TYPE); in GenerateGeneric()
1109 __ j(not_zero, &return_unequal, Label::kNear); in GenerateGeneric()
1111 __ bind(&return_equal); in GenerateGeneric()
1112 __ Set(rax, EQUAL); in GenerateGeneric()
1113 __ ret(0); in GenerateGeneric()
1115 __ bind(&runtime_call); in GenerateGeneric()
1120 __ Push(rdx); in GenerateGeneric()
1121 __ Push(rax); in GenerateGeneric()
1122 __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual); in GenerateGeneric()
1126 __ LoadRoot(rdx, Heap::kTrueValueRootIndex); in GenerateGeneric()
1127 __ subp(rax, rdx); in GenerateGeneric()
1128 __ Ret(); in GenerateGeneric()
1131 __ PopReturnAddressTo(rcx); in GenerateGeneric()
1132 __ Push(rdx); in GenerateGeneric()
1133 __ Push(rax); in GenerateGeneric()
1134 __ Push(Smi::FromInt(NegativeComparisonResult(cc))); in GenerateGeneric()
1135 __ PushReturnAddressFrom(rcx); in GenerateGeneric()
1136 __ TailCallRuntime(Runtime::kCompare); in GenerateGeneric()
1139 __ bind(&miss); in GenerateGeneric()
1152 __ Integer32ToSmi(rax, rax); in CallStubInRecordCallTarget()
1153 __ Push(rax); in CallStubInRecordCallTarget()
1154 __ Push(rdi); in CallStubInRecordCallTarget()
1155 __ Integer32ToSmi(rdx, rdx); in CallStubInRecordCallTarget()
1156 __ Push(rdx); in CallStubInRecordCallTarget()
1157 __ Push(rbx); in CallStubInRecordCallTarget()
1158 __ Push(rsi); in CallStubInRecordCallTarget()
1160 __ CallStub(stub); in CallStubInRecordCallTarget()
1162 __ Pop(rsi); in CallStubInRecordCallTarget()
1163 __ Pop(rbx); in CallStubInRecordCallTarget()
1164 __ Pop(rdx); in CallStubInRecordCallTarget()
1165 __ Pop(rdi); in CallStubInRecordCallTarget()
1166 __ Pop(rax); in CallStubInRecordCallTarget()
1167 __ SmiToInteger32(rdx, rdx); in CallStubInRecordCallTarget()
1168 __ SmiToInteger32(rax, rax); in CallStubInRecordCallTarget()
1184 __ SmiToInteger32(rdx, rdx); in GenerateRecordCallTarget()
1185 __ movp(r11, in GenerateRecordCallTarget()
1194 __ cmpp(rdi, FieldOperand(r11, WeakCell::kValueOffset)); in GenerateRecordCallTarget()
1195 __ j(equal, &done, Label::kFar); in GenerateRecordCallTarget()
1196 __ CompareRoot(r11, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1197 __ j(equal, &done, Label::kFar); in GenerateRecordCallTarget()
1198 __ CompareRoot(FieldOperand(r11, HeapObject::kMapOffset), in GenerateRecordCallTarget()
1200 __ j(not_equal, &check_allocation_site); in GenerateRecordCallTarget()
1203 __ CheckSmi(FieldOperand(r11, WeakCell::kValueOffset)); in GenerateRecordCallTarget()
1204 __ j(equal, &initialize); in GenerateRecordCallTarget()
1205 __ jmp(&megamorphic); in GenerateRecordCallTarget()
1207 __ bind(&check_allocation_site); in GenerateRecordCallTarget()
1212 __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex); in GenerateRecordCallTarget()
1213 __ j(not_equal, &miss); in GenerateRecordCallTarget()
1216 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11); in GenerateRecordCallTarget()
1217 __ cmpp(rdi, r11); in GenerateRecordCallTarget()
1218 __ j(not_equal, &megamorphic); in GenerateRecordCallTarget()
1219 __ jmp(&done); in GenerateRecordCallTarget()
1221 __ bind(&miss); in GenerateRecordCallTarget()
1225 __ CompareRoot(r11, Heap::kuninitialized_symbolRootIndex); in GenerateRecordCallTarget()
1226 __ j(equal, &initialize); in GenerateRecordCallTarget()
1229 __ bind(&megamorphic); in GenerateRecordCallTarget()
1230 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), in GenerateRecordCallTarget()
1232 __ jmp(&done); in GenerateRecordCallTarget()
1236 __ bind(&initialize); in GenerateRecordCallTarget()
1239 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11); in GenerateRecordCallTarget()
1240 __ cmpp(rdi, r11); in GenerateRecordCallTarget()
1241 __ j(not_equal, &not_array_function); in GenerateRecordCallTarget()
1245 __ jmp(&done); in GenerateRecordCallTarget()
1247 __ bind(&not_array_function); in GenerateRecordCallTarget()
1251 __ bind(&done); in GenerateRecordCallTarget()
1253 __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size, in GenerateRecordCallTarget()
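Read as a state machine, the feedback slot above moves monotonically toward megamorphic, and the truncated SmiAddConstant line bumps the slot's call count afterwards. A hedged sketch of the decision (types and helper names are illustrative, not V8's API):

    // Slot states: uninitialized sentinel, WeakCell(target), AllocationSite
    // (Array function only), or the megamorphic sentinel.
    SlotAction Decide(const Slot& slot, JSFunction* target) {
      if (slot.IsWeakCellFor(target) || slot.IsMegamorphic()) return kNothingToDo;
      if (slot.IsWeakCell())                  // cell exists but target differs
        return slot.IsCleared() ? kReinitialize : kGoMegamorphic;
      if (slot.IsAllocationSite())            // only valid for the Array function
        return target == ArrayFunction() ? kNothingToDo : kGoMegamorphic;
      if (slot.IsUninitialized())             // first call: record the target
        return target == ArrayFunction() ? kCreateAllocationSite : kCreateWeakCell;
      return kGoMegamorphic;
    }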
1267 __ JumpIfSmi(rdi, &non_function); in Generate()
1269 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, r11); in Generate()
1270 __ j(not_equal, &non_function); in Generate()
1276 __ movp(rbx, in Generate()
1278 __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex); in Generate()
1279 __ j(equal, &feedback_register_initialized, Label::kNear); in Generate()
1280 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); in Generate()
1281 __ bind(&feedback_register_initialized); in Generate()
1283 __ AssertUndefinedOrAllocationSite(rbx); in Generate()
1286 __ movp(rdx, rdi); in Generate()
1290 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); in Generate()
1291 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset)); in Generate()
1292 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize)); in Generate()
1293 __ jmp(rcx); in Generate()
1295 __ bind(&non_function); in Generate()
1296 __ movp(rdx, rdi); in Generate()
1297 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate()
1302 __ SmiAddConstant(FieldOperand(feedback_vector, slot, times_pointer_size, in IncrementCallCount()
1312 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8); in HandleArrayCase()
1313 __ cmpp(rdi, r8); in HandleArrayCase()
1314 __ j(not_equal, miss); in HandleArrayCase()
1319 __ movp(rbx, rcx); in HandleArrayCase()
1320 __ movp(rdx, rdi); in HandleArrayCase()
1322 __ TailCallStub(&stub); in HandleArrayCase()
1337 __ SmiToInteger32(rdx, rdx); in Generate()
1338 __ movp(rcx, in Generate()
1355 __ cmpp(rdi, FieldOperand(rcx, WeakCell::kValueOffset)); in Generate()
1356 __ j(not_equal, &extra_checks_or_miss); in Generate()
1360 __ JumpIfSmi(rdi, &extra_checks_or_miss); in Generate()
1362 __ bind(&call_function); in Generate()
1366 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), in Generate()
1370 __ bind(&extra_checks_or_miss); in Generate()
1373 __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate)); in Generate()
1374 __ j(equal, &call); in Generate()
1377 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), in Generate()
1379 __ j(not_equal, &not_allocation_site); in Generate()
1384 __ bind(&not_allocation_site); in Generate()
1389 __ jmp(&miss); in Generate()
1392 __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate)); in Generate()
1393 __ j(equal, &uninitialized); in Generate()
1397 __ AssertNotSmi(rcx); in Generate()
1398 __ CmpObjectType(rcx, JS_FUNCTION_TYPE, rcx); in Generate()
1399 __ j(not_equal, &miss); in Generate()
1400 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), in Generate()
1403 __ bind(&call); in Generate()
1408 __ bind(&call_count_incremented); in Generate()
1409 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), in Generate()
1412 __ bind(&uninitialized); in Generate()
1415 __ JumpIfSmi(rdi, &miss); in Generate()
1418 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); in Generate()
1419 __ j(not_equal, &miss); in Generate()
1423 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rcx); in Generate()
1424 __ cmpp(rdi, rcx); in Generate()
1425 __ j(equal, &miss); in Generate()
1428 __ movp(rcx, FieldOperand(rdi, JSFunction::kContextOffset)); in Generate()
1429 __ movp(rcx, ContextOperand(rcx, Context::NATIVE_CONTEXT_INDEX)); in Generate()
1430 __ cmpp(rcx, NativeContextOperand()); in Generate()
1431 __ j(not_equal, &miss); in Generate()
1441 __ Integer32ToSmi(rax, rax); in Generate()
1442 __ Integer32ToSmi(rdx, rdx); in Generate()
1443 __ Push(rax); in Generate()
1444 __ Push(rbx); in Generate()
1445 __ Push(rdx); in Generate()
1446 __ Push(rdi); in Generate()
1447 __ Push(rsi); in Generate()
1448 __ CallStub(&create_stub); in Generate()
1449 __ Pop(rsi); in Generate()
1450 __ Pop(rdi); in Generate()
1451 __ Pop(rdx); in Generate()
1452 __ Pop(rbx); in Generate()
1453 __ Pop(rax); in Generate()
1454 __ SmiToInteger32(rdx, rdx); in Generate()
1455 __ SmiToInteger32(rax, rax); in Generate()
1458 __ jmp(&call_function); in Generate()
1462 __ bind(&miss); in Generate()
1465 __ jmp(&call_count_incremented); in Generate()
1468 __ int3(); in Generate()
1475 __ Integer32ToSmi(rax, rax); in GenerateMiss()
1476 __ Push(rax); in GenerateMiss()
1479 __ Integer32ToSmi(rdx, rdx); in GenerateMiss()
1480 __ Push(rdi); in GenerateMiss()
1481 __ Push(rbx); in GenerateMiss()
1482 __ Push(rdx); in GenerateMiss()
1485 __ CallRuntime(Runtime::kCallIC_Miss); in GenerateMiss()
1488 __ movp(rdi, rax); in GenerateMiss()
1491 __ Pop(rax); in GenerateMiss()
1492 __ SmiToInteger32(rax, rax); in GenerateMiss()
1569 __ EnterApiExitFrame(arg_stack_space); in Generate()
1571 __ movp(r14, rax); in Generate()
1573 __ EnterExitFrame( in Generate()
1586 __ CheckStackAlignment(); in Generate()
1594 __ movp(kCCallArg0, r14); // argc. in Generate()
1595 __ movp(kCCallArg1, r15); // argv. in Generate()
1596 __ Move(kCCallArg2, ExternalReference::isolate_address(isolate())); in Generate()
1600 __ leap(kCCallArg0, StackSpaceOperand(kArgExtraStackSpace)); in Generate()
1602 __ movp(kCCallArg1, r14); // argc. in Generate()
1603 __ movp(kCCallArg2, r15); // argv. in Generate()
1604 __ Move(kCCallArg3, ExternalReference::isolate_address(isolate())); in Generate()
1606 __ call(rbx); in Generate()
1612 __ movq(kReturnRegister0, StackSpaceOperand(kArgExtraStackSpace + 0)); in Generate()
1613 __ movq(kReturnRegister1, StackSpaceOperand(kArgExtraStackSpace + 1)); in Generate()
1615 __ movq(kReturnRegister2, StackSpaceOperand(kArgExtraStackSpace + 2)); in Generate()
1622 __ CompareRoot(rax, Heap::kExceptionRootIndex); in Generate()
1623 __ j(equal, &exception_returned); in Generate()
1629 __ LoadRoot(r14, Heap::kTheHoleValueRootIndex); in Generate()
1634 __ cmpp(r14, pending_exception_operand); in Generate()
1635 __ j(equal, &okay, Label::kNear); in Generate()
1636 __ int3(); in Generate()
1637 __ bind(&okay); in Generate()
1641 __ LeaveExitFrame(save_doubles(), !argv_in_register()); in Generate()
1642 __ ret(0); in Generate()
1645 __ bind(&exception_returned); in Generate()
1664 __ movp(arg_reg_1, Immediate(0)); // argc. in Generate()
1665 __ movp(arg_reg_2, Immediate(0)); // argv. in Generate()
1666 __ Move(arg_reg_3, ExternalReference::isolate_address(isolate())); in Generate()
1667 __ PrepareCallCFunction(3); in Generate()
1668 __ CallCFunction(find_handler, 3); in Generate()
1672 __ movp(rsi, masm->ExternalOperand(pending_handler_context_address)); in Generate()
1673 __ movp(rsp, masm->ExternalOperand(pending_handler_sp_address)); in Generate()
1674 __ movp(rbp, masm->ExternalOperand(pending_handler_fp_address)); in Generate()
1679 __ testp(rsi, rsi); in Generate()
1680 __ j(zero, &skip, Label::kNear); in Generate()
1681 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi); in Generate()
1682 __ bind(&skip); in Generate()
1685 __ movp(rdi, masm->ExternalOperand(pending_handler_code_address)); in Generate()
1686 __ movp(rdx, masm->ExternalOperand(pending_handler_offset_address)); in Generate()
1687 __ leap(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize)); in Generate()
1688 __ jmp(rdi); in Generate()
1701 __ pushq(rbp); in Generate()
1702 __ movp(rbp, rsp); in Generate()
1706 __ Push(Smi::FromInt(marker)); // context slot in Generate()
1708 __ Load(kScratchRegister, context_address); in Generate()
1709 __ Push(kScratchRegister); // context in Generate()
1711 __ pushq(r12); in Generate()
1712 __ pushq(r13); in Generate()
1713 __ pushq(r14); in Generate()
1714 __ pushq(r15); in Generate()
1716 __ pushq(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI. in Generate()
1717 __ pushq(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI. in Generate()
1719 __ pushq(rbx); in Generate()
1723 __ subp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize)); in Generate()
1724 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6); in Generate()
1725 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7); in Generate()
1726 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8); in Generate()
1727 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9); in Generate()
1728 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10); in Generate()
1729 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11); in Generate()
1730 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12); in Generate()
1731 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13); in Generate()
1732 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14); in Generate()
1733 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15); in Generate()
1738 __ InitializeRootRegister(); in Generate()
1745 __ Push(c_entry_fp_operand); in Generate()
1750 __ Load(rax, js_entry_sp); in Generate()
1751 __ testp(rax, rax); in Generate()
1752 __ j(not_zero, &not_outermost_js); in Generate()
1753 __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); in Generate()
1754 __ movp(rax, rbp); in Generate()
1755 __ Store(js_entry_sp, rax); in Generate()
1757 __ jmp(&cont); in Generate()
1758 __ bind(&not_outermost_js); in Generate()
1759 __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)); in Generate()
1760 __ bind(&cont); in Generate()
1764 __ jmp(&invoke); in Generate()
1765 __ bind(&handler_entry); in Generate()
1771 __ Store(pending_exception, rax); in Generate()
1772 __ LoadRoot(rax, Heap::kExceptionRootIndex); in Generate()
1773 __ jmp(&exit); in Generate()
1776 __ bind(&invoke); in Generate()
1777 __ PushStackHandler(); in Generate()
1780 __ Push(Immediate(0)); // receiver in Generate()
1790 __ Load(rax, construct_entry); in Generate()
1793 __ Load(rax, entry); in Generate()
1795 __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize)); in Generate()
1796 __ call(kScratchRegister); in Generate()
1799 __ PopStackHandler(); in Generate()
1801 __ bind(&exit); in Generate()
1803 __ Pop(rbx); in Generate()
1804 __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); in Generate()
1805 __ j(not_equal, &not_outermost_js_2); in Generate()
1806 __ Move(kScratchRegister, js_entry_sp); in Generate()
1807 __ movp(Operand(kScratchRegister, 0), Immediate(0)); in Generate()
1808 __ bind(&not_outermost_js_2); in Generate()
1812 __ Pop(c_entry_fp_operand); in Generate()
1818 __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0)); in Generate()
1819 __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1)); in Generate()
1820 __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2)); in Generate()
1821 __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3)); in Generate()
1822 __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4)); in Generate()
1823 __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5)); in Generate()
1824 __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6)); in Generate()
1825 __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7)); in Generate()
1826 __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8)); in Generate()
1827 __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9)); in Generate()
1828 __ addp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize)); in Generate()
1831 __ popq(rbx); in Generate()
1834 __ popq(rsi); in Generate()
1835 __ popq(rdi); in Generate()
1837 __ popq(r15); in Generate()
1838 __ popq(r14); in Generate()
1839 __ popq(r13); in Generate()
1840 __ popq(r12); in Generate()
1841 __ addp(rsp, Immediate(2 * kPointerSize)); // remove markers in Generate()
1844 __ popq(rbp); in Generate()
1845 __ ret(0); in Generate()
1855 __ JumpIfSmi(object_, receiver_not_string_); in GenerateFast()
1858 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset)); in GenerateFast()
1859 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); in GenerateFast()
1861 __ testb(result_, Immediate(kIsNotStringMask)); in GenerateFast()
1862 __ j(not_zero, receiver_not_string_); in GenerateFast()
1866 __ JumpIfNotSmi(index_, &index_not_smi_); in GenerateFast()
1867 __ bind(&got_smi_index_); in GenerateFast()
1870 __ SmiCompare(index_, FieldOperand(object_, String::kLengthOffset)); in GenerateFast()
1871 __ j(above_equal, index_out_of_range_); in GenerateFast()
1873 __ SmiToInteger32(index_, index_); in GenerateFast()
1878 __ Integer32ToSmi(result_, result_); in GenerateFast()
1879 __ bind(&exit_); in GenerateFast()
1886 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); in GenerateSlow()
1890 __ bind(&index_not_smi_); in GenerateSlow()
1892 __ CheckMap(index_, in GenerateSlow()
1898 __ Push(LoadWithVectorDescriptor::VectorRegister()); in GenerateSlow()
1899 __ Push(LoadDescriptor::SlotRegister()); in GenerateSlow()
1901 __ Push(object_); in GenerateSlow()
1902 __ Push(index_); // Consumed by runtime conversion function. in GenerateSlow()
1903 __ CallRuntime(Runtime::kNumberToSmi); in GenerateSlow()
1907 __ movp(index_, rax); in GenerateSlow()
1909 __ Pop(object_); in GenerateSlow()
1911 __ Pop(LoadDescriptor::SlotRegister()); in GenerateSlow()
1912 __ Pop(LoadWithVectorDescriptor::VectorRegister()); in GenerateSlow()
1915 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset)); in GenerateSlow()
1916 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); in GenerateSlow()
1919 __ JumpIfNotSmi(index_, index_out_of_range_); in GenerateSlow()
1921 __ jmp(&got_smi_index_); in GenerateSlow()
1926 __ bind(&call_runtime_); in GenerateSlow()
1928 __ Push(object_); in GenerateSlow()
1929 __ Integer32ToSmi(index_, index_); in GenerateSlow()
1930 __ Push(index_); in GenerateSlow()
1931 __ CallRuntime(Runtime::kStringCharCodeAtRT); in GenerateSlow()
1933 __ movp(result_, rax); in GenerateSlow()
1936 __ jmp(&exit_); in GenerateSlow()
1938 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); in GenerateSlow()
1947 __ JumpIfNotSmi(code_, &slow_case_); in GenerateFast()
1948 __ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode)); in GenerateFast()
1949 __ j(above, &slow_case_); in GenerateFast()
1951 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); in GenerateFast()
1953 __ movp(result_, FieldOperand(result_, index.reg, index.scale, in GenerateFast()
1955 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex); in GenerateFast()
1956 __ j(equal, &slow_case_); in GenerateFast()
1957 __ bind(&exit_); in GenerateFast()
1964 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); in GenerateSlow()
1966 __ bind(&slow_case_); in GenerateSlow()
1968 __ Push(code_); in GenerateSlow()
1969 __ CallRuntime(Runtime::kStringCharFromCode); in GenerateSlow()
1971 __ movp(result_, rax); in GenerateSlow()
1974 __ jmp(&exit_); in GenerateSlow()
1976 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); in GenerateSlow()
1987 __ testl(count, count); in GenerateCopyCharacters()
1988 __ j(zero, &done, Label::kNear); in GenerateCopyCharacters()
1993 __ addl(count, count); in GenerateCopyCharacters()
1998 __ bind(&loop); in GenerateCopyCharacters()
1999 __ movb(kScratchRegister, Operand(src, 0)); in GenerateCopyCharacters()
2000 __ movb(Operand(dest, 0), kScratchRegister); in GenerateCopyCharacters()
2001 __ incp(src); in GenerateCopyCharacters()
2002 __ incp(dest); in GenerateCopyCharacters()
2003 __ decl(count); in GenerateCopyCharacters()
2004 __ j(not_zero, &loop); in GenerateCopyCharacters()
2006 __ bind(&done); in GenerateCopyCharacters()
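A hedged C equivalent of the copy loop above: the stub always copies bytes, and for two-byte strings the count is doubled first (the encoding branch itself is not part of this listing):

    #include <cstdint>

    void CopyCharacters(uint8_t* dest, const uint8_t* src, int count, bool two_byte) {
      if (count == 0) return;          // testl / j(zero, &done)
      if (two_byte) count += count;    // addl(count, count): bytes, not characters
      do {
        *dest++ = *src++;              // movb load/store through kScratchRegister
      } while (--count != 0);          // decl / j(not_zero, &loop)
    }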
2019 __ movp(length, FieldOperand(left, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
2020 __ SmiCompare(length, FieldOperand(right, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
2021 __ j(equal, &check_zero_length, Label::kNear); in GenerateFlatOneByteStringEquals()
2022 __ Move(rax, Smi::FromInt(NOT_EQUAL)); in GenerateFlatOneByteStringEquals()
2023 __ ret(0); in GenerateFlatOneByteStringEquals()
2027 __ bind(&check_zero_length); in GenerateFlatOneByteStringEquals()
2029 __ SmiTest(length); in GenerateFlatOneByteStringEquals()
2030 __ j(not_zero, &compare_chars, Label::kNear); in GenerateFlatOneByteStringEquals()
2031 __ Move(rax, Smi::FromInt(EQUAL)); in GenerateFlatOneByteStringEquals()
2032 __ ret(0); in GenerateFlatOneByteStringEquals()
2035 __ bind(&compare_chars); in GenerateFlatOneByteStringEquals()
2041 __ Move(rax, Smi::FromInt(EQUAL)); in GenerateFlatOneByteStringEquals()
2042 __ ret(0); in GenerateFlatOneByteStringEquals()
2045 __ bind(&strings_not_equal); in GenerateFlatOneByteStringEquals()
2046 __ Move(rax, Smi::FromInt(NOT_EQUAL)); in GenerateFlatOneByteStringEquals()
2047 __ ret(0); in GenerateFlatOneByteStringEquals()
2059 __ movp(scratch1, FieldOperand(left, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
2060 __ movp(scratch4, scratch1); in GenerateCompareFlatOneByteStrings()
2061 __ SmiSub(scratch4, in GenerateCompareFlatOneByteStrings()
2067 __ j(less, &left_shorter, Label::kNear); in GenerateCompareFlatOneByteStrings()
2071 __ SmiSub(scratch1, scratch1, length_difference); in GenerateCompareFlatOneByteStrings()
2072 __ bind(&left_shorter); in GenerateCompareFlatOneByteStrings()
2078 __ SmiTest(min_length); in GenerateCompareFlatOneByteStrings()
2079 __ j(zero, &compare_lengths, Label::kNear); in GenerateCompareFlatOneByteStrings()
2091 __ bind(&compare_lengths); in GenerateCompareFlatOneByteStrings()
2092 __ SmiTest(length_difference); in GenerateCompareFlatOneByteStrings()
2094 __ j(not_zero, &length_not_equal, Label::kNear); in GenerateCompareFlatOneByteStrings()
2097 __ Move(rax, Smi::FromInt(EQUAL)); in GenerateCompareFlatOneByteStrings()
2098 __ ret(0); in GenerateCompareFlatOneByteStrings()
2102 __ bind(&length_not_equal); in GenerateCompareFlatOneByteStrings()
2103 __ j(greater, &result_greater, Label::kNear); in GenerateCompareFlatOneByteStrings()
2104 __ jmp(&result_less, Label::kNear); in GenerateCompareFlatOneByteStrings()
2105 __ bind(&result_not_equal); in GenerateCompareFlatOneByteStrings()
2107 __ j(above, &result_greater, Label::kNear); in GenerateCompareFlatOneByteStrings()
2108 __ bind(&result_less); in GenerateCompareFlatOneByteStrings()
2111 __ Move(rax, Smi::FromInt(LESS)); in GenerateCompareFlatOneByteStrings()
2112 __ ret(0); in GenerateCompareFlatOneByteStrings()
2115 __ bind(&result_greater); in GenerateCompareFlatOneByteStrings()
2116 __ Move(rax, Smi::FromInt(GREATER)); in GenerateCompareFlatOneByteStrings()
2117 __ ret(0); in GenerateCompareFlatOneByteStrings()
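Together, GenerateFlatOneByteStringEquals and GenerateCompareFlatOneByteStrings above implement ordinary lexicographic comparison on flat one-byte strings. A hedged C++ equivalent returning the same LESS/EQUAL/GREATER encoding:

    #include <cstdint>

    int CompareFlatOneByte(const uint8_t* left, int left_len,
                           const uint8_t* right, int right_len) {
      int min_len = left_len < right_len ? left_len : right_len;
      for (int i = 0; i < min_len; i++) {
        if (left[i] != right[i])                       // chars_not_equal
          return left[i] < right[i] ? -1 : 1;          // LESS : GREATER
      }
      if (left_len == right_len) return 0;             // EQUAL
      return left_len < right_len ? -1 : 1;            // shorter string is less
    }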
2127 __ SmiToInteger32(length, length); in GenerateOneByteCharsCompareLoop()
2128 __ leap(left, in GenerateOneByteCharsCompareLoop()
2130 __ leap(right, in GenerateOneByteCharsCompareLoop()
2132 __ negq(length); in GenerateOneByteCharsCompareLoop()
2137 __ bind(&loop); in GenerateOneByteCharsCompareLoop()
2138 __ movb(scratch, Operand(left, index, times_1, 0)); in GenerateOneByteCharsCompareLoop()
2139 __ cmpb(scratch, Operand(right, index, times_1, 0)); in GenerateOneByteCharsCompareLoop()
2140 __ j(not_equal, chars_not_equal, near_jump); in GenerateOneByteCharsCompareLoop()
2141 __ incq(index); in GenerateOneByteCharsCompareLoop()
2142 __ j(not_zero, &loop); in GenerateOneByteCharsCompareLoop()
2156 __ Move(rcx, isolate()->factory()->undefined_value()); in Generate()
2160 __ testb(rcx, Immediate(kSmiTagMask)); in Generate()
2161 __ Assert(not_equal, kExpectedAllocationSite); in Generate()
2162 __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), in Generate()
2164 __ Assert(equal, kExpectedAllocationSite); in Generate()
2170 __ TailCallStub(&stub); in Generate()
2180 __ JumpIfSmi(rdx, &miss, miss_distance); in GenerateBooleans()
2181 __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset)); in GenerateBooleans()
2182 __ JumpIfSmi(rax, &miss, miss_distance); in GenerateBooleans()
2183 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset)); in GenerateBooleans()
2184 __ JumpIfNotRoot(rcx, Heap::kBooleanMapRootIndex, &miss, miss_distance); in GenerateBooleans()
2185 __ JumpIfNotRoot(rbx, Heap::kBooleanMapRootIndex, &miss, miss_distance); in GenerateBooleans()
2187 __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset)); in GenerateBooleans()
2188 __ AssertSmi(rax); in GenerateBooleans()
2189 __ movp(rdx, FieldOperand(rdx, Oddball::kToNumberOffset)); in GenerateBooleans()
2190 __ AssertSmi(rdx); in GenerateBooleans()
2191 __ pushq(rax); in GenerateBooleans()
2192 __ movq(rax, rdx); in GenerateBooleans()
2193 __ popq(rdx); in GenerateBooleans()
2195 __ subp(rax, rdx); in GenerateBooleans()
2196 __ Ret(); in GenerateBooleans()
2198 __ bind(&miss); in GenerateBooleans()
2206 __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear); in GenerateSmis()
2210 __ subp(rax, rdx); in GenerateSmis()
2213 __ subp(rdx, rax); in GenerateSmis()
2214 __ j(no_overflow, &done, Label::kNear); in GenerateSmis()
2216 __ notp(rdx); in GenerateSmis()
2217 __ bind(&done); in GenerateSmis()
2218 __ movp(rax, rdx); in GenerateSmis()
2220 __ ret(0); in GenerateSmis()
2222 __ bind(&miss); in GenerateSmis()
2235 __ JumpIfNotSmi(rdx, &miss); in GenerateNumbers()
2238 __ JumpIfNotSmi(rax, &miss); in GenerateNumbers()
2243 __ JumpIfSmi(rax, &right_smi, Label::kNear); in GenerateNumbers()
2244 __ CompareMap(rax, isolate()->factory()->heap_number_map()); in GenerateNumbers()
2245 __ j(not_equal, &maybe_undefined1, Label::kNear); in GenerateNumbers()
2246 __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); in GenerateNumbers()
2247 __ jmp(&left, Label::kNear); in GenerateNumbers()
2248 __ bind(&right_smi); in GenerateNumbers()
2249 __ SmiToInteger32(rcx, rax); // Can't clobber rax yet. in GenerateNumbers()
2250 __ Cvtlsi2sd(xmm1, rcx); in GenerateNumbers()
2252 __ bind(&left); in GenerateNumbers()
2253 __ JumpIfSmi(rdx, &left_smi, Label::kNear); in GenerateNumbers()
2254 __ CompareMap(rdx, isolate()->factory()->heap_number_map()); in GenerateNumbers()
2255 __ j(not_equal, &maybe_undefined2, Label::kNear); in GenerateNumbers()
2256 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); in GenerateNumbers()
2257 __ jmp(&done); in GenerateNumbers()
2258 __ bind(&left_smi); in GenerateNumbers()
2259 __ SmiToInteger32(rcx, rdx); // Can't clobber rdx yet. in GenerateNumbers()
2260 __ Cvtlsi2sd(xmm0, rcx); in GenerateNumbers()
2262 __ bind(&done); in GenerateNumbers()
2264 __ Ucomisd(xmm0, xmm1); in GenerateNumbers()
2267 __ j(parity_even, &unordered, Label::kNear); in GenerateNumbers()
2271 __ movl(rax, Immediate(0)); in GenerateNumbers()
2272 __ movl(rcx, Immediate(0)); in GenerateNumbers()
2273 __ setcc(above, rax); // Add one to zero if carry clear and not equal. in GenerateNumbers()
2274 __ sbbp(rax, rcx); // Subtract one if below (aka. carry set). in GenerateNumbers()
2275 __ ret(0); in GenerateNumbers()
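The short sequence above produces -1/0/+1 without branches: after ucomisd the carry flag means "below" (the unordered case was already branched away), so setcc(above) builds the positive half and sbb subtracts the borrow for the negative half. A hedged C++ equivalent:

    int CompareOrderedDoubles(double l, double r) {
      int above = l > r ? 1 : 0;   // setcc(above, rax)
      int below = l < r ? 1 : 0;   // the carry flag after ucomisd
      return above - below;        // sbbp(rax, rcx) with rcx == 0 subtracts CF
    }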
2277 __ bind(&unordered); in GenerateNumbers()
2278 __ bind(&generic_stub); in GenerateNumbers()
2281 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); in GenerateNumbers()
2283 __ bind(&maybe_undefined1); in GenerateNumbers()
2285 __ Cmp(rax, isolate()->factory()->undefined_value()); in GenerateNumbers()
2286 __ j(not_equal, &miss); in GenerateNumbers()
2287 __ JumpIfSmi(rdx, &unordered); in GenerateNumbers()
2288 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx); in GenerateNumbers()
2289 __ j(not_equal, &maybe_undefined2, Label::kNear); in GenerateNumbers()
2290 __ jmp(&unordered); in GenerateNumbers()
2293 __ bind(&maybe_undefined2); in GenerateNumbers()
2295 __ Cmp(rdx, isolate()->factory()->undefined_value()); in GenerateNumbers()
2296 __ j(equal, &unordered); in GenerateNumbers()
2299 __ bind(&miss); in GenerateNumbers()
2317 __ j(cond, &miss, Label::kNear); in GenerateInternalizedStrings()
2320 __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2321 __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2322 __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2323 __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2325 __ orp(tmp1, tmp2); in GenerateInternalizedStrings()
2326 __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); in GenerateInternalizedStrings()
2327 __ j(not_zero, &miss, Label::kNear); in GenerateInternalizedStrings()
2331 __ cmpp(left, right); in GenerateInternalizedStrings()
2335 __ j(not_equal, &done, Label::kNear); in GenerateInternalizedStrings()
2338 __ Move(rax, Smi::FromInt(EQUAL)); in GenerateInternalizedStrings()
2339 __ bind(&done); in GenerateInternalizedStrings()
2340 __ ret(0); in GenerateInternalizedStrings()
2342 __ bind(&miss); in GenerateInternalizedStrings()
2360 __ j(cond, &miss, Label::kNear); in GenerateUniqueNames()
2364 __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset)); in GenerateUniqueNames()
2365 __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset)); in GenerateUniqueNames()
2366 __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2367 __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2369 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear); in GenerateUniqueNames()
2370 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear); in GenerateUniqueNames()
2374 __ cmpp(left, right); in GenerateUniqueNames()
2378 __ j(not_equal, &done, Label::kNear); in GenerateUniqueNames()
2381 __ Move(rax, Smi::FromInt(EQUAL)); in GenerateUniqueNames()
2382 __ bind(&done); in GenerateUniqueNames()
2383 __ ret(0); in GenerateUniqueNames()
2385 __ bind(&miss); in GenerateUniqueNames()
2405 __ j(cond, &miss); in GenerateStrings()
2409 __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset)); in GenerateStrings()
2410 __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset)); in GenerateStrings()
2411 __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateStrings()
2412 __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateStrings()
2413 __ movp(tmp3, tmp1); in GenerateStrings()
2415 __ orp(tmp3, tmp2); in GenerateStrings()
2416 __ testb(tmp3, Immediate(kIsNotStringMask)); in GenerateStrings()
2417 __ j(not_zero, &miss); in GenerateStrings()
2421 __ cmpp(left, right); in GenerateStrings()
2422 __ j(not_equal, &not_same, Label::kNear); in GenerateStrings()
2425 __ Move(rax, Smi::FromInt(EQUAL)); in GenerateStrings()
2426 __ ret(0); in GenerateStrings()
2429 __ bind(&not_same); in GenerateStrings()
2437 __ orp(tmp1, tmp2); in GenerateStrings()
2438 __ testb(tmp1, Immediate(kIsNotInternalizedMask)); in GenerateStrings()
2439 __ j(not_zero, &do_compare, Label::kNear); in GenerateStrings()
2443 __ ret(0); in GenerateStrings()
2444 __ bind(&do_compare); in GenerateStrings()
2449 __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime); in GenerateStrings()
2461 __ bind(&runtime); in GenerateStrings()
2465 __ Push(left); in GenerateStrings()
2466 __ Push(right); in GenerateStrings()
2467 __ CallRuntime(Runtime::kStringEqual); in GenerateStrings()
2469 __ LoadRoot(rdx, Heap::kTrueValueRootIndex); in GenerateStrings()
2470 __ subp(rax, rdx); in GenerateStrings()
2471 __ Ret(); in GenerateStrings()
2473 __ PopReturnAddressTo(tmp1); in GenerateStrings()
2474 __ Push(left); in GenerateStrings()
2475 __ Push(right); in GenerateStrings()
2476 __ PushReturnAddressFrom(tmp1); in GenerateStrings()
2477 __ TailCallRuntime(Runtime::kStringCompare); in GenerateStrings()
2480 __ bind(&miss); in GenerateStrings()
2489 __ j(either_smi, &miss, Label::kNear); in GenerateReceivers()
2492 __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx); in GenerateReceivers()
2493 __ j(below, &miss, Label::kNear); in GenerateReceivers()
2494 __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx); in GenerateReceivers()
2495 __ j(below, &miss, Label::kNear); in GenerateReceivers()
2498 __ subp(rax, rdx); in GenerateReceivers()
2499 __ ret(0); in GenerateReceivers()
2501 __ bind(&miss); in GenerateReceivers()
2510 __ j(either_smi, &miss, Label::kNear); in GenerateKnownReceivers()
2512 __ GetWeakValue(rdi, cell); in GenerateKnownReceivers()
2513 __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rdi); in GenerateKnownReceivers()
2514 __ j(not_equal, &miss, Label::kNear); in GenerateKnownReceivers()
2515 __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rdi); in GenerateKnownReceivers()
2516 __ j(not_equal, &miss, Label::kNear); in GenerateKnownReceivers()
2519 __ subp(rax, rdx); in GenerateKnownReceivers()
2520 __ ret(0); in GenerateKnownReceivers()
2522 __ PopReturnAddressTo(rcx); in GenerateKnownReceivers()
2523 __ Push(rdx); in GenerateKnownReceivers()
2524 __ Push(rax); in GenerateKnownReceivers()
2525 __ Push(Smi::FromInt(NegativeComparisonResult(GetCondition()))); in GenerateKnownReceivers()
2526 __ PushReturnAddressFrom(rcx); in GenerateKnownReceivers()
2527 __ TailCallRuntime(Runtime::kCompare); in GenerateKnownReceivers()
2530 __ bind(&miss); in GenerateKnownReceivers()
2539 __ Push(rdx); in GenerateMiss()
2540 __ Push(rax); in GenerateMiss()
2541 __ Push(rdx); in GenerateMiss()
2542 __ Push(rax); in GenerateMiss()
2543 __ Push(Smi::FromInt(op())); in GenerateMiss()
2544 __ CallRuntime(Runtime::kCompareIC_Miss); in GenerateMiss()
2547 __ leap(rdi, FieldOperand(rax, Code::kHeaderSize)); in GenerateMiss()
2548 __ Pop(rax); in GenerateMiss()
2549 __ Pop(rdx); in GenerateMiss()
2553 __ jmp(rdi); in GenerateMiss()
2574 __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset)); in GenerateNegativeLookup()
2575 __ decl(index); in GenerateNegativeLookup()
2576 __ andp(index, in GenerateNegativeLookup()
2581 __ leap(index, Operand(index, index, times_2, 0)); // index *= 3. in GenerateNegativeLookup()
2586 __ movp(entity_name, Operand(properties, in GenerateNegativeLookup()
2590 __ Cmp(entity_name, masm->isolate()->factory()->undefined_value()); in GenerateNegativeLookup()
2591 __ j(equal, done); in GenerateNegativeLookup()
2594 __ Cmp(entity_name, Handle<Name>(name)); in GenerateNegativeLookup()
2595 __ j(equal, miss); in GenerateNegativeLookup()
2599 __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex); in GenerateNegativeLookup()
2600 __ j(equal, &good, Label::kNear); in GenerateNegativeLookup()
2603 __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); in GenerateNegativeLookup()
2604 __ JumpIfNotUniqueNameInstanceType( in GenerateNegativeLookup()
2606 __ bind(&good); in GenerateNegativeLookup()
2611 __ Push(Handle<Object>(name)); in GenerateNegativeLookup()
2612 __ Push(Immediate(name->Hash())); in GenerateNegativeLookup()
2613 __ CallStub(&stub); in GenerateNegativeLookup()
2614 __ testp(r0, r0); in GenerateNegativeLookup()
2615 __ j(not_zero, miss); in GenerateNegativeLookup()
2616 __ jmp(done); in GenerateNegativeLookup()
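
Negative lookup probes the property dictionary a bounded number of times inline before falling back to the full stub. The decl/andp/leap triple is the probing arithmetic: capacity is a power of two, so capacity - 1 is the mask, and index + index*2 scales the entry index to a word offset across 3-word entries. A standalone sketch, with the probe-offset formula and entry size as assumptions rather than quotes from the V8 headers:

  #include <cstdint>
  #include <cstdio>

  constexpr uint32_t kEntrySize = 3;  // assumed: key, value, details

  // Assumed triangular probe sequence: 0, 1, 3, 6, ...
  constexpr uint32_t ProbeOffset(uint32_t n) { return (n + n * n) >> 1; }

  uint32_t EntryWordIndex(uint32_t hash, uint32_t capacity, uint32_t probe) {
    uint32_t mask = capacity - 1;                         // decl(index)
    uint32_t index = (hash + ProbeOffset(probe)) & mask;  // andp(index, ...)
    return index * kEntrySize;                            // leap: index *= 3
  }

  int main() {
    for (uint32_t i = 0; i < 4; ++i)
      std::printf("probe %u -> word %u\n", i, EntryWordIndex(0x1234u, 64u, i));
    return 0;
  }
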
2636 __ AssertName(name); in GeneratePositiveLookup()
2638 __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset)); in GeneratePositiveLookup()
2639 __ decl(r0); in GeneratePositiveLookup()
2643 __ movl(r1, FieldOperand(name, Name::kHashFieldOffset)); in GeneratePositiveLookup()
2644 __ shrl(r1, Immediate(Name::kHashShift)); in GeneratePositiveLookup()
2646 __ addl(r1, Immediate(NameDictionary::GetProbeOffset(i))); in GeneratePositiveLookup()
2648 __ andp(r1, r0); in GeneratePositiveLookup()
2652 __ leap(r1, Operand(r1, r1, times_2, 0)); // r1 = r1 * 3 in GeneratePositiveLookup()
2655 __ cmpp(name, Operand(elements, r1, times_pointer_size, in GeneratePositiveLookup()
2657 __ j(equal, done); in GeneratePositiveLookup()
2662 __ Push(name); in GeneratePositiveLookup()
2663 __ movl(r0, FieldOperand(name, Name::kHashFieldOffset)); in GeneratePositiveLookup()
2664 __ shrl(r0, Immediate(Name::kHashShift)); in GeneratePositiveLookup()
2665 __ Push(r0); in GeneratePositiveLookup()
2666 __ CallStub(&stub); in GeneratePositiveLookup()
2668 __ testp(r0, r0); in GeneratePositiveLookup()
2669 __ j(zero, miss); in GeneratePositiveLookup()
2670 __ jmp(done); in GeneratePositiveLookup()
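
GeneratePositiveLookup runs the same probe sequence but first has to recover the raw hash: movl loads Name::kHashFieldOffset and shrl by Name::kHashShift discards the low flag bits that share the field with the hash. A one-liner sketch, with the shift width assumed for illustration:

  #include <cstdint>

  constexpr uint32_t kHashShift = 2;  // assumed flag width in the field

  // The hash field packs flags below the hash proper; shifting recovers
  // the value that feeds the probe sequence.
  inline uint32_t RawHash(uint32_t hash_field) {
    return hash_field >> kHashShift;
  }
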
2693 __ SmiToInteger32(scratch, FieldOperand(dictionary(), kCapacityOffset)); in Generate()
2694 __ decl(scratch); in Generate()
2695 __ Push(scratch); in Generate()
2706 __ movp(scratch, args.GetArgumentOperand(1)); in Generate()
2708 __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i))); in Generate()
2710 __ andp(scratch, Operand(rsp, 0)); in Generate()
2714 __ leap(index(), Operand(scratch, scratch, times_2, 0)); // index *= 3. in Generate()
2717 __ movp(scratch, Operand(dictionary(), index(), times_pointer_size, in Generate()
2720 __ Cmp(scratch, isolate()->factory()->undefined_value()); in Generate()
2721 __ j(equal, &not_in_dictionary); in Generate()
2724 __ cmpp(scratch, args.GetArgumentOperand(0)); in Generate()
2725 __ j(equal, &in_dictionary); in Generate()
2733 __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); in Generate()
2734 __ JumpIfNotUniqueNameInstanceType( in Generate()
2740 __ bind(&maybe_in_dictionary); in Generate()
2745 __ movp(scratch, Immediate(0)); in Generate()
2746 __ Drop(1); in Generate()
2747 __ ret(2 * kPointerSize); in Generate()
2750 __ bind(&in_dictionary); in Generate()
2751 __ movp(scratch, Immediate(1)); in Generate()
2752 __ Drop(1); in Generate()
2753 __ ret(2 * kPointerSize); in Generate()
2755 __ bind(&not_in_dictionary); in Generate()
2756 __ movp(scratch, Immediate(0)); in Generate()
2757 __ Drop(1); in Generate()
2758 __ ret(2 * kPointerSize); in Generate()
2784 __ jmp(&skip_to_incremental_noncompacting, Label::kNear); in Generate()
2785 __ jmp(&skip_to_incremental_compacting, Label::kFar); in Generate()
2788 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in Generate()
2791 __ ret(0); in Generate()
2794 __ bind(&skip_to_incremental_noncompacting); in Generate()
2797 __ bind(&skip_to_incremental_compacting); in Generate()
2813 __ movp(regs_.scratch0(), Operand(regs_.address(), 0)); in GenerateIncremental()
2814 __ JumpIfNotInNewSpace(regs_.scratch0(), in GenerateIncremental()
2818 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(), in GenerateIncremental()
2827 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in GenerateIncremental()
2830 __ bind(&dont_need_remembered_set); in GenerateIncremental()
2837 __ ret(0); in GenerateIncremental()
2847 __ Move(address, regs_.address()); in InformIncrementalMarker()
2848 __ Move(arg_reg_1, regs_.object()); in InformIncrementalMarker()
2850 __ Move(arg_reg_2, address); in InformIncrementalMarker()
2851 __ LoadAddress(arg_reg_3, in InformIncrementalMarker()
2856 __ PrepareCallCFunction(argument_count); in InformIncrementalMarker()
2857 __ CallCFunction( in InformIncrementalMarker()
2874 __ JumpIfBlack(regs_.object(), in CheckNeedsToInformIncrementalMarker()
2882 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in CheckNeedsToInformIncrementalMarker()
2885 __ ret(0); in CheckNeedsToInformIncrementalMarker()
2888 __ bind(&on_black); in CheckNeedsToInformIncrementalMarker()
2891 __ movp(regs_.scratch0(), Operand(regs_.address(), 0)); in CheckNeedsToInformIncrementalMarker()
2896 __ CheckPageFlag(regs_.scratch0(), // Contains value. in CheckNeedsToInformIncrementalMarker()
2903 __ CheckPageFlag(regs_.object(), in CheckNeedsToInformIncrementalMarker()
2909 __ bind(&ensure_not_white); in CheckNeedsToInformIncrementalMarker()
2914 __ Push(regs_.object()); in CheckNeedsToInformIncrementalMarker()
2915 __ JumpIfWhite(regs_.scratch0(), // The value. in CheckNeedsToInformIncrementalMarker()
2919 __ Pop(regs_.object()); in CheckNeedsToInformIncrementalMarker()
2923 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in CheckNeedsToInformIncrementalMarker()
2926 __ ret(0); in CheckNeedsToInformIncrementalMarker()
2929 __ bind(&need_incremental_pop_object); in CheckNeedsToInformIncrementalMarker()
2930 __ Pop(regs_.object()); in CheckNeedsToInformIncrementalMarker()
2932 __ bind(&need_incremental); in CheckNeedsToInformIncrementalMarker()
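
The incremental path above only falls through to the marker when it must: the JumpIfNotInNewSpace/JumpIfInNewSpace pair implements the classic generational filter, and the page-flag checks then skip values the current GC cycle does not care about. A sketch of just the generational predicate, with a plain struct standing in for the heap queries:

  // Record an old->new pointer in the remembered set; every other
  // combination is either scanned anyway or uninteresting.
  struct Slot {
    bool object_in_new_space;
    bool value_in_new_space;
  };

  inline bool NeedsRememberedSet(const Slot& s) {
    return !s.object_in_new_space && s.value_in_new_space;
  }
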
2940 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET); in Generate()
2943 __ movp(rbx, MemOperand(rbp, parameter_count_offset)); in Generate()
2945 __ PopReturnAddressTo(rcx); in Generate()
2948 __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset)); in Generate()
2949 __ jmp(rcx); // Return to IC Miss stub, continuation still on stack. in Generate()
2964 __ movp(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0))); in HandleArrayCases()
2965 __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset)); in HandleArrayCases()
2966 __ j(not_equal, &start_polymorphic); in HandleArrayCases()
2970 __ movp(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1))); in HandleArrayCases()
2971 __ leap(handler, FieldOperand(handler, Code::kHeaderSize)); in HandleArrayCases()
2972 __ jmp(handler); in HandleArrayCases()
2975 __ bind(&start_polymorphic); in HandleArrayCases()
2976 __ SmiToInteger32(length, FieldOperand(feedback, FixedArray::kLengthOffset)); in HandleArrayCases()
2980 __ cmpl(length, Immediate(2)); in HandleArrayCases()
2981 __ j(equal, miss); in HandleArrayCases()
2983 __ movl(counter, Immediate(2)); in HandleArrayCases()
2985 __ bind(&next_loop); in HandleArrayCases()
2986 __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size, in HandleArrayCases()
2988 __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset)); in HandleArrayCases()
2989 __ j(not_equal, &prepare_next); in HandleArrayCases()
2990 __ movp(handler, FieldOperand(feedback, counter, times_pointer_size, in HandleArrayCases()
2992 __ leap(handler, FieldOperand(handler, Code::kHeaderSize)); in HandleArrayCases()
2993 __ jmp(handler); in HandleArrayCases()
2995 __ bind(&prepare_next); in HandleArrayCases()
2996 __ addl(counter, Immediate(2)); in HandleArrayCases()
2997 __ cmpl(counter, length); in HandleArrayCases()
2998 __ j(less, &next_loop); in HandleArrayCases()
3001 __ jmp(miss); in HandleArrayCases()
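
HandleArrayCases walks a polymorphic feedback array laid out as (weak map cell, handler) pairs; the counter starts at 2 because the first pair was already tried by the checks above, and it advances by 2 per pair. A container-based sketch with placeholder types:

  #include <cstddef>
  #include <vector>

  // feedback layout: [map0, handler0, map1, handler1, ...]
  const void* FindHandler(const std::vector<const void*>& feedback,
                          const void* receiver_map) {
    for (std::size_t i = 2; i + 1 < feedback.size(); i += 2) {  // skip pair 0
      if (feedback[i] == receiver_map) return feedback[i + 1];
    }
    return nullptr;  // miss: no cached map matched
  }
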
3010 __ JumpIfSmi(receiver, load_smi_map); in HandleMonomorphicCase()
3011 __ movp(receiver_map, FieldOperand(receiver, 0)); in HandleMonomorphicCase()
3013 __ bind(compare_map); in HandleMonomorphicCase()
3014 __ cmpp(receiver_map, FieldOperand(feedback, WeakCell::kValueOffset)); in HandleMonomorphicCase()
3015 __ j(not_equal, try_array); in HandleMonomorphicCase()
3017 __ movp(handler, FieldOperand(vector, integer_slot, times_pointer_size, in HandleMonomorphicCase()
3019 __ leap(handler, FieldOperand(handler, Code::kHeaderSize)); in HandleMonomorphicCase()
3020 __ jmp(handler); in HandleMonomorphicCase()
3024 __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister()); in Generate()
3052 __ movp(counter, Immediate(0)); in HandlePolymorphicKeyedStoreCase()
3053 __ movp(length, FieldOperand(feedback, FixedArray::kLengthOffset)); in HandlePolymorphicKeyedStoreCase()
3054 __ SmiToInteger32(length, length); in HandlePolymorphicKeyedStoreCase()
3056 __ bind(&next_loop); in HandlePolymorphicKeyedStoreCase()
3057 __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size, in HandlePolymorphicKeyedStoreCase()
3059 __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset)); in HandlePolymorphicKeyedStoreCase()
3060 __ j(not_equal, &prepare_next); in HandlePolymorphicKeyedStoreCase()
3061 __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size, in HandlePolymorphicKeyedStoreCase()
3063 __ CompareRoot(cached_map, Heap::kUndefinedValueRootIndex); in HandlePolymorphicKeyedStoreCase()
3064 __ j(not_equal, &transition_call); in HandlePolymorphicKeyedStoreCase()
3065 __ movp(feedback, FieldOperand(feedback, counter, times_pointer_size, in HandlePolymorphicKeyedStoreCase()
3067 __ leap(feedback, FieldOperand(feedback, Code::kHeaderSize)); in HandlePolymorphicKeyedStoreCase()
3068 __ jmp(feedback); in HandlePolymorphicKeyedStoreCase()
3070 __ bind(&transition_call); in HandlePolymorphicKeyedStoreCase()
3072 __ movp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset)); in HandlePolymorphicKeyedStoreCase()
3074 __ JumpIfSmi(receiver_map, miss); in HandlePolymorphicKeyedStoreCase()
3076 __ movp(feedback, FieldOperand(feedback, counter, times_pointer_size, in HandlePolymorphicKeyedStoreCase()
3078 __ leap(feedback, FieldOperand(feedback, Code::kHeaderSize)); in HandlePolymorphicKeyedStoreCase()
3079 __ jmp(feedback); in HandlePolymorphicKeyedStoreCase()
3081 __ bind(&prepare_next); in HandlePolymorphicKeyedStoreCase()
3082 __ addl(counter, Immediate(3)); in HandlePolymorphicKeyedStoreCase()
3083 __ cmpl(counter, length); in HandlePolymorphicKeyedStoreCase()
3084 __ j(less, &next_loop); in HandlePolymorphicKeyedStoreCase()
3087 __ jmp(miss); in HandlePolymorphicKeyedStoreCase()
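
The keyed-store variant steps by 3 because its feedback entries are triples: receiver map, transition map (or undefined when the store does not transition), and handler. Sketch in the same placeholder style:

  #include <cstddef>
  #include <vector>

  struct StoreTarget {
    const void* transition_map;  // nullptr when no transition is needed
    const void* handler;
  };

  // feedback layout: [map0, transition0, handler0, map1, ...]
  bool FindStoreHandler(const std::vector<const void*>& feedback,
                        const void* receiver_map, const void* undefined,
                        StoreTarget* out) {
    for (std::size_t i = 0; i + 2 < feedback.size(); i += 3) {
      if (feedback[i] != receiver_map) continue;
      out->transition_map =
          (feedback[i + 1] == undefined) ? nullptr : feedback[i + 1];
      out->handler = feedback[i + 2];
      return true;
    }
    return false;  // miss
  }
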
3101 __ SmiToInteger32(integer_slot, slot); in GenerateImpl()
3102 __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size, in GenerateImpl()
3114 __ bind(&try_array); in GenerateImpl()
3115 __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex); in GenerateImpl()
3116 __ j(not_equal, &not_array); in GenerateImpl()
3120 __ bind(&not_array); in GenerateImpl()
3122 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex); in GenerateImpl()
3123 __ j(not_equal, &try_poly_name); in GenerateImpl()
3127 __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET); in GenerateImpl()
3129 __ bind(&try_poly_name); in GenerateImpl()
3131 __ cmpp(key, feedback); in GenerateImpl()
3132 __ j(not_equal, &miss); in GenerateImpl()
3135 __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size, in GenerateImpl()
3140 __ bind(&miss); in GenerateImpl()
3143 __ bind(&load_smi_map); in GenerateImpl()
3144 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); in GenerateImpl()
3145 __ jmp(&compare_map); in GenerateImpl()
3150 __ EmitLoadTypeFeedbackVector(rbx); in Generate()
3152 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); in Generate()
3168 __ pushq(arg_reg_1); in Generate()
3169 __ pushq(arg_reg_2); in Generate()
3172 __ leap(arg_reg_2, in Generate()
3176 __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize)); in Generate()
3177 __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength)); in Generate()
3183 __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()), in Generate()
3189 __ PrepareCallCFunction(kArgumentCount); in Generate()
3190 __ CallCFunction(rax, kArgumentCount); in Generate()
3194 __ popq(arg_reg_2); in Generate()
3195 __ popq(arg_reg_1); in Generate()
3197 __ Ret(); in Generate()
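
ProfileEntryHookStub recovers the address of the instrumented call site from the return address it finds on the stack: the subp by Assembler::kShortCallInstructionLength steps back over the call instruction itself. A sketch of the arithmetic, assuming the 5-byte rel32 call encoding on x64:

  #include <cstdint>

  constexpr intptr_t kShortCallInstructionLength = 5;  // assumed: E8 + rel32

  // The return address points just past the call; stepping back by the
  // call's encoded length yields the call site handed to the hook.
  inline intptr_t CallSiteFromReturnAddress(intptr_t return_address) {
    return return_address - kShortCallInstructionLength;
  }
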
3206 __ TailCallStub(&stub); in CreateArrayDispatch()
3213 __ cmpl(rdx, Immediate(kind)); in CreateArrayDispatch()
3214 __ j(not_equal, &next); in CreateArrayDispatch()
3216 __ TailCallStub(&stub); in CreateArrayDispatch()
3217 __ bind(&next); in CreateArrayDispatch()
3221 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatch()
3247 __ testb(rdx, Immediate(1)); in CreateArrayDispatchOneArgument()
3248 __ j(not_zero, &normal_sequence); in CreateArrayDispatchOneArgument()
3253 __ movp(rcx, args.GetArgumentOperand(0)); in CreateArrayDispatchOneArgument()
3254 __ testp(rcx, rcx); in CreateArrayDispatchOneArgument()
3255 __ j(zero, &normal_sequence); in CreateArrayDispatchOneArgument()
3264 __ TailCallStub(&stub_holey); in CreateArrayDispatchOneArgument()
3266 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
3270 __ TailCallStub(&stub); in CreateArrayDispatchOneArgument()
3274 __ incl(rdx); in CreateArrayDispatchOneArgument()
3279 __ Cmp(FieldOperand(rbx, 0), allocation_site_map); in CreateArrayDispatchOneArgument()
3280 __ Assert(equal, kExpectedAllocationSite); in CreateArrayDispatchOneArgument()
3287 __ SmiAddConstant(FieldOperand(rbx, AllocationSite::kTransitionInfoOffset), in CreateArrayDispatchOneArgument()
3290 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
3296 __ cmpl(rdx, Immediate(kind)); in CreateArrayDispatchOneArgument()
3297 __ j(not_equal, &next); in CreateArrayDispatchOneArgument()
3299 __ TailCallStub(&stub); in CreateArrayDispatchOneArgument()
3300 __ bind(&next); in CreateArrayDispatchOneArgument()
3304 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatchOneArgument()
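
The incl(rdx) that precedes the transition works because the fast ElementsKind values pair each packed kind with its holey counterpart at the next integer, so packed -> holey is a plain +1 (recorded in the AllocationSite via the SmiAddConstant above). Sketch with assumed enum values:

  // Assumed layout: each holey kind directly follows its packed kind.
  enum ElementsKind {
    FAST_SMI_ELEMENTS = 0,
    FAST_HOLEY_SMI_ELEMENTS = 1,
    FAST_ELEMENTS = 2,
    FAST_HOLEY_ELEMENTS = 3,
  };

  inline ElementsKind ToHoley(ElementsKind packed_kind) {
    return static_cast<ElementsKind>(packed_kind + 1);  // the incl(rdx)
  }
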
3347 __ testp(rax, rax); in GenerateDispatchToArrayStub()
3348 __ j(not_zero, &not_zero_case); in GenerateDispatchToArrayStub()
3351 __ bind(&not_zero_case); in GenerateDispatchToArrayStub()
3352 __ cmpl(rax, Immediate(1)); in GenerateDispatchToArrayStub()
3353 __ j(greater, &not_one_case); in GenerateDispatchToArrayStub()
3356 __ bind(&not_one_case); in GenerateDispatchToArrayStub()
3358 __ TailCallStub(&stub); in GenerateDispatchToArrayStub()
3375 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3379 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); in Generate()
3380 __ CmpObjectType(rcx, MAP_TYPE, rcx); in Generate()
3381 __ Check(equal, kUnexpectedInitialMapForArrayFunction); in Generate()
3384 __ AssertUndefinedOrAllocationSite(rbx); in Generate()
3388 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); in Generate()
3391 __ cmpp(rdi, rdx); in Generate()
3392 __ j(not_equal, &subclassing); in Generate()
3397 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); in Generate()
3398 __ j(equal, &no_info); in Generate()
3401 __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset)); in Generate()
3402 __ SmiToInteger32(rdx, rdx); in Generate()
3404 __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask)); in Generate()
3407 __ bind(&no_info); in Generate()
3411 __ bind(&subclassing); in Generate()
3413 __ movp(args.GetReceiverOperand(), rdi); in Generate()
3414 __ addp(rax, Immediate(3)); in Generate()
3415 __ PopReturnAddressTo(rcx); in Generate()
3416 __ Push(rdx); in Generate()
3417 __ Push(rbx); in Generate()
3418 __ PushReturnAddressFrom(rcx); in Generate()
3419 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate())); in Generate()
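
The subclassing exit adjusts rax by 3 before tail-calling the runtime: besides the JS arguments, the stub has just materialized three extra stack values for Runtime::kNewArray (the constructor written into the receiver slot, plus the pushed new.target and AllocationSite). The bookkeeping as a sketch, with that breakdown inferred from the surrounding pushes rather than quoted from the runtime:

  // js_argc arrives in rax; the runtime sees js_argc + 3 stack values.
  inline int RuntimeNewArrayArgc(int js_argc) {
    constexpr int kExtraArgs = 3;  // constructor, new.target, site
    return js_argc + kExtraArgs;
  }
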
3428 __ testp(rax, rax); in GenerateCase()
3429 __ j(not_zero, &not_zero_case); in GenerateCase()
3431 __ TailCallStub(&stub0); in GenerateCase()
3433 __ bind(&not_zero_case); in GenerateCase()
3434 __ cmpl(rax, Immediate(1)); in GenerateCase()
3435 __ j(greater, &not_one_case); in GenerateCase()
3441 __ movp(rcx, args.GetArgumentOperand(0)); in GenerateCase()
3442 __ testp(rcx, rcx); in GenerateCase()
3443 __ j(zero, &normal_sequence); in GenerateCase()
3447 __ TailCallStub(&stub1_holey); in GenerateCase()
3450 __ bind(&normal_sequence); in GenerateCase()
3452 __ TailCallStub(&stub1); in GenerateCase()
3454 __ bind(&not_one_case); in GenerateCase()
3456 __ TailCallStub(&stubN); in GenerateCase()
3473 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3477 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); in Generate()
3478 __ CmpObjectType(rcx, MAP_TYPE, rcx); in Generate()
3479 __ Check(equal, kUnexpectedInitialMapForArrayFunction); in Generate()
3483 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3487 __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset)); in Generate()
3489 __ DecodeField<Map::ElementsKindBits>(rcx); in Generate()
3493 __ cmpl(rcx, Immediate(FAST_ELEMENTS)); in Generate()
3494 __ j(equal, &done); in Generate()
3495 __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS)); in Generate()
3496 __ Assert(equal, in Generate()
3498 __ bind(&done); in Generate()
3502 __ cmpl(rcx, Immediate(FAST_ELEMENTS)); in Generate()
3503 __ j(equal, &fast_elements_case); in Generate()
3506 __ bind(&fast_elements_case); in Generate()
3518 __ AssertFunction(rdi); in Generate()
3519 __ AssertReceiver(rdx); in Generate()
3523 __ CmpObjectType(rdx, JS_FUNCTION_TYPE, rbx); in Generate()
3524 __ j(not_equal, &new_object); in Generate()
3527 __ movp(rcx, FieldOperand(rdx, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3528 __ JumpIfSmi(rcx, &new_object); in Generate()
3529 __ CmpObjectType(rcx, MAP_TYPE, rbx); in Generate()
3530 __ j(not_equal, &new_object); in Generate()
3534 __ cmpp(rdi, FieldOperand(rcx, Map::kConstructorOrBackPointerOffset)); in Generate()
3535 __ j(not_equal, &new_object); in Generate()
3539 __ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset)); in Generate()
3540 __ leal(rbx, Operand(rbx, times_pointer_size, 0)); in Generate()
3541 __ Allocate(rbx, rax, rdi, no_reg, &allocate, NO_ALLOCATION_FLAGS); in Generate()
3542 __ bind(&done_allocate); in Generate()
3545 __ movp(FieldOperand(rax, JSObject::kMapOffset), rcx); in Generate()
3546 __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex); in Generate()
3547 __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rbx); in Generate()
3548 __ movp(FieldOperand(rax, JSObject::kElementsOffset), rbx); in Generate()
3550 __ leap(rbx, FieldOperand(rax, JSObject::kHeaderSize)); in Generate()
3564 __ LoadRoot(r11, Heap::kUndefinedValueRootIndex); in Generate()
3565 __ testl(FieldOperand(rcx, Map::kBitField3Offset), in Generate()
3567 __ j(not_zero, &slack_tracking, Label::kNear); in Generate()
3570 __ InitializeFieldsWithFiller(rbx, rdi, r11); in Generate()
3571 __ Ret(); in Generate()
3573 __ bind(&slack_tracking); in Generate()
3577 __ subl(FieldOperand(rcx, Map::kBitField3Offset), in Generate()
3581 __ movzxbl(rdx, FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset)); in Generate()
3582 __ negp(rdx); in Generate()
3583 __ leap(rdx, Operand(rdi, rdx, times_pointer_size, 0)); in Generate()
3584 __ InitializeFieldsWithFiller(rbx, rdx, r11); in Generate()
3587 __ LoadRoot(r11, Heap::kOnePointerFillerMapRootIndex); in Generate()
3588 __ InitializeFieldsWithFiller(rdx, rdi, r11); in Generate()
3593 __ testl(FieldOperand(rcx, Map::kBitField3Offset), in Generate()
3595 __ j(zero, &finalize, Label::kNear); in Generate()
3596 __ Ret(); in Generate()
3599 __ bind(&finalize); in Generate()
3602 __ Push(rax); in Generate()
3603 __ Push(rcx); in Generate()
3604 __ CallRuntime(Runtime::kFinalizeInstanceSize); in Generate()
3605 __ Pop(rax); in Generate()
3607 __ Ret(); in Generate()
3611 __ bind(&allocate); in Generate()
3614 __ Integer32ToSmi(rbx, rbx); in Generate()
3615 __ Push(rcx); in Generate()
3616 __ Push(rbx); in Generate()
3617 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
3618 __ Pop(rcx); in Generate()
3620 __ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset)); in Generate()
3621 __ leap(rdi, Operand(rax, rbx, times_pointer_size, 0)); in Generate()
3623 __ decp(rdi); // Remove the tag from the end address. in Generate()
3624 __ jmp(&done_allocate); in Generate()
3627 __ bind(&new_object); in Generate()
3628 __ PopReturnAddressTo(rcx); in Generate()
3629 __ Push(rdi); in Generate()
3630 __ Push(rdx); in Generate()
3631 __ PushReturnAddressFrom(rcx); in Generate()
3632 __ TailCallRuntime(Runtime::kNewObject); in Generate()
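
FastNewObjectStub sizes the allocation from the map: movzxbl reads the instance size as a byte-wide count of words, and leal(rbx, Operand(rbx, times_pointer_size, 0)) scales it to bytes. The slack-tracking branch then fills the unused tail with one-pointer filler so the instance can later be shrunk. The size math as a sketch:

  #include <cstdint>

  constexpr int kPointerSize = 8;  // x64

  // Maps store the instance size in words in a single byte; the stub's
  // leal is exactly this multiplication.
  inline int InstanceSizeInBytes(uint8_t instance_size_in_words) {
    return static_cast<int>(instance_size_in_words) * kPointerSize;
  }
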
3643 __ AssertFunction(rdi); in Generate()
3646 __ movp(rdx, rbp); in Generate()
3650 __ movp(rdx, Operand(rdx, StandardFrameConstants::kCallerFPOffset)); in Generate()
3654 __ cmpp(rdi, Operand(rdx, StandardFrameConstants::kFunctionOffset)); in Generate()
3655 __ j(equal, &ok); in Generate()
3656 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); in Generate()
3657 __ bind(&ok); in Generate()
3663 __ movp(rbx, Operand(rdx, StandardFrameConstants::kCallerFPOffset)); in Generate()
3664 __ Cmp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset), in Generate()
3666 __ j(not_equal, &no_rest_parameters, Label::kNear); in Generate()
3671 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); in Generate()
3672 __ LoadSharedFunctionInfoSpecialField( in Generate()
3674 __ SmiToInteger32( in Generate()
3676 __ subl(rax, rcx); in Generate()
3677 __ j(greater, &rest_parameters); in Generate()
3680 __ bind(&no_rest_parameters); in Generate()
3689 __ Allocate(JSArray::kSize, rax, rdx, rcx, &allocate, NO_ALLOCATION_FLAGS); in Generate()
3690 __ bind(&done_allocate); in Generate()
3693 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, rcx); in Generate()
3694 __ movp(FieldOperand(rax, JSArray::kMapOffset), rcx); in Generate()
3695 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex); in Generate()
3696 __ movp(FieldOperand(rax, JSArray::kPropertiesOffset), rcx); in Generate()
3697 __ movp(FieldOperand(rax, JSArray::kElementsOffset), rcx); in Generate()
3698 __ movp(FieldOperand(rax, JSArray::kLengthOffset), Immediate(0)); in Generate()
3700 __ Ret(); in Generate()
3703 __ bind(&allocate); in Generate()
3706 __ Push(Smi::FromInt(JSArray::kSize)); in Generate()
3707 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
3709 __ jmp(&done_allocate); in Generate()
3712 __ bind(&rest_parameters); in Generate()
3715 __ leap(rbx, Operand(rbx, rax, times_pointer_size, in Generate()
3729 __ leal(rcx, Operand(rax, times_pointer_size, in Generate()
3731 __ Allocate(rcx, rdx, r8, no_reg, &allocate, NO_ALLOCATION_FLAGS); in Generate()
3732 __ bind(&done_allocate); in Generate()
3735 __ Integer32ToSmi(rdi, rax); in Generate()
3738 __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex); in Generate()
3739 __ movp(FieldOperand(rdx, FixedArray::kMapOffset), rcx); in Generate()
3740 __ movp(FieldOperand(rdx, FixedArray::kLengthOffset), rdi); in Generate()
3743 __ Set(rcx, 0); in Generate()
3744 __ bind(&loop); in Generate()
3745 __ cmpl(rcx, rax); in Generate()
3746 __ j(equal, &done_loop, Label::kNear); in Generate()
3747 __ movp(kScratchRegister, Operand(rbx, 0 * kPointerSize)); in Generate()
3748 __ movp( in Generate()
3751 __ subp(rbx, Immediate(1 * kPointerSize)); in Generate()
3752 __ addl(rcx, Immediate(1)); in Generate()
3753 __ jmp(&loop); in Generate()
3754 __ bind(&done_loop); in Generate()
3758 __ leap(rax, in Generate()
3760 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, rcx); in Generate()
3761 __ movp(FieldOperand(rax, JSArray::kMapOffset), rcx); in Generate()
3762 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex); in Generate()
3763 __ movp(FieldOperand(rax, JSArray::kPropertiesOffset), rcx); in Generate()
3764 __ movp(FieldOperand(rax, JSArray::kElementsOffset), rdx); in Generate()
3765 __ movp(FieldOperand(rax, JSArray::kLengthOffset), rdi); in Generate()
3767 __ Ret(); in Generate()
3771 __ bind(&allocate); in Generate()
3772 __ cmpl(rcx, Immediate(kMaxRegularHeapObjectSize)); in Generate()
3773 __ j(greater, &too_big_for_new_space); in Generate()
3776 __ Integer32ToSmi(rax, rax); in Generate()
3777 __ Integer32ToSmi(rcx, rcx); in Generate()
3778 __ Push(rax); in Generate()
3779 __ Push(rbx); in Generate()
3780 __ Push(rcx); in Generate()
3781 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
3782 __ movp(rdx, rax); in Generate()
3783 __ Pop(rbx); in Generate()
3784 __ Pop(rax); in Generate()
3785 __ SmiToInteger32(rax, rax); in Generate()
3787 __ jmp(&done_allocate); in Generate()
3790 __ bind(&too_big_for_new_space); in Generate()
3791 __ PopReturnAddressTo(kScratchRegister); in Generate()
3792 __ Push(rdi); in Generate()
3793 __ PushReturnAddressFrom(kScratchRegister); in Generate()
3794 __ TailCallRuntime(Runtime::kNewRestParameter); in Generate()
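
The rest-parameter stub splits on subl(rax, rcx): only actual arguments beyond the formal parameter count end up in the rest array, and a non-positive difference takes the empty-JSArray path. Equivalent scalar logic:

  #include <algorithm>

  // Number of elements the rest array will receive.
  inline int RestParameterCount(int actual_argc, int formal_count) {
    return std::max(0, actual_argc - formal_count);
  }
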
3806 __ AssertFunction(rdi); in Generate()
3809 __ movp(r9, rbp); in Generate()
3813 __ movp(r9, Operand(r9, StandardFrameConstants::kCallerFPOffset)); in Generate()
3817 __ cmpp(rdi, Operand(r9, StandardFrameConstants::kFunctionOffset)); in Generate()
3818 __ j(equal, &ok); in Generate()
3819 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); in Generate()
3820 __ bind(&ok); in Generate()
3824 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); in Generate()
3825 __ LoadSharedFunctionInfoSpecialField( in Generate()
3827 __ leap(rdx, Operand(r9, rcx, times_pointer_size, in Generate()
3829 __ Integer32ToSmi(rcx, rcx); in Generate()
3841 __ SmiToInteger64(rbx, rcx); in Generate()
3846 __ movp(rax, Operand(r9, StandardFrameConstants::kCallerFPOffset)); in Generate()
3847 __ movp(r8, Operand(rax, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate()
3848 __ Cmp(r8, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); in Generate()
3849 __ j(equal, &adaptor_frame); in Generate()
3852 __ movp(r11, rbx); in Generate()
3853 __ jmp(&try_allocate, Label::kNear); in Generate()
3856 __ bind(&adaptor_frame); in Generate()
3857 __ SmiToInteger64( in Generate()
3859 __ leap(rdx, Operand(rax, r11, times_pointer_size, in Generate()
3865 __ cmpp(rbx, r11); in Generate()
3866 __ j(less_equal, &try_allocate, Label::kNear); in Generate()
3867 __ movp(rbx, r11); in Generate()
3869 __ bind(&try_allocate); in Generate()
3876 __ xorp(r8, r8); in Generate()
3877 __ testp(rbx, rbx); in Generate()
3878 __ j(zero, &no_parameter_map, Label::kNear); in Generate()
3879 __ leap(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize)); in Generate()
3880 __ bind(&no_parameter_map); in Generate()
3883 __ leap(r8, Operand(r8, r11, times_pointer_size, FixedArray::kHeaderSize)); in Generate()
3886 __ addp(r8, Immediate(JSSloppyArgumentsObject::kSize)); in Generate()
3889 __ Allocate(r8, rax, r9, no_reg, &runtime, NO_ALLOCATION_FLAGS); in Generate()
3895 __ movp(r9, NativeContextOperand()); in Generate()
3896 __ testp(rbx, rbx); in Generate()
3897 __ j(not_zero, &has_mapped_parameters, Label::kNear); in Generate()
3900 __ movp(r9, Operand(r9, Context::SlotOffset(kIndex))); in Generate()
3901 __ jmp(&instantiate, Label::kNear); in Generate()
3904 __ bind(&has_mapped_parameters); in Generate()
3905 __ movp(r9, Operand(r9, Context::SlotOffset(kAliasedIndex))); in Generate()
3906 __ bind(&instantiate); in Generate()
3912 __ movp(FieldOperand(rax, JSObject::kMapOffset), r9); in Generate()
3913 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex); in Generate()
3914 __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister); in Generate()
3915 __ movp(FieldOperand(rax, JSObject::kElementsOffset), kScratchRegister); in Generate()
3918 __ AssertNotSmi(rdi); in Generate()
3919 __ movp(FieldOperand(rax, JSSloppyArgumentsObject::kCalleeOffset), rdi); in Generate()
3923 __ Integer32ToSmi(r11, r11); in Generate()
3924 __ movp(FieldOperand(rax, JSSloppyArgumentsObject::kLengthOffset), r11); in Generate()
3929 __ leap(rdi, Operand(rax, JSSloppyArgumentsObject::kSize)); in Generate()
3930 __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi); in Generate()
3939 __ testp(rbx, rbx); in Generate()
3940 __ j(zero, &skip_parameter_map); in Generate()
3942 __ LoadRoot(kScratchRegister, Heap::kSloppyArgumentsElementsMapRootIndex); in Generate()
3944 __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister); in Generate()
3945 __ Integer64PlusConstantToSmi(r9, rbx, 2); in Generate()
3946 __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r9); in Generate()
3947 __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi); in Generate()
3948 __ leap(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize)); in Generate()
3949 __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9); in Generate()
3962 __ Integer32ToSmi(r9, rbx); in Generate()
3963 __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS)); in Generate()
3964 __ addp(r8, rcx); in Generate()
3965 __ subp(r8, r9); in Generate()
3966 __ movp(rcx, rdi); in Generate()
3967 __ leap(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize)); in Generate()
3968 __ SmiToInteger64(r9, r9); in Generate()
3973 __ jmp(&parameters_test, Label::kNear); in Generate()
3975 __ bind(&parameters_loop); in Generate()
3976 __ subp(r9, Immediate(1)); in Generate()
3977 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); in Generate()
3978 __ movp(FieldOperand(rcx, r9, times_pointer_size, kParameterMapHeaderSize), in Generate()
3980 __ movp(FieldOperand(rdi, r9, times_pointer_size, FixedArray::kHeaderSize), in Generate()
3982 __ SmiAddConstant(r8, r8, Smi::FromInt(1)); in Generate()
3983 __ bind(&parameters_test); in Generate()
3984 __ testp(r9, r9); in Generate()
3985 __ j(not_zero, &parameters_loop, Label::kNear); in Generate()
3987 __ bind(&skip_parameter_map); in Generate()
3992 __ Move(FieldOperand(rdi, FixedArray::kMapOffset), in Generate()
3994 __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r11); in Generate()
3997 __ movp(r8, rbx); in Generate()
3999 __ SmiToInteger64(r11, r11); in Generate()
4000 __ leap(kScratchRegister, Operand(r8, times_pointer_size, 0)); in Generate()
4001 __ subp(rdx, kScratchRegister); in Generate()
4002 __ jmp(&arguments_test, Label::kNear); in Generate()
4004 __ bind(&arguments_loop); in Generate()
4005 __ subp(rdx, Immediate(kPointerSize)); in Generate()
4006 __ movp(r9, Operand(rdx, 0)); in Generate()
4007 __ movp(FieldOperand(rdi, r8, in Generate()
4011 __ addp(r8, Immediate(1)); in Generate()
4013 __ bind(&arguments_test); in Generate()
4014 __ cmpp(r8, r11); in Generate()
4015 __ j(less, &arguments_loop, Label::kNear); in Generate()
4018 __ ret(0); in Generate()
4022 __ bind(&runtime); in Generate()
4023 __ Integer32ToSmi(r11, r11); in Generate()
4024 __ PopReturnAddressTo(rax); in Generate()
4025 __ Push(rdi); // Push function. in Generate()
4026 __ Push(rdx); // Push parameters pointer. in Generate()
4027 __ Push(r11); // Push parameter count. in Generate()
4028 __ PushReturnAddressFrom(rax); in Generate()
4029 __ TailCallRuntime(Runtime::kNewSloppyArguments); in Generate()
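
The parameters_loop above builds the aliasing half of a sloppy-arguments object: for each mapped parameter it stores a context slot index into the parameter map (so reads and writes flow through the context) and the hole into the backing store. A simplified sketch of the index assignment, treating the exact starting slot as an assumption; the stub itself iterates downward with r9:

  #include <cstddef>
  #include <vector>

  // map[i] is the context slot parameter i aliases; unmapped slots would
  // hold "the hole" so lookups fall through to the backing store.
  std::vector<int> BuildParameterMap(int mapped_count, int min_context_slots) {
    std::vector<int> map(static_cast<std::size_t>(mapped_count));
    for (int i = 0; i < mapped_count; ++i) {
      map[i] = min_context_slots + mapped_count - 1 - i;  // assumed order
    }
    return map;
  }
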
4040 __ AssertFunction(rdi); in Generate()
4043 __ movp(rdx, rbp); in Generate()
4047 __ movp(rdx, Operand(rdx, StandardFrameConstants::kCallerFPOffset)); in Generate()
4051 __ cmpp(rdi, Operand(rdx, StandardFrameConstants::kFunctionOffset)); in Generate()
4052 __ j(equal, &ok); in Generate()
4053 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); in Generate()
4054 __ bind(&ok); in Generate()
4059 __ movp(rbx, Operand(rdx, StandardFrameConstants::kCallerFPOffset)); in Generate()
4060 __ Cmp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset), in Generate()
4062 __ j(equal, &arguments_adaptor, Label::kNear); in Generate()
4064 __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); in Generate()
4065 __ LoadSharedFunctionInfoSpecialField( in Generate()
4067 __ leap(rbx, Operand(rdx, rax, times_pointer_size, in Generate()
4071 __ jmp(&arguments_done, Label::kNear); in Generate()
4072 __ bind(&arguments_adaptor); in Generate()
4074 __ SmiToInteger32( in Generate()
4076 __ leap(rbx, Operand(rbx, rax, times_pointer_size, in Generate()
4080 __ bind(&arguments_done); in Generate()
4092 __ leal(rcx, Operand(rax, times_pointer_size, JSStrictArgumentsObject::kSize + in Generate()
4094 __ Allocate(rcx, rdx, r8, no_reg, &allocate, NO_ALLOCATION_FLAGS); in Generate()
4095 __ bind(&done_allocate); in Generate()
4098 __ Integer32ToSmi(rdi, rax); in Generate()
4101 __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex); in Generate()
4102 __ movp(FieldOperand(rdx, FixedArray::kMapOffset), rcx); in Generate()
4103 __ movp(FieldOperand(rdx, FixedArray::kLengthOffset), rdi); in Generate()
4106 __ Set(rcx, 0); in Generate()
4107 __ bind(&loop); in Generate()
4108 __ cmpl(rcx, rax); in Generate()
4109 __ j(equal, &done_loop, Label::kNear); in Generate()
4110 __ movp(kScratchRegister, Operand(rbx, 0 * kPointerSize)); in Generate()
4111 __ movp( in Generate()
4114 __ subp(rbx, Immediate(1 * kPointerSize)); in Generate()
4115 __ addl(rcx, Immediate(1)); in Generate()
4116 __ jmp(&loop); in Generate()
4117 __ bind(&done_loop); in Generate()
4121 __ leap(rax, in Generate()
4123 __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, rcx); in Generate()
4124 __ movp(FieldOperand(rax, JSStrictArgumentsObject::kMapOffset), rcx); in Generate()
4125 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex); in Generate()
4126 __ movp(FieldOperand(rax, JSStrictArgumentsObject::kPropertiesOffset), rcx); in Generate()
4127 __ movp(FieldOperand(rax, JSStrictArgumentsObject::kElementsOffset), rdx); in Generate()
4128 __ movp(FieldOperand(rax, JSStrictArgumentsObject::kLengthOffset), rdi); in Generate()
4130 __ Ret(); in Generate()
4134 __ bind(&allocate); in Generate()
4135 __ cmpl(rcx, Immediate(kMaxRegularHeapObjectSize)); in Generate()
4136 __ j(greater, &too_big_for_new_space); in Generate()
4139 __ Integer32ToSmi(rax, rax); in Generate()
4140 __ Integer32ToSmi(rcx, rcx); in Generate()
4141 __ Push(rax); in Generate()
4142 __ Push(rbx); in Generate()
4143 __ Push(rcx); in Generate()
4144 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
4145 __ movp(rdx, rax); in Generate()
4146 __ Pop(rbx); in Generate()
4147 __ Pop(rax); in Generate()
4148 __ SmiToInteger32(rax, rax); in Generate()
4150 __ jmp(&done_allocate); in Generate()
4153 __ bind(&too_big_for_new_space); in Generate()
4154 __ PopReturnAddressTo(kScratchRegister); in Generate()
4155 __ Push(rdi); in Generate()
4156 __ PushReturnAddressFrom(kScratchRegister); in Generate()
4157 __ TailCallRuntime(Runtime::kNewStrictArguments); in Generate()
4175 __ EnterApiExitFrame(arg_stack_space); in PrepareCallApiFunction()
4213 __ Move(base_reg, next_address); in CallApiFunctionAndReturn()
4214 __ movp(prev_next_address_reg, Operand(base_reg, kNextOffset)); in CallApiFunctionAndReturn()
4215 __ movp(prev_limit_reg, Operand(base_reg, kLimitOffset)); in CallApiFunctionAndReturn()
4216 __ addl(Operand(base_reg, kLevelOffset), Immediate(1)); in CallApiFunctionAndReturn()
4220 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
4221 __ PrepareCallCFunction(1); in CallApiFunctionAndReturn()
4222 __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate)); in CallApiFunctionAndReturn()
4223 __ CallCFunction(ExternalReference::log_enter_external_function(isolate), in CallApiFunctionAndReturn()
4225 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
4230 __ Move(rax, ExternalReference::is_profiling_address(isolate)); in CallApiFunctionAndReturn()
4231 __ cmpb(Operand(rax, 0), Immediate(0)); in CallApiFunctionAndReturn()
4232 __ j(zero, &profiler_disabled); in CallApiFunctionAndReturn()
4235 __ Move(thunk_last_arg, function_address); in CallApiFunctionAndReturn()
4236 __ Move(rax, thunk_ref); in CallApiFunctionAndReturn()
4237 __ jmp(&end_profiler_check); in CallApiFunctionAndReturn()
4239 __ bind(&profiler_disabled); in CallApiFunctionAndReturn()
4241 __ Move(rax, function_address); in CallApiFunctionAndReturn()
4243 __ bind(&end_profiler_check); in CallApiFunctionAndReturn()
4246 __ call(rax); in CallApiFunctionAndReturn()
4250 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
4251 __ PrepareCallCFunction(1); in CallApiFunctionAndReturn()
4252 __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate)); in CallApiFunctionAndReturn()
4253 __ CallCFunction(ExternalReference::log_leave_external_function(isolate), in CallApiFunctionAndReturn()
4255 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
4259 __ movp(rax, return_value_operand); in CallApiFunctionAndReturn()
4260 __ bind(&prologue); in CallApiFunctionAndReturn()
4264 __ subl(Operand(base_reg, kLevelOffset), Immediate(1)); in CallApiFunctionAndReturn()
4265 __ movp(Operand(base_reg, kNextOffset), prev_next_address_reg); in CallApiFunctionAndReturn()
4266 __ cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset)); in CallApiFunctionAndReturn()
4267 __ j(not_equal, &delete_allocated_handles); in CallApiFunctionAndReturn()
4270 __ bind(&leave_exit_frame); in CallApiFunctionAndReturn()
4273 __ movp(rsi, *context_restore_operand); in CallApiFunctionAndReturn()
4276 __ movp(rbx, *stack_space_operand); in CallApiFunctionAndReturn()
4278 __ LeaveApiExitFrame(!restore_context); in CallApiFunctionAndReturn()
4281 __ Move(rdi, scheduled_exception_address); in CallApiFunctionAndReturn()
4282 __ Cmp(Operand(rdi, 0), factory->the_hole_value()); in CallApiFunctionAndReturn()
4283 __ j(not_equal, &promote_scheduled_exception); in CallApiFunctionAndReturn()
4291 __ JumpIfSmi(return_value, &ok, Label::kNear); in CallApiFunctionAndReturn()
4292 __ movp(map, FieldOperand(return_value, HeapObject::kMapOffset)); in CallApiFunctionAndReturn()
4294 __ CmpInstanceType(map, LAST_NAME_TYPE); in CallApiFunctionAndReturn()
4295 __ j(below_equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
4297 __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE); in CallApiFunctionAndReturn()
4298 __ j(above_equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
4300 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); in CallApiFunctionAndReturn()
4301 __ j(equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
4303 __ CompareRoot(return_value, Heap::kUndefinedValueRootIndex); in CallApiFunctionAndReturn()
4304 __ j(equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
4306 __ CompareRoot(return_value, Heap::kTrueValueRootIndex); in CallApiFunctionAndReturn()
4307 __ j(equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
4309 __ CompareRoot(return_value, Heap::kFalseValueRootIndex); in CallApiFunctionAndReturn()
4310 __ j(equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
4312 __ CompareRoot(return_value, Heap::kNullValueRootIndex); in CallApiFunctionAndReturn()
4313 __ j(equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
4315 __ Abort(kAPICallReturnedInvalidObject); in CallApiFunctionAndReturn()
4317 __ bind(&ok); in CallApiFunctionAndReturn()
4322 __ PopReturnAddressTo(rcx); in CallApiFunctionAndReturn()
4323 __ addq(rsp, rbx); in CallApiFunctionAndReturn()
4324 __ jmp(rcx); in CallApiFunctionAndReturn()
4326 __ ret(stack_space * kPointerSize); in CallApiFunctionAndReturn()
4330 __ bind(&promote_scheduled_exception); in CallApiFunctionAndReturn()
4331 __ TailCallRuntime(Runtime::kPromoteScheduledException); in CallApiFunctionAndReturn()
4334 __ bind(&delete_allocated_handles); in CallApiFunctionAndReturn()
4335 __ movp(Operand(base_reg, kLimitOffset), prev_limit_reg); in CallApiFunctionAndReturn()
4336 __ movp(prev_limit_reg, rax); in CallApiFunctionAndReturn()
4337 __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate)); in CallApiFunctionAndReturn()
4338 __ LoadAddress(rax, in CallApiFunctionAndReturn()
4340 __ call(rax); in CallApiFunctionAndReturn()
4341 __ movp(rax, prev_limit_reg); in CallApiFunctionAndReturn()
4342 __ jmp(&leave_exit_frame); in CallApiFunctionAndReturn()
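
CallApiFunctionAndReturn open-codes a HandleScope around the C++ callback: it saves next/limit and bumps level on entry, then on the way out restores next, drops level, and takes the delete_allocated_handles slow path only if the callee grew the scope past the saved limit. The bookkeeping as a plain-struct sketch (not V8's HandleScopeData):

  struct HandleScopeData {
    void** next;
    void** limit;
    int level;
  };

  template <typename DeleteExtensions>
  void CloseScope(HandleScopeData* data, void** prev_next, void** prev_limit,
                  DeleteExtensions delete_extensions) {
    --data->level;           // subl(Operand(base_reg, kLevelOffset), 1)
    data->next = prev_next;  // movp(Operand(base_reg, kNextOffset), ...)
    if (data->limit != prev_limit) {
      data->limit = prev_limit;  // the &delete_allocated_handles path
      delete_extensions();       // frees the extension blocks
    }
  }
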
4379 __ PopReturnAddressTo(return_address); in Generate()
4382 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate()
4385 __ Push(context); in Generate()
4388 __ Push(callee); in Generate()
4391 __ Push(call_data); in Generate()
4394 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); in Generate()
4397 __ Push(scratch); in Generate()
4399 __ Push(scratch); in Generate()
4401 __ Move(scratch, ExternalReference::isolate_address(masm->isolate())); in Generate()
4402 __ Push(scratch); in Generate()
4404 __ Push(holder); in Generate()
4406 __ movp(scratch, rsp); in Generate()
4408 __ PushReturnAddressFrom(return_address); in Generate()
4412 __ movp(context, FieldOperand(callee, JSFunction::kContextOffset)); in Generate()
4423 __ movp(StackSpaceOperand(0), scratch); in Generate()
4424 __ addp(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize)); in Generate()
4426 __ movp(StackSpaceOperand(1), scratch); in Generate()
4428 __ Set(StackSpaceOperand(2), argc); in Generate()
4443 __ leap(arguments_arg, StackSpaceOperand(0)); in Generate()
4495 __ PopReturnAddressTo(scratch); in Generate()
4496 __ Push(receiver); in Generate()
4497 __ Push(FieldOperand(callback, AccessorInfo::kDataOffset)); in Generate()
4498 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); in Generate()
4499 __ Push(kScratchRegister); // return value in Generate()
4500 __ Push(kScratchRegister); // return value default in Generate()
4501 __ PushAddress(ExternalReference::isolate_address(isolate())); in Generate()
4502 __ Push(holder); in Generate()
4503 __ Push(Smi::kZero); // should_throw_on_error -> false in Generate()
4504 __ Push(FieldOperand(callback, AccessorInfo::kNameOffset)); in Generate()
4505 __ PushReturnAddressFrom(scratch); in Generate()
4514 __ leap(scratch, Operand(rsp, 2 * kPointerSize)); in Generate()
4520 __ movp(info_object, scratch); in Generate()
4522 __ leap(name_arg, Operand(scratch, -kPointerSize)); in Generate()
4525 __ leap(accessor_info_arg, info_object); in Generate()
4534 __ movp(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset)); in Generate()
4535 __ movp(api_function_address, in Generate()
4546 #undef __