1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_X64
6
7 #include "src/code-stubs.h"
8 #include "src/api-arguments.h"
9 #include "src/bootstrapper.h"
10 #include "src/codegen.h"
11 #include "src/ic/handler-compiler.h"
12 #include "src/ic/ic.h"
13 #include "src/ic/stub-cache.h"
14 #include "src/isolate.h"
15 #include "src/regexp/jsregexp.h"
16 #include "src/regexp/regexp-macro-assembler.h"
17 #include "src/runtime/runtime.h"
18 #include "src/x64/code-stubs-x64.h"
19
20 namespace v8 {
21 namespace internal {
22
23 #define __ ACCESS_MASM(masm)
24
25 void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
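  // Rearrange the stack for the Runtime::kNewArray call: pop the return
  // address, store the constructor (rdi) into the slot just past the rax
  // arguments, push the constructor and the allocation site (rbx), restore
  // the return address, and account for the three extra values in the
  // argument count.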
26 __ popq(rcx);
27 __ movq(MemOperand(rsp, rax, times_8, 0), rdi);
28 __ pushq(rdi);
29 __ pushq(rbx);
30 __ pushq(rcx);
31 __ addq(rax, Immediate(3));
32 __ TailCallRuntime(Runtime::kNewArray);
33 }
34
35 void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
36 Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
37 descriptor->Initialize(rax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
38 }
39
40 void FastFunctionBindStub::InitializeDescriptor(
41 CodeStubDescriptor* descriptor) {
42 Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;
43 descriptor->Initialize(rax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
44 }
45
46 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
47 ExternalReference miss) {
48 // Update the static counter each time a new code stub is generated.
49 isolate()->counters()->code_stubs()->Increment();
50
51 CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
52 int param_count = descriptor.GetRegisterParameterCount();
53 {
54 // Call the runtime system in a fresh internal frame.
55 FrameScope scope(masm, StackFrame::INTERNAL);
56 DCHECK(param_count == 0 ||
57 rax.is(descriptor.GetRegisterParameter(param_count - 1)));
58 // Push arguments
59 for (int i = 0; i < param_count; ++i) {
60 __ Push(descriptor.GetRegisterParameter(i));
61 }
62 __ CallExternalReference(miss, param_count);
63 }
64
65 __ Ret();
66 }
67
68
69 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
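  // Spill the caller-saved registers (and the XMM registers when
  // save_doubles() is set), then call the C++ store buffer overflow handler
  // with the isolate as its only argument.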
70 __ PushCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
71 const int argument_count = 1;
72 __ PrepareCallCFunction(argument_count);
73 __ LoadAddress(arg_reg_1,
74 ExternalReference::isolate_address(isolate()));
75
76 AllowExternalCallThatCantCauseGC scope(masm);
77 __ CallCFunction(
78 ExternalReference::store_buffer_overflow_function(isolate()),
79 argument_count);
80 __ PopCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
81 __ ret(0);
82 }
83
84
85 class FloatingPointHelper : public AllStatic {
86 public:
87 enum ConvertUndefined {
88 CONVERT_UNDEFINED_TO_ZERO,
89 BAILOUT_ON_UNDEFINED
90 };
91 // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
92 // If the operands are not both numbers, jump to not_numbers.
93 // Leaves rdx and rax unchanged. SmiOperands assumes both are smis.
94 // NumberOperands assumes both are smis or heap numbers.
95 static void LoadSSE2UnknownOperands(MacroAssembler* masm,
96 Label* not_numbers);
97 };
98
99
100 void DoubleToIStub::Generate(MacroAssembler* masm) {
101 Register input_reg = this->source();
102 Register final_result_reg = this->destination();
103 DCHECK(is_truncating());
104
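  // Truncate the double stored at [source() + offset()] into destination().
  // Inputs whose unbiased exponent is below the mantissa width fit in 64 bits
  // and go through Cvttsd2siq; larger inputs take the low mantissa word,
  // shift it into position and negate it if the sign bit of the input was
  // set.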
105 Label check_negative, process_64_bits, done;
106
107 int double_offset = offset();
108
109 // Account for return address and saved regs if input is rsp.
110 if (input_reg.is(rsp)) double_offset += 3 * kRegisterSize;
111
112 MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
113 MemOperand exponent_operand(MemOperand(input_reg,
114 double_offset + kDoubleSize / 2));
115
116 Register scratch1;
117 Register scratch_candidates[3] = { rbx, rdx, rdi };
118 for (int i = 0; i < 3; i++) {
119 scratch1 = scratch_candidates[i];
120 if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
121 }
122
123 // Since we must use rcx for shifts below, use some other register (rax)
124 // to calculate the result if rcx is the requested return register.
125 Register result_reg = final_result_reg.is(rcx) ? rax : final_result_reg;
126 // Save rcx if it isn't the return register and therefore volatile, or if it
127 // is the return register, then save the temp register we use in its stead
128 // for the result.
129 Register save_reg = final_result_reg.is(rcx) ? rax : rcx;
130 __ pushq(scratch1);
131 __ pushq(save_reg);
132
133 bool stash_exponent_copy = !input_reg.is(rsp);
134 __ movl(scratch1, mantissa_operand);
135 __ Movsd(kScratchDoubleReg, mantissa_operand);
136 __ movl(rcx, exponent_operand);
137 if (stash_exponent_copy) __ pushq(rcx);
138
139 __ andl(rcx, Immediate(HeapNumber::kExponentMask));
140 __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
141 __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
142 __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
143 __ j(below, &process_64_bits);
144
145 // Result is entirely in lower 32-bits of mantissa
146 int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
147 __ subl(rcx, Immediate(delta));
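  // rcx now holds the left shift that positions the mantissa's low word; a
  // shift above 31 would clear the low 32 bits entirely, so the result stays
  // zero in that case.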
148 __ xorl(result_reg, result_reg);
149 __ cmpl(rcx, Immediate(31));
150 __ j(above, &done);
151 __ shll_cl(scratch1);
152 __ jmp(&check_negative);
153
154 __ bind(&process_64_bits);
155 __ Cvttsd2siq(result_reg, kScratchDoubleReg);
156 __ jmp(&done, Label::kNear);
157
158 // If the double was negative, negate the integer result.
159 __ bind(&check_negative);
160 __ movl(result_reg, scratch1);
161 __ negl(result_reg);
162 if (stash_exponent_copy) {
163 __ cmpl(MemOperand(rsp, 0), Immediate(0));
164 } else {
165 __ cmpl(exponent_operand, Immediate(0));
166 }
167 __ cmovl(greater, result_reg, scratch1);
168
169 // Restore registers
170 __ bind(&done);
171 if (stash_exponent_copy) {
172 __ addp(rsp, Immediate(kDoubleSize));
173 }
174 if (!final_result_reg.is(result_reg)) {
175 DCHECK(final_result_reg.is(rcx));
176 __ movl(final_result_reg, result_reg);
177 }
178 __ popq(save_reg);
179 __ popq(scratch1);
180 __ ret(0);
181 }
182
183
184 void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
185 Label* not_numbers) {
186 Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
187 // Load operand in rdx into xmm0, or branch to not_numbers.
188 __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
189 __ JumpIfSmi(rdx, &load_smi_rdx);
190 __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
191 __ j(not_equal, not_numbers); // Argument in rdx is not a number.
192 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
193 // Load operand in rax into xmm1, or branch to not_numbers.
194 __ JumpIfSmi(rax, &load_smi_rax);
195
196 __ bind(&load_nonsmi_rax);
197 __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
198 __ j(not_equal, not_numbers);
199 __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
200 __ jmp(&done);
201
202 __ bind(&load_smi_rdx);
203 __ SmiToInteger32(kScratchRegister, rdx);
204 __ Cvtlsi2sd(xmm0, kScratchRegister);
205 __ JumpIfNotSmi(rax, &load_nonsmi_rax);
206
207 __ bind(&load_smi_rax);
208 __ SmiToInteger32(kScratchRegister, rax);
209 __ Cvtlsi2sd(xmm1, kScratchRegister);
210 __ bind(&done);
211 }
212
213
214 void MathPowStub::Generate(MacroAssembler* masm) {
215 const Register exponent = MathPowTaggedDescriptor::exponent();
216 DCHECK(exponent.is(rdx));
217 const Register scratch = rcx;
218 const XMMRegister double_result = xmm3;
219 const XMMRegister double_base = xmm2;
220 const XMMRegister double_exponent = xmm1;
221 const XMMRegister double_scratch = xmm4;
222
223 Label call_runtime, done, exponent_not_smi, int_exponent;
224
225 // Save 1 in double_result - we need this several times later on.
226 __ movp(scratch, Immediate(1));
227 __ Cvtlsi2sd(double_result, scratch);
228
229 if (exponent_type() == TAGGED) {
230 __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
231 __ SmiToInteger32(exponent, exponent);
232 __ jmp(&int_exponent);
233
234 __ bind(&exponent_not_smi);
235 __ Movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
236 }
237
238 if (exponent_type() != INTEGER) {
239 Label fast_power, try_arithmetic_simplification;
240 // Detect integer exponents stored as double.
241 __ DoubleToI(exponent, double_exponent, double_scratch,
242 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification,
243 &try_arithmetic_simplification,
244 &try_arithmetic_simplification);
245 __ jmp(&int_exponent);
246
247 __ bind(&try_arithmetic_simplification);
248 __ Cvttsd2si(exponent, double_exponent);
249 // Skip to runtime if possibly NaN (indicated by the indefinite integer).
250 __ cmpl(exponent, Immediate(0x1));
251 __ j(overflow, &call_runtime);
252
253 // Using FPU instructions to calculate power.
254 Label fast_power_failed;
255 __ bind(&fast_power);
256 __ fnclex(); // Clear flags to catch exceptions later.
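  // Compute B^E as 2^(E * log2(B)). F2XM1 only accepts arguments in [-1, 1],
  // so the product is split into an integer part (applied with FSCALE) and a
  // fractional part (applied with F2XM1). For example, 3^4 = 2^(4 * log2(3))
  // = 2^6.3399... = 81.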
257 // Transfer (B)ase and (E)xponent onto the FPU register stack.
258 __ subp(rsp, Immediate(kDoubleSize));
259 __ Movsd(Operand(rsp, 0), double_exponent);
260 __ fld_d(Operand(rsp, 0)); // E
261 __ Movsd(Operand(rsp, 0), double_base);
262 __ fld_d(Operand(rsp, 0)); // B, E
263
264 // Exponent is in st(1) and base is in st(0)
265 // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
266 // FYL2X calculates st(1) * log2(st(0))
267 __ fyl2x(); // X
268 __ fld(0); // X, X
269 __ frndint(); // rnd(X), X
270 __ fsub(1); // rnd(X), X-rnd(X)
271 __ fxch(1); // X - rnd(X), rnd(X)
272 // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
273 __ f2xm1(); // 2^(X-rnd(X)) - 1, rnd(X)
274 __ fld1(); // 1, 2^(X-rnd(X)) - 1, rnd(X)
275 __ faddp(1); // 2^(X-rnd(X)), rnd(X)
276 // FSCALE calculates st(0) * 2^st(1)
277 __ fscale(); // 2^X, rnd(X)
278 __ fstp(1);
279 // Bail out to runtime in case of exceptions in the status word.
280 __ fnstsw_ax();
281 __ testb(rax, Immediate(0x5F)); // Check for all but precision exception.
282 __ j(not_zero, &fast_power_failed, Label::kNear);
283 __ fstp_d(Operand(rsp, 0));
284 __ Movsd(double_result, Operand(rsp, 0));
285 __ addp(rsp, Immediate(kDoubleSize));
286 __ jmp(&done);
287
288 __ bind(&fast_power_failed);
289 __ fninit();
290 __ addp(rsp, Immediate(kDoubleSize));
291 __ jmp(&call_runtime);
292 }
293
294 // Calculate power with integer exponent.
295 __ bind(&int_exponent);
296 const XMMRegister double_scratch2 = double_exponent;
297 // Back up exponent as we need to check if exponent is negative later.
298 __ movp(scratch, exponent); // Back up exponent.
299 __ Movsd(double_scratch, double_base); // Back up base.
300 __ Movsd(double_scratch2, double_result); // Load double_exponent with 1.
301
302 // Get absolute value of exponent.
303 Label no_neg, while_true, while_false;
304 __ testl(scratch, scratch);
305 __ j(positive, &no_neg, Label::kNear);
306 __ negl(scratch);
307 __ bind(&no_neg);
308
309 __ j(zero, &while_false, Label::kNear);
310 __ shrl(scratch, Immediate(1));
311 // Above condition means CF==0 && ZF==0. This means that the
312 // bit that has been shifted out is 0 and the result is not 0.
313 __ j(above, &while_true, Label::kNear);
314 __ Movsd(double_result, double_scratch);
315 __ j(zero, &while_false, Label::kNear);
316
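  // Exponentiation by squaring: square double_scratch on every iteration and
  // multiply it into double_result whenever the exponent bit just shifted out
  // was set.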
317 __ bind(&while_true);
318 __ shrl(scratch, Immediate(1));
319 __ Mulsd(double_scratch, double_scratch);
320 __ j(above, &while_true, Label::kNear);
321 __ Mulsd(double_result, double_scratch);
322 __ j(not_zero, &while_true);
323
324 __ bind(&while_false);
325 // If the exponent is negative, return 1/result.
326 __ testl(exponent, exponent);
327 __ j(greater, &done);
328 __ Divsd(double_scratch2, double_result);
329 __ Movsd(double_result, double_scratch2);
330 // Test whether result is zero. Bail out to check for subnormal result.
331 // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
332 __ Xorpd(double_scratch2, double_scratch2);
333 __ Ucomisd(double_scratch2, double_result);
334 // double_exponent aliased as double_scratch2 has already been overwritten
335 // and may not have contained the exponent value in the first place when the
336 // input was a smi. We reset it with exponent value before bailing out.
337 __ j(not_equal, &done);
338 __ Cvtlsi2sd(double_exponent, exponent);
339
340 // Returning or bailing out.
341 __ bind(&call_runtime);
342 // Move base to the correct argument register. Exponent is already in xmm1.
343 __ Movsd(xmm0, double_base);
344 DCHECK(double_exponent.is(xmm1));
345 {
346 AllowExternalCallThatCantCauseGC scope(masm);
347 __ PrepareCallCFunction(2);
348 __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
349 2);
350 }
351 // Return value is in xmm0.
352 __ Movsd(double_result, xmm0);
353
354 __ bind(&done);
355 __ ret(0);
356 }
357
358
359 void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
360 Label miss;
361 Register receiver = LoadDescriptor::ReceiverRegister();
362 // Ensure that the vector and slot registers won't be clobbered before
363 // calling the miss handler.
364 DCHECK(!AreAliased(r8, r9, LoadWithVectorDescriptor::VectorRegister(),
365 LoadDescriptor::SlotRegister()));
366
367 NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, r8,
368 r9, &miss);
369 __ bind(&miss);
370 PropertyAccessCompiler::TailCallBuiltin(
371 masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
372 }
373
374
375 void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
376 // Return address is on the stack.
377 Label miss;
378
379 Register receiver = LoadDescriptor::ReceiverRegister();
380 Register index = LoadDescriptor::NameRegister();
381 Register scratch = rdi;
382 Register result = rax;
383 DCHECK(!scratch.is(receiver) && !scratch.is(index));
384 DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
385 result.is(LoadDescriptor::SlotRegister()));
386
387 // StringCharAtGenerator doesn't use the result register until it's passed
388 // the different miss possibilities. If it did, we would have a conflict
389 // when FLAG_vector_ics is true.
390 StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
391 &miss, // When not a string.
392 &miss, // When not a number.
393 &miss, // When index out of range.
394 RECEIVER_IS_STRING);
395 char_at_generator.GenerateFast(masm);
396 __ ret(0);
397
398 StubRuntimeCallHelper call_helper;
399 char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);
400
401 __ bind(&miss);
402 PropertyAccessCompiler::TailCallBuiltin(
403 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
404 }
405
406
407 void RegExpExecStub::Generate(MacroAssembler* masm) {
408 // Just jump directly to runtime if native RegExp is not selected at compile
409 // time, or if the regexp entry in generated code has been turned off by a
410 // runtime switch or at compilation.
411 #ifdef V8_INTERPRETED_REGEXP
412 __ TailCallRuntime(Runtime::kRegExpExec);
413 #else // V8_INTERPRETED_REGEXP
414
415 // Stack frame on entry.
416 // rsp[0] : return address
417 // rsp[8] : last_match_info (expected JSArray)
418 // rsp[16] : previous index
419 // rsp[24] : subject string
420 // rsp[32] : JSRegExp object
421
422 enum RegExpExecStubArgumentIndices {
423 JS_REG_EXP_OBJECT_ARGUMENT_INDEX,
424 SUBJECT_STRING_ARGUMENT_INDEX,
425 PREVIOUS_INDEX_ARGUMENT_INDEX,
426 LAST_MATCH_INFO_ARGUMENT_INDEX,
427 REG_EXP_EXEC_ARGUMENT_COUNT
428 };
429
430 StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT,
431 ARGUMENTS_DONT_CONTAIN_RECEIVER);
432 Label runtime;
433 // Ensure that a RegExp stack is allocated.
434 ExternalReference address_of_regexp_stack_memory_address =
435 ExternalReference::address_of_regexp_stack_memory_address(isolate());
436 ExternalReference address_of_regexp_stack_memory_size =
437 ExternalReference::address_of_regexp_stack_memory_size(isolate());
438 __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
439 __ testp(kScratchRegister, kScratchRegister);
440 __ j(zero, &runtime);
441
442 // Check that the first argument is a JSRegExp object.
443 __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
444 __ JumpIfSmi(rax, &runtime);
445 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
446 __ j(not_equal, &runtime);
447
448 // Check that the RegExp has been compiled (data contains a fixed array).
449 __ movp(rax, FieldOperand(rax, JSRegExp::kDataOffset));
450 if (FLAG_debug_code) {
451 Condition is_smi = masm->CheckSmi(rax);
452 __ Check(NegateCondition(is_smi),
453 kUnexpectedTypeForRegExpDataFixedArrayExpected);
454 __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
455 __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
456 }
457
458 // rax: RegExp data (FixedArray)
459 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
460 __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset));
461 __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
462 __ j(not_equal, &runtime);
463
464 // rax: RegExp data (FixedArray)
465 // Check that the number of captures fits in the static offsets vector buffer.
466 __ SmiToInteger32(rdx,
467 FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset));
468 // Check (number_of_captures + 1) * 2 <= offsets vector size
469 // Or number_of_captures <= offsets vector size / 2 - 1
470 STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
471 __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1));
472 __ j(above, &runtime);
473
474 // Reset offset for possibly sliced string.
475 __ Set(r14, 0);
476 __ movp(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
477 __ JumpIfSmi(rdi, &runtime);
478 __ movp(r15, rdi); // Make a copy of the original subject string.
479 // rax: RegExp data (FixedArray)
480 // rdi: subject string
481 // r15: subject string
482 // Handle subject string according to its encoding and representation:
483 // (1) Sequential two byte? If yes, go to (9).
484 // (2) Sequential one byte? If yes, go to (5).
485 // (3) Sequential or cons? If not, go to (6).
486 // (4) Cons string. If the string is flat, replace subject with first string
487 // and go to (1). Otherwise bail out to runtime.
488 // (5) One byte sequential. Load regexp code for one byte.
489 // (E) Carry on.
490 /// [...]
491
492 // Deferred code at the end of the stub:
493 // (6) Long external string? If not, go to (10).
494 // (7) External string. Make it, offset-wise, look like a sequential string.
495 // (8) Is the external string one byte? If yes, go to (5).
496 // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
497 // (10) Short external string or not a string? If yes, bail out to runtime.
498 // (11) Sliced string. Replace subject with parent. Go to (1).
499
500 Label seq_one_byte_string /* 5 */, seq_two_byte_string /* 9 */,
501 external_string /* 7 */, check_underlying /* 1 */,
502 not_seq_nor_cons /* 6 */, check_code /* E */, not_long_external /* 10 */;
503
504 __ bind(&check_underlying);
505 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
506 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
507
508 // (1) Sequential two byte? If yes, go to (9).
509 __ andb(rbx, Immediate(kIsNotStringMask |
510 kStringRepresentationMask |
511 kStringEncodingMask |
512 kShortExternalStringMask));
513 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
514 __ j(zero, &seq_two_byte_string); // Go to (9).
515
516 // (2) Sequential one byte? If yes, go to (5).
517 // Any other sequential string must be one byte.
518 __ andb(rbx, Immediate(kIsNotStringMask |
519 kStringRepresentationMask |
520 kShortExternalStringMask));
521 __ j(zero, &seq_one_byte_string, Label::kNear); // Go to (5).
522
523 // (3) Sequential or cons? If not, go to (6).
524 // We check whether the subject string is a cons, since sequential strings
525 // have already been covered.
526 STATIC_ASSERT(kConsStringTag < kExternalStringTag);
527 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
528 STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
529 STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
530 __ cmpp(rbx, Immediate(kExternalStringTag));
531 __ j(greater_equal, ¬_seq_nor_cons); // Go to (6).
532
533 // (4) Cons string. Check that it's flat.
534 // Replace subject with first string and reload instance type.
535 __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset),
536 Heap::kempty_stringRootIndex);
537 __ j(not_equal, &runtime);
538 __ movp(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
539 __ jmp(&check_underlying);
540
541 // (5) One byte sequential. Load regexp code for one byte.
542 __ bind(&seq_one_byte_string);
543 // rax: RegExp data (FixedArray)
544 __ movp(r11, FieldOperand(rax, JSRegExp::kDataOneByteCodeOffset));
545 __ Set(rcx, 1); // Type is one byte.
546
547 // (E) Carry on. String handling is done.
548 __ bind(&check_code);
549 // r11: irregexp code
550 // Check that the irregexp code has been generated for the actual string
551 // encoding. If it has, the field contains a code object; otherwise it
552 // contains a smi (code flushing support).
553 __ JumpIfSmi(r11, &runtime);
554
555 // rdi: sequential subject string (or look-alike, external string)
556 // r15: original subject string
557 // rcx: encoding of subject string (1 if one_byte, 0 if two_byte);
558 // r11: code
559 // Load used arguments before starting to push arguments for call to native
560 // RegExp code to avoid handling changing stack height.
561 // We have to use r15 instead of rdi to load the length because rdi might
562 // have been only made to look like a sequential string when it actually
563 // is an external string.
564 __ movp(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX));
565 __ JumpIfNotSmi(rbx, &runtime);
566 __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset));
567 __ j(above_equal, &runtime);
568 __ SmiToInteger64(rbx, rbx);
569
570 // rdi: subject string
571 // rbx: previous index
572 // rcx: encoding of subject string (1 if one_byte 0 if two_byte);
573 // r11: code
574 // All checks done. Now push arguments for native regexp code.
575 Counters* counters = isolate()->counters();
576 __ IncrementCounter(counters->regexp_entry_native(), 1);
577
578 // Isolates: note we add an additional parameter here (isolate pointer).
579 static const int kRegExpExecuteArguments = 9;
580 int argument_slots_on_stack =
581 masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
582 __ EnterApiExitFrame(argument_slots_on_stack);
583
584 // Argument 9: Pass current isolate address.
585 __ LoadAddress(kScratchRegister,
586 ExternalReference::isolate_address(isolate()));
587 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize),
588 kScratchRegister);
589
590 // Argument 8: Indicate that this is a direct call from JavaScript.
591 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize),
592 Immediate(1));
593
594 // Argument 7: Start (high end) of backtracking stack memory area.
595 __ Move(kScratchRegister, address_of_regexp_stack_memory_address);
596 __ movp(r9, Operand(kScratchRegister, 0));
597 __ Move(kScratchRegister, address_of_regexp_stack_memory_size);
598 __ addp(r9, Operand(kScratchRegister, 0));
599 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9);
600
601 // Argument 6: Set the number of capture registers to zero to force global
602 // regexps to behave as non-global. This does not affect non-global regexps.
603 // Argument 6 is passed in r9 on Linux and on the stack on Windows.
604 #ifdef _WIN64
605 __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize),
606 Immediate(0));
607 #else
608 __ Set(r9, 0);
609 #endif
610
611 // Argument 5: static offsets vector buffer.
612 __ LoadAddress(
613 r8, ExternalReference::address_of_static_offsets_vector(isolate()));
614 // Argument 5 passed in r8 on Linux and on the stack on Windows.
615 #ifdef _WIN64
616 __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8);
617 #endif
618
619 // rdi: subject string
620 // rbx: previous index
621 // rcx: encoding of subject string (1 if one_byte 0 if two_byte);
622 // r11: code
623 // r14: slice offset
624 // r15: original subject string
625
626 // Argument 2: Previous index.
627 __ movp(arg_reg_2, rbx);
628
629 // Argument 4: End of string data
630 // Argument 3: Start of string data
631 Label setup_two_byte, setup_rest, got_length, length_not_from_slice;
632 // Prepare start and end index of the input.
633 // Load the length from the original sliced string if that is the case.
634 __ addp(rbx, r14);
635 __ SmiToInteger32(arg_reg_3, FieldOperand(r15, String::kLengthOffset));
636 __ addp(r14, arg_reg_3); // Using arg3 as scratch.
637
638 // rbx: start index of the input
639 // r14: end index of the input
640 // r15: original subject string
641 __ testb(rcx, rcx); // Last use of rcx as encoding of subject string.
642 __ j(zero, &setup_two_byte, Label::kNear);
643 __ leap(arg_reg_4,
644 FieldOperand(rdi, r14, times_1, SeqOneByteString::kHeaderSize));
645 __ leap(arg_reg_3,
646 FieldOperand(rdi, rbx, times_1, SeqOneByteString::kHeaderSize));
647 __ jmp(&setup_rest, Label::kNear);
648 __ bind(&setup_two_byte);
649 __ leap(arg_reg_4,
650 FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize));
651 __ leap(arg_reg_3,
652 FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));
653 __ bind(&setup_rest);
654
655 // Argument 1: Original subject string.
656 // The original subject is in the previous stack frame. Therefore we have to
657 // use rbp, which points exactly to one pointer size below the previous rsp.
658 // (Because creating a new stack frame pushes the previous rbp onto the stack
659 // and thereby moves up rsp by one kPointerSize.)
660 __ movp(arg_reg_1, r15);
661
662 // Locate the code entry and call it.
663 __ addp(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
664 __ call(r11);
665
666 __ LeaveApiExitFrame(true);
667
668 // Check the result.
669 Label success;
670 Label exception;
671 __ cmpl(rax, Immediate(1));
672 // We expect exactly one result since we force the called regexp to behave
673 // as non-global.
674 __ j(equal, &success, Label::kNear);
675 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
676 __ j(equal, &exception);
677 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
678 // If none of the above, it can only be retry.
679 // Handle that in the runtime system.
680 __ j(not_equal, &runtime);
681
682 // For failure return null.
683 __ LoadRoot(rax, Heap::kNullValueRootIndex);
684 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);
685
686 // Load RegExp data.
687 __ bind(&success);
688 __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
689 __ movp(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
690 __ SmiToInteger32(rax,
691 FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
692 // Calculate number of capture registers (number_of_captures + 1) * 2.
693 __ leal(rdx, Operand(rax, rax, times_1, 2));
694
695 // rdx: Number of capture registers
696 // Check that the last match info is a FixedArray.
697 __ movp(rbx, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX));
698 __ JumpIfSmi(rbx, &runtime);
699 // Check that the object has fast elements.
700 __ movp(rax, FieldOperand(rbx, HeapObject::kMapOffset));
701 __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex);
702 __ j(not_equal, &runtime);
703 // Check that the last match info has space for the capture registers and the
704 // additional information. Ensure no overflow in add.
705 STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
706 __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
707 __ subl(rax, Immediate(RegExpMatchInfo::kLastMatchOverhead));
708 __ cmpl(rdx, rax);
709 __ j(greater, &runtime);
710
711 // rbx: last_match_info (FixedArray)
712 // rdx: number of capture registers
713 // Store the capture count.
714 __ Integer32ToSmi(kScratchRegister, rdx);
715 __ movp(FieldOperand(rbx, RegExpMatchInfo::kNumberOfCapturesOffset),
716 kScratchRegister);
717 // Store last subject and last input.
718 __ movp(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
719 __ movp(FieldOperand(rbx, RegExpMatchInfo::kLastSubjectOffset), rax);
720 __ movp(rcx, rax);
721 __ RecordWriteField(rbx, RegExpMatchInfo::kLastSubjectOffset, rax, rdi,
722 kDontSaveFPRegs);
723 __ movp(rax, rcx);
724 __ movp(FieldOperand(rbx, RegExpMatchInfo::kLastInputOffset), rax);
725 __ RecordWriteField(rbx, RegExpMatchInfo::kLastInputOffset, rax, rdi,
726 kDontSaveFPRegs);
727
728 // Get the static offsets vector filled by the native regexp code.
729 __ LoadAddress(
730 rcx, ExternalReference::address_of_static_offsets_vector(isolate()));
731
732 // rbx: last_match_info (FixedArray)
733 // rcx: offsets vector
734 // rdx: number of capture registers
735 Label next_capture, done;
736 // Capture register counter starts from number of capture registers and
737 // counts down until wrapping after zero.
738 __ bind(&next_capture);
739 __ subp(rdx, Immediate(1));
740 __ j(negative, &done, Label::kNear);
741 // Read the value from the static offsets vector buffer and make it a smi.
742 __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
743 __ Integer32ToSmi(rdi, rdi);
744 // Store the smi value in the last match info.
745 __ movp(FieldOperand(rbx, rdx, times_pointer_size,
746 RegExpMatchInfo::kFirstCaptureOffset),
747 rdi);
748 __ jmp(&next_capture);
749 __ bind(&done);
750
751 // Return last match info.
752 __ movp(rax, rbx);
753 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);
754
755 __ bind(&exception);
756 // Result must now be exception. If there is no pending exception already, a
757 // stack overflow (on the backtrack stack) was detected in RegExp code, but
758 // the exception has not been created yet. Handle that in the runtime system.
759 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
760 ExternalReference pending_exception_address(
761 Isolate::kPendingExceptionAddress, isolate());
762 Operand pending_exception_operand =
763 masm->ExternalOperand(pending_exception_address, rbx);
764 __ movp(rax, pending_exception_operand);
765 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
766 __ cmpp(rax, rdx);
767 __ j(equal, &runtime);
768
769 // For exception, throw the exception again.
770 __ TailCallRuntime(Runtime::kRegExpExecReThrow);
771
772 // Do the runtime call to execute the regexp.
773 __ bind(&runtime);
774 __ TailCallRuntime(Runtime::kRegExpExec);
775
776 // Deferred code for string handling.
777 // (6) Long external string? If not, go to (10).
778 __ bind(¬_seq_nor_cons);
779 // Compare flags are still set from (3).
780 __ j(greater, ¬_long_external, Label::kNear); // Go to (10).
781
782 // (7) External string. Short external strings have been ruled out.
783 __ bind(&external_string);
784 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
785 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
786 if (FLAG_debug_code) {
787 // Assert that we do not have a cons or slice (indirect strings) here.
788 // Sequential strings have already been ruled out.
789 __ testb(rbx, Immediate(kIsIndirectStringMask));
790 __ Assert(zero, kExternalStringExpectedButNotFound);
791 }
792 __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
793 // Move the pointer so that offset-wise, it looks like a sequential string.
794 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
795 __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
796 STATIC_ASSERT(kTwoByteStringTag == 0);
797 // (8) Is the external string one byte? If yes, go to (5).
798 __ testb(rbx, Immediate(kStringEncodingMask));
799 __ j(not_zero, &seq_one_byte_string); // Go to (5).
800
801 // rdi: subject string (flat two-byte)
802 // rax: RegExp data (FixedArray)
803 // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
804 __ bind(&seq_two_byte_string);
805 __ movp(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset));
806 __ Set(rcx, 0); // Type is two byte.
807 __ jmp(&check_code); // Go to (E).
808
809 // (10) Not a string or a short external string? If yes, bail out to runtime.
810 __ bind(¬_long_external);
811 // Catch non-string subject or short external string.
812 STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
813 __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask));
814 __ j(not_zero, &runtime);
815
816 // (11) Sliced string. Replace subject with parent. Go to (1).
817 // Load offset into r14 and replace subject string with parent.
818 __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset));
819 __ movp(rdi, FieldOperand(rdi, SlicedString::kParentOffset));
820 __ jmp(&check_underlying);
821 #endif // V8_INTERPRETED_REGEXP
822 }
823
824
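// Returns the comparison result that makes the condition |cc| evaluate to
// false: LESS for greater/greater_equal, GREATER for less/less_equal.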
825 static int NegativeComparisonResult(Condition cc) {
826 DCHECK(cc != equal);
827 DCHECK((cc == less) || (cc == less_equal)
828 || (cc == greater) || (cc == greater_equal));
829 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
830 }
831
832
833 static void CheckInputType(MacroAssembler* masm, Register input,
834 CompareICState::State expected, Label* fail) {
835 Label ok;
836 if (expected == CompareICState::SMI) {
837 __ JumpIfNotSmi(input, fail);
838 } else if (expected == CompareICState::NUMBER) {
839 __ JumpIfSmi(input, &ok);
840 __ CompareMap(input, masm->isolate()->factory()->heap_number_map());
841 __ j(not_equal, fail);
842 }
843 // We could be strict about internalized/non-internalized here, but as long as
844 // hydrogen doesn't care, the stub doesn't have to care either.
845 __ bind(&ok);
846 }
847
848
849 static void BranchIfNotInternalizedString(MacroAssembler* masm,
850 Label* label,
851 Register object,
852 Register scratch) {
853 __ JumpIfSmi(object, label);
854 __ movp(scratch, FieldOperand(object, HeapObject::kMapOffset));
855 __ movzxbp(scratch,
856 FieldOperand(scratch, Map::kInstanceTypeOffset));
857 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
858 __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
859 __ j(not_zero, label);
860 }
861
862
863 void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
864 Label runtime_call, check_unequal_objects, done;
865 Condition cc = GetCondition();
866 Factory* factory = isolate()->factory();
867
868 Label miss;
869 CheckInputType(masm, rdx, left(), &miss);
870 CheckInputType(masm, rax, right(), &miss);
871
872 // Compare two smis.
873 Label non_smi, smi_done;
874 __ JumpIfNotBothSmi(rax, rdx, &non_smi);
875 __ subp(rdx, rax);
876 __ j(no_overflow, &smi_done);
877 __ notp(rdx); // Correct sign in case of overflow. rdx cannot be 0 here.
878 __ bind(&smi_done);
879 __ movp(rax, rdx);
880 __ ret(0);
881 __ bind(&non_smi);
882
883 // The compare stub returns a positive, negative, or zero 64-bit integer
884 // value in rax, corresponding to result of comparing the two inputs.
885 // NOTICE! This code is only reached after a smi-fast-case check, so
886 // it is certain that at least one operand isn't a smi.
887
888 // Two identical objects are equal unless they are both NaN or undefined.
889 {
890 Label not_identical;
891 __ cmpp(rax, rdx);
892 __ j(not_equal, ¬_identical, Label::kNear);
893
894 if (cc != equal) {
895 // Check for undefined. undefined OP undefined is false even though
896 // undefined == undefined.
897 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
898 Label check_for_nan;
899 __ j(not_equal, &check_for_nan, Label::kNear);
900 __ Set(rax, NegativeComparisonResult(cc));
901 __ ret(0);
902 __ bind(&check_for_nan);
903 }
904
905 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
906 // so we do the second best thing - test it ourselves.
907 Label heap_number;
908 // If it's not a heap number, then return equal for (in)equality operator.
909 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
910 factory->heap_number_map());
911 __ j(equal, &heap_number, Label::kNear);
912 if (cc != equal) {
913 __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));
914 __ movzxbl(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
915 // Call runtime on identical objects. Otherwise return equal.
916 __ cmpb(rcx, Immediate(static_cast<uint8_t>(FIRST_JS_RECEIVER_TYPE)));
917 __ j(above_equal, &runtime_call, Label::kFar);
918 // Call runtime on identical symbols since we need to throw a TypeError.
919 __ cmpb(rcx, Immediate(static_cast<uint8_t>(SYMBOL_TYPE)));
920 __ j(equal, &runtime_call, Label::kFar);
921 // Call runtime on identical SIMD values since we must throw a TypeError.
922 __ cmpb(rcx, Immediate(static_cast<uint8_t>(SIMD128_VALUE_TYPE)));
923 __ j(equal, &runtime_call, Label::kFar);
924 }
925 __ Set(rax, EQUAL);
926 __ ret(0);
927
928 __ bind(&heap_number);
929 // It is a heap number, so return equal if it's not NaN.
930 // For NaN, return 1 for every condition except greater and
931 // greater-equal. Return -1 for them, so the comparison yields
932 // false for all conditions except not-equal.
933 __ Set(rax, EQUAL);
934 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
935 __ Ucomisd(xmm0, xmm0);
936 __ setcc(parity_even, rax);
937 // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs.
938 if (cc == greater_equal || cc == greater) {
939 __ negp(rax);
940 }
941 __ ret(0);
942
943 __ bind(¬_identical);
944 }
945
946 if (cc == equal) { // Both strict and non-strict.
947 Label slow; // Fallthrough label.
948
949 // If we're doing a strict equality comparison, we don't have to do
950 // type conversion, so we generate code to do fast comparison for objects
951 // and oddballs. Non-smi numbers and strings still go through the usual
952 // slow-case code.
953 if (strict()) {
954 // If either is a Smi (we know that not both are), then they can only
955 // be equal if the other is a HeapNumber. If so, use the slow case.
956 {
957 Label not_smis;
958 __ SelectNonSmi(rbx, rax, rdx, ¬_smis);
959
960 // Check if the non-smi operand is a heap number.
961 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
962 factory->heap_number_map());
963 // If heap number, handle it in the slow case.
964 __ j(equal, &slow);
965 // Return non-equal. ebx (the lower half of rbx) is not zero.
966 __ movp(rax, rbx);
967 __ ret(0);
968
969 __ bind(¬_smis);
970 }
971
972 // If either operand is a JSObject or an oddball value, then they are not
973 // equal since their pointers are different
974 // There is no test for undetectability in strict equality.
975
976 // If the first object is a JS object, we have done pointer comparison.
977 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
978 Label first_non_object;
979 __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
980 __ j(below, &first_non_object, Label::kNear);
981 // Return non-zero: rax holds a tagged heap object pointer, which is never zero.
982 Label return_not_equal;
983 STATIC_ASSERT(kHeapObjectTag != 0);
984 __ bind(&return_not_equal);
985 __ ret(0);
986
987 __ bind(&first_non_object);
988 // Check for oddballs: true, false, null, undefined.
989 __ CmpInstanceType(rcx, ODDBALL_TYPE);
990 __ j(equal, &return_not_equal);
991
992 __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx);
993 __ j(above_equal, &return_not_equal);
994
995 // Check for oddballs: true, false, null, undefined.
996 __ CmpInstanceType(rcx, ODDBALL_TYPE);
997 __ j(equal, &return_not_equal);
998
999 // Fall through to the general case.
1000 }
1001 __ bind(&slow);
1002 }
1003
1004 // Generate the number comparison code.
1005 Label non_number_comparison;
1006 Label unordered;
1007 FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
1008 __ xorl(rax, rax);
1009 __ xorl(rcx, rcx);
1010 __ Ucomisd(xmm0, xmm1);
1011
1012 // Don't base result on EFLAGS when a NaN is involved.
1013 __ j(parity_even, &unordered, Label::kNear);
1014 // Return a result of -1, 0, or 1, based on EFLAGS.
1015 __ setcc(above, rax);
1016 __ setcc(below, rcx);
1017 __ subp(rax, rcx);
1018 __ ret(0);
1019
1020 // If one of the numbers was NaN, then the result is always false.
1021 // The cc is never not-equal.
1022 __ bind(&unordered);
1023 DCHECK(cc != not_equal);
1024 if (cc == less || cc == less_equal) {
1025 __ Set(rax, 1);
1026 } else {
1027 __ Set(rax, -1);
1028 }
1029 __ ret(0);
1030
1031 // The number comparison code did not provide a valid result.
1032 __ bind(&non_number_comparison);
1033
1034 // Fast negative check for internalized-to-internalized equality.
1035 Label check_for_strings;
1036 if (cc == equal) {
1037 BranchIfNotInternalizedString(
1038 masm, &check_for_strings, rax, kScratchRegister);
1039 BranchIfNotInternalizedString(
1040 masm, &check_for_strings, rdx, kScratchRegister);
1041
1042 // We've already checked for object identity, so if both operands are
1043 // internalized strings they aren't equal. Register rax already holds a
1044 // non-zero value, which indicates not equal, so just return.
1045 __ ret(0);
1046 }
1047
1048 __ bind(&check_for_strings);
1049
1050 __ JumpIfNotBothSequentialOneByteStrings(rdx, rax, rcx, rbx,
1051 &check_unequal_objects);
1052
1053 // Inline comparison of one-byte strings.
1054 if (cc == equal) {
1055 StringHelper::GenerateFlatOneByteStringEquals(masm, rdx, rax, rcx, rbx);
1056 } else {
1057 StringHelper::GenerateCompareFlatOneByteStrings(masm, rdx, rax, rcx, rbx,
1058 rdi, r8);
1059 }
1060
1061 #ifdef DEBUG
1062 __ Abort(kUnexpectedFallThroughFromStringComparison);
1063 #endif
1064
1065 __ bind(&check_unequal_objects);
1066 if (cc == equal && !strict()) {
1067 // Not strict equality. Objects are unequal if
1068 // they are both JSObjects and not undetectable,
1069 // and their pointers are different.
1070 Label return_equal, return_unequal, undetectable;
1071 // At most one is a smi, so we can test for smi by adding the two.
1072 // A smi plus a heap object has the low bit set, a heap object plus
1073 // a heap object has the low bit clear.
1074 STATIC_ASSERT(kSmiTag == 0);
1075 STATIC_ASSERT(kSmiTagMask == 1);
1076 __ leap(rcx, Operand(rax, rdx, times_1, 0));
1077 __ testb(rcx, Immediate(kSmiTagMask));
1078 __ j(not_zero, &runtime_call, Label::kNear);
1079
1080 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
1081 __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
1082 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
1083 Immediate(1 << Map::kIsUndetectable));
1084 __ j(not_zero, &undetectable, Label::kNear);
1085 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
1086 Immediate(1 << Map::kIsUndetectable));
1087 __ j(not_zero, &return_unequal, Label::kNear);
1088
1089 __ CmpInstanceType(rbx, FIRST_JS_RECEIVER_TYPE);
1090 __ j(below, &runtime_call, Label::kNear);
1091 __ CmpInstanceType(rcx, FIRST_JS_RECEIVER_TYPE);
1092 __ j(below, &runtime_call, Label::kNear);
1093
1094 __ bind(&return_unequal);
1095 // Return non-equal by returning the non-zero object pointer in rax.
1096 __ ret(0);
1097
1098 __ bind(&undetectable);
1099 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
1100 Immediate(1 << Map::kIsUndetectable));
1101 __ j(zero, &return_unequal, Label::kNear);
1102
1103 // If both sides are JSReceivers, then the result is false according to
1104 // the HTML specification, which says that only comparisons with null or
1105 // undefined are affected by special casing for document.all.
1106 __ CmpInstanceType(rbx, ODDBALL_TYPE);
1107 __ j(zero, &return_equal, Label::kNear);
1108 __ CmpInstanceType(rcx, ODDBALL_TYPE);
1109 __ j(not_zero, &return_unequal, Label::kNear);
1110
1111 __ bind(&return_equal);
1112 __ Set(rax, EQUAL);
1113 __ ret(0);
1114 }
1115 __ bind(&runtime_call);
1116
1117 if (cc == equal) {
1118 {
1119 FrameScope scope(masm, StackFrame::INTERNAL);
1120 __ Push(rdx);
1121 __ Push(rax);
1122 __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual);
1123 }
1124 // Turn true into 0 and false into some non-zero value.
1125 STATIC_ASSERT(EQUAL == 0);
1126 __ LoadRoot(rdx, Heap::kTrueValueRootIndex);
1127 __ subp(rax, rdx);
1128 __ Ret();
1129 } else {
1130 // Push arguments below the return address to prepare jump to builtin.
1131 __ PopReturnAddressTo(rcx);
1132 __ Push(rdx);
1133 __ Push(rax);
1134 __ Push(Smi::FromInt(NegativeComparisonResult(cc)));
1135 __ PushReturnAddressFrom(rcx);
1136 __ TailCallRuntime(Runtime::kCompare);
1137 }
1138
1139 __ bind(&miss);
1140 GenerateMiss(masm);
1141 }
1142
1143
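// Saves the registers used by GenerateRecordCallTarget around a stub call.
// rax (argument count) and rdx (slot index) are smi-tagged while they are
// spilled so that the GC can safely scan the frame.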
1144 static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
1145 // rax : number of arguments to the construct function
1146 // rbx : feedback vector
1147 // rdx : slot in feedback vector (Smi)
1148 // rdi : the function to call
1149 FrameScope scope(masm, StackFrame::INTERNAL);
1150
1151 // Number-of-arguments register must be smi-tagged to call out.
1152 __ Integer32ToSmi(rax, rax);
1153 __ Push(rax);
1154 __ Push(rdi);
1155 __ Integer32ToSmi(rdx, rdx);
1156 __ Push(rdx);
1157 __ Push(rbx);
1158 __ Push(rsi);
1159
1160 __ CallStub(stub);
1161
1162 __ Pop(rsi);
1163 __ Pop(rbx);
1164 __ Pop(rdx);
1165 __ Pop(rdi);
1166 __ Pop(rax);
1167 __ SmiToInteger32(rdx, rdx);
1168 __ SmiToInteger32(rax, rax);
1169 }
1170
1171
1172 static void GenerateRecordCallTarget(MacroAssembler* masm) {
1173 // Cache the called function in a feedback vector slot. Cache states
1174 // are uninitialized, monomorphic (indicated by a JSFunction), and
1175 // megamorphic.
1176 // rax : number of arguments to the construct function
1177 // rbx : feedback vector
1178 // rdx : slot in feedback vector (Smi)
1179 // rdi : the function to call
1180 Isolate* isolate = masm->isolate();
1181 Label initialize, done, miss, megamorphic, not_array_function;
1182
1183 // Load the cache state into r11.
1184 __ SmiToInteger32(rdx, rdx);
1185 __ movp(r11,
1186 FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
1187
1188 // A monomorphic cache hit or an already megamorphic state: invoke the
1189 // function without changing the state.
1190 // We don't know if r11 is a WeakCell or a Symbol, but it's harmless to read
1191 // at this position in a symbol (see static asserts in
1192 // type-feedback-vector.h).
1193 Label check_allocation_site;
1194 __ cmpp(rdi, FieldOperand(r11, WeakCell::kValueOffset));
1195 __ j(equal, &done, Label::kFar);
1196 __ CompareRoot(r11, Heap::kmegamorphic_symbolRootIndex);
1197 __ j(equal, &done, Label::kFar);
1198 __ CompareRoot(FieldOperand(r11, HeapObject::kMapOffset),
1199 Heap::kWeakCellMapRootIndex);
1200 __ j(not_equal, &check_allocation_site);
1201
1202 // If the weak cell is cleared, we have a new chance to become monomorphic.
1203 __ CheckSmi(FieldOperand(r11, WeakCell::kValueOffset));
1204 __ j(equal, &initialize);
1205 __ jmp(&megamorphic);
1206
1207 __ bind(&check_allocation_site);
1208 // If we came here, we need to see if we are the array function.
1209 // If we didn't have a matching function, and we didn't find the megamorphic
1210 // sentinel, then we have in the slot either some other function or an
1211 // AllocationSite.
1212 __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex);
1213 __ j(not_equal, &miss);
1214
1215 // Make sure the function is the Array() function
1216 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11);
1217 __ cmpp(rdi, r11);
1218 __ j(not_equal, &megamorphic);
1219 __ jmp(&done);
1220
1221 __ bind(&miss);
1222
1223 // A monomorphic miss (i.e., here the cache is not uninitialized) goes
1224 // megamorphic.
1225 __ CompareRoot(r11, Heap::kuninitialized_symbolRootIndex);
1226 __ j(equal, &initialize);
1227 // MegamorphicSentinel is an immortal immovable object (undefined) so no
1228 // write-barrier is needed.
1229 __ bind(&megamorphic);
1230 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
1231 TypeFeedbackVector::MegamorphicSentinel(isolate));
1232 __ jmp(&done);
1233
1234 // An uninitialized cache is patched with the function or sentinel to
1235 // indicate the ElementsKind if function is the Array constructor.
1236 __ bind(&initialize);
1237
1238 // Make sure the function is the Array() function
1239 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11);
1240 __ cmpp(rdi, r11);
1241 __ j(not_equal, ¬_array_function);
1242
1243 CreateAllocationSiteStub create_stub(isolate);
1244 CallStubInRecordCallTarget(masm, &create_stub);
1245 __ jmp(&done);
1246
1247 __ bind(¬_array_function);
1248 CreateWeakCellStub weak_cell_stub(isolate);
1249 CallStubInRecordCallTarget(masm, &weak_cell_stub);
1250
1251 __ bind(&done);
1252 // Increment the call count for all function calls.
1253 __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
1254 FixedArray::kHeaderSize + kPointerSize),
1255 Smi::FromInt(1));
1256 }
1257
1258
1259 void CallConstructStub::Generate(MacroAssembler* masm) {
1260 // rax : number of arguments
1261 // rbx : feedback vector
1262 // rdx : slot in feedback vector (Smi)
1263 // rdi : constructor function
1264
1265 Label non_function;
1266 // Check that the constructor is not a smi.
1267 __ JumpIfSmi(rdi, &non_function);
1268 // Check that constructor is a JSFunction.
1269 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, r11);
1270 __ j(not_equal, &non_function);
1271
1272 GenerateRecordCallTarget(masm);
1273
1274 Label feedback_register_initialized;
1275 // Put the AllocationSite from the feedback vector into rbx, or undefined.
1276 __ movp(rbx,
1277 FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
1278 __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
1279 __ j(equal, &feedback_register_initialized, Label::kNear);
1280 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
1281 __ bind(&feedback_register_initialized);
1282
1283 __ AssertUndefinedOrAllocationSite(rbx);
1284
1285 // Pass new target to construct stub.
1286 __ movp(rdx, rdi);
1287
1288 // Tail call to the function-specific construct stub (still in the caller
1289 // context at this point).
1290 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
1291 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
1292 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
1293 __ jmp(rcx);
1294
1295 __ bind(&non_function);
1296 __ movp(rdx, rdi);
1297 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
1298 }
1299
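// The call count for a call site is kept as a Smi in the feedback vector slot
// immediately following the site's feedback element.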
1300 static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
1301 Register slot) {
1302 __ SmiAddConstant(FieldOperand(feedback_vector, slot, times_pointer_size,
1303 FixedArray::kHeaderSize + kPointerSize),
1304 Smi::FromInt(1));
1305 }
1306
1307 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
1308 // rdi - function
1309 // rdx - slot id
1310 // rbx - vector
1311 // rcx - allocation site (loaded from vector[slot]).
1312 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8);
1313 __ cmpp(rdi, r8);
1314 __ j(not_equal, miss);
1315
1316 // Increment the call count for monomorphic function calls.
1317 IncrementCallCount(masm, rbx, rdx);
1318
1319 __ movp(rbx, rcx);
1320 __ movp(rdx, rdi);
1321 ArrayConstructorStub stub(masm->isolate());
1322 __ TailCallStub(&stub);
1323 }
1324
1325
1326 void CallICStub::Generate(MacroAssembler* masm) {
1327 // ----------- S t a t e -------------
1328 // -- rax - number of arguments
1329 // -- rdi - function
1330 // -- rdx - slot id
1331 // -- rbx - vector
1332 // -----------------------------------
1333 Isolate* isolate = masm->isolate();
1334 Label extra_checks_or_miss, call, call_function, call_count_incremented;
1335
1336 // The checks. First, does rdi match the recorded monomorphic target?
1337 __ SmiToInteger32(rdx, rdx);
1338 __ movp(rcx,
1339 FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
1340
1341 // We don't know that we have a weak cell. We might have a private symbol
1342 // or an AllocationSite, but the memory is safe to examine.
1343 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
1344 // FixedArray.
1345 // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
1346 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
1347 // computed, meaning that it can't appear to be a pointer. If the low bit is
1348 // 0, then hash is computed, but the 0 bit prevents the field from appearing
1349 // to be a pointer.
1350 STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
1351 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
1352 WeakCell::kValueOffset &&
1353 WeakCell::kValueOffset == Symbol::kHashFieldSlot);
1354
1355 __ cmpp(rdi, FieldOperand(rcx, WeakCell::kValueOffset));
1356 __ j(not_equal, &extra_checks_or_miss);
1357
1358 // The compare above could have been a SMI/SMI comparison. Guard against this
1359 // convincing us that we have a monomorphic JSFunction.
1360 __ JumpIfSmi(rdi, &extra_checks_or_miss);
1361
1362 __ bind(&call_function);
1363 // Increment the call count for monomorphic function calls.
1364 IncrementCallCount(masm, rbx, rdx);
1365
1366 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
1367 tail_call_mode()),
1368 RelocInfo::CODE_TARGET);
1369
1370 __ bind(&extra_checks_or_miss);
1371 Label uninitialized, miss, not_allocation_site;
1372
1373 __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate));
1374 __ j(equal, &call);
1375
1376 // Check if we have an allocation site.
1377 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
1378 Heap::kAllocationSiteMapRootIndex);
1379 __ j(not_equal, ¬_allocation_site);
1380
1381 // We have an allocation site.
1382 HandleArrayCase(masm, &miss);
1383
1384 __ bind(¬_allocation_site);
1385
1386 // The following cases attempt to handle MISS cases without going to the
1387 // runtime.
1388 if (FLAG_trace_ic) {
1389 __ jmp(&miss);
1390 }
1391
1392 __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate));
1393 __ j(equal, &uninitialized);
1394
1395 // We are going megamorphic. If the feedback is a JSFunction, it is fine
1396 // to handle it here. More complex cases are dealt with in the runtime.
1397 __ AssertNotSmi(rcx);
1398 __ CmpObjectType(rcx, JS_FUNCTION_TYPE, rcx);
1399 __ j(not_equal, &miss);
1400 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
1401 TypeFeedbackVector::MegamorphicSentinel(isolate));
1402
1403 __ bind(&call);
1404
1405 // Increment the call count for megamorphic function calls.
1406 IncrementCallCount(masm, rbx, rdx);
1407
1408 __ bind(&call_count_incremented);
1409 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
1410 RelocInfo::CODE_TARGET);
1411
1412 __ bind(&uninitialized);
1413
1414 // We are going monomorphic, provided we actually have a JSFunction.
1415 __ JumpIfSmi(rdi, &miss);
1416
1417 // Goto miss case if we do not have a function.
1418 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
1419 __ j(not_equal, &miss);
1420
1421 // Make sure the function is not the Array() function, which requires special
1422 // behavior on MISS.
1423 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rcx);
1424 __ cmpp(rdi, rcx);
1425 __ j(equal, &miss);
1426
1427 // Make sure the function belongs to the same native context.
1428 __ movp(rcx, FieldOperand(rdi, JSFunction::kContextOffset));
1429 __ movp(rcx, ContextOperand(rcx, Context::NATIVE_CONTEXT_INDEX));
1430 __ cmpp(rcx, NativeContextOperand());
1431 __ j(not_equal, &miss);
1432
1433 // Store the function. Use a stub since we need a frame for allocation.
1434 // rbx - vector
1435 // rdx - slot (needs to be in smi form)
1436 // rdi - function
1437 {
1438 FrameScope scope(masm, StackFrame::INTERNAL);
1439 CreateWeakCellStub create_stub(isolate);
1440
1441 __ Integer32ToSmi(rax, rax);
1442 __ Integer32ToSmi(rdx, rdx);
1443 __ Push(rax);
1444 __ Push(rbx);
1445 __ Push(rdx);
1446 __ Push(rdi);
1447 __ Push(rsi);
1448 __ CallStub(&create_stub);
1449 __ Pop(rsi);
1450 __ Pop(rdi);
1451 __ Pop(rdx);
1452 __ Pop(rbx);
1453 __ Pop(rax);
1454 __ SmiToInteger32(rdx, rdx);
1455 __ SmiToInteger32(rax, rax);
1456 }
1457
1458 __ jmp(&call_function);
1459
1460 // We are here because tracing is on or we encountered a MISS case we can't
1461 // handle here.
1462 __ bind(&miss);
1463 GenerateMiss(masm);
1464
1465 __ jmp(&call_count_incremented);
1466
1467 // Unreachable
1468 __ int3();
1469 }
1470
1471 void CallICStub::GenerateMiss(MacroAssembler* masm) {
1472 FrameScope scope(masm, StackFrame::INTERNAL);
1473
1474 // Preserve the number of arguments.
1475 __ Integer32ToSmi(rax, rax);
1476 __ Push(rax);
1477
1478 // Push the receiver and the function and feedback info.
1479 __ Integer32ToSmi(rdx, rdx);
1480 __ Push(rdi);
1481 __ Push(rbx);
1482 __ Push(rdx);
1483
1484 // Call the entry.
1485 __ CallRuntime(Runtime::kCallIC_Miss);
1486
1487   // Move result to rdi and exit the internal frame.
1488 __ movp(rdi, rax);
1489
1490 // Restore number of arguments.
1491 __ Pop(rax);
1492 __ SmiToInteger32(rax, rax);
1493 }
1494
1495 bool CEntryStub::NeedsImmovableCode() {
1496 return false;
1497 }
1498
1499
1500 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
1501 CEntryStub::GenerateAheadOfTime(isolate);
1502 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
1503 StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
1504 // It is important that the store buffer overflow stubs are generated first.
1505 CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate);
1506 CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
1507 CreateWeakCellStub::GenerateAheadOfTime(isolate);
1508 BinaryOpICStub::GenerateAheadOfTime(isolate);
1509 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
1510 StoreFastElementStub::GenerateAheadOfTime(isolate);
1511 }
1512
1513
1514 void CodeStub::GenerateFPStubs(Isolate* isolate) {
1515 }
1516
1517
1518 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
1519 CEntryStub stub(isolate, 1, kDontSaveFPRegs);
1520 stub.GetCode();
1521 CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
1522 save_doubles.GetCode();
1523 }
1524
1525
1526 void CEntryStub::Generate(MacroAssembler* masm) {
1527 // rax: number of arguments including receiver
1528 // rbx: pointer to C function (C callee-saved)
1529 // rbp: frame pointer of calling JS frame (restored after C call)
1530 // rsp: stack pointer (restored after C call)
1531 // rsi: current context (restored)
1532 //
1533 // If argv_in_register():
1534 // r15: pointer to the first argument
1535
1536 ProfileEntryHookStub::MaybeCallEntryHook(masm);
1537
1538 #ifdef _WIN64
1539 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. It requires the
1540 // stack to be aligned to 16 bytes. It only allows a single word to be
1541 // returned in register rax. Larger return sizes must be written to an address
1542 // passed as a hidden first argument.
1543 const Register kCCallArg0 = rcx;
1544 const Register kCCallArg1 = rdx;
1545 const Register kCCallArg2 = r8;
1546 const Register kCCallArg3 = r9;
1547 const int kArgExtraStackSpace = 2;
1548 const int kMaxRegisterResultSize = 1;
1549 #else
1550 // GCC / Clang passes arguments in rdi, rsi, rdx, rcx, r8, r9. Simple results
1551 // are returned in rax, and a struct of two pointers is returned in rax+rdx.
1552 // Larger return sizes must be written to an address passed as a hidden first
1553 // argument.
1554 const Register kCCallArg0 = rdi;
1555 const Register kCCallArg1 = rsi;
1556 const Register kCCallArg2 = rdx;
1557 const Register kCCallArg3 = rcx;
1558 const int kArgExtraStackSpace = 0;
1559 const int kMaxRegisterResultSize = 2;
1560 #endif // _WIN64
1561
1562 // Enter the exit frame that transitions from JavaScript to C++.
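  // When the result does not fit into the return registers, reserve
  // result_size() words of stack in the exit frame (in addition to the
  // platform's kArgExtraStackSpace) so the callee can write the result there;
  // it is read back through StackSpaceOperand after the call.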
1563 int arg_stack_space =
1564 kArgExtraStackSpace +
1565 (result_size() <= kMaxRegisterResultSize ? 0 : result_size());
1566 if (argv_in_register()) {
1567 DCHECK(!save_doubles());
1568 DCHECK(!is_builtin_exit());
1569 __ EnterApiExitFrame(arg_stack_space);
1570 // Move argc into r14 (argv is already in r15).
1571 __ movp(r14, rax);
1572 } else {
1573 __ EnterExitFrame(
1574 arg_stack_space, save_doubles(),
1575 is_builtin_exit() ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
1576 }
1577
1578 // rbx: pointer to builtin function (C callee-saved).
1579 // rbp: frame pointer of exit frame (restored after C call).
1580 // rsp: stack pointer (restored after C call).
1581 // r14: number of arguments including receiver (C callee-saved).
1582 // r15: argv pointer (C callee-saved).
1583
1584 // Check stack alignment.
1585 if (FLAG_debug_code) {
1586 __ CheckStackAlignment();
1587 }
1588
1589 // Call C function. The arguments object will be created by stubs declared by
1590 // DECLARE_RUNTIME_FUNCTION().
1591 if (result_size() <= kMaxRegisterResultSize) {
1592 // Pass a pointer to the Arguments object as the first argument.
1593 // Return result in single register (rax), or a register pair (rax, rdx).
1594 __ movp(kCCallArg0, r14); // argc.
1595 __ movp(kCCallArg1, r15); // argv.
1596 __ Move(kCCallArg2, ExternalReference::isolate_address(isolate()));
1597 } else {
1598 DCHECK_LE(result_size(), 3);
1599 // Pass a pointer to the result location as the first argument.
1600 __ leap(kCCallArg0, StackSpaceOperand(kArgExtraStackSpace));
1601 // Pass a pointer to the Arguments object as the second argument.
1602 __ movp(kCCallArg1, r14); // argc.
1603 __ movp(kCCallArg2, r15); // argv.
1604 __ Move(kCCallArg3, ExternalReference::isolate_address(isolate()));
1605 }
1606 __ call(rbx);
1607
1608 if (result_size() > kMaxRegisterResultSize) {
1609 // Read result values stored on stack. Result is stored
1610     // above the two Arguments object slots on Win64.
1611 DCHECK_LE(result_size(), 3);
1612 __ movq(kReturnRegister0, StackSpaceOperand(kArgExtraStackSpace + 0));
1613 __ movq(kReturnRegister1, StackSpaceOperand(kArgExtraStackSpace + 1));
1614 if (result_size() > 2) {
1615 __ movq(kReturnRegister2, StackSpaceOperand(kArgExtraStackSpace + 2));
1616 }
1617 }
1618 // Result is in rax, rdx:rax or r8:rdx:rax - do not destroy these registers!
1619
1620 // Check result for exception sentinel.
1621 Label exception_returned;
1622 __ CompareRoot(rax, Heap::kExceptionRootIndex);
1623 __ j(equal, &exception_returned);
1624
1625 // Check that there is no pending exception, otherwise we
1626 // should have returned the exception sentinel.
1627 if (FLAG_debug_code) {
1628 Label okay;
1629 __ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
1630 ExternalReference pending_exception_address(
1631 Isolate::kPendingExceptionAddress, isolate());
1632 Operand pending_exception_operand =
1633 masm->ExternalOperand(pending_exception_address);
1634 __ cmpp(r14, pending_exception_operand);
1635 __ j(equal, &okay, Label::kNear);
1636 __ int3();
1637 __ bind(&okay);
1638 }
1639
1640 // Exit the JavaScript to C++ exit frame.
1641 __ LeaveExitFrame(save_doubles(), !argv_in_register());
1642 __ ret(0);
1643
1644 // Handling of exception.
1645 __ bind(&exception_returned);
1646
1647 ExternalReference pending_handler_context_address(
1648 Isolate::kPendingHandlerContextAddress, isolate());
1649 ExternalReference pending_handler_code_address(
1650 Isolate::kPendingHandlerCodeAddress, isolate());
1651 ExternalReference pending_handler_offset_address(
1652 Isolate::kPendingHandlerOffsetAddress, isolate());
1653 ExternalReference pending_handler_fp_address(
1654 Isolate::kPendingHandlerFPAddress, isolate());
1655 ExternalReference pending_handler_sp_address(
1656 Isolate::kPendingHandlerSPAddress, isolate());
1657
1658 // Ask the runtime for help to determine the handler. This will set rax to
1659   // contain the current pending exception; don't clobber it.
1660 ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
1661 isolate());
1662 {
1663 FrameScope scope(masm, StackFrame::MANUAL);
1664 __ movp(arg_reg_1, Immediate(0)); // argc.
1665 __ movp(arg_reg_2, Immediate(0)); // argv.
1666 __ Move(arg_reg_3, ExternalReference::isolate_address(isolate()));
1667 __ PrepareCallCFunction(3);
1668 __ CallCFunction(find_handler, 3);
1669 }
1670
1671 // Retrieve the handler context, SP and FP.
1672 __ movp(rsi, masm->ExternalOperand(pending_handler_context_address));
1673 __ movp(rsp, masm->ExternalOperand(pending_handler_sp_address));
1674 __ movp(rbp, masm->ExternalOperand(pending_handler_fp_address));
1675
1676 // If the handler is a JS frame, restore the context to the frame. Note that
1677 // the context will be set to (rsi == 0) for non-JS frames.
1678 Label skip;
1679 __ testp(rsi, rsi);
1680 __ j(zero, &skip, Label::kNear);
1681 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
1682 __ bind(&skip);
1683
1684 // Compute the handler entry address and jump to it.
1685 __ movp(rdi, masm->ExternalOperand(pending_handler_code_address));
1686 __ movp(rdx, masm->ExternalOperand(pending_handler_offset_address));
1687 __ leap(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize));
1688 __ jmp(rdi);
1689 }
1690
1691
1692 void JSEntryStub::Generate(MacroAssembler* masm) {
1693 Label invoke, handler_entry, exit;
1694 Label not_outermost_js, not_outermost_js_2;
1695
1696 ProfileEntryHookStub::MaybeCallEntryHook(masm);
1697
1698 { // NOLINT. Scope block confuses linter.
1699 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
1700 // Set up frame.
1701 __ pushq(rbp);
1702 __ movp(rbp, rsp);
1703
1704 // Push the stack frame type.
1705 int marker = type();
1706 __ Push(Smi::FromInt(marker)); // context slot
1707 ExternalReference context_address(Isolate::kContextAddress, isolate());
1708 __ Load(kScratchRegister, context_address);
1709 __ Push(kScratchRegister); // context
1710 // Save callee-saved registers (X64/X32/Win64 calling conventions).
1711 __ pushq(r12);
1712 __ pushq(r13);
1713 __ pushq(r14);
1714 __ pushq(r15);
1715 #ifdef _WIN64
1716     __ pushq(rdi); // Only callee-saved in Win64 ABI, argument in AMD64 ABI.
1717     __ pushq(rsi); // Only callee-saved in Win64 ABI, argument in AMD64 ABI.
1718 #endif
1719 __ pushq(rbx);
1720
1721 #ifdef _WIN64
1722 // On Win64 XMM6-XMM15 are callee-save
1723 __ subp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
1724 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
1725 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
1726 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
1727 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
1728 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
1729 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
1730 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
1731 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
1732 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
1733 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
1734 #endif
1735
1736 // Set up the roots and smi constant registers.
1737 // Needs to be done before any further smi loads.
1738 __ InitializeRootRegister();
1739 }
1740
1741 // Save copies of the top frame descriptor on the stack.
1742 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
1743 {
1744 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
1745 __ Push(c_entry_fp_operand);
1746 }
1747
1748 // If this is the outermost JS call, set js_entry_sp value.
1749 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
1750 __ Load(rax, js_entry_sp);
1751 __ testp(rax, rax);
1752   __ j(not_zero, &not_outermost_js);
1753 __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
1754 __ movp(rax, rbp);
1755 __ Store(js_entry_sp, rax);
1756 Label cont;
1757 __ jmp(&cont);
1758   __ bind(&not_outermost_js);
1759 __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME));
1760 __ bind(&cont);
1761
1762 // Jump to a faked try block that does the invoke, with a faked catch
1763 // block that sets the pending exception.
1764 __ jmp(&invoke);
1765 __ bind(&handler_entry);
1766 handler_offset_ = handler_entry.pos();
1767 // Caught exception: Store result (exception) in the pending exception
1768 // field in the JSEnv and return a failure sentinel.
1769 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
1770 isolate());
1771 __ Store(pending_exception, rax);
1772 __ LoadRoot(rax, Heap::kExceptionRootIndex);
1773 __ jmp(&exit);
1774
1775 // Invoke: Link this frame into the handler chain.
1776 __ bind(&invoke);
1777 __ PushStackHandler();
1778
1779 // Fake a receiver (NULL).
1780 __ Push(Immediate(0)); // receiver
1781
1782 // Invoke the function by calling through JS entry trampoline builtin and
1783 // pop the faked function when we return. We load the address from an
1784 // external reference instead of inlining the call target address directly
1785 // in the code, because the builtin stubs may not have been generated yet
1786 // at the time this code is generated.
1787 if (type() == StackFrame::ENTRY_CONSTRUCT) {
1788 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
1789 isolate());
1790 __ Load(rax, construct_entry);
1791 } else {
1792 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
1793 __ Load(rax, entry);
1794 }
1795 __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
1796 __ call(kScratchRegister);
1797
1798 // Unlink this frame from the handler chain.
1799 __ PopStackHandler();
1800
1801 __ bind(&exit);
1802 // Check if the current stack frame is marked as the outermost JS frame.
1803 __ Pop(rbx);
1804 __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
1805   __ j(not_equal, &not_outermost_js_2);
1806 __ Move(kScratchRegister, js_entry_sp);
1807 __ movp(Operand(kScratchRegister, 0), Immediate(0));
1808   __ bind(&not_outermost_js_2);
1809
1810 // Restore the top frame descriptor from the stack.
1811 { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
1812 __ Pop(c_entry_fp_operand);
1813 }
1814
1815 // Restore callee-saved registers (X64 conventions).
1816 #ifdef _WIN64
1817 // On Win64 XMM6-XMM15 are callee-save
1818 __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
1819 __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
1820 __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
1821 __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
1822 __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
1823 __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
1824 __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
1825 __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
1826 __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
1827 __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
1828 __ addp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
1829 #endif
1830
1831 __ popq(rbx);
1832 #ifdef _WIN64
1833   // Callee-saved in Win64 ABI, arguments/volatile in AMD64 ABI.
1834 __ popq(rsi);
1835 __ popq(rdi);
1836 #endif
1837 __ popq(r15);
1838 __ popq(r14);
1839 __ popq(r13);
1840 __ popq(r12);
1841 __ addp(rsp, Immediate(2 * kPointerSize)); // remove markers
1842
1843 // Restore frame pointer and return.
1844 __ popq(rbp);
1845 __ ret(0);
1846 }
1847
1848
1849 // -------------------------------------------------------------------------
1850 // StringCharCodeAtGenerator
1851
1852 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
1853 // If the receiver is a smi trigger the non-string case.
1854 if (check_mode_ == RECEIVER_IS_UNKNOWN) {
1855 __ JumpIfSmi(object_, receiver_not_string_);
1856
1857 // Fetch the instance type of the receiver into result register.
1858 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
1859 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
1860 // If the receiver is not a string trigger the non-string case.
1861 __ testb(result_, Immediate(kIsNotStringMask));
1862 __ j(not_zero, receiver_not_string_);
1863 }
1864
1865 // If the index is non-smi trigger the non-smi case.
1866 __ JumpIfNotSmi(index_, &index_not_smi_);
1867 __ bind(&got_smi_index_);
1868
1869 // Check for index out of range.
1870 __ SmiCompare(index_, FieldOperand(object_, String::kLengthOffset));
1871 __ j(above_equal, index_out_of_range_);
1872
1873 __ SmiToInteger32(index_, index_);
1874
1875 StringCharLoadGenerator::Generate(
1876 masm, object_, index_, result_, &call_runtime_);
1877
1878 __ Integer32ToSmi(result_, result_);
1879 __ bind(&exit_);
1880 }
1881
1882
1883 void StringCharCodeAtGenerator::GenerateSlow(
1884 MacroAssembler* masm, EmbedMode embed_mode,
1885 const RuntimeCallHelper& call_helper) {
1886 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
1887
1888 Factory* factory = masm->isolate()->factory();
1889 // Index is not a smi.
1890 __ bind(&index_not_smi_);
1891 // If index is a heap number, try converting it to an integer.
1892 __ CheckMap(index_,
1893 factory->heap_number_map(),
1894 index_not_number_,
1895 DONT_DO_SMI_CHECK);
1896 call_helper.BeforeCall(masm);
1897 if (embed_mode == PART_OF_IC_HANDLER) {
1898 __ Push(LoadWithVectorDescriptor::VectorRegister());
1899 __ Push(LoadDescriptor::SlotRegister());
1900 }
1901 __ Push(object_);
1902 __ Push(index_); // Consumed by runtime conversion function.
1903 __ CallRuntime(Runtime::kNumberToSmi);
1904 if (!index_.is(rax)) {
1905 // Save the conversion result before the pop instructions below
1906 // have a chance to overwrite it.
1907 __ movp(index_, rax);
1908 }
1909 __ Pop(object_);
1910 if (embed_mode == PART_OF_IC_HANDLER) {
1911 __ Pop(LoadDescriptor::SlotRegister());
1912 __ Pop(LoadWithVectorDescriptor::VectorRegister());
1913 }
1914 // Reload the instance type.
1915 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
1916 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
1917 call_helper.AfterCall(masm);
1918 // If index is still not a smi, it must be out of range.
1919 __ JumpIfNotSmi(index_, index_out_of_range_);
1920 // Otherwise, return to the fast path.
1921 __ jmp(&got_smi_index_);
1922
1923 // Call runtime. We get here when the receiver is a string and the
1924 // index is a number, but the code of getting the actual character
1925 // is too complex (e.g., when the string needs to be flattened).
1926 __ bind(&call_runtime_);
1927 call_helper.BeforeCall(masm);
1928 __ Push(object_);
1929 __ Integer32ToSmi(index_, index_);
1930 __ Push(index_);
1931 __ CallRuntime(Runtime::kStringCharCodeAtRT);
1932 if (!result_.is(rax)) {
1933 __ movp(result_, rax);
1934 }
1935 call_helper.AfterCall(masm);
1936 __ jmp(&exit_);
1937
1938 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
1939 }
1940
1941
1942 // -------------------------------------------------------------------------
1943 // StringCharFromCodeGenerator
1944
1945 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
1946 // Fast case of Heap::LookupSingleCharacterStringFromCode.
1947 __ JumpIfNotSmi(code_, &slow_case_);
1948 __ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode));
1949 __ j(above, &slow_case_);
1950
1951 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
1952 SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2);
1953 __ movp(result_, FieldOperand(result_, index.reg, index.scale,
1954 FixedArray::kHeaderSize));
1955 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
1956 __ j(equal, &slow_case_);
1957 __ bind(&exit_);
1958 }
1959
1960
1961 void StringCharFromCodeGenerator::GenerateSlow(
1962 MacroAssembler* masm,
1963 const RuntimeCallHelper& call_helper) {
1964 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
1965
1966 __ bind(&slow_case_);
1967 call_helper.BeforeCall(masm);
1968 __ Push(code_);
1969 __ CallRuntime(Runtime::kStringCharFromCode);
1970 if (!result_.is(rax)) {
1971 __ movp(result_, rax);
1972 }
1973 call_helper.AfterCall(masm);
1974 __ jmp(&exit_);
1975
1976 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
1977 }
1978
1979
1980 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
1981 Register dest,
1982 Register src,
1983 Register count,
1984 String::Encoding encoding) {
1985 // Nothing to do for zero characters.
1986 Label done;
1987 __ testl(count, count);
1988 __ j(zero, &done, Label::kNear);
1989
1990 // Make count the number of bytes to copy.
1991 if (encoding == String::TWO_BYTE_ENCODING) {
1992 STATIC_ASSERT(2 == sizeof(uc16));
1993 __ addl(count, count);
1994 }
1995
1996 // Copy remaining characters.
1997 Label loop;
1998 __ bind(&loop);
1999 __ movb(kScratchRegister, Operand(src, 0));
2000 __ movb(Operand(dest, 0), kScratchRegister);
2001 __ incp(src);
2002 __ incp(dest);
2003 __ decl(count);
2004 __ j(not_zero, &loop);
2005
2006 __ bind(&done);
2007 }
2008
2009
2010 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
2011 Register left,
2012 Register right,
2013 Register scratch1,
2014 Register scratch2) {
2015 Register length = scratch1;
2016
2017 // Compare lengths.
2018 Label check_zero_length;
2019 __ movp(length, FieldOperand(left, String::kLengthOffset));
2020 __ SmiCompare(length, FieldOperand(right, String::kLengthOffset));
2021 __ j(equal, &check_zero_length, Label::kNear);
2022 __ Move(rax, Smi::FromInt(NOT_EQUAL));
2023 __ ret(0);
2024
2025 // Check if the length is zero.
2026 Label compare_chars;
2027 __ bind(&check_zero_length);
2028 STATIC_ASSERT(kSmiTag == 0);
2029 __ SmiTest(length);
2030 __ j(not_zero, &compare_chars, Label::kNear);
2031 __ Move(rax, Smi::FromInt(EQUAL));
2032 __ ret(0);
2033
2034 // Compare characters.
2035 __ bind(&compare_chars);
2036 Label strings_not_equal;
2037 GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
2038 &strings_not_equal, Label::kNear);
2039
2040 // Characters are equal.
2041 __ Move(rax, Smi::FromInt(EQUAL));
2042 __ ret(0);
2043
2044 // Characters are not equal.
2045 __ bind(&strings_not_equal);
2046 __ Move(rax, Smi::FromInt(NOT_EQUAL));
2047 __ ret(0);
2048 }
2049
2050
2051 void StringHelper::GenerateCompareFlatOneByteStrings(
2052 MacroAssembler* masm, Register left, Register right, Register scratch1,
2053 Register scratch2, Register scratch3, Register scratch4) {
2054 // Ensure that you can always subtract a string length from a non-negative
2055 // number (e.g. another length).
2056 STATIC_ASSERT(String::kMaxLength < 0x7fffffff);
2057
2058 // Find minimum length and length difference.
2059 __ movp(scratch1, FieldOperand(left, String::kLengthOffset));
2060 __ movp(scratch4, scratch1);
2061 __ SmiSub(scratch4,
2062 scratch4,
2063 FieldOperand(right, String::kLengthOffset));
2064 // Register scratch4 now holds left.length - right.length.
2065 const Register length_difference = scratch4;
2066 Label left_shorter;
2067 __ j(less, &left_shorter, Label::kNear);
2068   // The right string isn't longer than the left one.
2069 // Get the right string's length by subtracting the (non-negative) difference
2070 // from the left string's length.
2071 __ SmiSub(scratch1, scratch1, length_difference);
2072 __ bind(&left_shorter);
2073 // Register scratch1 now holds Min(left.length, right.length).
2074 const Register min_length = scratch1;
2075
2076 Label compare_lengths;
2077 // If min-length is zero, go directly to comparing lengths.
2078 __ SmiTest(min_length);
2079 __ j(zero, &compare_lengths, Label::kNear);
2080
2081 // Compare loop.
2082 Label result_not_equal;
2083 GenerateOneByteCharsCompareLoop(
2084 masm, left, right, min_length, scratch2, &result_not_equal,
2085 // In debug-code mode, SmiTest below might push
2086 // the target label outside the near range.
2087 Label::kFar);
2088
2089 // Completed loop without finding different characters.
2090 // Compare lengths (precomputed).
2091 __ bind(&compare_lengths);
2092 __ SmiTest(length_difference);
2093 Label length_not_equal;
2094 __ j(not_zero, &length_not_equal, Label::kNear);
2095
2096 // Result is EQUAL.
2097 __ Move(rax, Smi::FromInt(EQUAL));
2098 __ ret(0);
2099
2100 Label result_greater;
2101 Label result_less;
2102 __ bind(&length_not_equal);
2103 __ j(greater, &result_greater, Label::kNear);
2104 __ jmp(&result_less, Label::kNear);
2105 __ bind(&result_not_equal);
2106 // Unequal comparison of left to right, either character or length.
2107 __ j(above, &result_greater, Label::kNear);
2108 __ bind(&result_less);
2109
2110 // Result is LESS.
2111 __ Move(rax, Smi::FromInt(LESS));
2112 __ ret(0);
2113
2114 // Result is GREATER.
2115 __ bind(&result_greater);
2116 __ Move(rax, Smi::FromInt(GREATER));
2117 __ ret(0);
2118 }
2119
2120
2121 void StringHelper::GenerateOneByteCharsCompareLoop(
2122 MacroAssembler* masm, Register left, Register right, Register length,
2123 Register scratch, Label* chars_not_equal, Label::Distance near_jump) {
2124 // Change index to run from -length to -1 by adding length to string
2125 // start. This means that loop ends when index reaches zero, which
2126 // doesn't need an additional compare.
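  // For example, with length 3 the index runs -3, -2, -1; the incq below
  // brings it to 0 after the last character and the loop falls through.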
2127 __ SmiToInteger32(length, length);
2128 __ leap(left,
2129 FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
2130 __ leap(right,
2131 FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
2132 __ negq(length);
2133 Register index = length; // index = -length;
2134
2135 // Compare loop.
2136 Label loop;
2137 __ bind(&loop);
2138 __ movb(scratch, Operand(left, index, times_1, 0));
2139 __ cmpb(scratch, Operand(right, index, times_1, 0));
2140 __ j(not_equal, chars_not_equal, near_jump);
2141 __ incq(index);
2142 __ j(not_zero, &loop);
2143 }
2144
2145
2146 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
2147 // ----------- S t a t e -------------
2148 // -- rdx : left
2149 // -- rax : right
2150 // -- rsp[0] : return address
2151 // -----------------------------------
2152
2153 // Load rcx with the allocation site. We stick an undefined dummy value here
2154 // and replace it with the real allocation site later when we instantiate this
2155 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
2156 __ Move(rcx, isolate()->factory()->undefined_value());
2157
2158 // Make sure that we actually patched the allocation site.
2159 if (FLAG_debug_code) {
2160 __ testb(rcx, Immediate(kSmiTagMask));
2161 __ Assert(not_equal, kExpectedAllocationSite);
2162 __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
2163 isolate()->factory()->allocation_site_map());
2164 __ Assert(equal, kExpectedAllocationSite);
2165 }
2166
2167 // Tail call into the stub that handles binary operations with allocation
2168 // sites.
2169 BinaryOpWithAllocationSiteStub stub(isolate(), state());
2170 __ TailCallStub(&stub);
2171 }
2172
2173
2174 void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
2175 DCHECK_EQ(CompareICState::BOOLEAN, state());
2176 Label miss;
2177 Label::Distance const miss_distance =
2178 masm->emit_debug_code() ? Label::kFar : Label::kNear;
2179
2180 __ JumpIfSmi(rdx, &miss, miss_distance);
2181 __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
2182 __ JumpIfSmi(rax, &miss, miss_distance);
2183 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2184 __ JumpIfNotRoot(rcx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
2185 __ JumpIfNotRoot(rbx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
2186 if (!Token::IsEqualityOp(op())) {
2187 __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset));
2188 __ AssertSmi(rax);
2189 __ movp(rdx, FieldOperand(rdx, Oddball::kToNumberOffset));
2190 __ AssertSmi(rdx);
2191 __ pushq(rax);
2192 __ movq(rax, rdx);
2193 __ popq(rdx);
2194 }
2195 __ subp(rax, rdx);
2196 __ Ret();
2197
2198 __ bind(&miss);
2199 GenerateMiss(masm);
2200 }
2201
2202
2203 void CompareICStub::GenerateSmis(MacroAssembler* masm) {
2204 DCHECK(state() == CompareICState::SMI);
2205 Label miss;
2206 __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);
2207
2208 if (GetCondition() == equal) {
2209 // For equality we do not care about the sign of the result.
2210 __ subp(rax, rdx);
2211 } else {
2212 Label done;
2213 __ subp(rdx, rax);
2214 __ j(no_overflow, &done, Label::kNear);
2215 // Correct sign of result in case of overflow.
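    // Flipping all bits (notp) also flips the sign bit, and only the sign of
    // the result matters to the caller, so this repairs the overflowed value.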
2216 __ notp(rdx);
2217 __ bind(&done);
2218 __ movp(rax, rdx);
2219 }
2220 __ ret(0);
2221
2222 __ bind(&miss);
2223 GenerateMiss(masm);
2224 }
2225
2226
2227 void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
2228 DCHECK(state() == CompareICState::NUMBER);
2229
2230 Label generic_stub;
2231 Label unordered, maybe_undefined1, maybe_undefined2;
2232 Label miss;
2233
2234 if (left() == CompareICState::SMI) {
2235 __ JumpIfNotSmi(rdx, &miss);
2236 }
2237 if (right() == CompareICState::SMI) {
2238 __ JumpIfNotSmi(rax, &miss);
2239 }
2240
2241   // Load left and right operands.
2242 Label done, left, left_smi, right_smi;
2243 __ JumpIfSmi(rax, &right_smi, Label::kNear);
2244 __ CompareMap(rax, isolate()->factory()->heap_number_map());
2245 __ j(not_equal, &maybe_undefined1, Label::kNear);
2246 __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
2247 __ jmp(&left, Label::kNear);
2248 __ bind(&right_smi);
2249 __ SmiToInteger32(rcx, rax); // Can't clobber rax yet.
2250 __ Cvtlsi2sd(xmm1, rcx);
2251
2252 __ bind(&left);
2253 __ JumpIfSmi(rdx, &left_smi, Label::kNear);
2254 __ CompareMap(rdx, isolate()->factory()->heap_number_map());
2255 __ j(not_equal, &maybe_undefined2, Label::kNear);
2256 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
2257 __ jmp(&done);
2258 __ bind(&left_smi);
2259 __ SmiToInteger32(rcx, rdx); // Can't clobber rdx yet.
2260 __ Cvtlsi2sd(xmm0, rcx);
2261
2262 __ bind(&done);
2263 // Compare operands
2264 __ Ucomisd(xmm0, xmm1);
2265
2266 // Don't base result on EFLAGS when a NaN is involved.
2267 __ j(parity_even, &unordered, Label::kNear);
2268
2269 // Return a result of -1, 0, or 1, based on EFLAGS.
2270 // Performing mov, because xor would destroy the flag register.
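  // setcc(above) yields 1 only when left > right; sbb then subtracts the
  // carry flag, which is set only when left < right: 1-0=1, 0-0=0, 0-1=-1.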
2271 __ movl(rax, Immediate(0));
2272 __ movl(rcx, Immediate(0));
2273 __ setcc(above, rax); // Add one to zero if carry clear and not equal.
2274 __ sbbp(rax, rcx); // Subtract one if below (aka. carry set).
2275 __ ret(0);
2276
2277 __ bind(&unordered);
2278 __ bind(&generic_stub);
2279 CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
2280 CompareICState::GENERIC, CompareICState::GENERIC);
2281 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
2282
2283 __ bind(&maybe_undefined1);
2284 if (Token::IsOrderedRelationalCompareOp(op())) {
2285 __ Cmp(rax, isolate()->factory()->undefined_value());
2286 __ j(not_equal, &miss);
2287 __ JumpIfSmi(rdx, &unordered);
2288 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
2289 __ j(not_equal, &maybe_undefined2, Label::kNear);
2290 __ jmp(&unordered);
2291 }
2292
2293 __ bind(&maybe_undefined2);
2294 if (Token::IsOrderedRelationalCompareOp(op())) {
2295 __ Cmp(rdx, isolate()->factory()->undefined_value());
2296 __ j(equal, &unordered);
2297 }
2298
2299 __ bind(&miss);
2300 GenerateMiss(masm);
2301 }
2302
2303
2304 void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
2305 DCHECK(state() == CompareICState::INTERNALIZED_STRING);
2306 DCHECK(GetCondition() == equal);
2307
2308 // Registers containing left and right operands respectively.
2309 Register left = rdx;
2310 Register right = rax;
2311 Register tmp1 = rcx;
2312 Register tmp2 = rbx;
2313
2314 // Check that both operands are heap objects.
2315 Label miss;
2316 Condition cond = masm->CheckEitherSmi(left, right, tmp1);
2317 __ j(cond, &miss, Label::kNear);
2318
2319 // Check that both operands are internalized strings.
2320 __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
2321 __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
2322 __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
2323 __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
2324 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
2325 __ orp(tmp1, tmp2);
2326 __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
2327 __ j(not_zero, &miss, Label::kNear);
2328
2329 // Internalized strings are compared by identity.
2330 Label done;
2331 __ cmpp(left, right);
2332 // Make sure rax is non-zero. At this point input operands are
2333 // guaranteed to be non-zero.
2334 DCHECK(right.is(rax));
2335 __ j(not_equal, &done, Label::kNear);
2336 STATIC_ASSERT(EQUAL == 0);
2337 STATIC_ASSERT(kSmiTag == 0);
2338 __ Move(rax, Smi::FromInt(EQUAL));
2339 __ bind(&done);
2340 __ ret(0);
2341
2342 __ bind(&miss);
2343 GenerateMiss(masm);
2344 }
2345
2346
2347 void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
2348 DCHECK(state() == CompareICState::UNIQUE_NAME);
2349 DCHECK(GetCondition() == equal);
2350
2351 // Registers containing left and right operands respectively.
2352 Register left = rdx;
2353 Register right = rax;
2354 Register tmp1 = rcx;
2355 Register tmp2 = rbx;
2356
2357 // Check that both operands are heap objects.
2358 Label miss;
2359 Condition cond = masm->CheckEitherSmi(left, right, tmp1);
2360 __ j(cond, &miss, Label::kNear);
2361
2362 // Check that both operands are unique names. This leaves the instance
2363 // types loaded in tmp1 and tmp2.
2364 __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
2365 __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
2366 __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
2367 __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
2368
2369 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
2370 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);
2371
2372 // Unique names are compared by identity.
2373 Label done;
2374 __ cmpp(left, right);
2375 // Make sure rax is non-zero. At this point input operands are
2376 // guaranteed to be non-zero.
2377 DCHECK(right.is(rax));
2378 __ j(not_equal, &done, Label::kNear);
2379 STATIC_ASSERT(EQUAL == 0);
2380 STATIC_ASSERT(kSmiTag == 0);
2381 __ Move(rax, Smi::FromInt(EQUAL));
2382 __ bind(&done);
2383 __ ret(0);
2384
2385 __ bind(&miss);
2386 GenerateMiss(masm);
2387 }
2388
2389
2390 void CompareICStub::GenerateStrings(MacroAssembler* masm) {
2391 DCHECK(state() == CompareICState::STRING);
2392 Label miss;
2393
2394 bool equality = Token::IsEqualityOp(op());
2395
2396 // Registers containing left and right operands respectively.
2397 Register left = rdx;
2398 Register right = rax;
2399 Register tmp1 = rcx;
2400 Register tmp2 = rbx;
2401 Register tmp3 = rdi;
2402
2403 // Check that both operands are heap objects.
2404 Condition cond = masm->CheckEitherSmi(left, right, tmp1);
2405 __ j(cond, &miss);
2406
2407 // Check that both operands are strings. This leaves the instance
2408 // types loaded in tmp1 and tmp2.
2409 __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
2410 __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
2411 __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
2412 __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
2413 __ movp(tmp3, tmp1);
2414 STATIC_ASSERT(kNotStringTag != 0);
2415 __ orp(tmp3, tmp2);
2416 __ testb(tmp3, Immediate(kIsNotStringMask));
2417 __ j(not_zero, &miss);
2418
2419 // Fast check for identical strings.
2420 Label not_same;
2421 __ cmpp(left, right);
2422   __ j(not_equal, &not_same, Label::kNear);
2423 STATIC_ASSERT(EQUAL == 0);
2424 STATIC_ASSERT(kSmiTag == 0);
2425 __ Move(rax, Smi::FromInt(EQUAL));
2426 __ ret(0);
2427
2428 // Handle not identical strings.
2429   __ bind(&not_same);
2430
2431 // Check that both strings are internalized strings. If they are, we're done
2432 // because we already know they are not identical. We also know they are both
2433 // strings.
2434 if (equality) {
2435 Label do_compare;
2436 STATIC_ASSERT(kInternalizedTag == 0);
2437 __ orp(tmp1, tmp2);
2438 __ testb(tmp1, Immediate(kIsNotInternalizedMask));
2439 __ j(not_zero, &do_compare, Label::kNear);
2440 // Make sure rax is non-zero. At this point input operands are
2441 // guaranteed to be non-zero.
2442 DCHECK(right.is(rax));
2443 __ ret(0);
2444 __ bind(&do_compare);
2445 }
2446
2447 // Check that both strings are sequential one-byte.
2448 Label runtime;
2449 __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);
2450
2451 // Compare flat one-byte strings. Returns when done.
2452 if (equality) {
2453 StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
2454 tmp2);
2455 } else {
2456 StringHelper::GenerateCompareFlatOneByteStrings(
2457 masm, left, right, tmp1, tmp2, tmp3, kScratchRegister);
2458 }
2459
2460 // Handle more complex cases in runtime.
2461 __ bind(&runtime);
2462 if (equality) {
2463 {
2464 FrameScope scope(masm, StackFrame::INTERNAL);
2465 __ Push(left);
2466 __ Push(right);
2467 __ CallRuntime(Runtime::kStringEqual);
2468 }
2469 __ LoadRoot(rdx, Heap::kTrueValueRootIndex);
2470 __ subp(rax, rdx);
2471 __ Ret();
2472 } else {
2473 __ PopReturnAddressTo(tmp1);
2474 __ Push(left);
2475 __ Push(right);
2476 __ PushReturnAddressFrom(tmp1);
2477 __ TailCallRuntime(Runtime::kStringCompare);
2478 }
2479
2480 __ bind(&miss);
2481 GenerateMiss(masm);
2482 }
2483
2484
2485 void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
2486 DCHECK_EQ(CompareICState::RECEIVER, state());
2487 Label miss;
2488 Condition either_smi = masm->CheckEitherSmi(rdx, rax);
2489 __ j(either_smi, &miss, Label::kNear);
2490
2491 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
2492 __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
2493 __ j(below, &miss, Label::kNear);
2494 __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx);
2495 __ j(below, &miss, Label::kNear);
2496
2497 DCHECK_EQ(equal, GetCondition());
2498 __ subp(rax, rdx);
2499 __ ret(0);
2500
2501 __ bind(&miss);
2502 GenerateMiss(masm);
2503 }
2504
2505
2506 void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
2507 Label miss;
2508 Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
2509 Condition either_smi = masm->CheckEitherSmi(rdx, rax);
2510 __ j(either_smi, &miss, Label::kNear);
2511
2512 __ GetWeakValue(rdi, cell);
2513 __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rdi);
2514 __ j(not_equal, &miss, Label::kNear);
2515 __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rdi);
2516 __ j(not_equal, &miss, Label::kNear);
2517
2518 if (Token::IsEqualityOp(op())) {
2519 __ subp(rax, rdx);
2520 __ ret(0);
2521 } else {
2522 __ PopReturnAddressTo(rcx);
2523 __ Push(rdx);
2524 __ Push(rax);
2525 __ Push(Smi::FromInt(NegativeComparisonResult(GetCondition())));
2526 __ PushReturnAddressFrom(rcx);
2527 __ TailCallRuntime(Runtime::kCompare);
2528 }
2529
2530 __ bind(&miss);
2531 GenerateMiss(masm);
2532 }
2533
2534
2535 void CompareICStub::GenerateMiss(MacroAssembler* masm) {
2536 {
2537 // Call the runtime system in a fresh internal frame.
2538 FrameScope scope(masm, StackFrame::INTERNAL);
2539 __ Push(rdx);
2540 __ Push(rax);
2541 __ Push(rdx);
2542 __ Push(rax);
2543 __ Push(Smi::FromInt(op()));
2544 __ CallRuntime(Runtime::kCompareIC_Miss);
2545
2546 // Compute the entry point of the rewritten stub.
2547 __ leap(rdi, FieldOperand(rax, Code::kHeaderSize));
2548 __ Pop(rax);
2549 __ Pop(rdx);
2550 }
2551
2552 // Do a tail call to the rewritten stub.
2553 __ jmp(rdi);
2554 }
2555
2556
2557 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
2558 Label* miss,
2559 Label* done,
2560 Register properties,
2561 Handle<Name> name,
2562 Register r0) {
2563 DCHECK(name->IsUniqueName());
2564 // If names of slots in range from 1 to kProbes - 1 for the hash value are
2565 // not equal to the name and kProbes-th slot is not used (its name is the
2566 // undefined value), it guarantees the hash table doesn't contain the
2567 // property. It's true even if some slots represent deleted properties
2568 // (their names are the hole value).
2569 for (int i = 0; i < kInlinedProbes; i++) {
2570 // r0 points to properties hash.
2571 // Compute the masked index: (hash + i + i * i) & mask.
2572 Register index = r0;
2573 // Capacity is smi 2^n.
2574 __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
2575 __ decl(index);
2576 __ andp(index,
2577 Immediate(name->Hash() + NameDictionary::GetProbeOffset(i)));
2578
2579 // Scale the index by multiplying by the entry size.
2580 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
2581 __ leap(index, Operand(index, index, times_2, 0)); // index *= 3.
2582
2583 Register entity_name = r0;
2584 // Having undefined at this place means the name is not contained.
2585 STATIC_ASSERT(kSmiTagSize == 1);
2586 __ movp(entity_name, Operand(properties,
2587 index,
2588 times_pointer_size,
2589 kElementsStartOffset - kHeapObjectTag));
2590 __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
2591 __ j(equal, done);
2592
2593 // Stop if found the property.
2594 __ Cmp(entity_name, Handle<Name>(name));
2595 __ j(equal, miss);
2596
2597 Label good;
2598 // Check for the hole and skip.
2599 __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
2600 __ j(equal, &good, Label::kNear);
2601
2602 // Check if the entry name is not a unique name.
2603 __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
2604 __ JumpIfNotUniqueNameInstanceType(
2605 FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
2606 __ bind(&good);
2607 }
2608
2609 NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
2610 NEGATIVE_LOOKUP);
2611 __ Push(Handle<Object>(name));
2612 __ Push(Immediate(name->Hash()));
2613 __ CallStub(&stub);
2614 __ testp(r0, r0);
2615 __ j(not_zero, miss);
2616 __ jmp(done);
2617 }
2618
2619
2620 // Probe the name dictionary in the |elements| register. Jump to the
2621 // |done| label if a property with the given name is found leaving the
2622 // index into the dictionary in |r1|. Jump to the |miss| label
2623 // otherwise.
2624 void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
2625 Label* miss,
2626 Label* done,
2627 Register elements,
2628 Register name,
2629 Register r0,
2630 Register r1) {
2631 DCHECK(!elements.is(r0));
2632 DCHECK(!elements.is(r1));
2633 DCHECK(!name.is(r0));
2634 DCHECK(!name.is(r1));
2635
2636 __ AssertName(name);
2637
2638 __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset));
2639 __ decl(r0);
2640
2641 for (int i = 0; i < kInlinedProbes; i++) {
2642 // Compute the masked index: (hash + i + i * i) & mask.
2643 __ movl(r1, FieldOperand(name, Name::kHashFieldOffset));
2644 __ shrl(r1, Immediate(Name::kHashShift));
2645 if (i > 0) {
2646 __ addl(r1, Immediate(NameDictionary::GetProbeOffset(i)));
2647 }
2648 __ andp(r1, r0);
2649
2650 // Scale the index by multiplying by the entry size.
2651 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
2652 __ leap(r1, Operand(r1, r1, times_2, 0)); // r1 = r1 * 3
2653
2654 // Check if the key is identical to the name.
2655 __ cmpp(name, Operand(elements, r1, times_pointer_size,
2656 kElementsStartOffset - kHeapObjectTag));
2657 __ j(equal, done);
2658 }
2659
2660 NameDictionaryLookupStub stub(masm->isolate(), elements, r0, r1,
2661 POSITIVE_LOOKUP);
2662 __ Push(name);
2663 __ movl(r0, FieldOperand(name, Name::kHashFieldOffset));
2664 __ shrl(r0, Immediate(Name::kHashShift));
2665 __ Push(r0);
2666 __ CallStub(&stub);
2667
2668 __ testp(r0, r0);
2669 __ j(zero, miss);
2670 __ jmp(done);
2671 }
2672
2673
2674 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
2675 // This stub overrides SometimesSetsUpAFrame() to return false. That means
2676 // we cannot call anything that could cause a GC from this stub.
2677 // Stack frame on entry:
2678 // rsp[0 * kPointerSize] : return address.
2679 // rsp[1 * kPointerSize] : key's hash.
2680 // rsp[2 * kPointerSize] : key.
2681 // Registers:
2682 // dictionary_: NameDictionary to probe.
2683 // result_: used as scratch.
2684 // index_: will hold an index of entry if lookup is successful.
2685 // might alias with result_.
2686 // Returns:
2687 // result_ is zero if lookup failed, non zero otherwise.
2688
2689 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
2690
2691 Register scratch = result();
2692
2693 __ SmiToInteger32(scratch, FieldOperand(dictionary(), kCapacityOffset));
2694 __ decl(scratch);
2695 __ Push(scratch);
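  // The capacity mask stays on the stack (read back via Operand(rsp, 0) in
  // the probe loop below) and is dropped again before each return.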
2696
2697 // If names of slots in range from 1 to kProbes - 1 for the hash value are
2698 // not equal to the name and kProbes-th slot is not used (its name is the
2699 // undefined value), it guarantees the hash table doesn't contain the
2700 // property. It's true even if some slots represent deleted properties
2701 // (their names are the null value).
2702 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
2703 kPointerSize);
2704 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
2705 // Compute the masked index: (hash + i + i * i) & mask.
2706 __ movp(scratch, args.GetArgumentOperand(1));
2707 if (i > 0) {
2708 __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
2709 }
2710 __ andp(scratch, Operand(rsp, 0));
2711
2712 // Scale the index by multiplying by the entry size.
2713 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
2714 __ leap(index(), Operand(scratch, scratch, times_2, 0)); // index *= 3.
2715
2716 // Having undefined at this place means the name is not contained.
2717 __ movp(scratch, Operand(dictionary(), index(), times_pointer_size,
2718 kElementsStartOffset - kHeapObjectTag));
2719
2720 __ Cmp(scratch, isolate()->factory()->undefined_value());
2721     __ j(equal, &not_in_dictionary);
2722
2723 // Stop if found the property.
2724 __ cmpp(scratch, args.GetArgumentOperand(0));
2725 __ j(equal, &in_dictionary);
2726
2727 if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
2728 // If we hit a key that is not a unique name during negative
2729 // lookup we have to bailout as this key might be equal to the
2730 // key we are looking for.
2731
2732 // Check if the entry name is not a unique name.
2733 __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
2734 __ JumpIfNotUniqueNameInstanceType(
2735 FieldOperand(scratch, Map::kInstanceTypeOffset),
2736 &maybe_in_dictionary);
2737 }
2738 }
2739
2740 __ bind(&maybe_in_dictionary);
2741 // If we are doing negative lookup then probing failure should be
2742 // treated as a lookup success. For positive lookup probing failure
2743 // should be treated as lookup failure.
2744 if (mode() == POSITIVE_LOOKUP) {
2745 __ movp(scratch, Immediate(0));
2746 __ Drop(1);
2747 __ ret(2 * kPointerSize);
2748 }
2749
2750 __ bind(&in_dictionary);
2751 __ movp(scratch, Immediate(1));
2752 __ Drop(1);
2753 __ ret(2 * kPointerSize);
2754
2755   __ bind(&not_in_dictionary);
2756 __ movp(scratch, Immediate(0));
2757 __ Drop(1);
2758 __ ret(2 * kPointerSize);
2759 }
2760
2761
2762 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
2763 Isolate* isolate) {
2764 StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
2765 stub1.GetCode();
2766 StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
2767 stub2.GetCode();
2768 }
2769
2770
2771 // Takes the input in 3 registers: address_, value_, and object_. A pointer to
2772 // the value has just been written into the object, now this stub makes sure
2773 // we keep the GC informed. The word in the object where the value has been
2774 // written is in the address register.
2775 void RecordWriteStub::Generate(MacroAssembler* masm) {
2776 Label skip_to_incremental_noncompacting;
2777 Label skip_to_incremental_compacting;
2778
2779 // The first two instructions are generated with labels so as to get the
2780 // offset fixed up correctly by the bind(Label*) call. We patch it back and
2781   // forth between a compare instruction (a nop in this position) and the
2782 // real branch when we start and stop incremental heap marking.
2783 // See RecordWriteStub::Patch for details.
2784 __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
2785 __ jmp(&skip_to_incremental_compacting, Label::kFar);
2786
2787 if (remembered_set_action() == EMIT_REMEMBERED_SET) {
2788 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
2789 MacroAssembler::kReturnAtEnd);
2790 } else {
2791 __ ret(0);
2792 }
2793
2794 __ bind(&skip_to_incremental_noncompacting);
2795 GenerateIncremental(masm, INCREMENTAL);
2796
2797 __ bind(&skip_to_incremental_compacting);
2798 GenerateIncremental(masm, INCREMENTAL_COMPACTION);
2799
2800 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
2801 // Will be checked in IncrementalMarking::ActivateGeneratedStub.
2802 masm->set_byte_at(0, kTwoByteNopInstruction);
2803 masm->set_byte_at(2, kFiveByteNopInstruction);
2804 }
2805
2806
2807 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
2808 regs_.Save(masm);
2809
2810 if (remembered_set_action() == EMIT_REMEMBERED_SET) {
2811 Label dont_need_remembered_set;
2812
2813 __ movp(regs_.scratch0(), Operand(regs_.address(), 0));
2814 __ JumpIfNotInNewSpace(regs_.scratch0(),
2815 regs_.scratch0(),
2816 &dont_need_remembered_set);
2817
2818 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
2819 &dont_need_remembered_set);
2820
2821 // First notify the incremental marker if necessary, then update the
2822 // remembered set.
2823 CheckNeedsToInformIncrementalMarker(
2824 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
2825 InformIncrementalMarker(masm);
2826 regs_.Restore(masm);
2827 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
2828 MacroAssembler::kReturnAtEnd);
2829
2830 __ bind(&dont_need_remembered_set);
2831 }
2832
2833 CheckNeedsToInformIncrementalMarker(
2834 masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
2835 InformIncrementalMarker(masm);
2836 regs_.Restore(masm);
2837 __ ret(0);
2838 }
2839
2840
2841 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
2842 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
2843 Register address =
2844 arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address();
2845 DCHECK(!address.is(regs_.object()));
2846 DCHECK(!address.is(arg_reg_1));
2847 __ Move(address, regs_.address());
2848 __ Move(arg_reg_1, regs_.object());
2849 // TODO(gc) Can we just set address arg2 in the beginning?
2850 __ Move(arg_reg_2, address);
2851 __ LoadAddress(arg_reg_3,
2852 ExternalReference::isolate_address(isolate()));
2853 int argument_count = 3;
2854
2855 AllowExternalCallThatCantCauseGC scope(masm);
2856 __ PrepareCallCFunction(argument_count);
2857 __ CallCFunction(
2858 ExternalReference::incremental_marking_record_write_function(isolate()),
2859 argument_count);
2860 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
2861 }
2862
2863
2864 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
2865 MacroAssembler* masm,
2866 OnNoNeedToInformIncrementalMarker on_no_need,
2867 Mode mode) {
2868 Label on_black;
2869 Label need_incremental;
2870 Label need_incremental_pop_object;
2871
2872 // Let's look at the color of the object: If it is not black we don't have
2873 // to inform the incremental marker.
2874 __ JumpIfBlack(regs_.object(),
2875 regs_.scratch0(),
2876 regs_.scratch1(),
2877 &on_black,
2878 Label::kNear);
2879
2880 regs_.Restore(masm);
2881 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
2882 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
2883 MacroAssembler::kReturnAtEnd);
2884 } else {
2885 __ ret(0);
2886 }
2887
2888 __ bind(&on_black);
2889
2890 // Get the value from the slot.
2891 __ movp(regs_.scratch0(), Operand(regs_.address(), 0));
2892
2893 if (mode == INCREMENTAL_COMPACTION) {
2894 Label ensure_not_white;
2895
2896 __ CheckPageFlag(regs_.scratch0(), // Contains value.
2897 regs_.scratch1(), // Scratch.
2898 MemoryChunk::kEvacuationCandidateMask,
2899 zero,
2900 &ensure_not_white,
2901 Label::kNear);
2902
2903 __ CheckPageFlag(regs_.object(),
2904 regs_.scratch1(), // Scratch.
2905 MemoryChunk::kSkipEvacuationSlotsRecordingMask,
2906 zero,
2907 &need_incremental);
2908
2909 __ bind(&ensure_not_white);
2910 }
2911
2912 // We need an extra register for this, so we push the object register
2913 // temporarily.
2914 __ Push(regs_.object());
2915 __ JumpIfWhite(regs_.scratch0(), // The value.
2916 regs_.scratch1(), // Scratch.
2917 regs_.object(), // Scratch.
2918 &need_incremental_pop_object, Label::kNear);
2919 __ Pop(regs_.object());
2920
2921 regs_.Restore(masm);
2922 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
2923 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
2924 MacroAssembler::kReturnAtEnd);
2925 } else {
2926 __ ret(0);
2927 }
2928
2929 __ bind(&need_incremental_pop_object);
2930 __ Pop(regs_.object());
2931
2932 __ bind(&need_incremental);
2933
2934 // Fall through when we need to inform the incremental marker.
2935 }
2936
2937
2938 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
2939 CEntryStub ces(isolate(), 1, kSaveFPRegs);
2940 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
2941 int parameter_count_offset =
2942 StubFailureTrampolineFrameConstants::kArgumentsLengthOffset;
2943 __ movp(rbx, MemOperand(rbp, parameter_count_offset));
2944 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
2945 __ PopReturnAddressTo(rcx);
2946 int additional_offset =
2947 function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
2948 __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset));
2949 __ jmp(rcx); // Return to IC Miss stub, continuation still on stack.
2950 }
2951
2952 static void HandleArrayCases(MacroAssembler* masm, Register feedback,
2953 Register receiver_map, Register scratch1,
2954 Register scratch2, Register scratch3,
2955 bool is_polymorphic, Label* miss) {
2956 // feedback initially contains the feedback array
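  // The array holds (weak cell of map, handler) pairs: slots 0 and 1 form the
  // first pair and further pairs follow, so the polymorphic loop below starts
  // at index 2 and advances by 2.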
2957 Label next_loop, prepare_next;
2958 Label start_polymorphic;
2959
2960 Register counter = scratch1;
2961 Register length = scratch2;
2962 Register cached_map = scratch3;
2963
2964 __ movp(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));
2965 __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
2966 __ j(not_equal, &start_polymorphic);
2967
2968 // found, now call handler.
2969 Register handler = feedback;
2970 __ movp(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
2971 __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
2972 __ jmp(handler);
2973
2974 // Polymorphic, we have to loop from 2 to N
2975 __ bind(&start_polymorphic);
2976 __ SmiToInteger32(length, FieldOperand(feedback, FixedArray::kLengthOffset));
2977 if (!is_polymorphic) {
2978 // If the IC could be monomorphic we have to make sure we don't go past the
2979 // end of the feedback array.
2980 __ cmpl(length, Immediate(2));
2981 __ j(equal, miss);
2982 }
2983 __ movl(counter, Immediate(2));
2984
2985 __ bind(&next_loop);
2986 __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
2987 FixedArray::kHeaderSize));
2988 __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
2989 __ j(not_equal, &prepare_next);
2990 __ movp(handler, FieldOperand(feedback, counter, times_pointer_size,
2991 FixedArray::kHeaderSize + kPointerSize));
2992 __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
2993 __ jmp(handler);
2994
2995 __ bind(&prepare_next);
2996 __ addl(counter, Immediate(2));
2997 __ cmpl(counter, length);
2998 __ j(less, &next_loop);
2999
3000 // We exhausted our array of map handler pairs.
3001 __ jmp(miss);
3002 }
3003
3004
3005 static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
3006 Register receiver_map, Register feedback,
3007 Register vector, Register integer_slot,
3008 Label* compare_map, Label* load_smi_map,
3009 Label* try_array) {
3010 __ JumpIfSmi(receiver, load_smi_map);
3011 __ movp(receiver_map, FieldOperand(receiver, 0));
3012
3013 __ bind(compare_map);
3014 __ cmpp(receiver_map, FieldOperand(feedback, WeakCell::kValueOffset));
3015 __ j(not_equal, try_array);
3016 Register handler = feedback;
3017 __ movp(handler, FieldOperand(vector, integer_slot, times_pointer_size,
3018 FixedArray::kHeaderSize + kPointerSize));
3019 __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
3020 __ jmp(handler);
3021 }
3022
3023 void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
3024 __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister());
3025 KeyedStoreICStub stub(isolate(), state());
3026 stub.GenerateForTrampoline(masm);
3027 }
3028
3029 void KeyedStoreICStub::Generate(MacroAssembler* masm) {
3030 GenerateImpl(masm, false);
3031 }
3032
3033 void KeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
3034 GenerateImpl(masm, true);
3035 }
3036
3037
3038 static void HandlePolymorphicKeyedStoreCase(MacroAssembler* masm,
3039 Register receiver_map,
3040 Register feedback, Register scratch,
3041 Register scratch1,
3042 Register scratch2, Label* miss) {
3043 // feedback initially contains the feedback array
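  // Here the elements come in (receiver map, transition map or undefined,
  // handler) triples, which is why the loop below advances the counter by 3.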
3044 Label next, next_loop, prepare_next;
3045 Label transition_call;
3046
3047 Register cached_map = scratch;
3048 Register counter = scratch1;
3049 Register length = scratch2;
3050
3051 // Polymorphic, we have to loop from 0 to N - 1
3052 __ movp(counter, Immediate(0));
3053 __ movp(length, FieldOperand(feedback, FixedArray::kLengthOffset));
3054 __ SmiToInteger32(length, length);
3055
3056 __ bind(&next_loop);
3057 __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
3058 FixedArray::kHeaderSize));
3059 __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
3060 __ j(not_equal, &prepare_next);
3061 __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
3062 FixedArray::kHeaderSize + kPointerSize));
3063 __ CompareRoot(cached_map, Heap::kUndefinedValueRootIndex);
3064 __ j(not_equal, &transition_call);
3065 __ movp(feedback, FieldOperand(feedback, counter, times_pointer_size,
3066 FixedArray::kHeaderSize + 2 * kPointerSize));
3067 __ leap(feedback, FieldOperand(feedback, Code::kHeaderSize));
3068 __ jmp(feedback);
3069
3070 __ bind(&transition_call);
3071 DCHECK(receiver_map.is(StoreTransitionDescriptor::MapRegister()));
3072 __ movp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
3073 // The weak cell may have been cleared.
3074 __ JumpIfSmi(receiver_map, miss);
3075 // Get the handler in value.
3076 __ movp(feedback, FieldOperand(feedback, counter, times_pointer_size,
3077 FixedArray::kHeaderSize + 2 * kPointerSize));
3078 __ leap(feedback, FieldOperand(feedback, Code::kHeaderSize));
3079 __ jmp(feedback);
3080
3081 __ bind(&prepare_next);
3082 __ addl(counter, Immediate(3));
3083 __ cmpl(counter, length);
3084 __ j(less, &next_loop);
3085
3086 // We exhausted our array of map/transition/handler triples.
3087 __ jmp(miss);
3088 }
3089
3090 void KeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
3091 Register receiver = StoreWithVectorDescriptor::ReceiverRegister(); // rdx
3092 Register key = StoreWithVectorDescriptor::NameRegister(); // rcx
3093 Register vector = StoreWithVectorDescriptor::VectorRegister(); // rbx
3094 Register slot = StoreWithVectorDescriptor::SlotRegister(); // rdi
3095 DCHECK(StoreWithVectorDescriptor::ValueRegister().is(rax)); // rax
3096 Register feedback = r8;
3097 Register integer_slot = r9;
3098 Register receiver_map = r11;
3099 DCHECK(!AreAliased(feedback, integer_slot, vector, slot, receiver_map));
3100
3101 __ SmiToInteger32(integer_slot, slot);
3102 __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
3103 FixedArray::kHeaderSize));
3104
3105 // Try to quickly handle the monomorphic case without knowing for sure
3106 // if we have a weak cell in feedback. We do know it's safe to look
3107 // at WeakCell::kValueOffset.
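  // In the monomorphic case the slot holds a WeakCell wrapping the receiver
  // map, and the handler code object sits in the following slot of the vector.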
3108 Label try_array, load_smi_map, compare_map;
3109 Label not_array, miss;
3110 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
3111 integer_slot, &compare_map, &load_smi_map, &try_array);
3112
3113 // Is it a fixed array?
3114 __ bind(&try_array);
3115 __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
3116 __ j(not_equal, &not_array);
3117 HandlePolymorphicKeyedStoreCase(masm, receiver_map, feedback, integer_slot,
3118 r15, r14, &miss);
3119
3120 __ bind(&not_array);
3121 Label try_poly_name;
3122 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
3123 __ j(not_equal, &try_poly_name);
3124
3125 Handle<Code> megamorphic_stub =
3126 KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
3127 __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);
3128
3129 __ bind(&try_poly_name);
3130 // We might have a name in feedback, and a fixed array in the next slot.
3131 __ cmpp(key, feedback);
3132 __ j(not_equal, &miss);
3133 // If the name comparison succeeded, we know we have a fixed array with
3134 // at least one map/handler pair.
3135 __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
3136 FixedArray::kHeaderSize + kPointerSize));
3137 HandleArrayCases(masm, feedback, receiver_map, integer_slot, r14, r15, false,
3138 &miss);
3139
3140 __ bind(&miss);
3141 KeyedStoreIC::GenerateMiss(masm);
3142
3143 __ bind(&load_smi_map);
3144 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
3145 __ jmp(&compare_map);
3146 }
3147
3148
3149 void CallICTrampolineStub::Generate(MacroAssembler* masm) {
3150 __ EmitLoadTypeFeedbackVector(rbx);
3151 CallICStub stub(isolate(), state());
3152 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
3153 }
3154
3155
3156 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
3157 if (masm->isolate()->function_entry_hook() != NULL) {
3158 ProfileEntryHookStub stub(masm->isolate());
3159 masm->CallStub(&stub);
3160 }
3161 }
3162
3163
3164 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
3165 // This stub can be called from essentially anywhere, so it needs to save
3166 // all volatile and callee-save registers.
3167 const size_t kNumSavedRegisters = 2;
3168 __ pushq(arg_reg_1);
3169 __ pushq(arg_reg_2);
3170
3171 // Calculate the original stack pointer and store it in the second arg.
3172 __ leap(arg_reg_2,
3173 Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize));
3174
3175 // Calculate the function address to the first arg.
3176 __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize));
3177 __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
3178
3179 // Save the remainder of the volatile registers.
3180 masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
3181
3182 // Call the entry hook function.
3183 __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()),
3184 Assembler::RelocInfoNone());
3185
3186 AllowExternalCallThatCantCauseGC scope(masm);
3187
3188 const int kArgumentCount = 2;
3189 __ PrepareCallCFunction(kArgumentCount);
3190 __ CallCFunction(rax, kArgumentCount);
3191
3192 // Restore volatile regs.
3193 masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
3194 __ popq(arg_reg_2);
3195 __ popq(arg_reg_1);
3196
3197 __ Ret();
3198 }
3199
3200
3201 template<class T>
3202 static void CreateArrayDispatch(MacroAssembler* masm,
3203 AllocationSiteOverrideMode mode) {
3204 if (mode == DISABLE_ALLOCATION_SITES) {
3205 T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
3206 __ TailCallStub(&stub);
3207 } else if (mode == DONT_OVERRIDE) {
3208 int last_index = GetSequenceIndexFromFastElementsKind(
3209 TERMINAL_FAST_ELEMENTS_KIND);
3210 for (int i = 0; i <= last_index; ++i) {
3211 Label next;
3212 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
3213 __ cmpl(rdx, Immediate(kind));
3214 __ j(not_equal, &next);
3215 T stub(masm->isolate(), kind);
3216 __ TailCallStub(&stub);
3217 __ bind(&next);
3218 }
3219
3220 // If we reached this point there is a problem.
3221 __ Abort(kUnexpectedElementsKindInArrayConstructor);
3222 } else {
3223 UNREACHABLE();
3224 }
3225 }
3226
3227
3228 static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
3229 AllocationSiteOverrideMode mode) {
3230 // rbx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
3231 // rdx - kind (if mode != DISABLE_ALLOCATION_SITES)
3232 // rax - number of arguments
3233 // rdi - constructor?
3234 // rsp[0] - return address
3235 // rsp[8] - last argument
3236
3237 Label normal_sequence;
3238 if (mode == DONT_OVERRIDE) {
3239 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
3240 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
3241 STATIC_ASSERT(FAST_ELEMENTS == 2);
3242 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
3243 STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
3244 STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
3245
3246 // is the low bit set? If so, we are holey and that is good.
3247 __ testb(rdx, Immediate(1));
3248 __ j(not_zero, &normal_sequence);
3249 }
3250
3251 // look at the first argument
3252 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
3253 __ movp(rcx, args.GetArgumentOperand(0));
3254 __ testp(rcx, rcx);
3255 __ j(zero, &normal_sequence);
3256
3257 if (mode == DISABLE_ALLOCATION_SITES) {
3258 ElementsKind initial = GetInitialFastElementsKind();
3259 ElementsKind holey_initial = GetHoleyElementsKind(initial);
3260
3261 ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
3262 holey_initial,
3263 DISABLE_ALLOCATION_SITES);
3264 __ TailCallStub(&stub_holey);
3265
3266 __ bind(&normal_sequence);
3267 ArraySingleArgumentConstructorStub stub(masm->isolate(),
3268 initial,
3269 DISABLE_ALLOCATION_SITES);
3270 __ TailCallStub(&stub);
3271 } else if (mode == DONT_OVERRIDE) {
3272 // We are going to create a holey array, but our kind is non-holey.
3273 // Fix kind and retry (only if we have an allocation site in the slot).
3274 __ incl(rdx);
3275
3276 if (FLAG_debug_code) {
3277 Handle<Map> allocation_site_map =
3278 masm->isolate()->factory()->allocation_site_map();
3279 __ Cmp(FieldOperand(rbx, 0), allocation_site_map);
3280 __ Assert(equal, kExpectedAllocationSite);
3281 }
3282
3283 // Save the resulting elements kind in type info. We can't just store rdx
3284 // in the AllocationSite::transition_info field because the elements kind
3285 // is restricted to a portion of the field; the upper bits must be left alone.
3286 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
3287 __ SmiAddConstant(FieldOperand(rbx, AllocationSite::kTransitionInfoOffset),
3288 Smi::FromInt(kFastElementsKindPackedToHoley));
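    // Adding kFastElementsKindPackedToHoley turns the packed kind stored in
    // the low bits into its holey counterpart (see the STATIC_ASSERTs above)
    // without disturbing the upper bits of the transition info.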
3289
3290 __ bind(&normal_sequence);
3291 int last_index = GetSequenceIndexFromFastElementsKind(
3292 TERMINAL_FAST_ELEMENTS_KIND);
3293 for (int i = 0; i <= last_index; ++i) {
3294 Label next;
3295 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
3296 __ cmpl(rdx, Immediate(kind));
3297 __ j(not_equal, &next);
3298 ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
3299 __ TailCallStub(&stub);
3300 __ bind(&next);
3301 }
3302
3303 // If we reached this point there is a problem.
3304 __ Abort(kUnexpectedElementsKindInArrayConstructor);
3305 } else {
3306 UNREACHABLE();
3307 }
3308 }
3309
3310
3311 template<class T>
3312 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
3313 int to_index = GetSequenceIndexFromFastElementsKind(
3314 TERMINAL_FAST_ELEMENTS_KIND);
3315 for (int i = 0; i <= to_index; ++i) {
3316 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
3317 T stub(isolate, kind);
3318 stub.GetCode();
3319 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
3320 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
3321 stub1.GetCode();
3322 }
3323 }
3324 }
3325
3326 void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
3327 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
3328 isolate);
3329 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
3330 isolate);
3331 ArrayNArgumentsConstructorStub stub(isolate);
3332 stub.GetCode();
3333
3334 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
3335 for (int i = 0; i < 2; i++) {
3336 // For internal arrays we only need a few stubs.
3337 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
3338 stubh1.GetCode();
3339 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
3340 stubh2.GetCode();
3341 }
3342 }
3343
3344 void ArrayConstructorStub::GenerateDispatchToArrayStub(
3345 MacroAssembler* masm, AllocationSiteOverrideMode mode) {
3346 Label not_zero_case, not_one_case;
3347 __ testp(rax, rax);
3348 __ j(not_zero, &not_zero_case);
3349 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
3350
3351 __ bind(&not_zero_case);
3352 __ cmpl(rax, Immediate(1));
3353 __ j(greater, &not_one_case);
3354 CreateArrayDispatchOneArgument(masm, mode);
3355
3356 __ bind(&not_one_case);
3357 ArrayNArgumentsConstructorStub stub(masm->isolate());
3358 __ TailCallStub(&stub);
3359 }
3360
3361 void ArrayConstructorStub::Generate(MacroAssembler* masm) {
3362 // ----------- S t a t e -------------
3363 // -- rax : argc
3364 // -- rbx : AllocationSite or undefined
3365 // -- rdi : constructor
3366 // -- rdx : new target
3367 // -- rsp[0] : return address
3368 // -- rsp[8] : last argument
3369 // -----------------------------------
3370 if (FLAG_debug_code) {
3371 // The array construct code is only set for the global and natives
3372 // builtin Array functions which always have maps.
3373
3374 // Initial map for the builtin Array function should be a map.
3375 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
3376 // The following check covers both a NULL pointer and a Smi.
3377 STATIC_ASSERT(kSmiTag == 0);
3378 Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
3379 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
3380 __ CmpObjectType(rcx, MAP_TYPE, rcx);
3381 __ Check(equal, kUnexpectedInitialMapForArrayFunction);
3382
3383 // We should either have undefined in rbx or a valid AllocationSite
3384 __ AssertUndefinedOrAllocationSite(rbx);
3385 }
3386
3387 // Enter the context of the Array function.
3388 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
3389
3390 Label subclassing;
3391 __ cmpp(rdi, rdx);
3392 __ j(not_equal, &subclassing);
3393
3394 Label no_info;
3395 // If the feedback vector is the undefined value, call an array constructor
3396 // that doesn't use AllocationSites.
3397 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
3398 __ j(equal, &no_info);
3399
3400 // Extract the elements kind from the lower bits of the transition info.
3401 __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset));
3402 __ SmiToInteger32(rdx, rdx);
3403 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
3404 __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
3405 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
3406
3407 __ bind(&no_info);
3408 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
3409
3410 // Subclassing
3411 __ bind(&subclassing);
3412 StackArgumentsAccessor args(rsp, rax);
3413 __ movp(args.GetReceiverOperand(), rdi);
3414 __ addp(rax, Immediate(3));
3415 __ PopReturnAddressTo(rcx);
3416 __ Push(rdx);
3417 __ Push(rbx);
3418 __ PushReturnAddressFrom(rcx);
3419 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
3420 }
3421
3422
3423 void InternalArrayConstructorStub::GenerateCase(
3424 MacroAssembler* masm, ElementsKind kind) {
3425 Label not_zero_case, not_one_case;
3426 Label normal_sequence;
3427
3428 __ testp(rax, rax);
3429 __ j(not_zero, &not_zero_case);
3430 InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
3431 __ TailCallStub(&stub0);
3432
3433 __ bind(&not_zero_case);
3434 __ cmpl(rax, Immediate(1));
3435 __ j(greater, &not_one_case);
3436
3437 if (IsFastPackedElementsKind(kind)) {
3438 // We might need to create a holey array:
3439 // look at the first argument.
3440 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
3441 __ movp(rcx, args.GetArgumentOperand(0));
3442 __ testp(rcx, rcx);
3443 __ j(zero, &normal_sequence);
3444
3445 InternalArraySingleArgumentConstructorStub
3446 stub1_holey(isolate(), GetHoleyElementsKind(kind));
3447 __ TailCallStub(&stub1_holey);
3448 }
3449
3450 __ bind(&normal_sequence);
3451 InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
3452 __ TailCallStub(&stub1);
3453
3454 __ bind(&not_one_case);
3455 ArrayNArgumentsConstructorStub stubN(isolate());
3456 __ TailCallStub(&stubN);
3457 }
3458
3459
3460 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
3461 // ----------- S t a t e -------------
3462 // -- rax : argc
3463 // -- rdi : constructor
3464 // -- rsp[0] : return address
3465 // -- rsp[8] : last argument
3466 // -----------------------------------
3467
3468 if (FLAG_debug_code) {
3469 // The array construct code is only set for the global and natives
3470 // builtin Array functions which always have maps.
3471
3472 // Initial map for the builtin Array function should be a map.
3473 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
3474 // The following check covers both a NULL pointer and a Smi.
3475 STATIC_ASSERT(kSmiTag == 0);
3476 Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
3477 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
3478 __ CmpObjectType(rcx, MAP_TYPE, rcx);
3479 __ Check(equal, kUnexpectedInitialMapForArrayFunction);
3480 }
3481
3482 // Figure out the right elements kind
3483 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
3484
3485 // Load the map's "bit field 2" into rcx. Only the low byte is needed;
3486 // the DecodeField below extracts the elements kind from it.
3487 __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
3488 // Retrieve elements_kind from bit field 2.
3489 __ DecodeField<Map::ElementsKindBits>(rcx);
3490
3491 if (FLAG_debug_code) {
3492 Label done;
3493 __ cmpl(rcx, Immediate(FAST_ELEMENTS));
3494 __ j(equal, &done);
3495 __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
3496 __ Assert(equal,
3497 kInvalidElementsKindForInternalArrayOrInternalPackedArray);
3498 __ bind(&done);
3499 }
3500
3501 Label fast_elements_case;
3502 __ cmpl(rcx, Immediate(FAST_ELEMENTS));
3503 __ j(equal, &fast_elements_case);
3504 GenerateCase(masm, FAST_HOLEY_ELEMENTS);
3505
3506 __ bind(&fast_elements_case);
3507 GenerateCase(masm, FAST_ELEMENTS);
3508 }
3509
3510
3511 void FastNewObjectStub::Generate(MacroAssembler* masm) {
3512 // ----------- S t a t e -------------
3513 // -- rdi : target
3514 // -- rdx : new target
3515 // -- rsi : context
3516 // -- rsp[0] : return address
3517 // -----------------------------------
3518 __ AssertFunction(rdi);
3519 __ AssertReceiver(rdx);
3520
3521 // Verify that the new target is a JSFunction.
3522 Label new_object;
3523 __ CmpObjectType(rdx, JS_FUNCTION_TYPE, rbx);
3524 __ j(not_equal, &new_object);
3525
3526 // Load the initial map and verify that it's in fact a map.
3527 __ movp(rcx, FieldOperand(rdx, JSFunction::kPrototypeOrInitialMapOffset));
3528 __ JumpIfSmi(rcx, &new_object);
3529 __ CmpObjectType(rcx, MAP_TYPE, rbx);
3530 __ j(not_equal, &new_object);
3531
3532 // Fall back to runtime if the target differs from the new target's
3533 // initial map constructor.
3534 __ cmpp(rdi, FieldOperand(rcx, Map::kConstructorOrBackPointerOffset));
3535 __ j(not_equal, &new_object);
3536
3537 // Allocate the JSObject on the heap.
3538 Label allocate, done_allocate;
3539 __ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset));
3540 __ leal(rbx, Operand(rbx, times_pointer_size, 0));
3541 __ Allocate(rbx, rax, rdi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
3542 __ bind(&done_allocate);
3543
3544 // Initialize the JSObject fields.
3545 __ movp(FieldOperand(rax, JSObject::kMapOffset), rcx);
3546 __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
3547 __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
3548 __ movp(FieldOperand(rax, JSObject::kElementsOffset), rbx);
3549 STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
3550 __ leap(rbx, FieldOperand(rax, JSObject::kHeaderSize));
3551
3552 // ----------- S t a t e -------------
3553 // -- rax : result (tagged)
3554 // -- rbx : result fields (untagged)
3555 // -- rdi : result end (untagged)
3556 // -- rcx : initial map
3557 // -- rsi : context
3558 // -- rsp[0] : return address
3559 // -----------------------------------
3560
3561 // Perform in-object slack tracking if requested.
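  // While the map's construction counter is still running, only the used
  // in-object fields are initialized with undefined; the reserved tail is
  // filled with the one-pointer filler map so the instance can be shrunk
  // once Runtime::kFinalizeInstanceSize runs (see below).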
3562 Label slack_tracking;
3563 STATIC_ASSERT(Map::kNoSlackTracking == 0);
3564 __ LoadRoot(r11, Heap::kUndefinedValueRootIndex);
3565 __ testl(FieldOperand(rcx, Map::kBitField3Offset),
3566 Immediate(Map::ConstructionCounter::kMask));
3567 __ j(not_zero, &slack_tracking, Label::kNear);
3568 {
3569 // Initialize all in-object fields with undefined.
3570 __ InitializeFieldsWithFiller(rbx, rdi, r11);
3571 __ Ret();
3572 }
3573 __ bind(&slack_tracking);
3574 {
3575 // Decrease generous allocation count.
3576 STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
3577 __ subl(FieldOperand(rcx, Map::kBitField3Offset),
3578 Immediate(1 << Map::ConstructionCounter::kShift));
3579
3580 // Initialize the in-object fields with undefined.
3581 __ movzxbl(rdx, FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset));
3582 __ negp(rdx);
3583 __ leap(rdx, Operand(rdi, rdx, times_pointer_size, 0));
3584 __ InitializeFieldsWithFiller(rbx, rdx, r11);
3585
3586 // Initialize the remaining (reserved) fields with one pointer filler map.
3587 __ LoadRoot(r11, Heap::kOnePointerFillerMapRootIndex);
3588 __ InitializeFieldsWithFiller(rdx, rdi, r11);
3589
3590 // Check if we can finalize the instance size.
3591 Label finalize;
3592 STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
3593 __ testl(FieldOperand(rcx, Map::kBitField3Offset),
3594 Immediate(Map::ConstructionCounter::kMask));
3595 __ j(zero, &finalize, Label::kNear);
3596 __ Ret();
3597
3598 // Finalize the instance size.
3599 __ bind(&finalize);
3600 {
3601 FrameScope scope(masm, StackFrame::INTERNAL);
3602 __ Push(rax);
3603 __ Push(rcx);
3604 __ CallRuntime(Runtime::kFinalizeInstanceSize);
3605 __ Pop(rax);
3606 }
3607 __ Ret();
3608 }
3609
3610 // Fall back to %AllocateInNewSpace.
3611 __ bind(&allocate);
3612 {
3613 FrameScope scope(masm, StackFrame::INTERNAL);
3614 __ Integer32ToSmi(rbx, rbx);
3615 __ Push(rcx);
3616 __ Push(rbx);
3617 __ CallRuntime(Runtime::kAllocateInNewSpace);
3618 __ Pop(rcx);
3619 }
3620 __ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset));
3621 __ leap(rdi, Operand(rax, rbx, times_pointer_size, 0));
3622 STATIC_ASSERT(kHeapObjectTag == 1);
3623 __ decp(rdi); // Remove the tag from the end address.
3624 __ jmp(&done_allocate);
3625
3626 // Fall back to %NewObject.
3627 __ bind(&new_object);
3628 __ PopReturnAddressTo(rcx);
3629 __ Push(rdi);
3630 __ Push(rdx);
3631 __ PushReturnAddressFrom(rcx);
3632 __ TailCallRuntime(Runtime::kNewObject);
3633 }
3634
3635
3636 void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
3637 // ----------- S t a t e -------------
3638 // -- rdi : function
3639 // -- rsi : context
3640 // -- rbp : frame pointer
3641 // -- rsp[0] : return address
3642 // -----------------------------------
3643 __ AssertFunction(rdi);
3644
3645 // Make rdx point to the JavaScript frame.
3646 __ movp(rdx, rbp);
3647 if (skip_stub_frame()) {
3648 // For Ignition we need to skip the handler/stub frame to reach the
3649 // JavaScript frame for the function.
3650 __ movp(rdx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
3651 }
3652 if (FLAG_debug_code) {
3653 Label ok;
3654 __ cmpp(rdi, Operand(rdx, StandardFrameConstants::kFunctionOffset));
3655 __ j(equal, &ok);
3656 __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
3657 __ bind(&ok);
3658 }
3659
3660 // Check if we have rest parameters (only possible if we have an
3661 // arguments adaptor frame below the function frame).
3662 Label no_rest_parameters;
3663 __ movp(rbx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
3664 __ Cmp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
3665 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3666 __ j(not_equal, &no_rest_parameters, Label::kNear);
3667
3668 // Check if the arguments adaptor frame contains more arguments than
3669 // specified by the function's internal formal parameter count.
3670 Label rest_parameters;
3671 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
3672 __ LoadSharedFunctionInfoSpecialField(
3673 rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset);
3674 __ SmiToInteger32(
3675 rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3676 __ subl(rax, rcx);
3677 __ j(greater, &rest_parameters);
3678
3679 // Return an empty rest parameter array.
3680 __ bind(&no_rest_parameters);
3681 {
3682 // ----------- S t a t e -------------
3683 // -- rsi : context
3684 // -- rsp[0] : return address
3685 // -----------------------------------
3686
3687 // Allocate an empty rest parameter array.
3688 Label allocate, done_allocate;
3689 __ Allocate(JSArray::kSize, rax, rdx, rcx, &allocate, NO_ALLOCATION_FLAGS);
3690 __ bind(&done_allocate);
3691
3692 // Setup the rest parameter array in rax.
3693 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, rcx);
3694 __ movp(FieldOperand(rax, JSArray::kMapOffset), rcx);
3695 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
3696 __ movp(FieldOperand(rax, JSArray::kPropertiesOffset), rcx);
3697 __ movp(FieldOperand(rax, JSArray::kElementsOffset), rcx);
3698 __ movp(FieldOperand(rax, JSArray::kLengthOffset), Immediate(0));
3699 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
3700 __ Ret();
3701
3702 // Fall back to %AllocateInNewSpace.
3703 __ bind(&allocate);
3704 {
3705 FrameScope scope(masm, StackFrame::INTERNAL);
3706 __ Push(Smi::FromInt(JSArray::kSize));
3707 __ CallRuntime(Runtime::kAllocateInNewSpace);
3708 }
3709 __ jmp(&done_allocate);
3710 }
3711
3712 __ bind(&rest_parameters);
3713 {
3714 // Compute the pointer to the first rest parameter (skipping the receiver).
3715 __ leap(rbx, Operand(rbx, rax, times_pointer_size,
3716 StandardFrameConstants::kCallerSPOffset -
3717 1 * kPointerSize));
3718
3719 // ----------- S t a t e -------------
3720 // -- rdi : function
3721 // -- rsi : context
3722 // -- rax : number of rest parameters
3723 // -- rbx : pointer to first rest parameters
3724 // -- rsp[0] : return address
3725 // -----------------------------------
3726
3727 // Allocate space for the rest parameter array plus the backing store.
3728 Label allocate, done_allocate;
3729 __ leal(rcx, Operand(rax, times_pointer_size,
3730 JSArray::kSize + FixedArray::kHeaderSize));
3731 __ Allocate(rcx, rdx, r8, no_reg, &allocate, NO_ALLOCATION_FLAGS);
3732 __ bind(&done_allocate);
3733
3734 // Compute the arguments.length in rdi.
3735 __ Integer32ToSmi(rdi, rax);
3736
3737 // Setup the elements array in rdx.
3738 __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
3739 __ movp(FieldOperand(rdx, FixedArray::kMapOffset), rcx);
3740 __ movp(FieldOperand(rdx, FixedArray::kLengthOffset), rdi);
3741 {
3742 Label loop, done_loop;
3743 __ Set(rcx, 0);
3744 __ bind(&loop);
3745 __ cmpl(rcx, rax);
3746 __ j(equal, &done_loop, Label::kNear);
3747 __ movp(kScratchRegister, Operand(rbx, 0 * kPointerSize));
3748 __ movp(
3749 FieldOperand(rdx, rcx, times_pointer_size, FixedArray::kHeaderSize),
3750 kScratchRegister);
3751 __ subp(rbx, Immediate(1 * kPointerSize));
3752 __ addl(rcx, Immediate(1));
3753 __ jmp(&loop);
3754 __ bind(&done_loop);
3755 }
3756
3757 // Setup the rest parameter array in rax.
3758 __ leap(rax,
3759 Operand(rdx, rax, times_pointer_size, FixedArray::kHeaderSize));
3760 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, rcx);
3761 __ movp(FieldOperand(rax, JSArray::kMapOffset), rcx);
3762 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
3763 __ movp(FieldOperand(rax, JSArray::kPropertiesOffset), rcx);
3764 __ movp(FieldOperand(rax, JSArray::kElementsOffset), rdx);
3765 __ movp(FieldOperand(rax, JSArray::kLengthOffset), rdi);
3766 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
3767 __ Ret();
3768
3769 // Fall back to %AllocateInNewSpace (if not too big).
3770 Label too_big_for_new_space;
3771 __ bind(&allocate);
3772 __ cmpl(rcx, Immediate(kMaxRegularHeapObjectSize));
3773 __ j(greater, &too_big_for_new_space);
3774 {
3775 FrameScope scope(masm, StackFrame::INTERNAL);
3776 __ Integer32ToSmi(rax, rax);
3777 __ Integer32ToSmi(rcx, rcx);
3778 __ Push(rax);
3779 __ Push(rbx);
3780 __ Push(rcx);
3781 __ CallRuntime(Runtime::kAllocateInNewSpace);
3782 __ movp(rdx, rax);
3783 __ Pop(rbx);
3784 __ Pop(rax);
3785 __ SmiToInteger32(rax, rax);
3786 }
3787 __ jmp(&done_allocate);
3788
3789 // Fall back to %NewRestParameter.
3790 __ bind(&too_big_for_new_space);
3791 __ PopReturnAddressTo(kScratchRegister);
3792 __ Push(rdi);
3793 __ PushReturnAddressFrom(kScratchRegister);
3794 __ TailCallRuntime(Runtime::kNewRestParameter);
3795 }
3796 }
3797
3798
3799 void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
3800 // ----------- S t a t e -------------
3801 // -- rdi : function
3802 // -- rsi : context
3803 // -- rbp : frame pointer
3804 // -- rsp[0] : return address
3805 // -----------------------------------
3806 __ AssertFunction(rdi);
3807
3808 // Make r9 point to the JavaScript frame.
3809 __ movp(r9, rbp);
3810 if (skip_stub_frame()) {
3811 // For Ignition we need to skip the handler/stub frame to reach the
3812 // JavaScript frame for the function.
3813 __ movp(r9, Operand(r9, StandardFrameConstants::kCallerFPOffset));
3814 }
3815 if (FLAG_debug_code) {
3816 Label ok;
3817 __ cmpp(rdi, Operand(r9, StandardFrameConstants::kFunctionOffset));
3818 __ j(equal, &ok);
3819 __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
3820 __ bind(&ok);
3821 }
3822
3823 // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
3824 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
3825 __ LoadSharedFunctionInfoSpecialField(
3826 rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset);
3827 __ leap(rdx, Operand(r9, rcx, times_pointer_size,
3828 StandardFrameConstants::kCallerSPOffset));
3829 __ Integer32ToSmi(rcx, rcx);
3830
3831 // rcx : number of parameters (tagged)
3832 // rdx : parameters pointer
3833 // rdi : function
3834 // rsp[0] : return address
3835 // r9 : JavaScript frame pointer.
3836 // Registers used over the whole function:
3837 // rbx: the mapped parameter count (untagged)
3838 // rax: the allocated object (tagged).
3839 Factory* factory = isolate()->factory();
3840
3841 __ SmiToInteger64(rbx, rcx);
3842 // rbx = parameter count (untagged)
3843
3844 // Check if the calling frame is an arguments adaptor frame.
3845 Label adaptor_frame, try_allocate, runtime;
3846 __ movp(rax, Operand(r9, StandardFrameConstants::kCallerFPOffset));
3847 __ movp(r8, Operand(rax, CommonFrameConstants::kContextOrFrameTypeOffset));
3848 __ Cmp(r8, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3849 __ j(equal, &adaptor_frame);
3850
3851 // No adaptor, parameter count = argument count.
3852 __ movp(r11, rbx);
3853 __ jmp(&try_allocate, Label::kNear);
3854
3855 // We have an adaptor frame. Patch the parameters pointer.
3856 __ bind(&adaptor_frame);
3857 __ SmiToInteger64(
3858 r11, Operand(rax, ArgumentsAdaptorFrameConstants::kLengthOffset));
3859 __ leap(rdx, Operand(rax, r11, times_pointer_size,
3860 StandardFrameConstants::kCallerSPOffset));
3861
3862 // rbx = parameter count (untagged)
3863 // r11 = argument count (untagged)
3864 // Compute the mapped parameter count = min(rbx, r11) in rbx.
3865 __ cmpp(rbx, r11);
3866 __ j(less_equal, &try_allocate, Label::kNear);
3867 __ movp(rbx, r11);
3868
3869 __ bind(&try_allocate);
3870
3871 // Compute the sizes of backing store, parameter map, and arguments object.
3872 // 1. Parameter map, has 2 extra words containing context and backing store.
3873 const int kParameterMapHeaderSize =
3874 FixedArray::kHeaderSize + 2 * kPointerSize;
3875 Label no_parameter_map;
3876 __ xorp(r8, r8);
3877 __ testp(rbx, rbx);
3878 __ j(zero, &no_parameter_map, Label::kNear);
3879 __ leap(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
3880 __ bind(&no_parameter_map);
3881
3882 // 2. Backing store.
3883 __ leap(r8, Operand(r8, r11, times_pointer_size, FixedArray::kHeaderSize));
3884
3885 // 3. Arguments object.
3886 __ addp(r8, Immediate(JSSloppyArgumentsObject::kSize));
3887
3888 // Do the allocation of all three objects in one go.
3889 __ Allocate(r8, rax, r9, no_reg, &runtime, NO_ALLOCATION_FLAGS);
3890
3891 // rax = address of new object(s) (tagged)
3892 // r11 = argument count (untagged)
3893 // Get the arguments map from the current native context into r9.
3894 Label has_mapped_parameters, instantiate;
3895 __ movp(r9, NativeContextOperand());
3896 __ testp(rbx, rbx);
3897 __ j(not_zero, &has_mapped_parameters, Label::kNear);
3898
3899 const int kIndex = Context::SLOPPY_ARGUMENTS_MAP_INDEX;
3900 __ movp(r9, Operand(r9, Context::SlotOffset(kIndex)));
3901 __ jmp(&instantiate, Label::kNear);
3902
3903 const int kAliasedIndex = Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX;
3904 __ bind(&has_mapped_parameters);
3905 __ movp(r9, Operand(r9, Context::SlotOffset(kAliasedIndex)));
3906 __ bind(&instantiate);
3907
3908 // rax = address of new object (tagged)
3909 // rbx = mapped parameter count (untagged)
3910 // r11 = argument count (untagged)
3911 // r9 = address of arguments map (tagged)
3912 __ movp(FieldOperand(rax, JSObject::kMapOffset), r9);
3913 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
3914 __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
3915 __ movp(FieldOperand(rax, JSObject::kElementsOffset), kScratchRegister);
3916
3917 // Set up the callee in-object property.
3918 __ AssertNotSmi(rdi);
3919 __ movp(FieldOperand(rax, JSSloppyArgumentsObject::kCalleeOffset), rdi);
3920
3921 // Use the length (smi tagged) and set that as an in-object property too.
3922 // Note: r11 is tagged from here on.
3923 __ Integer32ToSmi(r11, r11);
3924 __ movp(FieldOperand(rax, JSSloppyArgumentsObject::kLengthOffset), r11);
3925
3926 // Set up the elements pointer in the allocated arguments object.
3927 // If we allocated a parameter map, rdi will point there, otherwise to the
3928 // backing store.
3929 __ leap(rdi, Operand(rax, JSSloppyArgumentsObject::kSize));
3930 __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi);
3931
3932 // rax = address of new object (tagged)
3933 // rbx = mapped parameter count (untagged)
3934 // r11 = argument count (tagged)
3935 // rdi = address of parameter map or backing store (tagged)
3936
3937 // Initialize parameter map. If there are no mapped arguments, we're done.
3938 Label skip_parameter_map;
3939 __ testp(rbx, rbx);
3940 __ j(zero, &skip_parameter_map);
3941
3942 __ LoadRoot(kScratchRegister, Heap::kSloppyArgumentsElementsMapRootIndex);
3943 // rbx contains the untagged mapped parameter count. Add 2 and tag to write.
3944 __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
3945 __ Integer64PlusConstantToSmi(r9, rbx, 2);
3946 __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r9);
3947 __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi);
3948 __ leap(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
3949 __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9);
3950
3951 // Copy the parameter slots and the holes in the arguments.
3952 // We need to fill in mapped_parameter_count slots. They index the context,
3953 // where parameters are stored in reverse order, at
3954 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
3955 // The mapped parameters thus need to get indices
3956 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
3957 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
3958 // We loop from right to left.
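  // Purely as an illustration: with parameter_count == 3 and
  // mapped_parameter_count == 2, the two map entries receive the context
  // indices MIN_CONTEXT_SLOTS + 2 and MIN_CONTEXT_SLOTS + 1.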
3959 Label parameters_loop, parameters_test;
3960
3961 // Load tagged parameter count into r9.
3962 __ Integer32ToSmi(r9, rbx);
3963 __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
3964 __ addp(r8, rcx);
3965 __ subp(r8, r9);
3966 __ movp(rcx, rdi);
3967 __ leap(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
3968 __ SmiToInteger64(r9, r9);
3969 // r9 = loop variable (untagged)
3970 // r8 = mapping index (tagged)
3971 // rcx = address of parameter map (tagged)
3972 // rdi = address of backing store (tagged)
3973 __ jmp(&parameters_test, Label::kNear);
3974
3975 __ bind(&parameters_loop);
3976 __ subp(r9, Immediate(1));
3977 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
3978 __ movp(FieldOperand(rcx, r9, times_pointer_size, kParameterMapHeaderSize),
3979 r8);
3980 __ movp(FieldOperand(rdi, r9, times_pointer_size, FixedArray::kHeaderSize),
3981 kScratchRegister);
3982 __ SmiAddConstant(r8, r8, Smi::FromInt(1));
3983 __ bind(&parameters_test);
3984 __ testp(r9, r9);
3985 __ j(not_zero, &parameters_loop, Label::kNear);
3986
3987 __ bind(&skip_parameter_map);
3988
3989 // r11 = argument count (tagged)
3990 // rdi = address of backing store (tagged)
3991 // Copy arguments header and remaining slots (if there are any).
3992 __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
3993 factory->fixed_array_map());
3994 __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r11);
3995
3996 Label arguments_loop, arguments_test;
3997 __ movp(r8, rbx);
3998 // Untag r11 for the loop below.
3999 __ SmiToInteger64(r11, r11);
4000 __ leap(kScratchRegister, Operand(r8, times_pointer_size, 0));
4001 __ subp(rdx, kScratchRegister);
4002 __ jmp(&arguments_test, Label::kNear);
4003
4004 __ bind(&arguments_loop);
4005 __ subp(rdx, Immediate(kPointerSize));
4006 __ movp(r9, Operand(rdx, 0));
4007 __ movp(FieldOperand(rdi, r8,
4008 times_pointer_size,
4009 FixedArray::kHeaderSize),
4010 r9);
4011 __ addp(r8, Immediate(1));
4012
4013 __ bind(&arguments_test);
4014 __ cmpp(r8, r11);
4015 __ j(less, &arguments_loop, Label::kNear);
4016
4017 // Return.
4018 __ ret(0);
4019
4020 // Do the runtime call to allocate the arguments object.
4021 // r11 = argument count (untagged)
4022 __ bind(&runtime);
4023 __ Integer32ToSmi(r11, r11);
4024 __ PopReturnAddressTo(rax);
4025 __ Push(rdi); // Push function.
4026 __ Push(rdx); // Push parameters pointer.
4027 __ Push(r11); // Push parameter count.
4028 __ PushReturnAddressFrom(rax);
4029 __ TailCallRuntime(Runtime::kNewSloppyArguments);
4030 }
4031
4032
4033 void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
4034 // ----------- S t a t e -------------
4035 // -- rdi : function
4036 // -- rsi : context
4037 // -- rbp : frame pointer
4038 // -- rsp[0] : return address
4039 // -----------------------------------
4040 __ AssertFunction(rdi);
4041
4042 // Make rdx point to the JavaScript frame.
4043 __ movp(rdx, rbp);
4044 if (skip_stub_frame()) {
4045 // For Ignition we need to skip the handler/stub frame to reach the
4046 // JavaScript frame for the function.
4047 __ movp(rdx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
4048 }
4049 if (FLAG_debug_code) {
4050 Label ok;
4051 __ cmpp(rdi, Operand(rdx, StandardFrameConstants::kFunctionOffset));
4052 __ j(equal, &ok);
4053 __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
4054 __ bind(&ok);
4055 }
4056
4057 // Check if we have an arguments adaptor frame below the function frame.
4058 Label arguments_adaptor, arguments_done;
4059 __ movp(rbx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
4060 __ Cmp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
4061 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
4062 __ j(equal, &arguments_adaptor, Label::kNear);
4063 {
4064 __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
4065 __ LoadSharedFunctionInfoSpecialField(
4066 rax, rax, SharedFunctionInfo::kFormalParameterCountOffset);
4067 __ leap(rbx, Operand(rdx, rax, times_pointer_size,
4068 StandardFrameConstants::kCallerSPOffset -
4069 1 * kPointerSize));
4070 }
4071 __ jmp(&arguments_done, Label::kNear);
4072 __ bind(&arguments_adaptor);
4073 {
4074 __ SmiToInteger32(
4075 rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
4076 __ leap(rbx, Operand(rbx, rax, times_pointer_size,
4077 StandardFrameConstants::kCallerSPOffset -
4078 1 * kPointerSize));
4079 }
4080 __ bind(&arguments_done);
4081
4082 // ----------- S t a t e -------------
4083 // -- rax : number of arguments
4084 // -- rbx : pointer to the first argument
4085 // -- rdi : function
4086 // -- rsi : context
4087 // -- rsp[0] : return address
4088 // -----------------------------------
4089
4090 // Allocate space for the strict arguments object plus the backing store.
4091 Label allocate, done_allocate;
4092 __ leal(rcx, Operand(rax, times_pointer_size, JSStrictArgumentsObject::kSize +
4093 FixedArray::kHeaderSize));
4094 __ Allocate(rcx, rdx, r8, no_reg, &allocate, NO_ALLOCATION_FLAGS);
4095 __ bind(&done_allocate);
4096
4097 // Compute the arguments.length in rdi.
4098 __ Integer32ToSmi(rdi, rax);
4099
4100 // Setup the elements array in rdx.
4101 __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
4102 __ movp(FieldOperand(rdx, FixedArray::kMapOffset), rcx);
4103 __ movp(FieldOperand(rdx, FixedArray::kLengthOffset), rdi);
4104 {
4105 Label loop, done_loop;
4106 __ Set(rcx, 0);
4107 __ bind(&loop);
4108 __ cmpl(rcx, rax);
4109 __ j(equal, &done_loop, Label::kNear);
4110 __ movp(kScratchRegister, Operand(rbx, 0 * kPointerSize));
4111 __ movp(
4112 FieldOperand(rdx, rcx, times_pointer_size, FixedArray::kHeaderSize),
4113 kScratchRegister);
4114 __ subp(rbx, Immediate(1 * kPointerSize));
4115 __ addl(rcx, Immediate(1));
4116 __ jmp(&loop);
4117 __ bind(&done_loop);
4118 }
4119
4120 // Setup the strict arguments object in rax.
4121 __ leap(rax,
4122 Operand(rdx, rax, times_pointer_size, FixedArray::kHeaderSize));
4123 __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, rcx);
4124 __ movp(FieldOperand(rax, JSStrictArgumentsObject::kMapOffset), rcx);
4125 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
4126 __ movp(FieldOperand(rax, JSStrictArgumentsObject::kPropertiesOffset), rcx);
4127 __ movp(FieldOperand(rax, JSStrictArgumentsObject::kElementsOffset), rdx);
4128 __ movp(FieldOperand(rax, JSStrictArgumentsObject::kLengthOffset), rdi);
4129 STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
4130 __ Ret();
4131
4132 // Fall back to %AllocateInNewSpace (if not too big).
4133 Label too_big_for_new_space;
4134 __ bind(&allocate);
4135 __ cmpl(rcx, Immediate(kMaxRegularHeapObjectSize));
4136 __ j(greater, &too_big_for_new_space);
4137 {
4138 FrameScope scope(masm, StackFrame::INTERNAL);
4139 __ Integer32ToSmi(rax, rax);
4140 __ Integer32ToSmi(rcx, rcx);
4141 __ Push(rax);
4142 __ Push(rbx);
4143 __ Push(rcx);
4144 __ CallRuntime(Runtime::kAllocateInNewSpace);
4145 __ movp(rdx, rax);
4146 __ Pop(rbx);
4147 __ Pop(rax);
4148 __ SmiToInteger32(rax, rax);
4149 }
4150 __ jmp(&done_allocate);
4151
4152 // Fall back to %NewStrictArguments.
4153 __ bind(&too_big_for_new_space);
4154 __ PopReturnAddressTo(kScratchRegister);
4155 __ Push(rdi);
4156 __ PushReturnAddressFrom(kScratchRegister);
4157 __ TailCallRuntime(Runtime::kNewStrictArguments);
4158 }
4159
4160
4161 static int Offset(ExternalReference ref0, ExternalReference ref1) {
4162 int64_t offset = (ref0.address() - ref1.address());
4163 // Check that the offset fits into an int.
4164 DCHECK(static_cast<int>(offset) == offset);
4165 return static_cast<int>(offset);
4166 }
4167
4168
4169 // Prepares the stack to put arguments (aligns and so on). The WIN64 calling
4170 // convention requires putting the pointer to the return value slot into
4171 // rcx (rcx must be preserved until CallApiFunctionAndReturn). Saves
4172 // context (rsi). Clobbers rax. Allocates arg_stack_space * kPointerSize
4173 // inside the exit frame (not GCed), accessible via StackSpaceOperand.
4174 static void PrepareCallApiFunction(MacroAssembler* masm, int arg_stack_space) {
4175 __ EnterApiExitFrame(arg_stack_space);
4176 }
4177
4178
4179 // Calls an API function. Allocates HandleScope, extracts returned value
4180 // from handle and propagates exceptions. Clobbers r14, r15, rbx and
4181 // caller-save registers. Restores context. On return removes
4182 // stack_space * kPointerSize (GCed).
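// If the profiler is active, the call is routed through thunk_ref (which is
// handed the real callback in thunk_last_arg); otherwise function_address is
// called directly.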
4183 static void CallApiFunctionAndReturn(MacroAssembler* masm,
4184 Register function_address,
4185 ExternalReference thunk_ref,
4186 Register thunk_last_arg, int stack_space,
4187 Operand* stack_space_operand,
4188 Operand return_value_operand,
4189 Operand* context_restore_operand) {
4190 Label prologue;
4191 Label promote_scheduled_exception;
4192 Label delete_allocated_handles;
4193 Label leave_exit_frame;
4194 Label write_back;
4195
4196 Isolate* isolate = masm->isolate();
4197 Factory* factory = isolate->factory();
4198 ExternalReference next_address =
4199 ExternalReference::handle_scope_next_address(isolate);
4200 const int kNextOffset = 0;
4201 const int kLimitOffset = Offset(
4202 ExternalReference::handle_scope_limit_address(isolate), next_address);
4203 const int kLevelOffset = Offset(
4204 ExternalReference::handle_scope_level_address(isolate), next_address);
4205 ExternalReference scheduled_exception_address =
4206 ExternalReference::scheduled_exception_address(isolate);
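  // base_reg will point at handle_scope_next_address; the limit and level
  // fields are reached through the relative offsets computed above, so one
  // register covers all of the HandleScope bookkeeping.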
4207
4208 DCHECK(rdx.is(function_address) || r8.is(function_address));
4209 // Allocate HandleScope in callee-save registers.
4210 Register prev_next_address_reg = r14;
4211 Register prev_limit_reg = rbx;
4212 Register base_reg = r15;
4213 __ Move(base_reg, next_address);
4214 __ movp(prev_next_address_reg, Operand(base_reg, kNextOffset));
4215 __ movp(prev_limit_reg, Operand(base_reg, kLimitOffset));
4216 __ addl(Operand(base_reg, kLevelOffset), Immediate(1));
4217
4218 if (FLAG_log_timer_events) {
4219 FrameScope frame(masm, StackFrame::MANUAL);
4220 __ PushSafepointRegisters();
4221 __ PrepareCallCFunction(1);
4222 __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
4223 __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
4224 1);
4225 __ PopSafepointRegisters();
4226 }
4227
4228 Label profiler_disabled;
4229 Label end_profiler_check;
4230 __ Move(rax, ExternalReference::is_profiling_address(isolate));
4231 __ cmpb(Operand(rax, 0), Immediate(0));
4232 __ j(zero, &profiler_disabled);
4233
4234 // Third parameter is the address of the actual getter function.
4235 __ Move(thunk_last_arg, function_address);
4236 __ Move(rax, thunk_ref);
4237 __ jmp(&end_profiler_check);
4238
4239 __ bind(&profiler_disabled);
4240 // Profiler is disabled: use the function address directly.
4241 __ Move(rax, function_address);
4242
4243 __ bind(&end_profiler_check);
4244
4245 // Call the api function!
4246 __ call(rax);
4247
4248 if (FLAG_log_timer_events) {
4249 FrameScope frame(masm, StackFrame::MANUAL);
4250 __ PushSafepointRegisters();
4251 __ PrepareCallCFunction(1);
4252 __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
4253 __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
4254 1);
4255 __ PopSafepointRegisters();
4256 }
4257
4258 // Load the value from ReturnValue
4259 __ movp(rax, return_value_operand);
4260 __ bind(&prologue);
4261
4262 // No more valid handles (the result handle was the last one). Restore
4263 // previous handle scope.
4264 __ subl(Operand(base_reg, kLevelOffset), Immediate(1));
4265 __ movp(Operand(base_reg, kNextOffset), prev_next_address_reg);
4266 __ cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset));
4267 __ j(not_equal, &delete_allocated_handles);
4268
4269 // Leave the API exit frame.
4270 __ bind(&leave_exit_frame);
4271 bool restore_context = context_restore_operand != NULL;
4272 if (restore_context) {
4273 __ movp(rsi, *context_restore_operand);
4274 }
4275 if (stack_space_operand != nullptr) {
4276 __ movp(rbx, *stack_space_operand);
4277 }
4278 __ LeaveApiExitFrame(!restore_context);
4279
4280 // Check if the function scheduled an exception.
4281 __ Move(rdi, scheduled_exception_address);
4282 __ Cmp(Operand(rdi, 0), factory->the_hole_value());
4283 __ j(not_equal, &promote_scheduled_exception);
4284
4285 #if DEBUG
4286 // Check if the function returned a valid JavaScript value.
4287 Label ok;
4288 Register return_value = rax;
4289 Register map = rcx;
4290
4291 __ JumpIfSmi(return_value, &ok, Label::kNear);
4292 __ movp(map, FieldOperand(return_value, HeapObject::kMapOffset));
4293
4294 __ CmpInstanceType(map, LAST_NAME_TYPE);
4295 __ j(below_equal, &ok, Label::kNear);
4296
4297 __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
4298 __ j(above_equal, &ok, Label::kNear);
4299
4300 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
4301 __ j(equal, &ok, Label::kNear);
4302
4303 __ CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
4304 __ j(equal, &ok, Label::kNear);
4305
4306 __ CompareRoot(return_value, Heap::kTrueValueRootIndex);
4307 __ j(equal, &ok, Label::kNear);
4308
4309 __ CompareRoot(return_value, Heap::kFalseValueRootIndex);
4310 __ j(equal, &ok, Label::kNear);
4311
4312 __ CompareRoot(return_value, Heap::kNullValueRootIndex);
4313 __ j(equal, &ok, Label::kNear);
4314
4315 __ Abort(kAPICallReturnedInvalidObject);
4316
4317 __ bind(&ok);
4318 #endif
4319
4320 if (stack_space_operand != nullptr) {
4321 DCHECK_EQ(stack_space, 0);
4322 __ PopReturnAddressTo(rcx);
4323 __ addq(rsp, rbx);
4324 __ jmp(rcx);
4325 } else {
4326 __ ret(stack_space * kPointerSize);
4327 }
4328
4329 // Re-throw by promoting a scheduled exception.
4330 __ bind(&promote_scheduled_exception);
4331 __ TailCallRuntime(Runtime::kPromoteScheduledException);
4332
4333 // HandleScope limit has changed. Delete allocated extensions.
4334 __ bind(&delete_allocated_handles);
4335 __ movp(Operand(base_reg, kLimitOffset), prev_limit_reg);
4336 __ movp(prev_limit_reg, rax);
4337 __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
4338 __ LoadAddress(rax,
4339 ExternalReference::delete_handle_scope_extensions(isolate));
4340 __ call(rax);
4341 __ movp(rax, prev_limit_reg);
4342 __ jmp(&leave_exit_frame);
4343 }
4344
4345 void CallApiCallbackStub::Generate(MacroAssembler* masm) {
4346 // ----------- S t a t e -------------
4347 // -- rdi : callee
4348 // -- rbx : call_data
4349 // -- rcx : holder
4350 // -- rdx : api_function_address
4351 // -- rsi : context
4352 // -- rax : number of arguments if argc is a register
4353 // -- rsp[0] : return address
4354 // -- rsp[8] : last argument
4355 // -- ...
4356 // -- rsp[argc * 8] : first argument
4357 // -- rsp[(argc + 1) * 8] : receiver
4358 // -----------------------------------
4359
4360 Register callee = rdi;
4361 Register call_data = rbx;
4362 Register holder = rcx;
4363 Register api_function_address = rdx;
4364 Register context = rsi;
4365 Register return_address = r8;
4366
4367 typedef FunctionCallbackArguments FCA;
4368
4369 STATIC_ASSERT(FCA::kContextSaveIndex == 6);
4370 STATIC_ASSERT(FCA::kCalleeIndex == 5);
4371 STATIC_ASSERT(FCA::kDataIndex == 4);
4372 STATIC_ASSERT(FCA::kReturnValueOffset == 3);
4373 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
4374 STATIC_ASSERT(FCA::kIsolateIndex == 1);
4375 STATIC_ASSERT(FCA::kHolderIndex == 0);
4376 STATIC_ASSERT(FCA::kNewTargetIndex == 7);
4377 STATIC_ASSERT(FCA::kArgsLength == 8);
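  // The pushes below build FunctionCallbackArguments::implicit_args_ from the
  // highest index down: new target, context, callee, call data, return value,
  // return value default, isolate and, at index 0, the holder.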
4378
4379 __ PopReturnAddressTo(return_address);
4380
4381 // new target
4382 __ PushRoot(Heap::kUndefinedValueRootIndex);
4383
4384 // context save
4385 __ Push(context);
4386
4387 // callee
4388 __ Push(callee);
4389
4390 // call data
4391 __ Push(call_data);
4392 Register scratch = call_data;
4393 if (!this->call_data_undefined()) {
4394 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
4395 }
4396 // return value
4397 __ Push(scratch);
4398 // return value default
4399 __ Push(scratch);
4400 // isolate
4401 __ Move(scratch, ExternalReference::isolate_address(masm->isolate()));
4402 __ Push(scratch);
4403 // holder
4404 __ Push(holder);
4405
4406 __ movp(scratch, rsp);
4407 // Push return address back on stack.
4408 __ PushReturnAddressFrom(return_address);
4409
4410 if (!this->is_lazy()) {
4411 // load context from callee
4412 __ movp(context, FieldOperand(callee, JSFunction::kContextOffset));
4413 }
4414
4415 // Allocate the v8::FunctionCallbackInfo structure in the arguments' space,
4416 // since it's not controlled by GC.
4417 const int kApiStackSpace = 3;
4418
4419 PrepareCallApiFunction(masm, kApiStackSpace);
4420
4421 // FunctionCallbackInfo::implicit_args_.
4422 int argc = this->argc();
4423 __ movp(StackSpaceOperand(0), scratch);
4424 __ addp(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize));
4425 // FunctionCallbackInfo::values_.
4426 __ movp(StackSpaceOperand(1), scratch);
4427 // FunctionCallbackInfo::length_.
4428 __ Set(StackSpaceOperand(2), argc);
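  // StackSpaceOperand(0..2) now hold the three FunctionCallbackInfo fields
  // handed to the callback: implicit_args_, values_ (pointing at the first
  // JS argument) and length_ (argc).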
4429
4430 #if defined(__MINGW64__) || defined(_WIN64)
4431 Register arguments_arg = rcx;
4432 Register callback_arg = rdx;
4433 #else
4434 Register arguments_arg = rdi;
4435 Register callback_arg = rsi;
4436 #endif
4437
4438 // It's okay if api_function_address == callback_arg
4439 // but not arguments_arg
4440 DCHECK(!api_function_address.is(arguments_arg));
4441
4442 // v8::InvocationCallback's argument.
4443 __ leap(arguments_arg, StackSpaceOperand(0));
4444
4445 ExternalReference thunk_ref =
4446 ExternalReference::invoke_function_callback(masm->isolate());
4447
4448 // Accessor for FunctionCallbackInfo and first js arg.
4449 StackArgumentsAccessor args_from_rbp(rbp, FCA::kArgsLength + 1,
4450 ARGUMENTS_DONT_CONTAIN_RECEIVER);
4451 Operand context_restore_operand = args_from_rbp.GetArgumentOperand(
4452 FCA::kArgsLength - FCA::kContextSaveIndex);
  Operand return_value_operand = args_from_rbp.GetArgumentOperand(
      this->is_store() ? 0 : FCA::kArgsLength - FCA::kReturnValueOffset);
  // The stub knows argc at compile time, so the stack to drop on return is a
  // constant: the JS arguments, the implicit FCA slots, and the receiver.
  const int stack_space = argc + FCA::kArgsLength + 1;
  Operand* stack_space_operand = nullptr;
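  // Because context_restore_operand is provided, CallApiFunctionAndReturn can
  // restore rsi from the context-save slot of the FCA before returning.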
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, callback_arg,
                           stack_space, stack_space_operand,
                           return_value_operand, &context_restore_operand);
}


void CallApiGetterStub::Generate(MacroAssembler* masm) {
#if defined(__MINGW64__) || defined(_WIN64)
  Register getter_arg = r8;
  Register accessor_info_arg = rdx;
  Register name_arg = rcx;
#else
  Register getter_arg = rdx;
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif
  Register api_function_address = r8;
  Register receiver = ApiGetterDescriptor::ReceiverRegister();
  Register holder = ApiGetterDescriptor::HolderRegister();
  Register callback = ApiGetterDescriptor::CallbackRegister();
  Register scratch = rax;
  DCHECK(!AreAliased(receiver, holder, callback, scratch));

  // Build the v8::PropertyCallbackInfo::args_ array on the stack and push the
  // property name below the exit frame to make the GC aware of them.
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);
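  // After the pushes below and once the return address is restored, the stack
  // holds (from rsp upwards): return address, name handle, then the args_
  // array in index order, i.e. should_throw_on_error (index 0) up to the
  // receiver (kThisIndex) at the highest address.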

  // Insert additional parameters into the stack frame above return address.
  __ PopReturnAddressTo(scratch);
  __ Push(receiver);
  __ Push(FieldOperand(callback, AccessorInfo::kDataOffset));
  __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
  __ Push(kScratchRegister);  // return value
  __ Push(kScratchRegister);  // return value default
  __ PushAddress(ExternalReference::isolate_address(isolate()));
  __ Push(holder);
  __ Push(Smi::kZero);  // should_throw_on_error -> false
  __ Push(FieldOperand(callback, AccessorInfo::kNameOffset));
  __ PushReturnAddressFrom(scratch);

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
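  // kStackUnwindSpace is what gets dropped on return: the seven args_ slots
  // plus the name handle pushed above.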

  // Allocate v8::PropertyCallbackInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  // Load address of v8::PropertyCallbackInfo::args_ array.
  __ leap(scratch, Operand(rsp, 2 * kPointerSize));
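  // rsp points at the return address and rsp + kPointerSize at the name handle
  // just pushed, so rsp + 2 * kPointerSize is args_[kShouldThrowOnErrorIndex],
  // the start of the args_ array.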

  PrepareCallApiFunction(masm, kArgStackSpace);
  // Create v8::PropertyCallbackInfo object on the stack and initialize
  // its args_ field.
  Operand info_object = StackSpaceOperand(0);
  __ movp(info_object, scratch);

  // The name handle sits one slot below args_ on the stack.
  __ leap(name_arg, Operand(scratch, -kPointerSize));
  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ leap(accessor_info_arg, info_object);

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  // It's okay if api_function_address == getter_arg
  // but not accessor_info_arg or name_arg
  DCHECK(!api_function_address.is(accessor_info_arg));
  DCHECK(!api_function_address.is(name_arg));
  __ movp(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
  __ movp(api_function_address,
          FieldOperand(scratch, Foreign::kForeignAddressOffset));
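  // The native getter is stored on the AccessorInfo as a Foreign wrapping the
  // C function pointer; the two loads above unwrap it into
  // api_function_address.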

  // +3 is to skip the prologue, the return address and the name handle.
  Operand return_value_operand(
      rbp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg,
                           kStackUnwindSpace, nullptr, return_value_operand,
                           nullptr);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64