// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_IA32

#include "src/code-stubs.h"
#include "src/api-arguments.h"
#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/ia32/code-stubs-ia32.h"
#include "src/ia32/frames-ia32.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
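  // On entry (per this stub's interface descriptor, as the shuffle below
  // suggests): eax holds the argument count, edi the constructor function
  // and ebx the allocation site.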
  __ pop(ecx);
  __ mov(MemOperand(esp, eax, times_4, 0), edi);
  __ push(edi);
  __ push(ebx);
  __ push(ecx);
  __ add(eax, Immediate(3));
  __ TailCallRuntime(Runtime::kNewArray);
}

void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
  Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
  descriptor->Initialize(eax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}

void FastFunctionBindStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;
  descriptor->Initialize(eax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}

void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           eax.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ push(descriptor.GetRegisterParameter(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ ret(0);
}

void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ pushad();
  if (save_doubles()) {
    __ sub(esp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(Operand(esp, i * kDoubleSize), reg);
    }
  }
  const int argument_count = 1;
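  // The overflow C function takes a single argument, the isolate address,
  // which is stored into the outgoing argument slot below.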

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, ecx);
  __ mov(Operand(esp, 0 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  if (save_doubles()) {
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(reg, Operand(esp, i * kDoubleSize));
    }
    __ add(esp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
  }
  __ popad();
  __ ret(0);
}

class FloatingPointHelper : public AllStatic {
 public:
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. The input value must
  // be either a smi or a heap number object (fp value), passed in register
  // |number|. Returns the operand as a floating point number on the FPU
  // stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Test if operands are smis or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);

  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are.  Jump to label not_numbers if
  // either operand is not a number.  Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);
};

void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  DCHECK(is_truncating());

  Label check_negative, process_64_bits, done, done_no_stash;

  int double_offset = offset();

  // Account for return address and saved regs if input is esp.
  if (input_reg.is(esp)) double_offset += 3 * kPointerSize;
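  // (The three words are the return address plus the scratch1 and save_reg
  // pushes below.)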

  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  Register scratch1;
  {
    Register scratch_candidates[3] = { ebx, edx, edi };
    for (int i = 0; i < 3; i++) {
      scratch1 = scratch_candidates[i];
      if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
    }
  }
  // Since we must use ecx for shifts below, use some other register (eax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(ecx) ? eax : final_result_reg;
  // Save ecx if it isn't the return register (and therefore volatile), or,
  // if it is the return register, save the temp register we use in its
  // stead for the result.
  Register save_reg = final_result_reg.is(ecx) ? eax : ecx;
  __ push(scratch1);
  __ push(save_reg);

  bool stash_exponent_copy = !input_reg.is(esp);
  __ mov(scratch1, mantissa_operand);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    // Load x87 register with heap number.
    __ fld_d(mantissa_operand);
  }
  __ mov(ecx, exponent_operand);
  if (stash_exponent_copy) __ push(ecx);

  __ and_(ecx, HeapNumber::kExponentMask);
  __ shr(ecx, HeapNumber::kExponentShift);
  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  if (CpuFeatures::IsSupported(SSE3)) {
    __ fstp(0);
  }
  __ sub(ecx, Immediate(delta));
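  // ecx now holds how far the low mantissa word must still be shifted left;
  // a count above 31 means the low 32 result bits are all zero.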
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    if (stash_exponent_copy) {
      // Already a copy of the exponent on the stack, overwrite it.
      STATIC_ASSERT(kDoubleSize == 2 * kPointerSize);
      __ sub(esp, Immediate(kDoubleSize / 2));
    } else {
      // Reserve space for 64 bit answer.
      __ sub(esp, Immediate(kDoubleSize));  // Nolint.
    }
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(result_reg, Operand(esp, 0));  // Load low word of answer as result
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done_no_stash);
  } else {
    // Result must be extracted from shifted 32-bit mantissa
    __ sub(ecx, Immediate(delta));
    __ neg(ecx);
    if (stash_exponent_copy) {
      __ mov(result_reg, MemOperand(esp, 0));
    } else {
      __ mov(result_reg, exponent_operand);
    }
    __ and_(result_reg,
            Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
    __ add(result_reg,
           Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
    __ shrd_cl(scratch1, result_reg);
    __ shr_cl(result_reg);
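    // x86 shifts use the count modulo 32, so if bit 5 of the count is set
    // the wanted low word is really the shifted high word; the cmov below
    // picks it in that case.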
    __ test(ecx, Immediate(32));
    __ cmov(not_equal, scratch1, result_reg);
  }

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  if (stash_exponent_copy) {
    __ cmp(MemOperand(esp, 0), Immediate(0));
  } else {
    __ cmp(exponent_operand, Immediate(0));
  }
  __ cmov(greater, result_reg, scratch1);

  // Restore registers
  __ bind(&done);
  if (stash_exponent_copy) {
    __ add(esp, Immediate(kDoubleSize / 2));
  }
  __ bind(&done_no_stash);
  if (!final_result_reg.is(result_reg)) {
    DCHECK(final_result_reg.is(ecx));
    __ mov(final_result_reg, result_reg);
  }
  __ pop(save_reg);
  __ pop(scratch1);
  __ ret(0);
}

void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ JumpIfSmi(number, &load_smi, Label::kNear);
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi);
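  // fild_s needs a memory operand, so bounce the untagged value through the
  // stack.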
  __ SmiUntag(number);
  __ push(number);
  __ fild_s(Operand(esp, 0));
  __ pop(number);

  __ bind(&done);
}


void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
                                           Label* not_numbers) {
  Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
  // Load operand in edx into xmm0, or branch to not_numbers.
  __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(not_equal, not_numbers);  // Argument in edx is not a number.
  __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
  __ bind(&load_eax);
  // Load operand in eax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(equal, &load_float_eax, Label::kNear);
  __ jmp(not_numbers);  // Argument in eax is not a number.
  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm0, edx);
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);
  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm1, eax);
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.
  __ jmp(&done, Label::kNear);
  __ bind(&load_float_eax);
  __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ bind(&done);
}

void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test whether both operands are smis or heap numbers; jump to non_float
  // if either is neither, and fall through otherwise. Uses scratch to hold
  // the loaded map.
  __ JumpIfSmi(edx, &test_other, Label::kNear);
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in edx is not a number -> NaN

  __ bind(&test_other);
  __ JumpIfSmi(eax, &done, Label::kNear);
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in eax is not a number -> NaN

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}

void MathPowStub::Generate(MacroAssembler* masm) {
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(eax));
  const Register scratch = ecx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ mov(scratch, Immediate(1));
  __ Cvtsi2sd(double_result, scratch);

  if (exponent_type() == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movsd(double_exponent,
              FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification,
                 &try_arithmetic_simplification,
                 &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cvttsd2si(exponent, Operand(double_exponent));
    __ cmp(exponent, Immediate(0x1));
    __ j(overflow, &call_runtime);
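    // (cvttsd2si yields 0x80000000, the indefinite integer, for NaN and
    // out-of-range inputs; subtracting 1 from that value is the only case
    // that sets the overflow flag.)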

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ sub(esp, Immediate(kDoubleSize));
    __ movsd(Operand(esp, 0), double_exponent);
    __ fld_d(Operand(esp, 0));  // E
    __ movsd(Operand(esp, 0), double_base);
    __ fld_d(Operand(esp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();   // 2^X, rnd(X)
    __ fstp(1);    // 2^X
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ test_b(eax,
              Immediate(0x5F));  // We check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  __ mov(scratch, exponent);  // Back up exponent.
  __ movsd(double_scratch, double_base);  // Back up base.
  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ test(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ neg(scratch);
  __ bind(&no_neg);

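  // Compute the power by binary exponentiation: double_scratch is squared on
  // each round and multiplied into the result for every set exponent bit.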
  __ j(zero, &while_false, Label::kNear);
  __ shr(scratch, 1);
  // Above condition means CF==0 && ZF==0.  This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shr(scratch, 1);
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // scratch has the original value of the exponent - if the exponent is
  // negative, return 1/result.
  __ test(exponent, exponent);
  __ j(positive, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero.  Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);  // Result cannot be NaN.
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // exponent is a smi.  We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  __ bind(&call_runtime);
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(4, scratch);
    __ movsd(Operand(esp, 0 * kDoubleSize), double_base);
    __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent);
    __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
                     4);
  }
  // Return value is in st(0) on ia32.
  // Store it into the (fixed) result register.
  __ sub(esp, Immediate(kDoubleSize));
  __ fstp_d(Operand(esp, 0));
  __ movsd(double_result, Operand(esp, 0));
  __ add(esp, Immediate(kDoubleSize));

  __ bind(&done);
  __ ret(0);
}

void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();
  // With careful management, we won't have to save slot and vector on
  // the stack. Simply handle the possibly missing case first.
  // TODO(mvstanton): this code can be more efficient.
  __ cmp(FieldOperand(receiver, JSFunction::kPrototypeOrInitialMapOffset),
         Immediate(isolate()->factory()->the_hole_value()));
  __ j(equal, &miss);
  __ TryGetFunctionPrototype(receiver, eax, ebx, &miss);
  __ ret(0);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
}

void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label miss;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register index = LoadDescriptor::NameRegister();
  Register scratch = edi;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  Register result = eax;
  DCHECK(!result.is(scratch));
  DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
         result.is(LoadDescriptor::SlotRegister()));

  // StringCharAtGenerator doesn't use the result register until it's passed
  // the different miss possibilities. If it did, we would have a conflict
  // when FLAG_vector_ics is true.
  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          RECEIVER_IS_STRING);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}

void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Jump straight to the runtime if native RegExp support was not selected
  // at compile time, or if the regexp entry in generated code has been
  // turned off by a runtime switch or at compilation.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: last_match_info (expected JSArray)
  //  esp[8]: previous index
  //  esp[12]: subject string
  //  esp[16]: JSRegExp object

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;
  static const int kSubjectOffset = 3 * kPointerSize;
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime;
  Factory* factory = isolate()->factory();

  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ test(ebx, ebx);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
  __ j(not_equal, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    __ test(ecx, Immediate(kSmiTagMask));
    __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // ecx: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
  __ cmp(ebx, Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
  __ j(not_equal, &runtime);

  // ecx: RegExp data (FixedArray)
  // Check that the number of captures fit in the static offsets vector buffer.
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Check (number_of_captures + 1) * 2 <= offsets vector size
  // Or          number_of_captures * 2 <= offsets vector size - 2
  // Multiplying by 2 comes for free since edx is smi-tagged.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmp(edx, Isolate::kJSRegexpStaticOffsetsVectorSize - 2);
  __ j(above, &runtime);

  // Reset offset for possibly sliced string.
  __ Move(edi, Immediate(0));
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ JumpIfSmi(eax, &runtime);
  __ mov(edx, eax);  // Make a copy of the original subject string.

  // eax: subject string
  // edx: subject string
  // ecx: RegExp data (FixedArray)
  // Handle subject string according to its encoding and representation:
  // (1) Sequential two byte?  If yes, go to (9).
  // (2) Sequential one byte?  If yes, go to (5).
  // (3) Sequential or cons?  If not, go to (6).
  // (4) Cons string.  If the string is flat, replace subject with first string
  //     and go to (1). Otherwise bail out to runtime.
  // (5) One byte sequential.  Load regexp code for one byte.
  // (E) Carry on.
  /// [...]

  // Deferred code at the end of the stub:
  // (6) Long external string?  If not, go to (10).
  // (7) External string.  Make it, offset-wise, look like a sequential string.
  // (8) Is the external string one byte?  If yes, go to (5).
  // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
  // (10) Short external string or not a string?  If yes, bail out to runtime.
  // (11) Sliced string.  Replace subject with parent. Go to (1).

  Label seq_one_byte_string /* 5 */, seq_two_byte_string /* 9 */,
      external_string /* 7 */, check_underlying /* 1 */,
      not_seq_nor_cons /* 6 */, check_code /* E */, not_long_external /* 10 */;

  __ bind(&check_underlying);
  // (1) Sequential two byte?  If yes, go to (9).
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));

  __ and_(ebx, kIsNotStringMask |
               kStringRepresentationMask |
               kStringEncodingMask |
               kShortExternalStringMask);
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).

  // (2) Sequential one byte?  If yes, go to (5).
  // Any other sequential string must be one byte.
  __ and_(ebx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kShortExternalStringMask));
  __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (5).

  // (3) Sequential or cons?  If not, go to (6).
  // We check whether the subject string is a cons, since sequential strings
  // have already been covered.
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
  __ cmp(ebx, Immediate(kExternalStringTag));
  __ j(greater_equal, &not_seq_nor_cons);  // Go to (6).

  // (4) Cons string.  Check that it's flat.
  // Replace subject with first string and reload instance type.
  __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
  __ j(not_equal, &runtime);
  __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
  __ jmp(&check_underlying);

  // eax: sequential subject string (or look-alike, external string)
  // edx: original subject string
  // ecx: RegExp data (FixedArray)
  // (5) One byte sequential.  Load regexp code for one byte.
  __ bind(&seq_one_byte_string);
  // Load previous index and check range before edx is overwritten.  We have
  // to use edx instead of eax here because it might have been only made to
  // look like a sequential string when it actually is an external string.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataOneByteCodeOffset));
  __ Move(ecx, Immediate(1));  // Type is one byte.

  // (E) Carry on.  String handling is done.
  __ bind(&check_code);
  // edx: irregexp code
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object otherwise it contains
  // a smi (code flushing support).
  __ JumpIfSmi(edx, &runtime);

  // eax: subject string
  // ebx: previous index (smi)
  // edx: code
  // ecx: encoding of subject string (1 if one_byte, 0 if two_byte);
  // All checks done. Now push arguments for native regexp code.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 9;
  __ EnterApiExitFrame(kRegExpExecuteArguments);
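  // The exit frame reserves kRegExpExecuteArguments outgoing argument slots;
  // they are filled below, out of numerical order.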

  // Argument 9: Pass current isolate address.
  __ mov(Operand(esp, 8 * kPointerSize),
      Immediate(ExternalReference::isolate_address(isolate())));

  // Argument 8: Indicate that this is a direct call from JavaScript.
  __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));

  // Argument 7: Start (high end) of backtracking stack memory area.
  __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
  __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ mov(Operand(esp, 6 * kPointerSize), esi);

  // Argument 6: Set the number of capture registers to zero to force global
  // regexps to behave as non-global.  This does not affect non-global regexps.
  __ mov(Operand(esp, 5 * kPointerSize), Immediate(0));

  // Argument 5: static offsets vector buffer.
  __ mov(Operand(esp, 4 * kPointerSize),
         Immediate(ExternalReference::address_of_static_offsets_vector(
             isolate())));

  // Argument 2: Previous index.
  __ SmiUntag(ebx);
  __ mov(Operand(esp, 1 * kPointerSize), ebx);

  // Argument 1: Original subject string.
  // The original subject is in the previous stack frame. Therefore we have to
  // use ebp, which points exactly to one pointer size below the previous esp.
  // (Because creating a new stack frame pushes the previous ebp onto the stack
  // and thereby moves up esp by one kPointerSize.)
  __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
  __ mov(Operand(esp, 0 * kPointerSize), esi);

  // esi: original subject string
  // eax: underlying subject string
  // ebx: previous index
  // ecx: encoding of subject string (1 if one_byte 0 if two_byte);
  // edx: code
  // Argument 4: End of string data
  // Argument 3: Start of string data
  // Prepare start and end index of the input.
  // Load the length from the original sliced string if that is the case.
  __ mov(esi, FieldOperand(esi, String::kLengthOffset));
  __ add(esi, edi);  // Calculate input end wrt offset.
  __ SmiUntag(edi);
  __ add(ebx, edi);  // Calculate input start wrt offset.

  // ebx: start index of the input string
  // esi: end index of the input string
  Label setup_two_byte, setup_rest;
  __ test(ecx, ecx);
  __ j(zero, &setup_two_byte, Label::kNear);
  __ SmiUntag(esi);
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqOneByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqOneByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.
  __ jmp(&setup_rest, Label::kNear);

  __ bind(&setup_two_byte);
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);  // esi is smi (powered by 2).
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.

  __ bind(&setup_rest);

  // Locate the code entry and call it.
  __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(edx);

  // Drop arguments and come back to JS mode.
  __ LeaveApiExitFrame(true);

  // Check the result.
  Label success;
  __ cmp(eax, 1);
  // We expect exactly one result since we force the called regexp to behave
  // as non-global.
  __ j(equal, &success);
  Label failure;
  __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
  __ j(equal, &failure);
  __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
  // If not exception it can only be retry. Handle that in the runtime system.
  __ j(not_equal, &runtime);
  // Result must now be exception. If there is no pending exception already, a
  // stack overflow (on the backtrack stack) was detected in RegExp code, but
  // the exception has not yet been created. Handle that in the runtime system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      isolate());
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(eax, Operand::StaticVariable(pending_exception));
  __ cmp(edx, eax);
  __ j(equal, &runtime);

  // For exception, throw the exception again.
  __ TailCallRuntime(Runtime::kRegExpExecReThrow);

  __ bind(&failure);
  // For failure to match, return null.
  __ mov(eax, factory->null_value());
  __ ret(4 * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  __ add(edx, Immediate(2));  // edx was a smi.

  // edx: Number of capture registers
  // Check that the last match info is a FixedArray.
  __ mov(ebx, Operand(esp, kLastMatchInfoOffset));
  __ JumpIfSmi(ebx, &runtime);
  // Check that the object has fast elements.
  __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
  __ cmp(eax, factory->fixed_array_map());
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information.
  __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ SmiUntag(eax);
  __ sub(eax, Immediate(RegExpMatchInfo::kLastMatchOverhead));
  __ cmp(edx, eax);
  __ j(greater, &runtime);

  // ebx: last_match_info (FixedArray)
  // edx: number of capture registers
  // Store the capture count.
  __ SmiTag(edx);  // Number of capture registers to smi.
  __ mov(FieldOperand(ebx, RegExpMatchInfo::kNumberOfCapturesOffset), edx);
  __ SmiUntag(edx);  // Number of capture registers back from smi.
  // Store last subject and last input.
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ mov(ecx, eax);
  __ mov(FieldOperand(ebx, RegExpMatchInfo::kLastSubjectOffset), eax);
  __ RecordWriteField(ebx, RegExpMatchInfo::kLastSubjectOffset, eax, edi,
                      kDontSaveFPRegs);
  __ mov(eax, ecx);
  __ mov(FieldOperand(ebx, RegExpMatchInfo::kLastInputOffset), eax);
  __ RecordWriteField(ebx, RegExpMatchInfo::kLastInputOffset, eax, edi,
                      kDontSaveFPRegs);

  // Get the static offsets vector filled by the native regexp code.
  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector(isolate());
  __ mov(ecx, Immediate(address_of_static_offsets_vector));

  // ebx: last_match_info (FixedArray)
  // ecx: offsets vector
  // edx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ sub(edx, Immediate(1));
  __ j(negative, &done, Label::kNear);
  // Read the value from the static offsets vector buffer.
  __ mov(edi, Operand(ecx, edx, times_int_size, 0));
  __ SmiTag(edi);
  // Store the smi value in the last match info.
  __ mov(FieldOperand(ebx, edx, times_pointer_size,
                      RegExpMatchInfo::kFirstCaptureOffset),
         edi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ mov(eax, ebx);
  __ ret(4 * kPointerSize);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExec);

  // Deferred code for string handling.
  // (6) Long external string?  If not, go to (10).
  __ bind(&not_seq_nor_cons);
  // Compare flags are still set from (3).
  __ j(greater, &not_long_external, Label::kNear);  // Go to (10).

  // (7) External string.  Short external strings have been ruled out.
  __ bind(&external_string);
  // Reload instance type.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ test_b(ebx, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ sub(eax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kTwoByteStringTag == 0);
  // (8) Is the external string one byte?  If yes, go to (5).
  __ test_b(ebx, Immediate(kStringEncodingMask));
  __ j(not_zero, &seq_one_byte_string);  // Go to (5).

  // eax: sequential subject string (or look-alike, external string)
  // edx: original subject string
  // ecx: RegExp data (FixedArray)
  // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
  __ bind(&seq_two_byte_string);
  // Load previous index and check range before edx is overwritten.  We have
  // to use edx instead of eax here because it might have been only made to
  // look like a sequential string when it actually is an external string.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
  __ Move(ecx, Immediate(0));  // Type is two byte.
  __ jmp(&check_code);  // Go to (E).

  // (10) Not a string or a short external string?  If yes, bail out to runtime.
  __ bind(&not_long_external);
  // Catch non-string subject or short external string.
  STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
  __ test(ebx, Immediate(kIsNotStringMask | kShortExternalStringTag));
  __ j(not_zero, &runtime);

  // (11) Sliced string.  Replace subject with parent.  Go to (1).
  // Load offset into edi and replace subject string with parent.
  __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
  __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
  __ jmp(&check_underlying);  // Go to (1).
#endif  // V8_INTERPRETED_REGEXP
}

static int NegativeComparisonResult(Condition cc) {
  DCHECK(cc != equal);
  DCHECK((cc == less) || (cc == less_equal)
      || (cc == greater) || (cc == greater_equal));
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}

static void CheckInputType(MacroAssembler* masm, Register input,
                           CompareICState::State expected, Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ cmp(FieldOperand(input, HeapObject::kMapOffset),
           Immediate(masm->isolate()->factory()->heap_number_map()));
    __ j(not_equal, fail);
  }
  // We could be strict about internalized/non-internalized here, but as long as
  // hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}

static void BranchIfNotInternalizedString(MacroAssembler* masm,
                                          Label* label,
                                          Register object,
                                          Register scratch) {
  __ JumpIfSmi(object, label);
  __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ test(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, label);
}

void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
  Label runtime_call, check_unequal_objects;
  Condition cc = GetCondition();

  Label miss;
  CheckInputType(masm, edx, left(), &miss);
  CheckInputType(masm, eax, right(), &miss);

  // Compare two smis.
  Label non_smi, smi_done;
  __ mov(ecx, edx);
  __ or_(ecx, eax);
  __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
  __ sub(edx, eax);  // Return on the result of the subtraction.
  __ j(no_overflow, &smi_done, Label::kNear);
  __ not_(edx);  // Correct sign in case of overflow. edx is never 0 here.
  __ bind(&smi_done);
  __ mov(eax, edx);
  __ ret(0);
  __ bind(&non_smi);

  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Identical objects can be compared fast, but there are some tricky cases
  // for NaN and undefined.
  Label generic_heap_number_comparison;
  {
    Label not_identical;
    __ cmp(eax, edx);
    __ j(not_equal, &not_identical);

    if (cc != equal) {
      // Check for undefined.  undefined OP undefined is false even though
      // undefined == undefined.
      __ cmp(edx, isolate()->factory()->undefined_value());
      Label check_for_nan;
      __ j(not_equal, &check_for_nan, Label::kNear);
      __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
      __ ret(0);
      __ bind(&check_for_nan);
    }

    // Test for NaN. Compare heap numbers in a general way,
    // to handle NaNs correctly.
    __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->heap_number_map()));
    __ j(equal, &generic_heap_number_comparison, Label::kNear);
    if (cc != equal) {
      __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
      __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
      // Call runtime on identical JSObjects.  Otherwise return equal.
      __ cmpb(ecx, Immediate(FIRST_JS_RECEIVER_TYPE));
      __ j(above_equal, &runtime_call, Label::kFar);
      // Call runtime on identical symbols since we need to throw a TypeError.
      __ cmpb(ecx, Immediate(SYMBOL_TYPE));
      __ j(equal, &runtime_call, Label::kFar);
      // Call runtime on identical SIMD values since we must throw a TypeError.
      __ cmpb(ecx, Immediate(SIMD128_VALUE_TYPE));
      __ j(equal, &runtime_call, Label::kFar);
    }
    __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
    __ ret(0);


    __ bind(&not_identical);
  }

  // Strict equality can quickly decide whether objects are equal.
  // Non-strict object equality is slower, so it is handled later in the stub.
  if (cc == equal && strict()) {
    Label slow;  // Fallthrough label.
    Label not_smis;
    // If we're doing a strict equality comparison, we don't have to do
    // type conversion, so we generate code to do fast comparison for objects
    // and oddballs. Non-smi numbers and strings still go through the usual
    // slow-case code.
    // If either is a Smi (we know that not both are), then they can only
    // be equal if the other is a HeapNumber. If so, use the slow case.
    STATIC_ASSERT(kSmiTag == 0);
    DCHECK_EQ(static_cast<Smi*>(0), Smi::kZero);
    __ mov(ecx, Immediate(kSmiTagMask));
    __ and_(ecx, eax);
    __ test(ecx, edx);
    __ j(not_zero, &not_smis, Label::kNear);
    // One operand is a smi.

    // Check whether the non-smi is a heap number.
    STATIC_ASSERT(kSmiTagMask == 1);
    // ecx still holds eax & kSmiTag, which is either zero or one.
    __ sub(ecx, Immediate(0x01));
    __ mov(ebx, edx);
    __ xor_(ebx, eax);
    __ and_(ebx, ecx);  // ebx holds either 0 or eax ^ edx.
    __ xor_(ebx, eax);
    // if eax was smi, ebx is now edx, else eax.

    // Check if the non-smi operand is a heap number.
    __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->heap_number_map()));
    // If heap number, handle it in the slow case.
    __ j(equal, &slow, Label::kNear);
    // Return non-equal (ebx is not zero)
    __ mov(eax, ebx);
    __ ret(0);

    __ bind(&not_smis);
    // If either operand is a JSObject or an oddball value, then they are not
    // equal since their pointers are different.
    // There is no test for undetectability in strict equality.

    // Get the type of the first operand.
    // If the first object is a JS object, we have done pointer comparison.
    Label first_non_object;
    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
    __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
    __ j(below, &first_non_object, Label::kNear);

    // Return non-zero (eax is not zero)
    Label return_not_equal;
    STATIC_ASSERT(kHeapObjectTag != 0);
    __ bind(&return_not_equal);
    __ ret(0);

    __ bind(&first_non_object);
    // Check for oddballs: true, false, null, undefined.
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(equal, &return_not_equal);

    __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
    __ j(above_equal, &return_not_equal);

    // Check for oddballs: true, false, null, undefined.
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(equal, &return_not_equal);

    // Fall through to the general case.
    __ bind(&slow);
  }

  // Generate the number comparison code.
  Label non_number_comparison;
  Label unordered;
  __ bind(&generic_heap_number_comparison);

  FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
  __ ucomisd(xmm0, xmm1);
  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);

  __ mov(eax, 0);  // equal
  __ mov(ecx, Immediate(Smi::FromInt(1)));
  __ cmov(above, eax, ecx);
  __ mov(ecx, Immediate(Smi::FromInt(-1)));
  __ cmov(below, eax, ecx);
  __ ret(0);

  // If one of the numbers was NaN, then the result is always false.
  // The cc is never not-equal.
  __ bind(&unordered);
  DCHECK(cc != not_equal);
  if (cc == less || cc == less_equal) {
    __ mov(eax, Immediate(Smi::FromInt(1)));
  } else {
    __ mov(eax, Immediate(Smi::FromInt(-1)));
  }
  __ ret(0);

  // The number comparison code did not provide a valid result.
  __ bind(&non_number_comparison);

  // Fast negative check for internalized-to-internalized equality.
  Label check_for_strings;
  if (cc == equal) {
    BranchIfNotInternalizedString(masm, &check_for_strings, eax, ecx);
    BranchIfNotInternalizedString(masm, &check_for_strings, edx, ecx);

    // We've already checked for object identity, so if both operands
    // are internalized they aren't equal. Register eax already holds a
    // non-zero value, which indicates not equal, so just return.
    __ ret(0);
  }

  __ bind(&check_for_strings);

  __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx,
                                           &check_unequal_objects);

  // Inline comparison of one-byte strings.
  if (cc == equal) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, edx, eax, ecx, ebx);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(masm, edx, eax, ecx, ebx,
                                                    edi);
  }
#ifdef DEBUG
  __ Abort(kUnexpectedFallThroughFromStringComparison);
#endif

  __ bind(&check_unequal_objects);
  if (cc == equal && !strict()) {
    // Non-strict equality.  Objects are unequal if
    // they are both JSObjects and not undetectable,
    // and their pointers are different.
    Label return_equal, return_unequal, undetectable;
    // At most one is a smi, so we can test for smi by adding the two.
    // A smi plus a heap object has the low bit set, a heap object plus
    // a heap object has the low bit clear.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagMask == 1);
    __ lea(ecx, Operand(eax, edx, times_1, 0));
    __ test(ecx, Immediate(kSmiTagMask));
    __ j(not_zero, &runtime_call);

    __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
    __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));

    __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, &undetectable, Label::kNear);
    __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, &return_unequal, Label::kNear);

    __ CmpInstanceType(ebx, FIRST_JS_RECEIVER_TYPE);
    __ j(below, &runtime_call, Label::kNear);
    __ CmpInstanceType(ecx, FIRST_JS_RECEIVER_TYPE);
    __ j(below, &runtime_call, Label::kNear);

    __ bind(&return_unequal);
    // Return non-equal by returning the non-zero object pointer in eax.
    __ ret(0);  // eax, edx were pushed

    __ bind(&undetectable);
    __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    __ j(zero, &return_unequal, Label::kNear);

    // If both sides are JSReceivers, then the result is false according to
    // the HTML specification, which says that only comparisons with null or
    // undefined are affected by special casing for document.all.
    __ CmpInstanceType(ebx, ODDBALL_TYPE);
    __ j(zero, &return_equal, Label::kNear);
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(not_zero, &return_unequal, Label::kNear);

    __ bind(&return_equal);
    __ Move(eax, Immediate(EQUAL));
    __ ret(0);  // eax, edx were pushed
  }
  __ bind(&runtime_call);

  if (cc == equal) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(edx);
      __ Push(eax);
      __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual);
    }
    // Turn true into 0 and false into some non-zero value.
    STATIC_ASSERT(EQUAL == 0);
    __ sub(eax, Immediate(isolate()->factory()->true_value()));
    __ Ret();
  } else {
    // Push arguments below the return address.
    __ pop(ecx);
    __ push(edx);
    __ push(eax);
    __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
    __ push(ecx);
    // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
    // tagged as a small integer.
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}

static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
  // eax : number of arguments to the construct function
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi)
  // edi : the function to call

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Number-of-arguments register must be smi-tagged to call out.
    __ SmiTag(eax);
    __ push(eax);
    __ push(edi);
    __ push(edx);
    __ push(ebx);
    __ push(esi);
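    // (esi holds the context and must survive the stub call as well.)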

    __ CallStub(stub);

    __ pop(esi);
    __ pop(ebx);
    __ pop(edx);
    __ pop(edi);
    __ pop(eax);
    __ SmiUntag(eax);
  }
}

static void GenerateRecordCallTarget(MacroAssembler* masm) {
  // Cache the called function in a feedback vector slot.  Cache states
  // are uninitialized, monomorphic (indicated by a JSFunction), and
  // megamorphic.
  // eax : number of arguments to the construct function
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi)
  // edi : the function to call
  Isolate* isolate = masm->isolate();
  Label initialize, done, miss, megamorphic, not_array_function;

  // Load the cache state into ecx.
  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));

  // A monomorphic cache hit or an already megamorphic state: invoke the
  // function without changing the state.
  // We don't know if ecx is a WeakCell or a Symbol, but it's harmless to read
  // at this position in a symbol (see static asserts in
  // type-feedback-vector.h).
  Label check_allocation_site;
  __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(ecx, Heap::kmegamorphic_symbolRootIndex);
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
                 Heap::kWeakCellMapRootIndex);
  __ j(not_equal, &check_allocation_site);

  // If the weak cell is cleared, we have a new chance to become monomorphic.
  __ JumpIfSmi(FieldOperand(ecx, WeakCell::kValueOffset), &initialize);
  __ jmp(&megamorphic);

  __ bind(&check_allocation_site);
  // If we came here, we need to see if we are the array function.
  // If we didn't have a matching function, and we didn't find the megamorphic
  // sentinel, then the slot holds either some other function or an
  // AllocationSite.
  __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &miss);

  // Make sure the function is the Array() function
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(not_equal, &megamorphic);
  __ jmp(&done, Label::kFar);

  __ bind(&miss);

  // A monomorphic miss (i.e., here the cache is not uninitialized) goes
  // megamorphic.
  __ CompareRoot(ecx, Heap::kuninitialized_symbolRootIndex);
  __ j(equal, &initialize);
  // MegamorphicSentinel is an immortal immovable object (undefined) so no
  // write-barrier is needed.
  __ bind(&megamorphic);
  __ mov(
      FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
      Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
  __ jmp(&done, Label::kFar);

  // An uninitialized cache is patched with the function or sentinel to
  // indicate the ElementsKind if function is the Array constructor.
  __ bind(&initialize);
  // Make sure the function is the Array() function
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(not_equal, &not_array_function);

  // The target function is the Array constructor.
  // Create an AllocationSite if we don't already have it, store it in the
  // slot.
  CreateAllocationSiteStub create_stub(isolate);
  CallStubInRecordCallTarget(masm, &create_stub);
  __ jmp(&done);

  __ bind(&not_array_function);
  CreateWeakCellStub weak_cell_stub(isolate);
  CallStubInRecordCallTarget(masm, &weak_cell_stub);

  __ bind(&done);
  // Increment the call count for all function calls.
1364   __ add(FieldOperand(ebx, edx, times_half_pointer_size,
1365                       FixedArray::kHeaderSize + kPointerSize),
1366          Immediate(Smi::FromInt(1)));
1367 }
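
// The feedback slot therefore walks a small lattice and, except for a cleared
// WeakCell (which gets one more chance to re-initialize), never goes back:
// uninitialized -> monomorphic (WeakCell/AllocationSite) -> megamorphic.
// A rough sketch of the transitions implemented above (illustrative types,
// not V8 API):
//
//   enum class CacheState { kUninitialized, kMonomorphic, kMegamorphic };
//   CacheState Transition(CacheState state, bool target_matches) {
//     switch (state) {
//       case CacheState::kUninitialized:
//         return CacheState::kMonomorphic;  // record the first target
//       case CacheState::kMonomorphic:
//         return target_matches ? CacheState::kMonomorphic
//                               : CacheState::kMegamorphic;
//       case CacheState::kMegamorphic:
//         return CacheState::kMegamorphic;  // terminal state
//     }
//   }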


void CallConstructStub::Generate(MacroAssembler* masm) {
  // eax : number of arguments
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi, for RecordCallTarget)
  // edi : constructor function

  Label non_function;
  // Check that function is not a smi.
  __ JumpIfSmi(edi, &non_function);
  // Check that function is a JSFunction.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &non_function);

  GenerateRecordCallTarget(masm);

  Label feedback_register_initialized;
  // Put the AllocationSite from the feedback vector into ebx, or undefined.
  __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
  __ j(equal, &feedback_register_initialized);
  __ mov(ebx, isolate()->factory()->undefined_value());
  __ bind(&feedback_register_initialized);

  __ AssertUndefinedOrAllocationSite(ebx);

  // Pass new target to construct stub.
  __ mov(edx, edi);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
  __ jmp(ecx);

  __ bind(&non_function);
  __ mov(edx, edi);
  __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
                               Register slot) {
  __ add(FieldOperand(feedback_vector, slot, times_half_pointer_size,
                      FixedArray::kHeaderSize + kPointerSize),
         Immediate(Smi::FromInt(1)));
}
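
// The call count lives in the slot right after the feedback itself and is
// kept as a smi, so adding the raw bits of Smi::FromInt(1) (i.e. 2 on ia32)
// bumps the count by one without untagging. A minimal sketch:
//
//   void IncrementSmiCount(int32_t* slot) {
//     *slot += (1 << 1);  // add Smi(1): payload shifted by kSmiTagSize == 1
//   }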

void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
  // eax - number of arguments
  // edi - function
  // edx - slot id
  // ebx - vector
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(not_equal, miss);

  // Reload ecx.
  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));

  // Increment the call count for monomorphic function calls.
  IncrementCallCount(masm, ebx, edx);

  __ mov(ebx, ecx);
  __ mov(edx, edi);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // Unreachable.
}


void CallICStub::Generate(MacroAssembler* masm) {
  // eax - number of arguments
  // edi - function
  // edx - slot id
  // ebx - vector
  Isolate* isolate = masm->isolate();
  Label extra_checks_or_miss, call, call_function, call_count_incremented;

  // The checks. First, does edi match the recorded monomorphic target?
  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));

  // We don't know that we have a weak cell. We might have a private symbol
  // or an AllocationSite, but the memory is safe to examine.
  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
  // FixedArray.
  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
  // computed, meaning that it can't appear to be a pointer. If the low bit is
  // 0, then hash is computed, but the 0 bit prevents the field from appearing
  // to be a pointer.
  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
                    WeakCell::kValueOffset &&
                WeakCell::kValueOffset == Symbol::kHashFieldSlot);

  __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
  __ j(not_equal, &extra_checks_or_miss);

  // The compare above could have been a Smi/Smi comparison. Guard against
  // this convincing us that we have a monomorphic JSFunction.
  __ JumpIfSmi(edi, &extra_checks_or_miss);

  __ bind(&call_function);

  // Increment the call count for monomorphic function calls.
  IncrementCallCount(masm, ebx, edx);

  __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                    tail_call_mode()),
          RelocInfo::CODE_TARGET);

  __ bind(&extra_checks_or_miss);
  Label uninitialized, miss, not_allocation_site;

  __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
  __ j(equal, &call);

  // Check if we have an allocation site.
  __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
                 Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &not_allocation_site);

  // We have an allocation site.
  HandleArrayCase(masm, &miss);

  __ bind(&not_allocation_site);

  // The following cases attempt to handle MISS cases without going to the
  // runtime.
  if (FLAG_trace_ic) {
    __ jmp(&miss);
  }

  __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate)));
  __ j(equal, &uninitialized);

  // We are going megamorphic. If the feedback is a JSFunction, it is fine
  // to handle it here. More complex cases are dealt with in the runtime.
  __ AssertNotSmi(ecx);
  __ CmpObjectType(ecx, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &miss);
  __ mov(
      FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
      Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));

  __ bind(&call);

  // Increment the call count for megamorphic function calls.
  IncrementCallCount(masm, ebx, edx);

  __ bind(&call_count_incremented);

  __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
          RelocInfo::CODE_TARGET);

  __ bind(&uninitialized);

  // We are going monomorphic, provided we actually have a JSFunction.
  __ JumpIfSmi(edi, &miss);

  // Go to the miss case if we do not have a function.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &miss);

  // Make sure the function is not the Array() function, which requires special
  // behavior on MISS.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(equal, &miss);

  // Make sure the function belongs to the same native context.
  __ mov(ecx, FieldOperand(edi, JSFunction::kContextOffset));
  __ mov(ecx, ContextOperand(ecx, Context::NATIVE_CONTEXT_INDEX));
  __ cmp(ecx, NativeContextOperand());
  __ j(not_equal, &miss);

  // Store the function. Use a stub since we need a frame for allocation.
  // eax - number of arguments
  // ebx - vector
  // edx - slot
  // edi - function
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    CreateWeakCellStub create_stub(isolate);
    __ SmiTag(eax);
    __ push(eax);
    __ push(ebx);
    __ push(edx);
    __ push(edi);
    __ push(esi);
    __ CallStub(&create_stub);
    __ pop(esi);
    __ pop(edi);
    __ pop(edx);
    __ pop(ebx);
    __ pop(eax);
    __ SmiUntag(eax);
  }

  __ jmp(&call_function);

  // We are here because tracing is on or we encountered a MISS case we can't
  // handle here.
  __ bind(&miss);
  GenerateMiss(masm);

  __ jmp(&call_count_incremented);

  // Unreachable.
  __ int3();
}
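
// Taken together, the fast path above is a multi-way dispatch on the feedback
// slot. A rough pseudo-C++ summary of the control flow (illustrative only):
//
//   if (slot.weak_cell_value == target) goto call_function;  // monomorphic
//   else if (slot == kMegamorphicSentinel) goto call;        // generic call
//   else if (slot.map == AllocationSiteMap) HandleArrayCase();
//   else if (slot == kUninitializedSentinel) /* record target, go mono */;
//   else GenerateMiss();                                     // runtime helps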


void CallICStub::GenerateMiss(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);

  // Preserve the number of arguments.
  __ SmiTag(eax);
  __ push(eax);

  // Push the function and feedback info.
  __ push(edi);
  __ push(ebx);
  __ push(edx);

  // Call the entry.
  __ CallRuntime(Runtime::kCallIC_Miss);

  // Move result to edi and exit the internal frame.
  __ mov(edi, eax);

  // Restore number of arguments.
  __ pop(eax);
  __ SmiUntag(eax);
}


bool CEntryStub::NeedsImmovableCode() {
  return false;
}


void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  // It is important that the store buffer overflow stubs are generated first.
  CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
  CreateWeakCellStub::GenerateAheadOfTime(isolate);
  BinaryOpICStub::GenerateAheadOfTime(isolate);
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
  StoreFastElementStub::GenerateAheadOfTime(isolate);
}


void CodeStub::GenerateFPStubs(Isolate* isolate) {
  // Generate if not already in cache.
  CEntryStub(isolate, 1, kSaveFPRegs).GetCode();
  isolate->set_fp_stubs_generated(true);
}


void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
  stub.GetCode();
}


void CEntryStub::Generate(MacroAssembler* masm) {
  // eax: number of arguments including receiver
  // ebx: pointer to C function  (C callee-saved)
  // ebp: frame pointer  (restored after C call)
  // esp: stack pointer  (restored after C call)
  // esi: current context (C callee-saved)
  // edi: JS function of the caller (C callee-saved)
  //
  // If argv_in_register():
  // ecx: pointer to the first argument

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Reserve space on the stack for the three arguments passed to the call. If
  // the result size is greater than can be returned in registers, also
  // reserve space for the hidden argument for the result location, and space
  // for the result itself.
  int arg_stack_space = result_size() < 3 ? 3 : 4 + result_size();

  // Enter the exit frame that transitions from JavaScript to C++.
  if (argv_in_register()) {
    DCHECK(!save_doubles());
    DCHECK(!is_builtin_exit());
    __ EnterApiExitFrame(arg_stack_space);

    // Move argc and argv into the correct registers.
    __ mov(esi, ecx);
    __ mov(edi, eax);
  } else {
    __ EnterExitFrame(
        arg_stack_space, save_doubles(),
        is_builtin_exit() ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
  }

  // ebx: pointer to C function  (C callee-saved)
  // ebp: frame pointer  (restored after C call)
  // esp: stack pointer  (restored after C call)
  // edi: number of arguments including receiver  (C callee-saved)
  // esi: pointer to the first argument (C callee-saved)

  // Result returned in eax, or eax+edx if result size is 2.

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }
  // Call C function.
  if (result_size() <= 2) {
    __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
    __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
    __ mov(Operand(esp, 2 * kPointerSize),
           Immediate(ExternalReference::isolate_address(isolate())));
  } else {
    DCHECK_EQ(3, result_size());
    // Pass a pointer to the result location as the first argument.
    __ lea(eax, Operand(esp, 4 * kPointerSize));
    __ mov(Operand(esp, 0 * kPointerSize), eax);
    __ mov(Operand(esp, 1 * kPointerSize), edi);  // argc.
    __ mov(Operand(esp, 2 * kPointerSize), esi);  // argv.
    __ mov(Operand(esp, 3 * kPointerSize),
           Immediate(ExternalReference::isolate_address(isolate())));
  }
  __ call(ebx);

  if (result_size() > 2) {
    DCHECK_EQ(3, result_size());
#ifndef _WIN32
    // Restore the "hidden" argument slot on the stack, which the callee
    // popped on return.
    __ sub(esp, Immediate(kPointerSize));
#endif
    // Read result values stored on stack. Result is stored above the
    // arguments.
    __ mov(kReturnRegister0, Operand(esp, 4 * kPointerSize));
    __ mov(kReturnRegister1, Operand(esp, 5 * kPointerSize));
    __ mov(kReturnRegister2, Operand(esp, 6 * kPointerSize));
  }
  // Result is in eax, edx:eax or edi:edx:eax - do not destroy these registers!

  // Check result for exception sentinel.
  Label exception_returned;
  __ cmp(eax, isolate()->factory()->exception());
  __ j(equal, &exception_returned);

  // Check that there is no pending exception, otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    __ push(edx);
    __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
    Label okay;
    ExternalReference pending_exception_address(
        Isolate::kPendingExceptionAddress, isolate());
    __ cmp(edx, Operand::StaticVariable(pending_exception_address));
    // Cannot use Check here, as it attempts to generate a call into the
    // runtime.
    __ j(equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
    __ pop(edx);
  }

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(save_doubles(), !argv_in_register());
  __ ret(0);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address(
      Isolate::kPendingHandlerContextAddress, isolate());
  ExternalReference pending_handler_code_address(
      Isolate::kPendingHandlerCodeAddress, isolate());
  ExternalReference pending_handler_offset_address(
      Isolate::kPendingHandlerOffsetAddress, isolate());
  ExternalReference pending_handler_fp_address(
      Isolate::kPendingHandlerFPAddress, isolate());
  ExternalReference pending_handler_sp_address(
      Isolate::kPendingHandlerSPAddress, isolate());

  // Ask the runtime for help to determine the handler. This will set eax to
  // contain the current pending exception; don't clobber it.
  ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
                                 isolate());
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, eax);
    __ mov(Operand(esp, 0 * kPointerSize), Immediate(0));  // argc.
    __ mov(Operand(esp, 1 * kPointerSize), Immediate(0));  // argv.
    __ mov(Operand(esp, 2 * kPointerSize),
           Immediate(ExternalReference::isolate_address(isolate())));
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ mov(esi, Operand::StaticVariable(pending_handler_context_address));
  __ mov(esp, Operand::StaticVariable(pending_handler_sp_address));
  __ mov(ebp, Operand::StaticVariable(pending_handler_fp_address));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (esi == 0) for non-JS frames.
  Label skip;
  __ test(esi, esi);
  __ j(zero, &skip, Label::kNear);
  __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  __ bind(&skip);

  // Compute the handler entry address and jump to it.
  __ mov(edi, Operand::StaticVariable(pending_handler_code_address));
  __ mov(edx, Operand::StaticVariable(pending_handler_offset_address));
  __ lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
  __ jmp(edi);
}
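
// For result_size() == 3 the C callee cannot return in registers, so the
// frame set up above reserves 4 + 3 slots and passes a hidden pointer to the
// result area. The layout at the call site, as built by the code above:
//
//   esp[0]    : &esp[4]  (hidden pointer to the result location)
//   esp[1]    : argc
//   esp[2]    : argv
//   esp[3]    : isolate
//   esp[4..6] : result triple, copied into eax/edx/edi after the call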


void JSEntryStub::Generate(MacroAssembler* masm) {
  Label invoke, handler_entry, exit;
  Label not_outermost_js, not_outermost_js_2;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Set up frame.
  __ push(ebp);
  __ mov(ebp, esp);

  // Push marker in two places.
  int marker = type();
  __ push(Immediate(Smi::FromInt(marker)));  // marker
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  __ push(Operand::StaticVariable(context_address));  // context
  // Save callee-saved registers (C calling conventions).
  __ push(edi);
  __ push(esi);
  __ push(ebx);

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
  __ push(Operand::StaticVariable(c_entry_fp));

  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
  __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ j(not_equal, &not_outermost_js, Label::kNear);
  __ mov(Operand::StaticVariable(js_entry_sp), ebp);
  __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  __ jmp(&invoke, Label::kNear);
  __ bind(&not_outermost_js);
  __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);
  handler_offset_ = handler_entry.pos();
  // Caught exception: Store result (exception) in the pending exception
  // field in the JSEnv and return a failure sentinel.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      isolate());
  __ mov(Operand::StaticVariable(pending_exception), eax);
  __ mov(eax, Immediate(isolate()->factory()->exception()));
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushStackHandler();

  // Fake a receiver (NULL).
  __ push(Immediate(0));  // receiver

  // Invoke the function by calling through the JS entry trampoline builtin
  // and pop the faked function when we return. Notice that we cannot store a
  // reference to the trampoline code directly in this stub, because the
  // builtin stubs may not have been generated yet.
  if (type() == StackFrame::ENTRY_CONSTRUCT) {
    ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
                                      isolate());
    __ mov(edx, Immediate(construct_entry));
  } else {
    ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
    __ mov(edx, Immediate(entry));
  }
  __ mov(edx, Operand(edx, 0));  // deref address
  __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
  __ call(edx);

  // Unlink this frame from the handler chain.
  __ PopStackHandler();

  __ bind(&exit);
  // Check if the current stack frame is marked as the outermost JS frame.
  __ pop(ebx);
  __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  __ j(not_equal, &not_outermost_js_2);
  __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ bind(&not_outermost_js_2);

  // Restore the top frame descriptor from the stack.
  __ pop(Operand::StaticVariable(ExternalReference(
      Isolate::kCEntryFPAddress, isolate())));

  // Restore callee-saved registers (C calling conventions).
  __ pop(ebx);
  __ pop(esi);
  __ pop(edi);
  __ add(esp, Immediate(2 * kPointerSize));  // remove markers

  // Restore frame pointer and return.
  __ pop(ebp);
  __ ret(0);
}
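
// The js_entry_sp bookkeeping above nests like a counter where only the
// outermost level is recorded. A rough pseudo-C++ restatement of the entry
// and exit paths (illustrative only):
//
//   void EnterJS() {
//     if (js_entry_sp == 0) { js_entry_sp = ebp; push(OUTERMOST); }
//     else                  { push(INNER); }
//   }
//   void ExitJS() {
//     if (pop() == OUTERMOST) js_entry_sp = 0;  // last JS entry unwound
//   }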


// -------------------------------------------------------------------------
// StringCharCodeAtGenerator

void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
  // If the receiver is a smi, trigger the non-string case.
  STATIC_ASSERT(kSmiTag == 0);
  if (check_mode_ == RECEIVER_IS_UNKNOWN) {
    __ JumpIfSmi(object_, receiver_not_string_);

    // Fetch the instance type of the receiver into the result register.
    __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
    __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
    // If the receiver is not a string, trigger the non-string case.
    __ test(result_, Immediate(kIsNotStringMask));
    __ j(not_zero, receiver_not_string_);
  }

  // If the index is not a smi, trigger the non-smi case.
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(index_, &index_not_smi_);
  __ bind(&got_smi_index_);

  // Check for index out of range.
  __ cmp(index_, FieldOperand(object_, String::kLengthOffset));
  __ j(above_equal, index_out_of_range_);

  __ SmiUntag(index_);

  Factory* factory = masm->isolate()->factory();
  StringCharLoadGenerator::Generate(
      masm, factory, object_, index_, result_, &call_runtime_);

  __ SmiTag(result_);
  __ bind(&exit_);
}
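
// A note on the bounds check above: index and length are both smis, so the
// tagged words compare in the same order as the untagged values, and the
// unsigned above_equal branch rejects negative indices for free. A minimal
// sketch of the same check on untagged values:
//
//   bool IndexInRange(int32_t index, int32_t length) {
//     // One unsigned compare covers both index < 0 and index >= length.
//     return static_cast<uint32_t>(index) < static_cast<uint32_t>(length);
//   }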


void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm, EmbedMode embed_mode,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);

  // Index is not a smi.
  __ bind(&index_not_smi_);
  // If index is a heap number, try converting it to an integer.
  __ CheckMap(index_,
              masm->isolate()->factory()->heap_number_map(),
              index_not_number_,
              DONT_DO_SMI_CHECK);
  call_helper.BeforeCall(masm);
  if (embed_mode == PART_OF_IC_HANDLER) {
    __ push(LoadWithVectorDescriptor::VectorRegister());
    __ push(LoadDescriptor::SlotRegister());
  }
  __ push(object_);
  __ push(index_);  // Consumed by runtime conversion function.
  __ CallRuntime(Runtime::kNumberToSmi);
  if (!index_.is(eax)) {
    // Save the conversion result before the pop instructions below
    // have a chance to overwrite it.
    __ mov(index_, eax);
  }
  __ pop(object_);
  if (embed_mode == PART_OF_IC_HANDLER) {
    __ pop(LoadDescriptor::SlotRegister());
    __ pop(LoadWithVectorDescriptor::VectorRegister());
  }
  // Reload the instance type.
  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  call_helper.AfterCall(masm);
  // If index is still not a smi, it must be out of range.
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(index_, index_out_of_range_);
  // Otherwise, return to the fast path.
  __ jmp(&got_smi_index_);

  // Call runtime. We get here when the receiver is a string and the
  // index is a number, but the code of getting the actual character
  // is too complex (e.g., when the string needs to be flattened).
  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ push(object_);
  __ SmiTag(index_);
  __ push(index_);
  __ CallRuntime(Runtime::kStringCharCodeAtRT);
  if (!result_.is(eax)) {
    __ mov(result_, eax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}


// -------------------------------------------------------------------------
// StringCharFromCodeGenerator

void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiShiftSize == 0);
  DCHECK(base::bits::IsPowerOfTwo32(String::kMaxOneByteCharCodeU + 1));
  __ test(code_, Immediate(kSmiTagMask |
                           ((~String::kMaxOneByteCharCodeU) << kSmiTagSize)));
  __ j(not_zero, &slow_case_);

  Factory* factory = masm->isolate()->factory();
  __ Move(result_, Immediate(factory->single_character_string_cache()));
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiShiftSize == 0);
  // At this point the code register contains a smi-tagged one-byte char code.
  __ mov(result_, FieldOperand(result_,
                               code_, times_half_pointer_size,
                               FixedArray::kHeaderSize));
  __ cmp(result_, factory->undefined_value());
  __ j(equal, &slow_case_);
  __ bind(&exit_);
}
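
// The single test above folds two checks into one mask: the value must be a
// smi (low bit clear) and its payload must fit in one byte. This relies on
// kMaxOneByteCharCodeU + 1 being a power of two (see the DCHECK). A rough
// sketch of the predicate on raw tagged words, assuming the max one-byte
// char code is 0xFF:
//
//   bool IsOneByteCharCodeSmi(uint32_t tagged) {
//     const uint32_t kMask = 1u /* kSmiTagMask */ | (~0xFFu << 1);
//     return (tagged & kMask) == 0;  // smi-tagged and payload in [0, 0xFF]
//   }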


void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ push(code_);
  __ CallRuntime(Runtime::kStringCharFromCode);
  if (!result_.is(eax)) {
    __ mov(result_, eax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}


void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          Register scratch,
                                          String::Encoding encoding) {
  DCHECK(!scratch.is(dest));
  DCHECK(!scratch.is(src));
  DCHECK(!scratch.is(count));

  // Nothing to do for zero characters.
  Label done;
  __ test(count, count);
  __ j(zero, &done);

  // Make count the number of bytes to copy.
  if (encoding == String::TWO_BYTE_ENCODING) {
    __ shl(count, 1);
  }

  Label loop;
  __ bind(&loop);
  __ mov_b(scratch, Operand(src, 0));
  __ mov_b(Operand(dest, 0), scratch);
  __ inc(src);
  __ inc(dest);
  __ dec(count);
  __ j(not_zero, &loop);

  __ bind(&done);
}
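
// The loop above is a plain byte copy; two-byte strings are handled by
// doubling the count rather than widening the loads. A rough C++ equivalent
// of the whole helper:
//
//   #include <cstring>
//   void CopyCharacters(uint8_t* dest, const uint8_t* src, size_t count,
//                       bool two_byte) {
//     memcpy(dest, src, two_byte ? count * 2 : count);
//   }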


void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
                                                   Register left,
                                                   Register right,
                                                   Register scratch1,
                                                   Register scratch2) {
  Register length = scratch1;

  // Compare lengths.
  Label strings_not_equal, check_zero_length;
  __ mov(length, FieldOperand(left, String::kLengthOffset));
  __ cmp(length, FieldOperand(right, String::kLengthOffset));
  __ j(equal, &check_zero_length, Label::kNear);
  __ bind(&strings_not_equal);
  __ Move(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
  __ ret(0);

  // Check if the length is zero.
  Label compare_chars;
  __ bind(&check_zero_length);
  STATIC_ASSERT(kSmiTag == 0);
  __ test(length, length);
  __ j(not_zero, &compare_chars, Label::kNear);
  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  // Compare characters.
  __ bind(&compare_chars);
  GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
                                  &strings_not_equal, Label::kNear);

  // Characters are equal.
  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);
}


void StringHelper::GenerateCompareFlatOneByteStrings(
    MacroAssembler* masm, Register left, Register right, Register scratch1,
    Register scratch2, Register scratch3) {
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_compare_native(), 1);

  // Find minimum length.
  Label left_shorter;
  __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
  __ mov(scratch3, scratch1);
  __ sub(scratch3, FieldOperand(right, String::kLengthOffset));

  Register length_delta = scratch3;

  __ j(less_equal, &left_shorter, Label::kNear);
  // Right string is shorter. Change scratch1 to be length of right string.
  __ sub(scratch1, length_delta);
  __ bind(&left_shorter);

  Register min_length = scratch1;

  // If either length is zero, just compare lengths.
  Label compare_lengths;
  __ test(min_length, min_length);
  __ j(zero, &compare_lengths, Label::kNear);

  // Compare characters.
  Label result_not_equal;
  GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2,
                                  &result_not_equal, Label::kNear);

  // Compare lengths: strings up to min-length are equal.
  __ bind(&compare_lengths);
  __ test(length_delta, length_delta);
  Label length_not_equal;
  __ j(not_zero, &length_not_equal, Label::kNear);

  // Result is EQUAL.
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  Label result_greater;
  Label result_less;
  __ bind(&length_not_equal);
  __ j(greater, &result_greater, Label::kNear);
  __ jmp(&result_less, Label::kNear);
  __ bind(&result_not_equal);
  __ j(above, &result_greater, Label::kNear);
  __ bind(&result_less);

  // Result is LESS.
  __ Move(eax, Immediate(Smi::FromInt(LESS)));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Move(eax, Immediate(Smi::FromInt(GREATER)));
  __ ret(0);
}


void StringHelper::GenerateOneByteCharsCompareLoop(
    MacroAssembler* masm, Register left, Register right, Register length,
    Register scratch, Label* chars_not_equal,
    Label::Distance chars_not_equal_near) {
  // Change index to run from -length to -1 by adding length to string
  // start. This means that loop ends when index reaches zero, which
  // doesn't need an additional compare.
  __ SmiUntag(length);
  __ lea(left,
         FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
  __ lea(right,
         FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
  __ neg(length);
  Register index = length;  // index = -length;

  // Compare loop.
  Label loop;
  __ bind(&loop);
  __ mov_b(scratch, Operand(left, index, times_1, 0));
  __ cmpb(scratch, Operand(right, index, times_1, 0));
  __ j(not_equal, chars_not_equal, chars_not_equal_near);
  __ inc(index);
  __ j(not_zero, &loop);
}
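
// The negative-index trick above lets the loop's increment double as its
// termination test: the inc sets ZF exactly when the index reaches zero. A
// rough C++ rendering of the same loop shape:
//
//   bool CharsEqual(const uint8_t* left, const uint8_t* right, intptr_t len) {
//     const uint8_t* lend = left + len;   // point past the last character
//     const uint8_t* rend = right + len;
//     for (intptr_t i = -len; i != 0; i++) {
//       if (lend[i] != rend[i]) return false;
//     }
//     return true;
//   }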


void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edx    : left
  //  -- eax    : right
  //  -- esp[0] : return address
  // -----------------------------------

  // Load ecx with the allocation site.  We stick an undefined dummy value here
  // and replace it with the real allocation site later when we instantiate
  // this stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
  __ mov(ecx, isolate()->factory()->undefined_value());

  // Make sure that we actually patched the allocation site.
  if (FLAG_debug_code) {
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_equal, kExpectedAllocationSite);
    __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
           isolate()->factory()->allocation_site_map());
    __ Assert(equal, kExpectedAllocationSite);
  }

  // Tail call into the stub that handles binary operations with allocation
  // sites.
  BinaryOpWithAllocationSiteStub stub(isolate(), state());
  __ TailCallStub(&stub);
}


void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
  DCHECK_EQ(CompareICState::BOOLEAN, state());
  Label miss;
  Label::Distance const miss_distance =
      masm->emit_debug_code() ? Label::kFar : Label::kNear;

  __ JumpIfSmi(edx, &miss, miss_distance);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ JumpIfSmi(eax, &miss, miss_distance);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ JumpIfNotRoot(ecx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
  __ JumpIfNotRoot(ebx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
  if (!Token::IsEqualityOp(op())) {
    __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
    __ AssertSmi(eax);
    __ mov(edx, FieldOperand(edx, Oddball::kToNumberOffset));
    __ AssertSmi(edx);
    __ push(eax);
    __ mov(eax, edx);
    __ pop(edx);
  }
  __ sub(eax, edx);
  __ Ret();

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateSmis(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::SMI);
  Label miss;
  __ mov(ecx, edx);
  __ or_(ecx, eax);
  __ JumpIfNotSmi(ecx, &miss, Label::kNear);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    __ sub(eax, edx);
  } else {
    Label done;
    __ sub(edx, eax);
    __ j(no_overflow, &done, Label::kNear);
    // Correct sign of result in case of overflow.
    __ not_(edx);
    __ bind(&done);
    __ mov(eax, edx);
  }
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
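
// The overflow fix-up above works because a signed subtraction that
// overflows produces a result with the wrong sign, and flipping all bits
// restores a value with the correct sign (magnitude is irrelevant for a
// comparison). A rough sketch using a GCC/Clang builtin:
//
//   // left/right are smi-tagged words (payload << 1), as in the stub above;
//   // evenness guarantees the wrapped difference is never -1, so ~diff != 0.
//   int32_t CompareTaggedSmis(int32_t left, int32_t right) {
//     int32_t diff;
//     if (__builtin_sub_overflow(left, right, &diff)) diff = ~diff;
//     return diff;  // sign encodes the ordering; zero means equal
//   }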


void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::NUMBER);

  Label generic_stub;
  Label unordered, maybe_undefined1, maybe_undefined2;
  Label miss;

  if (left() == CompareICState::SMI) {
    __ JumpIfNotSmi(edx, &miss);
  }
  if (right() == CompareICState::SMI) {
    __ JumpIfNotSmi(eax, &miss);
  }

  // Load left and right operand.
  Label done, left, left_smi, right_smi;
  __ JumpIfSmi(eax, &right_smi, Label::kNear);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined1, Label::kNear);
  __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ jmp(&left, Label::kNear);
  __ bind(&right_smi);
  __ mov(ecx, eax);  // Can't clobber eax because we can still jump away.
  __ SmiUntag(ecx);
  __ Cvtsi2sd(xmm1, ecx);

  __ bind(&left);
  __ JumpIfSmi(edx, &left_smi, Label::kNear);
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
         isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined2, Label::kNear);
  __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
  __ jmp(&done);
  __ bind(&left_smi);
  __ mov(ecx, edx);  // Can't clobber edx because we can still jump away.
  __ SmiUntag(ecx);
  __ Cvtsi2sd(xmm0, ecx);

  __ bind(&done);
  // Compare operands.
  __ ucomisd(xmm0, xmm1);

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);

  // Return a result of -1, 0, or 1, based on EFLAGS.
  // Use mov rather than xor to clear eax, because xor would clobber the flags.
  __ mov(eax, 0);  // equal
  __ mov(ecx, Immediate(Smi::FromInt(1)));
  __ cmov(above, eax, ecx);
  __ mov(ecx, Immediate(Smi::FromInt(-1)));
  __ cmov(below, eax, ecx);
  __ ret(0);

  __ bind(&unordered);
  __ bind(&generic_stub);
  CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
                     CompareICState::GENERIC, CompareICState::GENERIC);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&maybe_undefined1);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ cmp(eax, Immediate(isolate()->factory()->undefined_value()));
    __ j(not_equal, &miss);
    __ JumpIfSmi(edx, &unordered);
    __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
    __ j(not_equal, &maybe_undefined2, Label::kNear);
    __ jmp(&unordered);
  }

  __ bind(&maybe_undefined2);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ cmp(edx, Immediate(isolate()->factory()->undefined_value()));
    __ j(equal, &unordered);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}
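
// The flag-to-result sequence above is branchless: start from "equal" and let
// two conditional moves overwrite it. A rough C++ equivalent, with NaN (the
// unordered case) already filtered out beforehand, as in the stub:
//
//   int Compare(double x, double y) {
//     int result = 0;         // equal
//     if (x > y) result = 1;  // cmov(above)
//     if (x < y) result = -1; // cmov(below)
//     return result;
//   }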


void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::INTERNALIZED_STRING);
  DCHECK(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;

  // Check that both operands are heap objects.
  Label miss;
  __ mov(tmp1, left);
  STATIC_ASSERT(kSmiTag == 0);
  __ and_(tmp1, right);
  __ JumpIfSmi(tmp1, &miss, Label::kNear);

  // Check that both operands are internalized strings.
  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ or_(tmp1, tmp2);
  __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, &miss, Label::kNear);

  // Internalized strings are compared by identity.
  Label done;
  __ cmp(left, right);
  // Make sure eax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  DCHECK(right.is(eax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::UNIQUE_NAME);
  DCHECK(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;

  // Check that both operands are heap objects.
  Label miss;
  __ mov(tmp1, left);
  STATIC_ASSERT(kSmiTag == 0);
  __ and_(tmp1, right);
  __ JumpIfSmi(tmp1, &miss, Label::kNear);

  // Check that both operands are unique names. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));

  __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
  __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);

  // Unique names are compared by identity.
  Label done;
  __ cmp(left, right);
  // Make sure eax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  DCHECK(right.is(eax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::STRING);
  Label miss;

  bool equality = Token::IsEqualityOp(op());

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;
  Register tmp3 = edi;

  // Check that both operands are heap objects.
  __ mov(tmp1, left);
  STATIC_ASSERT(kSmiTag == 0);
  __ and_(tmp1, right);
  __ JumpIfSmi(tmp1, &miss);

  // Check that both operands are strings. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  __ mov(tmp3, tmp1);
  STATIC_ASSERT(kNotStringTag != 0);
  __ or_(tmp3, tmp2);
  __ test(tmp3, Immediate(kIsNotStringMask));
  __ j(not_zero, &miss);

  // Fast check for identical strings.
  Label not_same;
  __ cmp(left, right);
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  // Handle not identical strings.
  __ bind(&not_same);

  // Check that both strings are internalized. If they are, we're done
  // because we already know they are not identical. But in the case of a
  // non-equality compare, we still need to determine the order. We
  // also know they are both strings.
  if (equality) {
    Label do_compare;
    STATIC_ASSERT(kInternalizedTag == 0);
    __ or_(tmp1, tmp2);
    __ test(tmp1, Immediate(kIsNotInternalizedMask));
    __ j(not_zero, &do_compare, Label::kNear);
    // Make sure eax is non-zero. At this point input operands are
    // guaranteed to be non-zero.
    DCHECK(right.is(eax));
    __ ret(0);
    __ bind(&do_compare);
  }

  // Check that both strings are sequential one-byte.
  Label runtime;
  __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);

  // Compare flat one-byte strings. Returns when done.
  if (equality) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
                                                  tmp2);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
                                                    tmp2, tmp3);
  }

  // Handle more complex cases in runtime.
  __ bind(&runtime);
  if (equality) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(left);
      __ Push(right);
      __ CallRuntime(Runtime::kStringEqual);
    }
    __ sub(eax, Immediate(masm->isolate()->factory()->true_value()));
    __ Ret();
  } else {
    __ pop(tmp1);  // Return address.
    __ push(left);
    __ push(right);
    __ push(tmp1);
    __ TailCallRuntime(Runtime::kStringCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
  DCHECK_EQ(CompareICState::RECEIVER, state());
  Label miss;
  __ mov(ecx, edx);
  __ and_(ecx, eax);
  __ JumpIfSmi(ecx, &miss, Label::kNear);

  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(below, &miss, Label::kNear);
  __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(below, &miss, Label::kNear);

  DCHECK_EQ(equal, GetCondition());
  __ sub(eax, edx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
  Label miss;
  Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
  __ mov(ecx, edx);
  __ and_(ecx, eax);
  __ JumpIfSmi(ecx, &miss, Label::kNear);

  __ GetWeakValue(edi, cell);
  __ cmp(edi, FieldOperand(eax, HeapObject::kMapOffset));
  __ j(not_equal, &miss, Label::kNear);
  __ cmp(edi, FieldOperand(edx, HeapObject::kMapOffset));
  __ j(not_equal, &miss, Label::kNear);

  if (Token::IsEqualityOp(op())) {
    __ sub(eax, edx);
    __ ret(0);
  } else {
    __ PopReturnAddressTo(ecx);
    __ Push(edx);
    __ Push(eax);
    __ Push(Immediate(Smi::FromInt(NegativeComparisonResult(GetCondition()))));
    __ PushReturnAddressFrom(ecx);
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateMiss(MacroAssembler* masm) {
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(edx);  // Preserve edx and eax.
    __ push(eax);
    __ push(edx);  // And also use them as the arguments.
    __ push(eax);
    __ push(Immediate(Smi::FromInt(op())));
    __ CallRuntime(Runtime::kCompareIC_Miss);
    // Compute the entry point of the rewritten stub.
    __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
    __ pop(eax);
    __ pop(edx);
  }

  // Do a tail call to the rewritten stub.
  __ jmp(edi);
}


// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be a unique name and receiver must be a heap object.
void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register properties,
                                                      Handle<Name> name,
                                                      Register r0) {
  DCHECK(name->IsUniqueName());

  // If the names of the slots probed for the hash value in rounds 1 to
  // kProbes - 1 are not equal to the name, and the kProbes-th slot is unused
  // (its name is the undefined value), then the hash table is guaranteed not
  // to contain the property. This holds even if some slots represent deleted
  // properties (their names are the hole value).
  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = r0;
    // Capacity is smi 2^n.
    __ mov(index, FieldOperand(properties, kCapacityOffset));
    __ dec(index);
    __ and_(index,
            Immediate(Smi::FromInt(name->Hash() +
                                   NameDictionary::GetProbeOffset(i))));

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ lea(index, Operand(index, index, times_2, 0));  // index *= 3.
    Register entity_name = r0;
    // Having undefined at this place means the name is not contained.
    STATIC_ASSERT(kSmiTagSize == 1);
    __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
                                kElementsStartOffset - kHeapObjectTag));
    __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
    __ j(equal, done);

    // Stop if we found the property.
    __ cmp(entity_name, Handle<Name>(name));
    __ j(equal, miss);

    Label good;
    // Check for the hole and skip.
    __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
    __ j(equal, &good, Label::kNear);

    // Check if the entry name is not a unique name.
    __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
    __ JumpIfNotUniqueNameInstanceType(
        FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
    __ bind(&good);
  }

  NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
                                NEGATIVE_LOOKUP);
  __ push(Immediate(Handle<Object>(name)));
  __ push(Immediate(name->Hash()));
  __ CallStub(&stub);
  __ test(r0, r0);
  __ j(not_zero, miss);
  __ jmp(done);
}
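
// The probe sequence used above (and in the out-of-line stub below) is
// quadratic: slot_i = (hash + i + i*i) & (capacity - 1), which visits every
// slot of a power-of-two table. A minimal standalone sketch, with each entry
// spanning three slots (kEntrySize == 3, as asserted above):
//
//   uint32_t ProbeSlot(uint32_t hash, uint32_t round, uint32_t capacity) {
//     uint32_t index = (hash + round + round * round) & (capacity - 1);
//     return index * 3;  // key/value/details per entry
//   }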


// Probe the name dictionary in the |elements| register. Jump to the
// |done| label if a property with the given name is found, leaving the
// index into the dictionary in |r0|. Jump to the |miss| label
// otherwise.
void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register elements,
                                                      Register name,
                                                      Register r0,
                                                      Register r1) {
  DCHECK(!elements.is(r0));
  DCHECK(!elements.is(r1));
  DCHECK(!name.is(r0));
  DCHECK(!name.is(r1));

  __ AssertName(name);

  __ mov(r1, FieldOperand(elements, kCapacityOffset));
  __ shr(r1, kSmiTagSize);  // convert smi to int
  __ dec(r1);

  // Generate an unrolled loop that performs a few probes before
  // giving up. Measurements done on Gmail indicate that 2 probes
  // cover ~93% of loads from dictionaries.
  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
    __ shr(r0, Name::kHashShift);
    if (i > 0) {
      __ add(r0, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ and_(r0, r1);

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ lea(r0, Operand(r0, r0, times_2, 0));  // r0 = r0 * 3

    // Check if the key is identical to the name.
    __ cmp(name, Operand(elements,
                         r0,
                         times_4,
                         kElementsStartOffset - kHeapObjectTag));
    __ j(equal, done);
  }

  NameDictionaryLookupStub stub(masm->isolate(), elements, r1, r0,
                                POSITIVE_LOOKUP);
  __ push(name);
  __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
  __ shr(r0, Name::kHashShift);
  __ push(r0);
  __ CallStub(&stub);

  __ test(r1, r1);
  __ j(zero, miss);
  __ jmp(done);
}
2741 
2742 
Generate(MacroAssembler * masm)2743 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
2744   // This stub overrides SometimesSetsUpAFrame() to return false.  That means
2745   // we cannot call anything that could cause a GC from this stub.
2746   // Stack frame on entry:
2747   //  esp[0 * kPointerSize]: return address.
2748   //  esp[1 * kPointerSize]: key's hash.
2749   //  esp[2 * kPointerSize]: key.
2750   // Registers:
2751   //  dictionary_: NameDictionary to probe.
2752   //  result_: used as scratch.
2753   //  index_: will hold an index of entry if lookup is successful.
2754   //          might alias with result_.
2755   // Returns:
2756   //  result_ is zero if lookup failed, non zero otherwise.
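  //
  // As a rough sketch (illustration only, not emitted code), the probe loop
  // below computes:
  //   for (int i = kInlinedProbes; i < kTotalProbes; i++) {
  //     int entry = (hash + i + i * i) & (capacity - 1);
  //     Object* key = dictionary[kElementsStartIndex + entry * 3];
  //     if (key == undefined) return 0;  // Not in dictionary.
  //     if (key == name) return 1;       // Found.
  //   }
  // plus a bailout for non-unique names during negative lookup.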

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result();

  __ mov(scratch, FieldOperand(dictionary(), kCapacityOffset));
  __ dec(scratch);
  __ SmiUntag(scratch);
  __ push(scratch);

  // If the names of the slots in the range from 1 to kProbes - 1 for the
  // hash value are not equal to the name and the kProbes-th slot is not
  // used (its name is the undefined value), the hash table is guaranteed
  // not to contain the property. This holds even if some slots represent
  // deleted properties (their names are the null value).
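  // Put differently (illustration only): undefined marks a never-used slot
  // and terminates the probe with "not found", while null marks a deleted
  // slot and the probe must continue past it.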
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ mov(scratch, Operand(esp, 2 * kPointerSize));
    if (i > 0) {
      __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ and_(scratch, Operand(esp, 0));

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ lea(index(), Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    STATIC_ASSERT(kSmiTagSize == 1);
    __ mov(scratch, Operand(dictionary(), index(), times_pointer_size,
                            kElementsStartOffset - kHeapObjectTag));
    __ cmp(scratch, isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if found the property.
    __ cmp(scratch, Operand(esp, 3 * kPointerSize));
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
      // If we hit a key that is not a unique name during negative
      // lookup we have to bail out as this key might be equal to the
      // key we are looking for.

      // Check if the entry name is not a unique name.
      __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
      __ JumpIfNotUniqueNameInstanceType(
          FieldOperand(scratch, Map::kInstanceTypeOffset),
          &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup probing failure
  // should be treated as lookup failure.
  if (mode() == POSITIVE_LOOKUP) {
    __ mov(result(), Immediate(0));
    __ Drop(1);
    __ ret(2 * kPointerSize);
  }

  __ bind(&in_dictionary);
  __ mov(result(), Immediate(1));
  __ Drop(1);
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ mov(result(), Immediate(0));
  __ Drop(1);
  __ ret(2 * kPointerSize);
}


void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
    Isolate* isolate) {
  StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs);
  stub.GetCode();
  StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
  stub2.GetCode();
}


// Takes the input in 3 registers: address_, value_ and object_.  A pointer to
// the value has just been written into the object, and now this stub makes
// sure we keep the GC informed.  The word in the object where the value has
// been written is in the address register.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call.  We patch them back
  // and forth between a compare instruction (a nop in this position) and the
  // real branch when we start and stop incremental heap marking.
  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
  __ jmp(&skip_to_incremental_compacting, Label::kFar);
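  // Illustration only: the short jmp above occupies bytes 0..1 and the far
  // jmp bytes 2..6. The set_byte_at() calls at the end of this function
  // patch the jump opcodes so that each instruction decodes as a harmless
  // compare (an effective 2-byte / 5-byte nop) until incremental marking
  // flips them back.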

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
  masm->set_byte_at(0, kTwoByteNopInstruction);
  masm->set_byte_at(2, kFiveByteNopInstruction);
}


void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),  // Value.
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
                        &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm,
        kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
        mode);
    InformIncrementalMarker(masm);
    regs_.Restore(masm);
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  CheckNeedsToInformIncrementalMarker(
      masm,
      kReturnOnNoNeedToInformIncrementalMarker,
      mode);
  InformIncrementalMarker(masm);
  regs_.Restore(masm);
  __ ret(0);
}


void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
  int argument_count = 3;
  __ PrepareCallCFunction(argument_count, regs_.scratch0());
  __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
  __ mov(Operand(esp, 1 * kPointerSize), regs_.address());  // Slot.
  __ mov(Operand(esp, 2 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));

  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::incremental_marking_record_write_function(isolate()),
      argument_count);

  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
}


void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label object_is_black, need_incremental, need_incremental_pop_object;

  // Let's look at the color of the object:  If it is not black we don't have
  // to inform the incremental marker.
  __ JumpIfBlack(regs_.object(),
                 regs_.scratch0(),
                 regs_.scratch1(),
                 &object_is_black,
                 Label::kNear);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&object_is_black);

  // Get the value from the slot.
  __ mov(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     not_zero,
                     &ensure_not_white,
                     Label::kNear);

    __ jmp(&need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ push(regs_.object());
  __ JumpIfWhite(regs_.scratch0(),  // The value.
                 regs_.scratch1(),  // Scratch.
                 regs_.object(),    // Scratch.
                 &need_incremental_pop_object, Label::kNear);
  __ pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&need_incremental_pop_object);
  __ pop(regs_.object());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}


void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(isolate(), 1, kSaveFPRegs);
  __ call(ces.GetCode(), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrameConstants::kArgumentsLengthOffset;
  __ mov(ebx, MemOperand(ebp, parameter_count_offset));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ pop(ecx);
  int additional_offset =
      function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
  __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset));
  __ jmp(ecx);  // Return to IC Miss stub, continuation still on stack.
}

void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister());
  KeyedStoreICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}

// value is on the stack already.
static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register receiver,
                                       Register key, Register vector,
                                       Register slot, Register feedback,
                                       bool is_polymorphic, Label* miss) {
  // feedback initially contains the feedback array
  Label next, next_loop, prepare_next;
  Label load_smi_map, compare_map;
  Label start_polymorphic;
  Label pop_and_miss;

  __ push(receiver);
  // Value, vector and slot are passed on the stack, so no need to save/restore
  // them.

  Register receiver_map = receiver;
  Register cached_map = vector;

  // Receiver might not be a heap object.
  __ JumpIfSmi(receiver, &load_smi_map);
  __ mov(receiver_map, FieldOperand(receiver, 0));
  __ bind(&compare_map);
  __ mov(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));
  // A named keyed store might have a 2-element array; all other cases can
  // count on an array with at least 2 {map, handler} pairs, so they can go
  // right into polymorphic array handling.
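  // Illustration only: the polymorphic feedback array is laid out as
  //   [ map0 (WeakCell), handler0, map1 (WeakCell), handler1, ... ],
  // which is why the polymorphic loop below starts its counter at 2 and
  // steps it by 2.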
  __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &start_polymorphic);

  // found, now call handler.
  Register handler = feedback;
  DCHECK(handler.is(StoreWithVectorDescriptor::ValueRegister()));
  __ mov(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
  __ pop(receiver);
  __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);

  // Polymorphic, we have to loop from 2 to N
  __ bind(&start_polymorphic);
  __ push(key);
  Register counter = key;
  __ mov(counter, Immediate(Smi::FromInt(2)));

  if (!is_polymorphic) {
    // If is_polymorphic is false, we may only have a two element array.
    // Check against length now in that case.
    __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
    __ j(greater_equal, &pop_and_miss);
  }

  __ bind(&next_loop);
  __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
                                  FixedArray::kHeaderSize));
  __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &prepare_next);
  __ mov(handler, FieldOperand(feedback, counter, times_half_pointer_size,
                               FixedArray::kHeaderSize + kPointerSize));
  __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
  __ pop(key);
  __ pop(receiver);
  __ jmp(handler);

  __ bind(&prepare_next);
  __ add(counter, Immediate(Smi::FromInt(2)));
  __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
  __ j(less, &next_loop);

  // We exhausted our array of map handler pairs.
  __ bind(&pop_and_miss);
  __ pop(key);
  __ pop(receiver);
  __ jmp(miss);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}


static void HandleMonomorphicStoreCase(MacroAssembler* masm, Register receiver,
                                       Register key, Register vector,
                                       Register slot, Register weak_cell,
                                       Label* miss) {
  // The store ic value is on the stack.
  DCHECK(weak_cell.is(StoreWithVectorDescriptor::ValueRegister()));

  // feedback initially contains the feedback array
  Label compare_smi_map;

  // Move the weak map into the weak_cell register.
  Register ic_map = weak_cell;
  __ mov(ic_map, FieldOperand(weak_cell, WeakCell::kValueOffset));

  // Receiver might not be a heap object.
  __ JumpIfSmi(receiver, &compare_smi_map);
  __ cmp(ic_map, FieldOperand(receiver, 0));
  __ j(not_equal, miss);
  __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize));
  __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
  // jump to the handler.
  __ jmp(weak_cell);

  // In microbenchmarks, it made sense to unroll this code so that the call to
  // the handler is duplicated for a HeapObject receiver and a Smi receiver.
  __ bind(&compare_smi_map);
  __ CompareRoot(ic_map, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, miss);
  __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize));
  __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
  // jump to the handler.
  __ jmp(weak_cell);
}

void KeyedStoreICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}

void KeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}


static void HandlePolymorphicKeyedStoreCase(MacroAssembler* masm,
                                            Register receiver, Register key,
                                            Register vector, Register slot,
                                            Register feedback, Label* miss) {
  // feedback initially contains the feedback array
  Label next, next_loop, prepare_next;
  Label load_smi_map, compare_map;
  Label transition_call;
  Label pop_and_miss;

  __ push(receiver);
  // Value, vector and slot are passed on the stack, so no need to save/restore
  // them.

  Register receiver_map = receiver;
  Register cached_map = vector;

  // Receiver might not be a heap object.
  __ JumpIfSmi(receiver, &load_smi_map);
  __ mov(receiver_map, FieldOperand(receiver, 0));
  __ bind(&compare_map);

  // Polymorphic, we have to loop from 0 to N - 1
  __ push(key);
  // Current stack layout:
  // - esp[0]    -- key
  // - esp[4]    -- receiver
  // - esp[8]    -- return address
  // - esp[12]   -- vector
  // - esp[16]   -- slot
  // - esp[20]   -- value
  //
  // Required stack layout for handler call (see StoreWithVectorDescriptor):
  // - esp[0]    -- return address
  // - esp[4]    -- vector
  // - esp[8]    -- slot
  // - esp[12]   -- value
  // - receiver, key, handler in registers.
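  //
  // Illustration only: unlike the named-store case above, the keyed-store
  // feedback array holds triples
  //   [ map (WeakCell), transition map (WeakCell) or undefined, handler, ... ],
  // which is why the loop below reads three consecutive slots and steps the
  // counter by 3.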
  Register counter = key;
  __ mov(counter, Immediate(Smi::kZero));
  __ bind(&next_loop);
  __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
                                  FixedArray::kHeaderSize));
  __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &prepare_next);
  __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
                                  FixedArray::kHeaderSize + kPointerSize));
  __ CompareRoot(cached_map, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &transition_call);
  __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
                                FixedArray::kHeaderSize + 2 * kPointerSize));
  __ pop(key);
  __ pop(receiver);
  __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
  __ jmp(feedback);

  __ bind(&transition_call);
  // Current stack layout:
  // - esp[0]    -- key
  // - esp[4]    -- receiver
  // - esp[8]    -- return address
  // - esp[12]   -- vector
  // - esp[16]   -- slot
  // - esp[20]   -- value
  //
  // Required stack layout for handler call (see StoreTransitionDescriptor):
  // - esp[0]    -- return address
  // - esp[4]    -- vector
  // - esp[8]    -- slot
  // - esp[12]   -- value
  // - receiver, key, map, handler in registers.
  __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
                                FixedArray::kHeaderSize + 2 * kPointerSize));
  __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));

  __ mov(cached_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  // The weak cell may have been cleared.
  __ JumpIfSmi(cached_map, &pop_and_miss);
  DCHECK(!cached_map.is(StoreTransitionDescriptor::MapRegister()));
  __ mov(StoreTransitionDescriptor::MapRegister(), cached_map);

  // Call store transition handler using StoreTransitionDescriptor calling
  // convention.
  __ pop(key);
  __ pop(receiver);
  // Ensure that the transition handler we are going to call has the same
  // number of stack arguments which means that we don't have to adapt them
  // before the call.
  STATIC_ASSERT(StoreWithVectorDescriptor::kStackArgumentsCount == 3);
  STATIC_ASSERT(StoreTransitionDescriptor::kStackArgumentsCount == 3);
  STATIC_ASSERT(StoreWithVectorDescriptor::kParameterCount -
                    StoreWithVectorDescriptor::kValue ==
                StoreTransitionDescriptor::kParameterCount -
                    StoreTransitionDescriptor::kValue);
  STATIC_ASSERT(StoreWithVectorDescriptor::kParameterCount -
                    StoreWithVectorDescriptor::kSlot ==
                StoreTransitionDescriptor::kParameterCount -
                    StoreTransitionDescriptor::kSlot);
  STATIC_ASSERT(StoreWithVectorDescriptor::kParameterCount -
                    StoreWithVectorDescriptor::kVector ==
                StoreTransitionDescriptor::kParameterCount -
                    StoreTransitionDescriptor::kVector);
  __ jmp(feedback);

  __ bind(&prepare_next);
  __ add(counter, Immediate(Smi::FromInt(3)));
  __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
  __ j(less, &next_loop);

  // We exhausted our array of map handler pairs.
  __ bind(&pop_and_miss);
  __ pop(key);
  __ pop(receiver);
  __ jmp(miss);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}

void KeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = StoreWithVectorDescriptor::ReceiverRegister();  // edx
  Register key = StoreWithVectorDescriptor::NameRegister();           // ecx
  Register value = StoreWithVectorDescriptor::ValueRegister();        // eax
  Register vector = StoreWithVectorDescriptor::VectorRegister();      // ebx
  Register slot = StoreWithVectorDescriptor::SlotRegister();          // edi
  Label miss;

  if (StoreWithVectorDescriptor::kPassLastArgsOnStack) {
    // Current stack layout:
    // - esp[8]    -- value
    // - esp[4]    -- slot
    // - esp[0]    -- return address
    STATIC_ASSERT(StoreDescriptor::kStackArgumentsCount == 2);
    STATIC_ASSERT(StoreWithVectorDescriptor::kStackArgumentsCount == 3);
    if (in_frame) {
      __ RecordComment("[ StoreDescriptor -> StoreWithVectorDescriptor");
      // If the vector is not on the stack, then insert the vector beneath
      // the return address in order to prepare for calling the handler with
      // the StoreWithVector calling convention.
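      // Illustration only: the two instructions below turn the stack from
      //   [ ret, slot, value ] into [ ret, vector, slot, value ]
      // by duplicating the return address and overwriting its old slot with
      // the vector.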
      __ push(Operand(esp, 0));
      __ mov(Operand(esp, 4), StoreWithVectorDescriptor::VectorRegister());
      __ RecordComment("]");
    } else {
      __ mov(vector, Operand(esp, 1 * kPointerSize));
    }
    __ mov(slot, Operand(esp, 2 * kPointerSize));
  }

  Register scratch = value;
  __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
                               FixedArray::kHeaderSize));

  // Is it a weak cell?
  Label try_array;
  Label not_array, smi_key, key_okay;
  __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
  __ j(not_equal, &try_array);
  HandleMonomorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss);

  // Is it a fixed array?
  __ bind(&try_array);
  __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);
  HandlePolymorphicKeyedStoreCase(masm, receiver, key, vector, slot, scratch,
                                  &miss);

  __ bind(&not_array);
  Label try_poly_name;
  __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &try_poly_name);

  Handle<Code> megamorphic_stub =
      KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
  __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);

  __ bind(&try_poly_name);
  // We might have a name in feedback, and a fixed array in the next slot.
  __ cmp(key, scratch);
  __ j(not_equal, &miss);
  // If the name comparison succeeded, we know we have a fixed array with
  // at least one map/handler pair.
  __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
                               FixedArray::kHeaderSize + kPointerSize));
  HandlePolymorphicStoreCase(masm, receiver, key, vector, slot, scratch, false,
                             &miss);

  __ bind(&miss);
  KeyedStoreIC::GenerateMiss(masm);
}


void CallICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(ebx);
  CallICStub stub(isolate(), state());
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    ProfileEntryHookStub stub(masm->isolate());
    masm->CallStub(&stub);
  }
}


void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // Save volatile registers.
  const int kNumSavedRegisters = 3;
  __ push(eax);
  __ push(ecx);
  __ push(edx);

  // Calculate and push the original stack pointer.
  __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
  __ push(eax);

  // Retrieve our return address and use it to calculate the calling
  // function's address.
  __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
  __ sub(eax, Immediate(Assembler::kCallInstructionLength));
  __ push(eax);

  // Call the entry hook.
  DCHECK(isolate()->function_entry_hook() != NULL);
  __ call(FUNCTION_ADDR(isolate()->function_entry_hook()),
          RelocInfo::RUNTIME_ENTRY);
  __ add(esp, Immediate(2 * kPointerSize));

  // Restore volatile registers.
  __ pop(edx);
  __ pop(ecx);
  __ pop(eax);

  __ ret(0);
}


template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(masm->isolate(),
           GetInitialFastElementsKind(),
           mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmp(edx, kind);
      __ j(not_equal, &next);
      T stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}


static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
  // edx - kind (if mode != DISABLE_ALLOCATION_SITES)
  // eax - number of arguments
  // edi - constructor?
  // esp[0] - return address
  // esp[4] - last argument
  Label normal_sequence;
  if (mode == DONT_OVERRIDE) {
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
    STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
    STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
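    // Illustration only: per the asserts above, packed kinds are even and
    // their holey counterparts are the next odd value (e.g. FAST_ELEMENTS
    // == 2, FAST_HOLEY_ELEMENTS == 3), so bit 0 of the kind signals
    // holeyness and inc-ing a packed kind produces its holey variant.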

    // is the low bit set? If so, we are holey and that is good.
    __ test_b(edx, Immediate(1));
    __ j(not_zero, &normal_sequence);
  }

  // look at the first argument
  __ mov(ecx, Operand(esp, kPointerSize));
  __ test(ecx, ecx);
  __ j(zero, &normal_sequence);

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
                                                  holey_initial,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);

    __ bind(&normal_sequence);
    ArraySingleArgumentConstructorStub stub(masm->isolate(),
                                            initial,
                                            DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    // We are going to create a holey array, but our kind is non-holey.
    // Fix kind and retry.
    __ inc(edx);

    if (FLAG_debug_code) {
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
      __ Assert(equal, kExpectedAllocationSite);
    }

    // Save the resulting elements kind in type info. We can't just store edx
    // in the AllocationSite::transition_info field because elements kind is
    // restricted to a portion of the field; the upper bits need to be left
    // alone.
    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
    __ add(FieldOperand(ebx, AllocationSite::kTransitionInfoOffset),
           Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));

    __ bind(&normal_sequence);
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmp(edx, kind);
      __ j(not_equal, &next);
      ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}


template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
  int to_index = GetSequenceIndexFromFastElementsKind(
      TERMINAL_FAST_ELEMENTS_KIND);
  for (int i = 0; i <= to_index; ++i) {
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    T stub(isolate, kind);
    stub.GetCode();
    if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
      T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
      stub1.GetCode();
    }
  }
}

void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayNArgumentsConstructorStub stub(isolate);
  stub.GetCode();

  ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
  for (int i = 0; i < 2; i++) {
    // For internal arrays we only need a few things
    InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
    stubh1.GetCode();
    InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
    stubh2.GetCode();
  }
}

void ArrayConstructorStub::GenerateDispatchToArrayStub(
    MacroAssembler* masm, AllocationSiteOverrideMode mode) {
  Label not_zero_case, not_one_case;
  __ test(eax, eax);
  __ j(not_zero, &not_zero_case);
  CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);

  __ bind(&not_zero_case);
  __ cmp(eax, 1);
  __ j(greater, &not_one_case);
  CreateArrayDispatchOneArgument(masm, mode);

  __ bind(&not_one_case);
  ArrayNArgumentsConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argc (only if argument_count() is ANY or MORE_THAN_ONE)
  //  -- ebx : AllocationSite or undefined
  //  -- edi : constructor
  //  -- edx : Original constructor
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // This test will catch both a NULL pointer and a Smi.
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);

    // We should either have undefined in ebx or a valid AllocationSite
    __ AssertUndefinedOrAllocationSite(ebx);
  }

  Label subclassing;

  // Enter the context of the Array function.
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  __ cmp(edx, edi);
  __ j(not_equal, &subclassing);

  Label no_info;
  // If the feedback vector is the undefined value call an array constructor
  // that doesn't use AllocationSites.
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(equal, &no_info);

  // Only look at the lower 16 bits of the transition info.
  __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset));
  __ SmiUntag(edx);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);

  // Subclassing.
  __ bind(&subclassing);
  __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
  __ add(eax, Immediate(3));
  __ PopReturnAddressTo(ecx);
  __ Push(edx);
  __ Push(ebx);
  __ PushReturnAddressFrom(ecx);
  __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
}


void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ test(eax, eax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmp(eax, 1);
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array; look at the first argument.
    __ mov(ecx, Operand(esp, kPointerSize));
    __ test(ecx, ecx);
    __ j(zero, &normal_sequence);

    InternalArraySingleArgumentConstructorStub
        stub1_holey(isolate(), GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  ArrayNArgumentsConstructorStub stubN(isolate());
  __ TailCallStub(&stubN);
}


void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argc
  //  -- edi : constructor
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // This test will catch both a NULL pointer and a Smi.
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind
  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into ecx. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(ecx);

  if (FLAG_debug_code) {
    Label done;
    __ cmp(ecx, Immediate(FAST_ELEMENTS));
    __ j(equal, &done);
    __ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS));
    __ Assert(equal,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmp(ecx, Immediate(FAST_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}


void FastNewObjectStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edi    : target
  //  -- edx    : new target
  //  -- esi    : context
  //  -- esp[0] : return address
  // -----------------------------------
  __ AssertFunction(edi);
  __ AssertReceiver(edx);

  // Verify that the new target is a JSFunction.
  Label new_object;
  __ CmpObjectType(edx, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &new_object);

  // Load the initial map and verify that it's in fact a map.
  __ mov(ecx, FieldOperand(edx, JSFunction::kPrototypeOrInitialMapOffset));
  __ JumpIfSmi(ecx, &new_object);
  __ CmpObjectType(ecx, MAP_TYPE, ebx);
  __ j(not_equal, &new_object);

  // Fall back to runtime if the target differs from the new target's
  // initial map constructor.
  __ cmp(edi, FieldOperand(ecx, Map::kConstructorOrBackPointerOffset));
  __ j(not_equal, &new_object);

  // Allocate the JSObject on the heap.
  Label allocate, done_allocate;
  __ movzx_b(ebx, FieldOperand(ecx, Map::kInstanceSizeOffset));
  __ lea(ebx, Operand(ebx, times_pointer_size, 0));
  __ Allocate(ebx, eax, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
  __ bind(&done_allocate);

  // Initialize the JSObject fields.
  __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         masm->isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         masm->isolate()->factory()->empty_fixed_array());
  STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
  __ lea(ebx, FieldOperand(eax, JSObject::kHeaderSize));
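  // Illustration only: the three stores above fill the 3-word JSObject
  // header (map, properties, elements) asserted above, so ebx now points at
  // the first in-object property.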

  // ----------- S t a t e -------------
  //  -- eax    : result (tagged)
  //  -- ebx    : result fields (untagged)
  //  -- edi    : result end (untagged)
  //  -- ecx    : initial map
  //  -- esi    : context
  //  -- esp[0] : return address
  // -----------------------------------

  // Perform in-object slack tracking if requested.
  Label slack_tracking;
  STATIC_ASSERT(Map::kNoSlackTracking == 0);
  __ test(FieldOperand(ecx, Map::kBitField3Offset),
          Immediate(Map::ConstructionCounter::kMask));
  __ j(not_zero, &slack_tracking, Label::kNear);
  {
    // Initialize all in-object fields with undefined.
    __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
    __ InitializeFieldsWithFiller(ebx, edi, edx);
    __ Ret();
  }
  __ bind(&slack_tracking);
  {
    // Decrease generous allocation count.
    STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
    __ sub(FieldOperand(ecx, Map::kBitField3Offset),
           Immediate(1 << Map::ConstructionCounter::kShift));

    // Initialize the in-object fields with undefined.
    __ movzx_b(edx, FieldOperand(ecx, Map::kUnusedPropertyFieldsOffset));
    __ neg(edx);
    __ lea(edx, Operand(edi, edx, times_pointer_size, 0));
    __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
    __ InitializeFieldsWithFiller(ebx, edx, edi);

    // Initialize the remaining (reserved) fields with one pointer filler map.
    __ movzx_b(edx, FieldOperand(ecx, Map::kUnusedPropertyFieldsOffset));
    __ lea(edx, Operand(ebx, edx, times_pointer_size, 0));
    __ LoadRoot(edi, Heap::kOnePointerFillerMapRootIndex);
    __ InitializeFieldsWithFiller(ebx, edx, edi);

    // Check if we can finalize the instance size.
    Label finalize;
    STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
    __ test(FieldOperand(ecx, Map::kBitField3Offset),
            Immediate(Map::ConstructionCounter::kMask));
    __ j(zero, &finalize, Label::kNear);
    __ Ret();

    // Finalize the instance size.
    __ bind(&finalize);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(eax);
      __ Push(ecx);
      __ CallRuntime(Runtime::kFinalizeInstanceSize);
      __ Pop(eax);
    }
    __ Ret();
  }

  // Fall back to %AllocateInNewSpace.
  __ bind(&allocate);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ SmiTag(ebx);
    __ Push(ecx);
    __ Push(ebx);
    __ CallRuntime(Runtime::kAllocateInNewSpace);
    __ Pop(ecx);
  }
  __ movzx_b(ebx, FieldOperand(ecx, Map::kInstanceSizeOffset));
  __ lea(edi, Operand(eax, ebx, times_pointer_size, 0));
  STATIC_ASSERT(kHeapObjectTag == 1);
  __ dec(edi);
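  // Illustration only: eax is a tagged pointer (kHeapObjectTag == 1), so
  // eax + instance_size overshoots the untagged object end by one byte; the
  // dec above compensates.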
  __ jmp(&done_allocate);

  // Fall back to %NewObject.
  __ bind(&new_object);
  __ PopReturnAddressTo(ecx);
  __ Push(edi);
  __ Push(edx);
  __ PushReturnAddressFrom(ecx);
  __ TailCallRuntime(Runtime::kNewObject);
}


void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edi    : function
  //  -- esi    : context
  //  -- ebp    : frame pointer
  //  -- esp[0] : return address
  // -----------------------------------
  __ AssertFunction(edi);

  // Make edx point to the JavaScript frame.
  __ mov(edx, ebp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ mov(edx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    Label ok;
    __ cmp(edi, Operand(edx, StandardFrameConstants::kFunctionOffset));
    __ j(equal, &ok);
    __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
    __ bind(&ok);
  }

  // Check if we have rest parameters (only possible if we have an
  // arguments adaptor frame below the function frame).
  Label no_rest_parameters;
  __ mov(ebx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(ebx, CommonFrameConstants::kContextOrFrameTypeOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &no_rest_parameters, Label::kNear);

  // Check if the arguments adaptor frame contains more arguments than
  // specified by the function's internal formal parameter count.
  Label rest_parameters;
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ sub(eax,
         FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ j(greater, &rest_parameters);

  // Return an empty rest parameter array.
  __ bind(&no_rest_parameters);
  {
    // ----------- S t a t e -------------
    //  -- esi    : context
    //  -- esp[0] : return address
    // -----------------------------------

    // Allocate an empty rest parameter array.
    Label allocate, done_allocate;
    __ Allocate(JSArray::kSize, eax, edx, ecx, &allocate, NO_ALLOCATION_FLAGS);
    __ bind(&done_allocate);

    // Setup the rest parameter array in eax.
    __ LoadGlobalFunction(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, ecx);
    __ mov(FieldOperand(eax, JSArray::kMapOffset), ecx);
    __ mov(ecx, isolate()->factory()->empty_fixed_array());
    __ mov(FieldOperand(eax, JSArray::kPropertiesOffset), ecx);
    __ mov(FieldOperand(eax, JSArray::kElementsOffset), ecx);
    __ mov(FieldOperand(eax, JSArray::kLengthOffset), Immediate(Smi::kZero));
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
    __ Ret();

    // Fall back to %AllocateInNewSpace.
    __ bind(&allocate);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(Smi::FromInt(JSArray::kSize));
      __ CallRuntime(Runtime::kAllocateInNewSpace);
    }
    __ jmp(&done_allocate);
  }

  __ bind(&rest_parameters);
  {
    // Compute the pointer to the first rest parameter (skipping the receiver).
    __ lea(ebx,
           Operand(ebx, eax, times_half_pointer_size,
                   StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
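    // Illustration only: eax holds a Smi (count << 1), so scaling it with
    // times_half_pointer_size yields count * kPointerSize; the constant
    // displacement accounts for the caller SP offset and the skipped
    // receiver slot.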

    // ----------- S t a t e -------------
    //  -- esi    : context
    //  -- eax    : number of rest parameters (tagged)
    //  -- ebx    : pointer to first rest parameters
    //  -- esp[0] : return address
    // -----------------------------------

    // Allocate space for the rest parameter array plus the backing store.
    Label allocate, done_allocate;
    __ lea(ecx, Operand(eax, times_half_pointer_size,
                        JSArray::kSize + FixedArray::kHeaderSize));
    __ Allocate(ecx, edx, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
    __ bind(&done_allocate);

    // Setup the elements array in edx.
    __ mov(FieldOperand(edx, FixedArray::kMapOffset),
           isolate()->factory()->fixed_array_map());
    __ mov(FieldOperand(edx, FixedArray::kLengthOffset), eax);
    {
      Label loop, done_loop;
      __ Move(ecx, Smi::kZero);
      __ bind(&loop);
      __ cmp(ecx, eax);
      __ j(equal, &done_loop, Label::kNear);
      __ mov(edi, Operand(ebx, 0 * kPointerSize));
      __ mov(FieldOperand(edx, ecx, times_half_pointer_size,
                          FixedArray::kHeaderSize),
             edi);
      __ sub(ebx, Immediate(1 * kPointerSize));
      __ add(ecx, Immediate(Smi::FromInt(1)));
      __ jmp(&loop);
      __ bind(&done_loop);
    }

    // Setup the rest parameter array in edi.
    __ lea(edi,
           Operand(edx, eax, times_half_pointer_size, FixedArray::kHeaderSize));
    __ LoadGlobalFunction(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, ecx);
    __ mov(FieldOperand(edi, JSArray::kMapOffset), ecx);
    __ mov(FieldOperand(edi, JSArray::kPropertiesOffset),
           isolate()->factory()->empty_fixed_array());
    __ mov(FieldOperand(edi, JSArray::kElementsOffset), edx);
    __ mov(FieldOperand(edi, JSArray::kLengthOffset), eax);
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
    __ mov(eax, edi);
    __ Ret();

    // Fall back to %AllocateInNewSpace (if not too big).
    Label too_big_for_new_space;
    __ bind(&allocate);
    __ cmp(ecx, Immediate(kMaxRegularHeapObjectSize));
    __ j(greater, &too_big_for_new_space);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(ecx);
      __ Push(eax);
      __ Push(ebx);
      __ Push(ecx);
      __ CallRuntime(Runtime::kAllocateInNewSpace);
      __ mov(edx, eax);
      __ Pop(ebx);
      __ Pop(eax);
    }
    __ jmp(&done_allocate);

    // Fall back to %NewRestParameter.
    __ bind(&too_big_for_new_space);
    __ PopReturnAddressTo(ecx);
    // We reload the function from the caller frame due to register pressure
    // within this stub. This is the slow path, hence reloading is preferable.
    if (skip_stub_frame()) {
      // For Ignition we need to skip the handler/stub frame to reach the
      // JavaScript frame for the function.
      __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
      __ Push(Operand(edx, StandardFrameConstants::kFunctionOffset));
    } else {
      __ Push(Operand(ebp, StandardFrameConstants::kFunctionOffset));
    }
    __ PushReturnAddressFrom(ecx);
    __ TailCallRuntime(Runtime::kNewRestParameter);
  }
}


void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edi    : function
  //  -- esi    : context
  //  -- ebp    : frame pointer
  //  -- esp[0] : return address
  // -----------------------------------
  __ AssertFunction(edi);

  // Make ecx point to the JavaScript frame.
  __ mov(ecx, ebp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ mov(ecx, Operand(ecx, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    Label ok;
    __ cmp(edi, Operand(ecx, StandardFrameConstants::kFunctionOffset));
    __ j(equal, &ok);
    __ Abort(kInvalidFrameForFastNewSloppyArgumentsStub);
    __ bind(&ok);
  }

  // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
  __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ebx,
         FieldOperand(ebx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ lea(edx, Operand(ecx, ebx, times_half_pointer_size,
                      StandardFrameConstants::kCallerSPOffset));

  // ebx : number of parameters (tagged)
  // edx : parameters pointer
  // edi : function
  // ecx : JavaScript frame pointer.
  // esp[0] : return address

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ mov(eax, Operand(ecx, StandardFrameConstants::kCallerFPOffset));
  __ mov(eax, Operand(eax, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame, Label::kNear);

  // No adaptor, parameter count = argument count.
  __ mov(ecx, ebx);
  __ push(ebx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ push(ebx);
  __ mov(edx, Operand(ecx, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));

  // ebx = parameter count (tagged)
  // ecx = argument count (smi-tagged)
  // Compute the mapped parameter count = min(ebx, ecx) in ebx.
  __ cmp(ebx, ecx);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ mov(ebx, ecx);

  // Save mapped parameter count and function.
  __ bind(&try_allocate);
  __ push(edi);
  __ push(ebx);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map: has 2 extra words containing the context and backing
  //    store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  __ test(ebx, ebx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ add(ebx, Immediate(JSSloppyArgumentsObject::kSize));
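
  // Illustration only: at this point ebx holds the combined size
  //   [mapped ? kParameterMapHeaderSize + mapped_count * kPointerSize : 0]
  //   + FixedArray::kHeaderSize + argument_count * kPointerSize
  //   + JSSloppyArgumentsObject::kSize
  // (ebx and ecx are Smis, so the times_2 scalings above multiply the
  // untagged counts by kPointerSize).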
4074 
4075   // Do the allocation of all three objects in one go.
4076   __ Allocate(ebx, eax, edi, no_reg, &runtime, NO_ALLOCATION_FLAGS);
4077 
4078   // eax = address of new object(s) (tagged)
4079   // ecx = argument count (smi-tagged)
4080   // esp[0] = mapped parameter count (tagged)
4081   // esp[4] = function
4082   // esp[8] = parameter count (tagged)
4083   // Get the arguments map from the current native context into edi.
4084   Label has_mapped_parameters, instantiate;
4085   __ mov(edi, NativeContextOperand());
4086   __ mov(ebx, Operand(esp, 0 * kPointerSize));
4087   __ test(ebx, ebx);
4088   __ j(not_zero, &has_mapped_parameters, Label::kNear);
4089   __ mov(
4090       edi,
4091       Operand(edi, Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX)));
4092   __ jmp(&instantiate, Label::kNear);
4093 
4094   __ bind(&has_mapped_parameters);
4095   __ mov(edi, Operand(edi, Context::SlotOffset(
4096                                Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX)));
4097   __ bind(&instantiate);
4098 
4099   // eax = address of new object (tagged)
4100   // ebx = mapped parameter count (tagged)
4101   // ecx = argument count (smi-tagged)
4102   // edi = address of arguments map (tagged)
4103   // esp[0] = mapped parameter count (tagged)
4104   // esp[4] = function
4105   // esp[8] = parameter count (tagged)
4106   // Copy the JS object part.
4107   __ mov(FieldOperand(eax, JSObject::kMapOffset), edi);
4108   __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
4109          masm->isolate()->factory()->empty_fixed_array());
4110   __ mov(FieldOperand(eax, JSObject::kElementsOffset),
4111          masm->isolate()->factory()->empty_fixed_array());
4112 
4113   // Set up the callee in-object property.
4114   STATIC_ASSERT(JSSloppyArgumentsObject::kCalleeIndex == 1);
4115   __ mov(edi, Operand(esp, 1 * kPointerSize));
4116   __ AssertNotSmi(edi);
4117   __ mov(FieldOperand(eax, JSSloppyArgumentsObject::kCalleeOffset), edi);
4118 
4119   // Use the length (smi tagged) and set that as an in-object property too.
4120   __ AssertSmi(ecx);
4121   __ mov(FieldOperand(eax, JSSloppyArgumentsObject::kLengthOffset), ecx);
4122 
4123   // Set up the elements pointer in the allocated arguments object.
4124   // If we allocated a parameter map, edi will point there, otherwise to the
4125   // backing store.
4126   __ lea(edi, Operand(eax, JSSloppyArgumentsObject::kSize));
4127   __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
4128 
4129   // eax = address of new object (tagged)
4130   // ebx = mapped parameter count (tagged)
4131   // ecx = argument count (tagged)
4132   // edx = address of receiver argument
4133   // edi = address of parameter map or backing store (tagged)
4134   // esp[0] = mapped parameter count (tagged)
4135   // esp[4] = function
4136   // esp[8] = parameter count (tagged)
4137   // Free two registers.
4138   __ push(edx);
4139   __ push(eax);
4140 
4141   // Initialize parameter map. If there are no mapped arguments, we're done.
4142   Label skip_parameter_map;
4143   __ test(ebx, ebx);
4144   __ j(zero, &skip_parameter_map);
4145 
4146   __ mov(FieldOperand(edi, FixedArray::kMapOffset),
4147          Immediate(isolate()->factory()->sloppy_arguments_elements_map()));
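  // Adding Smi::FromInt(2) as a displacement to the tagged count computes
  // the tagged value mapped_count + 2 (the map length includes the two
  // header slots) without untagging.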
  __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
  __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameters thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
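  // E.g. with parameter_count == 3 and mapped_parameter_count == 2, map
  // slot 0 receives index MIN_CONTEXT_SLOTS + 2 and map slot 1 receives
  // MIN_CONTEXT_SLOTS + 1.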
  Label parameters_loop, parameters_test;
  __ push(ecx);
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
  __ add(ebx, Operand(esp, 5 * kPointerSize));
  __ sub(ebx, eax);
  __ mov(ecx, isolate()->factory()->the_hole_value());
  __ mov(edx, edi);
  __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
  // eax = loop variable (tagged)
  // ebx = mapping index (tagged)
  // ecx = the hole value
  // edx = address of parameter map (tagged)
  // edi = address of backing store (tagged)
  // esp[0] = argument count (tagged)
  // esp[4] = address of new object (tagged)
  // esp[8] = address of receiver argument
  // esp[12] = mapped parameter count (tagged)
  // esp[16] = function
  // esp[20] = parameter count (tagged)
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
  __ sub(eax, Immediate(Smi::FromInt(1)));
  __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx);
  __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx);
  __ add(ebx, Immediate(Smi::FromInt(1)));
  __ bind(&parameters_test);
  __ test(eax, eax);
  __ j(not_zero, &parameters_loop, Label::kNear);
  __ pop(ecx);

  __ bind(&skip_parameter_map);

  // ecx = argument count (tagged)
  // edi = address of backing store (tagged)
  // esp[0] = address of new object (tagged)
  // esp[4] = address of receiver argument
  // esp[8] = mapped parameter count (tagged)
  // esp[12] = function
  // esp[16] = parameter count (tagged)
  // Copy arguments header and remaining slots (if there are any).
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate()->factory()->fixed_array_map()));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);

  Label arguments_loop, arguments_test;
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(edx, Operand(esp, 1 * kPointerSize));
  __ sub(edx, ebx);  // Is there a smarter way to do negative scaling?
  __ sub(edx, ebx);
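  // ebx holds the tagged mapped count, i.e. 2 * count, so subtracting it
  // twice moves edx down by count * kPointerSize. ia32 addressing modes have
  // no negative scale factor, hence the two subs above instead of a scaled
  // index.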
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ sub(edx, Immediate(kPointerSize));
  __ mov(eax, Operand(edx, 0));
  __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax);
  __ add(ebx, Immediate(Smi::FromInt(1)));

  __ bind(&arguments_test);
  __ cmp(ebx, ecx);
  __ j(less, &arguments_loop, Label::kNear);

  // Restore.
  __ pop(eax);  // Address of arguments object.
  __ Drop(4);

  // Return.
  __ ret(0);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ pop(eax);   // Remove saved mapped parameter count.
  __ pop(edi);   // Pop saved function.
  __ pop(eax);   // Remove saved parameter count.
  __ pop(eax);   // Pop return address.
  __ push(edi);  // Push function.
  __ push(edx);  // Push parameters pointer.
  __ push(ecx);  // Push parameter count.
  __ push(eax);  // Push return address.
  __ TailCallRuntime(Runtime::kNewSloppyArguments);
}


void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edi    : function
  //  -- esi    : context
  //  -- ebp    : frame pointer
  //  -- esp[0] : return address
  // -----------------------------------
  __ AssertFunction(edi);

  // Make edx point to the JavaScript frame.
  __ mov(edx, ebp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ mov(edx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    Label ok;
    __ cmp(edi, Operand(edx, StandardFrameConstants::kFunctionOffset));
    __ j(equal, &ok);
    __ Abort(kInvalidFrameForFastNewStrictArgumentsStub);
    __ bind(&ok);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ mov(ebx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(ebx, CommonFrameConstants::kContextOrFrameTypeOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &arguments_adaptor, Label::kNear);
  {
    __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    __ mov(eax,
           FieldOperand(eax, SharedFunctionInfo::kFormalParameterCountOffset));
    __ lea(ebx,
           Operand(edx, eax, times_half_pointer_size,
                   StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
  }
  __ jmp(&arguments_done, Label::kNear);
  __ bind(&arguments_adaptor);
  {
    __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ lea(ebx,
           Operand(ebx, eax, times_half_pointer_size,
                   StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
  }
  __ bind(&arguments_done);

  // ----------- S t a t e -------------
  //  -- eax    : number of arguments (tagged)
  //  -- ebx    : pointer to the first argument
  //  -- esi    : context
  //  -- esp[0] : return address
  // -----------------------------------

  // Allocate space for the strict arguments object plus the backing store.
  Label allocate, done_allocate;
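  // eax holds the tagged argument count; scaling it by times_half_pointer_size
  // yields argc * kPointerSize, so ecx = JSStrictArgumentsObject::kSize +
  // FixedArray::kHeaderSize + argc * kPointerSize.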
  __ lea(ecx,
         Operand(eax, times_half_pointer_size,
                 JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
  __ Allocate(ecx, edx, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
  __ bind(&done_allocate);

  // Set up the elements array in edx.
  __ mov(FieldOperand(edx, FixedArray::kMapOffset),
         isolate()->factory()->fixed_array_map());
  __ mov(FieldOperand(edx, FixedArray::kLengthOffset), eax);
  {
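    // ebx points at the first argument, which sits highest on the stack, so
    // the copy walks ebx downwards while the smi index ecx counts up through
    // the elements.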
    Label loop, done_loop;
    __ Move(ecx, Smi::kZero);
    __ bind(&loop);
    __ cmp(ecx, eax);
    __ j(equal, &done_loop, Label::kNear);
    __ mov(edi, Operand(ebx, 0 * kPointerSize));
    __ mov(FieldOperand(edx, ecx, times_half_pointer_size,
                        FixedArray::kHeaderSize),
           edi);
    __ sub(ebx, Immediate(1 * kPointerSize));
    __ add(ecx, Immediate(Smi::FromInt(1)));
    __ jmp(&loop);
    __ bind(&done_loop);
  }

  // Set up the strict arguments object in edi.
  __ lea(edi,
         Operand(edx, eax, times_half_pointer_size, FixedArray::kHeaderSize));
  __ LoadGlobalFunction(Context::STRICT_ARGUMENTS_MAP_INDEX, ecx);
  __ mov(FieldOperand(edi, JSStrictArgumentsObject::kMapOffset), ecx);
  __ mov(FieldOperand(edi, JSStrictArgumentsObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(edi, JSStrictArgumentsObject::kElementsOffset), edx);
  __ mov(FieldOperand(edi, JSStrictArgumentsObject::kLengthOffset), eax);
  STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
  __ mov(eax, edi);
  __ Ret();

  // Fall back to %AllocateInNewSpace (if not too big).
  Label too_big_for_new_space;
  __ bind(&allocate);
  __ cmp(ecx, Immediate(kMaxRegularHeapObjectSize));
  __ j(greater, &too_big_for_new_space);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ SmiTag(ecx);
    __ Push(eax);
    __ Push(ebx);
    __ Push(ecx);
    __ CallRuntime(Runtime::kAllocateInNewSpace);
    __ mov(edx, eax);
    __ Pop(ebx);
    __ Pop(eax);
  }
  __ jmp(&done_allocate);

  // Fall back to %NewStrictArguments.
  __ bind(&too_big_for_new_space);
  __ PopReturnAddressTo(ecx);
  // We reload the function from the caller frame due to register pressure
  // within this stub. This is the slow path, hence reloading is preferable.
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
    __ Push(Operand(edx, StandardFrameConstants::kFunctionOffset));
  } else {
    __ Push(Operand(ebp, StandardFrameConstants::kFunctionOffset));
  }
  __ PushReturnAddressFrom(ecx);
  __ TailCallRuntime(Runtime::kNewStrictArguments);
}


// Generates an Operand for saving parameters after PrepareCallApiFunction.
static Operand ApiParameterOperand(int index) {
  return Operand(esp, index * kPointerSize);
}


// Prepares stack to put arguments (aligns and so on). Reserves
// space for return value if needed (assumes the return value is a handle).
// Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
// etc. Saves context (esi). If space was reserved for return value then
// stores the pointer to the reserved slot into esi.
static void PrepareCallApiFunction(MacroAssembler* masm, int argc) {
  __ EnterApiExitFrame(argc);
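  // In debug builds, zap esi so that stray uses of the context after the
  // frame switch fail fast.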
  if (__ emit_debug_code()) {
    __ mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}


// Calls an API function.  Allocates HandleScope, extracts returned value
// from handle and propagates exceptions.  Clobbers ebx, edi and
// caller-save registers.  Restores context.  On return removes
// stack_space * kPointerSize (GCed).
static void CallApiFunctionAndReturn(MacroAssembler* masm,
                                     Register function_address,
                                     ExternalReference thunk_ref,
                                     Operand thunk_last_arg, int stack_space,
                                     Operand* stack_space_operand,
                                     Operand return_value_operand,
                                     Operand* context_restore_operand) {
  Isolate* isolate = masm->isolate();

  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate);
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address(isolate);
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address(isolate);

  DCHECK(edx.is(function_address));
  // Allocate HandleScope in callee-save registers.
  __ mov(ebx, Operand::StaticVariable(next_address));
  __ mov(edi, Operand::StaticVariable(limit_address));
  __ add(Operand::StaticVariable(level_address), Immediate(1));
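  // ebx now caches the scope's next handle and edi its limit; both are
  // callee-save across the C call and are used to close the scope below.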
  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, eax);
    __ mov(Operand(esp, 0),
           Immediate(ExternalReference::isolate_address(isolate)));
    __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  Label profiler_disabled;
  Label end_profiler_check;
  __ mov(eax, Immediate(ExternalReference::is_profiling_address(isolate)));
  __ cmpb(Operand(eax, 0), Immediate(0));
  __ j(zero, &profiler_disabled);

  // Additional parameter is the address of the actual callback function.
  __ mov(thunk_last_arg, function_address);
  // Call the api function.
  __ mov(eax, Immediate(thunk_ref));
  __ call(eax);
  __ jmp(&end_profiler_check);

  __ bind(&profiler_disabled);
  // Call the api function.
  __ call(function_address);
  __ bind(&end_profiler_check);

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, eax);
    __ mov(Operand(esp, 0),
           Immediate(ExternalReference::isolate_address(isolate)));
    __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  Label prologue;
  // Load the value from ReturnValue.
  __ mov(eax, return_value_operand);

  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  __ bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  __ mov(Operand::StaticVariable(next_address), ebx);
  __ sub(Operand::StaticVariable(level_address), Immediate(1));
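  // The flags from the sub above are still live: assert that the level
  // counter did not underflow.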
  __ Assert(above_equal, kInvalidHandleScopeLevel);
  __ cmp(edi, Operand::StaticVariable(limit_address));
  __ j(not_equal, &delete_allocated_handles);

  // Leave the API exit frame.
  __ bind(&leave_exit_frame);
  bool restore_context = context_restore_operand != nullptr;
  if (restore_context) {
    __ mov(esi, *context_restore_operand);
  }
  if (stack_space_operand != nullptr) {
    __ mov(ebx, *stack_space_operand);
  }
  __ LeaveApiExitFrame(!restore_context);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate);
  __ cmp(Operand::StaticVariable(scheduled_exception_address),
         Immediate(isolate->factory()->the_hole_value()));
  __ j(not_equal, &promote_scheduled_exception);

#if DEBUG
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = eax;
  Register map = ecx;

  __ JumpIfSmi(return_value, &ok, Label::kNear);
  __ mov(map, FieldOperand(return_value, HeapObject::kMapOffset));

  __ CmpInstanceType(map, LAST_NAME_TYPE);
  __ j(below_equal, &ok, Label::kNear);

  __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
  __ j(above_equal, &ok, Label::kNear);

  __ cmp(map, isolate->factory()->heap_number_map());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->undefined_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->true_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->false_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->null_value());
  __ j(equal, &ok, Label::kNear);

  __ Abort(kAPICallReturnedInvalidObject);

  __ bind(&ok);
#endif

  if (stack_space_operand != nullptr) {
    DCHECK_EQ(0, stack_space);
    __ pop(ecx);
    __ add(esp, ebx);
    __ jmp(ecx);
  } else {
    __ ret(stack_space * kPointerSize);
  }

  // Re-throw by promoting a scheduled exception.
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate);
  __ bind(&delete_allocated_handles);
  __ mov(Operand::StaticVariable(limit_address), edi);
  __ mov(edi, eax);
  __ mov(Operand(esp, 0),
         Immediate(ExternalReference::isolate_address(isolate)));
  __ mov(eax, Immediate(delete_extensions));
  __ call(eax);
  __ mov(eax, edi);
  __ jmp(&leave_exit_frame);
}

void CallApiCallbackStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edi                 : callee
  //  -- ebx                 : call_data
  //  -- ecx                 : holder
  //  -- edx                 : api_function_address
  //  -- esi                 : context
  //  --
  //  -- esp[0]              : return address
  //  -- esp[4]              : last argument
  //  -- ...
  //  -- esp[argc * 4]       : first argument
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  Register callee = edi;
  Register call_data = ebx;
  Register holder = ecx;
  Register api_function_address = edx;
  Register context = esi;
  Register return_address = eax;

  typedef FunctionCallbackArguments FCA;

  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
  STATIC_ASSERT(FCA::kCalleeIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);
  STATIC_ASSERT(FCA::kNewTargetIndex == 7);
  STATIC_ASSERT(FCA::kArgsLength == 8);
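  // After the pushes below, FunctionCallbackArguments slot i sits at
  // esp[i * kPointerSize] (holder at esp[0], ..., new target at esp[28]),
  // matching the indices asserted above; scratch captures the array base
  // before the return address is pushed back on top.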

  __ pop(return_address);

  // new target
  __ PushRoot(Heap::kUndefinedValueRootIndex);

  // context save.
  __ push(context);

  // callee
  __ push(callee);

  // call data
  __ push(call_data);

  Register scratch = call_data;
  if (!call_data_undefined()) {
    // return value
    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
    // return value default
    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
  } else {
    // call_data is undefined, so scratch (== call_data) already holds
    // undefined and can fill both return value slots.
    // return value
    __ push(scratch);
    // return value default
    __ push(scratch);
  }
  // isolate
  __ push(Immediate(reinterpret_cast<int>(masm->isolate())));
  // holder
  __ push(holder);

  __ mov(scratch, esp);

  // push return address
  __ push(return_address);

  if (!is_lazy()) {
    // load context from callee
    __ mov(context, FieldOperand(callee, JSFunction::kContextOffset));
  }

  // The API function gets a reference to the v8::Arguments. If the CPU
  // profiler is enabled, a wrapper function is called instead and the address
  // of the callback must be passed as an additional parameter, so always
  // allocate space for it.
  const int kApiArgc = 1 + 1;

  // Allocate the v8::Arguments structure in the arguments' space, since it's
  // not controlled by GC.
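  // Three words: the FunctionCallbackInfo fields implicit_args_, values_ and
  // length_, written to ApiParameterOperand(2..4) below.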
  const int kApiStackSpace = 3;

  PrepareCallApiFunction(masm, kApiArgc + kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  __ mov(ApiParameterOperand(2), scratch);
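  // values_ points at the first JS argument, which lies kArgsLength implicit
  // slots plus (argc - 1) argument slots above the implicit_args_ base.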
  __ add(scratch, Immediate((argc() + FCA::kArgsLength - 1) * kPointerSize));
  // FunctionCallbackInfo::values_.
  __ mov(ApiParameterOperand(3), scratch);
  // FunctionCallbackInfo::length_.
  __ Move(ApiParameterOperand(4), Immediate(argc()));

  // v8::InvocationCallback's argument.
  __ lea(scratch, ApiParameterOperand(2));
  __ mov(ApiParameterOperand(0), scratch);

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(masm->isolate());

  Operand context_restore_operand(ebp,
                                  (2 + FCA::kContextSaveIndex) * kPointerSize);
  // Stores return the first JS argument.
  int return_value_offset = 0;
  if (is_store()) {
    return_value_offset = 2 + FCA::kArgsLength;
  } else {
    return_value_offset = 2 + FCA::kReturnValueOffset;
  }
  Operand return_value_operand(ebp, return_value_offset * kPointerSize);
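  // argc is known statically here, so the stub returns with a fixed stack
  // adjustment; the dynamic length-operand path is set up and then
  // deliberately overridden with nullptr.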
  int stack_space = 0;
  Operand length_operand = ApiParameterOperand(4);
  Operand* stack_space_operand = &length_operand;
  stack_space = argc() + FCA::kArgsLength + 1;
  stack_space_operand = nullptr;
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           ApiParameterOperand(1), stack_space,
                           stack_space_operand, return_value_operand,
                           &context_restore_operand);
}


void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // Build v8::PropertyCallbackInfo::args_ array on the stack and push the
  // property name below the exit frame to make the GC aware of them.
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);

  Register receiver = ApiGetterDescriptor::ReceiverRegister();
  Register holder = ApiGetterDescriptor::HolderRegister();
  Register callback = ApiGetterDescriptor::CallbackRegister();
  Register scratch = ebx;
  DCHECK(!AreAliased(receiver, holder, callback, scratch));

  __ pop(scratch);  // Pop return address to extend the frame.
  __ push(receiver);
  __ push(FieldOperand(callback, AccessorInfo::kDataOffset));
  __ PushRoot(Heap::kUndefinedValueRootIndex);  // ReturnValue
  // ReturnValue default value
  __ PushRoot(Heap::kUndefinedValueRootIndex);
  __ push(Immediate(ExternalReference::isolate_address(isolate())));
  __ push(holder);
  __ push(Immediate(Smi::kZero));  // should_throw_on_error -> false
  __ push(FieldOperand(callback, AccessorInfo::kNameOffset));
  __ push(scratch);  // Restore return address.

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::PropertyCallbackInfo object, arguments for the callback and
  // space for the optional callback address parameter (in case the CPU
  // profiler is active) in non-GCed stack space.
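  // Three words of C arguments (name handle, PropertyCallbackInfo pointer,
  // reserved callback address) plus one word backing the PropertyCallbackInfo
  // itself (its args_ field, ApiParameterOperand(3)).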
  const int kApiArgc = 3 + 1;

  // Load address of v8::PropertyAccessorInfo::args_ array.
  __ lea(scratch, Operand(esp, 2 * kPointerSize));

  PrepareCallApiFunction(masm, kApiArgc);
  // Create v8::PropertyCallbackInfo object on the stack and initialize
  // its args_ field.
  Operand info_object = ApiParameterOperand(3);
  __ mov(info_object, scratch);

  // Name as handle.
  __ sub(scratch, Immediate(kPointerSize));
  __ mov(ApiParameterOperand(0), scratch);
  // Arguments pointer.
  __ lea(scratch, info_object);
  __ mov(ApiParameterOperand(1), scratch);
  // Reserve space for optional callback address parameter.
  Operand thunk_last_arg = ApiParameterOperand(2);

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  __ mov(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
  Register function_address = edx;
  __ mov(function_address,
         FieldOperand(scratch, Foreign::kForeignAddressOffset));
  // +3 is to skip the prologue, return address and name handle.
  Operand return_value_operand(
      ebp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
  CallApiFunctionAndReturn(masm, function_address, thunk_ref, thunk_last_arg,
                           kStackUnwindSpace, nullptr, return_value_operand,
                           nullptr);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_IA32