// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
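// The '__' shorthand routes each emitted instruction through ACCESS_MASM
// (which expands to masm->), so the generator functions below read like
// assembly listings.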

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : target
  //  -- a3                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(a1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // JumpToExternalReference expects a0 to contain the number of arguments
  // including the receiver and the extra arguments.
  const int num_extra_args = 3;
  __ Addu(a0, a0, num_extra_args + 1);

  // Insert extra arguments.
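  // Note: the argument count stays smi-tagged while it lives on the stack so
  // the GC only ever sees valid tagged values there.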
  __ SmiTag(a0);
  __ Push(a0, a1, a3);
  __ SmiUntag(a0);

  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
                             PROTECT, exit_frame_type == BUILTIN_EXIT);
}

// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}

// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}

void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, t0,
              Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction, t0,
              Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1, t0,
              Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2, t0,
              Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ mov(a3, a1);
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : function
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in t2 and the double value in f0.
  __ LoadRoot(t2, root_index);
  __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset));

  Label done_loop, loop;
  __ mov(a3, a0);
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ Subu(a3, a3, Operand(1));
    __ Branch(&done_loop, lt, a3, Operand(zero_reg));

    // Load the next parameter tagged value into a2.
    __ Lsa(at, sp, a3, kPointerSizeLog2);
    __ lw(a2, MemOperand(at));

    // Load the double value of the parameter into f2, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(a2, &convert_smi);
    __ lw(t0, FieldMemOperand(a2, HeapObject::kMapOffset));
    __ JumpIfRoot(t0, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(a0);
      __ SmiTag(a3);
      __ EnterBuiltinFrame(cp, a1, a0);
      __ Push(t2, a3);
      __ mov(a0, a2);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mov(a2, v0);
      __ Pop(t2, a3);
      __ LeaveBuiltinFrame(cp, a1, a0);
      __ SmiUntag(a3);
      __ SmiUntag(a0);
      {
        // Restore the double accumulator value (f0).
        Label restore_smi, done_restore;
        __ JumpIfSmi(t2, &restore_smi);
        __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset));
        __ jmp(&done_restore);
        __ bind(&restore_smi);
        __ SmiToDoubleFPURegister(t2, f0, t0);
        __ bind(&done_restore);
      }
    }
    __ jmp(&convert);
    __ bind(&convert_number);
    __ ldc1(f2, FieldMemOperand(a2, HeapNumber::kValueOffset));
    __ jmp(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDoubleFPURegister(a2, f2, t0);
    __ bind(&done_convert);

    // Perform the actual comparison, using the Min/Max macro instructions,
    // with the accumulator value on the left-hand side (f0) and the next
    // parameter value on the right-hand side (f2).
    // We need to work out which HeapNumber (or smi) the result came from.
    Label compare_nan, set_value;
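    // Note: BranchF takes its second label on an unordered comparison, i.e.
    // when either f0 or f2 is NaN.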
    __ BranchF(nullptr, &compare_nan, eq, f0, f2);
    __ Move(t0, t1, f0);
    if (kind == MathMaxMinKind::kMin) {
      __ MinNaNCheck_d(f0, f0, f2);
    } else {
      DCHECK(kind == MathMaxMinKind::kMax);
      __ MaxNaNCheck_d(f0, f0, f2);
    }
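    // Compare the raw bits of the min/max result against the old accumulator
    // bits; if they differ, the parameter in f2 supplied the new value, so its
    // tagged counterpart must be remembered instead.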
    __ Move(at, t8, f0);
    __ Branch(&set_value, ne, t0, Operand(at));
    __ Branch(&set_value, ne, t1, Operand(t8));
    __ jmp(&loop);
    __ bind(&set_value);
    __ mov(t2, a2);
    __ jmp(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    __ bind(&compare_nan);
    __ LoadRoot(t2, Heap::kNanValueRootIndex);
    __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset));
    __ jmp(&loop);
  }

  __ bind(&done_loop);
  // Drop all slots, including the receiver.
  __ Addu(a0, a0, Operand(1));
  __ Lsa(sp, sp, a0, kPointerSizeLog2);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, t2);  // In delay slot.
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into a0.
  Label no_arguments;
  {
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(t1, a0, Operand(1));  // In delay slot.
    __ mov(t0, a0);               // Store argc in t0.
    __ Lsa(at, sp, t1, kPointerSizeLog2);
    __ lw(a0, MemOperand(at));
  }

  // 2a. Convert first argument to number.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(t0);
    __ EnterBuiltinFrame(cp, a1, t0);
    __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, a1, t0);
    __ SmiUntag(t0);
  }

  {
    // Drop all arguments including the receiver.
    __ Lsa(sp, sp, t0, kPointerSizeLog2);
    __ DropAndRet(1);
  }

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ Move(v0, Smi::kZero);
  __ DropAndRet(1);
}

// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- a3                     : new target
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // 2. Load the first argument into a0.
  {
    Label no_arguments, done;
    __ mov(t0, a0);  // Store argc in t0.
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(t1, a0, Operand(1));  // In delay slot.
    __ Lsa(at, sp, t1, kPointerSizeLog2);
    __ lw(a0, MemOperand(at));
    __ jmp(&done);
    __ bind(&no_arguments);
    __ Move(a0, Smi::kZero);
    __ bind(&done);
  }

  // 3. Make sure a0 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(a0, &done_convert);
    __ GetObjectType(a0, a2, a2);
    __ Branch(&done_convert, eq, a2, Operand(HEAP_NUMBER_TYPE));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(t0);
      __ EnterBuiltinFrame(cp, a1, t0);
      __ Push(a3);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ Move(a0, v0);
      __ Pop(a3);
      __ LeaveBuiltinFrame(cp, a1, t0);
      __ SmiUntag(t0);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ Branch(&new_object, ne, a1, Operand(a3));

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(v0, a1, a0, a2, t1, &new_object);
  __ jmp(&drop_frame_and_ret);

  // 6. Fall back to the runtime to create a new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    FastNewObjectStub stub(masm->isolate());
    __ SmiTag(t0);
    __ EnterBuiltinFrame(cp, a1, t0);
    __ Push(a0);  // first argument
    __ CallStub(&stub);
    __ Pop(a0);
    __ LeaveBuiltinFrame(cp, a1, t0);
    __ SmiUntag(t0);
  }
  __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&drop_frame_and_ret);
  {
    __ Lsa(sp, sp, t0, kPointerSizeLog2);
    __ DropAndRet(1);
  }
}

// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into a0.
  Label no_arguments;
  {
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(t1, a0, Operand(1));
    __ mov(t0, a0);  // Store argc in t0.
    __ Lsa(at, sp, t1, kPointerSizeLog2);
    __ lw(a0, MemOperand(at));
  }

  // 2a. At least one argument, return a0 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label drop_frame_and_ret, to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(a0, &to_string);
    __ GetObjectType(a0, t1, t1);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ Subu(t1, t1, Operand(FIRST_NONSTRING_TYPE));
    __ Branch(&symbol_descriptive_string, eq, t1, Operand(zero_reg));
    __ Branch(&to_string, gt, t1, Operand(zero_reg));
    __ mov(v0, a0);
    __ jmp(&drop_frame_and_ret);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(v0, Heap::kempty_stringRootIndex);
    __ DropAndRet(1);
  }

  // 3a. Convert a0 to a string.
  __ bind(&to_string);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(t0);
    __ EnterBuiltinFrame(cp, a1, t0);
    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, a1, t0);
    __ SmiUntag(t0);
  }
  __ jmp(&drop_frame_and_ret);

  // 3b. Convert symbol in a0 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Lsa(sp, sp, t0, kPointerSizeLog2);
    __ Drop(1);
    __ Push(a0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }

  __ bind(&drop_frame_and_ret);
  {
    __ Lsa(sp, sp, t0, kPointerSizeLog2);
    __ DropAndRet(1);
  }
}

// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- a3                     : new target
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // 2. Load the first argument into a0.
  {
    Label no_arguments, done;
    __ mov(t0, a0);  // Store argc in t0.
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(t1, a0, Operand(1));
    __ Lsa(at, sp, t1, kPointerSizeLog2);
    __ lw(a0, MemOperand(at));
    __ jmp(&done);
    __ bind(&no_arguments);
    __ LoadRoot(a0, Heap::kempty_stringRootIndex);
    __ bind(&done);
  }

  // 3. Make sure a0 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(a0, &convert);
    __ GetObjectType(a0, a2, a2);
    __ And(t1, a2, Operand(kIsNotStringMask));
    __ Branch(&done_convert, eq, t1, Operand(zero_reg));
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(t0);
      __ EnterBuiltinFrame(cp, a1, t0);
      __ Push(a3);
      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
      __ Move(a0, v0);
      __ Pop(a3);
      __ LeaveBuiltinFrame(cp, a1, t0);
      __ SmiUntag(t0);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ Branch(&new_object, ne, a1, Operand(a3));

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(v0, a1, a0, a2, t1, &new_object);
  __ jmp(&drop_frame_and_ret);

  // 6. Fall back to the runtime to create a new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    FastNewObjectStub stub(masm->isolate());
    __ SmiTag(t0);
    __ EnterBuiltinFrame(cp, a1, t0);
    __ Push(a0);  // first argument
    __ CallStub(&stub);
    __ Pop(a0);
    __ LeaveBuiltinFrame(cp, a1, t0);
    __ SmiUntag(t0);
  }
  __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&drop_frame_and_ret);
  {
    __ Lsa(sp, sp, t0, kPointerSizeLog2);
    __ DropAndRet(1);
  }
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
  __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- a0 : argument count (preserved for callee)
  //  -- a1 : target function (preserved for callee)
  //  -- a3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target.
    // Push function as parameter to the runtime call.
    __ SmiTag(a0);
    __ Push(a0, a1, a3, a1);

    __ CallRuntime(function_id, 1);

    // Restore target function and new target.
    __ Pop(a0, a1, a3);
    __ SmiUntag(a0);
  }

  __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(t0));

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

namespace {

void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
                                    bool create_implicit_receiver,
                                    bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- a3     : new target
  //  -- cp     : context
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ SmiTag(a0);
    __ Push(cp, a0);

    if (create_implicit_receiver) {
      // Allocate the new receiver object.
      __ Push(a1, a3);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mov(t4, v0);
      __ Pop(a1, a3);

      // ----------- S t a t e -------------
      //  -- a1: constructor function
      //  -- a3: new target
      //  -- t4: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ lw(a0, MemOperand(sp));
    }

    __ SmiUntag(a0);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(t4, t4);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // a3: new target
    // t4: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, entry;
    __ SmiTag(t4, a0);
    __ jmp(&entry);
    __ bind(&loop);
    __ Lsa(t0, a2, t4, kPointerSizeLog2 - kSmiTagSize);
    __ lw(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
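    // t4 is smi-tagged, so subtracting 2 decrements the untagged count by 1.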
    __ Addu(t4, t4, Operand(-2));
    __ Branch(&loop, greater_equal, t4, Operand(zero_reg));

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    // a3: new target
    ParameterCount actual(a0);
    __ InvokeFunction(a1, a3, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ lw(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // v0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(v0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ GetObjectType(v0, a1, a3);
      __ Branch(&exit, greater_equal, a3, Operand(FIRST_JS_RECEIVER_TYPE));

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ lw(v0, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // v0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ lw(a1, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ lw(a1, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(v0, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

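  // a1 holds the smi-tagged argument count, hence the shift by one bit less
  // than kPointerSizeLog2; the extra kPointerSize below drops the receiver.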
  __ Lsa(sp, sp, a1, kPointerSizeLog2 - 1);
  __ Addu(sp, sp, kPointerSize);
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
  }
  __ Ret();
}

}  // namespace

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}

void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(a1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };

// Clobbers a2; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
  // Make a2 the space we have left. The stack might already be overflowed
  // here which will cause a2 to become negative.
  __ Subu(a2, sp, a2);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ sll(t3, argc, kPointerSizeLog2);
  }
  // Signed comparison.
  __ Branch(&okay, gt, a2, Operand(t3));

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  //  -- a0: new.target
  //  -- a1: function
  //  -- a2: receiver_pointer
  //  -- a3: argc
  //  -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ li(cp, Operand(context_address));
    __ lw(cp, MemOperand(cp));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Check if we have enough stack space to push all arguments.
    // Clobbers a2.
    Generate_CheckStackOverflow(masm, a3, kArgcIsUntaggedInt);

    // Remember new.target.
    __ mov(t1, a0);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ Lsa(t2, s0, a3, kPointerSizeLog2);
    __ b(&entry);
    __ nop();  // Branch delay slot nop.
    // t2 points past last arg.
    __ bind(&loop);
    __ lw(t0, MemOperand(s0));  // Read next parameter.
    __ addiu(s0, s0, kPointerSize);
    __ lw(t0, MemOperand(t0));  // Dereference handle.
    __ push(t0);                // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(t2));

    // Setup new.target and argc.
    __ mov(a0, a3);
    __ mov(a3, t1);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ mov(s1, t0);
    __ mov(s2, t0);
    __ mov(s3, t0);
    __ mov(s4, t0);
    __ mov(s5, t0);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }

  __ Jump(ra);
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : the value to pass to the generator
  //  -- a1 : the JSGeneratorObject to resume
  //  -- a2 : the resume mode (tagged)
  //  -- ra : return address
  // -----------------------------------
  __ AssertGeneratorObject(a1);

  // Store input value into generator object.
  __ sw(v0, FieldMemOperand(a1, JSGeneratorObject::kInputOrDebugPosOffset));
  __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, v0, a3,
                      kRAHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kResumeModeOffset));

  // Load suspended function and context.
  __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(masm->isolate());
  STATIC_ASSERT(StepFrame > StepIn);
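  // The assert above guarantees that the single >= StepIn comparison below
  // also covers the StepFrame action.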
  __ li(t1, Operand(last_step_action));
  __ lb(t1, MemOperand(t1));
  __ Branch(&prepare_step_in_if_stepping, ge, t1, Operand(StepIn));

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ li(t1, Operand(debug_suspended_generator));
  __ lw(t1, MemOperand(t1));
  __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(t1));
  __ bind(&stepping_prepared);

  // Push receiver.
  __ lw(t1, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
  __ Push(t1);

  // ----------- S t a t e -------------
  //  -- a1    : the JSGeneratorObject to resume
  //  -- a2    : the resume mode (tagged)
  //  -- t0    : generator function
  //  -- cp    : generator context
  //  -- ra    : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label done_loop, loop;
    __ bind(&loop);
    __ Subu(a3, a3, Operand(Smi::FromInt(1)));
    __ Branch(&done_loop, lt, a3, Operand(zero_reg));
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Dispatch on the kind of generator object.
  Label old_generator;
  __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset));
  __ GetObjectType(a3, a3, a3);
  __ Branch(&old_generator, ne, a3, Operand(BYTECODE_ARRAY_TYPE));

  // New-style (ignition/turbofan) generator object.
  {
    __ lw(a0, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a0,
          FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset));
    __ SmiUntag(a0);
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(a3, a1);
    __ Move(a1, t0);
    __ lw(a2, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
    __ Jump(a2);
  }

  // Old-style (full-codegen) generator object.
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(ra, fp);
    __ Move(fp, sp);
    __ Push(cp, t0);

    // Restore the operand stack.
    __ lw(a0, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
    __ lw(a3, FieldMemOperand(a0, FixedArray::kLengthOffset));
    __ Addu(a0, a0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
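    // a3 holds the smi-tagged length, so the shifted add leaves a3 pointing
    // just past the last element of the saved operand stack.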
    __ Lsa(a3, a0, a3, kPointerSizeLog2 - 1);
    {
      Label done_loop, loop;
      __ bind(&loop);
      __ Branch(&done_loop, eq, a0, Operand(a3));
      __ lw(t1, MemOperand(a0));
      __ Push(t1);
      __ Branch(USE_DELAY_SLOT, &loop);
      __ addiu(a0, a0, kPointerSize);  // In delay slot.
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(t1, Heap::kEmptyFixedArrayRootIndex);
    __ sw(t1, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));

    // Resume the generator function at the continuation.
    __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kCodeOffset));
    __ Addu(a3, a3, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(a2);
    __ Addu(a3, a3, Operand(a2));
    __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ Move(v0, a1);  // Continuation expects generator object in v0.
    __ Jump(a3);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1, a2, t0);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(a1, a2);
  }
  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1, a2);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(a1, a2);
  }
  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ lw(args_count,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ lw(args_count,
        FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments.
  __ Addu(sp, sp, args_count);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o a1: the JS function object being called.
//   o a3: the new target
//   o cp: our context
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(a1);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ lw(a0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  Label load_debug_bytecode_array, bytecode_array_loaded;
  Register debug_info = kInterpreterBytecodeArrayRegister;
  DCHECK(!debug_info.is(a0));
  __ lw(debug_info, FieldMemOperand(a0, SharedFunctionInfo::kDebugInfoOffset));
  __ Branch(&load_debug_bytecode_array, ne, debug_info,
            Operand(DebugInfo::uninitialized()));
  __ lw(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset));
  __ bind(&bytecode_array_loaded);

  // Check whether we should continue to use the interpreter.
  Label switch_to_different_code_kind;
  __ lw(a0, FieldMemOperand(a0, SharedFunctionInfo::kCodeOffset));
  __ Branch(&switch_to_different_code_kind, ne, a0,
            Operand(masm->CodeObject()));  // Self-reference to this code.

  // Increment invocation count for the function.
  __ lw(a0, FieldMemOperand(a1, JSFunction::kLiteralsOffset));
  __ lw(a0, FieldMemOperand(a0, LiteralsArray::kFeedbackVectorOffset));
  __ lw(t0, FieldMemOperand(
                a0, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
                        TypeFeedbackVector::kHeaderSize));
  __ Addu(t0, t0, Operand(Smi::FromInt(1)));
  __ sw(t0, FieldMemOperand(
                a0, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
                        TypeFeedbackVector::kHeaderSize));

  // Check function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ SmiTst(kInterpreterBytecodeArrayRegister, t0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0,
              Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, t0, t0);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Load initial bytecode offset.
  __ li(kInterpreterBytecodeOffsetRegister,
        Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(t0, kInterpreterBytecodeOffsetRegister);
  __ Push(a3, kInterpreterBytecodeArrayRegister, t0);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ lw(t0, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ Subu(t1, sp, Operand(t0));
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    __ Branch(&ok, hs, t1, Operand(a2));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
    __ Branch(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(t1);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ Subu(t0, t0, Operand(kPointerSize));
    __ Branch(&loop_header, ge, t0, Operand(zero_reg));
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ li(kInterpreterDispatchTableRegister,
        Operand(ExternalReference::interpreter_dispatch_table_address(
            masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ Addu(a0, kInterpreterBytecodeArrayRegister,
          kInterpreterBytecodeOffsetRegister);
  __ lbu(a0, MemOperand(a0));
  __ Lsa(at, kInterpreterDispatchTableRegister, a0, kPointerSizeLog2);
  __ lw(at, MemOperand(at));
  __ Call(at);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in v0.
  LeaveInterpreterFrame(masm, t0);
  __ Jump(ra);

  // Load debug copy of the bytecode array.
  __ bind(&load_debug_bytecode_array);
  __ lw(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(debug_info, DebugInfo::kDebugBytecodeArrayIndex));
  __ Branch(&bytecode_array_loaded);

  // If the shared code is no longer this entry trampoline, then the underlying
  // function has been switched to a different kind of code and we heal the
  // closure by switching the code entry field over to the new code as well.
  __ bind(&switch_to_different_code_kind);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kCodeOffset));
  __ Addu(t0, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ sw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(a1, t0, t1);
  __ Jump(t0);
}

static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                        Register scratch1, Register scratch2,
                                        Label* stack_overflow) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(scratch1, Heap::kRealStackLimitRootIndex);
  // Make scratch1 the space we have left. The stack might already be
  // overflowed here which will cause scratch1 to become negative.
  __ subu(scratch1, sp, scratch1);
  // Check if the arguments will overflow the stack.
  __ sll(scratch2, num_args, kPointerSizeLog2);
  // Signed comparison.
  __ Branch(stack_overflow, le, scratch1, Operand(scratch2));
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args, Register index,
                                         Register scratch, Register scratch2,
                                         Label* stack_overflow) {
  Generate_StackOverflowCheck(masm, num_args, scratch, scratch2,
                              stack_overflow);

  // Find the address of the last argument.
  __ mov(scratch2, num_args);
  __ sll(scratch2, scratch2, kPointerSizeLog2);
  __ Subu(scratch2, index, Operand(scratch2));
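  // scratch2 now addresses the slot just below the last argument; the loop
  // below copies arguments starting at 'index' and walking down until that
  // limit is reached.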

  // Push the arguments.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ lw(scratch, MemOperand(index));
  __ Addu(index, index, Operand(-kPointerSize));
  __ push(scratch);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, index, Operand(scratch2));
}

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode,
    CallableType function_type) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;

  __ Addu(t0, a0, Operand(1));  // Add one for receiver.

  // This function modifies a2, t4 and t1.
  Generate_InterpreterPushArgs(masm, t0, a2, t4, t1, &stack_overflow);

  // Call the target.
  if (function_type == CallableType::kJSFunction) {
    __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                      tail_call_mode),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(function_type, CallableType::kAny);
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                              tail_call_mode),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ break_(0xCC);
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
    MacroAssembler* masm, CallableType construct_type) {
  // ----------- S t a t e -------------
  // -- a0 : argument count (not including receiver)
  // -- a3 : new target
  // -- a1 : constructor to call
  // -- a2 : allocation site feedback if available, undefined otherwise.
  // -- t4 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  // Push a slot for the receiver.
  __ push(zero_reg);

  // This function modifies t4, t1 and t0.
  Generate_InterpreterPushArgs(masm, a0, t4, t1, t0, &stack_overflow);

  __ AssertUndefinedOrAllocationSite(a2, t0);
  if (construct_type == CallableType::kJSFunction) {
    __ AssertFunction(a1);

    // Tail call to the function-specific construct stub (still in the caller
    // context at this point).
    __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset));
    __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ Jump(at);
  } else {
    DCHECK_EQ(construct_type, CallableType::kAny);
    // Call the constructor with a0, a1, and a3 unmodified.
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ break_(0xCC);
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstructArray(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the target to call, checked to be the Array function.
1273   //  -- a2 : allocation site feedback.
1274   //  -- a3 : the address of the first argument to be pushed. Subsequent
1275   //          arguments should be consecutive above this, in the same order as
1276   //          they are to be pushed onto the stack.
1277   // -----------------------------------
1278   Label stack_overflow;
1279 
1280   __ Addu(t0, a0, Operand(1));  // Add one for receiver.
1281 
1282   // This function modifies a3, t4, and t1.
1283   Generate_InterpreterPushArgs(masm, t0, a3, t1, t4, &stack_overflow);
1284 
1285   // ArrayConstructor stub expects constructor in a3. Set it here.
1286   __ mov(a3, a1);
1287 
1288   ArrayConstructorStub stub(masm->isolate());
1289   __ TailCallStub(&stub);
1290 
1291   __ bind(&stack_overflow);
1292   {
1293     __ TailCallRuntime(Runtime::kThrowStackOverflow);
1294     // Unreachable code.
1295     __ break_(0xCC);
1296   }
1297 }
1298 
Generate_InterpreterEnterBytecode(MacroAssembler * masm)1299 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1300   // Set the return address to the correct point in the interpreter entry
1301   // trampoline.
1302   Smi* interpreter_entry_return_pc_offset(
1303       masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1304   DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
1305   __ li(t0, Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline()));
1306   __ Addu(ra, t0, Operand(interpreter_entry_return_pc_offset->value() +
1307                           Code::kHeaderSize - kHeapObjectTag));
1308 
1309   // Initialize the dispatch table register.
1310   __ li(kInterpreterDispatchTableRegister,
1311         Operand(ExternalReference::interpreter_dispatch_table_address(
1312             masm->isolate())));
1313 
1314   // Get the bytecode array pointer from the frame.
1315   __ lw(kInterpreterBytecodeArrayRegister,
1316         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1317 
1318   if (FLAG_debug_code) {
1319     // Check function data field is actually a BytecodeArray object.
1320     __ SmiTst(kInterpreterBytecodeArrayRegister, at);
1321     __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at,
1322               Operand(zero_reg));
1323     __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1);
1324     __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1,
1325               Operand(BYTECODE_ARRAY_TYPE));
1326   }
1327 
1328   // Get the target bytecode offset from the frame.
1329   __ lw(kInterpreterBytecodeOffsetRegister,
1330         MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1331   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1332 
1333   // Dispatch to the target bytecode.
1334   __ Addu(a1, kInterpreterBytecodeArrayRegister,
1335           kInterpreterBytecodeOffsetRegister);
1336   __ lbu(a1, MemOperand(a1));
1337   __ Lsa(a1, kInterpreterDispatchTableRegister, a1, kPointerSizeLog2);
1338   __ lw(a1, MemOperand(a1));
1339   __ Jump(a1);
1340 }
1341 
Generate_InterpreterEnterBytecodeAdvance(MacroAssembler * masm)1342 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
1343   // Advance the current bytecode offset stored within the given interpreter
1344   // stack frame. This simulates what all bytecode handlers do upon completion
1345   // of the underlying operation.
1346   __ lw(a1, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1347   __ lw(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1348   __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1349   {
1350     FrameScope scope(masm, StackFrame::INTERNAL);
1351     __ Push(kInterpreterAccumulatorRegister, a1, a2);
1352     __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset);
1353     __ mov(a2, v0);  // Result is the new bytecode offset.
1354     __ Pop(kInterpreterAccumulatorRegister);
1355   }
1356   __ sw(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1357 
1358   Generate_InterpreterEnterBytecode(masm);
1359 }
1360 
Generate_InterpreterEnterBytecodeDispatch(MacroAssembler * masm)1361 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1362   Generate_InterpreterEnterBytecode(masm);
1363 }
1364 
Generate_CompileLazy(MacroAssembler * masm)1365 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
1366   // ----------- S t a t e -------------
1367   //  -- a0 : argument count (preserved for callee)
1368   //  -- a3 : new target (preserved for callee)
1369   //  -- a1 : target function (preserved for callee)
1370   // -----------------------------------
1371   // First look up code; maybe we don't need to compile!
1372   Label gotta_call_runtime, gotta_call_runtime_no_stack;
1373   Label try_shared;
1374   Label loop_top, loop_bottom;
1375 
1376   Register argument_count = a0;
1377   Register closure = a1;
1378   Register new_target = a3;
1379   __ push(argument_count);
1380   __ push(new_target);
1381   __ push(closure);
1382 
1383   Register map = a0;
1384   Register index = a2;
1385   __ lw(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1386   __ lw(map, FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
1387   __ lw(index, FieldMemOperand(map, FixedArray::kLengthOffset));
1388   __ Branch(&gotta_call_runtime, lt, index, Operand(Smi::FromInt(2)));
1389 
1390   // Find literals.
1391   // a3  : native context
1392   // a2  : length / index
1393   // a0  : optimized code map
1394   // stack[0] : new target
1395   // stack[4] : closure
1396   Register native_context = a3;
1397   __ lw(native_context, NativeContextMemOperand());
1398 
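  // The loop below walks the optimized code map backwards in steps of
  // SharedFunctionInfo::kEntryLength, roughly (illustrative sketch):
  //   for (i = length; i > 1; i -= kEntryLength) {
  //     if (entry(i).context == native_context &&
  //         entry(i).osr_ast_id == BailoutId::None() &&
  //         entry(i).literals and entry(i).code are live weak cells) {
  //       install literals and code into the closure and jump;
  //     }
  //   }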
1399   __ bind(&loop_top);
1400   Register temp = a1;
1401   Register array_pointer = t1;
1402 
1403   // Does the native context match?
1404   __ sll(at, index, kPointerSizeLog2 - kSmiTagSize);
1405   __ Addu(array_pointer, map, Operand(at));
1406   __ lw(temp, FieldMemOperand(array_pointer,
1407                               SharedFunctionInfo::kOffsetToPreviousContext));
1408   __ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
1409   __ Branch(&loop_bottom, ne, temp, Operand(native_context));
1410   // OSR id set to none?
1411   __ lw(temp, FieldMemOperand(array_pointer,
1412                               SharedFunctionInfo::kOffsetToPreviousOsrAstId));
1413   const int bailout_id = BailoutId::None().ToInt();
1414   __ Branch(&loop_bottom, ne, temp, Operand(Smi::FromInt(bailout_id)));
1415   // Literals available?
1416   __ lw(temp, FieldMemOperand(array_pointer,
1417                               SharedFunctionInfo::kOffsetToPreviousLiterals));
1418   __ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
1419   __ JumpIfSmi(temp, &gotta_call_runtime);
1420 
1421   // Save the literals in the closure.
1422   __ lw(t0, MemOperand(sp, 0));
1423   __ sw(temp, FieldMemOperand(t0, JSFunction::kLiteralsOffset));
1424   __ push(index);
1425   __ RecordWriteField(t0, JSFunction::kLiteralsOffset, temp, index,
1426                       kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1427                       OMIT_SMI_CHECK);
1428   __ pop(index);
1429 
1430   // Code available?
1431   Register entry = t0;
1432   __ lw(entry,
1433         FieldMemOperand(array_pointer,
1434                         SharedFunctionInfo::kOffsetToPreviousCachedCode));
1435   __ lw(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
1436   __ JumpIfSmi(entry, &try_shared);
1437 
1438   // Found literals and code. Get them into the closure and return.
1439   __ pop(closure);
1440   // Store code entry in the closure.
1441   __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1442   __ sw(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
1443   __ RecordWriteCodeEntryField(closure, entry, t1);
1444 
1445   // Link the closure into the optimized function list.
1446   // t0 : code entry
1447   // a3 : native context
1448   // a1 : closure
1449   __ lw(t1,
1450         ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
1451   __ sw(t1, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
1452   __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, t1, a0,
1453                       kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1454                       OMIT_SMI_CHECK);
1455   const int function_list_offset =
1456       Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
1457   __ sw(closure,
1458         ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
1459   // Save closure before the write barrier.
1460   __ mov(t1, closure);
1461   __ RecordWriteContextSlot(native_context, function_list_offset, closure, a0,
1462                             kRAHasNotBeenSaved, kDontSaveFPRegs);
1463   __ mov(closure, t1);
1464   __ pop(new_target);
1465   __ pop(argument_count);
1466   __ Jump(entry);
1467 
1468   __ bind(&loop_bottom);
1469   __ Subu(index, index,
1470           Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
1471   __ Branch(&loop_top, gt, index, Operand(Smi::FromInt(1)));
1472 
1473   // We found neither literals nor code.
1474   __ jmp(&gotta_call_runtime);
1475 
1476   __ bind(&try_shared);
1477   __ pop(closure);
1478   __ pop(new_target);
1479   __ pop(argument_count);
1480   __ lw(entry, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
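  // Fallback sketch: if the shared function is marked for tier-up, or its
  // code is still a builtin (e.g. the lazy-compile stub), go to the runtime;
  // otherwise install the shared code entry in the closure and jump to it.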
1481   // Is the shared function marked for tier up?
1482   __ lbu(t1, FieldMemOperand(entry,
1483                              SharedFunctionInfo::kMarkedForTierUpByteOffset));
1484   __ And(t1, t1,
1485          Operand(1 << SharedFunctionInfo::kMarkedForTierUpBitWithinByte));
1486   __ Branch(&gotta_call_runtime_no_stack, ne, t1, Operand(zero_reg));
1487   // Is the full code valid?
1488   __ lw(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
1489   __ lw(t1, FieldMemOperand(entry, Code::kFlagsOffset));
1490   __ And(t1, t1, Operand(Code::KindField::kMask));
1491   __ srl(t1, t1, Code::KindField::kShift);
1492   __ Branch(&gotta_call_runtime_no_stack, eq, t1, Operand(Code::BUILTIN));
1493   // Yes, install the full code.
1494   __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1495   __ sw(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
1496   __ RecordWriteCodeEntryField(closure, entry, t1);
1497   __ Jump(entry);
1498 
1499   __ bind(&gotta_call_runtime);
1500   __ pop(closure);
1501   __ pop(new_target);
1502   __ pop(argument_count);
1503   __ bind(&gotta_call_runtime_no_stack);
1504   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1505 }
1506 
1507 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
1508   GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
1509 }
1510 
1511 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
1512   GenerateTailCallToReturnedCode(masm,
1513                                  Runtime::kCompileOptimized_NotConcurrent);
1514 }
1515 
1516 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
1517   GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
1518 }
1519 
1520 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1521   // ----------- S t a t e -------------
1522   //  -- a0 : argument count (preserved for callee)
1523   //  -- a1 : new target (preserved for callee)
1524   //  -- a3 : target function (preserved for callee)
1525   // -----------------------------------
1526   Label failed;
1527   {
1528     FrameScope scope(masm, StackFrame::INTERNAL);
1529     // Preserve argument count for later compare.
1530     __ Move(t4, a0);
1531     // Push a copy of the target function and the new target.
1532     // Push function as parameter to the runtime call.
1533     __ SmiTag(a0);
1534     __ Push(a0, a1, a3, a1);
1535 
1536     // Copy arguments from caller (stdlib, foreign, heap).
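    // The unrolled loops below push the argc (0..3) caller-provided
    // arguments and pad with undefined up to three slots; e.g. for argc == 1
    // this amounts to (sketch): push(arg0); push(undefined); push(undefined);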
1537     Label args_done;
1538     for (int j = 0; j < 4; ++j) {
1539       Label over;
1540       if (j < 3) {
1541         __ Branch(&over, ne, t4, Operand(j));
1542       }
1543       for (int i = j - 1; i >= 0; --i) {
1544         __ lw(t4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
1545                                      i * kPointerSize));
1546         __ push(t4);
1547       }
1548       for (int i = 0; i < 3 - j; ++i) {
1549         __ PushRoot(Heap::kUndefinedValueRootIndex);
1550       }
1551       if (j < 3) {
1552         __ jmp(&args_done);
1553         __ bind(&over);
1554       }
1555     }
1556     __ bind(&args_done);
1557 
1558     // Call runtime, on success unwind frame, and parent frame.
1559     __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
1560     // A smi 0 is returned on failure, an object on success.
1561     __ JumpIfSmi(v0, &failed);
1562 
1563     __ Drop(2);
1564     __ pop(t4);
1565     __ SmiUntag(t4);
1566     scope.GenerateLeaveFrame();
1567 
1568     __ Addu(t4, t4, Operand(1));
1569     __ Lsa(sp, sp, t4, kPointerSizeLog2);
1570     __ Ret();
1571 
1572     __ bind(&failed);
1573     // Restore target function and new target.
1574     __ Pop(a0, a1, a3);
1575     __ SmiUntag(a0);
1576   }
1577   // On failure, tail call back to regular js.
1578   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1579 }
1580 
1581 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
1582   // For now, we are relying on the fact that make_code_young doesn't do any
1583   // garbage collection, which allows us to save/restore the registers without
1584   // worrying about which of them contain pointers. We also don't build an
1585   // internal frame to make the code faster, since we shouldn't have to do stack
1586   // crawls in MakeCodeYoung. This seems a bit fragile.
1587 
1588   // Set a0 to point to the head of the PlatformCodeAge sequence.
1589   __ Subu(a0, a0, Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
1590 
1591   // The following registers must be saved and restored when calling through to
1592   // the runtime:
1593   //   a0 - contains return address (beginning of patch sequence)
1594   //   a1 - isolate
1595   //   a3 - new target
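  // In effect this performs the C call (sketch, MIPS O32 convention):
  //   make_code_young(return_address /* a0 */, isolate /* a1 */);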
1596   RegList saved_regs =
1597       (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
1598   FrameScope scope(masm, StackFrame::MANUAL);
1599   __ MultiPush(saved_regs);
1600   __ PrepareCallCFunction(2, 0, a2);
1601   __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
1602   __ CallCFunction(
1603       ExternalReference::get_make_code_young_function(masm->isolate()), 2);
1604   __ MultiPop(saved_regs);
1605   __ Jump(a0);
1606 }
1607 
1608 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
1609   void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
1610       MacroAssembler* masm) {                                 \
1611     GenerateMakeCodeYoungAgainCommon(masm);                   \
1612   }                                                           \
1613   void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
1614       MacroAssembler* masm) {                                 \
1615     GenerateMakeCodeYoungAgainCommon(masm);                   \
1616   }
1617 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
1618 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1619 
1620 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
1621   // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
1622   // that make_code_young doesn't do any garbage collection, which allows us to
1623   // save/restore the registers without worrying about which of them contain
1624   // pointers.
1625 
1626   // Set a0 to point to the head of the PlatformCodeAge sequence.
1627   __ Subu(a0, a0, Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
1628 
1629   // The following registers must be saved and restored when calling through to
1630   // the runtime:
1631   //   a0 - contains return address (beginning of patch sequence)
1632   //   a1 - isolate
1633   //   a3 - new target
1634   RegList saved_regs =
1635       (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
1636   FrameScope scope(masm, StackFrame::MANUAL);
1637   __ MultiPush(saved_regs);
1638   __ PrepareCallCFunction(2, 0, a2);
1639   __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
1640   __ CallCFunction(
1641       ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
1642       2);
1643   __ MultiPop(saved_regs);
1644 
1645   // Perform prologue operations usually performed by the young code stub.
1646   __ PushStandardFrame(a1);
1647 
1648   // Jump to point after the code-age stub.
1649   __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength));
1650   __ Jump(a0);
1651 }
1652 
1653 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
1654   GenerateMakeCodeYoungAgainCommon(masm);
1655 }
1656 
1657 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
1658   Generate_MarkCodeAsExecutedOnce(masm);
1659 }
1660 
1661 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
1662                                              SaveFPRegsMode save_doubles) {
1663   {
1664     FrameScope scope(masm, StackFrame::INTERNAL);
1665 
1666     // Preserve registers across notification, this is important for compiled
1667     // stubs that tail call the runtime on deopts passing their parameters in
1668     // registers.
1669     __ MultiPush(kJSCallerSaved | kCalleeSaved);
1670     // Pass the function and deoptimization type to the runtime system.
1671     __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
1672     __ MultiPop(kJSCallerSaved | kCalleeSaved);
1673   }
1674 
1675   __ Addu(sp, sp, Operand(kPointerSize));  // Ignore state
1676   __ Jump(ra);                             // Jump to miss handler
1677 }
1678 
1679 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
1680   Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
1681 }
1682 
1683 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
1684   Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
1685 }
1686 
1687 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1688                                              Deoptimizer::BailoutType type) {
1689   {
1690     FrameScope scope(masm, StackFrame::INTERNAL);
1691     // Pass the function and deoptimization type to the runtime system.
1692     __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
1693     __ push(a0);
1694     __ CallRuntime(Runtime::kNotifyDeoptimized);
1695   }
1696 
1697   // Get the full codegen state from the stack and untag it -> t2.
1698   __ lw(t2, MemOperand(sp, 0 * kPointerSize));
1699   __ SmiUntag(t2);
1700   // Switch on the state.
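  // Two bailout states are handled here; stack on entry (sketch):
  //   NO_REGISTERS: sp[0] = state                  -> drop one word
  //   TOS_REGISTER: sp[0] = state, sp[4] = result  -> v0 = result, drop two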
1701   Label with_tos_register, unknown_state;
1702   __ Branch(&with_tos_register, ne, t2,
1703             Operand(static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS)));
1704   __ Ret(USE_DELAY_SLOT);
1705   // Safe to fill delay slot: Addu will emit one instruction.
1706   __ Addu(sp, sp, Operand(1 * kPointerSize));  // Remove state.
1707 
1708   __ bind(&with_tos_register);
1709   DCHECK_EQ(kInterpreterAccumulatorRegister.code(), v0.code());
1710   __ lw(v0, MemOperand(sp, 1 * kPointerSize));
1711   __ Branch(&unknown_state, ne, t2,
1712             Operand(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER)));
1713 
1714   __ Ret(USE_DELAY_SLOT);
1715   // Safe to fill delay slot: Addu will emit one instruction.
1716   __ Addu(sp, sp, Operand(2 * kPointerSize));  // Remove state.
1717 
1718   __ bind(&unknown_state);
1719   __ stop("no cases left");
1720 }
1721 
1722 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1723   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1724 }
1725 
1726 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1727   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1728 }
1729 
1730 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1731   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1732 }
1733 
1734 // Clobbers {t2, t3, t4, t5}.
1735 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
1736                                     Register function_template_info,
1737                                     Label* receiver_check_failed) {
1738   Register signature = t2;
1739   Register map = t3;
1740   Register constructor = t4;
1741   Register scratch = t5;
1742 
1743   // If there is no signature, return the holder.
1744   __ lw(signature, FieldMemOperand(function_template_info,
1745                                    FunctionTemplateInfo::kSignatureOffset));
1746   Label receiver_check_passed;
1747   __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
1748                 &receiver_check_passed);
1749 
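  // Pseudocode for the walk below (illustrative sketch):
  //   for (map = receiver->map;;) {
  //     type = map->constructor->shared->function_data;
  //     while (type is a FunctionTemplateInfo) {
  //       if (type == signature) return;  // compatible receiver
  //       type = type->parent_template;
  //     }
  //     if (!map->has_hidden_prototype) goto receiver_check_failed;
  //     receiver = map->prototype; map = receiver->map;
  //   }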
1750   // Walk the prototype chain.
1751   __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1752   Label prototype_loop_start;
1753   __ bind(&prototype_loop_start);
1754 
1755   // Get the constructor, if any.
1756   __ GetMapConstructor(constructor, map, scratch, scratch);
1757   Label next_prototype;
1758   __ Branch(&next_prototype, ne, scratch, Operand(JS_FUNCTION_TYPE));
1759   Register type = constructor;
1760   __ lw(type,
1761         FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
1762   __ lw(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));
1763 
1764   // Loop through the chain of inheriting function templates.
1765   Label function_template_loop;
1766   __ bind(&function_template_loop);
1767 
1768   // If the signatures match, we have a compatible receiver.
1769   __ Branch(&receiver_check_passed, eq, signature, Operand(type),
1770             USE_DELAY_SLOT);
1771 
1772   // If the current type is not a FunctionTemplateInfo, load the next prototype
1773   // in the chain.
1774   __ JumpIfSmi(type, &next_prototype);
1775   __ GetObjectType(type, scratch, scratch);
1776   __ Branch(&next_prototype, ne, scratch, Operand(FUNCTION_TEMPLATE_INFO_TYPE));
1777 
1778   // Otherwise load the parent function template and iterate.
1779   __ lw(type,
1780         FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
1781   __ Branch(&function_template_loop);
1782 
1783   // Load the next prototype and iterate.
1784   __ bind(&next_prototype);
1785   __ lw(scratch, FieldMemOperand(map, Map::kBitField3Offset));
1786   __ DecodeField<Map::HasHiddenPrototype>(scratch);
1787   __ Branch(receiver_check_failed, eq, scratch, Operand(zero_reg));
1788   __ lw(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
1789   __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1790 
1791   __ Branch(&prototype_loop_start);
1792 
1793   __ bind(&receiver_check_passed);
1794 }
1795 
1796 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
1797   // ----------- S t a t e -------------
1798   //  -- a0                 : number of arguments excluding receiver
1799   //  -- a1                 : callee
1800   //  -- ra                 : return address
1801   //  -- sp[0]              : last argument
1802   //  -- ...
1803   //  -- sp[4 * (argc - 1)] : first argument
1804   //  -- sp[4 * argc]       : receiver
1805   // -----------------------------------
1806 
1807   // Load the FunctionTemplateInfo.
1808   __ lw(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1809   __ lw(t1, FieldMemOperand(t1, SharedFunctionInfo::kFunctionDataOffset));
1810 
1811   // Do the compatible receiver check.
1812   Label receiver_check_failed;
1813   __ Lsa(t8, sp, a0, kPointerSizeLog2);
1814   __ lw(t0, MemOperand(t8));
1815   CompatibleReceiverCheck(masm, t0, t1, &receiver_check_failed);
1816 
1817   // Get the callback offset from the FunctionTemplateInfo, and jump to the
1818   // beginning of the code.
1819   __ lw(t2, FieldMemOperand(t1, FunctionTemplateInfo::kCallCodeOffset));
1820   __ lw(t2, FieldMemOperand(t2, CallHandlerInfo::kFastHandlerOffset));
1821   __ Addu(t2, t2, Operand(Code::kHeaderSize - kHeapObjectTag));
1822   __ Jump(t2);
1823 
1824   // Compatible receiver check failed: throw an Illegal Invocation exception.
1825   __ bind(&receiver_check_failed);
1826   // Drop the arguments (including the receiver).
1827   __ Addu(t8, t8, Operand(kPointerSize));
1828   __ addu(sp, t8, zero_reg);
1829   __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
1830 }
1831 
1832 static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
1833                                               bool has_handler_frame) {
1834   // Lookup the function in the JavaScript frame.
1835   if (has_handler_frame) {
1836     __ lw(a0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1837     __ lw(a0, MemOperand(a0, JavaScriptFrameConstants::kFunctionOffset));
1838   } else {
1839     __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1840   }
1841 
1842   {
1843     FrameScope scope(masm, StackFrame::INTERNAL);
1844     // Pass function as argument.
1845     __ push(a0);
1846     __ CallRuntime(Runtime::kCompileForOnStackReplacement);
1847   }
1848 
1849   // If the code object is null, just return to the caller.
1850   __ Ret(eq, v0, Operand(Smi::kZero));
1851 
1852   // Drop any potential handler frame that may be sitting on top of the actual
1853   // JavaScript frame. This is the case when OSR is triggered from bytecode.
1854   if (has_handler_frame) {
1855     __ LeaveFrame(StackFrame::STUB);
1856   }
1857 
1858   // Load deoptimization data from the code object.
1859   // <deopt_data> = <code>[#deoptimization_data_offset]
1860   __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
1861 
1862   // Load the OSR entrypoint offset from the deoptimization data.
1863   // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1864   __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
1865                                DeoptimizationInputData::kOsrPcOffsetIndex) -
1866                                kHeapObjectTag));
1867   __ SmiUntag(a1);
1868 
1869   // Compute the target address = code_obj + header_size + osr_offset
1870   // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1871   __ addu(v0, v0, a1);
1872   __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);
1873 
1874   // And "return" to the OSR entry point of the function.
1875   __ Ret();
1876 }
1877 
1878 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1879   Generate_OnStackReplacementHelper(masm, false);
1880 }
1881 
1882 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
1883   Generate_OnStackReplacementHelper(masm, true);
1884 }
1885 
1886 // static
1887 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1888   // ----------- S t a t e -------------
1889   //  -- a0    : argc
1890   //  -- sp[0] : argArray
1891   //  -- sp[4] : thisArg
1892   //  -- sp[8] : receiver
1893   // -----------------------------------
1894 
1895   // 1. Load receiver into a1, argArray into a0 (if present), remove all
1896   // arguments from the stack (including the receiver), and push thisArg (if
1897   // present) instead.
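  // Net effect (sketch): a1 = receiver; a2 = thisArg or undefined;
  // a3 = argArray or undefined; the argument area collapses to a single
  // thisArg slot and a0 ends up holding argArray.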
1898   {
1899     Label no_arg;
1900     Register scratch = t0;
1901     __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
1902     __ mov(a3, a2);
1903     // Lsa() cannot be used here as the scratch value is used later.
1904     __ sll(scratch, a0, kPointerSizeLog2);
1905     __ Addu(a0, sp, Operand(scratch));
1906     __ lw(a1, MemOperand(a0));  // receiver
1907     __ Subu(a0, a0, Operand(kPointerSize));
1908     __ Branch(&no_arg, lt, a0, Operand(sp));
1909     __ lw(a2, MemOperand(a0));  // thisArg
1910     __ Subu(a0, a0, Operand(kPointerSize));
1911     __ Branch(&no_arg, lt, a0, Operand(sp));
1912     __ lw(a3, MemOperand(a0));  // argArray
1913     __ bind(&no_arg);
1914     __ Addu(sp, sp, Operand(scratch));
1915     __ sw(a2, MemOperand(sp));
1916     __ mov(a0, a3);
1917   }
1918 
1919   // ----------- S t a t e -------------
1920   //  -- a0    : argArray
1921   //  -- a1    : receiver
1922   //  -- sp[0] : thisArg
1923   // -----------------------------------
1924 
1925   // 2. Make sure the receiver is actually callable.
1926   Label receiver_not_callable;
1927   __ JumpIfSmi(a1, &receiver_not_callable);
1928   __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
1929   __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
1930   __ And(t0, t0, Operand(1 << Map::kIsCallable));
1931   __ Branch(&receiver_not_callable, eq, t0, Operand(zero_reg));
1932 
1933   // 3. Tail call with no arguments if argArray is null or undefined.
1934   Label no_arguments;
1935   __ JumpIfRoot(a0, Heap::kNullValueRootIndex, &no_arguments);
1936   __ JumpIfRoot(a0, Heap::kUndefinedValueRootIndex, &no_arguments);
1937 
1938   // 4a. Apply the receiver to the given argArray (passing undefined for
1939   // new.target).
1940   __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
1941   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1942 
1943   // 4b. The argArray is either null or undefined, so we tail call without any
1944   // arguments to the receiver.
1945   __ bind(&no_arguments);
1946   {
1947     __ mov(a0, zero_reg);
1948     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1949   }
1950 
1951   // 4c. The receiver is not callable, throw an appropriate TypeError.
1952   __ bind(&receiver_not_callable);
1953   {
1954     __ sw(a1, MemOperand(sp));
1955     __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1956   }
1957 }
1958 
1959 // static
1960 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1961   // 1. Make sure we have at least one argument.
1962   // a0: actual number of arguments
1963   {
1964     Label done;
1965     __ Branch(&done, ne, a0, Operand(zero_reg));
1966     __ PushRoot(Heap::kUndefinedValueRootIndex);
1967     __ Addu(a0, a0, Operand(1));
1968     __ bind(&done);
1969   }
1970 
1971   // 2. Get the function to call (passed as receiver) from the stack.
1972   // a0: actual number of arguments
1973   __ Lsa(at, sp, a0, kPointerSizeLog2);
1974   __ lw(a1, MemOperand(at));
1975 
1976   // 3. Shift arguments and return address one slot down on the stack
1977   //    (overwriting the original receiver).  Adjust argument count to make
1978   //    the original first argument the new receiver.
1979   // a0: actual number of arguments
1980   // a1: function
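  // Sketch: for (i = argc; i >= 1; i--) sp[i] = sp[i - 1]; then argc is
  // decremented and one slot popped, making the old first argument the new
  // receiver.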
1981   {
1982     Label loop;
1983     // Calculate the copy start address (destination). Copy end address is sp.
1984     __ Lsa(a2, sp, a0, kPointerSizeLog2);
1985 
1986     __ bind(&loop);
1987     __ lw(at, MemOperand(a2, -kPointerSize));
1988     __ sw(at, MemOperand(a2));
1989     __ Subu(a2, a2, Operand(kPointerSize));
1990     __ Branch(&loop, ne, a2, Operand(sp));
1991     // Adjust the actual number of arguments and remove the top element
1992     // (which is a copy of the last argument).
1993     __ Subu(a0, a0, Operand(1));
1994     __ Pop();
1995   }
1996 
1997   // 4. Call the callable.
1998   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1999 }
2000 
2001 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
2002   // ----------- S t a t e -------------
2003   //  -- a0     : argc
2004   //  -- sp[0]  : argumentsList
2005   //  -- sp[4]  : thisArgument
2006   //  -- sp[8]  : target
2007   //  -- sp[12] : receiver
2008   // -----------------------------------
2009 
2010   // 1. Load target into a1 (if present), argumentsList into a0 (if present),
2011   // remove all arguments from the stack (including the receiver), and push
2012   // thisArgument (if present) instead.
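  // Net effect (sketch): a1 = target or undefined; a2 = thisArgument or
  // undefined; a3 = argumentsList or undefined; the argument area collapses
  // to a single thisArgument slot and a0 ends up holding argumentsList.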
2013   {
2014     Label no_arg;
2015     Register scratch = t0;
2016     __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2017     __ mov(a2, a1);
2018     __ mov(a3, a1);
2019     __ sll(scratch, a0, kPointerSizeLog2);
2020     __ mov(a0, scratch);
2021     __ Subu(a0, a0, Operand(kPointerSize));
2022     __ Branch(&no_arg, lt, a0, Operand(zero_reg));
2023     __ Addu(a0, sp, Operand(a0));
2024     __ lw(a1, MemOperand(a0));  // target
2025     __ Subu(a0, a0, Operand(kPointerSize));
2026     __ Branch(&no_arg, lt, a0, Operand(sp));
2027     __ lw(a2, MemOperand(a0));  // thisArgument
2028     __ Subu(a0, a0, Operand(kPointerSize));
2029     __ Branch(&no_arg, lt, a0, Operand(sp));
2030     __ lw(a3, MemOperand(a0));  // argumentsList
2031     __ bind(&no_arg);
2032     __ Addu(sp, sp, Operand(scratch));
2033     __ sw(a2, MemOperand(sp));
2034     __ mov(a0, a3);
2035   }
2036 
2037   // ----------- S t a t e -------------
2038   //  -- a0    : argumentsList
2039   //  -- a1    : target
2040   //  -- sp[0] : thisArgument
2041   // -----------------------------------
2042 
2043   // 2. Make sure the target is actually callable.
2044   Label target_not_callable;
2045   __ JumpIfSmi(a1, &target_not_callable);
2046   __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
2047   __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
2048   __ And(t0, t0, Operand(1 << Map::kIsCallable));
2049   __ Branch(&target_not_callable, eq, t0, Operand(zero_reg));
2050 
2051   // 3a. Apply the target to the given argumentsList (passing undefined for
2052   // new.target).
2053   __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
2054   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
2055 
2056   // 3b. The target is not callable, throw an appropriate TypeError.
2057   __ bind(&target_not_callable);
2058   {
2059     __ sw(a1, MemOperand(sp));
2060     __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
2061   }
2062 }
2063 
2064 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
2065   // ----------- S t a t e -------------
2066   //  -- a0     : argc
2067   //  -- sp[0]  : new.target (optional)
2068   //  -- sp[4]  : argumentsList
2069   //  -- sp[8]  : target
2070   //  -- sp[12] : receiver
2071   // -----------------------------------
2072 
2073   // 1. Load target into a1 (if present), argumentsList into a0 (if present),
2074   // new.target into a3 (if present, otherwise use target), remove all
2075   // arguments from the stack (including the receiver), and push thisArgument
2076   // (if present) instead.
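  // Net effect (sketch): a1 = target (or undefined); a3 = new.target
  // (defaults to target); a0 = argumentsList; the remaining receiver slot is
  // overwritten with undefined, since [[Construct]] ignores the receiver.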
2077   {
2078     Label no_arg;
2079     Register scratch = t0;
2080     __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2081     __ mov(a2, a1);
2082     // Lsa() cannot be used here as the scratch value is used later.
2083     __ sll(scratch, a0, kPointerSizeLog2);
2084     __ Addu(a0, sp, Operand(scratch));
2085     __ sw(a2, MemOperand(a0));  // receiver
2086     __ Subu(a0, a0, Operand(kPointerSize));
2087     __ Branch(&no_arg, lt, a0, Operand(sp));
2088     __ lw(a1, MemOperand(a0));  // target
2089     __ mov(a3, a1);             // new.target defaults to target
2090     __ Subu(a0, a0, Operand(kPointerSize));
2091     __ Branch(&no_arg, lt, a0, Operand(sp));
2092     __ lw(a2, MemOperand(a0));  // argumentsList
2093     __ Subu(a0, a0, Operand(kPointerSize));
2094     __ Branch(&no_arg, lt, a0, Operand(sp));
2095     __ lw(a3, MemOperand(a0));  // new.target
2096     __ bind(&no_arg);
2097     __ Addu(sp, sp, Operand(scratch));
2098     __ mov(a0, a2);
2099   }
2100 
2101   // ----------- S t a t e -------------
2102   //  -- a0    : argumentsList
2103   //  -- a3    : new.target
2104   //  -- a1    : target
2105   //  -- sp[0] : receiver (undefined)
2106   // -----------------------------------
2107 
2108   // 2. Make sure the target is actually a constructor.
2109   Label target_not_constructor;
2110   __ JumpIfSmi(a1, &target_not_constructor);
2111   __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
2112   __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
2113   __ And(t0, t0, Operand(1 << Map::kIsConstructor));
2114   __ Branch(&target_not_constructor, eq, t0, Operand(zero_reg));
2115 
2116   // 3. Make sure the new.target is actually a constructor.
2117   Label new_target_not_constructor;
2118   __ JumpIfSmi(a3, &new_target_not_constructor);
2119   __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset));
2120   __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
2121   __ And(t0, t0, Operand(1 << Map::kIsConstructor));
2122   __ Branch(&new_target_not_constructor, eq, t0, Operand(zero_reg));
2123 
2124   // 4a. Construct the target with the given new.target and argumentsList.
2125   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
2126 
2127   // 4b. The target is not a constructor, throw an appropriate TypeError.
2128   __ bind(&target_not_constructor);
2129   {
2130     __ sw(a1, MemOperand(sp));
2131     __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
2132   }
2133 
2134   // 4c. The new.target is not a constructor, throw an appropriate TypeError.
2135   __ bind(&new_target_not_constructor);
2136   {
2137     __ sw(a3, MemOperand(sp));
2138     __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
2139   }
2140 }
2141 
2142 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
2143   __ sll(a0, a0, kSmiTagSize);
2144   __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2145   __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
2146   __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
2147                           kPointerSize));
2148 }
2149 
2150 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
2151   // ----------- S t a t e -------------
2152   //  -- v0 : result being passed through
2153   // -----------------------------------
2154   // Get the number of arguments passed (as a smi), tear down the frame and
2155   // then tear down the parameters.
2156   __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
2157                              kPointerSize)));
2158   __ mov(sp, fp);
2159   __ MultiPop(fp.bit() | ra.bit());
2160   __ Lsa(sp, sp, a1, kPointerSizeLog2 - kSmiTagSize);
2161   // Adjust for the receiver.
2162   __ Addu(sp, sp, Operand(kPointerSize));
2163 }
2164 
2165 // static
2166 void Builtins::Generate_Apply(MacroAssembler* masm) {
2167   // ----------- S t a t e -------------
2168   //  -- a0    : argumentsList
2169   //  -- a1    : target
2170   //  -- a3    : new.target (checked to be constructor or undefined)
2171   //  -- sp[0] : thisArgument
2172   // -----------------------------------
2173 
2174   // Create the list of arguments from the array-like argumentsList.
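  // Sketch of the cases below:
  //   - unmodified sloppy/strict arguments object: reuse its elements
  //     FixedArray directly (lengths must match);
  //   - fast JSArray with packed SMI or packed object elements: reuse its
  //     elements FixedArray;
  //   - anything else: ask Runtime::kCreateListFromArrayLike.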
2175   {
2176     Label create_arguments, create_array, create_runtime, done_create;
2177     __ JumpIfSmi(a0, &create_runtime);
2178 
2179     // Load the map of argumentsList into a2.
2180     __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
2181 
2182     // Load native context into t0.
2183     __ lw(t0, NativeContextMemOperand());
2184 
2185     // Check if argumentsList is an (unmodified) arguments object.
2186     __ lw(at, ContextMemOperand(t0, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
2187     __ Branch(&create_arguments, eq, a2, Operand(at));
2188     __ lw(at, ContextMemOperand(t0, Context::STRICT_ARGUMENTS_MAP_INDEX));
2189     __ Branch(&create_arguments, eq, a2, Operand(at));
2190 
2191     // Check if argumentsList is a fast JSArray.
2192     __ lw(v0, FieldMemOperand(a2, HeapObject::kMapOffset));
2193     __ lbu(v0, FieldMemOperand(v0, Map::kInstanceTypeOffset));
2194     __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE));
2195 
2196     // Ask the runtime to create the list (actually a FixedArray).
2197     __ bind(&create_runtime);
2198     {
2199       FrameScope scope(masm, StackFrame::INTERNAL);
2200       __ Push(a1, a3, a0);
2201       __ CallRuntime(Runtime::kCreateListFromArrayLike);
2202       __ mov(a0, v0);
2203       __ Pop(a1, a3);
2204       __ lw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
2205       __ SmiUntag(a2);
2206     }
2207     __ Branch(&done_create);
2208 
2209     // Try to create the list from an arguments object.
2210     __ bind(&create_arguments);
2211     __ lw(a2, FieldMemOperand(a0, JSArgumentsObject::kLengthOffset));
2212     __ lw(t0, FieldMemOperand(a0, JSObject::kElementsOffset));
2213     __ lw(at, FieldMemOperand(t0, FixedArray::kLengthOffset));
2214     __ Branch(&create_runtime, ne, a2, Operand(at));
2215     __ SmiUntag(a2);
2216     __ mov(a0, t0);
2217     __ Branch(&done_create);
2218 
2219     // Try to create the list from a JSArray object.
2220     __ bind(&create_array);
2221     __ lw(a2, FieldMemOperand(a2, Map::kBitField2Offset));
2222     __ DecodeField<Map::ElementsKindBits>(a2);
2223     STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2224     STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
2225     STATIC_ASSERT(FAST_ELEMENTS == 2);
2226     __ Branch(&create_runtime, hi, a2, Operand(FAST_ELEMENTS));
2227     __ Branch(&create_runtime, eq, a2, Operand(FAST_HOLEY_SMI_ELEMENTS));
2228     __ lw(a2, FieldMemOperand(a0, JSArray::kLengthOffset));
2229     __ lw(a0, FieldMemOperand(a0, JSArray::kElementsOffset));
2230     __ SmiUntag(a2);
2231 
2232     __ bind(&done_create);
2233   }
2234 
2235   // Check for stack overflow.
2236   {
2237     // Check the stack for overflow. We are not trying to catch interruptions
2238     // (i.e. debug break and preemption) here, so check the "real stack limit".
2239     Label done;
2240     __ LoadRoot(t0, Heap::kRealStackLimitRootIndex);
2241     // Make t0 the space we have left. The stack might already be overflowed
2242     // here, which will cause t0 to become negative.
2243     __ Subu(t0, sp, t0);
2244     // Check if the arguments will overflow the stack.
2245     __ sll(at, a2, kPointerSizeLog2);
2246     __ Branch(&done, gt, t0, Operand(at));  // Signed comparison.
2247     __ TailCallRuntime(Runtime::kThrowStackOverflow);
2248     __ bind(&done);
2249   }
2250 
2251   // ----------- S t a t e -------------
2252   //  -- a1    : target
2253   //  -- a0    : args (a FixedArray built from argumentsList)
2254   //  -- a2    : len (number of elements to push from args)
2255   //  -- a3    : new.target (checked to be constructor or undefined)
2256   //  -- sp[0] : thisArgument
2257   // -----------------------------------
2258 
2259   // Push arguments onto the stack (thisArgument is already on the stack).
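  // Sketch: for (t0 = 0; t0 < len; t0++) Push(args[t0]); then a0 = len.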
2260   {
2261     __ mov(t0, zero_reg);
2262     Label done, loop;
2263     __ bind(&loop);
2264     __ Branch(&done, eq, t0, Operand(a2));
2265     __ Lsa(at, a0, t0, kPointerSizeLog2);
2266     __ lw(at, FieldMemOperand(at, FixedArray::kHeaderSize));
2267     __ Push(at);
2268     __ Addu(t0, t0, Operand(1));
2269     __ Branch(&loop);
2270     __ bind(&done);
2271     __ Move(a0, t0);
2272   }
2273 
2274   // Dispatch to Call or Construct depending on whether new.target is undefined.
2275   {
2276     Label construct;
2277     __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2278     __ Branch(&construct, ne, a3, Operand(at));
2279     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2280     __ bind(&construct);
2281     __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2282   }
2283 }
2284 
2285 namespace {
2286 
2287 // Drops the top JavaScript frame and an arguments adaptor frame below it (if
2288 // present), preserving all the arguments prepared for the current call.
2289 // Does nothing if the debugger is currently active.
2290 // ES6 14.6.3. PrepareForTailCall
2291 //
2292 // Stack structure for the function g() tail calling f():
2293 //
2294 // ------- Caller frame: -------
2295 // |  ...
2296 // |  g()'s arg M
2297 // |  ...
2298 // |  g()'s arg 1
2299 // |  g()'s receiver arg
2300 // |  g()'s caller pc
2301 // ------- g()'s frame: -------
2302 // |  g()'s caller fp      <- fp
2303 // |  g()'s context
2304 // |  function pointer: g
2305 // |  -------------------------
2306 // |  ...
2307 // |  ...
2308 // |  f()'s arg N
2309 // |  ...
2310 // |  f()'s arg 1
2311 // |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
2312 // ----------------------
2313 //
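// In outline (a sketch of the code below): if tail-call elimination is
// disabled, do nothing; otherwise drop an interposed STUB frame (if any),
// determine the caller's argument count (from the adaptor frame if present,
// otherwise from the caller's formal parameter count), and shift the
// prepared arguments down over the caller's frame.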
2314 void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
2315                         Register scratch1, Register scratch2,
2316                         Register scratch3) {
2317   DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
2318   Comment cmnt(masm, "[ PrepareForTailCall");
2319 
2320   // Prepare for tail call only if ES2015 tail call elimination is enabled.
2321   Label done;
2322   ExternalReference is_tail_call_elimination_enabled =
2323       ExternalReference::is_tail_call_elimination_enabled_address(
2324           masm->isolate());
2325   __ li(at, Operand(is_tail_call_elimination_enabled));
2326   __ lb(scratch1, MemOperand(at));
2327   __ Branch(&done, eq, scratch1, Operand(zero_reg));
2328 
2329   // Drop possible interpreter handler/stub frame.
2330   {
2331     Label no_interpreter_frame;
2332     __ lw(scratch3,
2333           MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
2334     __ Branch(&no_interpreter_frame, ne, scratch3,
2335               Operand(Smi::FromInt(StackFrame::STUB)));
2336     __ lw(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2337     __ bind(&no_interpreter_frame);
2338   }
2339 
2340   // Check if next frame is an arguments adaptor frame.
2341   Register caller_args_count_reg = scratch1;
2342   Label no_arguments_adaptor, formal_parameter_count_loaded;
2343   __ lw(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2344   __ lw(scratch3,
2345         MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
2346   __ Branch(&no_arguments_adaptor, ne, scratch3,
2347             Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2348 
2349   // Drop current frame and load arguments count from arguments adaptor frame.
2350   __ mov(fp, scratch2);
2351   __ lw(caller_args_count_reg,
2352         MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
2353   __ SmiUntag(caller_args_count_reg);
2354   __ Branch(&formal_parameter_count_loaded);
2355 
2356   __ bind(&no_arguments_adaptor);
2357   // Load caller's formal parameter count
2358   __ lw(scratch1,
2359         MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
2360   __ lw(scratch1,
2361         FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
2362   __ lw(caller_args_count_reg,
2363         FieldMemOperand(scratch1,
2364                         SharedFunctionInfo::kFormalParameterCountOffset));
2365   __ SmiUntag(caller_args_count_reg);
2366 
2367   __ bind(&formal_parameter_count_loaded);
2368 
2369   ParameterCount callee_args_count(args_reg);
2370   __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
2371                         scratch3);
2372   __ bind(&done);
2373 }
2374 }  // namespace
2375 
2376 // static
2377 void Builtins::Generate_CallFunction(MacroAssembler* masm,
2378                                      ConvertReceiverMode mode,
2379                                      TailCallMode tail_call_mode) {
2380   // ----------- S t a t e -------------
2381   //  -- a0 : the number of arguments (not including the receiver)
2382   //  -- a1 : the function to call (checked to be a JSFunction)
2383   // -----------------------------------
2384   __ AssertFunction(a1);
2385 
2386   // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2387   // Check that the function is not a "classConstructor".
2388   Label class_constructor;
2389   __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
2390   __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFunctionKindByteOffset));
2391   __ And(at, a3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte));
2392   __ Branch(&class_constructor, ne, at, Operand(zero_reg));
2393 
2394   // Enter the context of the function; ToObject has to run in the function
2395   // context, and we also need to take the global proxy from the function
2396   // context in case of conversion.
2397   STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
2398                 SharedFunctionInfo::kStrictModeByteOffset);
2399   __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
2400   // We need to convert the receiver for non-native sloppy mode functions.
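  // Conversion sketch (for the non-native, sloppy case):
  //   if (receiver is null or undefined) receiver = global proxy;
  //   else if (!receiver->IsJSReceiver()) receiver = ToObject(receiver);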
2401   Label done_convert;
2402   __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset));
2403   __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
2404                          (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
2405   __ Branch(&done_convert, ne, at, Operand(zero_reg));
2406   {
2407     // ----------- S t a t e -------------
2408     //  -- a0 : the number of arguments (not including the receiver)
2409     //  -- a1 : the function to call (checked to be a JSFunction)
2410     //  -- a2 : the shared function info.
2411     //  -- cp : the function context.
2412     // -----------------------------------
2413 
2414     if (mode == ConvertReceiverMode::kNullOrUndefined) {
2415       // Patch receiver to global proxy.
2416       __ LoadGlobalProxy(a3);
2417     } else {
2418       Label convert_to_object, convert_receiver;
2419       __ Lsa(at, sp, a0, kPointerSizeLog2);
2420       __ lw(a3, MemOperand(at));
2421       __ JumpIfSmi(a3, &convert_to_object);
2422       STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2423       __ GetObjectType(a3, t0, t0);
2424       __ Branch(&done_convert, hs, t0, Operand(FIRST_JS_RECEIVER_TYPE));
2425       if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2426         Label convert_global_proxy;
2427         __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex,
2428                       &convert_global_proxy);
2429         __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object);
2430         __ bind(&convert_global_proxy);
2431         {
2432           // Patch receiver to global proxy.
2433           __ LoadGlobalProxy(a3);
2434         }
2435         __ Branch(&convert_receiver);
2436       }
2437       __ bind(&convert_to_object);
2438       {
2439         // Convert receiver using ToObject.
2440         // TODO(bmeurer): Inline the allocation here to avoid building the frame
2441         // in the fast case? (fall back to AllocateInNewSpace?)
2442         FrameScope scope(masm, StackFrame::INTERNAL);
2443         __ sll(a0, a0, kSmiTagSize);  // Smi tagged.
2444         __ Push(a0, a1);
2445         __ mov(a0, a3);
2446         __ Push(cp);
2447         __ Call(masm->isolate()->builtins()->ToObject(),
2448                 RelocInfo::CODE_TARGET);
2449         __ Pop(cp);
2450         __ mov(a3, v0);
2451         __ Pop(a0, a1);
2452         __ sra(a0, a0, kSmiTagSize);  // Un-tag.
2453       }
2454       __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
2455       __ bind(&convert_receiver);
2456     }
2457     __ Lsa(at, sp, a0, kPointerSizeLog2);
2458     __ sw(a3, MemOperand(at));
2459   }
2460   __ bind(&done_convert);
2461 
2462   // ----------- S t a t e -------------
2463   //  -- a0 : the number of arguments (not including the receiver)
2464   //  -- a1 : the function to call (checked to be a JSFunction)
2465   //  -- a2 : the shared function info.
2466   //  -- cp : the function context.
2467   // -----------------------------------
2468 
2469   if (tail_call_mode == TailCallMode::kAllow) {
2470     PrepareForTailCall(masm, a0, t0, t1, t2);
2471   }
2472 
2473   __ lw(a2,
2474         FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
2475   __ sra(a2, a2, kSmiTagSize);  // Un-tag.
2476   ParameterCount actual(a0);
2477   ParameterCount expected(a2);
2478   __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION,
2479                         CheckDebugStepCallWrapper());
2480 
2481   // The function is a "classConstructor", need to raise an exception.
2482   __ bind(&class_constructor);
2483   {
2484     FrameScope frame(masm, StackFrame::INTERNAL);
2485     __ Push(a1);
2486     __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2487   }
2488 }
2489 
2490 // static
2491 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
2492                                               TailCallMode tail_call_mode) {
2493   // ----------- S t a t e -------------
2494   //  -- a0 : the number of arguments (not including the receiver)
2495   //  -- a1 : the function to call (checked to be a JSBoundFunction)
2496   // -----------------------------------
2497   __ AssertBoundFunction(a1);
2498 
2499   if (tail_call_mode == TailCallMode::kAllow) {
2500     PrepareForTailCall(masm, a0, t0, t1, t2);
2501   }
2502 
2503   // Patch the receiver to [[BoundThis]].
2504   {
2505     __ lw(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset));
2506     __ Lsa(t0, sp, a0, kPointerSizeLog2);
2507     __ sw(at, MemOperand(t0));
2508   }
2509 
2510   // Load [[BoundArguments]] into a2 and length of that into t0.
2511   __ lw(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
2512   __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
2513   __ SmiUntag(t0);
2514 
2515   // ----------- S t a t e -------------
2516   //  -- a0 : the number of arguments (not including the receiver)
2517   //  -- a1 : the function to call (checked to be a JSBoundFunction)
2518   //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
2519   //  -- t0 : the number of [[BoundArguments]]
2520   // -----------------------------------
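  // The next three blocks splice the [[BoundArguments]] in front of the
  // existing arguments, roughly (sketch):
  //   sp -= num_bound * kPointerSize;        // reserve, with stack check
  //   move argc + 1 slots (args + receiver) down to the new sp;
  //   copy the bound arguments into the gap; // a0 += num_bound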
2521 
2522   // Reserve stack space for the [[BoundArguments]].
2523   {
2524     Label done;
2525     __ sll(t1, t0, kPointerSizeLog2);
2526     __ Subu(sp, sp, Operand(t1));
2527     // Check the stack for overflow. We are not trying to catch interruptions
2528     // (i.e. debug break and preemption) here, so check the "real stack limit".
2529     __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
2530     __ Branch(&done, gt, sp, Operand(at));  // Signed comparison.
2531     // Restore the stack pointer.
2532     __ Addu(sp, sp, Operand(t1));
2533     {
2534       FrameScope scope(masm, StackFrame::MANUAL);
2535       __ EnterFrame(StackFrame::INTERNAL);
2536       __ CallRuntime(Runtime::kThrowStackOverflow);
2537     }
2538     __ bind(&done);
2539   }
2540 
2541   // Relocate arguments down the stack.
2542   {
2543     Label loop, done_loop;
2544     __ mov(t1, zero_reg);
2545     __ bind(&loop);
2546     __ Branch(&done_loop, gt, t1, Operand(a0));
2547     __ Lsa(t2, sp, t0, kPointerSizeLog2);
2548     __ lw(at, MemOperand(t2));
2549     __ Lsa(t2, sp, t1, kPointerSizeLog2);
2550     __ sw(at, MemOperand(t2));
2551     __ Addu(t0, t0, Operand(1));
2552     __ Addu(t1, t1, Operand(1));
2553     __ Branch(&loop);
2554     __ bind(&done_loop);
2555   }
2556 
2557   // Copy [[BoundArguments]] to the stack (below the arguments).
2558   {
2559     Label loop, done_loop;
2560     __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
2561     __ SmiUntag(t0);
2562     __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2563     __ bind(&loop);
2564     __ Subu(t0, t0, Operand(1));
2565     __ Branch(&done_loop, lt, t0, Operand(zero_reg));
2566     __ Lsa(t1, a2, t0, kPointerSizeLog2);
2567     __ lw(at, MemOperand(t1));
2568     __ Lsa(t1, sp, a0, kPointerSizeLog2);
2569     __ sw(at, MemOperand(t1));
2570     __ Addu(a0, a0, Operand(1));
2571     __ Branch(&loop);
2572     __ bind(&done_loop);
2573   }
2574 
2575   // Call the [[BoundTargetFunction]] via the Call builtin.
2576   __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
2577   __ li(at, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
2578                                       masm->isolate())));
2579   __ lw(at, MemOperand(at));
2580   __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
2581   __ Jump(at);
2582 }
2583 
2584 // static
2585 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2586                              TailCallMode tail_call_mode) {
2587   // ----------- S t a t e -------------
2588   //  -- a0 : the number of arguments (not including the receiver)
2589   //  -- a1 : the target to call (can be any Object).
2590   // -----------------------------------
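  // Dispatch sketch:
  //   JSFunction      -> CallFunction builtin
  //   JSBoundFunction -> CallBoundFunction builtin
  //   JSProxy         -> Runtime::kJSProxyCall
  //   other callable  -> CALL_AS_FUNCTION_DELEGATE via CallFunction
  //   non-callable    -> Runtime::kThrowCalledNonCallable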
2591 
2592   Label non_callable, non_function, non_smi;
2593   __ JumpIfSmi(a1, &non_callable);
2594   __ bind(&non_smi);
2595   __ GetObjectType(a1, t1, t2);
2596   __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
2597           RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));
2598   __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
2599           RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));
2600 
2601   // Check if target has a [[Call]] internal method.
2602   __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
2603   __ And(t1, t1, Operand(1 << Map::kIsCallable));
2604   __ Branch(&non_callable, eq, t1, Operand(zero_reg));
2605 
2606   __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE));
2607 
2608   // 0. Prepare for tail call if necessary.
2609   if (tail_call_mode == TailCallMode::kAllow) {
2610     PrepareForTailCall(masm, a0, t0, t1, t2);
2611   }
2612 
2613   // 1. Runtime fallback for Proxy [[Call]].
2614   __ Push(a1);
2615   // Increase the arguments size to include the pushed function and the
2616   // existing receiver on the stack.
2617   __ Addu(a0, a0, 2);
2618   // Tail-call to the runtime.
2619   __ JumpToExternalReference(
2620       ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
2621 
2622   // 2. Call to something else, which might have a [[Call]] internal method (if
2623   // not we raise an exception).
2624   __ bind(&non_function);
2625   // Overwrite the original receiver with the (original) target.
2626   __ Lsa(at, sp, a0, kPointerSizeLog2);
2627   __ sw(a1, MemOperand(at));
2628   // Let the "call_as_function_delegate" take care of the rest.
2629   __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1);
2630   __ Jump(masm->isolate()->builtins()->CallFunction(
2631               ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
2632           RelocInfo::CODE_TARGET);
2633 
2634   // 3. Call to something that is not callable.
2635   __ bind(&non_callable);
2636   {
2637     FrameScope scope(masm, StackFrame::INTERNAL);
2638     __ Push(a1);
2639     __ CallRuntime(Runtime::kThrowCalledNonCallable);
2640   }
2641 }
2642 
2643 // static
2644 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2645   // ----------- S t a t e -------------
2646   //  -- a0 : the number of arguments (not including the receiver)
2647   //  -- a1 : the constructor to call (checked to be a JSFunction)
2648   //  -- a3 : the new target (checked to be a constructor)
2649   // -----------------------------------
2650   __ AssertFunction(a1);
2651 
2652   // The calling convention for function-specific ConstructStubs requires
2653   // a2 to contain either an AllocationSite or undefined.
2654   __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2655 
2656   // Tail call to the function-specific construct stub (still in the caller
2657   // context at this point).
2658   __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
2659   __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset));
2660   __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
2661   __ Jump(at);
2662 }

// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(a1);

  // Load [[BoundArguments]] into a2 and its length into t0.
  __ lw(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
  __ SmiUntag(t0);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
  //  -- a3 : the new target (checked to be a constructor)
  //  -- t0 : the number of [[BoundArguments]]
  // -----------------------------------

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ sll(t1, t0, kPointerSizeLog2);
    __ Subu(sp, sp, Operand(t1));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
    __ Branch(&done, gt, sp, Operand(at));  // Signed comparison.
    // Restore the stack pointer.
    __ Addu(sp, sp, Operand(t1));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

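  // The two loops below splice the [[BoundArguments]] into the stack: first
  // the existing arguments move down by t0 slots, then the bound arguments
  // fill the gap. Illustrative example (values hypothetical): with a0 == 2
  // and t0 == 3, the words at sp[12] and sp[16] move to sp[0] and sp[4], and
  // the three bound arguments land in sp[8], sp[12] and sp[16] (first bound
  // argument at the highest address, matching argument order on the stack).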
  // Relocate arguments down the stack.
  {
    Label loop, done_loop;
    __ mov(t1, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, ge, t1, Operand(a0));
    __ Lsa(t2, sp, t0, kPointerSizeLog2);
    __ lw(at, MemOperand(t2));
    __ Lsa(t2, sp, t1, kPointerSizeLog2);
    __ sw(at, MemOperand(t2));
    __ Addu(t0, t0, Operand(1));
    __ Addu(t1, t1, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }
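  // The gap for the bound arguments now starts at sp + a0 * kPointerSize.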

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ SmiUntag(t0);
    __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Subu(t0, t0, Operand(1));
    __ Branch(&done_loop, lt, t0, Operand(zero_reg));
    __ Lsa(t1, a2, t0, kPointerSizeLog2);
    __ lw(at, MemOperand(t1));
    __ Lsa(t1, sp, a0, kPointerSizeLog2);
    __ sw(at, MemOperand(t1));
    __ Addu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }
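  // a0 was incremented once per copied slot and now counts the call
  // arguments plus the [[BoundArguments]].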

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label skip_load;
    __ Branch(&skip_load, ne, a1, Operand(a3));
    __ lw(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&skip_load);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ li(at, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ lw(at, MemOperand(at));
  __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}

// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSProxy)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(a1, a3);
  // Include the pushed new_target, constructor and the receiver.
  __ Addu(a0, a0, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (can be any Object)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(a1, &non_constructor);

  // Dispatch based on instance type.
  __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset));
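  // t1 (the map) and t2 (the instance type) stay live across the dispatch
  // below, so each check can reuse them without reloading.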
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));

  // Check if target has a [[Construct]] internal method.
  __ lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t3, t3, Operand(1 << Map::kIsConstructor));
  __ Branch(&non_constructor, eq, t3, Operand(zero_reg));

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Only dispatch to proxies after checking whether they are constructors.
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq, t2, Operand(JS_PROXY_TYPE));

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ Lsa(at, sp, a0, kPointerSizeLog2);
    __ sw(a1, MemOperand(at));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : requested object size (untagged)
  //  -- ra : return address
  // -----------------------------------
  __ SmiTag(a0);
  __ Push(a0);
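  // There is no JavaScript context here; a Smi zero in cp serves as a
  // GC-safe placeholder (the same idiom is used in the builtins below).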
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : requested object size (untagged)
  //  -- ra : return address
  // -----------------------------------
  __ SmiTag(a0);
  __ Move(a1, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ Push(a0, a1);
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}

// static
void Builtins::Generate_Abort(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : message_id as Smi
  //  -- ra : return address
  // -----------------------------------
  __ Push(a0);
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAbort);
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  //  -- a3: new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ Branch(&dont_adapt_arguments, eq, a2,
            Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use Uless as the number of arguments should always be greater than 0.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, a2, t1, at, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into t1.
    __ Lsa(a0, fp, a0, kPointerSizeLog2 - kSmiTagSize);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ sll(t1, a2, kPointerSizeLog2);
    __ subu(t1, a0, t1);
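    // The loop below copies from a0 down to t1 inclusive, i.e. a2 + 1 words:
    // the receiver plus the first a2 (expected) arguments. Actual arguments
    // beyond a2 are left behind in the caller's frame.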

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // t1: copy end address

    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));
    __ push(t0);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t1));
    __ addiu(a0, a0, -kPointerSize);  // In delay slot.
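    // USE_DELAY_SLOT places the addiu in the branch delay slot, so it
    // executes on every iteration (including the final, not-taken one),
    // while the branch still compares the pre-decrement value of a0.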

    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, a2, t1, at, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into t3.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ Lsa(a0, fp, a0, kPointerSizeLog2 - kSmiTagSize);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Addu(t3, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // t3: copy end address
    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Subu(sp, sp, kPointerSize);
    __ Subu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
    __ sw(t0, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ sll(t2, a2, kPointerSizeLog2);
    __ Subu(t1, fp, Operand(t2));
    // Adjust for frame.
    __ Subu(t1, t1, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));
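    // t1 is the lowest address that must hold undefined; the delay-slot
    // store below runs on every iteration, including the final one where
    // sp == t1.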

    Label fill;
    __ bind(&fill);
    __ Subu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(t1));
    __ sw(t0, MemOperand(sp));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mov(a0, a2);
  // a0 : expected number of arguments
  // a1 : function (passed through to callee)
  // a3 : new target (passed through to callee)
  __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Call(t0);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Jump(t0);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ break_(0xCC);
  }
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS