// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM64

#include "src/arm64/frames-arm64.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}

// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
  //  -- x0                 : number of arguments excluding receiver
  //  -- x1                 : target
  //  -- x3                 : new target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument
  //  -- sp[8 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(x1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // JumpToExternalReference expects x0 to contain the number of arguments
  // including the receiver and the extra arguments.
  const int num_extra_args = 3;
  __ Add(x0, x0, num_extra_args + 1);

  // Insert extra arguments.
  __ SmiTag(x0);
  __ Push(x0, x1, x3);
  __ SmiUntag(x0);

  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
                             exit_frame_type == BUILTIN_EXIT);
}
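
// After the pushes above, the C++ builtin sees the extra arguments below the
// original ones. A sketch of the resulting layout, assuming the usual
// descending-stack Push order (first operand ends up at the highest address):
//
//   sp[0]  : new target
//   sp[8]  : target
//   sp[16] : argument count (smi-tagged)
//   sp[24] : last argument
//   ...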

void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_InternalArrayCode");
  Label generic_array_code;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
    __ Tst(x10, kSmiTagMask);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
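
// InternalArray is not exposed to user script; it is the Array variant used
// by V8's own (self-hosted) builtins, so that user code monkey-patching the
// real Array cannot interfere with them.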

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_ArrayCode");
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
    __ Tst(x10, kSmiTagMask);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
  __ Mov(x3, x1);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
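
// This entry point handles plain calls such as
//
//   var a = Array(3);  // no `new`
//
// which the spec makes behave like `new Array(3)`. The undefined value loaded
// into x2 stands in for the allocation-site feedback, none being available
// here.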

// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero-based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_MathMaxMin");

  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
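
  // These defaults are the identity elements of the two operations, which is
  // also why `Math.min()` with no arguments evaluates to Infinity and
  // `Math.max()` with no arguments evaluates to -Infinity.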

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in x5 and the double value in d5.
  __ LoadRoot(x5, root_index);
  __ Ldr(d5, FieldMemOperand(x5, HeapNumber::kValueOffset));

  Label done_loop, loop;
  __ mov(x4, x0);
  __ Bind(&loop);
  {
    // Check if all parameters done.
    __ Subs(x4, x4, 1);
    __ B(lt, &done_loop);

    // Load the next parameter tagged value into x2.
    __ Peek(x2, Operand(x4, LSL, kPointerSizeLog2));

    // Load the double value of the parameter into d2, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert_smi, convert_number, done_convert;
    __ JumpIfSmi(x2, &convert_smi);
    __ JumpIfHeapNumber(x2, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(x0);
      __ SmiTag(x4);
      __ EnterBuiltinFrame(cp, x1, x0);
      __ Push(x5, x4);
      __ Mov(x0, x2);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ Mov(x2, x0);
      __ Pop(x4, x5);
      __ LeaveBuiltinFrame(cp, x1, x0);
      __ SmiUntag(x4);
      __ SmiUntag(x0);
      {
        // Restore the double accumulator value (d5).
        Label done_restore;
        __ SmiUntagToDouble(d5, x5, kSpeculativeUntag);
        __ JumpIfSmi(x5, &done_restore);
        __ Ldr(d5, FieldMemOperand(x5, HeapNumber::kValueOffset));
        __ Bind(&done_restore);
      }
    }
    __ AssertNumber(x2);
    __ JumpIfSmi(x2, &convert_smi);

    __ Bind(&convert_number);
    __ Ldr(d2, FieldMemOperand(x2, HeapNumber::kValueOffset));
    __ B(&done_convert);

    __ Bind(&convert_smi);
    __ SmiUntagToDouble(d2, x2);
    __ Bind(&done_convert);

    // We can use a single fmin/fmax for the operation itself, but we then need
    // to work out which HeapNumber (or smi) the result came from.
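    // The trick: the raw bits of the accumulator are moved into x11 before
    // the operation and the raw bits of the result into x10 afterwards. If
    // the two bit patterns are equal, the accumulator won and the Csel keeps
    // the tagged value in x5; otherwise the parameter in x2 supplied the
    // result and becomes the new tagged accumulator.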
    __ Fmov(x11, d5);
    if (kind == MathMaxMinKind::kMin) {
      __ Fmin(d5, d5, d2);
    } else {
      DCHECK(kind == MathMaxMinKind::kMax);
      __ Fmax(d5, d5, d2);
    }
    __ Fmov(x10, d5);
    __ Cmp(x10, x11);
    __ Csel(x5, x5, x2, eq);
    __ B(&loop);
  }

  __ Bind(&done_loop);
  // Drop all slots, including the receiver.
  __ Add(x0, x0, 1);
  __ Drop(x0);
  __ Mov(x0, x5);
  __ Ret();
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_NumberConstructor");

  // 1. Load the first argument into x0.
  Label no_arguments;
  {
    __ Cbz(x0, &no_arguments);
    __ Mov(x2, x0);  // Store argc in x2.
    __ Sub(x0, x0, 1);
    __ Ldr(x0, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
  }

  // 2a. Convert first argument to number.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(x2);
    __ EnterBuiltinFrame(cp, x1, x2);
    __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, x1, x2);
    __ SmiUntag(x2);
  }

  {
    // Drop all arguments.
    __ Drop(x2);
  }

  // 2b. No arguments, return +0 (already in x0).
  __ Bind(&no_arguments);
  __ Drop(1);
  __ Ret();
}
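
// This implements `Number` called as a plain function, which produces a
// primitive, e.g.:
//
//   Number("42")                     // 42
//   Number()                         // +0
//   typeof Number("42") === "number"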

// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- x3                     : new target
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_NumberConstructor_ConstructStub");

  // 1. Make sure we operate in the context of the called function.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // 2. Load the first argument into x2.
  {
    Label no_arguments, done;
    __ Move(x6, x0);  // Store argc in x6.
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
    __ B(&done);
    __ Bind(&no_arguments);
    __ Mov(x2, Smi::kZero);
    __ Bind(&done);
  }

  // 3. Make sure x2 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(x2, &done_convert);
    __ JumpIfObjectType(x2, x4, x4, HEAP_NUMBER_TYPE, &done_convert, eq);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(x6);
      __ EnterBuiltinFrame(cp, x1, x6);
      __ Push(x3);
      __ Move(x0, x2);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ Move(x2, x0);
      __ Pop(x3);
      __ LeaveBuiltinFrame(cp, x1, x6);
      __ SmiUntag(x6);
    }
    __ Bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ Cmp(x1, x3);
  __ B(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(x0, x1, x2, x4, x5, &new_object);
  __ B(&drop_frame_and_ret);

  // 6. Fall back to the runtime to create a new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    FastNewObjectStub stub(masm->isolate());
    __ SmiTag(x6);
    __ EnterBuiltinFrame(cp, x1, x6);
    __ Push(x2);  // first argument
    __ CallStub(&stub);
    __ Pop(x2);
    __ LeaveBuiltinFrame(cp, x1, x6);
    __ SmiUntag(x6);
  }
  __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(x6);
    __ Drop(1);
    __ Ret();
  }
}
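
// In contrast to the plain call above, the construct path yields a wrapper
// object, e.g.:
//
//   var n = new Number(42);
//   typeof n === "object"
//   n.valueOf() === 42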

// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_StringConstructor");

  // 1. Load the first argument into x0.
  Label no_arguments;
  {
    __ Cbz(x0, &no_arguments);
    __ Mov(x2, x0);  // Store argc in x2.
    __ Sub(x0, x0, 1);
    __ Ldr(x0, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
  }

  // 2a. At least one argument, return x0 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label drop_frame_and_ret, to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(x0, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(x0, x3, x3, FIRST_NONSTRING_TYPE);
    __ B(hi, &to_string);
    __ B(eq, &symbol_descriptive_string);
    __ b(&drop_frame_and_ret);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ Bind(&no_arguments);
  {
    __ LoadRoot(x0, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ Ret();
  }

  // 3a. Convert x0 to a string.
  __ Bind(&to_string);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(x2);
    __ EnterBuiltinFrame(cp, x1, x2);
    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, x1, x2);
    __ SmiUntag(x2);
  }
  __ b(&drop_frame_and_ret);

  // 3b. Convert symbol in x0 to a string.
  __ Bind(&symbol_descriptive_string);
  {
    __ Drop(x2);
    __ Drop(1);
    __ Push(x0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(x2);
    __ Drop(1);
    __ Ret();
  }
}
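
// The symbol path above is what makes `String` the one sanctioned way to
// stringify a symbol, e.g.:
//
//   String(Symbol("foo"))   // "Symbol(foo)"
//   Symbol("foo") + ""      // throws TypeError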

// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- x3                     : new target
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_StringConstructor_ConstructStub");

  // 1. Make sure we operate in the context of the called function.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // 2. Load the first argument into x2.
  {
    Label no_arguments, done;
    __ mov(x6, x0);  // Store argc in x6.
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
    __ B(&done);
    __ Bind(&no_arguments);
    __ LoadRoot(x2, Heap::kempty_stringRootIndex);
    __ Bind(&done);
  }

  // 3. Make sure x2 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(x2, &convert);
    __ JumpIfObjectType(x2, x4, x4, FIRST_NONSTRING_TYPE, &done_convert, lo);
    __ Bind(&convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(x6);
      __ EnterBuiltinFrame(cp, x1, x6);
      __ Push(x3);
      __ Move(x0, x2);
      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
      __ Move(x2, x0);
      __ Pop(x3);
      __ LeaveBuiltinFrame(cp, x1, x6);
      __ SmiUntag(x6);
    }
    __ Bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ Cmp(x1, x3);
  __ B(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(x0, x1, x2, x4, x5, &new_object);
  __ B(&drop_frame_and_ret);

  // 6. Fall back to the runtime to create a new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    FastNewObjectStub stub(masm->isolate());
    __ SmiTag(x6);
    __ EnterBuiltinFrame(cp, x1, x6);
    __ Push(x2);  // first argument
    __ CallStub(&stub);
    __ Pop(x2);
    __ LeaveBuiltinFrame(cp, x1, x6);
    __ SmiUntag(x6);
  }
  __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(x6);
    __ Drop(1);
    __ Ret();
  }
}
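
// As with Number, the construct path produces a wrapper object, e.g.:
//
//   var s = new String("ab");
//   typeof s === "object"
//   s.valueOf() === "ab"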

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset));
  __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x2);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- x0 : argument count (preserved for callee)
  //  -- x1 : target function (preserved for callee)
  //  -- x3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target.
    // Push another copy as a parameter to the runtime call.
    __ SmiTag(x0);
    __ Push(x0, x1, x3, x1);

    __ CallRuntime(function_id, 1);
    __ Move(x2, x0);

    // Restore target function and new target.
    __ Pop(x3, x1, x0);
    __ SmiUntag(x0);
  }

  __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x2);
}
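
// Note the SmiTag/SmiUntag pair around the runtime call: the raw argument
// count in x0 is turned into a smi before it is pushed, so that everything
// the GC might scan on the stack during the call is a valid tagged value.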

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However, not
  // checking may delay installing ready functions, and always checking would
  // be quite expensive. A good compromise is to first check against stack
  // limit as a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
  __ B(hs, &ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ Bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

namespace {

void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
                                    bool create_implicit_receiver,
                                    bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- x1     : constructor function
  //  -- x3     : new target
  //  -- lr     : return address
  //  -- cp     : context pointer
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the four incoming parameters on the stack.
    Register argc = x0;
    Register constructor = x1;
    Register new_target = x3;

    // Preserve the incoming parameters on the stack.
    __ SmiTag(argc);
    __ Push(cp, argc);

    if (create_implicit_receiver) {
      // Allocate the new receiver object.
      __ Push(constructor, new_target);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Mov(x4, x0);
      __ Pop(new_target, constructor);

      // ----------- S t a t e -------------
      //  -- x1: constructor function
      //  -- x3: new target
      //  -- x4: newly allocated object
      // -----------------------------------

      // Reload the number of arguments from the stack.
      // Set it up in x0 for the function call below.
      // jssp[0]: number of arguments (smi-tagged)
      __ Peek(argc, 0);  // Load number of arguments.
    }

    __ SmiUntag(argc);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(x4, x4);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset);

    // Copy arguments and receiver to the expression stack.
    // Copy 2 values every loop to use ldp/stp.
    // x0: number of arguments
    // x1: constructor function
    // x2: address of last argument (caller sp)
    // x3: new target
    // jssp[0]: receiver
    // jssp[1]: receiver
    // jssp[2]: number of arguments (smi-tagged)
    // Compute the start address of the copy in x4.
    __ Add(x4, x2, Operand(argc, LSL, kPointerSizeLog2));
    Label loop, entry, done_copying_arguments;
    __ B(&entry);
    __ Bind(&loop);
    __ Ldp(x10, x11, MemOperand(x4, -2 * kPointerSize, PreIndex));
    __ Push(x11, x10);
    __ Bind(&entry);
    __ Cmp(x4, x2);
    __ B(gt, &loop);
    // Because we copied values 2 by 2 we may have copied one extra value.
    // Drop it if that is the case.
    __ B(eq, &done_copying_arguments);
    __ Drop(1);
    __ Bind(&done_copying_arguments);

    // Call the function.
    // x0: number of arguments
    // x1: constructor function
    // x3: new target
    ParameterCount actual(argc);
    __ InvokeFunction(constructor, new_target, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore the context from the frame.
    // x0: result
    // jssp[0]: receiver
    // jssp[1]: number of arguments (smi-tagged)
    __ Ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // x0: result
      // jssp[0]: receiver (newly allocated object)
      // jssp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(x0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ JumpIfObjectType(x0, x1, x3, FIRST_JS_RECEIVER_TYPE, &exit, ge);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ Bind(&use_receiver);
      __ Peek(x0, 0);

      // Remove the receiver from the stack, remove caller arguments, and
      // return.
      __ Bind(&exit);
      // x0: result
      // jssp[0]: receiver (newly allocated object)
      // jssp[1]: number of arguments (smi-tagged)
      __ Peek(x1, 1 * kXRegSize);
    } else {
      __ Peek(x1, 0);
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(x0, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ Bind(&dont_throw);
  }

  __ DropBySMI(x1);
  __ Drop(1);
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, x1, x2);
  }
  __ Ret();
}
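
// The receiver-vs-result selection above implements the usual JavaScript
// `new` semantics, e.g.:
//
//   function A() { return {a: 1}; }  // `new A()` is the returned object
//   function B() { return 42; }      // `new B()` is the implicit receiver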

}  // namespace

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}

void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(x1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the value to pass to the generator
  //  -- x1 : the JSGeneratorObject to resume
  //  -- x2 : the resume mode (tagged)
  //  -- lr : return address
  // -----------------------------------
  __ AssertGeneratorObject(x1);

  // Store input value into generator object.
  __ Str(x0, FieldMemOperand(x1, JSGeneratorObject::kInputOrDebugPosOffset));
  __ RecordWriteField(x1, JSGeneratorObject::kInputOrDebugPosOffset, x0, x3,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ Str(x2, FieldMemOperand(x1, JSGeneratorObject::kResumeModeOffset));

  // Load suspended function and context.
  __ Ldr(cp, FieldMemOperand(x1, JSGeneratorObject::kContextOffset));
  __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(masm->isolate());
  STATIC_ASSERT(StepFrame > StepIn);
  __ Mov(x10, Operand(last_step_action));
  __ Ldrsb(x10, MemOperand(x10));
  __ CompareAndBranch(x10, Operand(StepIn), ge, &prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ Mov(x10, Operand(debug_suspended_generator));
  __ Ldr(x10, MemOperand(x10));
  __ CompareAndBranch(x10, Operand(x1), eq,
                      &prepare_step_in_suspended_generator);
  __ Bind(&stepping_prepared);

  // Push receiver.
  __ Ldr(x5, FieldMemOperand(x1, JSGeneratorObject::kReceiverOffset));
  __ Push(x5);

  // ----------- S t a t e -------------
  //  -- x1      : the JSGeneratorObject to resume
  //  -- x2      : the resume mode (tagged)
  //  -- x4      : generator function
  //  -- cp      : generator context
  //  -- lr      : return address
  //  -- jssp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ Ldr(x10, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(w10,
         FieldMemOperand(x10, SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadRoot(x11, Heap::kTheHoleValueRootIndex);
  __ PushMultipleTimes(x11, w10);

  // Dispatch on the kind of generator object.
  Label old_generator;
  __ Ldr(x3, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset));
  __ CompareObjectType(x3, x3, x3, BYTECODE_ARRAY_TYPE);
  __ B(ne, &old_generator);

  // New-style (ignition/turbofan) generator object
  {
    __ Ldr(x0, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(w0, FieldMemOperand(
                   x0, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(x3, x1);
    __ Move(x1, x4);
    __ Ldr(x5, FieldMemOperand(x1, JSFunction::kCodeEntryOffset));
    __ Jump(x5);
  }

  // Old-style (full-codegen) generator object
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(lr, fp);
    __ Move(fp, jssp);
    __ Push(cp, x4);

    // Restore the operand stack.
    __ Ldr(x0, FieldMemOperand(x1, JSGeneratorObject::kOperandStackOffset));
    __ Ldr(w3, UntagSmiFieldMemOperand(x0, FixedArray::kLengthOffset));
    __ Add(x0, x0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ Add(x3, x0, Operand(x3, LSL, kPointerSizeLog2));
    {
      Label done_loop, loop;
      __ Bind(&loop);
      __ Cmp(x0, x3);
      __ B(eq, &done_loop);
      __ Ldr(x10, MemOperand(x0, kPointerSize, PostIndex));
      __ Push(x10);
      __ B(&loop);
      __ Bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(x10, Heap::kEmptyFixedArrayRootIndex);
    __ Str(x10, FieldMemOperand(x1, JSGeneratorObject::kOperandStackOffset));

    // Resume the generator function at the continuation.
    __ Ldr(x10, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(x10, FieldMemOperand(x10, SharedFunctionInfo::kCodeOffset));
    __ Add(x10, x10, Code::kHeaderSize - kHeapObjectTag);
    __ Ldrsw(x11, UntagSmiFieldMemOperand(
                      x1, JSGeneratorObject::kContinuationOffset));
    __ Add(x10, x10, x11);
    __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
    __ Str(x12, FieldMemOperand(x1, JSGeneratorObject::kContinuationOffset));
    __ Move(x0, x1);  // Continuation expects generator object in x0.
    __ Br(x10);
  }

  __ Bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x1, x2, x4);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(x2, x1);
    __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
  }
  __ B(&stepping_prepared);

  __ Bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x1, x2);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(x2, x1);
    __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
  }
  __ B(&stepping_prepared);
}

enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };

// Clobbers x10, x15; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow.
  // We are not trying to catch interruptions (e.g. debug break and
  // preemption) here, so the "real stack limit" is checked.
  Label enough_stack_space;
  __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
  // Make x10 the space we have left. The stack might already be overflowed
  // here which will cause x10 to become negative.
  // TODO(jbramley): Check that the stack usage here is safe.
  __ Sub(x10, jssp, x10);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ Cmp(x10, Operand::UntagSmiAndScale(argc, kPointerSizeLog2));
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ Cmp(x10, Operand(argc, LSL, kPointerSizeLog2));
  }
  __ B(gt, &enough_stack_space);
  __ CallRuntime(Runtime::kThrowStackOverflow);
  // We should never return from the APPLY_OVERFLOW builtin.
  if (__ emit_debug_code()) {
    __ Unreachable();
  }

  __ Bind(&enough_stack_space);
}
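
// In effect the check above is, in pseudo-C:
//
//   if ((jssp - real_stack_limit) <= argc * kPointerSize) {
//     ThrowStackOverflow();
//   }
//
// where argc is untagged and scaled first if it arrived as a smi.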

// Input:
//   x0: new.target.
//   x1: function.
//   x2: receiver.
//   x3: argc.
//   x4: argv.
// Output:
//   x0: result.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody().
  Register new_target = x0;
  Register function = x1;
  Register receiver = x2;
  Register argc = x3;
  Register argv = x4;
  Register scratch = x10;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  {
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the isolate).
    __ Mov(scratch, Operand(ExternalReference(Isolate::kContextAddress,
                                              masm->isolate())));
    __ Ldr(cp, MemOperand(scratch));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(function, receiver);

    // Check if we have enough stack space to push all arguments.
    // Expects the argument count in argc (x3). Clobbers x10 and x15.
    Generate_CheckStackOverflow(masm, argc, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop, in reverse order.
    // x3: argc.
    // x4: argv.
    Label loop, entry;
    // Compute the copy end address.
    __ Add(scratch, argv, Operand(argc, LSL, kPointerSizeLog2));

    __ B(&entry);
    __ Bind(&loop);
    __ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex));
    __ Ldr(x12, MemOperand(x11));  // Dereference the handle.
    __ Push(x12);                  // Push the argument.
    __ Bind(&entry);
    __ Cmp(scratch, argv);
    __ B(ne, &loop);

    __ Mov(scratch, argc);
    __ Mov(argc, new_target);
    __ Mov(new_target, scratch);
    // x0: argc.
    // x3: new.target.

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    // The original values have been saved in JSEntryStub::GenerateBody().
    __ LoadRoot(x19, Heap::kUndefinedValueRootIndex);
    __ Mov(x20, x19);
    __ Mov(x21, x19);
    __ Mov(x22, x19);
    __ Mov(x23, x19);
    __ Mov(x24, x19);
    __ Mov(x25, x19);
    // Don't initialize the reserved registers.
    // x26 : root register (root).
    // x27 : context pointer (cp).
    // x28 : JS stack pointer (jssp).
    // x29 : frame pointer (fp).

    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS internal frame and remove the parameters (except function),
    // and return.
  }

  // Result is in x0. Return.
  __ Ret();
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ ldr(args_count,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ Ldr(args_count.W(),
         FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments.
  __ Drop(args_count, 1);
}
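
// Note: the parameter size field read above holds a byte count covering the
// arguments plus the receiver (see the comment on the load), which is why the
// Drop uses a unit size of 1 rather than kPointerSize.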

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   - x1: the JS function object being called.
//   - x3: the new target
//   - cp: our context.
//   - fp: our caller's frame pointer.
//   - jssp: stack pointer.
//   - lr: return address.
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ Push(lr, fp, cp, x1);
  __ Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ Ldr(x0, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  Register debug_info = kInterpreterBytecodeArrayRegister;
  Label load_debug_bytecode_array, bytecode_array_loaded;
  DCHECK(!debug_info.is(x0));
  __ Ldr(debug_info, FieldMemOperand(x0, SharedFunctionInfo::kDebugInfoOffset));
  __ Cmp(debug_info, Operand(DebugInfo::uninitialized()));
  __ B(ne, &load_debug_bytecode_array);
  __ Ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(x0, SharedFunctionInfo::kFunctionDataOffset));
  __ Bind(&bytecode_array_loaded);

  // Check whether we should continue to use the interpreter.
  Label switch_to_different_code_kind;
  __ Ldr(x0, FieldMemOperand(x0, SharedFunctionInfo::kCodeOffset));
  __ Cmp(x0, Operand(masm->CodeObject()));  // Self-reference to this code.
  __ B(ne, &switch_to_different_code_kind);

  // Increment invocation count for the function.
  __ Ldr(x11, FieldMemOperand(x1, JSFunction::kLiteralsOffset));
  __ Ldr(x11, FieldMemOperand(x11, LiteralsArray::kFeedbackVectorOffset));
  __ Ldr(x10, FieldMemOperand(x11, TypeFeedbackVector::kInvocationCountIndex *
                                           kPointerSize +
                                       TypeFeedbackVector::kHeaderSize));
  __ Add(x10, x10, Operand(Smi::FromInt(1)));
  __ Str(x10, FieldMemOperand(x11, TypeFeedbackVector::kInvocationCountIndex *
                                           kPointerSize +
                                       TypeFeedbackVector::kHeaderSize));

  // Check function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister,
                    kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, x0, x0,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Load the initial bytecode offset.
  __ Mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(x0, kInterpreterBytecodeOffsetRegister);
  __ Push(x3, kInterpreterBytecodeArrayRegister, x0);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ Ldr(w11, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    DCHECK(jssp.Is(__ StackPointer()));
    __ Sub(x10, jssp, Operand(x11));
    __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
    __ B(hs, &ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ Bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    // Note: there should always be at least one stack slot for the return
    // register in the register file.
    Label loop_header;
    __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
    // TODO(rmcilroy): Ensure we always have an even number of registers to
    // allow stack to be 16-byte aligned (and remove need for jssp).
    __ Lsr(x11, x11, kPointerSizeLog2);
    __ PushMultipleTimes(x10, x11);
    __ Bind(&loop_header);
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ Mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));
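
  // What follows is the interpreter's core dispatch step; in pseudo-C terms
  // (a sketch, not real V8 API):
  //
  //   byte bc = bytecode_array[offset];
  //   Code* handler = dispatch_table[bc];
  //   call(handler);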

  // Dispatch to the first bytecode handler for the function.
  __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ Mov(x1, Operand(x1, LSL, kPointerSizeLog2));
  __ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1));
  __ Call(ip0);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in x0.
  LeaveInterpreterFrame(masm, x2);
  __ Ret();

  // Load debug copy of the bytecode array.
  __ Bind(&load_debug_bytecode_array);
  __ Ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(debug_info, DebugInfo::kDebugBytecodeArrayIndex));
  __ B(&bytecode_array_loaded);

  // If the shared code is no longer this entry trampoline, then the underlying
  // function has been switched to a different kind of code and we heal the
  // closure by switching the code entry field over to the new code as well.
  __ bind(&switch_to_different_code_kind);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ Ldr(x7, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x7, FieldMemOperand(x7, SharedFunctionInfo::kCodeOffset));
  __ Add(x7, x7, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Str(x7, FieldMemOperand(x1, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(x1, x7, x5);
  __ Jump(x7);
}

static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                        Register scratch,
                                        Label* stack_overflow) {
  // Check the stack for overflow.
  // We are not trying to catch interruptions (e.g. debug break and
  // preemption) here, so the "real stack limit" is checked.
  Label enough_stack_space;
  __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
  // Make scratch the space we have left. The stack might already be overflowed
  // here which will cause scratch to become negative.
  __ Sub(scratch, jssp, scratch);
  // Check if the arguments will overflow the stack.
  __ Cmp(scratch, Operand(num_args, LSL, kPointerSizeLog2));
  __ B(le, stack_overflow);
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args, Register index,
                                         Register last_arg, Register stack_addr,
                                         Register scratch,
                                         Label* stack_overflow) {
  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, num_args, scratch, stack_overflow);

  __ Mov(scratch, num_args);
  __ lsl(scratch, scratch, kPointerSizeLog2);
  __ sub(last_arg, index, scratch);

  // Set stack pointer and where to stop.
  __ Mov(stack_addr, jssp);
  __ Claim(scratch, 1);

  // Push the arguments.
  Label loop_header, loop_check;
  __ B(&loop_check);
  __ Bind(&loop_header);
  // TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
  __ Ldr(scratch, MemOperand(index, -kPointerSize, PostIndex));
  __ Str(scratch, MemOperand(stack_addr, -kPointerSize, PreIndex));
  __ Bind(&loop_check);
  __ Cmp(index, last_arg);
  __ B(gt, &loop_header);
}
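
// The push loop above amounts to, in pseudo-C (pointer arithmetic in slots):
//
//   last_arg = index - num_args;
//   while (index > last_arg) *(--stack_addr) = *(index--);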

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode,
    CallableType function_type) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- x1 : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;

  // Add one for the receiver.
  __ add(x3, x0, Operand(1));

  // Push the arguments. x2, x4, x5, x6 will be modified.
  Generate_InterpreterPushArgs(masm, x3, x2, x4, x5, x6, &stack_overflow);

  // Call the target.
  if (function_type == CallableType::kJSFunction) {
    __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                      tail_call_mode),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(function_type, CallableType::kAny);
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                              tail_call_mode),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ Unreachable();
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
    MacroAssembler* masm, CallableType construct_type) {
  // ----------- S t a t e -------------
  // -- x0 : argument count (not including receiver)
  // -- x3 : new target
  // -- x1 : constructor to call
  // -- x2 : allocation site feedback if available, undefined otherwise
  // -- x4 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  // Push a slot for the receiver.
  __ Push(xzr);

  // Push the arguments. x5, x4, x6, x7 will be modified.
  Generate_InterpreterPushArgs(masm, x0, x4, x5, x6, x7, &stack_overflow);

  __ AssertUndefinedOrAllocationSite(x2, x6);
  if (construct_type == CallableType::kJSFunction) {
    __ AssertFunction(x1);

    // Tail call to the function-specific construct stub (still in the caller
    // context at this point).
    __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset));
    __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
    __ Br(x4);
  } else {
    DCHECK_EQ(construct_type, CallableType::kAny);
    // Call the constructor with x0, x1, and x3 unmodified.
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ Unreachable();
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstructArray(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- x0 : argument count (not including receiver)
  // -- x1 : target to call verified to be Array function
  // -- x2 : allocation site feedback if available, undefined otherwise.
  // -- x3 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  __ add(x4, x0, Operand(1));  // Add one for the receiver.

  // Push the arguments. x3, x5, x6, x7 will be modified.
  Generate_InterpreterPushArgs(masm, x4, x3, x5, x6, x7, &stack_overflow);

  // The Array constructor expects the constructor in x3. It is the same as
  // the call target.
  __ mov(x3, x1);

  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ Unreachable();
  }
}

static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
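  // Pointing lr into the middle of the trampoline means that when the next
  // dispatched bytecode handler eventually returns, execution resumes in the
  // trampoline's exit sequence, which tears down the interpreter frame and
  // returns to the caller.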
1312   Smi* interpreter_entry_return_pc_offset(
1313       masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1314   DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
1315   __ LoadObject(x1, masm->isolate()->builtins()->InterpreterEntryTrampoline());
1316   __ Add(lr, x1, Operand(interpreter_entry_return_pc_offset->value() +
1317                          Code::kHeaderSize - kHeapObjectTag));
1318 
1319   // Initialize the dispatch table register.
1320   __ Mov(kInterpreterDispatchTableRegister,
1321          Operand(ExternalReference::interpreter_dispatch_table_address(
1322              masm->isolate())));
1323 
1324   // Get the bytecode array pointer from the frame.
1325   __ Ldr(kInterpreterBytecodeArrayRegister,
1326          MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1327 
1328   if (FLAG_debug_code) {
1329     // Check function data field is actually a BytecodeArray object.
1330     __ AssertNotSmi(kInterpreterBytecodeArrayRegister,
1331                     kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1332     __ CompareObjectType(kInterpreterBytecodeArrayRegister, x1, x1,
1333                          BYTECODE_ARRAY_TYPE);
1334     __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1335   }
1336 
1337   // Get the target bytecode offset from the frame.
1338   __ Ldr(kInterpreterBytecodeOffsetRegister,
1339          MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1340   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1341 
1342   // Dispatch to the target bytecode.
1343   __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
1344                          kInterpreterBytecodeOffsetRegister));
1345   __ Mov(x1, Operand(x1, LSL, kPointerSizeLog2));
1346   __ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1));
1347   __ Jump(ip0);
1348 }
1349 
Generate_InterpreterEnterBytecodeAdvance(MacroAssembler * masm)1350 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
1351   // Advance the current bytecode offset stored within the given interpreter
1352   // stack frame. This simulates what all bytecode handlers do upon completion
1353   // of the underlying operation.
1354   __ Ldr(x1, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1355   __ Ldr(x2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1356   __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1357   {
1358     FrameScope scope(masm, StackFrame::INTERNAL);
1359     __ Push(kInterpreterAccumulatorRegister, x1, x2);
1360     __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset);
1361     __ Mov(x2, x0);  // Result is the new bytecode offset.
1362     __ Pop(kInterpreterAccumulatorRegister);
1363   }
1364   __ Str(x2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1365 
1366   Generate_InterpreterEnterBytecode(masm);
1367 }
1368 
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}

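// Note: the shared function info's optimized code map referenced below is a
// FixedArray whose entries each hold a (native context, OSR ast id, literals,
// code) tuple, as implied by the kOffsetToPrevious* constants. The loop below
// scans it from the last entry towards the first, looking for an entry that
// was recorded for the current native context.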
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : argument count (preserved for callee)
  //  -- x3 : new target (preserved for callee)
  //  -- x1 : target function (preserved for callee)
  // -----------------------------------
  // First look up the code; maybe we don't need to compile!
  Label gotta_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = x1;
  Register map = x13;
  Register index = x2;
  __ Ldr(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(map,
         FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ Ldrsw(index, UntagSmiFieldMemOperand(map, FixedArray::kLengthOffset));
  __ Cmp(index, Operand(2));
  __ B(lt, &gotta_call_runtime);

  // Find literals.
  // x4  : native context
  // x2  : length / index
  // x13 : optimized code map
  Register native_context = x4;
  __ Ldr(native_context, NativeContextMemOperand());

  __ Bind(&loop_top);
  Register temp = x5;
  Register array_pointer = x6;

  // Does the native context match?
  __ Add(array_pointer, map, Operand(index, LSL, kPointerSizeLog2));
  __ Ldr(temp, FieldMemOperand(array_pointer,
                               SharedFunctionInfo::kOffsetToPreviousContext));
  __ Ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ Cmp(temp, native_context);
  __ B(ne, &loop_bottom);
  // OSR id set to none?
  __ Ldr(temp, FieldMemOperand(array_pointer,
                               SharedFunctionInfo::kOffsetToPreviousOsrAstId));
  const int bailout_id = BailoutId::None().ToInt();
  __ Cmp(temp, Operand(Smi::FromInt(bailout_id)));
  __ B(ne, &loop_bottom);
  // Literals available?
  __ Ldr(temp, FieldMemOperand(array_pointer,
                               SharedFunctionInfo::kOffsetToPreviousLiterals));
  __ Ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ JumpIfSmi(temp, &gotta_call_runtime);

  // Save the literals in the closure.
  __ Str(temp, FieldMemOperand(closure, JSFunction::kLiteralsOffset));
  __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, x7,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  // Code available?
  Register entry = x7;
  __ Ldr(entry,
         FieldMemOperand(array_pointer,
                         SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ Ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Found literals and code. Get them into the closure and return.
  __ Add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(closure, entry, x5);

  // Link the closure into the optimized function list.
  // x7 : code entry
  // x4 : native context
  // x1 : closure
  __ Ldr(x8,
         ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ Str(x8, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, x8, x13,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ Str(closure,
         ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ Mov(x5, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, x5, x13,
                            kLRHasNotBeenSaved, kDontSaveFPRegs);
  __ Jump(entry);

  __ Bind(&loop_bottom);
  __ Sub(index, index, Operand(SharedFunctionInfo::kEntryLength));
  __ Cmp(index, Operand(1));
  __ B(gt, &loop_top);

  // We found neither literals nor code.
  __ B(&gotta_call_runtime);

  __ Bind(&try_shared);
  __ Ldr(entry,
         FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  // Is the shared function marked for tier up?
  __ Ldrb(temp, FieldMemOperand(
                    entry, SharedFunctionInfo::kMarkedForTierUpByteOffset));
  __ TestAndBranchIfAnySet(
      temp, 1 << SharedFunctionInfo::kMarkedForTierUpBitWithinByte,
      &gotta_call_runtime);
  // Is the full code valid?
  __ Ldr(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ Ldr(x5, FieldMemOperand(entry, Code::kFlagsOffset));
  __ and_(x5, x5, Operand(Code::KindField::kMask));
  __ Mov(x5, Operand(x5, LSR, Code::KindField::kShift));
  __ Cmp(x5, Operand(Code::BUILTIN));
  __ B(eq, &gotta_call_runtime);
  // Yes, install the full code.
  __ Add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(closure, entry, x5);
  __ Jump(entry);

  __ Bind(&gotta_call_runtime);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}

void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}

void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}

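// Note: the unrolled loop in the builtin below handles each possible argument
// count (0 to 3) separately: for a given count it copies the arguments the
// caller did provide from the caller's frame and pushes undefined for the
// rest, so that Runtime::kInstantiateAsmJs always receives exactly three
// module arguments (stdlib, foreign, heap) after the function.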
void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : argument count (preserved for callee)
  //  -- x1 : new target (preserved for callee)
  //  -- x3 : target function (preserved for callee)
  // -----------------------------------
  Label failed;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve the argument count for the later comparison.
    __ Mov(x4, x0);
    // Push a copy of the target function and the new target.
    __ SmiTag(x0);
    // Push another copy as a parameter to the runtime call.
    __ Push(x0, x1, x3, x1);

    // Copy arguments from caller (stdlib, foreign, heap).
    Label args_done;
    for (int j = 0; j < 4; ++j) {
      Label over;
      if (j < 3) {
        __ Cmp(x4, Operand(j));
        __ B(ne, &over);
      }
      for (int i = j - 1; i >= 0; --i) {
        __ Ldr(x4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
                                      i * kPointerSize));
        __ Push(x4);
      }
      for (int i = 0; i < 3 - j; ++i) {
        __ PushRoot(Heap::kUndefinedValueRootIndex);
      }
      if (j < 3) {
        __ B(&args_done);
        __ Bind(&over);
      }
    }
    __ Bind(&args_done);

    // Call the runtime; on success, unwind this frame and the parent frame.
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
    // A smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(x0, &failed);

    __ Drop(2);
    __ Pop(x4);
    __ SmiUntag(x4);
    scope.GenerateLeaveFrame();

    __ Add(x4, x4, Operand(1));
    __ Drop(x4);
    __ Ret();

    __ Bind(&failed);
    // Restore target function and new target.
    __ Pop(x3, x1, x0);
    __ SmiUntag(x0);
  }
  // On failure, tail call back to regular JavaScript.
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}

static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code fast, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   x3 - new target
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, x3, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
    __ Pop(lr, fp, x3, x1, x0);
  }

  // The calling function has been made young again, so return to execute the
  // real frame set-up code.
  __ Br(x0);
}

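// The macro below defines a pair of builtins (one per marking parity) for
// every code age in CODE_AGE_LIST; all of them simply funnel into
// GenerateMakeCodeYoungAgainCommon above.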
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR

void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   x3 - new target
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, x3, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        2);
    __ Pop(lr, fp, x3, x1, x0);

    // Perform prologue operations usually performed by the young code stub.
    __ EmitFrameSetupForCodeAgePatching(masm);
  }

  // Jump to point after the code-age stub.
  __ Add(x0, x0, kNoCodeAgeSequenceLength);
  __ Br(x0);
}

void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}

void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}

static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification; this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    // TODO(jbramley): Is it correct (and appropriate) to use safepoint
    // registers here? According to the comment above, we should only need to
    // preserve the registers with parameters.
    __ PushXRegList(kSafepointSavedRegisters);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ PopXRegList(kSafepointSavedRegisters);
  }

  // Ignore state (pushed by Deoptimizer::EntryGenerator::Generate).
  __ Drop(1);

  // Jump to the miss handler. Deoptimizer::EntryGenerator::Generate loads this
  // into lr before it jumps here.
  __ Br(lr);
}

void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}

void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}

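// Note: on entry to the helper below, the top of the stack holds a
// BailoutState smi describing what else was saved; in the TOS_REGISTER case
// the accumulator value sits in the slot beneath it. The Drop counts below
// remove exactly those slots once the state has been inspected.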
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    __ Mov(x0, Smi::FromInt(static_cast<int>(type)));
    __ Push(x0);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it.
  Register state = x6;
  __ Peek(state, 0);
  __ SmiUntag(state);

  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ CompareAndBranch(state,
                      static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS),
                      ne, &with_tos_register);
  __ Drop(1);  // Remove state.
  __ Ret();

  __ Bind(&with_tos_register);
  // Reload TOS register.
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), x0.code());
  __ Peek(x0, kPointerSize);
  __ CompareAndBranch(state,
                      static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER),
                      ne, &unknown_state);
  __ Drop(2);  // Remove state and TOS.
  __ Ret();

  __ Bind(&unknown_state);
  __ Abort(kInvalidFullCodegenState);
}

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}

void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}

void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}

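// Checks that the receiver (or an object on its hidden-prototype chain) was
// created from a constructor whose FunctionTemplateInfo chain contains the
// signature required by the callee. Falls through on success and branches to
// receiver_check_failed otherwise.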
static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Register scratch0, Register scratch1,
                                    Register scratch2,
                                    Label* receiver_check_failed) {
  Register signature = scratch0;
  Register map = scratch1;
  Register constructor = scratch2;

  // If there is no signature, return the holder.
  __ Ldr(signature, FieldMemOperand(function_template_info,
                                    FunctionTemplateInfo::kSignatureOffset));
  __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
  Label receiver_check_passed;
  __ B(eq, &receiver_check_passed);

  // Walk the prototype chain.
  __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ Bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(constructor, map, x16, x16);
  __ Cmp(x16, Operand(JS_FUNCTION_TYPE));
  Label next_prototype;
  __ B(ne, &next_prototype);
  Register type = constructor;
  __ Ldr(type,
         FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ Bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ Cmp(signature, type);
  __ B(eq, &receiver_check_passed);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype);
  __ CompareObjectType(type, x16, x17, FUNCTION_TEMPLATE_INFO_TYPE);
  __ B(ne, &next_prototype);

  // Otherwise load the parent function template and iterate.
  __ Ldr(type,
         FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ B(&function_template_loop);

  // Load the next prototype.
  __ Bind(&next_prototype);
  __ Ldr(x16, FieldMemOperand(map, Map::kBitField3Offset));
  __ Tst(x16, Operand(Map::HasHiddenPrototype::kMask));
  __ B(eq, receiver_check_failed);
  __ Ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
  __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Iterate.
  __ B(&prototype_loop_start);

  __ Bind(&receiver_check_passed);
}

void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                 : number of arguments excluding receiver
  //  -- x1                 : callee
  //  -- lr                 : return address
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument
  //  -- sp[8 * argc]       : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
  CompatibleReceiverCheck(masm, x2, x3, x4, x5, x6, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ Ldr(x4, FieldMemOperand(x3, FunctionTemplateInfo::kCallCodeOffset));
  __ Ldr(x4, FieldMemOperand(x4, CallHandlerInfo::kFastHandlerOffset));
  __ Add(x4, x4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(x4);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ Bind(&receiver_check_failed);
  // Drop the arguments (including the receiver).
  __ Add(x0, x0, Operand(1));
  __ Drop(x0);
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}

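// Compiles the function in the topmost JavaScript frame for on-stack
// replacement. If compilation succeeds, the OSR entry offset is read from the
// optimized code object's deoptimization data, and the builtin "returns" into
// the optimized code at that entry point.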
static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
                                              bool has_handler_frame) {
  // Lookup the function in the JavaScript frame.
  if (has_handler_frame) {
    __ Ldr(x0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ Ldr(x0, MemOperand(x0, JavaScriptFrameConstants::kFunctionOffset));
  } else {
    __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }

  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(x0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the caller.
  Label skip;
  __ CompareAndBranch(x0, Smi::kZero, ne, &skip);
  __ Ret();

  __ Bind(&skip);

  // Drop any potential handler frame that may be sitting on top of the actual
  // JavaScript frame. This is the case when OSR is triggered from bytecode.
  if (has_handler_frame) {
    __ LeaveFrame(StackFrame::STUB);
  }

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ Ldr(x1, MemOperand(x0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ Ldrsw(w1, UntagSmiFieldMemOperand(
                   x1, FixedArray::OffsetOfElementAt(
                           DeoptimizationInputData::kOsrPcOffsetIndex)));

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ Add(x0, x0, x1);
  __ Add(lr, x0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}

void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, false);
}

void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, true);
}

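// Note: FunctionPrototypeApply below (and ReflectApply/ReflectConstruct
// further down) use the same stack trick: Claim the maximum number of
// argument slots and then Drop argc, so that the stack always holds exactly
// the expected number of slots regardless of how many arguments were passed.
// A single Pop plus conditional selects (CmovX) can then default the missing
// arguments to undefined without branching on argc.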
// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0       : argc
  //  -- jssp[0]  : argArray (if argc == 2)
  //  -- jssp[8]  : thisArg  (if argc >= 1)
  //  -- jssp[16] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_FunctionPrototypeApply");

  Register argc = x0;
  Register arg_array = x0;
  Register receiver = x1;
  Register this_arg = x2;
  Register undefined_value = x3;
  Register null_value = x4;

  __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);

  // 1. Load receiver into x1, argArray into x0 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    // Claim (2 - argc) dummy arguments from the stack, to put the stack in a
    // consistent state for a simple pop operation.
    __ Claim(2);
    __ Drop(argc);

    // ----------- S t a t e -------------
    //  -- x0       : argc
    //  -- jssp[0]  : argArray (dummy value if argc <= 1)
    //  -- jssp[8]  : thisArg  (dummy value if argc == 0)
    //  -- jssp[16] : receiver
    // -----------------------------------
    __ Cmp(argc, 1);
    __ Pop(arg_array, this_arg);               // Overwrites argc.
    __ CmovX(this_arg, undefined_value, lo);   // undefined if argc == 0.
    __ CmovX(arg_array, undefined_value, ls);  // undefined if argc <= 1.

    __ Peek(receiver, 0);
    __ Poke(this_arg, 0);
  }

  // ----------- S t a t e -------------
  //  -- x0      : argArray
  //  -- x1      : receiver
  //  -- x3      : undefined root value
  //  -- jssp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(receiver, &receiver_not_callable);
  __ Ldr(x10, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Ldrb(w10, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x10, 1 << Map::kIsCallable,
                             &receiver_not_callable);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ Cmp(arg_array, null_value);
  __ Ccmp(arg_array, undefined_value, ZFlag, ne);
  __ B(eq, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target in x3).
  DCHECK(undefined_value.Is(x3));
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ Bind(&no_arguments);
  {
    __ Mov(x0, 0);
    DCHECK(receiver.Is(x1));
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ Bind(&receiver_not_callable);
  {
    __ Poke(receiver, 0);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}

// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  Register argc = x0;
  Register function = x1;
  Register scratch1 = x10;
  Register scratch2 = x11;

  ASM_LOCATION("Builtins::Generate_FunctionPrototypeCall");

  // 1. Make sure we have at least one argument.
  {
    Label done;
    __ Cbnz(argc, &done);
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    __ Push(scratch1);
    __ Mov(argc, 1);
    __ Bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  __ Peek(function, Operand(argc, LSL, kXRegSizeLog2));

  // 3. Shift arguments one slot down on the stack (overwriting the original
  //    receiver). Adjust the argument count to make the original first
  //    argument the new receiver.
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is jssp.
    __ Add(scratch2, jssp, Operand(argc, LSL, kPointerSizeLog2));
    __ Sub(scratch1, scratch2, kPointerSize);

    __ Bind(&loop);
    __ Ldr(x12, MemOperand(scratch1, -kPointerSize, PostIndex));
    __ Str(x12, MemOperand(scratch2, -kPointerSize, PostIndex));
    __ Cmp(scratch1, jssp);
    __ B(ge, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Sub(argc, argc, 1);
    __ Drop(1);
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0       : argc
  //  -- jssp[0]  : argumentsList (if argc == 3)
  //  -- jssp[8]  : thisArgument  (if argc >= 2)
  //  -- jssp[16] : target        (if argc >= 1)
  //  -- jssp[24] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_ReflectApply");

  Register argc = x0;
  Register arguments_list = x0;
  Register target = x1;
  Register this_argument = x2;
  Register undefined_value = x3;

  __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);

  // 1. Load target into x1 (if present), argumentsList into x0 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
    // consistent state for a simple pop operation.
    __ Claim(3);
    __ Drop(argc);

    // ----------- S t a t e -------------
    //  -- x0       : argc
    //  -- jssp[0]  : argumentsList (dummy value if argc <= 2)
    //  -- jssp[8]  : thisArgument  (dummy value if argc <= 1)
    //  -- jssp[16] : target        (dummy value if argc == 0)
    //  -- jssp[24] : receiver
    // -----------------------------------
    __ Adds(x10, argc, 0);  // Preserve argc, and set the Z flag if it is zero.
    __ Pop(arguments_list, this_argument, target);  // Overwrites argc.
    __ CmovX(target, undefined_value, eq);          // undefined if argc == 0.
    __ Cmp(x10, 2);
    __ CmovX(this_argument, undefined_value, lo);   // undefined if argc <= 1.
    __ CmovX(arguments_list, undefined_value, ls);  // undefined if argc <= 2.

    __ Poke(this_argument, 0);  // Overwrite receiver.
  }

  // ----------- S t a t e -------------
  //  -- x0      : argumentsList
  //  -- x1      : target
  //  -- jssp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(target, &target_not_callable);
  __ Ldr(x10, FieldMemOperand(target, HeapObject::kMapOffset));
  __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x10, 1 << Map::kIsCallable, &target_not_callable);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target in x3).
  DCHECK(undefined_value.Is(x3));
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ Bind(&target_not_callable);
  {
    __ Poke(target, 0);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}

void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0       : argc
  //  -- jssp[0]  : new.target (optional)
  //  -- jssp[8]  : argumentsList
  //  -- jssp[16] : target
  //  -- jssp[24] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_ReflectConstruct");

  Register argc = x0;
  Register arguments_list = x0;
  Register target = x1;
  Register new_target = x3;
  Register undefined_value = x4;

  __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);

  // 1. Load target into x1 (if present), argumentsList into x0 (if present),
  // new.target into x3 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and leave undefined on
  // the stack as the receiver.
  {
    // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
    // consistent state for a simple pop operation.
    __ Claim(3);
    __ Drop(argc);

    // ----------- S t a t e -------------
    //  -- x0       : argc
    //  -- jssp[0]  : new.target    (dummy value if argc <= 2)
    //  -- jssp[8]  : argumentsList (dummy value if argc <= 1)
    //  -- jssp[16] : target        (dummy value if argc == 0)
    //  -- jssp[24] : receiver
    // -----------------------------------
    __ Adds(x10, argc, 0);  // Preserve argc, and set the Z flag if it is zero.
    __ Pop(new_target, arguments_list, target);  // Overwrites argc.
    __ CmovX(target, undefined_value, eq);       // undefined if argc == 0.
    __ Cmp(x10, 2);
    __ CmovX(arguments_list, undefined_value, lo);  // undefined if argc <= 1.
    __ CmovX(new_target, target, ls);               // target if argc <= 2.

    __ Poke(undefined_value, 0);  // Overwrite receiver.
  }

  // ----------- S t a t e -------------
  //  -- x0      : argumentsList
  //  -- x1      : target
  //  -- x3      : new.target
  //  -- jssp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(target, &target_not_constructor);
  __ Ldr(x10, FieldMemOperand(target, HeapObject::kMapOffset));
  __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x10, 1 << Map::kIsConstructor,
                             &target_not_constructor);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(new_target, &new_target_not_constructor);
  __ Ldr(x10, FieldMemOperand(new_target, HeapObject::kMapOffset));
  __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x10, 1 << Map::kIsConstructor,
                             &new_target_not_constructor);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ Bind(&target_not_constructor);
  {
    __ Poke(target, 0);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ Bind(&new_target_not_constructor);
  {
    __ Poke(new_target, 0);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}

static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(x10, x0);
  __ Mov(x11, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ Push(lr, fp);
  __ Push(x11, x1, x10);
  __ Add(fp, jssp,
         StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize);
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then drop the parameters and the receiver.
  __ Ldr(x10, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                               kPointerSize)));
  __ Mov(jssp, fp);
  __ Pop(fp, lr);
  __ DropBySMI(x10, kXRegSize);
  __ Drop(1);
}

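// Note: Generate_Apply first flattens argumentsList into a FixedArray. It
// takes fast paths for unmodified (sloppy or strict) arguments objects and
// for JSArrays with FAST_{SMI_}ELEMENTS kinds, and falls back to the runtime
// for everything else. It then pushes the elements onto the stack and
// dispatches to either Call or Construct, depending on whether new.target is
// undefined.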
// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0      : argumentsList
  //  -- x1      : target
  //  -- x3      : new.target (checked to be constructor or undefined)
  //  -- jssp[0] : thisArgument
  // -----------------------------------

  Register arguments_list = x0;
  Register target = x1;
  Register new_target = x3;

  Register args = x0;
  Register len = x2;

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(arguments_list, &create_runtime);

    // Load native context.
    Register native_context = x4;
    __ Ldr(native_context, NativeContextMemOperand());

    // Load the map of argumentsList.
    Register arguments_list_map = x2;
    __ Ldr(arguments_list_map,
           FieldMemOperand(arguments_list, HeapObject::kMapOffset));

    // Check if argumentsList is an (unmodified) arguments object.
    __ Ldr(x10, ContextMemOperand(native_context,
                                  Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ Ldr(x11, ContextMemOperand(native_context,
                                  Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ Cmp(arguments_list_map, x10);
    __ Ccmp(arguments_list_map, x11, ZFlag, ne);
    __ B(eq, &create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CompareInstanceType(arguments_list_map, native_context, JS_ARRAY_TYPE);
    __ B(eq, &create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ Bind(&create_runtime);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(target, new_target, arguments_list);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(new_target, target);
      __ Ldrsw(len, UntagSmiFieldMemOperand(arguments_list,
                                            FixedArray::kLengthOffset));
    }
    __ B(&done_create);

    // Try to create the list from an arguments object.
    __ Bind(&create_arguments);
    __ Ldrsw(len, UntagSmiFieldMemOperand(arguments_list,
                                          JSArgumentsObject::kLengthOffset));
    __ Ldr(x10, FieldMemOperand(arguments_list, JSObject::kElementsOffset));
    __ Ldrsw(x11, UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
    __ CompareAndBranch(len, x11, ne, &create_runtime);
    __ Mov(args, x10);
    __ B(&done_create);

    // Try to create the list from a JSArray object.
    __ Bind(&create_array);
    __ Ldr(x10, FieldMemOperand(arguments_list_map, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(x10);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    // Branch for anything that's not FAST_{SMI_}ELEMENTS.
    __ TestAndBranchIfAnySet(x10, ~FAST_ELEMENTS, &create_runtime);
    __ Ldrsw(len,
             UntagSmiFieldMemOperand(arguments_list, JSArray::kLengthOffset));
    __ Ldr(args, FieldMemOperand(arguments_list, JSArray::kElementsOffset));

    __ Bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
    // Make x10 the space we have left. The stack might already be overflowed
    // here which will cause x10 to become negative.
    __ Sub(x10, masm->StackPointer(), x10);
    // Check if the arguments will overflow the stack.
    __ Cmp(x10, Operand(len, LSL, kPointerSizeLog2));
    __ B(gt, &done);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ Bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- x0      : args (a FixedArray built from argumentsList)
  //  -- x1      : target
  //  -- x2      : len (number of elements to push from args)
  //  -- x3      : new.target (checked to be constructor or undefined)
  //  -- jssp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    Label done, loop;
    Register src = x4;

    __ Add(src, args, FixedArray::kHeaderSize - kHeapObjectTag);
    __ Mov(x0, len);  // The 'len' argument for Call() or Construct().
    __ Cbz(len, &done);
    __ Claim(len);
    __ Bind(&loop);
    __ Sub(len, len, 1);
    __ Ldr(x10, MemOperand(src, kPointerSize, PostIndex));
    __ Poke(x10, Operand(len, LSL, kPointerSizeLog2));
    __ Cbnz(len, &loop);
    __ Bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- x0              : argument count (len)
  //  -- x1              : target
  //  -- x3              : new.target (checked to be constructor or undefined)
  //  -- jssp[0]         : args[len-1]
  //  -- jssp[8]         : args[len-2]
  //      ...            :  ...
  //  -- jssp[8*(len-2)] : args[1]
  //  -- jssp[8*(len-1)] : args[0]
  // -----------------------------------

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(new_target, Heap::kUndefinedValueRootIndex);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}

namespace {

// Drops the top JavaScript frame and an arguments adaptor frame below it (if
// present), preserving all the arguments prepared for the current call.
// Does nothing if the debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is enabled.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ Mov(scratch1, Operand(is_tail_call_elimination_enabled));
  __ Ldrb(scratch1, MemOperand(scratch1));
  __ Cmp(scratch1, Operand(0));
  __ B(eq, &done);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ Ldr(scratch3,
           MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
    __ Cmp(scratch3, Operand(Smi::FromInt(StackFrame::STUB)));
    __ B(ne, &no_interpreter_frame);
    __ Ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ Ldr(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ Ldr(scratch3,
         MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Cmp(scratch3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ B(ne, &no_arguments_adaptor);

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ mov(fp, scratch2);
  __ Ldr(caller_args_count_reg,
         MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ B(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count.
  __ Ldr(scratch1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(scratch1,
         FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldrsw(caller_args_count_reg,
           FieldMemOperand(scratch1,
                           SharedFunctionInfo::kFormalParameterCountOffset));
  __ bind(&formal_parameter_count_loaded);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
}  // namespace

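// Note: the receiver conversion below implements the ToObject coercion that
// [[Call]] requires for sloppy-mode (non-native) functions: null and
// undefined receivers are replaced with the global proxy, while other
// primitives are boxed via the ToObject builtin.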
// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  ASM_LOCATION("Builtins::Generate_CallFunction");
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(x1);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that function is not a "classConstructor".
  Label class_constructor;
  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(w3, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset));
  __ TestAndBranchIfAnySet(w3, FunctionKind::kClassConstructor
                                   << SharedFunctionInfo::kFunctionKindShift,
                           &class_constructor);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ TestAndBranchIfAnySet(w3,
                           (1 << SharedFunctionInfo::kNative) |
                               (1 << SharedFunctionInfo::kStrictModeFunction),
                           &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- x0 : the number of arguments (not including the receiver)
    //  -- x1 : the function to call (checked to be a JSFunction)
    //  -- x2 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(x3);
    } else {
      Label convert_to_object, convert_receiver;
      __ Peek(x3, Operand(x0, LSL, kXRegSizeLog2));
      __ JumpIfSmi(x3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(x3, x4, x4, FIRST_JS_RECEIVER_TYPE);
      __ B(hs, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(x3, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(x3, Heap::kNullValueRootIndex, &convert_to_object);
        __ Bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(x3);
        }
        __ B(&convert_receiver);
      }
      __ Bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(x0);
        __ Push(x0, x1);
        __ Mov(x0, x3);
        __ Push(cp);
        __ Call(masm->isolate()->builtins()->ToObject(),
                RelocInfo::CODE_TARGET);
        __ Pop(cp);
        __ Mov(x3, x0);
        __ Pop(x1, x0);
        __ SmiUntag(x0);
      }
      __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
      __ Bind(&convert_receiver);
    }
    __ Poke(x3, Operand(x0, LSL, kXRegSizeLog2));
  }
  __ Bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSFunction)
  //  -- x2 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, x0, x3, x4, x5);
  }

  __ Ldrsw(
      x2, FieldMemOperand(x2, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(x0);
  ParameterCount expected(x2);
  __ InvokeFunctionCode(x1, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(x1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}

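// Note: Generate_PushBoundArguments below splices a bound function's
// [[BoundArguments]] into the middle of the stack: it reserves space, shifts
// the receiver and the existing arguments down into that space, inserts the
// bound arguments between them and the receiver, and bumps the argument
// count in x0 accordingly.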
namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : target (checked to be a JSBoundFunction)
  //  -- x3 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into x2 and length of that into x4.
  Label no_bound_arguments;
  __ Ldr(x2, FieldMemOperand(x1, JSBoundFunction::kBoundArgumentsOffset));
  __ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset));
  __ Cmp(x4, 0);
  __ B(eq, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- x0 : the number of arguments (not including the receiver)
    //  -- x1 : target (checked to be a JSBoundFunction)
    //  -- x2 : the [[BoundArguments]] (implemented as FixedArray)
    //  -- x3 : new.target (only in case of [[Construct]])
    //  -- x4 : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ Claim(x4);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(jssp, Heap::kRealStackLimitRootIndex);
      __ B(gt, &done);  // Signed comparison.
      // Restore the stack pointer.
      __ Drop(x4);
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ Bind(&done);
    }

    // Relocate arguments down the stack.
    {
      Label loop, done_loop;
      __ Mov(x5, 0);
      __ Bind(&loop);
      __ Cmp(x5, x0);
      __ B(gt, &done_loop);
      __ Peek(x10, Operand(x4, LSL, kPointerSizeLog2));
      __ Poke(x10, Operand(x5, LSL, kPointerSizeLog2));
      __ Add(x4, x4, 1);
      __ Add(x5, x5, 1);
      __ B(&loop);
      __ Bind(&done_loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset));
      __ Add(x2, x2, FixedArray::kHeaderSize - kHeapObjectTag);
      __ Bind(&loop);
      __ Sub(x4, x4, 1);
      __ Ldr(x10, MemOperand(x2, x4, LSL, kPointerSizeLog2));
      __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));
      __ Add(x0, x0, 1);
      __ Cmp(x4, 0);
      __ B(gt, &loop);
    }
  }
  __ Bind(&no_bound_arguments);
}

}  // namespace

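// Note: calling a bound function never re-enters user code to compute the
// receiver: the stored [[BoundThis]] is patched into the receiver slot, the
// bound arguments are spliced in, and control tail-jumps to the generic Call
// builtin with [[BoundTargetFunction]] as the new call target.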
// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(x1);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, x0, x3, x4, x5);
  }

  // Patch the receiver to [[BoundThis]].
  __ Ldr(x10, FieldMemOperand(x1, JSBoundFunction::kBoundThisOffset));
  __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Mov(x10,
         ExternalReference(Builtins::kCall_ReceiverIsAny, masm->isolate()));
  __ Ldr(x11, MemOperand(x10));
  __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x12);
}

2649 // static
Generate_Call(MacroAssembler * masm,ConvertReceiverMode mode,TailCallMode tail_call_mode)2650 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2651                              TailCallMode tail_call_mode) {
2652   // ----------- S t a t e -------------
2653   //  -- x0 : the number of arguments (not including the receiver)
2654   //  -- x1 : the target to call (can be any Object).
2655   // -----------------------------------
2656 
2657   Label non_callable, non_function, non_smi;
2658   __ JumpIfSmi(x1, &non_callable);
2659   __ Bind(&non_smi);
2660   __ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
2661   __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
2662           RelocInfo::CODE_TARGET, eq);
2663   __ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
2664   __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
2665           RelocInfo::CODE_TARGET, eq);
2666 
2667   // Check if target has a [[Call]] internal method.
2668   __ Ldrb(x4, FieldMemOperand(x4, Map::kBitFieldOffset));
2669   __ TestAndBranchIfAllClear(x4, 1 << Map::kIsCallable, &non_callable);
2670 
2671   __ Cmp(x5, JS_PROXY_TYPE);
2672   __ B(ne, &non_function);
2673 
2674   // 0. Prepare for tail call if necessary.
2675   if (tail_call_mode == TailCallMode::kAllow) {
2676     PrepareForTailCall(masm, x0, x3, x4, x5);
2677   }
2678 
2679   // 1. Runtime fallback for Proxy [[Call]].
2680   __ Push(x1);
2681   // Increase the arguments size to include the pushed function and the
2682   // existing receiver on the stack.
2683   __ Add(x0, x0, Operand(2));
2684   // Tail-call to the runtime.
2685   __ JumpToExternalReference(
2686       ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
2687 
2688   // 2. Call to something else, which might have a [[Call]] internal method (if
2689   // not we raise an exception).
2690   __ Bind(&non_function);
2691   // Overwrite the original receiver with the (original) target.
2692   __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
2693   // Let the "call_as_function_delegate" take care of the rest.
2694   __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, x1);
2695   __ Jump(masm->isolate()->builtins()->CallFunction(
2696               ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
2697           RelocInfo::CODE_TARGET);
2698 
2699   // 3. Call to something that is not callable.
2700   __ bind(&non_callable);
2701   {
2702     FrameScope scope(masm, StackFrame::INTERNAL);
2703     __ Push(x1);
2704     __ CallRuntime(Runtime::kThrowCalledNonCallable);
2705   }
2706 }
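
// The dispatch above, sketched at the JS level (for illustration only);
// the most specific cases are tried first:
//
//   function f() {}
//   f();                 // JSFunction       -> CallFunction builtin
//   f.bind(null)();      // JSBoundFunction  -> CallBoundFunction builtin
//   new Proxy(f, {})();  // callable JSProxy -> Runtime::kJSProxyCall
//   ({})();              // not callable     -> Runtime::kThrowCalledNonCallable
//
// Other callable objects (e.g. API objects with an instance call handler)
// are routed through the CALL_AS_FUNCTION_DELEGATE.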

// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (checked to be a JSFunction)
  //  -- x3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(x1);

  // The calling convention for function-specific ConstructStubs requires
  // x2 to contain either an AllocationSite or undefined.
  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset));
  __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x4);
}
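
// For illustration: a plain `new f()` reaches this builtin with
// x1 == x3 == f, while e.g. Reflect.construct can supply a different
// new target:
//
//   function f() {}
//   function g() {}
//   new f();                      // x1 == f, x3 == f
//   Reflect.construct(f, [], g);  // x1 == f, x3 == g (new.target inside f)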

// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSBoundFunction)
  //  -- x3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(x1);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label done;
    __ Cmp(x1, x3);
    __ B(ne, &done);
    __ Ldr(x3,
           FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
    __ Bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Mov(x10, ExternalReference(Builtins::kConstruct, masm->isolate()));
  __ Ldr(x11, MemOperand(x10));
  __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x12);
}
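
// For illustration: constructing a bound function constructs its
// [[BoundTargetFunction]], and new.target is forwarded unless it was the
// bound function itself:
//
//   function F() { this.nt = new.target; }
//   var B = F.bind(null);
//   new B();  // constructs F; new.target is patched from B to F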

// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (checked to be a JSProxy)
  //  -- x3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(x1);
  __ Push(x3);
  // Include the pushed new_target, constructor and the receiver.
  __ Add(x0, x0, 3);
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}
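
// For illustration: Runtime::kJSProxyConstruct implements Proxy
// [[Construct]], which invokes the "construct" trap when present:
//
//   var p = new Proxy(function T() {}, {
//     construct: function(target, args, newTarget) {
//       return { wrapped: true };
//     }
//   });
//   new p();  // -> { wrapped: true }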

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (can be any Object)
  //  -- x3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(x1, &non_constructor);

  // Dispatch based on instance type.
  __ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Construct]] internal method.
  __ Ldrb(x2, FieldMemOperand(x4, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x2, 1 << Map::kIsConstructor, &non_constructor);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ Cmp(x5, JS_PROXY_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, x1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ Bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
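
// The dispatch above, sketched at the JS level (for illustration only):
//
//   function f() {}
//   new f();                   // JSFunction      -> ConstructFunction
//   new (f.bind(null))();      // JSBoundFunction -> ConstructBoundFunction
//   new (new Proxy(f, {}))();  // JSProxy         -> ConstructProxy
//   new ({});                  // no [[Construct]] -> TypeError via
//                              //   ConstructedNonConstructable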

// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_AllocateInNewSpace");
  // ----------- S t a t e -------------
  //  -- x1 : requested object size (untagged)
  //  -- lr : return address
  // -----------------------------------
  __ SmiTag(x1);
  __ Push(x1);
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_AllocateInOldSpace");
  // ----------- S t a t e -------------
  //  -- x1 : requested object size (untagged)
  //  -- lr : return address
  // -----------------------------------
  __ SmiTag(x1);
  __ Move(x2, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ Push(x1, x2);
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}

// static
void Builtins::Generate_Abort(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_Abort");
  // ----------- S t a t e -------------
  //  -- x1 : message_id as Smi
  //  -- lr : return address
  // -----------------------------------
  MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm);
  __ Push(x1);
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAbort);
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline");
  // ----------- S t a t e -------------
  //  -- x0 : actual number of arguments
  //  -- x1 : function (passed through to callee)
  //  -- x2 : expected number of arguments
  //  -- x3 : new target (passed through to callee)
  // -----------------------------------

  Register argc_actual = x0;    // Excluding the receiver.
  Register argc_expected = x2;  // Excluding the receiver.
  Register function = x1;
  Register code_entry = x10;

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ Cmp(argc_actual, argc_expected);
  __ B(lt, &too_few);
  __ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
  __ B(eq, &dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, x2, x10, &stack_overflow);

    Register copy_start = x10;
    Register copy_end = x11;
    Register copy_to = x12;
    Register scratch1 = x13, scratch2 = x14;

    __ Lsl(scratch2, argc_expected, kPointerSizeLog2);

    // Adjust for fp, lr, and the receiver.
    __ Add(copy_start, fp, 3 * kPointerSize);
    __ Add(copy_start, copy_start, Operand(argc_actual, LSL, kPointerSizeLog2));
    __ Sub(copy_end, copy_start, scratch2);
    __ Sub(copy_end, copy_end, kPointerSize);
    __ Mov(copy_to, jssp);

    // Claim space for the arguments, the receiver, and one extra slot.
    // The extra slot ensures we do not write under jssp. It will be popped
    // later.
    __ Add(scratch1, scratch2, 2 * kPointerSize);
    __ Claim(scratch1, 1);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy_2_by_2;
    __ Bind(&copy_2_by_2);
    __ Ldp(scratch1, scratch2,
           MemOperand(copy_start, -2 * kPointerSize, PreIndex));
    __ Stp(scratch1, scratch2,
           MemOperand(copy_to, -2 * kPointerSize, PreIndex));
    __ Cmp(copy_start, copy_end);
    __ B(hi, &copy_2_by_2);

    // Correct the space allocated for the extra slot.
    __ Drop(1);

    __ B(&invoke);
  }
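
  // Note: in the "enough" case above, the receiver and the first `expected`
  // actual arguments are copied down into the adapted frame; any excess
  // actual arguments remain in the caller's frame and are not visible as
  // formal parameters in the callee.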

  {  // Too few parameters: Actual < expected
    __ Bind(&too_few);

    Register copy_from = x10;
    Register copy_end = x11;
    Register copy_to = x12;
    Register scratch1 = x13, scratch2 = x14;

    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, x2, x10, &stack_overflow);

    __ Lsl(scratch2, argc_expected, kPointerSizeLog2);
    __ Lsl(argc_actual, argc_actual, kPointerSizeLog2);

    // Adjust for fp, lr, and the receiver.
    __ Add(copy_from, fp, 3 * kPointerSize);
    __ Add(copy_from, copy_from, argc_actual);
    __ Mov(copy_to, jssp);
    __ Sub(copy_end, copy_to, 1 * kPointerSize);  // Adjust for the receiver.
    __ Sub(copy_end, copy_end, argc_actual);

    // Claim space for the arguments, the receiver, and one extra slot.
    // The extra slot ensures we do not write under jssp. It will be popped
    // later.
    __ Add(scratch1, scratch2, 2 * kPointerSize);
    __ Claim(scratch1, 1);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy_2_by_2;
    __ Bind(&copy_2_by_2);
    __ Ldp(scratch1, scratch2,
           MemOperand(copy_from, -2 * kPointerSize, PreIndex));
    __ Stp(scratch1, scratch2,
           MemOperand(copy_to, -2 * kPointerSize, PreIndex));
    __ Cmp(copy_to, copy_end);
    __ B(hi, &copy_2_by_2);

    __ Mov(copy_to, copy_end);

    // Fill the remaining expected arguments with undefined.
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    __ Add(copy_end, jssp, kPointerSize);

    Label fill;
    __ Bind(&fill);
    __ Stp(scratch1, scratch1,
           MemOperand(copy_to, -2 * kPointerSize, PreIndex));
    __ Cmp(copy_to, copy_end);
    __ B(hi, &fill);

    // Correct the space allocated for the extra slot.
    __ Drop(1);
  }

  // Arguments have been adapted. Now call the entry point.
  __ Bind(&invoke);
  __ Mov(argc_actual, argc_expected);
  // x0 : expected number of arguments
  // x1 : function (passed through to callee)
  // x3 : new target (passed through to callee)
  __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
  __ Call(code_entry);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // Call the entry point without adapting the arguments.
  __ Bind(&dont_adapt_arguments);
  __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
  __ Jump(code_entry);

  __ Bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ Unreachable();
  }
}
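
// For illustration, the JS-level effect of argument adaptation:
//
//   function f(a, b, c) { return [a, b, c]; }
//   f(1);           // "too few":  runs as f(1, undefined, undefined)
//   f(1, 2, 3, 4);  // "enough":   f sees a == 1, b == 2, c == 3; the
//                   //             extra argument is dropped from the formals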

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM64