// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/base/adapters.h"
#include "src/code-factory.h"
#include "src/counters.h"
#include "src/deoptimizer.h"
#include "src/frame-constants.h"
#include "src/frames.h"
#include "src/objects-inl.h"
#include "src/objects/debug-objects.h"
#include "src/objects/js-generator.h"
#include "src/wasm/wasm-linkage.h"
#include "src/wasm/wasm-objects.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  __ LoadAddress(kJavaScriptCallExtraArg1Register,
                 ExternalReference::Create(address));
  if (exit_frame_type == BUILTIN_EXIT) {
    __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK(exit_frame_type == EXIT);
    __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
            RelocInfo::CODE_TARGET);
  }
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    __ SmiTag(rax, rax);
    __ Push(rax);
    // Push a copy of the target function and the new target.
    __ Push(rdi);
    __ Push(rdx);
    // Function is also the parameter to the runtime call.
    __ Push(rdi);

    __ CallRuntime(function_id, 1);
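    // The runtime call returns the Code object to tail-call in rax; stash it
    // in rcx so that rax/rdx/rdi can be restored for the callee.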
    __ movp(rcx, rax);

    // Restore target function and new target.
    __ Pop(rdx);
    __ Pop(rdi);
    __ Pop(rax);
    __ SmiUntag(rax, rax);
  }
  static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ jmp(rcx);
}

namespace {

void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  //  -- rdx: new target
  //  -- rsi: context
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ SmiTag(rcx, rax);
    __ Push(rsi);
    __ Push(rcx);

    // The receiver for the builtin/api call.
    __ PushRoot(Heap::kTheHoleValueRootIndex);

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movp(rcx, rax);
    // ----------- S t a t e -------------
    //  --                rax: number of arguments (untagged)
    //  --                rdi: constructor function
    //  --                rdx: new target
    //  --                rbx: pointer to last argument
    //  --                rcx: counter
    //  -- sp[0*kPointerSize]: the hole (receiver)
    //  -- sp[1*kPointerSize]: number of arguments (tagged)
    //  -- sp[2*kPointerSize]: context
    // -----------------------------------
    __ jmp(&entry);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop, Label::kNear);

    // Call the function.
    // rax: number of arguments (untagged)
    // rdi: constructor function
    // rdx: new target
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION);

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
    // Restore smi-tagged arguments count from the frame.
    __ movp(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));

    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
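  // rbx holds the Smi-tagged argument count, which does not include the
  // receiver; the extra kPointerSize below drops the receiver slot as well.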
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);

  __ ret(0);
}
}  // namespace

// The construct stub for ES5 constructor functions and ES6 class constructors.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments (untagged)
  //  -- rdi: constructor function
  //  -- rdx: new target
  //  -- rsi: context
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);
    Label post_instantiation_deopt_entry, not_create_implicit_receiver;

    // Preserve the incoming parameters on the stack.
    __ SmiTag(rcx, rax);
    __ Push(rsi);
    __ Push(rcx);
    __ Push(rdi);
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ Push(rdx);

    // ----------- S t a t e -------------
    //  --         sp[0*kPointerSize]: new target
    //  --         sp[1*kPointerSize]: padding
    //  -- rdi and sp[2*kPointerSize]: constructor function
    //  --         sp[3*kPointerSize]: argument count
    //  --         sp[4*kPointerSize]: context
    // -----------------------------------

    __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testl(FieldOperand(rbx, SharedFunctionInfo::kFlagsOffset),
             Immediate(SharedFunctionInfo::IsDerivedConstructorBit::kMask));
    __ j(not_zero, &not_create_implicit_receiver, Label::kNear);

    // If not derived class constructor: Allocate the new receiver object.
    __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
    __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
            RelocInfo::CODE_TARGET);
    __ jmp(&post_instantiation_deopt_entry, Label::kNear);

    // Else: use TheHoleValue as receiver for constructor call
    __ bind(&not_create_implicit_receiver);
    __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);

    // ----------- S t a t e -------------
    //  -- rax                          implicit receiver
    //  -- Slot 4 / sp[0*kPointerSize]  new target
    //  -- Slot 3 / sp[1*kPointerSize]  padding
    //  -- Slot 2 / sp[2*kPointerSize]  constructor function
    //  -- Slot 1 / sp[3*kPointerSize]  number of arguments (tagged)
    //  -- Slot 0 / sp[4*kPointerSize]  context
    // -----------------------------------
    // Deoptimizer enters here.
    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
        masm->pc_offset());
    __ bind(&post_instantiation_deopt_entry);

    // Restore new target.
    __ Pop(rdx);

    // Push the allocated receiver to the stack. We need two copies
    // because we may have to return the original one and the calling
    // conventions dictate that the called function pops the receiver.
    __ Push(rax);
    __ Push(rax);

    // ----------- S t a t e -------------
    //  -- sp[0*kPointerSize]  implicit receiver
    //  -- sp[1*kPointerSize]  implicit receiver
    //  -- sp[2*kPointerSize]  padding
    //  -- sp[3*kPointerSize]  constructor function
    //  -- sp[4*kPointerSize]  number of arguments (tagged)
    //  -- sp[5*kPointerSize]  context
    // -----------------------------------

    // Restore constructor function and argument count.
    __ movp(rdi, Operand(rbp, ConstructFrameConstants::kConstructorOffset));
    __ SmiUntag(rax, Operand(rbp, ConstructFrameConstants::kLengthOffset));

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movp(rcx, rax);
    // ----------- S t a t e -------------
    //  --                        rax: number of arguments (untagged)
    //  --                        rdx: new target
    //  --                        rbx: pointer to last argument
    //  --                        rcx: counter (untagged)
    //  --         sp[0*kPointerSize]: implicit receiver
    //  --         sp[1*kPointerSize]: implicit receiver
    //  --         sp[2*kPointerSize]: padding
    //  -- rdi and sp[3*kPointerSize]: constructor function
    //  --         sp[4*kPointerSize]: number of arguments (tagged)
    //  --         sp[5*kPointerSize]: context
    // -----------------------------------
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop, Label::kNear);

    // Call the function.
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION);

    // ----------- S t a t e -------------
    //  -- rax                 constructor result
    //  -- sp[0*kPointerSize]  implicit receiver
    //  -- sp[1*kPointerSize]  padding
    //  -- sp[2*kPointerSize]  constructor function
    //  -- sp[3*kPointerSize]  number of arguments
    //  -- sp[4*kPointerSize]  context
    // -----------------------------------

    // Store offset of return address for deoptimizer.
    masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
        masm->pc_offset());

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, do_throw, leave_frame;

    // If the result is undefined, we jump out to using the implicit receiver.
    __ JumpIfRoot(rax, Heap::kUndefinedValueRootIndex, &use_receiver,
                  Label::kNear);

    // Otherwise we do a smi check and fall through to check if the return value
    // is a valid receiver.

    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(rax, &use_receiver, Label::kNear);

    // If the type of the result (stored in its map) is less than
    // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
    __ j(above_equal, &leave_frame, Label::kNear);
    __ jmp(&use_receiver, Label::kNear);

    __ bind(&do_throw);
    __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ movp(rax, Operand(rsp, 0 * kPointerSize));
    __ JumpIfRoot(rax, Heap::kTheHoleValueRootIndex, &do_throw, Label::kNear);

    __ bind(&leave_frame);
    // Restore the arguments count.
    __ movp(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));
    // Leave construct frame.
  }
  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ ret(0);
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(rdi);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

static void Generate_StackOverflowCheck(
    MacroAssembler* masm, Register num_args, Register scratch,
    Label* stack_overflow,
    Label::Distance stack_overflow_distance = Label::kFar) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movp(scratch, rsp);
  // Make scratch the space we have left. The stack might already have
  // overflowed here, which will cause scratch to become negative.
  __ subp(scratch, kScratchRegister);
  __ sarp(scratch, Immediate(kPointerSizeLog2));
  // Check if the arguments will overflow the stack.
  __ cmpp(scratch, num_args);
  // Signed comparison.
  __ j(less_equal, stack_overflow, stack_overflow_distance);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Expects five C++ function parameters.
  // - Object* new_target
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Open a C++ scope for the FrameScope.
  {
// Platform specific argument handling. After this, the stack contains
// an internal frame and the pushed function and receiver, and
// registers rax and rbx hold the argument count and argument array,
// while rdi holds the function pointer, rsi the context, and rdx the
// new.target.

#ifdef _WIN64
    // MSVC parameters in:
    // rcx        : new_target
    // rdx        : function
    // r8         : receiver
    // r9         : argc
    // [rsp+0x20] : argv

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the isolate).
    ExternalReference context_address = ExternalReference::Create(
        IsolateAddressId::kContextAddress, masm->isolate());
    __ movp(rsi, masm->ExternalOperand(context_address));

    // Push the function and the receiver onto the stack.
    __ Push(rdx);
    __ Push(r8);

    // Load the number of arguments and set up a pointer to the arguments.
    __ movp(rax, r9);
    // Load the previous frame pointer to access C argument on stack
    __ movp(kScratchRegister, Operand(rbp, 0));
    __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
    // Load the function pointer into rdi.
    __ movp(rdi, rdx);
    // Load the new.target into rdx.
    __ movp(rdx, rcx);
#else   // _WIN64
    // GCC parameters in:
    // rdi : new_target
    // rsi : function
    // rdx : receiver
    // rcx : argc
    // r8  : argv

    __ movp(r11, rdi);
    __ movp(rdi, rsi);
    // rdi : function
    // r11 : new_target

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the isolate).
    ExternalReference context_address = ExternalReference::Create(
        IsolateAddressId::kContextAddress, masm->isolate());
    __ movp(rsi, masm->ExternalOperand(context_address));

    // Push the function and receiver onto the stack.
    __ Push(rdi);
    __ Push(rdx);

    // Load the number of arguments and set up a pointer to the arguments.
    __ movp(rax, rcx);
    __ movp(rbx, r8);

    // Load the new.target into rdx.
    __ movp(rdx, r11);
#endif  // _WIN64

    // Current stack contents:
    // [rsp + 2 * kPointerSize ... ] : Internal frame
    // [rsp + kPointerSize]          : function
    // [rsp]                         : receiver
    // Current register contents:
    // rax : argc
    // rbx : argv
    // rsi : context
    // rdi : function
    // rdx : new.target

    // Check if we have enough stack space to push all arguments.
    // Argument count in rax. Clobbers rcx.
    Label enough_stack_space, stack_overflow;
    Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kNear);
    __ jmp(&enough_stack_space, Label::kNear);

    __ bind(&stack_overflow);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();

    __ bind(&enough_stack_space);

    // Copy arguments to the stack in a loop.
    // Register rbx points to array of pointers to handle locations.
    // Push the values of these handles.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    __ Push(Operand(kScratchRegister, 0));  // dereference handle
    __ addp(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpp(rcx, rax);
    __ j(not_equal, &loop, Label::kNear);

    // Invoke the builtin code.
    Handle<Code> builtin = is_construct
                               ? BUILTIN_CODE(masm->isolate(), Construct)
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }

  __ ret(0);
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

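// If sfi_data is an InterpreterData object, unwrap it to the BytecodeArray it
// holds; otherwise leave it unchanged (it is then expected to already be a
// BytecodeArray).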
static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
                                          Register sfi_data,
                                          Register scratch1) {
  Label done;

  __ CmpObjectType(sfi_data, INTERPRETER_DATA_TYPE, scratch1);
  __ j(not_equal, &done, Label::kNear);
  __ movp(sfi_data,
          FieldOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));

  __ bind(&done);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : the value to pass to the generator
  //  -- rdx    : the JSGeneratorObject to resume
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertGeneratorObject(rdx);

  // Store input value into generator object.
  __ movp(FieldOperand(rdx, JSGeneratorObject::kInputOrDebugPosOffset), rax);
  __ RecordWriteField(rdx, JSGeneratorObject::kInputOrDebugPosOffset, rax, rcx,
                      kDontSaveFPRegs);

  // Load suspended function and context.
  __ movp(rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  Operand debug_hook_operand = masm->ExternalOperand(debug_hook);
  __ cmpb(debug_hook_operand, Immediate(0));
  __ j(not_equal, &prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  Operand debug_suspended_generator_operand =
      masm->ExternalOperand(debug_suspended_generator);
  __ cmpp(rdx, debug_suspended_generator_operand);
  __ j(equal, &prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Check the stack for overflow. We are not trying to catch interruptions
  // (i.e. debug break and preemption) here, so check the "real stack limit".
  Label stack_overflow;
  __ CompareRoot(rsp, Heap::kRealStackLimitRootIndex);
  __ j(below, &stack_overflow);

  // Pop return address.
  __ PopReturnAddressTo(rax);

  // Push receiver.
  __ Push(FieldOperand(rdx, JSGeneratorObject::kReceiverOffset));

  // ----------- S t a t e -------------
  //  -- rax    : return address
  //  -- rdx    : the JSGeneratorObject to resume
  //  -- rdi    : generator function
  //  -- rsi    : generator context
  //  -- rsp[0] : generator receiver
  // -----------------------------------

  // Copy the function arguments from the generator object's register file.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movzxwq(
      rcx, FieldOperand(rcx, SharedFunctionInfo::kFormalParameterCountOffset));

  __ movp(rbx,
          FieldOperand(rdx, JSGeneratorObject::kParametersAndRegistersOffset));

  {
    Label done_loop, loop;
    __ Set(r9, 0);

    __ bind(&loop);
    __ cmpl(r9, rcx);
    __ j(greater_equal, &done_loop, Label::kNear);
    __ Push(FieldOperand(rbx, r9, times_pointer_size, FixedArray::kHeaderSize));
    __ addl(r9, Immediate(1));
    __ jmp(&loop);

    __ bind(&done_loop);
  }

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kFunctionDataOffset));
    GetSharedFunctionInfoBytecode(masm, rcx, kScratchRegister);
    __ CmpObjectType(rcx, BYTECODE_ARRAY_TYPE, rcx);
    __ Assert(equal, AbortReason::kMissingBytecodeArray);
  }

  // Resume (Ignition/TurboFan) generator object.
  {
    __ PushReturnAddressFrom(rax);
    __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ movzxwq(rax, FieldOperand(
                        rax, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
    __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
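    // Compute the entry address past the Code object header and tail call it.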
    __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(rcx);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdx);
    __ Push(rdi);
    // Push hole as receiver since we do not use it for stepping.
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(rdx);
    __ movp(rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdx);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(rdx);
    __ movp(rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);

  __ bind(&stack_overflow);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();  // This should be unreachable.
  }
}

// TODO(juliana): if we remove the code below then we don't need all
// the parameters.
static void ReplaceClosureCodeWithOptimizedCode(
    MacroAssembler* masm, Register optimized_code, Register closure,
    Register scratch1, Register scratch2, Register scratch3) {

  // Store the optimized code in the closure.
  __ movp(FieldOperand(closure, JSFunction::kCodeOffset), optimized_code);
  __ movp(scratch1, optimized_code);  // Write barrier clobbers scratch1 below.
  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
                      kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
                                  Register scratch2) {
  Register args_count = scratch1;
  Register return_pc = scratch2;

  // Get the arguments + receiver count.
  __ movp(args_count,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movl(args_count,
          FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ leave();

  // Drop receiver + arguments.
  __ PopReturnAddressTo(return_pc);
  __ addp(rsp, args_count);
  __ PushReturnAddressFrom(return_pc);
}

// Tail-call |function_id| if |smi_entry| == |marker|
static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
                                          Register smi_entry,
                                          OptimizationMarker marker,
                                          Runtime::FunctionId function_id) {
  Label no_match;
  __ SmiCompare(smi_entry, Smi::FromEnum(marker));
  __ j(not_equal, &no_match);
  GenerateTailCallToReturnedCode(masm, function_id);
  __ bind(&no_match);
}

static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                                           Register feedback_vector,
                                           Register scratch1, Register scratch2,
                                           Register scratch3) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee if needed, and caller)
  //  -- rdx : new target (preserved for callee if needed, and caller)
  //  -- rdi : target function (preserved for callee if needed, and caller)
  //  -- feedback vector (preserved for caller if needed)
  // -----------------------------------
  DCHECK(!AreAliased(feedback_vector, rax, rdx, rdi, scratch1, scratch2,
                     scratch3));

  Label optimized_code_slot_is_weak_ref, fallthrough;

  Register closure = rdi;
  Register optimized_code_entry = scratch1;

  __ movp(optimized_code_entry,
          FieldOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));

  // Check if the code entry is a Smi. If yes, we interpret it as an
  // optimization marker. Otherwise, interpret it as a weak reference to a code
  // object.
  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);

  {
    // Optimized code slot is a Smi optimization marker.

    // Fall through if no optimization trigger.
    __ SmiCompare(optimized_code_entry,
                  Smi::FromEnum(OptimizationMarker::kNone));
    __ j(equal, &fallthrough);

    TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
                                  OptimizationMarker::kLogFirstExecution,
                                  Runtime::kFunctionFirstExecution);
    TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
                                  OptimizationMarker::kCompileOptimized,
                                  Runtime::kCompileOptimized_NotConcurrent);
    TailCallRuntimeIfMarkerEquals(
        masm, optimized_code_entry,
        OptimizationMarker::kCompileOptimizedConcurrent,
        Runtime::kCompileOptimized_Concurrent);

    {
      // Otherwise, the marker is InOptimizationQueue, so fall through hoping
      // that an interrupt will eventually update the slot with optimized code.
      if (FLAG_debug_code) {
        __ SmiCompare(optimized_code_entry,
                      Smi::FromEnum(OptimizationMarker::kInOptimizationQueue));
        __ Assert(equal, AbortReason::kExpectedOptimizationSentinel);
      }
      __ jmp(&fallthrough);
    }
  }

  {
    // Optimized code slot is a weak reference.
    __ bind(&optimized_code_slot_is_weak_ref);

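    // Unwrap the weak reference; if it has been cleared, the optimized code
    // is gone, so fall through to the unoptimized path.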
    __ LoadWeakValue(optimized_code_entry, &fallthrough);

    // Check if the optimized code is marked for deopt. If it is, call the
    // runtime to clear it.
    Label found_deoptimized_code;
    __ movp(scratch2,
            FieldOperand(optimized_code_entry, Code::kCodeDataContainerOffset));
    __ testl(
        FieldOperand(scratch2, CodeDataContainer::kKindSpecificFlagsOffset),
        Immediate(1 << Code::kMarkedForDeoptimizationBit));
    __ j(not_zero, &found_deoptimized_code);

    // Optimized code is good, get it into the closure and link the closure into
    // the optimized functions list, then tail call the optimized code.
    // The feedback vector is no longer used, so re-use it as a scratch
    // register.
    ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
                                        scratch2, scratch3, feedback_vector);
    static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
    __ Move(rcx, optimized_code_entry);
    __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(rcx);

    // Optimized code slot contains deoptimized code, evict it and re-enter the
    // closure's code.
    __ bind(&found_deoptimized_code);
    GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
  }

  // Fall-through if the optimized code cell is clear and there is no
  // optimization marker.
  __ bind(&fallthrough);
}

// Advance the current bytecode offset. This simulates what all bytecode
// handlers do upon completion of the underlying operation. Will bail out to a
// label if the bytecode (without prefix) is a return bytecode.
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
                                          Register bytecode_array,
                                          Register bytecode_offset,
                                          Register bytecode, Register scratch1,
                                          Label* if_return) {
  Register bytecode_size_table = scratch1;
  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
                     bytecode));

  __ Move(bytecode_size_table,
          ExternalReference::bytecode_size_table_address());

  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
  Label process_bytecode, extra_wide;
  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
  STATIC_ASSERT(3 ==
                static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
  __ cmpb(bytecode, Immediate(0x3));
  __ j(above, &process_bytecode, Label::kNear);
  __ testb(bytecode, Immediate(0x1));
  __ j(not_equal, &extra_wide, Label::kNear);

  // Load the next bytecode and update table to the wide scaled table.
  __ incl(bytecode_offset);
  __ movzxbp(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
  __ addp(bytecode_size_table,
          Immediate(kIntSize * interpreter::Bytecodes::kBytecodeCount));
  __ jmp(&process_bytecode, Label::kNear);

  __ bind(&extra_wide);
  // Load the next bytecode and update table to the extra wide scaled table.
  __ incl(bytecode_offset);
  __ movzxbp(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
  __ addp(bytecode_size_table,
          Immediate(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));

  __ bind(&process_bytecode);

// Bailout to the return label if this is a return bytecode.
#define JUMP_IF_EQUAL(NAME)                                             \
  __ cmpb(bytecode,                                                     \
          Immediate(static_cast<int>(interpreter::Bytecode::k##NAME))); \
  __ j(equal, if_return, Label::kFar);
  RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
#undef JUMP_IF_EQUAL

  // Otherwise, load the size of the current bytecode and advance the offset.
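  // Each size table entry is an int (kIntSize bytes), hence the times_4 scale.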
  __ addl(bytecode_offset, Operand(bytecode_size_table, bytecode, times_4, 0));
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called
//   o rdx: the incoming new target or generator object
//   o rsi: our context
//   o rbp: the caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  Register closure = rdi;
  Register feedback_vector = rbx;

  // Load the feedback vector from the closure.
  __ movp(feedback_vector,
          FieldOperand(closure, JSFunction::kFeedbackCellOffset));
  __ movp(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
  // Read off the optimized code slot in the feedback vector, and if there
  // is optimized code or an optimization marker, call that instead.
  MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, rcx, r14, r15);

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS function.

  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ movp(rax, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kInterpreterBytecodeArrayRegister,
          FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset));
  GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister,
                                kScratchRegister);

  // Increment invocation count for the function.
  __ incl(
      FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));

  // Check that the function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rax);
    __ Assert(
        equal,
        AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Reset code age.
  __ movb(FieldOperand(kInterpreterBytecodeArrayRegister,
                       BytecodeArray::kBytecodeAgeOffset),
          Immediate(BytecodeArray::kNoAgeBytecodeAge));

  // Load initial bytecode offset.
  __ movp(kInterpreterBytecodeOffsetRegister,
          Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push bytecode array and Smi tagged bytecode offset.
  __ Push(kInterpreterBytecodeArrayRegister);
  __ SmiTag(rcx, kInterpreterBytecodeOffsetRegister);
  __ Push(rcx);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ movp(rax, rsp);
    __ subp(rax, rcx);
    __ CompareRoot(rax, Heap::kRealStackLimitRootIndex);
    __ j(above_equal, &ok, Label::kNear);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    __ j(always, &loop_check, Label::kNear);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ Push(rax);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ subp(rcx, Immediate(kPointerSize));
    __ j(greater_equal, &loop_header, Label::kNear);
  }

  // If the bytecode array has a valid incoming new target or generator object
  // register, initialize it with the incoming value passed in rdx.
  Label no_incoming_new_target_or_generator_register;
  __ movsxlq(
      rax,
      FieldOperand(kInterpreterBytecodeArrayRegister,
                   BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ testl(rax, rax);
  __ j(zero, &no_incoming_new_target_or_generator_register, Label::kNear);
  __ movp(Operand(rbp, rax, times_pointer_size, 0), rdx);
  __ bind(&no_incoming_new_target_or_generator_register);

  // Load accumulator with undefined.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);

  // Load the dispatch table into a register and dispatch to the bytecode
  // handler at the current bytecode offset.
  Label do_dispatch;
  __ bind(&do_dispatch);
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
  __ movzxbp(r11, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(
      kJavaScriptCallCodeStartRegister,
      Operand(kInterpreterDispatchTableRegister, r11, times_pointer_size, 0));
  __ call(kJavaScriptCallCodeStartRegister);
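  // Record this return PC: Generate_InterpreterEnterBytecode uses it to
  // re-enter the trampoline at exactly this point.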
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // Any returns to the entry trampoline are either due to the return bytecode
  // or the interpreter tail calling a builtin and then a dispatch.

  // Get bytecode array and bytecode offset from the stack frame.
  __ movp(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movp(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister,
              kInterpreterBytecodeOffsetRegister);

  // Either return, or advance to the next bytecode and dispatch.
  Label do_return;
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, rbx, rcx,
                                &do_return);
  __ jmp(&do_dispatch);

  __ bind(&do_return);
  // The return value is in rax.
  LeaveInterpreterFrame(masm, rbx, rcx);
  __ ret(0);
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args,
                                         Register start_address,
                                         Register scratch) {
  // Find the address of the last argument.
  __ Move(scratch, num_args);
  __ shlp(scratch, Immediate(kPointerSizeLog2));
  __ negp(scratch);
  __ addp(scratch, start_address);

  // Push the arguments.
  Label loop_header, loop_check;
  __ j(always, &loop_check, Label::kNear);
  __ bind(&loop_header);
  __ Push(Operand(start_address, 0));
  __ subp(start_address, Immediate(kPointerSize));
  __ bind(&loop_check);
  __ cmpp(start_address, scratch);
  __ j(greater, &loop_header, Label::kNear);
}

// static
void Builtins::Generate_InterpreterPushArgsThenCallImpl(
    MacroAssembler* masm, ConvertReceiverMode receiver_mode,
    InterpreterPushArgsMode mode) {
  DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rbx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  //  -- rdi : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;

  // Number of values to be pushed.
  __ leal(rcx, Operand(rax, 1));  // Add one for receiver.

  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, rcx, rdx, &stack_overflow);

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push "undefined" as the receiver arg if we need to.
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ decl(rcx);  // Subtract one for receiver.
  }

  // rbx and rdx will be modified.
  Generate_InterpreterPushArgs(masm, rcx, rbx, rdx);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(rbx);                 // Pass the spread in a register
    __ decl(rax);                // Subtract one for spread
  }

  // Call the target.
  __ PushReturnAddressFrom(kScratchRegister);  // Re-push return address.

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    __ Jump(masm->isolate()->builtins()->Call(receiver_mode),
            RelocInfo::CODE_TARGET);
  }

  // Throw stack overflow exception.
  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();
  }
}

// static
void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  //  -- rbx : the allocation site feedback if available, undefined otherwise
  //  -- rcx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  // -----------------------------------
  Label stack_overflow;

  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, rax, r8, &stack_overflow);

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push slot for the receiver to be constructed.
  __ Push(Immediate(0));

  // rcx and r8 will be modified.
  Generate_InterpreterPushArgs(masm, rax, rcx, r8);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(rbx);                 // Pass the spread in a register
    __ decl(rax);                // Subtract one for spread

    // Push return address in preparation for the tail-call.
    __ PushReturnAddressFrom(kScratchRegister);
  } else {
    __ PushReturnAddressFrom(kScratchRegister);
    __ AssertUndefinedOrAllocationSite(rbx);
  }

  if (mode == InterpreterPushArgsMode::kArrayFunction) {
    // Tail call to the array construct stub (still in the caller
    // context at this point).
    __ AssertFunction(rdi);
    // Jump to the constructor function (rax, rbx, rdx passed on).
    Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
    __ Jump(code, RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // Call the constructor (rax, rdx, rdi passed on).
    __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    // Call the constructor (rax, rdx, rdi passed on).
    __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
  }

  // Throw stack overflow exception.
  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();
  }
}

static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Label builtin_trampoline, trampoline_loaded;
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);

  // If the SFI function_data is an InterpreterData, get the trampoline stored
  // in it, otherwise get the trampoline from the builtins list.
  __ movp(rbx, Operand(rbp, StandardFrameConstants::kFunctionOffset));
  __ movp(rbx, FieldOperand(rbx, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rbx, FieldOperand(rbx, SharedFunctionInfo::kFunctionDataOffset));
  __ CmpObjectType(rbx, INTERPRETER_DATA_TYPE, kScratchRegister);
  __ j(not_equal, &builtin_trampoline, Label::kNear);

  __ movp(rbx,
          FieldOperand(rbx, InterpreterData::kInterpreterTrampolineOffset));
  __ jmp(&trampoline_loaded, Label::kNear);

  __ bind(&builtin_trampoline);
  __ Move(rbx, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));

  __ bind(&trampoline_loaded);
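  // Compute the return address into the trampoline: the recorded PC offset
  // plus the size of the Code object header.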
  __ addp(rbx, Immediate(interpreter_entry_return_pc_offset->value() +
                         Code::kHeaderSize - kHeapObjectTag));
  __ Push(rbx);

  // Initialize dispatch table register.
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Get the bytecode array pointer from the frame.
  __ movp(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check that the function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rbx);
    __ Assert(
        equal,
        AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ movp(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister,
              kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ movzxbp(r11, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(
      kJavaScriptCallCodeStartRegister,
      Operand(kInterpreterDispatchTableRegister, r11, times_pointer_size, 0));
  __ jmp(kJavaScriptCallCodeStartRegister);
}

void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Get bytecode array and bytecode offset from the stack frame.
  __ movp(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movp(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister,
              kInterpreterBytecodeOffsetRegister);

  // Load the current bytecode.
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));

  // Advance to the next bytecode.
  Label if_return;
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, rbx, rcx,
                                &if_return);

  // Convert the new bytecode offset to a Smi and save it in the stack frame.
  __ SmiTag(rbx, kInterpreterBytecodeOffsetRegister);
  __ movp(Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp), rbx);

  Generate_InterpreterEnterBytecode(masm);

  // We should never take the if_return path.
  __ bind(&if_return);
  __ Abort(AbortReason::kInvalidBytecodeAdvance);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}

void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  Label failed;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve argument count for later compare.
    __ movp(rcx, rax);
    // Push the number of arguments to the callee.
    __ SmiTag(rax, rax);
    __ Push(rax);
    // Push a copy of the target function and the new target.
    __ Push(rdi);
    __ Push(rdx);

    // The function.
    __ Push(rdi);
    // Copy arguments from caller (stdlib, foreign, heap).
    Label args_done;
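    // Depending on argc (0..3), push the caller-provided arguments and pad the
    // remainder with undefined so the runtime always receives the function
    // plus three arguments (stdlib, foreign, heap).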
    for (int j = 0; j < 4; ++j) {
      Label over;
      if (j < 3) {
        __ cmpp(rcx, Immediate(j));
        __ j(not_equal, &over, Label::kNear);
      }
      for (int i = j - 1; i >= 0; --i) {
        __ Push(Operand(
            rbp, StandardFrameConstants::kCallerSPOffset + i * kPointerSize));
      }
      for (int i = 0; i < 3 - j; ++i) {
        __ PushRoot(Heap::kUndefinedValueRootIndex);
      }
      if (j < 3) {
        __ jmp(&args_done, Label::kNear);
        __ bind(&over);
      }
    }
    __ bind(&args_done);

    // Call the runtime; on success, unwind this frame and the parent frame.
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
    // A smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(rax, &failed, Label::kNear);

    __ Drop(2);
    __ Pop(rcx);
    __ SmiUntag(rcx, rcx);
    scope.GenerateLeaveFrame();

    __ PopReturnAddressTo(rbx);
    __ incp(rcx);
    __ leap(rsp, Operand(rsp, rcx, times_pointer_size, 0));
    __ PushReturnAddressFrom(rbx);
    __ ret(0);

    __ bind(&failed);
    // Restore target function and new target.
    __ Pop(rdx);
    __ Pop(rdi);
    __ Pop(rax);
    __ SmiUntag(rax, rax);
  }
  // On failure, tail call back to regular JS by re-calling the function,
  // which has been reset to the compile lazy builtin.
1288   __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
1289   __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1290   __ jmp(rcx);
1291 }
1292 
1293 namespace {
Generate_ContinueToBuiltinHelper(MacroAssembler * masm,bool java_script_builtin,bool with_result)1294 void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
1295                                       bool java_script_builtin,
1296                                       bool with_result) {
1297   const RegisterConfiguration* config(RegisterConfiguration::Default());
1298   int allocatable_register_count = config->num_allocatable_general_registers();
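  // A sketch of the stack at this point, as laid out by the deoptimizer
  // (growing downwards): the fixed builtin continuation frame, then one slot
  // per allocatable general register, with rsp pointing at the last of those
  // slots. The code below optionally stores the return value, pops all the
  // registers back, and returns to the continuation builtin's entry.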
  if (with_result) {
    // Overwrite the hole inserted by the deoptimizer with the return value
    // from the LAZY deopt point.
    __ movq(Operand(rsp,
                    config->num_allocatable_general_registers() * kPointerSize +
                        BuiltinContinuationFrameConstants::kFixedFrameSize),
            rax);
  }
  for (int i = allocatable_register_count - 1; i >= 0; --i) {
    int code = config->GetAllocatableGeneralCode(i);
    __ popq(Register::from_code(code));
    if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
      __ SmiUntag(Register::from_code(code), Register::from_code(code));
    }
  }
  __ movq(
      rbp,
      Operand(rsp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
  const int offsetToPC =
      BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp - kPointerSize;
  __ popq(Operand(rsp, offsetToPC));
  __ Drop(offsetToPC / kPointerSize);
  __ addq(Operand(rsp, 0), Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ Ret();
}
}  // namespace

void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, false);
}

void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, true);
}

void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, false);
}

void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, true);
}

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), rax.code());
  __ movp(rax, Operand(rsp, kPCOnStackSize));
  __ ret(1 * kPointerSize);  // Remove rax.
}

// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : argArray
  //  -- rsp[16] : thisArg
  //  -- rsp[24] : receiver
  // -----------------------------------
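
  // Roughly, for a call receiver.apply(thisArg, argArray): if argArray is
  // null or undefined, receiver is invoked with no arguments; otherwise it
  // is invoked with the elements of argArray, in both cases with thisArg as
  // the receiver.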

  // 1. Load receiver into rdi, argArray into rbx (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label no_arg_array, no_this_arg;
    StackArgumentsAccessor args(rsp, rax);
    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
    __ movp(rbx, rdx);
    __ movp(rdi, args.GetReceiverOperand());
    __ testp(rax, rax);
    __ j(zero, &no_this_arg, Label::kNear);
    {
      __ movp(rdx, args.GetArgumentOperand(1));
      __ cmpp(rax, Immediate(1));
      __ j(equal, &no_arg_array, Label::kNear);
      __ movp(rbx, args.GetArgumentOperand(2));
      __ bind(&no_arg_array);
    }
    __ bind(&no_this_arg);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ Push(rdx);
    __ PushReturnAddressFrom(rcx);
  }

  // ----------- S t a t e -------------
  //  -- rbx     : argArray
  //  -- rdi     : receiver
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : thisArg
  // -----------------------------------

  // 2. We don't need to check explicitly for callable receiver here,
  // since that's the first thing the Call/CallWithArrayLike builtins
  // will do.

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(rbx, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
  __ JumpIfRoot(rbx, Heap::kUndefinedValueRootIndex, &no_arguments,
                Label::kNear);

  // 4a. Apply the receiver to the given argArray.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver. Since we did not create a frame for
  // Function.prototype.apply() yet, we use a normal Call builtin here.
  __ bind(&no_arguments);
  {
    __ Set(rax, 0);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }
}

// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // Stack Layout:
  // rsp[0]           : Return address
  // rsp[8]           : Argument n
  // rsp[16]          : Argument n-1
  //  ...
  // rsp[8 * n]       : Argument 1
  // rsp[8 * (n + 1)] : Receiver (callable to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
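  // Roughly, f.call(thisArg, arg1, ..., argN) invokes f with thisArg as the
  // receiver and the remaining arguments as its own arguments; the code below
  // achieves this by shifting the stack in place rather than building a new
  // frame.
  //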
  // 1. Make sure we have at least one argument.
  {
    Label done;
    __ testp(rax, rax);
    __ j(not_zero, &done, Label::kNear);
    __ PopReturnAddressTo(rbx);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(rbx);
    __ incp(rax);
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  {
    StackArgumentsAccessor args(rsp, rax);
    __ movp(rdi, args.GetReceiverOperand());
  }

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  {
    Label loop;
    __ movp(rcx, rax);
    StackArgumentsAccessor args(rsp, rcx);
    __ bind(&loop);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ movp(args.GetArgumentOperand(0), rbx);
    __ decp(rcx);
    __ j(not_zero, &loop);              // While non-zero.
    __ DropUnderReturnAddress(1, rbx);  // Drop one slot under return address.
    __ decp(rax);  // One fewer argument (first argument is new receiver).
  }

  // 4. Call the callable.
  // Since we did not create a frame for Function.prototype.call() yet,
  // we use a normal Call builtin here.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : argumentsList
  //  -- rsp[16] : thisArgument
  //  -- rsp[24] : target
  //  -- rsp[32] : receiver
  // -----------------------------------
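
  // In JS terms (a sketch): Reflect.apply(target, thisArgument, argumentsList)
  // calls target with thisArgument as the receiver and the elements of
  // argumentsList as arguments; missing parameters default to undefined.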

  // 1. Load target into rdi (if present), argumentsList into rbx (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label done;
    StackArgumentsAccessor args(rsp, rax);
    __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
    __ movp(rdx, rdi);
    __ movp(rbx, rdi);
    __ cmpp(rax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ movp(rdi, args.GetArgumentOperand(1));  // target
    __ j(equal, &done, Label::kNear);
    __ movp(rdx, args.GetArgumentOperand(2));  // thisArgument
    __ cmpp(rax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(3));  // argumentsList
    __ bind(&done);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ Push(rdx);
    __ PushReturnAddressFrom(rcx);
  }

  // ----------- S t a t e -------------
  //  -- rbx     : argumentsList
  //  -- rdi     : target
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : thisArgument
  // -----------------------------------

  // 2. We don't need to check explicitly for callable target here,
  // since that's the first thing the Call/CallWithArrayLike builtins
  // will do.

  // 3. Apply the target to the given argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : new.target (optional)
  //  -- rsp[16] : argumentsList
  //  -- rsp[24] : target
  //  -- rsp[32] : receiver
  // -----------------------------------
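
  // In JS terms (a sketch): Reflect.construct(target, argumentsList, newTarget)
  // performs new target(...argumentsList) with new.target set to newTarget,
  // which defaults to target when the third argument is absent.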

  // 1. Load target into rdi (if present), argumentsList into rbx (if present),
  // new.target into rdx (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label done;
    StackArgumentsAccessor args(rsp, rax);
    __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
    __ movp(rdx, rdi);
    __ movp(rbx, rdi);
    __ cmpp(rax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ movp(rdi, args.GetArgumentOperand(1));  // target
    __ movp(rdx, rdi);                         // new.target defaults to target
    __ j(equal, &done, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(2));  // argumentsList
    __ cmpp(rax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ movp(rdx, args.GetArgumentOperand(3));  // new.target
    __ bind(&done);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(rcx);
  }

  // ----------- S t a t e -------------
  //  -- rbx     : argumentsList
  //  -- rdx     : new.target
  //  -- rdi     : target
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : receiver (undefined)
  // -----------------------------------

  // 2. We don't need to check explicitly for constructor target here,
  // since that's the first thing the Construct/ConstructWithArrayLike
  // builtins will do.

  // 3. We don't need to check explicitly for constructor new.target here,
  // since that's the second thing the Construct/ConstructWithArrayLike
  // builtins will do.

  // 4. Construct the target with the given new.target and argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
          RelocInfo::CODE_TARGET);
}

void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray function should be a map.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // The Smi check below covers both a nullptr and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi,
             AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
          RelocInfo::CODE_TARGET);
}

static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ pushq(rbp);
  __ movp(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));

  // Push the function on the stack.
  __ Push(rdi);

  // Preserve the number of arguments on the stack. Must preserve rax,
  // rbx and rcx because these registers are used when copying the
  // arguments and the receiver.
  __ SmiTag(r8, rax);
  __ Push(r8);

  __ Push(Immediate(0));  // Padding.
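
  // A sketch of the resulting frame, relative to rbp (offsets assumed to
  // match ArgumentsAdaptorFrameConstants):
  //   rbp + 8  : return address
  //   rbp + 0  : caller's rbp
  //   rbp - 8  : ARGUMENTS_ADAPTOR frame marker
  //   rbp - 16 : function (rdi)
  //   rbp - 24 : number of arguments (as a Smi)
  //   rbp - 32 : padding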
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. The number is a Smi.
  __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movp(rsp, rbp);
  __ popq(rbp);

  // Remove caller arguments from the stack.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdx : new target (passed through to callee)
  //  -- rdi : function (passed through to callee)
  // -----------------------------------
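
  // For example (a sketch): a function declared with three formal parameters
  // but called with one argument reaches the "too few" path below, which
  // copies the receiver and the single actual argument into the new frame and
  // pushes two undefined values, so the callee always sees its expected
  // argument count.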

  Label invoke, dont_adapt_arguments, stack_overflow;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->arguments_adaptors(), 1);

  Label enough, too_few;
  __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);
  __ cmpp(rax, rbx);
  __ j(less, &too_few);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    // The registers rcx and r8 will be modified. The register rbx is only read.
    Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rax, 0));
    __ subp(rax, Immediate(kPointerSize));
    __ cmpp(r8, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);

    EnterArgumentsAdaptorFrame(masm);
    // The registers rcx and r8 will be modified. The register rbx is only read.
    Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rdi, 0));
    __ subp(rdi, Immediate(kPointerSize));
    __ cmpp(r8, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incp(r8);
    __ Push(kScratchRegister);
    __ cmpp(r8, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movp(rdi, Operand(rbp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ movp(rax, rbx);
  // rax : expected number of arguments
  // rdx : new target (passed through to callee)
  // rdi : function (passed through to callee)
  static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
  __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(rcx);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
  __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(rcx);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();
  }
}

// static
void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
                                               Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- rdi    : target
  //  -- rax    : number of parameters on the stack (not including the receiver)
  //  -- rbx    : arguments list (a FixedArray)
  //  -- rcx    : len (number of elements to push from args)
  //  -- rdx    : new.target (for [[Construct]])
  //  -- rsp[0] : return address
  // -----------------------------------
  if (masm->emit_debug_code()) {
    // Allow rbx to be a FixedArray, or a FixedDoubleArray if rcx == 0.
    Label ok, fail;
    __ AssertNotSmi(rbx);
    Register map = r9;
    __ movp(map, FieldOperand(rbx, HeapObject::kMapOffset));
    __ CmpInstanceType(map, FIXED_ARRAY_TYPE);
    __ j(equal, &ok);
    __ CmpInstanceType(map, FIXED_DOUBLE_ARRAY_TYPE);
    __ j(not_equal, &fail);
    __ cmpl(rcx, Immediate(0));
    __ j(equal, &ok);
    // Fall through.
    __ bind(&fail);
    __ Abort(AbortReason::kOperandIsNotAFixedArray);

    __ bind(&ok);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
    __ movp(r8, rsp);
    // Make r8 the space we have left. The stack might already be overflowed
    // here, which will cause r8 to become negative.
    __ subp(r8, kScratchRegister);
    __ sarp(r8, Immediate(kPointerSizeLog2));
    // Check if the arguments will overflow the stack.
    __ cmpp(r8, rcx);
    __ j(greater, &done, Label::kNear);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // Push additional arguments onto the stack.
  {
    __ PopReturnAddressTo(r8);
    __ Set(r9, 0);
    Label done, push, loop;
    __ bind(&loop);
    __ cmpl(r9, rcx);
    __ j(equal, &done, Label::kNear);
    // Turn the hole into undefined as we go.
    __ movp(r11,
            FieldOperand(rbx, r9, times_pointer_size, FixedArray::kHeaderSize));
    __ CompareRoot(r11, Heap::kTheHoleValueRootIndex);
    __ j(not_equal, &push, Label::kNear);
    __ LoadRoot(r11, Heap::kUndefinedValueRootIndex);
    __ bind(&push);
    __ Push(r11);
    __ incl(r9);
    __ jmp(&loop);
    __ bind(&done);
    __ PushReturnAddressFrom(r8);
    __ addq(rax, r9);
  }

  // Tail-call to the actual Call or Construct builtin.
  __ Jump(code, RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
                                                      CallOrConstructMode mode,
                                                      Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (for [[Construct]] calls)
  //  -- rdi : the target to call (can be any Object)
  //  -- rcx : start index (to support rest parameters)
  // -----------------------------------

  // Check if new.target has a [[Construct]] internal method.
  if (mode == CallOrConstructMode::kConstruct) {
    Label new_target_constructor, new_target_not_constructor;
    __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
    __ movp(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
    __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
             Immediate(Map::IsConstructorBit::kMask));
    __ j(not_zero, &new_target_constructor, Label::kNear);
    __ bind(&new_target_not_constructor);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ Push(rdx);
      __ CallRuntime(Runtime::kThrowNotConstructor);
    }
    __ bind(&new_target_constructor);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ cmpp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
          Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &arguments_adaptor, Label::kNear);
  {
    __ movp(r8, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ movp(r8, FieldOperand(r8, JSFunction::kSharedFunctionInfoOffset));
    __ movzxwq(
        r8, FieldOperand(r8, SharedFunctionInfo::kFormalParameterCountOffset));
    __ movp(rbx, rbp);
  }
  __ jmp(&arguments_done, Label::kNear);
  __ bind(&arguments_adaptor);
  {
    __ SmiUntag(r8,
                Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  }
  __ bind(&arguments_done);

  Label stack_done, stack_overflow;
  __ subl(r8, rcx);
  __ j(less_equal, &stack_done);
  {
    // Check for stack overflow.
    Generate_StackOverflowCheck(masm, r8, rcx, &stack_overflow, Label::kNear);

    // Forward the arguments from the caller frame.
    {
      Label loop;
      __ addl(rax, r8);
      __ PopReturnAddressTo(rcx);
      __ bind(&loop);
      {
        StackArgumentsAccessor args(rbx, r8, ARGUMENTS_DONT_CONTAIN_RECEIVER);
        __ Push(args.GetArgumentOperand(0));
        __ decl(r8);
        __ j(not_zero, &loop);
      }
      __ PushReturnAddressFrom(rcx);
    }
  }
  __ jmp(&stack_done, Label::kNear);
  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  __ bind(&stack_done);

  // Tail-call to the {code} handler.
  __ Jump(code, RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the function to call (checked to be a JSFunction)
  // -----------------------------------
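
  // A sketch of the receiver conversion performed below (cf. ES6
  // OrdinaryCallBindThis): strict-mode and native functions take the receiver
  // as-is; for sloppy-mode functions a null or undefined receiver is replaced
  // by the global proxy, and any other primitive receiver is boxed via
  // ToObject.
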
  StackArgumentsAccessor args(rsp, rax);
  __ AssertFunction(rdi);

  // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ testl(FieldOperand(rdx, SharedFunctionInfo::kFlagsOffset),
           Immediate(SharedFunctionInfo::IsClassConstructorBit::kMask));
  __ j(not_zero, &class_constructor);

  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the shared function info.
  //  -- rdi : the function to call (checked to be a JSFunction)
  // -----------------------------------

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ testl(FieldOperand(rdx, SharedFunctionInfo::kFlagsOffset),
           Immediate(SharedFunctionInfo::IsNativeBit::kMask |
                     SharedFunctionInfo::IsStrictBit::kMask));
  __ j(not_zero, &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- rax : the number of arguments (not including the receiver)
    //  -- rdx : the shared function info.
    //  -- rdi : the function to call (checked to be a JSFunction)
    //  -- rsi : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(rcx);
    } else {
      Label convert_to_object, convert_receiver;
      __ movp(rcx, args.GetReceiverOperand());
      __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
      __ j(above_equal, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(rcx, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy, Label::kNear);
        __ JumpIfNotRoot(rcx, Heap::kNullValueRootIndex, &convert_to_object,
                         Label::kNear);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(rcx);
        }
        __ jmp(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(rax, rax);
        __ Push(rax);
        __ Push(rdi);
        __ movp(rax, rcx);
        __ Push(rsi);
        __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
                RelocInfo::CODE_TARGET);
        __ Pop(rsi);
        __ movp(rcx, rax);
        __ Pop(rdi);
        __ Pop(rax);
        __ SmiUntag(rax, rax);
      }
      __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ movp(args.GetReceiverOperand(), rcx);
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the shared function info.
  //  -- rdi : the function to call (checked to be a JSFunction)
  //  -- rsi : the function context.
  // -----------------------------------

  __ movzxwq(
      rbx, FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(rax);
  ParameterCount expected(rbx);

  __ InvokeFunctionCode(rdi, no_reg, expected, actual, JUMP_FUNCTION);

  // The function is a "classConstructor", so we need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(rdi);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}

namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : new.target (only in case of [[Construct]])
  //  -- rdi : target (checked to be a JSBoundFunction)
  // -----------------------------------
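
  // In JS terms (a sketch): for bound = f.bind(thisArg, b1, b2), a call
  // bound(a1, a2) must invoke f with arguments (b1, b2, a1, a2). The code
  // below therefore shifts the existing arguments and return address down the
  // stack and copies the [[BoundArguments]] into the freed slots.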

  // Load [[BoundArguments]] into rcx and the length of that into rbx.
  Label no_bound_arguments;
  __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
  __ SmiUntag(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
  __ testl(rbx, rbx);
  __ j(zero, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- rax : the number of arguments (not including the receiver)
    //  -- rdx : new.target (only in case of [[Construct]])
    //  -- rdi : target (checked to be a JSBoundFunction)
    //  -- rcx : the [[BoundArguments]] (implemented as FixedArray)
    //  -- rbx : the number of [[BoundArguments]] (checked to be non-zero)
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ leap(kScratchRegister, Operand(rbx, times_pointer_size, 0));
      __ subp(rsp, kScratchRegister);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(rsp, Heap::kRealStackLimitRootIndex);
      __ j(greater, &done, Label::kNear);  // Signed comparison.
      // Restore the stack pointer.
      __ leap(rsp, Operand(rsp, rbx, times_pointer_size, 0));
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Adjust effective number of arguments to include return address.
    __ incl(rax);

    // Relocate arguments and return address down the stack.
    {
      Label loop;
      __ Set(rcx, 0);
      __ leap(rbx, Operand(rsp, rbx, times_pointer_size, 0));
      __ bind(&loop);
      __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
      __ movp(Operand(rsp, rcx, times_pointer_size, 0), kScratchRegister);
      __ incl(rcx);
      __ cmpl(rcx, rax);
      __ j(less, &loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
      __ SmiUntag(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
      __ bind(&loop);
      __ decl(rbx);
      __ movp(kScratchRegister, FieldOperand(rcx, rbx, times_pointer_size,
                                             FixedArray::kHeaderSize));
      __ movp(Operand(rsp, rax, times_pointer_size, 0), kScratchRegister);
      __ leal(rax, Operand(rax, 1));
      __ j(greater, &loop);
    }

    // Adjust effective number of arguments (rax contains the number of
    // arguments from the call plus return address plus the number of
    // [[BoundArguments]]), so we need to subtract one for the return address.
    __ decl(rax);
  }
  __ bind(&no_bound_arguments);
}

}  // namespace

// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(rdi);

  // Patch the receiver to [[BoundThis]].
  StackArgumentsAccessor args(rsp, rax);
  __ movp(rbx, FieldOperand(rdi, JSBoundFunction::kBoundThisOffset));
  __ movp(args.GetReceiverOperand(), rbx);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the target to call (can be any Object)
  // -----------------------------------
  StackArgumentsAccessor args(rsp, rax);

  Label non_callable;
  __ JumpIfSmi(rdi, &non_callable);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
          RelocInfo::CODE_TARGET, equal);

  __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
  __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
          RelocInfo::CODE_TARGET, equal);

  // Check if target has a [[Call]] internal method.
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(Map::IsCallableBit::kMask));
  __ j(zero, &non_callable, Label::kNear);

  // Check if target is a proxy; if so, call the CallProxy external builtin.
  __ CmpInstanceType(rcx, JS_PROXY_TYPE);
  __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET,
          equal);

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).

  // Overwrite the original receiver with the (original) target.
  __ movp(args.GetReceiverOperand(), rdi);
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdi);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}

// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (checked to be a constructor)
  //  -- rdi : the constructor to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertConstructor(rdi);
  __ AssertFunction(rdi);

  // The calling convention for function-specific ConstructStubs requires
  // rbx to contain either an AllocationSite or undefined.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);

  // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ testl(FieldOperand(rcx, SharedFunctionInfo::kFlagsOffset),
           Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
  __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
          RelocInfo::CODE_TARGET, not_zero);

  __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (checked to be a constructor)
  //  -- rdi : the constructor to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertConstructor(rdi);
  __ AssertBoundFunction(rdi);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label done;
    __ cmpp(rdi, rdx);
    __ j(not_equal, &done, Label::kNear);
    __ movp(rdx,
            FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  // -----------------------------------
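
  // A summary of the dispatch below: JSFunction and JSBoundFunction targets
  // go to their specialized Construct builtins, proxies go to ConstructProxy,
  // any other object carrying a [[Construct]] internal method is routed
  // through the call-as-constructor delegate, and everything else throws.
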
  StackArgumentsAccessor args(rsp, rax);

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(rdi, &non_constructor);

  // Check if target has a [[Construct]] internal method.
  __ movq(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(Map::IsConstructorBit::kMask));
  __ j(zero, &non_constructor);

  // Dispatch based on instance type.
  __ CmpInstanceType(rcx, JS_FUNCTION_TYPE);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
          RelocInfo::CODE_TARGET, equal);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
          RelocInfo::CODE_TARGET, equal);

  // Only dispatch to proxies after checking whether they are constructors.
  __ CmpInstanceType(rcx, JS_PROXY_TYPE);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy), RelocInfo::CODE_TARGET,
          equal);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ movp(args.GetReceiverOperand(), rdi);
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
          RelocInfo::CODE_TARGET);
}

static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
                                              bool has_handler_frame) {
  // Lookup the function in the JavaScript frame.
  if (has_handler_frame) {
    __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
    __ movp(rax, Operand(rax, JavaScriptFrameConstants::kFunctionOffset));
  } else {
    __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(rax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  Label skip;
  // If the code object is null, just return to the caller.
  __ testp(rax, rax);
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Drop any potential handler frame that may be sitting on top of the actual
  // JavaScript frame. This is the case when OSR is triggered from bytecode.
  if (has_handler_frame) {
    __ leave();
  }

  // Load deoptimization data from the code object.
  __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ SmiUntag(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
                                    DeoptimizationData::kOsrPcOffsetIndex) -
                                    kHeapObjectTag));

  // Compute the target address = code_obj + header_size + osr_offset.
  __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ movq(StackOperandForReturnAddress(0), rax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}

void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, false);
}

void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, true);
}

void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  // The function index was pushed to the stack by the caller as int32.
  __ Pop(r11);
  // Convert to Smi for the runtime call.
  __ SmiTag(r11, r11);
  {
    HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
    FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);

    // Save all parameter registers (see wasm-linkage.cc). They might be
    // overwritten in the runtime call below. We don't have any callee-saved
    // registers in wasm, so no need to store anything else.
    static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedGpParamRegs ==
                      arraysize(wasm::kGpParamRegisters),
                  "frame size mismatch");
    for (Register reg : wasm::kGpParamRegisters) {
      __ Push(reg);
    }
    static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedFpParamRegs ==
                      arraysize(wasm::kFpParamRegisters),
                  "frame size mismatch");
    __ subp(rsp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
    int offset = 0;
    for (DoubleRegister reg : wasm::kFpParamRegisters) {
      __ movdqu(Operand(rsp, offset), reg);
      offset += kSimd128Size;
    }

    // Push the WASM instance as an explicit argument to WasmCompileLazy.
    __ Push(kWasmInstanceRegister);
    // Push the function index as second argument.
    __ Push(r11);
    // Load the correct CEntry builtin from the instance object.
    __ movp(rcx, FieldOperand(kWasmInstanceRegister,
                              WasmInstanceObject::kCEntryStubOffset));
    // Initialize the JavaScript context with 0. CEntry will use it to
    // set the current context on the isolate.
    __ Move(kContextRegister, Smi::kZero);
    __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, rcx);
    // The entrypoint address is the return value.
    __ movq(r11, kReturnRegister0);

    // Restore registers.
    for (DoubleRegister reg : base::Reversed(wasm::kFpParamRegisters)) {
      offset -= kSimd128Size;
      __ movdqu(reg, Operand(rsp, offset));
    }
    DCHECK_EQ(0, offset);
    __ addp(rsp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
    for (Register reg : base::Reversed(wasm::kGpParamRegisters)) {
      __ Pop(reg);
    }
  }
  // Finally, jump to the entrypoint.
  __ jmp(r11);
}

void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
                               SaveFPRegsMode save_doubles, ArgvMode argv_mode,
                               bool builtin_exit_frame) {
  // rax: number of arguments including receiver
  // rbx: pointer to C function  (C callee-saved)
  // rbp: frame pointer of calling JS frame (restored after C call)
  // rsp: stack pointer  (restored after C call)
  // rsi: current context (restored)
  //
  // If argv_mode == kArgvInRegister:
  // r15: pointer to the first argument
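  //
  // The C function called below is assumed to follow the usual V8 runtime
  // convention, roughly:
  //
  //   Object* f(int argc, Object** argv, Isolate* isolate);
  //
  // with results too large for registers written through a hidden memory
  // argument instead (see kMaxRegisterResultSize below).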

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

#ifdef _WIN64
  // The Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. It requires
  // the stack to be aligned to 16 bytes. It only allows a single word to be
  // returned in register rax. Larger return sizes must be written to an
  // address passed as a hidden first argument.
  const Register kCCallArg0 = rcx;
  const Register kCCallArg1 = rdx;
  const Register kCCallArg2 = r8;
  const Register kCCallArg3 = r9;
  const int kArgExtraStackSpace = 2;
  const int kMaxRegisterResultSize = 1;
#else
  // GCC / Clang passes arguments in rdi, rsi, rdx, rcx, r8, r9. Simple results
  // are returned in rax, and a struct of two pointers is returned in rax+rdx.
  // Larger return sizes must be written to an address passed as a hidden first
  // argument.
  const Register kCCallArg0 = rdi;
  const Register kCCallArg1 = rsi;
  const Register kCCallArg2 = rdx;
  const Register kCCallArg3 = rcx;
  const int kArgExtraStackSpace = 0;
  const int kMaxRegisterResultSize = 2;
#endif  // _WIN64

  // Enter the exit frame that transitions from JavaScript to C++.
  int arg_stack_space =
      kArgExtraStackSpace +
      (result_size <= kMaxRegisterResultSize ? 0 : result_size);
  if (argv_mode == kArgvInRegister) {
    DCHECK(save_doubles == kDontSaveFPRegs);
    DCHECK(!builtin_exit_frame);
    __ EnterApiExitFrame(arg_stack_space);
    // Move argc into r14 (argv is already in r15).
    __ movp(r14, rax);
  } else {
    __ EnterExitFrame(
        arg_stack_space, save_doubles == kSaveFPRegs,
        builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
  }

  // rbx: pointer to builtin function  (C callee-saved).
  // rbp: frame pointer of exit frame  (restored after C call).
  // rsp: stack pointer (restored after C call).
  // r14: number of arguments including receiver (C callee-saved).
  // r15: argv pointer (C callee-saved).

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }

  // Call the C function. The arguments object will be created by stubs
  // declared by DECLARE_RUNTIME_FUNCTION().
  if (result_size <= kMaxRegisterResultSize) {
    // Pass a pointer to the Arguments object as the first argument.
    // Return result in single register (rax), or a register pair (rax, rdx).
    __ movp(kCCallArg0, r14);  // argc.
    __ movp(kCCallArg1, r15);  // argv.
    __ Move(kCCallArg2, ExternalReference::isolate_address(masm->isolate()));
  } else {
    DCHECK_LE(result_size, 2);
    // Pass a pointer to the result location as the first argument.
    __ leap(kCCallArg0, StackSpaceOperand(kArgExtraStackSpace));
    // Pass a pointer to the Arguments object as the second argument.
    __ movp(kCCallArg1, r14);  // argc.
    __ movp(kCCallArg2, r15);  // argv.
    __ Move(kCCallArg3, ExternalReference::isolate_address(masm->isolate()));
  }
  __ call(rbx);

  if (result_size > kMaxRegisterResultSize) {
    // Read result values stored on stack. The result is stored
    // above the two Arguments object slots on Win64.
    DCHECK_LE(result_size, 2);
    __ movq(kReturnRegister0, StackSpaceOperand(kArgExtraStackSpace + 0));
    __ movq(kReturnRegister1, StackSpaceOperand(kArgExtraStackSpace + 1));
  }
  // Result is in rax or rdx:rax - do not destroy these registers!

  // Check result for exception sentinel.
  Label exception_returned;
  __ CompareRoot(rax, Heap::kExceptionRootIndex);
  __ j(equal, &exception_returned);

  // Check that there is no pending exception; otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    Label okay;
    __ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
    ExternalReference pending_exception_address = ExternalReference::Create(
        IsolateAddressId::kPendingExceptionAddress, masm->isolate());
    Operand pending_exception_operand =
        masm->ExternalOperand(pending_exception_address);
    __ cmpp(r14, pending_exception_operand);
    __ j(equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
  }

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(save_doubles == kSaveFPRegs, argv_mode == kArgvOnStack);
  __ ret(0);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
  ExternalReference pending_handler_entrypoint_address =
      ExternalReference::Create(
          IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
  ExternalReference pending_handler_fp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
  ExternalReference pending_handler_sp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());

  // Ask the runtime for help to determine the handler. This will set rax to
  // contain the current pending exception, so don't clobber it.
  ExternalReference find_handler =
      ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ movp(arg_reg_1, Immediate(0));  // argc.
    __ movp(arg_reg_2, Immediate(0));  // argv.
    __ Move(arg_reg_3, ExternalReference::isolate_address(masm->isolate()));
    __ PrepareCallCFunction(3);
    __ CallCFunction(find_handler, 3);
  }
  // Retrieve the handler context, SP and FP.
  __ movp(rsi, masm->ExternalOperand(pending_handler_context_address));
  __ movp(rsp, masm->ExternalOperand(pending_handler_sp_address));
  __ movp(rbp, masm->ExternalOperand(pending_handler_fp_address));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (rsi == 0) for non-JS frames.
  Label skip;
  __ testp(rsi, rsi);
  __ j(zero, &skip, Label::kNear);
  __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
  __ bind(&skip);

  // Reset the masking register. This is done independent of the underlying
  // feature flag {FLAG_branch_load_poisoning} to make the snapshot work with
  // both configurations. It is safe to always do this, because the underlying
  // register is caller-saved and can be arbitrarily clobbered.
  __ ResetSpeculationPoisonRegister();

  // Compute the handler entry address and jump to it.
  __ movp(rdi, masm->ExternalOperand(pending_handler_entrypoint_address));
  __ jmp(rdi);
}

void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
  Label check_negative, process_64_bits, done;

  // Account for return address and saved regs.
  const int kArgumentOffset = 4 * kRegisterSize;

  MemOperand mantissa_operand(MemOperand(rsp, kArgumentOffset));
  MemOperand exponent_operand(
      MemOperand(rsp, kArgumentOffset + kDoubleSize / 2));
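
  // A sketch of the conversion below: when the unbiased exponent is below the
  // mantissa width, a single cvttsd2siq yields the truncated 64-bit value.
  // Otherwise the low 32 bits of the result are the low word of the mantissa
  // shifted left by (exponent - 52); shifts beyond 31 bits leave 0, and the
  // result is negated at the end if the sign bit was set.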
2569 
2570   // The result is returned on the stack.
2571   MemOperand return_operand = mantissa_operand;
2572 
2573   Register scratch1 = rbx;
2574 
2575   // Since we must use rcx for shifts below, use some other register (rax)
2576   // to calculate the result if ecx is the requested return register.
2577   Register result_reg = rax;
2578   // Save ecx if it isn't the return register and therefore volatile, or if it
2579   // is the return register, then save the temp register we use in its stead
2580   // for the result.
2581   Register save_reg = rax;
2582   __ pushq(rcx);
2583   __ pushq(scratch1);
2584   __ pushq(save_reg);
2585 
2586   __ movl(scratch1, mantissa_operand);
2587   __ Movsd(kScratchDoubleReg, mantissa_operand);
2588   __ movl(rcx, exponent_operand);
2589 
2590   __ andl(rcx, Immediate(HeapNumber::kExponentMask));
2591   __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
2592   __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
2593   __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
2594   __ j(below, &process_64_bits, Label::kNear);

  // The result is entirely in the lower 32 bits of the mantissa.
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  __ subl(rcx, Immediate(delta));
  __ xorl(result_reg, result_reg);
  __ cmpl(rcx, Immediate(31));
  __ j(above, &done, Label::kNear);
  __ shll_cl(scratch1);
  __ jmp(&check_negative, Label::kNear);

  __ bind(&process_64_bits);
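  // The exponent guarantees |value| < 2^52, so the int64 conversion cannot
  // overflow; the low 32 bits of result_reg form the int32 result.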
  __ Cvttsd2siq(result_reg, kScratchDoubleReg);
  __ jmp(&done, Label::kNear);

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ movl(result_reg, scratch1);
  __ negl(result_reg);
  __ cmpl(exponent_operand, Immediate(0));
  __ cmovl(greater, result_reg, scratch1);

  // Restore registers.
  __ bind(&done);
  __ movl(return_operand, result_reg);
  __ popq(save_reg);
  __ popq(scratch1);
  __ popq(rcx);
  __ ret(0);
}

void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
  const Register exponent = rdx;
  const Register scratch = rcx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1.0 in double_result; it serves both as the initial value for the
  // integer-exponent loop below and as the numerator when the exponent is
  // negative.
  __ movp(scratch, Immediate(1));
  __ Cvtlsi2sd(double_result, scratch);

  Label fast_power, try_arithmetic_simplification;
  // Detect integer exponents stored as a double.
  __ DoubleToI(exponent, double_exponent, double_scratch,
               &try_arithmetic_simplification, &try_arithmetic_simplification);
  __ jmp(&int_exponent);

  __ bind(&try_arithmetic_simplification);
  __ Cvttsd2si(exponent, double_exponent);
  // Skip to runtime if possibly NaN (indicated by the indefinite integer).
  __ cmpl(exponent, Immediate(0x1));
  __ j(overflow, &call_runtime);
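  // (Cvttsd2si returns the "integer indefinite" value 0x80000000 for NaN and
  // out-of-range inputs; comparing against 1 sets the overflow flag for
  // exactly that value, since INT32_MIN - 1 overflows.)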

  // Using FPU instructions to calculate power.
  Label fast_power_failed;
  __ bind(&fast_power);
  __ fnclex();  // Clear flags to catch exceptions later.
  // Transfer (B)ase and (E)xponent onto the FPU register stack.
  __ subp(rsp, Immediate(kDoubleSize));
  __ Movsd(Operand(rsp, 0), double_exponent);
  __ fld_d(Operand(rsp, 0));  // E
  __ Movsd(Operand(rsp, 0), double_base);
  __ fld_d(Operand(rsp, 0));  // B, E

  // Exponent is in st(1) and base is in st(0).
  // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
  // FYL2X calculates st(1) * log2(st(0))
  __ fyl2x();    // X
  __ fld(0);     // X, X
  __ frndint();  // rnd(X), X
  __ fsub(1);    // rnd(X), X-rnd(X)
  __ fxch(1);    // X - rnd(X), rnd(X)
  // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
  __ f2xm1();   // 2^(X-rnd(X)) - 1, rnd(X)
  __ fld1();    // 1, 2^(X-rnd(X)) - 1, rnd(X)
  __ faddp(1);  // 2^(X-rnd(X)), rnd(X)
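  // Recombine the split: 2^X = 2^(X - rnd(X)) * 2^rnd(X). FSCALE supplies the
  // 2^rnd(X) factor, which F2XM1 could not handle directly.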
  // FSCALE calculates st(0) * 2^st(1)
  __ fscale();  // 2^X, rnd(X)
  __ fstp(1);
  // Bail out to runtime in case of exceptions in the status word.
  __ fnstsw_ax();
  __ testb(rax, Immediate(0x5F));  // Check for all but precision exception.
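  // (0x5F covers the invalid, denormal, zero-divide, overflow, underflow and
  // stack-fault bits of the x87 status word; only the precision bit 0x20 is
  // ignored.)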
  __ j(not_zero, &fast_power_failed, Label::kNear);
  __ fstp_d(Operand(rsp, 0));
  __ Movsd(double_result, Operand(rsp, 0));
  __ addp(rsp, Immediate(kDoubleSize));
  __ jmp(&done);

  __ bind(&fast_power_failed);
  __ fninit();
  __ addp(rsp, Immediate(kDoubleSize));
  __ jmp(&call_runtime);

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  // Back up exponent as we need to check if exponent is negative later.
  __ movp(scratch, exponent);                // Back up exponent.
  __ Movsd(double_scratch, double_base);     // Back up base.
  __ Movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ testl(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ negl(scratch);
  __ bind(&no_neg);
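
  // The loop below is exponentiation by squaring: scan |exponent| from its
  // least-significant bit, squaring double_scratch each step and multiplying
  // it into double_result whenever the shifted-out bit is 1. The first bit is
  // handled before the loop so that, when it is set, double_result can be
  // initialized with a move instead of a multiplication. A scalar sketch
  // (illustrative only):
  //
  //   double result = 1.0, p = base;
  //   for (uint32_t n = abs_exponent; n != 0; n >>= 1) {
  //     if (n & 1) result *= p;
  //     p *= p;
  //   }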

  __ j(zero, &while_false, Label::kNear);
  __ shrl(scratch, Immediate(1));
  // The 'above' condition means CF == 0 && ZF == 0: the bit just shifted out
  // was 0 and the remaining value is not 0.
  __ j(above, &while_true, Label::kNear);
  __ Movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shrl(scratch, Immediate(1));
  __ Mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ Mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // If the exponent is negative, return 1/result.
  __ testl(exponent, exponent);
  __ j(greater, &done);
  __ Divsd(double_scratch2, double_result);
  __ Movsd(double_result, double_scratch2);
  // Test whether result is zero.  Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ Xorpd(double_scratch2, double_scratch2);
  __ Ucomisd(double_scratch2, double_result);
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // input was a smi.  We reset it with the exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtlsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  __ bind(&call_runtime);
  // Move base to the correct argument register.  Exponent is already in xmm1.
  __ Movsd(xmm0, double_base);
  DCHECK(double_exponent == xmm1);
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(2);
    __ CallCFunction(ExternalReference::power_double_double_function(), 2);
  }
  // Return value is in xmm0.
  __ Movsd(double_result, xmm0);

  __ bind(&done);
  __ ret(0);
}

namespace {

void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
                                          ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ testp(rax, rax);
  __ j(not_zero, &not_zero_case);
  __ Jump(CodeFactory::InternalArrayNoArgumentConstructor(masm->isolate(), kind)
              .code(),
          RelocInfo::CODE_TARGET);

  __ bind(&not_zero_case);
  __ cmpl(rax, Immediate(1));
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array: look at the first argument.
    StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(rcx, args.GetArgumentOperand(0));
    __ testp(rcx, rcx);
    __ j(zero, &normal_sequence);
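    // (A smi zero argument means the requested length is 0, so no holes are
    // created and the packed kind can be kept.)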

    __ Jump(CodeFactory::InternalArraySingleArgumentConstructor(
                masm->isolate(), GetHoleyElementsKind(kind))
                .code(),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&normal_sequence);
  __ Jump(
      CodeFactory::InternalArraySingleArgumentConstructor(masm->isolate(), kind)
          .code(),
      RelocInfo::CODE_TARGET);

  __ bind(&not_one_case);
  Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor);
  __ Jump(code, RelocInfo::CODE_TARGET);
}

}  // namespace

void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions, which always have maps.

    // The initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Since kSmiTag == 0, a smi check detects both a nullptr and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, AbortReason::kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into rcx. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(rcx);

  if (FLAG_debug_code) {
    Label done;
    __ cmpl(rcx, Immediate(PACKED_ELEMENTS));
    __ j(equal, &done);
    __ cmpl(rcx, Immediate(HOLEY_ELEMENTS));
    __ Assert(
        equal,
        AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmpl(rcx, Immediate(PACKED_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateInternalArrayConstructorCase(masm, HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateInternalArrayConstructorCase(masm, PACKED_ELEMENTS);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64