1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_X64
6
7 #include "src/code-factory.h"
8 #include "src/codegen.h"
9 #include "src/deoptimizer.h"
10 #include "src/full-codegen/full-codegen.h"
11
12 namespace v8 {
13 namespace internal {
14
15 #define __ ACCESS_MASM(masm)
16
17 void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
18 ExitFrameType exit_frame_type) {
19 // ----------- S t a t e -------------
20 // -- rax : number of arguments excluding receiver
21 // -- rdi : target
22 // -- rdx : new.target
23 // -- rsp[0] : return address
24 // -- rsp[8] : last argument
25 // -- ...
26 // -- rsp[8 * argc] : first argument
27 // -- rsp[8 * (argc + 1)] : receiver
28 // -----------------------------------
29 __ AssertFunction(rdi);
30
31 // The logic contained here is mirrored for TurboFan inlining in
32 // JSTypedLowering::ReduceJSCall{Function,Construct}. Keep these in sync.
33
34 // Make sure we operate in the context of the called function (for example
35 // ConstructStubs implemented in C++ will be run in the context of the caller
36 // instead of the callee, due to the way that [[Construct]] is defined for
37 // ordinary functions).
38 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
39
40 // JumpToExternalReference expects rax to contain the number of arguments
41 // including the receiver and the extra arguments.
42 const int num_extra_args = 3;
43 __ addp(rax, Immediate(num_extra_args + 1));
44
45 // Unconditionally insert argc, target and new target as extra arguments. They
46 // will be used by stack frame iterators when constructing the stack trace.
47 __ PopReturnAddressTo(kScratchRegister);
48 __ Integer32ToSmi(rax, rax);
49 __ Push(rax);
50 __ SmiToInteger32(rax, rax);
51 __ Push(rdi);
52 __ Push(rdx);
53 __ PushReturnAddressFrom(kScratchRegister);
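  // The stack now looks like this:
  //   rsp[0]  : return address
  //   rsp[8]  : new.target
  //   rsp[16] : target
  //   rsp[24] : argc (as Smi)
  //   rsp[32] : last argument
  //   ...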
54
55 __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
56 exit_frame_type == BUILTIN_EXIT);
57 }
58
59 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
60 __ movp(kScratchRegister,
61 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
62 __ movp(kScratchRegister,
63 FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
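  // The code entry point is Code::kHeaderSize bytes into the Code object;
  // FieldOperand also compensates for the heap object tag.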
64 __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
65 __ jmp(kScratchRegister);
66 }
67
68 static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
69 Runtime::FunctionId function_id) {
70 // ----------- S t a t e -------------
71 // -- rax : argument count (preserved for callee)
72 // -- rdx : new target (preserved for callee)
73 // -- rdi : target function (preserved for callee)
74 // -----------------------------------
75 {
76 FrameScope scope(masm, StackFrame::INTERNAL);
77 // Push the number of arguments to the callee.
78 __ Integer32ToSmi(rax, rax);
79 __ Push(rax);
80 // Push a copy of the target function and the new target.
81 __ Push(rdi);
82 __ Push(rdx);
83 // Function is also the parameter to the runtime call.
84 __ Push(rdi);
85
86 __ CallRuntime(function_id, 1);
87 __ movp(rbx, rax);
88
89 // Restore target function and new target.
90 __ Pop(rdx);
91 __ Pop(rdi);
92 __ Pop(rax);
93 __ SmiToInteger32(rax, rax);
94 }
95 __ leap(rbx, FieldOperand(rbx, Code::kHeaderSize));
96 __ jmp(rbx);
97 }
98
99 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
100 // Checking whether the queued function is ready for install is optional,
101 // since we come across interrupts and stack checks elsewhere. However,
102 // not checking may delay installing ready functions, and always checking
103 // would be quite expensive. A good compromise is to first check against
104 // stack limit as a cue for an interrupt signal.
105 Label ok;
106 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
107 __ j(above_equal, &ok);
108
109 GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
110
111 __ bind(&ok);
112 GenerateTailCallToSharedCode(masm);
113 }
114
115 namespace {
116
117 void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
118 bool create_implicit_receiver,
119 bool check_derived_construct) {
120 // ----------- S t a t e -------------
121 // -- rax: number of arguments
122 // -- rsi: context
123 // -- rdi: constructor function
124 // -- rdx: new target
125 // -----------------------------------
126
127 // Enter a construct frame.
128 {
129 FrameScope scope(masm, StackFrame::CONSTRUCT);
130
131 // Preserve the incoming parameters on the stack.
132 __ Integer32ToSmi(rcx, rax);
133 __ Push(rsi);
134 __ Push(rcx);
135
136 if (create_implicit_receiver) {
137 // Allocate the new receiver object.
138 __ Push(rdi);
139 __ Push(rdx);
140 FastNewObjectStub stub(masm->isolate());
141 __ CallStub(&stub);
142 __ movp(rbx, rax);
143 __ Pop(rdx);
144 __ Pop(rdi);
145
146 // ----------- S t a t e -------------
147 // -- rdi: constructor function
148 // -- rbx: newly allocated object
149 // -- rdx: new target
150 // -----------------------------------
151
152 // Retrieve smi-tagged arguments count from the stack.
153 __ SmiToInteger32(rax, Operand(rsp, 0 * kPointerSize));
154 }
155
156 if (create_implicit_receiver) {
157 // Push the allocated receiver to the stack. We need two copies
158 // because we may have to return the original one and the calling
159 // conventions dictate that the called function pops the receiver.
160 __ Push(rbx);
161 __ Push(rbx);
162 } else {
163 __ PushRoot(Heap::kTheHoleValueRootIndex);
164 }
165
166 // Set up pointer to last argument.
167 __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
168
169 // Copy arguments and receiver to the expression stack.
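    // The loop walks the index from argc - 1 down to 0, so the arguments end
    // up on the new expression stack in the same order as in the caller frame.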
170 Label loop, entry;
171 __ movp(rcx, rax);
172 __ jmp(&entry);
173 __ bind(&loop);
174 __ Push(Operand(rbx, rcx, times_pointer_size, 0));
175 __ bind(&entry);
176 __ decp(rcx);
177 __ j(greater_equal, &loop);
178
179 // Call the function.
180 ParameterCount actual(rax);
181 __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION,
182 CheckDebugStepCallWrapper());
183
184 // Store offset of return address for deoptimizer.
185 if (create_implicit_receiver && !is_api_function) {
186 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
187 }
188
189 // Restore context from the frame.
190 __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
191
192 if (create_implicit_receiver) {
193 // If the result is an object (in the ECMA sense), we should get rid
194 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
195 // on page 74.
196 Label use_receiver, exit;
197 // If the result is a smi, it is *not* an object in the ECMA sense.
198 __ JumpIfSmi(rax, &use_receiver, Label::kNear);
199
200 // If the type of the result (stored in its map) is less than
201 // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
202 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
203 __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
204 __ j(above_equal, &exit, Label::kNear);
205
206 // Throw away the result of the constructor invocation and use the
207 // on-stack receiver as the result.
208 __ bind(&use_receiver);
209 __ movp(rax, Operand(rsp, 0));
210
211 // Restore the arguments count and leave the construct frame. The
212 // arguments count is stored below the receiver.
213 __ bind(&exit);
214 __ movp(rbx, Operand(rsp, 1 * kPointerSize));
215 } else {
216 __ movp(rbx, Operand(rsp, 0));
217 }
218
219 // Leave construct frame.
220 }
221
222 // ES6 9.2.2. Step 13+
223 // Check that the result is not a Smi, indicating that the constructor result
224 // from a derived class is neither undefined nor an Object.
225 if (check_derived_construct) {
226 Label dont_throw;
227 __ JumpIfNotSmi(rax, &dont_throw);
228 {
229 FrameScope scope(masm, StackFrame::INTERNAL);
230 __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
231 }
232 __ bind(&dont_throw);
233 }
234
235 // Remove caller arguments from the stack and return.
236 __ PopReturnAddressTo(rcx);
237 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
238 __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
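  // The extra 1 * kPointerSize above also drops the receiver, which is not
  // included in the argument count held in rbx.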
239 __ PushReturnAddressFrom(rcx);
240 if (create_implicit_receiver) {
241 Counters* counters = masm->isolate()->counters();
242 __ IncrementCounter(counters->constructed_objects(), 1);
243 }
244 __ ret(0);
245 }
246
247 } // namespace
248
249 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
250 Generate_JSConstructStubHelper(masm, false, true, false);
251 }
252
253 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
254 Generate_JSConstructStubHelper(masm, true, false, false);
255 }
256
257 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
258 Generate_JSConstructStubHelper(masm, false, false, false);
259 }
260
261 void Builtins::Generate_JSBuiltinsConstructStubForDerived(
262 MacroAssembler* masm) {
263 Generate_JSConstructStubHelper(masm, false, false, true);
264 }
265
266 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
267 FrameScope scope(masm, StackFrame::INTERNAL);
268 __ Push(rdi);
269 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
270 }
271
272 enum IsTagged { kRaxIsSmiTagged, kRaxIsUntaggedInt };
273
274 // Clobbers rcx, r11, kScratchRegister; preserves all other registers.
275 static void Generate_CheckStackOverflow(MacroAssembler* masm,
276 IsTagged rax_is_tagged) {
277 // rax : the number of items to be pushed to the stack
278 //
279 // Check the stack for overflow. We are not trying to catch
280 // interruptions (e.g. debug break and preemption) here, so the "real stack
281 // limit" is checked.
282 Label okay;
283 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
284 __ movp(rcx, rsp);
285 // Make rcx the space we have left. The stack might already be overflowed
286 // here which will cause rcx to become negative.
287 __ subp(rcx, kScratchRegister);
288 // Make r11 the space we need for the array when it is unrolled onto the
289 // stack.
290 if (rax_is_tagged == kRaxIsSmiTagged) {
291 __ PositiveSmiTimesPowerOfTwoToInteger64(r11, rax, kPointerSizeLog2);
292 } else {
293 DCHECK(rax_is_tagged == kRaxIsUntaggedInt);
294 __ movp(r11, rax);
295 __ shlq(r11, Immediate(kPointerSizeLog2));
296 }
297 // Check if the arguments will overflow the stack.
298 __ cmpp(rcx, r11);
299 __ j(greater, &okay); // Signed comparison.
300
301 // Out of stack space.
302 __ CallRuntime(Runtime::kThrowStackOverflow);
303
304 __ bind(&okay);
305 }
306
307 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
308 bool is_construct) {
309 ProfileEntryHookStub::MaybeCallEntryHook(masm);
310
311 // Expects five C++ function parameters.
312 // - Object* new_target
313 // - JSFunction* function
314 // - Object* receiver
315 // - int argc
316 // - Object*** argv
317 // (see Handle::Invoke in execution.cc).
318
319 // Open a C++ scope for the FrameScope.
320 {
321 // Platform specific argument handling. After this, the stack contains
322 // an internal frame and the pushed function and receiver, and
323 // registers rax and rbx hold the argument count and argument array,
324 // while rdi holds the function pointer, rsi the context, and rdx the
325 // new.target.
326
327 #ifdef _WIN64
328 // MSVC parameters in:
329 // rcx : new_target
330 // rdx : function
331 // r8 : receiver
332 // r9 : argc
333 // [rsp+0x20] : argv
334
335 // Enter an internal frame.
336 FrameScope scope(masm, StackFrame::INTERNAL);
337
338 // Setup the context (we need to use the caller context from the isolate).
339 ExternalReference context_address(Isolate::kContextAddress,
340 masm->isolate());
341 __ movp(rsi, masm->ExternalOperand(context_address));
342
343 // Push the function and the receiver onto the stack.
344 __ Push(rdx);
345 __ Push(r8);
346
347 // Load the number of arguments and setup pointer to the arguments.
348 __ movp(rax, r9);
349 // Load the previous frame pointer to access the C arguments on the stack.
350 __ movp(kScratchRegister, Operand(rbp, 0));
351 __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
352 // Load the function pointer into rdi.
353 __ movp(rdi, rdx);
354 // Load the new.target into rdx.
355 __ movp(rdx, rcx);
356 #else // _WIN64
357 // GCC parameters in:
358 // rdi : new_target
359 // rsi : function
360 // rdx : receiver
361 // rcx : argc
362 // r8 : argv
363
364 __ movp(r11, rdi);
365 __ movp(rdi, rsi);
366 // rdi : function
367 // r11 : new_target
368
369 // Clear the context before we push it when entering the internal frame.
370 __ Set(rsi, 0);
371
372 // Enter an internal frame.
373 FrameScope scope(masm, StackFrame::INTERNAL);
374
375 // Setup the context (we need to use the caller context from the isolate).
376 ExternalReference context_address(Isolate::kContextAddress,
377 masm->isolate());
378 __ movp(rsi, masm->ExternalOperand(context_address));
379
380 // Push the function and receiver onto the stack.
381 __ Push(rdi);
382 __ Push(rdx);
383
384 // Load the number of arguments and setup pointer to the arguments.
385 __ movp(rax, rcx);
386 __ movp(rbx, r8);
387
388 // Load the new.target into rdx.
389 __ movp(rdx, r11);
390 #endif // _WIN64
391
392 // Current stack contents:
393 // [rsp + 2 * kPointerSize ... ] : Internal frame
394 // [rsp + kPointerSize] : function
395 // [rsp] : receiver
396 // Current register contents:
397 // rax : argc
398 // rbx : argv
399 // rsi : context
400 // rdi : function
401 // rdx : new.target
402
403 // Check if we have enough stack space to push all arguments.
404 // Expects argument count in rax. Clobbers rcx, r11.
405 Generate_CheckStackOverflow(masm, kRaxIsUntaggedInt);
406
407 // Copy arguments to the stack in a loop.
408 // Register rbx points to array of pointers to handle locations.
409 // Push the values of these handles.
410 Label loop, entry;
411 __ Set(rcx, 0); // Set loop variable to 0.
412 __ jmp(&entry, Label::kNear);
413 __ bind(&loop);
414 __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
415 __ Push(Operand(kScratchRegister, 0)); // dereference handle
416 __ addp(rcx, Immediate(1));
417 __ bind(&entry);
418 __ cmpp(rcx, rax);
419 __ j(not_equal, &loop);
420
421 // Invoke the builtin code.
422 Handle<Code> builtin = is_construct
423 ? masm->isolate()->builtins()->Construct()
424 : masm->isolate()->builtins()->Call();
425 __ Call(builtin, RelocInfo::CODE_TARGET);
426
427 // Exit the internal frame. Notice that this also removes the empty
428 // context and the function left on the stack by the code
429 // invocation.
430 }
431
432 // TODO(X64): Is argument correct? Is there a receiver to remove?
433 __ ret(1 * kPointerSize); // Remove receiver.
434 }
435
436 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
437 Generate_JSEntryTrampolineHelper(masm, false);
438 }
439
440 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
441 Generate_JSEntryTrampolineHelper(masm, true);
442 }
443
444 // static
445 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
446 // ----------- S t a t e -------------
447 // -- rax : the value to pass to the generator
448 // -- rbx : the JSGeneratorObject to resume
449 // -- rdx : the resume mode (tagged)
450 // -- rsp[0] : return address
451 // -----------------------------------
452 __ AssertGeneratorObject(rbx);
453
454 // Store input value into generator object.
455 __ movp(FieldOperand(rbx, JSGeneratorObject::kInputOrDebugPosOffset), rax);
456 __ RecordWriteField(rbx, JSGeneratorObject::kInputOrDebugPosOffset, rax, rcx,
457 kDontSaveFPRegs);
458
459 // Store resume mode into generator object.
460 __ movp(FieldOperand(rbx, JSGeneratorObject::kResumeModeOffset), rdx);
461
462 // Load suspended function and context.
463 __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));
464 __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
465
466 // Flood function if we are stepping.
467 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
468 Label stepping_prepared;
469 ExternalReference last_step_action =
470 ExternalReference::debug_last_step_action_address(masm->isolate());
471 Operand last_step_action_operand = masm->ExternalOperand(last_step_action);
472 STATIC_ASSERT(StepFrame > StepIn);
473 __ cmpb(last_step_action_operand, Immediate(StepIn));
474 __ j(greater_equal, &prepare_step_in_if_stepping);
475
476 // Flood function if we need to continue stepping in the suspended generator.
477 ExternalReference debug_suspended_generator =
478 ExternalReference::debug_suspended_generator_address(masm->isolate());
479 Operand debug_suspended_generator_operand =
480 masm->ExternalOperand(debug_suspended_generator);
481 __ cmpp(rbx, debug_suspended_generator_operand);
482 __ j(equal, &prepare_step_in_suspended_generator);
483 __ bind(&stepping_prepared);
484
485 // Pop return address.
486 __ PopReturnAddressTo(rax);
487
488 // Push receiver.
489 __ Push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset));
490
491 // ----------- S t a t e -------------
492 // -- rax : return address
493 // -- rbx : the JSGeneratorObject to resume
494 // -- rdx : the resume mode (tagged)
495 // -- rdi : generator function
496 // -- rsi : generator context
497 // -- rsp[0] : generator receiver
498 // -----------------------------------
499
500 // Push holes for arguments to generator function. Since the parser forced
501 // context allocation for any variables in generators, the actual argument
502 // values have already been copied into the context and these dummy values
503 // will never be used.
504 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
505 __ LoadSharedFunctionInfoSpecialField(
506 rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset);
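  // The subl/carry pattern below also handles a formal parameter count of
  // zero: the first decrement borrows and the loop exits immediately.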
507 {
508 Label done_loop, loop;
509 __ bind(&loop);
510 __ subl(rcx, Immediate(1));
511 __ j(carry, &done_loop, Label::kNear);
512 __ PushRoot(Heap::kTheHoleValueRootIndex);
513 __ jmp(&loop);
514 __ bind(&done_loop);
515 }
516
517 // Dispatch on the kind of generator object.
518 Label old_generator;
519 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
520 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kFunctionDataOffset));
521 __ CmpObjectType(rcx, BYTECODE_ARRAY_TYPE, rcx);
522 __ j(not_equal, &old_generator);
523
524 // New-style (ignition/turbofan) generator object.
525 {
526 __ PushReturnAddressFrom(rax);
527 __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
528 __ LoadSharedFunctionInfoSpecialField(
529 rax, rax, SharedFunctionInfo::kFormalParameterCountOffset);
530 // We abuse new.target both to indicate that this is a resume call and to
531 // pass in the generator object. In ordinary calls, new.target is always
532 // undefined because generator functions are non-constructable.
533 __ movp(rdx, rbx);
534 __ jmp(FieldOperand(rdi, JSFunction::kCodeEntryOffset));
535 }
536
537 // Old-style (full-codegen) generator object.
538 __ bind(&old_generator);
539 {
540 // Enter a new JavaScript frame, and initialize its slots as they were when
541 // the generator was suspended.
542 FrameScope scope(masm, StackFrame::MANUAL);
543 __ PushReturnAddressFrom(rax); // Return address.
544 __ Push(rbp); // Caller's frame pointer.
545 __ Move(rbp, rsp);
546 __ Push(rsi); // Callee's context.
547 __ Push(rdi); // Callee's JS Function.
548
549 // Restore the operand stack.
550 __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset));
551 __ SmiToInteger32(rax, FieldOperand(rsi, FixedArray::kLengthOffset));
552 {
553 Label done_loop, loop;
554 __ Set(rcx, 0);
555 __ bind(&loop);
556 __ cmpl(rcx, rax);
557 __ j(equal, &done_loop, Label::kNear);
558 __ Push(
559 FieldOperand(rsi, rcx, times_pointer_size, FixedArray::kHeaderSize));
560 __ addl(rcx, Immediate(1));
561 __ jmp(&loop);
562 __ bind(&done_loop);
563 }
564
565 // Reset operand stack so we don't leak.
566 __ LoadRoot(FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset),
567 Heap::kEmptyFixedArrayRootIndex);
568
569 // Restore context.
570 __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));
571
572 // Resume the generator function at the continuation.
573 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
574 __ movp(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
575 __ SmiToInteger64(
576 rcx, FieldOperand(rbx, JSGeneratorObject::kContinuationOffset));
577 __ leap(rdx, FieldOperand(rdx, rcx, times_1, Code::kHeaderSize));
578 __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
579 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
580 __ movp(rax, rbx); // Continuation expects generator object in rax.
581 __ jmp(rdx);
582 }
583
584 __ bind(&prepare_step_in_if_stepping);
585 {
586 FrameScope scope(masm, StackFrame::INTERNAL);
587 __ Push(rbx);
588 __ Push(rdx);
589 __ Push(rdi);
590 __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
591 __ Pop(rdx);
592 __ Pop(rbx);
593 __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
594 }
595 __ jmp(&stepping_prepared);
596
597 __ bind(&prepare_step_in_suspended_generator);
598 {
599 FrameScope scope(masm, StackFrame::INTERNAL);
600 __ Push(rbx);
601 __ Push(rdx);
602 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
603 __ Pop(rdx);
604 __ Pop(rbx);
605 __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
606 }
607 __ jmp(&stepping_prepared);
608 }
609
610 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
611 Register scratch2) {
612 Register args_count = scratch1;
613 Register return_pc = scratch2;
614
615 // Get the arguments + receiver count.
616 __ movp(args_count,
617 Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
618 __ movl(args_count,
619 FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));
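  // Note: kParameterSizeOffset holds the parameter area size in bytes
  // (receiver included), which is why it can be added to rsp directly below.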
620
621 // Leave the frame (also dropping the register file).
622 __ leave();
623
624 // Drop receiver + arguments.
625 __ PopReturnAddressTo(return_pc);
626 __ addp(rsp, args_count);
627 __ PushReturnAddressFrom(return_pc);
628 }
629
630 // Generate code for entering a JS function with the interpreter.
631 // On entry to the function the receiver and arguments have been pushed on the
632 // stack left to right. The actual argument count matches the formal parameter
633 // count expected by the function.
634 //
635 // The live registers are:
636 // o rdi: the JS function object being called
637 // o rdx: the new target
638 // o rsi: our context
639 // o rbp: the caller's frame pointer
640 // o rsp: stack pointer (pointing to return address)
641 //
642 // The function builds an interpreter frame. See InterpreterFrameConstants in
643 // frames.h for its layout.
644 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
645 ProfileEntryHookStub::MaybeCallEntryHook(masm);
646
647 // Open a frame scope to indicate that there is a frame on the stack. The
648 // MANUAL indicates that the scope shouldn't actually generate code to set up
649 // the frame (that is done below).
650 FrameScope frame_scope(masm, StackFrame::MANUAL);
651 __ pushq(rbp); // Caller's frame pointer.
652 __ movp(rbp, rsp);
653 __ Push(rsi); // Callee's context.
654 __ Push(rdi); // Callee's JS function.
655 __ Push(rdx); // Callee's new target.
656
657 // Get the bytecode array from the function object (or from the DebugInfo if
658 // it is present) and load it into kInterpreterBytecodeArrayRegister.
659 __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
660 Label load_debug_bytecode_array, bytecode_array_loaded;
661 DCHECK_EQ(Smi::kZero, DebugInfo::uninitialized());
662 __ cmpp(FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset),
663 Immediate(0));
664 __ j(not_equal, &load_debug_bytecode_array);
665 __ movp(kInterpreterBytecodeArrayRegister,
666 FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset));
667 __ bind(&bytecode_array_loaded);
668
669 // Check whether we should continue to use the interpreter.
670 Label switch_to_different_code_kind;
671 __ Move(rcx, masm->CodeObject()); // Self-reference to this code.
672 __ cmpp(rcx, FieldOperand(rax, SharedFunctionInfo::kCodeOffset));
673 __ j(not_equal, &switch_to_different_code_kind);
674
675 // Increment invocation count for the function.
676 __ movp(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
677 __ movp(rcx, FieldOperand(rcx, LiteralsArray::kFeedbackVectorOffset));
678 __ SmiAddConstant(
679 FieldOperand(rcx,
680 TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
681 TypeFeedbackVector::kHeaderSize),
682 Smi::FromInt(1));
683
684 // Check that the function data field is actually a BytecodeArray object.
685 if (FLAG_debug_code) {
686 __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
687 __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
688 rax);
689 __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
690 }
691
692 // Load initial bytecode offset.
693 __ movp(kInterpreterBytecodeOffsetRegister,
694 Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
695
696 // Push bytecode array and Smi tagged bytecode offset.
697 __ Push(kInterpreterBytecodeArrayRegister);
698 __ Integer32ToSmi(rcx, kInterpreterBytecodeOffsetRegister);
699 __ Push(rcx);
700
701 // Allocate the local and temporary register file on the stack.
702 {
703 // Load frame size from the BytecodeArray object.
704 __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
705 BytecodeArray::kFrameSizeOffset));
706
707 // Do a stack check to ensure we don't go over the limit.
708 Label ok;
709 __ movp(rdx, rsp);
710 __ subp(rdx, rcx);
711 __ CompareRoot(rdx, Heap::kRealStackLimitRootIndex);
712 __ j(above_equal, &ok, Label::kNear);
713 __ CallRuntime(Runtime::kThrowStackOverflow);
714 __ bind(&ok);
715
716 // If ok, push undefined as the initial value for all register file entries.
717 Label loop_header;
718 Label loop_check;
719 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
720 __ j(always, &loop_check);
721 __ bind(&loop_header);
722 // TODO(rmcilroy): Consider doing more than one push per loop iteration.
723 __ Push(rdx);
724 // Continue loop if not done.
725 __ bind(&loop_check);
726 __ subp(rcx, Immediate(kPointerSize));
727 __ j(greater_equal, &loop_header, Label::kNear);
728 }
729
730 // Load accumulator and dispatch table into registers.
731 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
732 __ Move(
733 kInterpreterDispatchTableRegister,
734 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
735
736 // Dispatch to the first bytecode handler for the function.
737 __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
738 kInterpreterBytecodeOffsetRegister, times_1, 0));
739 __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
740 times_pointer_size, 0));
741 __ call(rbx);
742 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
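  // The offset recorded above identifies this return point inside the
  // trampoline; Generate_InterpreterEnterBytecode uses it to synthesize an
  // equivalent return address when (re-)entering bytecode.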
743
744 // The return value is in rax.
745 LeaveInterpreterFrame(masm, rbx, rcx);
746 __ ret(0);
747
748 // Load debug copy of the bytecode array.
749 __ bind(&load_debug_bytecode_array);
750 Register debug_info = kInterpreterBytecodeArrayRegister;
751 __ movp(debug_info, FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset));
752 __ movp(kInterpreterBytecodeArrayRegister,
753 FieldOperand(debug_info, DebugInfo::kDebugBytecodeArrayIndex));
754 __ jmp(&bytecode_array_loaded);
755
756 // If the shared code is no longer this entry trampoline, then the underlying
757 // function has been switched to a different kind of code and we heal the
758 // closure by switching the code entry field over to the new code as well.
759 __ bind(&switch_to_different_code_kind);
760 __ leave(); // Leave the frame so we can tail call.
761 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
762 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset));
763 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
764 __ movp(FieldOperand(rdi, JSFunction::kCodeEntryOffset), rcx);
765 __ RecordWriteCodeEntryField(rdi, rcx, r15);
766 __ jmp(rcx);
767 }
768
769 static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
770 Register scratch1, Register scratch2,
771 Label* stack_overflow) {
772 // Check the stack for overflow. We are not trying to catch
773 // interruptions (e.g. debug break and preemption) here, so the "real stack
774 // limit" is checked.
775 __ LoadRoot(scratch1, Heap::kRealStackLimitRootIndex);
776 __ movp(scratch2, rsp);
777 // Make scratch2 the space we have left. The stack might already be overflowed
778 // here which will cause scratch2 to become negative.
779 __ subp(scratch2, scratch1);
780 // Make scratch1 the space we need for the array when it is unrolled onto the
781 // stack.
782 __ movp(scratch1, num_args);
783 __ shlp(scratch1, Immediate(kPointerSizeLog2));
784 // Check if the arguments will overflow the stack.
785 __ cmpp(scratch2, scratch1);
786 __ j(less_equal, stack_overflow); // Signed comparison.
787 }
788
789 static void Generate_InterpreterPushArgs(MacroAssembler* masm,
790 Register num_args,
791 Register start_address,
792 Register scratch) {
793 // Find the address of the last argument.
794 __ Move(scratch, num_args);
795 __ shlp(scratch, Immediate(kPointerSizeLog2));
796 __ negp(scratch);
797 __ addp(scratch, start_address);
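  // scratch now points just below the last argument to be pushed; the loop
  // below pushes num_args values, walking start_address down towards it.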
798
799 // Push the arguments.
800 Label loop_header, loop_check;
801 __ j(always, &loop_check);
802 __ bind(&loop_header);
803 __ Push(Operand(start_address, 0));
804 __ subp(start_address, Immediate(kPointerSize));
805 __ bind(&loop_check);
806 __ cmpp(start_address, scratch);
807 __ j(greater, &loop_header, Label::kNear);
808 }
809
810 // static
811 void Builtins::Generate_InterpreterPushArgsAndCallImpl(
812 MacroAssembler* masm, TailCallMode tail_call_mode,
813 CallableType function_type) {
814 // ----------- S t a t e -------------
815 // -- rax : the number of arguments (not including the receiver)
816 // -- rbx : the address of the first argument to be pushed. Subsequent
817 // arguments should be consecutive above this, in the same order as
818 // they are to be pushed onto the stack.
819 // -- rdi : the target to call (can be any Object).
820 // -----------------------------------
821 Label stack_overflow;
822
823 // Number of values to be pushed.
824 __ Move(rcx, rax);
825 __ addp(rcx, Immediate(1)); // Add one for receiver.
826
827 // Add a stack check before pushing arguments.
828 Generate_StackOverflowCheck(masm, rcx, rdx, r8, &stack_overflow);
829
830 // Pop return address to allow tail-call after pushing arguments.
831 __ PopReturnAddressTo(kScratchRegister);
832
833 // rbx and rdx will be modified.
834 Generate_InterpreterPushArgs(masm, rcx, rbx, rdx);
835
836 // Call the target.
837 __ PushReturnAddressFrom(kScratchRegister); // Re-push return address.
838
839 if (function_type == CallableType::kJSFunction) {
840 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
841 tail_call_mode),
842 RelocInfo::CODE_TARGET);
843 } else {
844 DCHECK_EQ(function_type, CallableType::kAny);
845 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
846 tail_call_mode),
847 RelocInfo::CODE_TARGET);
848 }
849
850 // Throw stack overflow exception.
851 __ bind(&stack_overflow);
852 {
853 __ TailCallRuntime(Runtime::kThrowStackOverflow);
854 // This should be unreachable.
855 __ int3();
856 }
857 }
858
859 // static
860 void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
861 MacroAssembler* masm, CallableType construct_type) {
862 // ----------- S t a t e -------------
863 // -- rax : the number of arguments (not including the receiver)
864 // -- rdx : the new target (either the same as the constructor or
865 // the JSFunction on which new was invoked initially)
866 // -- rdi : the constructor to call (can be any Object)
867 // -- rbx : the allocation site feedback if available, undefined otherwise
868 // -- rcx : the address of the first argument to be pushed. Subsequent
869 // arguments should be consecutive above this, in the same order as
870 // they are to be pushed onto the stack.
871 // -----------------------------------
872 Label stack_overflow;
873
874 // Add a stack check before pushing arguments.
875 Generate_StackOverflowCheck(masm, rax, r8, r9, &stack_overflow);
876
877 // Pop return address to allow tail-call after pushing arguments.
878 __ PopReturnAddressTo(kScratchRegister);
879
880 // Push slot for the receiver to be constructed.
881 __ Push(Immediate(0));
882
883 // rcx and r8 will be modified.
884 Generate_InterpreterPushArgs(masm, rax, rcx, r8);
885
886 // Push return address in preparation for the tail-call.
887 __ PushReturnAddressFrom(kScratchRegister);
888
889 __ AssertUndefinedOrAllocationSite(rbx);
890 if (construct_type == CallableType::kJSFunction) {
891 // Tail call to the function-specific construct stub (still in the caller
892 // context at this point).
893 __ AssertFunction(rdi);
894
895 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
896 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
897 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
898 // Jump to the constructor function (rax, rbx, rdx passed on).
899 __ jmp(rcx);
900 } else {
901 DCHECK_EQ(construct_type, CallableType::kAny);
902 // Call the constructor (rax, rdx, rdi passed on).
903 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
904 }
905
906 // Throw stack overflow exception.
907 __ bind(&stack_overflow);
908 {
909 __ TailCallRuntime(Runtime::kThrowStackOverflow);
910 // This should be unreachable.
911 __ int3();
912 }
913 }
914
915 // static
916 void Builtins::Generate_InterpreterPushArgsAndConstructArray(
917 MacroAssembler* masm) {
918 // ----------- S t a t e -------------
919 // -- rax : the number of arguments (not including the receiver)
920 // -- rdx : the target to call, checked to be the Array function.
921 // -- rbx : the allocation site feedback
922 // -- rcx : the address of the first argument to be pushed. Subsequent
923 // arguments should be consecutive above this, in the same order as
924 // they are to be pushed onto the stack.
925 // -----------------------------------
926 Label stack_overflow;
927
928 // Number of values to be pushed.
929 __ Move(r8, rax);
930 __ addp(r8, Immediate(1)); // Add one for receiver.
931
932 // Add a stack check before pushing arguments.
933 Generate_StackOverflowCheck(masm, r8, rdi, r9, &stack_overflow);
934
935 // Pop return address to allow tail-call after pushing arguments.
936 __ PopReturnAddressTo(kScratchRegister);
937
938 // rcx and rdi will be modified.
939 Generate_InterpreterPushArgs(masm, r8, rcx, rdi);
940
941 // Push return address in preparation for the tail-call.
942 __ PushReturnAddressFrom(kScratchRegister);
943
944 // The Array constructor expects the constructor in rdi; it is the same as rdx here.
945 __ Move(rdi, rdx);
946
947 ArrayConstructorStub stub(masm->isolate());
948 __ TailCallStub(&stub);
949
950 // Throw stack overflow exception.
951 __ bind(&stack_overflow);
952 {
953 __ TailCallRuntime(Runtime::kThrowStackOverflow);
954 // This should be unreachable.
955 __ int3();
956 }
957 }
958
959 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
960 // Set the return address to the correct point in the interpreter entry
961 // trampoline.
962 Smi* interpreter_entry_return_pc_offset(
963 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
964 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
965 __ Move(rbx, masm->isolate()->builtins()->InterpreterEntryTrampoline());
966 __ addp(rbx, Immediate(interpreter_entry_return_pc_offset->value() +
967 Code::kHeaderSize - kHeapObjectTag));
968 __ Push(rbx);
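  // The pushed address becomes the return address of the bytecode handler
  // dispatched below, so the handler returns into InterpreterEntryTrampoline
  // as if it had been called from there.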
969
970 // Initialize dispatch table register.
971 __ Move(
972 kInterpreterDispatchTableRegister,
973 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
974
975 // Get the bytecode array pointer from the frame.
976 __ movp(kInterpreterBytecodeArrayRegister,
977 Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
978
979 if (FLAG_debug_code) {
980 // Check that the function data field is actually a BytecodeArray object.
981 __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
982 __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
983 rbx);
984 __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
985 }
986
987 // Get the target bytecode offset from the frame.
988 __ movp(kInterpreterBytecodeOffsetRegister,
989 Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
990 __ SmiToInteger32(kInterpreterBytecodeOffsetRegister,
991 kInterpreterBytecodeOffsetRegister);
992
993 // Dispatch to the target bytecode.
994 __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
995 kInterpreterBytecodeOffsetRegister, times_1, 0));
996 __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
997 times_pointer_size, 0));
998 __ jmp(rbx);
999 }
1000
1001 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
1002 // Advance the current bytecode offset stored within the given interpreter
1003 // stack frame. This simulates what all bytecode handlers do upon completion
1004 // of the underlying operation.
1005 __ movp(rbx, Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1006 __ movp(rdx, Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1007 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
1008 {
1009 FrameScope scope(masm, StackFrame::INTERNAL);
1010 __ Push(kInterpreterAccumulatorRegister);
1011 __ Push(rbx); // First argument is the bytecode array.
1012 __ Push(rdx); // Second argument is the bytecode offset.
1013 __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset);
1014 __ Move(rdx, rax); // Result is the new bytecode offset.
1015 __ Pop(kInterpreterAccumulatorRegister);
1016 }
1017 __ movp(Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp), rdx);
1018
1019 Generate_InterpreterEnterBytecode(masm);
1020 }
1021
1022 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1023 Generate_InterpreterEnterBytecode(masm);
1024 }
1025
1026 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
1027 // ----------- S t a t e -------------
1028 // -- rax : argument count (preserved for callee)
1029 // -- rdx : new target (preserved for callee)
1030 // -- rdi : target function (preserved for callee)
1031 // -----------------------------------
1032 // First look up code; maybe we don't need to compile!
1033 Label gotta_call_runtime;
1034 Label try_shared;
1035 Label loop_top, loop_bottom;
1036
1037 Register closure = rdi;
1038 Register map = r8;
1039 Register index = r9;
1040 __ movp(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1041 __ movp(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
1042 __ SmiToInteger32(index, FieldOperand(map, FixedArray::kLengthOffset));
1043 __ cmpl(index, Immediate(2));
1044 __ j(less, &gotta_call_runtime);
1045
1046 // Find literals.
1047 // r14 : native context
1048 // r9 : length / index
1049 // r8 : optimized code map
1050 // rdx : new target
1051 // rdi : closure
1052 Register native_context = r14;
1053 __ movp(native_context, NativeContextOperand());
1054
1055 __ bind(&loop_top);
1056 // Native context match?
1057 Register temp = r11;
1058 __ movp(temp, FieldOperand(map, index, times_pointer_size,
1059 SharedFunctionInfo::kOffsetToPreviousContext));
1060 __ movp(temp, FieldOperand(temp, WeakCell::kValueOffset));
1061 __ cmpp(temp, native_context);
1062 __ j(not_equal, &loop_bottom);
1063 // OSR id set to none?
1064 __ movp(temp, FieldOperand(map, index, times_pointer_size,
1065 SharedFunctionInfo::kOffsetToPreviousOsrAstId));
1066 __ SmiToInteger32(temp, temp);
1067 const int bailout_id = BailoutId::None().ToInt();
1068 __ cmpl(temp, Immediate(bailout_id));
1069 __ j(not_equal, &loop_bottom);
1070 // Literals available?
1071 __ movp(temp, FieldOperand(map, index, times_pointer_size,
1072 SharedFunctionInfo::kOffsetToPreviousLiterals));
1073 __ movp(temp, FieldOperand(temp, WeakCell::kValueOffset));
1074 __ JumpIfSmi(temp, &gotta_call_runtime);
1075
1076 // Save the literals in the closure.
1077 __ movp(FieldOperand(closure, JSFunction::kLiteralsOffset), temp);
1078 __ movp(r15, index);
1079 __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, r15,
1080 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
1081
1082 // Code available?
1083 Register entry = rcx;
1084 __ movp(entry, FieldOperand(map, index, times_pointer_size,
1085 SharedFunctionInfo::kOffsetToPreviousCachedCode));
1086 __ movp(entry, FieldOperand(entry, WeakCell::kValueOffset));
1087 __ JumpIfSmi(entry, &try_shared);
1088
1089 // Found literals and code. Get them into the closure and return.
1090 __ leap(entry, FieldOperand(entry, Code::kHeaderSize));
1091 __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
1092 __ RecordWriteCodeEntryField(closure, entry, r15);
1093
1094 // Link the closure into the optimized function list.
1095 // rcx : code entry (entry)
1096 // r14 : native context
1097 // rdx : new target
1098 // rdi : closure
1099 __ movp(rbx,
1100 ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
1101 __ movp(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), rbx);
1102 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, rbx, r15,
1103 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
1104 const int function_list_offset =
1105 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
1106 __ movp(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST),
1107 closure);
1108 // Save closure before the write barrier.
1109 __ movp(rbx, closure);
1110 __ RecordWriteContextSlot(native_context, function_list_offset, closure, r15,
1111 kDontSaveFPRegs);
1112 __ movp(closure, rbx);
1113 __ jmp(entry);
1114
1115 __ bind(&loop_bottom);
1116 __ subl(index, Immediate(SharedFunctionInfo::kEntryLength));
1117 __ cmpl(index, Immediate(1));
1118 __ j(greater, &loop_top);
1119
1120 // We found neither literals nor code.
1121 __ jmp(&gotta_call_runtime);
1122
1123 __ bind(&try_shared);
1124 __ movp(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1125 // Is the shared function marked for tier up?
1126 __ testb(FieldOperand(entry, SharedFunctionInfo::kMarkedForTierUpByteOffset),
1127 Immediate(1 << SharedFunctionInfo::kMarkedForTierUpBitWithinByte));
1128 __ j(not_zero, &gotta_call_runtime);
1129 // Is the full code valid?
1130 __ movp(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
1131 __ movl(rbx, FieldOperand(entry, Code::kFlagsOffset));
1132 __ andl(rbx, Immediate(Code::KindField::kMask));
1133 __ shrl(rbx, Immediate(Code::KindField::kShift));
1134 __ cmpl(rbx, Immediate(Code::BUILTIN));
1135 __ j(equal, &gotta_call_runtime);
1136 // Yes, install the full code.
1137 __ leap(entry, FieldOperand(entry, Code::kHeaderSize));
1138 __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
1139 __ RecordWriteCodeEntryField(closure, entry, r15);
1140 __ jmp(entry);
1141
1142 __ bind(&gotta_call_runtime);
1143 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1144 }
1145
1146 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
1147 GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
1148 }
1149
1150 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
1151 GenerateTailCallToReturnedCode(masm,
1152 Runtime::kCompileOptimized_NotConcurrent);
1153 }
1154
1155 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
1156 GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
1157 }
1158
1159 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1160 // ----------- S t a t e -------------
1161 // -- rax : argument count (preserved for callee)
1162 // -- rdx : new target (preserved for callee)
1163 // -- rdi : target function (preserved for callee)
1164 // -----------------------------------
1165 Label failed;
1166 {
1167 FrameScope scope(masm, StackFrame::INTERNAL);
1168 // Preserve argument count for later compare.
1169 __ movp(kScratchRegister, rax);
1170 // Push the number of arguments to the callee.
1171 __ Integer32ToSmi(rax, rax);
1172 __ Push(rax);
1173 // Push a copy of the target function and the new target.
1174 __ Push(rdi);
1175 __ Push(rdx);
1176
1177 // The function.
1178 __ Push(rdi);
1179 // Copy arguments from caller (stdlib, foreign, heap).
1180 Label args_done;
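    // kScratchRegister holds the incoming argument count; each count 0..2 is
    // handled explicitly and j == 3 is the fall-through case for three (or
    // more) arguments. Missing arguments are padded with undefined so that
    // exactly three values (stdlib, foreign, heap) are pushed.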
1181 for (int j = 0; j < 4; ++j) {
1182 Label over;
1183 if (j < 3) {
1184 __ cmpp(kScratchRegister, Immediate(j));
1185 __ j(not_equal, &over, Label::kNear);
1186 }
1187 for (int i = j - 1; i >= 0; --i) {
1188 __ Push(Operand(
1189 rbp, StandardFrameConstants::kCallerSPOffset + i * kPointerSize));
1190 }
1191 for (int i = 0; i < 3 - j; ++i) {
1192 __ PushRoot(Heap::kUndefinedValueRootIndex);
1193 }
1194 if (j < 3) {
1195 __ jmp(&args_done, Label::kNear);
1196 __ bind(&over);
1197 }
1198 }
1199 __ bind(&args_done);
1200
1201 // Call runtime, on success unwind frame, and parent frame.
1202 __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
1203 // A smi 0 is returned on failure, an object on success.
1204 __ JumpIfSmi(rax, &failed, Label::kNear);
1205
1206 __ Drop(2);
1207 __ Pop(kScratchRegister);
1208 __ SmiToInteger32(kScratchRegister, kScratchRegister);
1209 scope.GenerateLeaveFrame();
1210
1211 __ PopReturnAddressTo(rbx);
1212 __ incp(kScratchRegister);
1213 __ leap(rsp, Operand(rsp, kScratchRegister, times_pointer_size, 0));
1214 __ PushReturnAddressFrom(rbx);
1215 __ ret(0);
1216
1217 __ bind(&failed);
1218 // Restore target function and new target.
1219 __ Pop(rdx);
1220 __ Pop(rdi);
1221 __ Pop(rax);
1222 __ SmiToInteger32(rax, rax);
1223 }
1224 // On failure, tail call back to regular js.
1225 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1226 }
1227
1228 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
1229 // For now, we are relying on the fact that make_code_young doesn't do any
1230 // garbage collection which allows us to save/restore the registers without
1231 // worrying about which of them contain pointers. We also don't build an
1232 // internal frame to make the code faster, since we shouldn't have to do stack
1233 // crawls in MakeCodeYoung. This seems a bit fragile.
1234
1235 // Re-execute the code that was patched back to the young age when
1236 // the stub returns.
1237 __ subp(Operand(rsp, 0), Immediate(5));
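  // The 5 above is the length of the patched short call sequence (see
  // Assembler::kShortCallInstructionLength); backing the return address up by
  // it re-executes the restored young-code prologue on return.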
1238 __ Pushad();
1239 __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
1240 __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
1241 { // NOLINT
1242 FrameScope scope(masm, StackFrame::MANUAL);
1243 __ PrepareCallCFunction(2);
1244 __ CallCFunction(
1245 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
1246 }
1247 __ Popad();
1248 __ ret(0);
1249 }
1250
1251 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
1252 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
1253 MacroAssembler* masm) { \
1254 GenerateMakeCodeYoungAgainCommon(masm); \
1255 } \
1256 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
1257 MacroAssembler* masm) { \
1258 GenerateMakeCodeYoungAgainCommon(masm); \
1259 }
1260 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
1261 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1262
1263 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
1264 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
1265 // that make_code_young doesn't do any garbage collection which allows us to
1266 // save/restore the registers without worrying about which of them contain
1267 // pointers.
1268 __ Pushad();
1269 __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
1270 __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
1271 __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
1272 { // NOLINT
1273 FrameScope scope(masm, StackFrame::MANUAL);
1274 __ PrepareCallCFunction(2);
1275 __ CallCFunction(
1276 ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
1277 2);
1278 }
1279 __ Popad();
1280
1281 // Perform prologue operations usually performed by the young code stub.
1282 __ PopReturnAddressTo(kScratchRegister);
1283 __ pushq(rbp); // Caller's frame pointer.
1284 __ movp(rbp, rsp);
1285 __ Push(rsi); // Callee's context.
1286 __ Push(rdi); // Callee's JS Function.
1287 __ PushReturnAddressFrom(kScratchRegister);
1288
1289 // Jump to point after the code-age stub.
1290 __ ret(0);
1291 }
1292
1293 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
1294 GenerateMakeCodeYoungAgainCommon(masm);
1295 }
1296
1297 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
1298 Generate_MarkCodeAsExecutedOnce(masm);
1299 }
1300
1301 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
1302 SaveFPRegsMode save_doubles) {
1303 // Enter an internal frame.
1304 {
1305 FrameScope scope(masm, StackFrame::INTERNAL);
1306
1307 // Preserve registers across notification, this is important for compiled
1308 // stubs that tail call the runtime on deopts passing their parameters in
1309 // registers.
1310 __ Pushad();
1311 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
1312 __ Popad();
1313 // Tear down internal frame.
1314 }
1315
1316 __ DropUnderReturnAddress(1); // Ignore state offset
1317 __ ret(0); // Return to IC Miss stub, continuation still on stack.
1318 }
1319
1320 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
1321 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
1322 }
1323
1324 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
1325 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
1326 }
1327
1328 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1329 Deoptimizer::BailoutType type) {
1330 // Enter an internal frame.
1331 {
1332 FrameScope scope(masm, StackFrame::INTERNAL);
1333
1334 // Pass the deoptimization type to the runtime system.
1335 __ Push(Smi::FromInt(static_cast<int>(type)));
1336
1337 __ CallRuntime(Runtime::kNotifyDeoptimized);
1338 // Tear down internal frame.
1339 }
1340
1341 // Get the full codegen state from the stack and untag it.
1342 __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));
1343
1344 // Switch on the state.
1345 Label not_no_registers, not_tos_rax;
1346 __ cmpp(kScratchRegister,
1347 Immediate(static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS)));
1348 __ j(not_equal, &not_no_registers, Label::kNear);
1349 __ ret(1 * kPointerSize); // Remove state.
1350
1351 __ bind(&not_no_registers);
1352 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), rax.code());
1353 __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
1354 __ cmpp(kScratchRegister,
1355 Immediate(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER)));
1356 __ j(not_equal, &not_tos_rax, Label::kNear);
1357 __ ret(2 * kPointerSize); // Remove state, rax.
1358
1359 __ bind(&not_tos_rax);
1360 __ Abort(kNoCasesLeft);
1361 }
1362
1363 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1364 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1365 }
1366
1367 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1368 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1369 }
1370
1371 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1372 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1373 }
1374
1375 // static
1376 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1377 // ----------- S t a t e -------------
1378 // -- rax : argc
1379 // -- rsp[0] : return address
1380 // -- rsp[8] : argArray
1381 // -- rsp[16] : thisArg
1382 // -- rsp[24] : receiver
1383 // -----------------------------------
1384
1385 // 1. Load receiver into rdi, argArray into rax (if present), remove all
1386 // arguments from the stack (including the receiver), and push thisArg (if
1387 // present) instead.
1388 {
1389 Label no_arg_array, no_this_arg;
1390 StackArgumentsAccessor args(rsp, rax);
1391 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
1392 __ movp(rbx, rdx);
1393 __ movp(rdi, args.GetReceiverOperand());
1394 __ testp(rax, rax);
1395 __ j(zero, &no_this_arg, Label::kNear);
1396 {
1397 __ movp(rdx, args.GetArgumentOperand(1));
1398 __ cmpp(rax, Immediate(1));
1399 __ j(equal, &no_arg_array, Label::kNear);
1400 __ movp(rbx, args.GetArgumentOperand(2));
1401 __ bind(&no_arg_array);
1402 }
1403 __ bind(&no_this_arg);
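    // Drop all original arguments including the receiver (rax + 1 slots) and
    // push thisArg in their place; thisArg becomes the receiver seen by the
    // callable.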
1404 __ PopReturnAddressTo(rcx);
1405 __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1406 __ Push(rdx);
1407 __ PushReturnAddressFrom(rcx);
1408 __ movp(rax, rbx);
1409 }
1410
1411 // ----------- S t a t e -------------
1412 // -- rax : argArray
1413 // -- rdi : receiver
1414 // -- rsp[0] : return address
1415 // -- rsp[8] : thisArg
1416 // -----------------------------------
1417
1418 // 2. Make sure the receiver is actually callable.
1419 Label receiver_not_callable;
1420 __ JumpIfSmi(rdi, &receiver_not_callable, Label::kNear);
1421 __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
1422 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
1423 Immediate(1 << Map::kIsCallable));
1424 __ j(zero, &receiver_not_callable, Label::kNear);
1425
1426 // 3. Tail call with no arguments if argArray is null or undefined.
1427 Label no_arguments;
1428 __ JumpIfRoot(rax, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
1429 __ JumpIfRoot(rax, Heap::kUndefinedValueRootIndex, &no_arguments,
1430 Label::kNear);
1431
1432 // 4a. Apply the receiver to the given argArray (passing undefined for
1433 // new.target).
1434 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
1435 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1436
1437 // 4b. The argArray is either null or undefined, so we tail call without any
1438 // arguments to the receiver. Since we did not create a frame for
1439 // Function.prototype.apply() yet, we use a normal Call builtin here.
1440 __ bind(&no_arguments);
1441 {
1442 __ Set(rax, 0);
1443 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1444 }
1445
1446 // 4c. The receiver is not callable, throw an appropriate TypeError.
1447 __ bind(&receiver_not_callable);
1448 {
1449 StackArgumentsAccessor args(rsp, 0);
1450 __ movp(args.GetReceiverOperand(), rdi);
1451 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1452 }
1453 }
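
// For illustration, the JavaScript-observable behavior implemented above is
// roughly the following (hypothetical snippet, not part of the original source):
//
//   function f(a, b) { return [this.x, a, b]; }
//   f.apply({x: 1}, [2, 3]);   // [1, 2, 3]
//   f.apply({x: 1});           // [1, undefined, undefined] -- no argArray, so
//                              // case 4b tail calls with zero arguments
//   Function.prototype.apply.call(42);  // TypeError -- 42 is not callable (4c)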
1454
1455 // static
1456 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1457 // Stack Layout:
1458 // rsp[0] : Return address
1459 // rsp[8] : Argument n
1460 // rsp[16] : Argument n-1
1461 // ...
1462 // rsp[8 * n] : Argument 1
1463 // rsp[8 * (n + 1)] : Receiver (callable to call)
1464 //
1465 // rax contains the number of arguments, n, not counting the receiver.
1466 //
1467 // 1. Make sure we have at least one argument.
1468 {
1469 Label done;
1470 __ testp(rax, rax);
1471 __ j(not_zero, &done, Label::kNear);
1472 __ PopReturnAddressTo(rbx);
1473 __ PushRoot(Heap::kUndefinedValueRootIndex);
1474 __ PushReturnAddressFrom(rbx);
1475 __ incp(rax);
1476 __ bind(&done);
1477 }
1478
1479 // 2. Get the callable to call (passed as receiver) from the stack.
1480 {
1481 StackArgumentsAccessor args(rsp, rax);
1482 __ movp(rdi, args.GetReceiverOperand());
1483 }
1484
1485 // 3. Shift arguments and return address one slot down on the stack
1486 // (overwriting the original receiver). Adjust argument count to make
1487 // the original first argument the new receiver.
1488 {
1489 Label loop;
1490 __ movp(rcx, rax);
1491 StackArgumentsAccessor args(rsp, rcx);
1492 __ bind(&loop);
1493 __ movp(rbx, args.GetArgumentOperand(1));
1494 __ movp(args.GetArgumentOperand(0), rbx);
1495 __ decp(rcx);
1496 __ j(not_zero, &loop); // While non-zero.
1497 __ DropUnderReturnAddress(1, rbx); // Drop one slot under return address.
1498 __ decp(rax); // One fewer argument (first argument is new receiver).
1499 }
1500
1501 // 4. Call the callable.
1502 // Since we did not create a frame for Function.prototype.call() yet,
1503 // we use a normal Call builtin here.
1504 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1505 }
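
// For illustration only: the shift above makes the first argument the new
// receiver, matching Function.prototype.call:
//
//   function f(a, b) { return [this.x, a, b]; }
//   f.call({x: 1}, 2, 3);   // [1, 2, 3] -- {x: 1} becomes the receiver
//   f.call();               // receiver defaults to undefined (step 1 pushes it)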
1506
1507 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1508 // ----------- S t a t e -------------
1509 // -- rax : argc
1510 // -- rsp[0] : return address
1511 // -- rsp[8] : argumentsList
1512 // -- rsp[16] : thisArgument
1513 // -- rsp[24] : target
1514 // -- rsp[32] : receiver
1515 // -----------------------------------
1516
1517 // 1. Load target into rdi (if present), argumentsList into rax (if present),
1518 // remove all arguments from the stack (including the receiver), and push
1519 // thisArgument (if present) instead.
1520 {
1521 Label done;
1522 StackArgumentsAccessor args(rsp, rax);
1523 __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
1524 __ movp(rdx, rdi);
1525 __ movp(rbx, rdi);
1526 __ cmpp(rax, Immediate(1));
1527 __ j(below, &done, Label::kNear);
1528 __ movp(rdi, args.GetArgumentOperand(1)); // target
1529 __ j(equal, &done, Label::kNear);
1530 __ movp(rdx, args.GetArgumentOperand(2)); // thisArgument
1531 __ cmpp(rax, Immediate(3));
1532 __ j(below, &done, Label::kNear);
1533 __ movp(rbx, args.GetArgumentOperand(3)); // argumentsList
1534 __ bind(&done);
1535 __ PopReturnAddressTo(rcx);
1536 __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1537 __ Push(rdx);
1538 __ PushReturnAddressFrom(rcx);
1539 __ movp(rax, rbx);
1540 }
1541
1542 // ----------- S t a t e -------------
1543 // -- rax : argumentsList
1544 // -- rdi : target
1545 // -- rsp[0] : return address
1546 // -- rsp[8] : thisArgument
1547 // -----------------------------------
1548
1549 // 2. Make sure the target is actually callable.
1550 Label target_not_callable;
1551 __ JumpIfSmi(rdi, &target_not_callable, Label::kNear);
1552 __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
1553 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
1554 Immediate(1 << Map::kIsCallable));
1555 __ j(zero, &target_not_callable, Label::kNear);
1556
1557 // 3a. Apply the target to the given argumentsList (passing undefined for
1558 // new.target).
1559 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
1560 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1561
1562 // 3b. The target is not callable, throw an appropriate TypeError.
1563 __ bind(&target_not_callable);
1564 {
1565 StackArgumentsAccessor args(rsp, 0);
1566 __ movp(args.GetReceiverOperand(), rdi);
1567 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1568 }
1569 }
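
// Illustrative JavaScript-level behavior:
//
//   Reflect.apply(Math.max, undefined, [1, 3, 2]);  // 3
//   Reflect.apply(42, undefined, []);               // TypeError (3b) -- the
//                                                   // target is not callable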
1570
1571 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1572 // ----------- S t a t e -------------
1573 // -- rax : argc
1574 // -- rsp[0] : return address
1575 // -- rsp[8] : new.target (optional)
1576 // -- rsp[16] : argumentsList
1577 // -- rsp[24] : target
1578 // -- rsp[32] : receiver
1579 // -----------------------------------
1580
1581 // 1. Load target into rdi (if present), argumentsList into rax (if present),
1582 // new.target into rdx (if present, otherwise use target), remove all
1583 // arguments from the stack (including the receiver), and push undefined
1584 // as the new receiver instead.
1585 {
1586 Label done;
1587 StackArgumentsAccessor args(rsp, rax);
1588 __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
1589 __ movp(rdx, rdi);
1590 __ movp(rbx, rdi);
1591 __ cmpp(rax, Immediate(1));
1592 __ j(below, &done, Label::kNear);
1593 __ movp(rdi, args.GetArgumentOperand(1)); // target
1594 __ movp(rdx, rdi); // new.target defaults to target
1595 __ j(equal, &done, Label::kNear);
1596 __ movp(rbx, args.GetArgumentOperand(2)); // argumentsList
1597 __ cmpp(rax, Immediate(3));
1598 __ j(below, &done, Label::kNear);
1599 __ movp(rdx, args.GetArgumentOperand(3)); // new.target
1600 __ bind(&done);
1601 __ PopReturnAddressTo(rcx);
1602 __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1603 __ PushRoot(Heap::kUndefinedValueRootIndex);
1604 __ PushReturnAddressFrom(rcx);
1605 __ movp(rax, rbx);
1606 }
1607
1608 // ----------- S t a t e -------------
1609 // -- rax : argumentsList
1610 // -- rdx : new.target
1611 // -- rdi : target
1612 // -- rsp[0] : return address
1613 // -- rsp[8] : receiver (undefined)
1614 // -----------------------------------
1615
1616 // 2. Make sure the target is actually a constructor.
1617 Label target_not_constructor;
1618 __ JumpIfSmi(rdi, &target_not_constructor, Label::kNear);
1619 __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
1620 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
1621 Immediate(1 << Map::kIsConstructor));
1622 __ j(zero, &target_not_constructor, Label::kNear);
1623
1624 // 3. Make sure the new.target is actually a constructor.
1625 Label new_target_not_constructor;
1626 __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
1627 __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
1628 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
1629 Immediate(1 << Map::kIsConstructor));
1630 __ j(zero, &new_target_not_constructor, Label::kNear);
1631
1632 // 4a. Construct the target with the given new.target and argumentsList.
1633 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1634
1635 // 4b. The target is not a constructor, throw an appropriate TypeError.
1636 __ bind(&target_not_constructor);
1637 {
1638 StackArgumentsAccessor args(rsp, 0);
1639 __ movp(args.GetReceiverOperand(), rdi);
1640 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
1641 }
1642
1643 // 4c. The new.target is not a constructor, throw an appropriate TypeError.
1644 __ bind(&new_target_not_constructor);
1645 {
1646 StackArgumentsAccessor args(rsp, 0);
1647 __ movp(args.GetReceiverOperand(), rdx);
1648 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
1649 }
1650 }
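
// Illustrative JavaScript-level behavior:
//
//   class A { constructor(x) { this.x = x; } }
//   class B extends A {}
//   Reflect.construct(A, [7]).x;                 // 7, new.target defaults to A
//   Reflect.construct(A, [7], B) instanceof B;   // true, explicit new.target
//   Reflect.construct(Math.abs, []);             // TypeError (4b) -- Math.abs
//                                                // is not a constructor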
1651
1652 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
1653 // ----------- S t a t e -------------
1654 // -- rax : argc
1655 // -- rsp[0] : return address
1656 // -- rsp[8] : last argument
1657 // -----------------------------------
1658 Label generic_array_code;
1659
1660 // Get the InternalArray function.
1661 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);
1662
1663 if (FLAG_debug_code) {
1664 // Initial map for the builtin InternalArray functions should be maps.
1665 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1666 // The following Smi check will catch both a NULL value and a Smi.
1667 STATIC_ASSERT(kSmiTag == 0);
1668 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1669 __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
1670 __ CmpObjectType(rbx, MAP_TYPE, rcx);
1671 __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
1672 }
1673
1674 // Run the native code for the InternalArray function called as a normal
1675 // function.
1676 // tail call a stub
1677 InternalArrayConstructorStub stub(masm->isolate());
1678 __ TailCallStub(&stub);
1679 }
1680
1681 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
1682 // ----------- S t a t e -------------
1683 // -- rax : argc
1684 // -- rsp[0] : return address
1685 // -- rsp[8] : last argument
1686 // -----------------------------------
1687 Label generic_array_code;
1688
1689 // Get the Array function.
1690 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rdi);
1691
1692 if (FLAG_debug_code) {
1693 // Initial map for the builtin Array functions should be maps.
1694 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1695 // The following Smi check will catch both a NULL value and a Smi.
1696 STATIC_ASSERT(kSmiTag == 0);
1697 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1698 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
1699 __ CmpObjectType(rbx, MAP_TYPE, rcx);
1700 __ Check(equal, kUnexpectedInitialMapForArrayFunction);
1701 }
1702
1703 __ movp(rdx, rdi);
1704 // Run the native code for the Array function called as a normal function.
1705 // tail call a stub
1706 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
1707 ArrayConstructorStub stub(masm->isolate());
1708 __ TailCallStub(&stub);
1709 }
1710
1711 // static
1712 void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
1713 // ----------- S t a t e -------------
1714 // -- rax : number of arguments
1715 // -- rdi : function
1716 // -- rsi : context
1717 // -- rsp[0] : return address
1718 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1719 // -- rsp[(argc + 1) * 8] : receiver
1720 // -----------------------------------
1721 Condition const cc = (kind == MathMaxMinKind::kMin) ? below : above;
1722 Heap::RootListIndex const root_index =
1723 (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
1724 : Heap::kMinusInfinityValueRootIndex;
1725 XMMRegister const reg = (kind == MathMaxMinKind::kMin) ? xmm1 : xmm0;
1726
1727 // Load the accumulator with the default return value (either -Infinity or
1728 // +Infinity), with the tagged value in rdx and the double value in xmm0.
1729 __ LoadRoot(rdx, root_index);
1730 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
1731 __ Move(rcx, rax);
1732
1733 Label done_loop, loop;
1734 __ bind(&loop);
1735 {
1736 // Check if all parameters done.
1737 __ testp(rcx, rcx);
1738 __ j(zero, &done_loop);
1739
1740 // Load the next parameter tagged value into rbx.
1741 __ movp(rbx, Operand(rsp, rcx, times_pointer_size, 0));
1742
1743 // Load the double value of the parameter into xmm1, maybe converting the
1744 // parameter to a number first using the ToNumber builtin if necessary.
1745 Label convert, convert_smi, convert_number, done_convert;
1746 __ bind(&convert);
1747 __ JumpIfSmi(rbx, &convert_smi);
1748 __ JumpIfRoot(FieldOperand(rbx, HeapObject::kMapOffset),
1749 Heap::kHeapNumberMapRootIndex, &convert_number);
1750 {
1751 // Parameter is not a Number, use the ToNumber builtin to convert it.
1752 FrameScope scope(masm, StackFrame::MANUAL);
1753 __ Integer32ToSmi(rax, rax);
1754 __ Integer32ToSmi(rcx, rcx);
1755 __ EnterBuiltinFrame(rsi, rdi, rax);
1756 __ Push(rcx);
1757 __ Push(rdx);
1758 __ movp(rax, rbx);
1759 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
1760 __ movp(rbx, rax);
1761 __ Pop(rdx);
1762 __ Pop(rcx);
1763 __ LeaveBuiltinFrame(rsi, rdi, rax);
1764 __ SmiToInteger32(rcx, rcx);
1765 __ SmiToInteger32(rax, rax);
1766 {
1767 // Restore the double accumulator value (xmm0).
1768 Label restore_smi, done_restore;
1769 __ JumpIfSmi(rdx, &restore_smi, Label::kNear);
1770 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
1771 __ jmp(&done_restore, Label::kNear);
1772 __ bind(&restore_smi);
1773 __ SmiToDouble(xmm0, rdx);
1774 __ bind(&done_restore);
1775 }
1776 }
1777 __ jmp(&convert);
1778 __ bind(&convert_number);
1779 __ Movsd(xmm1, FieldOperand(rbx, HeapNumber::kValueOffset));
1780 __ jmp(&done_convert, Label::kNear);
1781 __ bind(&convert_smi);
1782 __ SmiToDouble(xmm1, rbx);
1783 __ bind(&done_convert);
1784
1785 // Perform the actual comparison with the accumulator value on the left hand
1786 // side (xmm0) and the next parameter value on the right hand side (xmm1).
1787 Label compare_equal, compare_nan, compare_swap, done_compare;
1788 __ Ucomisd(xmm0, xmm1);
1789 __ j(parity_even, &compare_nan, Label::kNear);
1790 __ j(cc, &done_compare, Label::kNear);
1791 __ j(equal, &compare_equal, Label::kNear);
1792
1793 // Result is on the right hand side.
1794 __ bind(&compare_swap);
1795 __ Movaps(xmm0, xmm1);
1796 __ Move(rdx, rbx);
1797 __ jmp(&done_compare, Label::kNear);
1798
1799 // At least one side is NaN, which means that the result will be NaN too.
1800 __ bind(&compare_nan);
1801 __ LoadRoot(rdx, Heap::kNanValueRootIndex);
1802 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
1803 __ jmp(&done_compare, Label::kNear);
1804
1805 // Left and right hand side are equal, check for -0 vs. +0.
1806 __ bind(&compare_equal);
1807 __ Movmskpd(kScratchRegister, reg);
1808 __ testl(kScratchRegister, Immediate(1));
1809 __ j(not_zero, &compare_swap);
1810
1811 __ bind(&done_compare);
1812 __ decp(rcx);
1813 __ jmp(&loop);
1814 }
1815
1816 __ bind(&done_loop);
1817 __ PopReturnAddressTo(rcx);
1818 __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1819 __ PushReturnAddressFrom(rcx);
1820 __ movp(rax, rdx);
1821 __ Ret();
1822 }
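
// For illustration, the loop above gives Math.max/Math.min their spec behavior:
//
//   Math.max();            // -Infinity (default accumulator for kMax)
//   Math.min();            // +Infinity (default accumulator for kMin)
//   Math.max(1, "3", 2);   // 3 -- "3" is converted via the ToNumber builtin
//   Math.max(1, NaN, 2);   // NaN (compare_nan path)
//   Math.min(0, -0);       // -0 -- the -0 vs. +0 tie is decided by the sign
//                          // bit extracted with Movmskpd (compare_equal path)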
1823
1824 // static
1825 void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
1826 // ----------- S t a t e -------------
1827 // -- rax : number of arguments
1828 // -- rdi : constructor function
1829 // -- rsi : context
1830 // -- rsp[0] : return address
1831 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1832 // -- rsp[(argc + 1) * 8] : receiver
1833 // -----------------------------------
1834
1835 // 1. Load the first argument into rbx.
1836 Label no_arguments;
1837 {
1838 StackArgumentsAccessor args(rsp, rax);
1839 __ testp(rax, rax);
1840 __ j(zero, &no_arguments, Label::kNear);
1841 __ movp(rbx, args.GetArgumentOperand(1));
1842 }
1843
1844 // 2a. Convert the first argument to a number.
1845 {
1846 FrameScope scope(masm, StackFrame::MANUAL);
1847 __ Integer32ToSmi(rax, rax);
1848 __ EnterBuiltinFrame(rsi, rdi, rax);
1849 __ movp(rax, rbx);
1850 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
1851 __ LeaveBuiltinFrame(rsi, rdi, rbx); // Argc popped to rbx.
1852 __ SmiToInteger32(rbx, rbx);
1853 }
1854
1855 {
1856 // Drop all arguments including the receiver.
1857 __ PopReturnAddressTo(rcx);
1858 __ leap(rsp, Operand(rsp, rbx, times_pointer_size, kPointerSize));
1859 __ PushReturnAddressFrom(rcx);
1860 __ Ret();
1861 }
1862
1863 // 2b. No arguments, return +0 (already in rax).
1864 __ bind(&no_arguments);
1865 __ ret(1 * kPointerSize);
1866 }
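
// For illustration, Number called as a function only converts its argument:
//
//   Number();          // 0 (no-arguments path 2b)
//   Number("0x2a");    // 42, via the ToNumber builtin
//   Number("abc");     // NaN
//   typeof Number(1);  // "number" -- no wrapper object is allocated here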
1867
1868 // static
1869 void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
1870 // ----------- S t a t e -------------
1871 // -- rax : number of arguments
1872 // -- rdi : constructor function
1873 // -- rdx : new target
1874 // -- rsi : context
1875 // -- rsp[0] : return address
1876 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1877 // -- rsp[(argc + 1) * 8] : receiver
1878 // -----------------------------------
1879
1880 // 1. Make sure we operate in the context of the called function.
1881 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
1882
1883 // Store argc in r8.
1884 __ Integer32ToSmi(r8, rax);
1885
1886 // 2. Load the first argument into rbx.
1887 {
1888 StackArgumentsAccessor args(rsp, rax);
1889 Label no_arguments, done;
1890 __ testp(rax, rax);
1891 __ j(zero, &no_arguments, Label::kNear);
1892 __ movp(rbx, args.GetArgumentOperand(1));
1893 __ jmp(&done, Label::kNear);
1894 __ bind(&no_arguments);
1895 __ Move(rbx, Smi::kZero);
1896 __ bind(&done);
1897 }
1898
1899 // 3. Make sure rbx is a number.
1900 {
1901 Label done_convert;
1902 __ JumpIfSmi(rbx, &done_convert);
1903 __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
1904 Heap::kHeapNumberMapRootIndex);
1905 __ j(equal, &done_convert);
1906 {
1907 FrameScope scope(masm, StackFrame::MANUAL);
1908 __ EnterBuiltinFrame(rsi, rdi, r8);
1909 __ Push(rdx);
1910 __ Move(rax, rbx);
1911 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
1912 __ Move(rbx, rax);
1913 __ Pop(rdx);
1914 __ LeaveBuiltinFrame(rsi, rdi, r8);
1915 }
1916 __ bind(&done_convert);
1917 }
1918
1919 // 4. Check if new target and constructor differ.
1920 Label drop_frame_and_ret, new_object;
1921 __ cmpp(rdx, rdi);
1922 __ j(not_equal, &new_object);
1923
1924 // 5. Allocate a JSValue wrapper for the number.
1925 __ AllocateJSValue(rax, rdi, rbx, rcx, &new_object);
1926 __ jmp(&drop_frame_and_ret, Label::kNear);
1927
1928 // 6. Fallback to the runtime to create new object.
1929 __ bind(&new_object);
1930 {
1931 FrameScope scope(masm, StackFrame::MANUAL);
1932 __ EnterBuiltinFrame(rsi, rdi, r8);
1933 __ Push(rbx); // the first argument
1934 FastNewObjectStub stub(masm->isolate());
1935 __ CallStub(&stub);
1936 __ Pop(FieldOperand(rax, JSValue::kValueOffset));
1937 __ LeaveBuiltinFrame(rsi, rdi, r8);
1938 }
1939
1940 __ bind(&drop_frame_and_ret);
1941 {
1942 // Drop all arguments including the receiver.
1943 __ PopReturnAddressTo(rcx);
1944 __ SmiToInteger32(r8, r8);
1945 __ leap(rsp, Operand(rsp, r8, times_pointer_size, kPointerSize));
1946 __ PushReturnAddressFrom(rcx);
1947 __ Ret();
1948 }
1949 }
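
// For illustration, new Number(...) additionally allocates a JSValue wrapper:
//
//   const n = new Number("42");
//   typeof n;       // "object"
//   n.valueOf();    // 42, stored in the JSValue value slot
//   class MyNumber extends Number {}
//   new MyNumber(1) instanceof MyNumber;  // true -- new.target differs from the
//                                         // Number function, so step 6 falls
//                                         // back to the runtime for allocation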
1950
1951 // static
1952 void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
1953 // ----------- S t a t e -------------
1954 // -- rax : number of arguments
1955 // -- rdi : constructor function
1956 // -- rsi : context
1957 // -- rsp[0] : return address
1958 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1959 // -- rsp[(argc + 1) * 8] : receiver
1960 // -----------------------------------
1961
1962 // 1. Load the first argument into rax.
1963 Label no_arguments;
1964 {
1965 StackArgumentsAccessor args(rsp, rax);
1966 __ Integer32ToSmi(r8, rax); // Store argc in r8.
1967 __ testp(rax, rax);
1968 __ j(zero, &no_arguments, Label::kNear);
1969 __ movp(rax, args.GetArgumentOperand(1));
1970 }
1971
1972 // 2a. At least one argument, return rax if it's a string, otherwise
1973 // dispatch to appropriate conversion.
1974 Label drop_frame_and_ret, to_string, symbol_descriptive_string;
1975 {
1976 __ JumpIfSmi(rax, &to_string, Label::kNear);
1977 STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
1978 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
1979 __ j(above, &to_string, Label::kNear);
1980 __ j(equal, &symbol_descriptive_string, Label::kNear);
1981 __ jmp(&drop_frame_and_ret, Label::kNear);
1982 }
1983
1984 // 2b. No arguments, return the empty string (and pop the receiver).
1985 __ bind(&no_arguments);
1986 {
1987 __ LoadRoot(rax, Heap::kempty_stringRootIndex);
1988 __ ret(1 * kPointerSize);
1989 }
1990
1991 // 3a. Convert rax to a string.
1992 __ bind(&to_string);
1993 {
1994 FrameScope scope(masm, StackFrame::MANUAL);
1995 __ EnterBuiltinFrame(rsi, rdi, r8);
1996 __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
1997 __ LeaveBuiltinFrame(rsi, rdi, r8);
1998 }
1999 __ jmp(&drop_frame_and_ret, Label::kNear);
2000
2001 // 3b. Convert symbol in rax to a string.
2002 __ bind(&symbol_descriptive_string);
2003 {
2004 __ PopReturnAddressTo(rcx);
2005 __ SmiToInteger32(r8, r8);
2006 __ leap(rsp, Operand(rsp, r8, times_pointer_size, kPointerSize));
2007 __ Push(rax);
2008 __ PushReturnAddressFrom(rcx);
2009 __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
2010 }
2011
2012 __ bind(&drop_frame_and_ret);
2013 {
2014 // Drop all arguments including the receiver.
2015 __ PopReturnAddressTo(rcx);
2016 __ SmiToInteger32(r8, r8);
2017 __ leap(rsp, Operand(rsp, r8, times_pointer_size, kPointerSize));
2018 __ PushReturnAddressFrom(rcx);
2019 __ Ret();
2020 }
2021 }
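
// For illustration, String called as a function:
//
//   String();              // "" (no-arguments path 2b)
//   String("abc");         // "abc", already a string, returned as-is
//   String(123);           // "123", via the ToString builtin (3a)
//   String(Symbol("id"));  // "Symbol(id)", via %SymbolDescriptiveString (3b),
//                          // whereas implicit symbol-to-string conversion throws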
2022
2023 // static
2024 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
2025 // ----------- S t a t e -------------
2026 // -- rax : number of arguments
2027 // -- rdi : constructor function
2028 // -- rdx : new target
2029 // -- rsi : context
2030 // -- rsp[0] : return address
2031 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
2032 // -- rsp[(argc + 1) * 8] : receiver
2033 // -----------------------------------
2034
2035 // 1. Make sure we operate in the context of the called function.
2036 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2037
2038 // Store argc in r8.
2039 __ Integer32ToSmi(r8, rax);
2040
2041 // 2. Load the first argument into rbx.
2042 {
2043 StackArgumentsAccessor args(rsp, rax);
2044 Label no_arguments, done;
2045 __ testp(rax, rax);
2046 __ j(zero, &no_arguments, Label::kNear);
2047 __ movp(rbx, args.GetArgumentOperand(1));
2048 __ jmp(&done, Label::kNear);
2049 __ bind(&no_arguments);
2050 __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
2051 __ bind(&done);
2052 }
2053
2054 // 3. Make sure rbx is a string.
2055 {
2056 Label convert, done_convert;
2057 __ JumpIfSmi(rbx, &convert, Label::kNear);
2058 __ CmpObjectType(rbx, FIRST_NONSTRING_TYPE, rcx);
2059 __ j(below, &done_convert);
2060 __ bind(&convert);
2061 {
2062 FrameScope scope(masm, StackFrame::MANUAL);
2063 __ EnterBuiltinFrame(rsi, rdi, r8);
2064 __ Push(rdx);
2065 __ Move(rax, rbx);
2066 __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
2067 __ Move(rbx, rax);
2068 __ Pop(rdx);
2069 __ LeaveBuiltinFrame(rsi, rdi, r8);
2070 }
2071 __ bind(&done_convert);
2072 }
2073
2074 // 4. Check if new target and constructor differ.
2075 Label drop_frame_and_ret, new_object;
2076 __ cmpp(rdx, rdi);
2077 __ j(not_equal, &new_object);
2078
2079 // 5. Allocate a JSValue wrapper for the string.
2080 __ AllocateJSValue(rax, rdi, rbx, rcx, &new_object);
2081 __ jmp(&drop_frame_and_ret, Label::kNear);
2082
2083 // 6. Fallback to the runtime to create new object.
2084 __ bind(&new_object);
2085 {
2086 FrameScope scope(masm, StackFrame::MANUAL);
2087 __ EnterBuiltinFrame(rsi, rdi, r8);
2088 __ Push(rbx); // the first argument
2089 FastNewObjectStub stub(masm->isolate());
2090 __ CallStub(&stub);
2091 __ Pop(FieldOperand(rax, JSValue::kValueOffset));
2092 __ LeaveBuiltinFrame(rsi, rdi, r8);
2093 }
2094
2095 __ bind(&drop_frame_and_ret);
2096 {
2097 // Drop all arguments including the receiver.
2098 __ PopReturnAddressTo(rcx);
2099 __ SmiToInteger32(r8, r8);
2100 __ leap(rsp, Operand(rsp, r8, times_pointer_size, kPointerSize));
2101 __ PushReturnAddressFrom(rcx);
2102 __ Ret();
2103 }
2104 }
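
// For illustration, new String(...) wraps the converted value in a JSValue:
//
//   const s = new String(123);
//   typeof s;      // "object"
//   s.valueOf();   // "123"
//   s.length;      // 3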
2105
2106 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
2107 __ pushq(rbp);
2108 __ movp(rbp, rsp);
2109
2110 // Store the arguments adaptor context sentinel.
2111 __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2112
2113 // Push the function on the stack.
2114 __ Push(rdi);
2115
2116 // Preserve the number of arguments on the stack. Must preserve rax,
2117 // rbx and rcx because these registers are used when copying the
2118 // arguments and the receiver.
2119 __ Integer32ToSmi(r8, rax);
2120 __ Push(r8);
2121 }
2122
2123 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
2124 // Retrieve the number of arguments from the stack. Number is a Smi.
2125 __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
2126
2127 // Leave the frame.
2128 __ movp(rsp, rbp);
2129 __ popq(rbp);
2130
2131 // Remove caller arguments from the stack.
2132 __ PopReturnAddressTo(rcx);
2133 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
2134 __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
2135 __ PushReturnAddressFrom(rcx);
2136 }
2137
2138 // static
2139 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
2140 // ----------- S t a t e -------------
2141 // -- rdx : requested object size (untagged)
2142 // -- rsp[0] : return address
2143 // -----------------------------------
2144 __ Integer32ToSmi(rdx, rdx);
2145 __ PopReturnAddressTo(rcx);
2146 __ Push(rdx);
2147 __ PushReturnAddressFrom(rcx);
2148 __ Move(rsi, Smi::kZero);
2149 __ TailCallRuntime(Runtime::kAllocateInNewSpace);
2150 }
2151
2152 // static
2153 void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
2154 // ----------- S t a t e -------------
2155 // -- rdx : requested object size (untagged)
2156 // -- rsp[0] : return address
2157 // -----------------------------------
2158 __ Integer32ToSmi(rdx, rdx);
2159 __ PopReturnAddressTo(rcx);
2160 __ Push(rdx);
2161 __ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
2162 __ PushReturnAddressFrom(rcx);
2163 __ Move(rsi, Smi::kZero);
2164 __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
2165 }
2166
2167 // static
2168 void Builtins::Generate_Abort(MacroAssembler* masm) {
2169 // ----------- S t a t e -------------
2170 // -- rdx : message_id as Smi
2171 // -- rsp[0] : return address
2172 // -----------------------------------
2173 __ PopReturnAddressTo(rcx);
2174 __ Push(rdx);
2175 __ PushReturnAddressFrom(rcx);
2176 __ Move(rsi, Smi::kZero);
2177 __ TailCallRuntime(Runtime::kAbort);
2178 }
2179
2180 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2181 // ----------- S t a t e -------------
2182 // -- rax : actual number of arguments
2183 // -- rbx : expected number of arguments
2184 // -- rdx : new target (passed through to callee)
2185 // -- rdi : function (passed through to callee)
2186 // -----------------------------------
2187
2188 Label invoke, dont_adapt_arguments, stack_overflow;
2189 Counters* counters = masm->isolate()->counters();
2190 __ IncrementCounter(counters->arguments_adaptors(), 1);
2191
2192 Label enough, too_few;
2193 __ cmpp(rax, rbx);
2194 __ j(less, &too_few);
2195 __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
2196 __ j(equal, &dont_adapt_arguments);
2197
2198 { // Enough parameters: Actual >= expected.
2199 __ bind(&enough);
2200 EnterArgumentsAdaptorFrame(masm);
2201 // The registers rcx and r8 will be modified. The register rbx is only read.
2202 Generate_StackOverflowCheck(masm, rbx, rcx, r8, &stack_overflow);
2203
2204 // Copy receiver and all expected arguments.
2205 const int offset = StandardFrameConstants::kCallerSPOffset;
2206 __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
2207 __ Set(r8, -1); // account for receiver
2208
2209 Label copy;
2210 __ bind(&copy);
2211 __ incp(r8);
2212 __ Push(Operand(rax, 0));
2213 __ subp(rax, Immediate(kPointerSize));
2214 __ cmpp(r8, rbx);
2215 __ j(less, &copy);
2216 __ jmp(&invoke);
2217 }
2218
2219 { // Too few parameters: Actual < expected.
2220 __ bind(&too_few);
2221
2222 EnterArgumentsAdaptorFrame(masm);
2223 // The registers rcx and r8 will be modified. The register rbx is only read.
2224 Generate_StackOverflowCheck(masm, rbx, rcx, r8, &stack_overflow);
2225
2226 // Copy receiver and all actual arguments.
2227 const int offset = StandardFrameConstants::kCallerSPOffset;
2228 __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
2229 __ Set(r8, -1); // account for receiver
2230
2231 Label copy;
2232 __ bind(&copy);
2233 __ incp(r8);
2234 __ Push(Operand(rdi, 0));
2235 __ subp(rdi, Immediate(kPointerSize));
2236 __ cmpp(r8, rax);
2237 __ j(less, &copy);
2238
2239 // Fill remaining expected arguments with undefined values.
2240 Label fill;
2241 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
2242 __ bind(&fill);
2243 __ incp(r8);
2244 __ Push(kScratchRegister);
2245 __ cmpp(r8, rbx);
2246 __ j(less, &fill);
2247
2248 // Restore function pointer.
2249 __ movp(rdi, Operand(rbp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
2250 }
2251
2252 // Call the entry point.
2253 __ bind(&invoke);
2254 __ movp(rax, rbx);
2255 // rax : expected number of arguments
2256 // rdx : new target (passed through to callee)
2257 // rdi : function (passed through to callee)
2258 __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2259 __ call(rcx);
2260
2261 // Store offset of return address for deoptimizer.
2262 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2263
2264 // Leave frame and return.
2265 LeaveArgumentsAdaptorFrame(masm);
2266 __ ret(0);
2267
2268 // -------------------------------------------
2269 // Don't adapt arguments.
2270 // -------------------------------------------
2271 __ bind(&dont_adapt_arguments);
2272 __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2273 __ jmp(rcx);
2274
2275 __ bind(&stack_overflow);
2276 {
2277 FrameScope frame(masm, StackFrame::MANUAL);
2278 __ CallRuntime(Runtime::kThrowStackOverflow);
2279 __ int3();
2280 }
2281 }
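
// For illustration, the adaptor reconciles actual vs. expected argument counts:
//
//   function f(a, b, c) { return [a, b, c]; }
//   f(1);           // [1, undefined, undefined] -- "too few": missing formals
//                   // are filled with undefined
//   f(1, 2, 3, 4);  // [1, 2, 3] -- "enough": only the expected three reach the
//                   // formals; the extra argument remains visible through the
//                   // arguments object backed by the adaptor frame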
2282
2283 // static
2284 void Builtins::Generate_Apply(MacroAssembler* masm) {
2285 // ----------- S t a t e -------------
2286 // -- rax : argumentsList
2287 // -- rdi : target
2288 // -- rdx : new.target (checked to be constructor or undefined)
2289 // -- rsp[0] : return address.
2290 // -- rsp[8] : thisArgument
2291 // -----------------------------------
2292
2293 // Create the list of arguments from the array-like argumentsList.
2294 {
2295 Label create_arguments, create_array, create_runtime, done_create;
2296 __ JumpIfSmi(rax, &create_runtime);
2297
2298 // Load the map of argumentsList into rcx.
2299 __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));
2300
2301 // Load native context into rbx.
2302 __ movp(rbx, NativeContextOperand());
2303
2304 // Check if argumentsList is an (unmodified) arguments object.
2305 __ cmpp(rcx, ContextOperand(rbx, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
2306 __ j(equal, &create_arguments);
2307 __ cmpp(rcx, ContextOperand(rbx, Context::STRICT_ARGUMENTS_MAP_INDEX));
2308 __ j(equal, &create_arguments);
2309
2310 // Check if argumentsList is a fast JSArray.
2311 __ CmpInstanceType(rcx, JS_ARRAY_TYPE);
2312 __ j(equal, &create_array);
2313
2314 // Ask the runtime to create the list (actually a FixedArray).
2315 __ bind(&create_runtime);
2316 {
2317 FrameScope scope(masm, StackFrame::INTERNAL);
2318 __ Push(rdi);
2319 __ Push(rdx);
2320 __ Push(rax);
2321 __ CallRuntime(Runtime::kCreateListFromArrayLike);
2322 __ Pop(rdx);
2323 __ Pop(rdi);
2324 __ SmiToInteger32(rbx, FieldOperand(rax, FixedArray::kLengthOffset));
2325 }
2326 __ jmp(&done_create);
2327
2328 // Try to create the list from an arguments object.
2329 __ bind(&create_arguments);
2330 __ movp(rbx, FieldOperand(rax, JSArgumentsObject::kLengthOffset));
2331 __ movp(rcx, FieldOperand(rax, JSObject::kElementsOffset));
2332 __ cmpp(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
2333 __ j(not_equal, &create_runtime);
2334 __ SmiToInteger32(rbx, rbx);
2335 __ movp(rax, rcx);
2336 __ jmp(&done_create);
2337
2338 // Try to create the list from a JSArray object.
2339 __ bind(&create_array);
2340 __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
2341 __ DecodeField<Map::ElementsKindBits>(rcx);
2342 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2343 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
2344 STATIC_ASSERT(FAST_ELEMENTS == 2);
2345 __ cmpl(rcx, Immediate(FAST_ELEMENTS));
2346 __ j(above, &create_runtime);
2347 __ cmpl(rcx, Immediate(FAST_HOLEY_SMI_ELEMENTS));
2348 __ j(equal, &create_runtime);
2349 __ SmiToInteger32(rbx, FieldOperand(rax, JSArray::kLengthOffset));
2350 __ movp(rax, FieldOperand(rax, JSArray::kElementsOffset));
2351
2352 __ bind(&done_create);
2353 }
2354
2355 // Check for stack overflow.
2356 {
2357 // Check the stack for overflow. We are not trying to catch interruptions
2358 // (i.e. debug break and preemption) here, so check the "real stack limit".
2359 Label done;
2360 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
2361 __ movp(rcx, rsp);
2362 // Make rcx the space we have left. The stack might already be overflowed
2363 // here which will cause rcx to become negative.
2364 __ subp(rcx, kScratchRegister);
2365 __ sarp(rcx, Immediate(kPointerSizeLog2));
2366 // Check if the arguments will overflow the stack.
2367 __ cmpp(rcx, rbx);
2368 __ j(greater, &done, Label::kNear); // Signed comparison.
2369 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2370 __ bind(&done);
2371 }
2372
2373 // ----------- S t a t e -------------
2374 // -- rdi : target
2375 // -- rax : args (a FixedArray built from argumentsList)
2376 // -- rbx : len (number of elements to push from args)
2377 // -- rdx : new.target (checked to be constructor or undefined)
2378 // -- rsp[0] : return address.
2379 // -- rsp[8] : thisArgument
2380 // -----------------------------------
2381
2382 // Push arguments onto the stack (thisArgument is already on the stack).
2383 {
2384 __ PopReturnAddressTo(r8);
2385 __ Set(rcx, 0);
2386 Label done, loop;
2387 __ bind(&loop);
2388 __ cmpl(rcx, rbx);
2389 __ j(equal, &done, Label::kNear);
2390 __ Push(
2391 FieldOperand(rax, rcx, times_pointer_size, FixedArray::kHeaderSize));
2392 __ incl(rcx);
2393 __ jmp(&loop);
2394 __ bind(&done);
2395 __ PushReturnAddressFrom(r8);
2396 __ Move(rax, rcx);
2397 }
2398
2399 // Dispatch to Call or Construct depending on whether new.target is undefined.
2400 {
2401 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
2402 __ j(equal, masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2403 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2404 }
2405 }
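
// For illustration, this builtin spreads an array-like argumentsList before
// dispatching to Call or Construct:
//
//   Math.max.apply(null, [1, 3, 2]);                // 3, fast JSArray path
//   Math.max.apply(null, {length: 2, 0: 1, 1: 5});  // 5, generic array-likes go
//                                                   // through CreateListFromArrayLike
//   function g() { return Math.max.apply(null, arguments); }
//   g(4, 9, 2);                                     // 9, unmodified arguments
//                                                   // object fast path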
2406
2407 namespace {
2408
2409 // Drops top JavaScript frame and an arguments adaptor frame below it (if
2410 // present) preserving all the arguments prepared for current call.
2411 // Does nothing if debugger is currently active.
2412 // ES6 14.6.3. PrepareForTailCall
2413 //
2414 // Stack structure for the function g() tail calling f():
2415 //
2416 // ------- Caller frame: -------
2417 // | ...
2418 // | g()'s arg M
2419 // | ...
2420 // | g()'s arg 1
2421 // | g()'s receiver arg
2422 // | g()'s caller pc
2423 // ------- g()'s frame: -------
2424 // | g()'s caller fp <- fp
2425 // | g()'s context
2426 // | function pointer: g
2427 // | -------------------------
2428 // | ...
2429 // | ...
2430 // | f()'s arg N
2431 // | ...
2432 // | f()'s arg 1
2433 // | f()'s receiver arg
2434 // | f()'s caller pc <- sp
2435 // ----------------------
2436 //
2437 void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
2438 Register scratch1, Register scratch2,
2439 Register scratch3) {
2440 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
2441 Comment cmnt(masm, "[ PrepareForTailCall");
2442
2443 // Prepare for tail call only if ES2015 tail call elimination is active.
2444 Label done;
2445 ExternalReference is_tail_call_elimination_enabled =
2446 ExternalReference::is_tail_call_elimination_enabled_address(
2447 masm->isolate());
2448 __ Move(kScratchRegister, is_tail_call_elimination_enabled);
2449 __ cmpb(Operand(kScratchRegister, 0), Immediate(0));
2450 __ j(equal, &done);
2451
2452 // Drop possible interpreter handler/stub frame.
2453 {
2454 Label no_interpreter_frame;
2455 __ Cmp(Operand(rbp, CommonFrameConstants::kContextOrFrameTypeOffset),
2456 Smi::FromInt(StackFrame::STUB));
2457 __ j(not_equal, &no_interpreter_frame, Label::kNear);
2458 __ movp(rbp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2459 __ bind(&no_interpreter_frame);
2460 }
2461
2462 // Check if next frame is an arguments adaptor frame.
2463 Register caller_args_count_reg = scratch1;
2464 Label no_arguments_adaptor, formal_parameter_count_loaded;
2465 __ movp(scratch2, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2466 __ Cmp(Operand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset),
2467 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2468 __ j(not_equal, &no_arguments_adaptor, Label::kNear);
2469
2470 // Drop current frame and load arguments count from arguments adaptor frame.
2471 __ movp(rbp, scratch2);
2472 __ SmiToInteger32(
2473 caller_args_count_reg,
2474 Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
2475 __ jmp(&formal_parameter_count_loaded, Label::kNear);
2476
2477 __ bind(&no_arguments_adaptor);
2478 // Load caller's formal parameter count
2479 __ movp(scratch1, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
2480 __ movp(scratch1,
2481 FieldOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
2482 __ LoadSharedFunctionInfoSpecialField(
2483 caller_args_count_reg, scratch1,
2484 SharedFunctionInfo::kFormalParameterCountOffset);
2485
2486 __ bind(&formal_parameter_count_loaded);
2487
2488 ParameterCount callee_args_count(args_reg);
2489 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
2490 scratch3, ReturnAddressState::kOnStack);
2491 __ bind(&done);
2492 }
2493 } // namespace
2494
2495 // static
2496 void Builtins::Generate_CallFunction(MacroAssembler* masm,
2497 ConvertReceiverMode mode,
2498 TailCallMode tail_call_mode) {
2499 // ----------- S t a t e -------------
2500 // -- rax : the number of arguments (not including the receiver)
2501 // -- rdi : the function to call (checked to be a JSFunction)
2502 // -----------------------------------
2503 StackArgumentsAccessor args(rsp, rax);
2504 __ AssertFunction(rdi);
2505
2506 // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2507 // Check that the function is not a "classConstructor".
2508 Label class_constructor;
2509 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2510 __ testb(FieldOperand(rdx, SharedFunctionInfo::kFunctionKindByteOffset),
2511 Immediate(SharedFunctionInfo::kClassConstructorBitsWithinByte));
2512 __ j(not_zero, &class_constructor);
2513
2514 // ----------- S t a t e -------------
2515 // -- rax : the number of arguments (not including the receiver)
2516 // -- rdx : the shared function info.
2517 // -- rdi : the function to call (checked to be a JSFunction)
2518 // -----------------------------------
2519
2520 // Enter the context of the function; ToObject has to run in the function
2521 // context, and we also need to take the global proxy from the function
2522 // context in case of conversion.
2523 STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
2524 SharedFunctionInfo::kStrictModeByteOffset);
2525 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2526 // We need to convert the receiver for non-native sloppy mode functions.
2527 Label done_convert;
2528 __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
2529 Immediate((1 << SharedFunctionInfo::kNativeBitWithinByte) |
2530 (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
2531 __ j(not_zero, &done_convert);
2532 {
2533 // ----------- S t a t e -------------
2534 // -- rax : the number of arguments (not including the receiver)
2535 // -- rdx : the shared function info.
2536 // -- rdi : the function to call (checked to be a JSFunction)
2537 // -- rsi : the function context.
2538 // -----------------------------------
2539
2540 if (mode == ConvertReceiverMode::kNullOrUndefined) {
2541 // Patch receiver to global proxy.
2542 __ LoadGlobalProxy(rcx);
2543 } else {
2544 Label convert_to_object, convert_receiver;
2545 __ movp(rcx, args.GetReceiverOperand());
2546 __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
2547 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2548 __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
2549 __ j(above_equal, &done_convert);
2550 if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2551 Label convert_global_proxy;
2552 __ JumpIfRoot(rcx, Heap::kUndefinedValueRootIndex,
2553 &convert_global_proxy, Label::kNear);
2554 __ JumpIfNotRoot(rcx, Heap::kNullValueRootIndex, &convert_to_object,
2555 Label::kNear);
2556 __ bind(&convert_global_proxy);
2557 {
2558 // Patch receiver to global proxy.
2559 __ LoadGlobalProxy(rcx);
2560 }
2561 __ jmp(&convert_receiver);
2562 }
2563 __ bind(&convert_to_object);
2564 {
2565 // Convert receiver using ToObject.
2566 // TODO(bmeurer): Inline the allocation here to avoid building the frame
2567 // in the fast case? (fall back to AllocateInNewSpace?)
2568 FrameScope scope(masm, StackFrame::INTERNAL);
2569 __ Integer32ToSmi(rax, rax);
2570 __ Push(rax);
2571 __ Push(rdi);
2572 __ movp(rax, rcx);
2573 __ Push(rsi);
2574 __ Call(masm->isolate()->builtins()->ToObject(),
2575 RelocInfo::CODE_TARGET);
2576 __ Pop(rsi);
2577 __ movp(rcx, rax);
2578 __ Pop(rdi);
2579 __ Pop(rax);
2580 __ SmiToInteger32(rax, rax);
2581 }
2582 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2583 __ bind(&convert_receiver);
2584 }
2585 __ movp(args.GetReceiverOperand(), rcx);
2586 }
2587 __ bind(&done_convert);
2588
2589 // ----------- S t a t e -------------
2590 // -- rax : the number of arguments (not including the receiver)
2591 // -- rdx : the shared function info.
2592 // -- rdi : the function to call (checked to be a JSFunction)
2593 // -- rsi : the function context.
2594 // -----------------------------------
2595
2596 if (tail_call_mode == TailCallMode::kAllow) {
2597 PrepareForTailCall(masm, rax, rbx, rcx, r8);
2598 }
2599
2600 __ LoadSharedFunctionInfoSpecialField(
2601 rbx, rdx, SharedFunctionInfo::kFormalParameterCountOffset);
2602 ParameterCount actual(rax);
2603 ParameterCount expected(rbx);
2604
2605 __ InvokeFunctionCode(rdi, no_reg, expected, actual, JUMP_FUNCTION,
2606 CheckDebugStepCallWrapper());
2607
2608 // The function is a "classConstructor", need to raise an exception.
2609 __ bind(&class_constructor);
2610 {
2611 FrameScope frame(masm, StackFrame::INTERNAL);
2612 __ Push(rdi);
2613 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2614 }
2615 }
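
// For illustration, the receiver conversion above applies only to sloppy-mode,
// non-native functions; strict functions see the receiver unchanged:
//
//   function sloppy() { return this; }
//   sloppy();            // global proxy -- undefined receiver is patched
//   sloppy.call(42);     // a Number wrapper object, produced via ToObject
//   (function() { "use strict"; return this; }).call(42);  // 42, unchanged
//   (class {})();        // TypeError -- class constructors cannot be [[Call]]ed
//                        // (class_constructor path above)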
2616
2617 namespace {
2618
2619 void Generate_PushBoundArguments(MacroAssembler* masm) {
2620 // ----------- S t a t e -------------
2621 // -- rax : the number of arguments (not including the receiver)
2622 // -- rdx : new.target (only in case of [[Construct]])
2623 // -- rdi : target (checked to be a JSBoundFunction)
2624 // -----------------------------------
2625
2626 // Load [[BoundArguments]] into rcx and length of that into rbx.
2627 Label no_bound_arguments;
2628 __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
2629 __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
2630 __ testl(rbx, rbx);
2631 __ j(zero, &no_bound_arguments);
2632 {
2633 // ----------- S t a t e -------------
2634 // -- rax : the number of arguments (not including the receiver)
2635 // -- rdx : new.target (only in case of [[Construct]])
2636 // -- rdi : target (checked to be a JSBoundFunction)
2637 // -- rcx : the [[BoundArguments]] (implemented as FixedArray)
2638 // -- rbx : the number of [[BoundArguments]] (checked to be non-zero)
2639 // -----------------------------------
2640
2641 // Reserve stack space for the [[BoundArguments]].
2642 {
2643 Label done;
2644 __ leap(kScratchRegister, Operand(rbx, times_pointer_size, 0));
2645 __ subp(rsp, kScratchRegister);
2646 // Check the stack for overflow. We are not trying to catch interruptions
2647 // (i.e. debug break and preemption) here, so check the "real stack
2648 // limit".
2649 __ CompareRoot(rsp, Heap::kRealStackLimitRootIndex);
2650 __ j(greater, &done, Label::kNear); // Signed comparison.
2651 // Restore the stack pointer.
2652 __ leap(rsp, Operand(rsp, rbx, times_pointer_size, 0));
2653 {
2654 FrameScope scope(masm, StackFrame::MANUAL);
2655 __ EnterFrame(StackFrame::INTERNAL);
2656 __ CallRuntime(Runtime::kThrowStackOverflow);
2657 }
2658 __ bind(&done);
2659 }
2660
2661 // Adjust effective number of arguments to include return address.
2662 __ incl(rax);
2663
2664 // Relocate arguments and return address down the stack.
2665 {
2666 Label loop;
2667 __ Set(rcx, 0);
2668 __ leap(rbx, Operand(rsp, rbx, times_pointer_size, 0));
2669 __ bind(&loop);
2670 __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
2671 __ movp(Operand(rsp, rcx, times_pointer_size, 0), kScratchRegister);
2672 __ incl(rcx);
2673 __ cmpl(rcx, rax);
2674 __ j(less, &loop);
2675 }
2676
2677 // Copy [[BoundArguments]] to the stack (below the arguments).
2678 {
2679 Label loop;
2680 __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
2681 __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
2682 __ bind(&loop);
2683 __ decl(rbx);
2684 __ movp(kScratchRegister, FieldOperand(rcx, rbx, times_pointer_size,
2685 FixedArray::kHeaderSize));
2686 __ movp(Operand(rsp, rax, times_pointer_size, 0), kScratchRegister);
2687 __ leal(rax, Operand(rax, 1));
2688 __ j(greater, &loop);
2689 }
2690
2691 // Adjust effective number of arguments (rax contains the number of
2692 // arguments from the call plus return address plus the number of
2693 // [[BoundArguments]]), so we need to subtract one for the return address.
2694 __ decl(rax);
2695 }
2696 __ bind(&no_bound_arguments);
2697 }
2698
2699 } // namespace
2700
2701 // static
2702 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
2703 TailCallMode tail_call_mode) {
2704 // ----------- S t a t e -------------
2705 // -- rax : the number of arguments (not including the receiver)
2706 // -- rdi : the function to call (checked to be a JSBoundFunction)
2707 // -----------------------------------
2708 __ AssertBoundFunction(rdi);
2709
2710 if (tail_call_mode == TailCallMode::kAllow) {
2711 PrepareForTailCall(masm, rax, rbx, rcx, r8);
2712 }
2713
2714 // Patch the receiver to [[BoundThis]].
2715 StackArgumentsAccessor args(rsp, rax);
2716 __ movp(rbx, FieldOperand(rdi, JSBoundFunction::kBoundThisOffset));
2717 __ movp(args.GetReceiverOperand(), rbx);
2718
2719 // Push the [[BoundArguments]] onto the stack.
2720 Generate_PushBoundArguments(masm);
2721
2722 // Call the [[BoundTargetFunction]] via the Call builtin.
2723 __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2724 __ Load(rcx,
2725 ExternalReference(Builtins::kCall_ReceiverIsAny, masm->isolate()));
2726 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
2727 __ jmp(rcx);
2728 }
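
// For illustration, a bound function call patches the receiver to [[BoundThis]]
// and inserts the [[BoundArguments]] ahead of the call-site arguments:
//
//   function f(a, b, c) { return [this.x, a, b, c]; }
//   const bound = f.bind({x: 0}, 1, 2);
//   bound(3);   // [0, 1, 2, 3]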
2729
2730 // static
2731 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2732 TailCallMode tail_call_mode) {
2733 // ----------- S t a t e -------------
2734 // -- rax : the number of arguments (not including the receiver)
2735 // -- rdi : the target to call (can be any Object)
2736 // -----------------------------------
2737 StackArgumentsAccessor args(rsp, rax);
2738
2739 Label non_callable, non_function, non_smi;
2740 __ JumpIfSmi(rdi, &non_callable);
2741 __ bind(&non_smi);
2742 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2743 __ j(equal, masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
2744 RelocInfo::CODE_TARGET);
2745 __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
2746 __ j(equal, masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
2747 RelocInfo::CODE_TARGET);
2748
2749 // Check if target has a [[Call]] internal method.
2750 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
2751 Immediate(1 << Map::kIsCallable));
2752 __ j(zero, &non_callable);
2753
2754 __ CmpInstanceType(rcx, JS_PROXY_TYPE);
2755 __ j(not_equal, &non_function);
2756
2757 // 0. Prepare for tail call if necessary.
2758 if (tail_call_mode == TailCallMode::kAllow) {
2759 PrepareForTailCall(masm, rax, rbx, rcx, r8);
2760 }
2761
2762 // 1. Runtime fallback for Proxy [[Call]].
2763 __ PopReturnAddressTo(kScratchRegister);
2764 __ Push(rdi);
2765 __ PushReturnAddressFrom(kScratchRegister);
2766 // Increase the arguments size to include the pushed function and the
2767 // existing receiver on the stack.
2768 __ addp(rax, Immediate(2));
2769 // Tail-call to the runtime.
2770 __ JumpToExternalReference(
2771 ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
2772
2773 // 2. Call to something else, which might have a [[Call]] internal method (if
2774 // not we raise an exception).
2775 __ bind(&non_function);
2776 // Overwrite the original receiver with the (original) target.
2777 __ movp(args.GetReceiverOperand(), rdi);
2778 // Let the "call_as_function_delegate" take care of the rest.
2779 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi);
2780 __ Jump(masm->isolate()->builtins()->CallFunction(
2781 ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
2782 RelocInfo::CODE_TARGET);
2783
2784 // 3. Call to something that is not callable.
2785 __ bind(&non_callable);
2786 {
2787 FrameScope scope(masm, StackFrame::INTERNAL);
2788 __ Push(rdi);
2789 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2790 }
2791 }
2792
2793 // static
2794 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2795 // ----------- S t a t e -------------
2796 // -- rax : the number of arguments (not including the receiver)
2797 // -- rdx : the new target (checked to be a constructor)
2798 // -- rdi : the constructor to call (checked to be a JSFunction)
2799 // -----------------------------------
2800 __ AssertFunction(rdi);
2801
2802 // Calling convention for function specific ConstructStubs require
2803 // rbx to contain either an AllocationSite or undefined.
2804 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
2805
2806 // Tail call to the function-specific construct stub (still in the caller
2807 // context at this point).
2808 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2809 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
2810 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
2811 __ jmp(rcx);
2812 }
2813
2814 // static
2815 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2816 // ----------- S t a t e -------------
2817 // -- rax : the number of arguments (not including the receiver)
2818 // -- rdx : the new target (checked to be a constructor)
2819 // -- rdi : the constructor to call (checked to be a JSBoundFunction)
2820 // -----------------------------------
2821 __ AssertBoundFunction(rdi);
2822
2823 // Push the [[BoundArguments]] onto the stack.
2824 Generate_PushBoundArguments(masm);
2825
2826 // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2827 {
2828 Label done;
2829 __ cmpp(rdi, rdx);
2830 __ j(not_equal, &done, Label::kNear);
2831 __ movp(rdx,
2832 FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2833 __ bind(&done);
2834 }
2835
2836 // Construct the [[BoundTargetFunction]] via the Construct builtin.
2837 __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2838 __ Load(rcx, ExternalReference(Builtins::kConstruct, masm->isolate()));
2839 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
2840 __ jmp(rcx);
2841 }
2842
2843 // static
2844 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
2845 // ----------- S t a t e -------------
2846 // -- rax : the number of arguments (not including the receiver)
2847 // -- rdi : the constructor to call (checked to be a JSProxy)
2848 // -- rdx : the new target (either the same as the constructor or
2849 // the JSFunction on which new was invoked initially)
2850 // -----------------------------------
2851
2852 // Call into the Runtime for Proxy [[Construct]].
2853 __ PopReturnAddressTo(kScratchRegister);
2854 __ Push(rdi);
2855 __ Push(rdx);
2856 __ PushReturnAddressFrom(kScratchRegister);
2857 // Include the pushed new_target, constructor and the receiver.
2858 __ addp(rax, Immediate(3));
2859 __ JumpToExternalReference(
2860 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
2861 }
2862
2863 // static
2864 void Builtins::Generate_Construct(MacroAssembler* masm) {
2865 // ----------- S t a t e -------------
2866 // -- rax : the number of arguments (not including the receiver)
2867 // -- rdx : the new target (either the same as the constructor or
2868 // the JSFunction on which new was invoked initially)
2869 // -- rdi : the constructor to call (can be any Object)
2870 // -----------------------------------
2871 StackArgumentsAccessor args(rsp, rax);
2872
2873 // Check if target is a Smi.
2874 Label non_constructor;
2875 __ JumpIfSmi(rdi, &non_constructor, Label::kNear);
2876
2877 // Dispatch based on instance type.
2878 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2879 __ j(equal, masm->isolate()->builtins()->ConstructFunction(),
2880 RelocInfo::CODE_TARGET);
2881
2882 // Check if target has a [[Construct]] internal method.
2883 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
2884 Immediate(1 << Map::kIsConstructor));
2885 __ j(zero, &non_constructor, Label::kNear);
2886
2887 // Only dispatch to bound functions after checking whether they are
2888 // constructors.
2889 __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
2890 __ j(equal, masm->isolate()->builtins()->ConstructBoundFunction(),
2891 RelocInfo::CODE_TARGET);
2892
2893 // Only dispatch to proxies after checking whether they are constructors.
2894 __ CmpInstanceType(rcx, JS_PROXY_TYPE);
2895 __ j(equal, masm->isolate()->builtins()->ConstructProxy(),
2896 RelocInfo::CODE_TARGET);
2897
2898 // Called Construct on an exotic Object with a [[Construct]] internal method.
2899 {
2900 // Overwrite the original receiver with the (original) target.
2901 __ movp(args.GetReceiverOperand(), rdi);
2902 // Let the "call_as_constructor_delegate" take care of the rest.
2903 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi);
2904 __ Jump(masm->isolate()->builtins()->CallFunction(),
2905 RelocInfo::CODE_TARGET);
2906 }
2907
2908 // Called Construct on an Object that doesn't have a [[Construct]] internal
2909 // method.
2910 __ bind(&non_constructor);
2911 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
2912 RelocInfo::CODE_TARGET);
2913 }
2914
2915 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
2916 Register function_template_info,
2917 Register scratch0, Register scratch1,
2918 Register scratch2,
2919 Label* receiver_check_failed) {
  Register signature = scratch0;
  Register map = scratch1;
  Register constructor = scratch2;

  // If there is no signature, return the holder.
  __ movp(signature, FieldOperand(function_template_info,
                                  FunctionTemplateInfo::kSignatureOffset));
  __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
  Label receiver_check_passed;
  __ j(equal, &receiver_check_passed, Label::kNear);

  // Walk the prototype chain.
  __ movp(map, FieldOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(constructor, map, kScratchRegister);
  __ CmpInstanceType(kScratchRegister, JS_FUNCTION_TYPE);
  Label next_prototype;
  __ j(not_equal, &next_prototype, Label::kNear);

  // Get the constructor's signature.
  Register type = constructor;
  __ movp(type,
          FieldOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ movp(type, FieldOperand(type, SharedFunctionInfo::kFunctionDataOffset));
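  // For API functions the SharedFunctionInfo's function data slot holds the
  // FunctionTemplateInfo; for other functions it may hold something else,
  // which the Smi and instance type checks below filter out.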

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ cmpp(signature, type);
  __ j(equal, &receiver_check_passed, Label::kNear);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype, Label::kNear);
  __ CmpObjectType(type, FUNCTION_TEMPLATE_INFO_TYPE, kScratchRegister);
  __ j(not_equal, &next_prototype, Label::kNear);

  // Otherwise load the parent function template and iterate.
  __ movp(type,
          FieldOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ jmp(&function_template_loop, Label::kNear);

  // Load the next prototype.
  __ bind(&next_prototype);
  __ testq(FieldOperand(map, Map::kBitField3Offset),
           Immediate(Map::HasHiddenPrototype::kMask));
  __ j(zero, receiver_check_failed);
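  // Only hidden prototypes are followed; reaching an ordinary prototype
  // boundary means the receiver is not compatible with the signature.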
  __ movp(receiver, FieldOperand(map, Map::kPrototypeOffset));
  __ movp(map, FieldOperand(receiver, HeapObject::kMapOffset));
  // Iterate.
  __ jmp(&prototype_loop_start, Label::kNear);

  __ bind(&receiver_check_passed);
}

void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : number of arguments (not including the receiver)
  // -- rdi : callee
  // -- rsi : context
  // -- rsp[0] : return address
  // -- rsp[8] : last argument
  // -- ...
  // -- rsp[rax * 8] : first argument
  // -- rsp[(rax + 1) * 8] : receiver
  // -----------------------------------

  StackArgumentsAccessor args(rsp, rax);

  // Load the FunctionTemplateInfo.
  __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rbx, FieldOperand(rbx, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ movp(rcx, args.GetReceiverOperand());
  CompatibleReceiverCheck(masm, rcx, rbx, rdx, r8, r9, &receiver_check_failed);

  // Load the fast handler code object from the FunctionTemplateInfo's call
  // code, and jump to its first instruction.
  __ movp(rdx, FieldOperand(rbx, FunctionTemplateInfo::kCallCodeOffset));
  __ movp(rdx, FieldOperand(rdx, CallHandlerInfo::kFastHandlerOffset));
  __ addp(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(rdx);

  // Compatible receiver check failed: pop return address, arguments and
  // receiver and throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  __ PopReturnAddressTo(rbx);
  __ leap(rax, Operand(rax, times_pointer_size, 1 * kPointerSize));
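  // rax now holds (argc + 1) * kPointerSize, the combined size in bytes of
  // the arguments and the receiver, which are dropped below.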
  __ addp(rsp, rax);
  __ PushReturnAddressFrom(rbx);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
  }
}

static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
                                              bool has_handler_frame) {
  // Lookup the function in the JavaScript frame.
  if (has_handler_frame) {
    __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
    __ movp(rax, Operand(rax, JavaScriptFrameConstants::kFunctionOffset));
  } else {
    __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(rax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  Label skip;
  // If the code object is null, just return to the caller.
  __ cmpp(rax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Drop any potential handler frame that may be sitting on top of the actual
  // JavaScript frame. This is the case when OSR is triggered from bytecode.
  if (has_handler_frame) {
    __ leave();
  }

  // Load deoptimization data from the code object.
  __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));
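  // rax still holds the tagged Code object, hence the kHeapObjectTag
  // adjustment in the operands above and below.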

  // Load the OSR entrypoint offset from the deoptimization data.
  __ SmiToInteger32(
      rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
                            DeoptimizationInputData::kOsrPcOffsetIndex) -
                       kHeapObjectTag));

  // Compute the target address = code_obj + header_size + osr_offset
  __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ movq(StackOperandForReturnAddress(0), rax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}

void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, false);
}

void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, true);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64