1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/v8.h"
6
7 #if V8_TARGET_ARCH_X64
8
9 #include "src/code-factory.h"
10 #include "src/codegen.h"
11 #include "src/deoptimizer.h"
12 #include "src/full-codegen.h"
13
14 namespace v8 {
15 namespace internal {
16
17
18 #define __ ACCESS_MASM(masm)
19
20
21 void Builtins::Generate_Adaptor(MacroAssembler* masm,
22 CFunctionId id,
23 BuiltinExtraArguments extra_args) {
24 // ----------- S t a t e -------------
25 // -- rax : number of arguments excluding receiver
26 // -- rdi : called function (only guaranteed when
27 // extra_args requires it)
28 // -- rsi : context
29 // -- rsp[0] : return address
30 // -- rsp[8] : last argument
31 // -- ...
32 // -- rsp[8 * argc] : first argument (argc == rax)
33 // -- rsp[8 * (argc + 1)] : receiver
34 // -----------------------------------
35
36 // Insert extra arguments.
37 int num_extra_args = 0;
38 if (extra_args == NEEDS_CALLED_FUNCTION) {
39 num_extra_args = 1;
40 __ PopReturnAddressTo(kScratchRegister);
41 __ Push(rdi);
42 __ PushReturnAddressFrom(kScratchRegister);
43 } else {
44 DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
45 }
46
47 // JumpToExternalReference expects rax to contain the number of arguments
48 // including the receiver and the extra arguments.
49 __ addp(rax, Immediate(num_extra_args + 1));
50 __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
51 }
52
53
54 static void CallRuntimePassFunction(
55 MacroAssembler* masm, Runtime::FunctionId function_id) {
56 FrameScope scope(masm, StackFrame::INTERNAL);
57 // Push a copy of the function onto the stack.
58 __ Push(rdi);
59 // Function is also the parameter to the runtime call.
60 __ Push(rdi);
61
62 __ CallRuntime(function_id, 1);
63 // Restore the function.
64 __ Pop(rdi);
65 }
66
67
68 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
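// Load the code object from the function's SharedFunctionInfo and tail-call
// its entry point (the first instruction past the Code header).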
69 __ movp(kScratchRegister,
70 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
71 __ movp(kScratchRegister,
72 FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
73 __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
74 __ jmp(kScratchRegister);
75 }
76
77
78 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
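// rax holds the Code object returned by the runtime call; compute its entry
// address past the Code header and tail-call it.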
79 __ leap(rax, FieldOperand(rax, Code::kHeaderSize));
80 __ jmp(rax);
81 }
82
83
84 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
85 // Checking whether the queued function is ready for install is optional,
86 // since we come across interrupts and stack checks elsewhere. However,
87 // not checking may delay installing ready functions, and always checking
88 // would be quite expensive. A good compromise is to first check against
89 // stack limit as a cue for an interrupt signal.
90 Label ok;
91 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
92 __ j(above_equal, &ok);
93
94 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
95 GenerateTailCallToReturnedCode(masm);
96
97 __ bind(&ok);
98 GenerateTailCallToSharedCode(masm);
99 }
100
101
102 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
103 bool is_api_function,
104 bool create_memento) {
105 // ----------- S t a t e -------------
106 // -- rax: number of arguments
107 // -- rdi: constructor function
108 // -- rbx: allocation site or undefined
109 // -----------------------------------
110
111 // Should never create mementos for api functions.
112 DCHECK(!is_api_function || !create_memento);
113
114 // Enter a construct frame.
115 {
116 FrameScope scope(masm, StackFrame::CONSTRUCT);
117
118 if (create_memento) {
119 __ AssertUndefinedOrAllocationSite(rbx);
120 __ Push(rbx);
121 }
122
123 // Store a smi-tagged arguments count on the stack.
124 __ Integer32ToSmi(rax, rax);
125 __ Push(rax);
126
127 // Push the function to invoke on the stack.
128 __ Push(rdi);
129
130 // Try to allocate the object without transitioning into C code. If any of
131 // the preconditions is not met, the code bails out to the runtime call.
132 Label rt_call, allocated;
133 if (FLAG_inline_new) {
134 Label undo_allocation;
135
136 ExternalReference debug_step_in_fp =
137 ExternalReference::debug_step_in_fp_address(masm->isolate());
138 __ Move(kScratchRegister, debug_step_in_fp);
139 __ cmpp(Operand(kScratchRegister, 0), Immediate(0));
140 __ j(not_equal, &rt_call);
141
142 // Verified that the constructor is a JSFunction.
143 // Load the initial map and verify that it is in fact a map.
144 // rdi: constructor
145 __ movp(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
146 // Will both indicate a NULL and a Smi
147 DCHECK(kSmiTag == 0);
148 __ JumpIfSmi(rax, &rt_call);
149 // rdi: constructor
150 // rax: initial map (if proven valid below)
151 __ CmpObjectType(rax, MAP_TYPE, rbx);
152 __ j(not_equal, &rt_call);
153
154 // Check that the constructor is not constructing a JSFunction (see
155 // comments in Runtime_NewObject in runtime.cc); in that case the
156 // initial map's instance type would be JS_FUNCTION_TYPE.
157 // rdi: constructor
158 // rax: initial map
159 __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
160 __ j(equal, &rt_call);
161
162 if (!is_api_function) {
163 Label allocate;
164 // The code below relies on these assumptions.
165 STATIC_ASSERT(JSFunction::kNoSlackTracking == 0);
166 STATIC_ASSERT(Map::ConstructionCount::kShift +
167 Map::ConstructionCount::kSize == 32);
168 // Check if slack tracking is enabled.
169 __ movl(rsi, FieldOperand(rax, Map::kBitField3Offset));
170 __ shrl(rsi, Immediate(Map::ConstructionCount::kShift));
171 __ j(zero, &allocate); // JSFunction::kNoSlackTracking
172 // Decrease generous allocation count.
173 __ subl(FieldOperand(rax, Map::kBitField3Offset),
174 Immediate(1 << Map::ConstructionCount::kShift));
175
176 __ cmpl(rsi, Immediate(JSFunction::kFinishSlackTracking));
177 __ j(not_equal, &allocate);
178
179 __ Push(rax);
180 __ Push(rdi);
181
182 __ Push(rdi); // constructor
183 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
184
185 __ Pop(rdi);
186 __ Pop(rax);
187 __ xorl(rsi, rsi); // JSFunction::kNoSlackTracking
188
189 __ bind(&allocate);
190 }
191
192 // Now allocate the JSObject on the heap.
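// The map stores the instance size in words; the shift below converts it
// to bytes.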
193 __ movzxbp(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
194 __ shlp(rdi, Immediate(kPointerSizeLog2));
195 if (create_memento) {
196 __ addp(rdi, Immediate(AllocationMemento::kSize));
197 }
198 // rdi: size of new object
199 __ Allocate(rdi,
200 rbx,
201 rdi,
202 no_reg,
203 &rt_call,
204 NO_ALLOCATION_FLAGS);
205 Factory* factory = masm->isolate()->factory();
206 // Allocated the JSObject, now initialize the fields.
207 // rax: initial map
208 // rbx: JSObject (not HeapObject tagged - the actual address).
209 // rdi: start of next object (including memento if create_memento)
210 __ movp(Operand(rbx, JSObject::kMapOffset), rax);
211 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
212 __ movp(Operand(rbx, JSObject::kPropertiesOffset), rcx);
213 __ movp(Operand(rbx, JSObject::kElementsOffset), rcx);
214 // Set extra fields in the newly allocated object.
215 // rax: initial map
216 // rbx: JSObject
217 // rdi: start of next object (including memento if create_memento)
218 // rsi: slack tracking counter (non-API function case)
219 __ leap(rcx, Operand(rbx, JSObject::kHeaderSize));
220 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
221 if (!is_api_function) {
222 Label no_inobject_slack_tracking;
223
224 // Check if slack tracking is enabled.
225 __ cmpl(rsi, Immediate(JSFunction::kNoSlackTracking));
226 __ j(equal, &no_inobject_slack_tracking);
227
228 // Allocate object with a slack.
229 __ movzxbp(rsi,
230 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
231 __ leap(rsi,
232 Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize));
233 // rsi: offset of first field after pre-allocated fields
234 if (FLAG_debug_code) {
235 __ cmpp(rsi, rdi);
236 __ Assert(less_equal,
237 kUnexpectedNumberOfPreAllocatedPropertyFields);
238 }
239 __ InitializeFieldsWithFiller(rcx, rsi, rdx);
240 __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
241 // Fill the remaining fields with one pointer filler map.
242
243 __ bind(&no_inobject_slack_tracking);
244 }
245 if (create_memento) {
246 __ leap(rsi, Operand(rdi, -AllocationMemento::kSize));
247 __ InitializeFieldsWithFiller(rcx, rsi, rdx);
248
249 // Fill in memento fields if necessary.
250 // rsi: points to the allocated but uninitialized memento.
251 __ Move(Operand(rsi, AllocationMemento::kMapOffset),
252 factory->allocation_memento_map());
253 // Get the cell or undefined.
254 __ movp(rdx, Operand(rsp, kPointerSize*2));
255 __ movp(Operand(rsi, AllocationMemento::kAllocationSiteOffset), rdx);
256 } else {
257 __ InitializeFieldsWithFiller(rcx, rdi, rdx);
258 }
259
260 // Add the object tag to make the JSObject real, so that we can continue
261 // and jump into the continuation code at any time from now on. Any
262 // failures need to undo the allocation, so that the heap is in a
263 // consistent state and verifiable.
264 // rax: initial map
265 // rbx: JSObject
266 // rdi: start of next object
267 __ orp(rbx, Immediate(kHeapObjectTag));
268
269 // Check if a non-empty properties array is needed.
270 // Allocate and initialize a FixedArray if it is.
271 // rax: initial map
272 // rbx: JSObject
273 // rdi: start of next object
274 // Calculate the total number of properties described by the map.
275 __ movzxbp(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
276 __ movzxbp(rcx,
277 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
278 __ addp(rdx, rcx);
279 // Calculate unused properties past the end of the in-object properties.
280 __ movzxbp(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
281 __ subp(rdx, rcx);
282 // Done if no extra properties are to be allocated.
283 __ j(zero, &allocated);
284 __ Assert(positive, kPropertyAllocationCountFailed);
285
286 // Scale the number of elements by pointer size and add the header for
287 // FixedArrays to the start of the next object calculation from above.
288 // rbx: JSObject
289 // rdi: start of next object (will be start of FixedArray)
290 // rdx: number of elements in properties array
291 __ Allocate(FixedArray::kHeaderSize,
292 times_pointer_size,
293 rdx,
294 rdi,
295 rax,
296 no_reg,
297 &undo_allocation,
298 RESULT_CONTAINS_TOP);
299
300 // Initialize the FixedArray.
301 // rbx: JSObject
302 // rdi: FixedArray
303 // rdx: number of elements
304 // rax: start of next object
305 __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
306 __ movp(Operand(rdi, HeapObject::kMapOffset), rcx); // setup the map
307 __ Integer32ToSmi(rdx, rdx);
308 __ movp(Operand(rdi, FixedArray::kLengthOffset), rdx); // and length
309
310 // Initialize the fields to undefined.
311 // rbx: JSObject
312 // rdi: FixedArray
313 // rax: start of next object
314 // rdx: number of elements
315 { Label loop, entry;
316 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
317 __ leap(rcx, Operand(rdi, FixedArray::kHeaderSize));
318 __ jmp(&entry);
319 __ bind(&loop);
320 __ movp(Operand(rcx, 0), rdx);
321 __ addp(rcx, Immediate(kPointerSize));
322 __ bind(&entry);
323 __ cmpp(rcx, rax);
324 __ j(below, &loop);
325 }
326
327 // Store the initialized FixedArray into the properties field of
328 // the JSObject
329 // rbx: JSObject
330 // rdi: FixedArray
331 __ orp(rdi, Immediate(kHeapObjectTag)); // add the heap tag
332 __ movp(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);
333
334
335 // Continue with JSObject being successfully allocated
336 // rbx: JSObject
337 __ jmp(&allocated);
338
339 // Undo the setting of the new top so that the heap is verifiable. For
340 // example, the map's unused properties potentially do not match the
341 // allocated object's unused properties.
342 // rbx: JSObject (previous new top)
343 __ bind(&undo_allocation);
344 __ UndoAllocationInNewSpace(rbx);
345 }
346
347 // Allocate the new receiver object using the runtime call.
348 // rdi: function (constructor)
349 __ bind(&rt_call);
350 int offset = 0;
351 if (create_memento) {
352 // Get the cell or allocation site.
353 __ movp(rdi, Operand(rsp, kPointerSize*2));
354 __ Push(rdi);
355 offset = kPointerSize;
356 }
357
358 // Must restore rsi (context) and rdi (constructor) before calling runtime.
359 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
360 __ movp(rdi, Operand(rsp, offset));
361 __ Push(rdi);
362 if (create_memento) {
363 __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
364 } else {
365 __ CallRuntime(Runtime::kNewObject, 1);
366 }
367 __ movp(rbx, rax); // store result in rbx
368
369 // If we ended up using the runtime, and we want a memento, then the
370 // runtime call made it for us, and we shouldn't do create count
371 // increment.
372 Label count_incremented;
373 if (create_memento) {
374 __ jmp(&count_incremented);
375 }
376
377 // New object allocated.
378 // rbx: newly allocated object
379 __ bind(&allocated);
380
381 if (create_memento) {
382 __ movp(rcx, Operand(rsp, kPointerSize*2));
383 __ Cmp(rcx, masm->isolate()->factory()->undefined_value());
384 __ j(equal, &count_incremented);
385 // rcx is an AllocationSite. We are creating a memento from it, so we
386 // need to increment the memento create count.
387 __ SmiAddConstant(
388 FieldOperand(rcx, AllocationSite::kPretenureCreateCountOffset),
389 Smi::FromInt(1));
390 __ bind(&count_incremented);
391 }
392
393 // Retrieve the function from the stack.
394 __ Pop(rdi);
395
396 // Retrieve smi-tagged arguments count from the stack.
397 __ movp(rax, Operand(rsp, 0));
398 __ SmiToInteger32(rax, rax);
399
400 // Push the allocated receiver to the stack. We need two copies
401 // because we may have to return the original one and the calling
402 // conventions dictate that the called function pops the receiver.
403 __ Push(rbx);
404 __ Push(rbx);
405
406 // Set up pointer to last argument.
407 __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
408
409 // Copy arguments and receiver to the expression stack.
410 Label loop, entry;
411 __ movp(rcx, rax);
412 __ jmp(&entry);
413 __ bind(&loop);
414 __ Push(Operand(rbx, rcx, times_pointer_size, 0));
415 __ bind(&entry);
416 __ decp(rcx);
417 __ j(greater_equal, &loop);
418
419 // Call the function.
420 if (is_api_function) {
421 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
422 Handle<Code> code =
423 masm->isolate()->builtins()->HandleApiCallConstruct();
424 __ Call(code, RelocInfo::CODE_TARGET);
425 } else {
426 ParameterCount actual(rax);
427 __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
428 }
429
430 // Store offset of return address for deoptimizer.
431 if (!is_api_function) {
432 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
433 }
434
435 // Restore context from the frame.
436 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
437
438 // If the result is an object (in the ECMA sense), we should get rid
439 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
440 // on page 74.
441 Label use_receiver, exit;
442 // If the result is a smi, it is *not* an object in the ECMA sense.
443 __ JumpIfSmi(rax, &use_receiver);
444
445 // If the type of the result (stored in its map) is less than
446 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
447 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
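// Since LAST_SPEC_OBJECT_TYPE is the last instance type, a single unsigned
// comparison against FIRST_SPEC_OBJECT_TYPE suffices.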
448 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
449 __ j(above_equal, &exit);
450
451 // Throw away the result of the constructor invocation and use the
452 // on-stack receiver as the result.
453 __ bind(&use_receiver);
454 __ movp(rax, Operand(rsp, 0));
455
456 // Restore the arguments count and leave the construct frame.
457 __ bind(&exit);
458 __ movp(rbx, Operand(rsp, kPointerSize)); // Get arguments count.
459
460 // Leave construct frame.
461 }
462
463 // Remove caller arguments from the stack and return.
464 __ PopReturnAddressTo(rcx);
465 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
466 __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
467 __ PushReturnAddressFrom(rcx);
468 Counters* counters = masm->isolate()->counters();
469 __ IncrementCounter(counters->constructed_objects(), 1);
470 __ ret(0);
471 }
472
473
474 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
475 Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
476 }
477
478
479 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
480 Generate_JSConstructStubHelper(masm, true, false);
481 }
482
483
484 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
485 bool is_construct) {
486 ProfileEntryHookStub::MaybeCallEntryHook(masm);
487
488 // Expects five C++ function parameters.
489 // - Address entry (ignored)
490 // - JSFunction* function
491 // - Object* receiver
492 // - int argc
493 // - Object*** argv
494 // (see Handle::Invoke in execution.cc).
495
496 // Open a C++ scope for the FrameScope.
497 {
498 // Platform specific argument handling. After this, the stack contains
499 // an internal frame and the pushed function and receiver, and
500 // registers rax and rbx hold the argument count and argument array,
501 // while rdi holds the function pointer and rsi the context.
502
503 #ifdef _WIN64
504 // MSVC parameters in:
505 // rcx : entry (ignored)
506 // rdx : function
507 // r8 : receiver
508 // r9 : argc
509 // [rsp+0x20] : argv
510
511 // Clear the context before we push it when entering the internal frame.
512 __ Set(rsi, 0);
513 // Enter an internal frame.
514 FrameScope scope(masm, StackFrame::INTERNAL);
515
516 // Load the function context into rsi.
517 __ movp(rsi, FieldOperand(rdx, JSFunction::kContextOffset));
518
519 // Push the function and the receiver onto the stack.
520 __ Push(rdx);
521 __ Push(r8);
522
523 // Load the number of arguments and set up the pointer to the arguments.
524 __ movp(rax, r9);
525 // Load the previous frame pointer to access C argument on stack
526 __ movp(kScratchRegister, Operand(rbp, 0));
527 __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
528 // Load the function pointer into rdi.
529 __ movp(rdi, rdx);
530 #else // _WIN64
531 // GCC parameters in:
532 // rdi : entry (ignored)
533 // rsi : function
534 // rdx : receiver
535 // rcx : argc
536 // r8 : argv
537
538 __ movp(rdi, rsi);
539 // rdi : function
540
541 // Clear the context before we push it when entering the internal frame.
542 __ Set(rsi, 0);
543 // Enter an internal frame.
544 FrameScope scope(masm, StackFrame::INTERNAL);
545
546 // Push the function and receiver and set up the context.
547 __ Push(rdi);
548 __ Push(rdx);
549 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
550
551 // Load the number of arguments and set up the pointer to the arguments.
552 __ movp(rax, rcx);
553 __ movp(rbx, r8);
554 #endif // _WIN64
555
556 // Current stack contents:
557 // [rsp + 2 * kPointerSize ... ] : Internal frame
558 // [rsp + kPointerSize] : function
559 // [rsp] : receiver
560 // Current register contents:
561 // rax : argc
562 // rbx : argv
563 // rsi : context
564 // rdi : function
565
566 // Copy arguments to the stack in a loop.
567 // Register rbx points to array of pointers to handle locations.
568 // Push the values of these handles.
569 Label loop, entry;
570 __ Set(rcx, 0); // Set loop variable to 0.
571 __ jmp(&entry);
572 __ bind(&loop);
573 __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
574 __ Push(Operand(kScratchRegister, 0)); // dereference handle
575 __ addp(rcx, Immediate(1));
576 __ bind(&entry);
577 __ cmpp(rcx, rax);
578 __ j(not_equal, &loop);
579
580 // Invoke the code.
581 if (is_construct) {
582 // No type feedback cell is available
583 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
584 // Expects rdi to hold function pointer.
585 CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
586 __ CallStub(&stub);
587 } else {
588 ParameterCount actual(rax);
589 // Function must be in rdi.
590 __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
591 }
592 // Exit the internal frame. Notice that this also removes the empty
593 // context and the function left on the stack by the code
594 // invocation.
595 }
596
597 // TODO(X64): Is argument correct? Is there a receiver to remove?
598 __ ret(1 * kPointerSize); // Remove receiver.
599 }
600
601
602 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
603 Generate_JSEntryTrampolineHelper(masm, false);
604 }
605
606
607 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
608 Generate_JSEntryTrampolineHelper(masm, true);
609 }
610
611
612 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
613 CallRuntimePassFunction(masm, Runtime::kCompileLazy);
614 GenerateTailCallToReturnedCode(masm);
615 }
616
617
618 static void CallCompileOptimized(MacroAssembler* masm,
619 bool concurrent) {
620 FrameScope scope(masm, StackFrame::INTERNAL);
621 // Push a copy of the function onto the stack.
622 __ Push(rdi);
623 // Function is also the parameter to the runtime call.
624 __ Push(rdi);
625 // Whether to compile in a background thread.
626 __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
627
628 __ CallRuntime(Runtime::kCompileOptimized, 2);
629 // Restore the function.
630 __ Pop(rdi);
631 }
632
633
634 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
635 CallCompileOptimized(masm, false);
636 GenerateTailCallToReturnedCode(masm);
637 }
638
639
640 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
641 CallCompileOptimized(masm, true);
642 GenerateTailCallToReturnedCode(masm);
643 }
644
645
646 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
647 // For now, we are relying on the fact that make_code_young doesn't do any
648 // garbage collection which allows us to save/restore the registers without
649 // worrying about which of them contain pointers. We also don't build an
650 // internal frame to make the code faster, since we shouldn't have to do stack
651 // crawls in MakeCodeYoung. This seems a bit fragile.
652
653 // Re-execute the code that was patched back to the young age when
654 // the stub returns.
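// The return address points just past the 5-byte call that entered this
// stub; back it up so that the call sequence is re-executed once it has
// been made young again.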
655 __ subp(Operand(rsp, 0), Immediate(5));
656 __ Pushad();
657 __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
658 __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
659 { // NOLINT
660 FrameScope scope(masm, StackFrame::MANUAL);
661 __ PrepareCallCFunction(2);
662 __ CallCFunction(
663 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
664 }
665 __ Popad();
666 __ ret(0);
667 }
668
669
670 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
671 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
672 MacroAssembler* masm) { \
673 GenerateMakeCodeYoungAgainCommon(masm); \
674 } \
675 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
676 MacroAssembler* masm) { \
677 GenerateMakeCodeYoungAgainCommon(masm); \
678 }
679 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
680 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
681
682
683 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
684 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
685 // that make_code_young doesn't do any garbage collection which allows us to
686 // save/restore the registers without worrying about which of them contain
687 // pointers.
688 __ Pushad();
689 __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
690 __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
691 __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
692 { // NOLINT
693 FrameScope scope(masm, StackFrame::MANUAL);
694 __ PrepareCallCFunction(2);
695 __ CallCFunction(
696 ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
697 2);
698 }
699 __ Popad();
700
701 // Perform prologue operations usually performed by the young code stub.
702 __ PopReturnAddressTo(kScratchRegister);
703 __ pushq(rbp); // Caller's frame pointer.
704 __ movp(rbp, rsp);
705 __ Push(rsi); // Callee's context.
706 __ Push(rdi); // Callee's JS Function.
707 __ PushReturnAddressFrom(kScratchRegister);
708
709 // Jump to point after the code-age stub.
710 __ ret(0);
711 }
712
713
714 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
715 GenerateMakeCodeYoungAgainCommon(masm);
716 }
717
718
719 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
720 SaveFPRegsMode save_doubles) {
721 // Enter an internal frame.
722 {
723 FrameScope scope(masm, StackFrame::INTERNAL);
724
725 // Preserve registers across notification, this is important for compiled
726 // stubs that tail call the runtime on deopts passing their parameters in
727 // registers.
728 __ Pushad();
729 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
730 __ Popad();
731 // Tear down internal frame.
732 }
733
734 __ DropUnderReturnAddress(1); // Ignore state offset
735 __ ret(0); // Return to IC Miss stub, continuation still on stack.
736 }
737
738
739 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
740 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
741 }
742
743
744 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
745 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
746 }
747
748
749 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
750 Deoptimizer::BailoutType type) {
751 // Enter an internal frame.
752 {
753 FrameScope scope(masm, StackFrame::INTERNAL);
754
755 // Pass the deoptimization type to the runtime system.
756 __ Push(Smi::FromInt(static_cast<int>(type)));
757
758 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
759 // Tear down internal frame.
760 }
761
762 // Get the full codegen state from the stack and untag it.
763 __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));
764
765 // Switch on the state.
766 Label not_no_registers, not_tos_rax;
767 __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS));
768 __ j(not_equal, &not_no_registers, Label::kNear);
769 __ ret(1 * kPointerSize); // Remove state.
770
771 __ bind(&not_no_registers);
772 __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
773 __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::TOS_REG));
774 __ j(not_equal, &not_tos_rax, Label::kNear);
775 __ ret(2 * kPointerSize); // Remove state, rax.
776
777 __ bind(&not_tos_rax);
778 __ Abort(kNoCasesLeft);
779 }
780
781
782 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
783 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
784 }
785
786
787 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
788 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
789 }
790
791
792 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
793 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
794 }
795
796
797 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
798 // Stack Layout:
799 // rsp[0] : Return address
800 // rsp[8] : Argument n
801 // rsp[16] : Argument n-1
802 // ...
803 // rsp[8 * n] : Argument 1
804 // rsp[8 * (n + 1)] : Receiver (function to call)
805 //
806 // rax contains the number of arguments, n, not counting the receiver.
807 //
808 // 1. Make sure we have at least one argument.
809 { Label done;
810 __ testp(rax, rax);
811 __ j(not_zero, &done);
812 __ PopReturnAddressTo(rbx);
813 __ Push(masm->isolate()->factory()->undefined_value());
814 __ PushReturnAddressFrom(rbx);
815 __ incp(rax);
816 __ bind(&done);
817 }
818
819 // 2. Get the function to call (passed as receiver) from the stack, check
820 // if it is a function.
821 Label slow, non_function;
822 StackArgumentsAccessor args(rsp, rax);
823 __ movp(rdi, args.GetReceiverOperand());
824 __ JumpIfSmi(rdi, &non_function);
825 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
826 __ j(not_equal, &slow);
827
828 // 3a. Patch the first argument if necessary when calling a function.
829 Label shift_arguments;
830 __ Set(rdx, 0); // indicate regular JS_FUNCTION
831 { Label convert_to_object, use_global_proxy, patch_receiver;
832 // Change context eagerly in case we need the global receiver.
833 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
834
835 // Do not transform the receiver for strict mode functions.
836 __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
837 __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset),
838 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
839 __ j(not_equal, &shift_arguments);
840
841 // Do not transform the receiver for natives.
842 // SharedFunctionInfo is already loaded into rbx.
843 __ testb(FieldOperand(rbx, SharedFunctionInfo::kNativeByteOffset),
844 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
845 __ j(not_zero, &shift_arguments);
846
847 // Compute the receiver in sloppy mode.
848 __ movp(rbx, args.GetArgumentOperand(1));
849 __ JumpIfSmi(rbx, &convert_to_object, Label::kNear);
850
851 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
852 __ j(equal, &use_global_proxy);
853 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
854 __ j(equal, &use_global_proxy);
855
856 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
857 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
858 __ j(above_equal, &shift_arguments);
859
860 __ bind(&convert_to_object);
861 {
862 // Enter an internal frame in order to preserve argument count.
863 FrameScope scope(masm, StackFrame::INTERNAL);
864 __ Integer32ToSmi(rax, rax);
865 __ Push(rax);
866
867 __ Push(rbx);
868 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
869 __ movp(rbx, rax);
870 __ Set(rdx, 0); // indicate regular JS_FUNCTION
871
872 __ Pop(rax);
873 __ SmiToInteger32(rax, rax);
874 }
875
876 // Restore the function to rdi.
877 __ movp(rdi, args.GetReceiverOperand());
878 __ jmp(&patch_receiver, Label::kNear);
879
880 __ bind(&use_global_proxy);
881 __ movp(rbx,
882 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
883 __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalProxyOffset));
884
885 __ bind(&patch_receiver);
886 __ movp(args.GetArgumentOperand(1), rbx);
887
888 __ jmp(&shift_arguments);
889 }
890
891 // 3b. Check for function proxy.
892 __ bind(&slow);
893 __ Set(rdx, 1); // indicate function proxy
894 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
895 __ j(equal, &shift_arguments);
896 __ bind(&non_function);
897 __ Set(rdx, 2); // indicate non-function
898
899 // 3c. Patch the first argument when calling a non-function. The
900 // CALL_NON_FUNCTION builtin expects the non-function callee as
901 // receiver, so overwrite the first argument which will ultimately
902 // become the receiver.
903 __ movp(args.GetArgumentOperand(1), rdi);
904
905 // 4. Shift arguments and return address one slot down on the stack
906 // (overwriting the original receiver). Adjust argument count to make
907 // the original first argument the new receiver.
908 __ bind(&shift_arguments);
909 { Label loop;
910 __ movp(rcx, rax);
911 StackArgumentsAccessor args(rsp, rcx);
912 __ bind(&loop);
913 __ movp(rbx, args.GetArgumentOperand(1));
914 __ movp(args.GetArgumentOperand(0), rbx);
915 __ decp(rcx);
916 __ j(not_zero, &loop); // While non-zero.
917 __ DropUnderReturnAddress(1, rbx); // Drop one slot under return address.
918 __ decp(rax); // One fewer argument (first argument is new receiver).
919 }
920
921 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
922 // or a function proxy via CALL_FUNCTION_PROXY.
923 { Label function, non_proxy;
924 __ testp(rdx, rdx);
925 __ j(zero, &function);
926 __ Set(rbx, 0);
927 __ cmpp(rdx, Immediate(1));
928 __ j(not_equal, &non_proxy);
929
930 __ PopReturnAddressTo(rdx);
931 __ Push(rdi); // re-add proxy object as additional argument
932 __ PushReturnAddressFrom(rdx);
933 __ incp(rax);
934 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
935 __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
936 RelocInfo::CODE_TARGET);
937
938 __ bind(&non_proxy);
939 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
940 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
941 RelocInfo::CODE_TARGET);
942 __ bind(&function);
943 }
944
945 // 5b. Get the code to call from the function and check that the number of
946 // expected arguments matches what we're providing. If so, jump
947 // (tail-call) to the code in register edx without checking arguments.
948 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
949 __ LoadSharedFunctionInfoSpecialField(rbx, rdx,
950 SharedFunctionInfo::kFormalParameterCountOffset);
951 __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
952 __ cmpp(rax, rbx);
953 __ j(not_equal,
954 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
955 RelocInfo::CODE_TARGET);
956
957 ParameterCount expected(0);
958 __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION, NullCallWrapper());
959 }
960
961
962 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
963 // Stack at entry:
964 // rsp : return address
965 // rsp[8] : arguments
966 // rsp[16] : receiver ("this")
967 // rsp[24] : function
968 {
969 FrameScope frame_scope(masm, StackFrame::INTERNAL);
970 // Stack frame:
971 // rbp : Old base pointer
972 // rbp[8] : return address
973 // rbp[16] : function arguments
974 // rbp[24] : receiver
975 // rbp[32] : function
976 static const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
977 static const int kReceiverOffset = kArgumentsOffset + kPointerSize;
978 static const int kFunctionOffset = kReceiverOffset + kPointerSize;
979
980 __ Push(Operand(rbp, kFunctionOffset));
981 __ Push(Operand(rbp, kArgumentsOffset));
982 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
983
984 // Check the stack for overflow. We are not trying to catch
985 // interruptions (e.g. debug break and preemption) here, so the "real stack
986 // limit" is checked.
987 Label okay;
988 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
989 __ movp(rcx, rsp);
990 // Make rcx the space we have left. The stack might already be overflowed
991 // here which will cause rcx to become negative.
992 __ subp(rcx, kScratchRegister);
993 // Make rdx the space we need for the array when it is unrolled onto the
994 // stack.
995 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
996 // Check if the arguments will overflow the stack.
997 __ cmpp(rcx, rdx);
998 __ j(greater, &okay); // Signed comparison.
999
1000 // Out of stack space.
1001 __ Push(Operand(rbp, kFunctionOffset));
1002 __ Push(rax);
1003 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
1004 __ bind(&okay);
1005 // End of stack check.
1006
1007 // Push current index and limit.
1008 const int kLimitOffset =
1009 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
1010 const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
1011 __ Push(rax); // limit
1012 __ Push(Immediate(0)); // index
1013
1014 // Get the receiver.
1015 __ movp(rbx, Operand(rbp, kReceiverOffset));
1016
1017 // Check that the function is a JS function (otherwise it must be a proxy).
1018 Label push_receiver;
1019 __ movp(rdi, Operand(rbp, kFunctionOffset));
1020 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
1021 __ j(not_equal, &push_receiver);
1022
1023 // Change context eagerly to get the right global object if necessary.
1024 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
1025
1026 // Do not transform the receiver for strict mode functions.
1027 Label call_to_object, use_global_proxy;
1028 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
1029 __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
1030 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
1031 __ j(not_equal, &push_receiver);
1032
1033 // Do not transform the receiver for natives.
1034 __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
1035 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
1036 __ j(not_equal, &push_receiver);
1037
1038 // Compute the receiver in sloppy mode.
1039 __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
1040 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
1041 __ j(equal, &use_global_proxy);
1042 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1043 __ j(equal, &use_global_proxy);
1044
1045 // If given receiver is already a JavaScript object then there's no
1046 // reason for converting it.
1047 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1048 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
1049 __ j(above_equal, &push_receiver);
1050
1051 // Convert the receiver to an object.
1052 __ bind(&call_to_object);
1053 __ Push(rbx);
1054 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1055 __ movp(rbx, rax);
1056 __ jmp(&push_receiver, Label::kNear);
1057
1058 __ bind(&use_global_proxy);
1059 __ movp(rbx,
1060 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
1061 __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalProxyOffset));
1062
1063 // Push the receiver.
1064 __ bind(&push_receiver);
1065 __ Push(rbx);
1066
1067 // Copy all arguments from the array to the stack.
1068 Label entry, loop;
1069 Register receiver = LoadDescriptor::ReceiverRegister();
1070 Register key = LoadDescriptor::NameRegister();
1071 __ movp(key, Operand(rbp, kIndexOffset));
1072 __ jmp(&entry);
1073 __ bind(&loop);
1074 __ movp(receiver, Operand(rbp, kArgumentsOffset)); // load arguments
1075
1076 // Use inline caching to speed up access to arguments.
1077 if (FLAG_vector_ics) {
1078 __ Move(VectorLoadICDescriptor::SlotRegister(), Smi::FromInt(0));
1079 }
1080 Handle<Code> ic = CodeFactory::KeyedLoadIC(masm->isolate()).code();
1081 __ Call(ic, RelocInfo::CODE_TARGET);
1082 // It is important that we do not have a test instruction after the
1083 // call. A test instruction after the call is used to indicate that
1084 // we have generated an inline version of the keyed load. In this
1085 // case, we know that we are not generating a test instruction next.
1086
1087 // Push the nth argument.
1088 __ Push(rax);
1089
1090 // Update the index on the stack and in register key.
1091 __ movp(key, Operand(rbp, kIndexOffset));
1092 __ SmiAddConstant(key, key, Smi::FromInt(1));
1093 __ movp(Operand(rbp, kIndexOffset), key);
1094
1095 __ bind(&entry);
1096 __ cmpp(key, Operand(rbp, kLimitOffset));
1097 __ j(not_equal, &loop);
1098
1099 // Call the function.
1100 Label call_proxy;
1101 ParameterCount actual(rax);
1102 __ SmiToInteger32(rax, key);
1103 __ movp(rdi, Operand(rbp, kFunctionOffset));
1104 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
1105 __ j(not_equal, &call_proxy);
1106 __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
1107
1108 frame_scope.GenerateLeaveFrame();
1109 __ ret(3 * kPointerSize); // remove this, receiver, and arguments
1110
1111 // Call the function proxy.
1112 __ bind(&call_proxy);
1113 __ Push(rdi); // add function proxy as last argument
1114 __ incp(rax);
1115 __ Set(rbx, 0);
1116 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
1117 __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1118 RelocInfo::CODE_TARGET);
1119
1120 // Leave internal frame.
1121 }
1122 __ ret(3 * kPointerSize); // remove this, receiver, and arguments
1123 }
1124
1125
1126 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
1127 // ----------- S t a t e -------------
1128 // -- rax : argc
1129 // -- rsp[0] : return address
1130 // -- rsp[8] : last argument
1131 // -----------------------------------
1132 Label generic_array_code;
1133
1134 // Get the InternalArray function.
1135 __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);
1136
1137 if (FLAG_debug_code) {
1138 // Initial map for the builtin InternalArray functions should be maps.
1139 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1140 // Will both indicate a NULL and a Smi.
1141 STATIC_ASSERT(kSmiTag == 0);
1142 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1143 __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
1144 __ CmpObjectType(rbx, MAP_TYPE, rcx);
1145 __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
1146 }
1147
1148 // Run the native code for the InternalArray function called as a normal
1149 // function.
1150 // tail call a stub
1151 InternalArrayConstructorStub stub(masm->isolate());
1152 __ TailCallStub(&stub);
1153 }
1154
1155
1156 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
1157 // ----------- S t a t e -------------
1158 // -- rax : argc
1159 // -- rsp[0] : return address
1160 // -- rsp[8] : last argument
1161 // -----------------------------------
1162 Label generic_array_code;
1163
1164 // Get the Array function.
1165 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);
1166
1167 if (FLAG_debug_code) {
1168 // Initial map for the builtin Array functions should be maps.
1169 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1170 // Will both indicate a NULL and a Smi.
1171 STATIC_ASSERT(kSmiTag == 0);
1172 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1173 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
1174 __ CmpObjectType(rbx, MAP_TYPE, rcx);
1175 __ Check(equal, kUnexpectedInitialMapForArrayFunction);
1176 }
1177
1178 // Run the native code for the Array function called as a normal function.
1179 // tail call a stub
1180 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
1181 ArrayConstructorStub stub(masm->isolate());
1182 __ TailCallStub(&stub);
1183 }
1184
1185
1186 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
1187 // ----------- S t a t e -------------
1188 // -- rax : number of arguments
1189 // -- rdi : constructor function
1190 // -- rsp[0] : return address
1191 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1192 // -- rsp[(argc + 1) * 8] : receiver
1193 // -----------------------------------
1194 Counters* counters = masm->isolate()->counters();
1195 __ IncrementCounter(counters->string_ctor_calls(), 1);
1196
1197 if (FLAG_debug_code) {
1198 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
1199 __ cmpp(rdi, rcx);
1200 __ Assert(equal, kUnexpectedStringFunction);
1201 }
1202
1203 // Load the first argument into rax and get rid of the rest
1204 // (including the receiver).
1205 StackArgumentsAccessor args(rsp, rax);
1206 Label no_arguments;
1207 __ testp(rax, rax);
1208 __ j(zero, &no_arguments);
1209 __ movp(rbx, args.GetArgumentOperand(1));
1210 __ PopReturnAddressTo(rcx);
1211 __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1212 __ PushReturnAddressFrom(rcx);
1213 __ movp(rax, rbx);
1214
1215 // Lookup the argument in the number to string cache.
1216 Label not_cached, argument_is_string;
1217 __ LookupNumberStringCache(rax, // Input.
1218 rbx, // Result.
1219 rcx, // Scratch 1.
1220 rdx, // Scratch 2.
1221 &not_cached);
1222 __ IncrementCounter(counters->string_ctor_cached_number(), 1);
1223 __ bind(&argument_is_string);
1224
1225 // ----------- S t a t e -------------
1226 // -- rbx : argument converted to string
1227 // -- rdi : constructor function
1228 // -- rsp[0] : return address
1229 // -----------------------------------
1230
1231 // Allocate a JSValue and put the tagged pointer into rax.
1232 Label gc_required;
1233 __ Allocate(JSValue::kSize,
1234 rax, // Result.
1235 rcx, // New allocation top (we ignore it).
1236 no_reg,
1237 &gc_required,
1238 TAG_OBJECT);
1239
1240 // Set the map.
1241 __ LoadGlobalFunctionInitialMap(rdi, rcx);
1242 if (FLAG_debug_code) {
1243 __ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
1244 Immediate(JSValue::kSize >> kPointerSizeLog2));
1245 __ Assert(equal, kUnexpectedStringWrapperInstanceSize);
1246 __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
1247 __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
1248 }
1249 __ movp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
1250
1251 // Set properties and elements.
1252 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
1253 __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rcx);
1254 __ movp(FieldOperand(rax, JSObject::kElementsOffset), rcx);
1255
1256 // Set the value.
1257 __ movp(FieldOperand(rax, JSValue::kValueOffset), rbx);
1258
1259 // Ensure the object is fully initialized.
1260 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
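// Map, properties, elements and value account for all four pointer-size
// words of the JSValue, so there is nothing left to initialize.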
1261
1262 // We're done. Return.
1263 __ ret(0);
1264
1265 // The argument was not found in the number to string cache. Check
1266 // if it's a string already before calling the conversion builtin.
1267 Label convert_argument;
1268 __ bind(&not_cached);
1269 STATIC_ASSERT(kSmiTag == 0);
1270 __ JumpIfSmi(rax, &convert_argument);
1271 Condition is_string = masm->IsObjectStringType(rax, rbx, rcx);
1272 __ j(NegateCondition(is_string), &convert_argument);
1273 __ movp(rbx, rax);
1274 __ IncrementCounter(counters->string_ctor_string_value(), 1);
1275 __ jmp(&argument_is_string);
1276
1277 // Invoke the conversion builtin and put the result into rbx.
1278 __ bind(&convert_argument);
1279 __ IncrementCounter(counters->string_ctor_conversions(), 1);
1280 {
1281 FrameScope scope(masm, StackFrame::INTERNAL);
1282 __ Push(rdi); // Preserve the function.
1283 __ Push(rax);
1284 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
1285 __ Pop(rdi);
1286 }
1287 __ movp(rbx, rax);
1288 __ jmp(&argument_is_string);
1289
1290 // Load the empty string into rbx, remove the receiver from the
1291 // stack, and jump back to the case where the argument is a string.
1292 __ bind(&no_arguments);
1293 __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
1294 __ PopReturnAddressTo(rcx);
1295 __ leap(rsp, Operand(rsp, kPointerSize));
1296 __ PushReturnAddressFrom(rcx);
1297 __ jmp(&argument_is_string);
1298
1299 // At this point the argument is already a string. Call runtime to
1300 // create a string wrapper.
1301 __ bind(&gc_required);
1302 __ IncrementCounter(counters->string_ctor_gc_required(), 1);
1303 {
1304 FrameScope scope(masm, StackFrame::INTERNAL);
1305 __ Push(rbx);
1306 __ CallRuntime(Runtime::kNewStringWrapper, 1);
1307 }
1308 __ ret(0);
1309 }
1310
1311
1312 static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
1313 Label* stack_overflow) {
1314 // ----------- S t a t e -------------
1315 // -- rax : actual number of arguments
1316 // -- rbx : expected number of arguments
1317 // -- rdi: function (passed through to callee)
1318 // -----------------------------------
1319 // Check the stack for overflow. We are not trying to catch
1320 // interruptions (e.g. debug break and preemption) here, so the "real stack
1321 // limit" is checked.
1322 Label okay;
1323 __ LoadRoot(rdx, Heap::kRealStackLimitRootIndex);
1324 __ movp(rcx, rsp);
1325 // Make rcx the space we have left. The stack might already be overflowed
1326 // here which will cause rcx to become negative.
1327 __ subp(rcx, rdx);
1328 // Make rdx the space we need for the array when it is unrolled onto the
1329 // stack.
1330 __ movp(rdx, rbx);
1331 __ shlp(rdx, Immediate(kPointerSizeLog2));
1332 // Check if the arguments will overflow the stack.
1333 __ cmpp(rcx, rdx);
1334 __ j(less_equal, stack_overflow); // Signed comparison.
1335 }
1336
1337
1338 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1339 __ pushq(rbp);
1340 __ movp(rbp, rsp);
1341
1342 // Store the arguments adaptor context sentinel.
1343 __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1344
1345 // Push the function on the stack.
1346 __ Push(rdi);
1347
1348 // Preserve the number of arguments on the stack. Must preserve rax,
1349 // rbx and rcx because these registers are used when copying the
1350 // arguments and the receiver.
1351 __ Integer32ToSmi(r8, rax);
1352 __ Push(r8);
1353 }
1354
1355
1356 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1357 // Retrieve the number of arguments from the stack. Number is a Smi.
1358 __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1359
1360 // Leave the frame.
1361 __ movp(rsp, rbp);
1362 __ popq(rbp);
1363
1364 // Remove caller arguments from the stack.
1365 __ PopReturnAddressTo(rcx);
1366 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
1367 __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
1368 __ PushReturnAddressFrom(rcx);
1369 }
1370
1371
1372 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1373 // ----------- S t a t e -------------
1374 // -- rax : actual number of arguments
1375 // -- rbx : expected number of arguments
1376 // -- rdi: function (passed through to callee)
1377 // -----------------------------------
1378
1379 Label invoke, dont_adapt_arguments;
1380 Counters* counters = masm->isolate()->counters();
1381 __ IncrementCounter(counters->arguments_adaptors(), 1);
1382
1383 Label stack_overflow;
1384 ArgumentsAdaptorStackCheck(masm, &stack_overflow);
1385
1386 Label enough, too_few;
1387 __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
1388 __ cmpp(rax, rbx);
1389 __ j(less, &too_few);
1390 __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1391 __ j(equal, &dont_adapt_arguments);
1392
1393 { // Enough parameters: Actual >= expected.
1394 __ bind(&enough);
1395 EnterArgumentsAdaptorFrame(masm);
1396
1397 // Copy receiver and all expected arguments.
1398 const int offset = StandardFrameConstants::kCallerSPOffset;
1399 __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
1400 __ Set(r8, -1); // account for receiver
1401
1402 Label copy;
1403 __ bind(&copy);
1404 __ incp(r8);
1405 __ Push(Operand(rax, 0));
1406 __ subp(rax, Immediate(kPointerSize));
1407 __ cmpp(r8, rbx);
1408 __ j(less, &copy);
1409 __ jmp(&invoke);
1410 }
1411
1412 { // Too few parameters: Actual < expected.
1413 __ bind(&too_few);
1414 EnterArgumentsAdaptorFrame(masm);
1415
1416 // Copy receiver and all actual arguments.
1417 const int offset = StandardFrameConstants::kCallerSPOffset;
1418 __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
1419 __ Set(r8, -1); // account for receiver
1420
1421 Label copy;
1422 __ bind(&copy);
1423 __ incp(r8);
1424 __ Push(Operand(rdi, 0));
1425 __ subp(rdi, Immediate(kPointerSize));
1426 __ cmpp(r8, rax);
1427 __ j(less, &copy);
1428
1429 // Fill remaining expected arguments with undefined values.
1430 Label fill;
1431 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
1432 __ bind(&fill);
1433 __ incp(r8);
1434 __ Push(kScratchRegister);
1435 __ cmpp(r8, rbx);
1436 __ j(less, &fill);
1437
1438 // Restore function pointer.
1439 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1440 }
1441
1442 // Call the entry point.
1443 __ bind(&invoke);
1444 __ call(rdx);
1445
1446 // Store offset of return address for deoptimizer.
1447 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1448
1449 // Leave frame and return.
1450 LeaveArgumentsAdaptorFrame(masm);
1451 __ ret(0);
1452
1453 // -------------------------------------------
1454 // Don't adapt arguments.
1455 // -------------------------------------------
1456 __ bind(&dont_adapt_arguments);
1457 __ jmp(rdx);
1458
1459 __ bind(&stack_overflow);
1460 {
1461 FrameScope frame(masm, StackFrame::MANUAL);
1462 EnterArgumentsAdaptorFrame(masm);
1463 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
1464 __ int3();
1465 }
1466 }
1467
1468
1469 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1470 // Lookup the function in the JavaScript frame.
1471 __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1472 {
1473 FrameScope scope(masm, StackFrame::INTERNAL);
1474 // Pass function as argument.
1475 __ Push(rax);
1476 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1477 }
1478
1479 Label skip;
1480 // If the code object is null, just return to the unoptimized code.
1481 __ cmpp(rax, Immediate(0));
1482 __ j(not_equal, &skip, Label::kNear);
1483 __ ret(0);
1484
1485 __ bind(&skip);
1486
1487 // Load deoptimization data from the code object.
1488 __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));
1489
1490 // Load the OSR entrypoint offset from the deoptimization data.
1491 __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
1492 DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
1493
1494 // Compute the target address = code_obj + header_size + osr_offset
1495 __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));
1496
1497 // Overwrite the return address on the stack.
1498 __ movq(StackOperandForReturnAddress(0), rax);
1499
1500 // And "return" to the OSR entry point of the function.
1501 __ ret(0);
1502 }
1503
1504
1505 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
1506 // We check the stack limit as indicator that recompilation might be done.
1507 Label ok;
1508 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
1509 __ j(above_equal, &ok);
1510 {
1511 FrameScope scope(masm, StackFrame::INTERNAL);
1512 __ CallRuntime(Runtime::kStackGuard, 0);
1513 }
1514 __ jmp(masm->isolate()->builtins()->OnStackReplacement(),
1515 RelocInfo::CODE_TARGET);
1516
1517 __ bind(&ok);
1518 __ ret(0);
1519 }
1520
1521
1522 #undef __
1523
1524 } } // namespace v8::internal
1525
1526 #endif // V8_TARGET_ARCH_X64
1527