1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_IA32
6
7 #include "src/code-factory.h"
8 #include "src/codegen.h"
9 #include "src/deoptimizer.h"
10 #include "src/full-codegen/full-codegen.h"
11 #include "src/ia32/frames-ia32.h"
12
13 namespace v8 {
14 namespace internal {
15
16 #define __ ACCESS_MASM(masm)
17
// Tail-calls the C++ builtin at |address|. Three extra arguments — the
// smi-tagged argument count, the target function and new.target — are
// inserted below the return address for the builtin frame to consume.
void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
  //  -- eax                : number of arguments excluding receiver
  //  -- edi                : target
  //  -- edx                : new.target
  //  -- esp[0]             : return address
  //  -- esp[4]             : last argument
  //  -- ...
  //  -- esp[4 * argc]      : first argument
  //  -- esp[4 * (argc +1)] : receiver
  // -----------------------------------
  __ AssertFunction(edi);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // JumpToExternalReference expects eax to contain the number of arguments
  // including the receiver and the extra arguments.
  const int num_extra_args = 3;
  __ add(eax, Immediate(num_extra_args + 1));

  // Insert extra arguments right below the return address.
  __ PopReturnAddressTo(ecx);
  __ SmiTag(eax);
  __ Push(eax);
  __ SmiUntag(eax);  // eax keeps the untagged total count for the builtin.
  __ Push(edi);
  __ Push(edx);
  __ PushReturnAddressFrom(ecx);

  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
                             exit_frame_type == BUILTIN_EXIT);
}
55
// Calls the runtime function |function_id| (which takes the target function
// as its single argument and returns a Code object) inside an internal
// frame, preserving eax/edx/edi across the call, then tail-calls the
// returned code.
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- eax : argument count (preserved for callee)
  //  -- edx : new target (preserved for callee)
  //  -- edi : target function (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee. Smi-tagged so the GC can
    // walk the frame safely.
    __ SmiTag(eax);
    __ push(eax);
    // Push a copy of the target function and the new target.
    __ push(edi);
    __ push(edx);
    // Function is also the parameter to the runtime call.
    __ push(edi);

    __ CallRuntime(function_id, 1);
    // The runtime call returns the Code object in eax; stash it in ebx so we
    // can restore the caller-expected registers below.
    __ mov(ebx, eax);

    // Restore target function and new target.
    __ pop(edx);
    __ pop(edi);
    __ pop(eax);
    __ SmiUntag(eax);
  }

  // Tail-call the returned code, skipping the Code object header.
  __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize));
  __ jmp(ebx);
}
87
// Tail-calls the code currently attached to edi's SharedFunctionInfo.
// Clobbers ebx.
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kCodeOffset));
  __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize));
  __ jmp(ebx);
}
94
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  ExternalReference stack_limit =
      ExternalReference::address_of_stack_limit(masm->isolate());
  __ cmp(esp, Operand::StaticVariable(stack_limit));
  __ j(above_equal, &ok, Label::kNear);

  // Close to the stack limit: go through the runtime, which tail-calls the
  // (possibly newly installed) code it returns.
  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  // Otherwise just run the function's current shared code.
  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
112
113 namespace {
114
// Shared body for the [[Construct]] stubs. Behavior is parameterized:
//  - is_api_function: the constructor is an API function (no deopt pc
//    recorded for it).
//  - create_implicit_receiver: allocate the receiver object before invoking
//    the constructor (ordinary, non-derived constructors).
//  - check_derived_construct: throw if the (derived) constructor returned a
//    smi instead of an object/undefined.
void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
                                    bool create_implicit_receiver,
                                    bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- eax: number of arguments
  //  -- esi: context
  //  -- edi: constructor function
  //  -- edx: new target
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ SmiTag(eax);
    __ push(esi);
    __ push(eax);

    if (create_implicit_receiver) {
      // Allocate the new receiver object.
      __ Push(edi);
      __ Push(edx);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mov(ebx, eax);
      __ Pop(edx);
      __ Pop(edi);

      // ----------- S t a t e -------------
      //  -- edi: constructor function
      //  -- ebx: newly allocated object
      //  -- edx: new target
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ mov(eax, Operand(esp, 0));
    }

    __ SmiUntag(eax);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ push(ebx);
      __ push(ebx);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ lea(ebx, Operand(ebp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack, last to first,
    // ecx counting down from argc to 0.
    Label loop, entry;
    __ mov(ecx, eax);
    __ jmp(&entry);
    __ bind(&loop);
    __ push(Operand(ebx, ecx, times_4, 0));
    __ bind(&entry);
    __ dec(ecx);
    __ j(greater_equal, &loop);

    // Call the function.
    ParameterCount actual(eax);
    __ InvokeFunction(edi, edx, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      __ JumpIfSmi(eax, &use_receiver, Label::kNear);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
      __ j(above_equal, &exit, Label::kNear);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ mov(eax, Operand(esp, 0));

      // Restore the arguments count and leave the construct frame. The
      // arguments count is stored below the receiver.
      __ bind(&exit);
      __ mov(ebx, Operand(esp, 1 * kPointerSize));
    } else {
      __ mov(ebx, Operand(esp, 0));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(eax, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  // Remove caller arguments from the stack and return. ebx holds the
  // smi-tagged argc, so times_2 scales it to argc * kPointerSize bytes.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ pop(ecx);
  __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize));  // 1 ~ receiver
  __ push(ecx);
  if (create_implicit_receiver) {
    __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
  }
  __ ret(0);
}
244
245 } // namespace
246
// Construct stub for ordinary constructors: allocates the implicit receiver.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, /* is_api_function */ false,
                                 /* create_implicit_receiver */ true,
                                 /* check_derived_construct */ false);
}
250
// Construct stub for API functions.
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, /* is_api_function */ true,
                                 /* create_implicit_receiver */ false,
                                 /* check_derived_construct */ false);
}
254
// Construct stub for builtins: no implicit receiver is created.
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, /* is_api_function */ false,
                                 /* create_implicit_receiver */ false,
                                 /* check_derived_construct */ false);
}
258
// Construct stub for derived-class constructors: no implicit receiver, but
// the result is checked per ES6 9.2.2 step 13.
void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, /* is_api_function */ false,
                                 /* create_implicit_receiver */ false,
                                 /* check_derived_construct */ true);
}
263
// Throws a TypeError when something that is not a constructor (in edi) is
// [[Construct]]'ed. Never returns.
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ push(edi);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
269
// Tells Generate_CheckStackOverflow whether eax holds a smi-tagged or an
// untagged item count.
enum IsTagged { kEaxIsSmiTagged, kEaxIsUntaggedInt };

// Clobbers ecx, edx, edi; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm,
                                        IsTagged eax_is_tagged) {
  // eax : the number of items to be pushed to the stack
  //
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  ExternalReference real_stack_limit =
      ExternalReference::address_of_real_stack_limit(masm->isolate());
  __ mov(edi, Operand::StaticVariable(real_stack_limit));
  // Make ecx the space we have left. The stack might already be overflowed
  // here which will cause ecx to become negative.
  __ mov(ecx, esp);
  __ sub(ecx, edi);
  // Make edx the space we need for the array when it is unrolled onto the
  // stack. A smi is already shifted left by one bit, so the scale factor
  // is reduced accordingly.
  __ mov(edx, eax);
  int smi_tag = eax_is_tagged == kEaxIsSmiTagged ? kSmiTagSize : 0;
  __ shl(edx, kPointerSizeLog2 - smi_tag);
  // Check if the arguments will overflow the stack.
  __ cmp(ecx, edx);
  __ j(greater, &okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
302
// Entry trampoline from C++ into JS: sets up the context, copies the C-level
// argv (an array of handles) onto the JS stack, and invokes either Call or
// Construct depending on |is_construct|.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(esi, Operand::StaticVariable(context_address));

    // Load the previous frame pointer (ebx) to access C arguments
    __ mov(ebx, Operand(ebp, 0));

    // Push the function and the receiver onto the stack.
    __ push(Operand(ebx, EntryFrameConstants::kFunctionArgOffset));
    __ push(Operand(ebx, EntryFrameConstants::kReceiverArgOffset));

    // Load the number of arguments and setup pointer to the arguments.
    __ mov(eax, Operand(ebx, EntryFrameConstants::kArgcOffset));
    __ mov(ebx, Operand(ebx, EntryFrameConstants::kArgvOffset));

    // Check if we have enough stack space to push all arguments.
    // Expects argument count in eax. Clobbers ecx, edx, edi.
    Generate_CheckStackOverflow(masm, kEaxIsUntaggedInt);

    // Copy arguments to the stack in a loop. ecx counts up from 0 to argc.
    Label loop, entry;
    __ Move(ecx, Immediate(0));
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ mov(edx, Operand(ebx, ecx, times_4, 0));  // push parameter from argv
    __ push(Operand(edx, 0));                    // dereference handle
    __ inc(ecx);
    __ bind(&entry);
    __ cmp(ecx, eax);
    __ j(not_equal, &loop);

    // Load the previous frame pointer (ebx) to access C arguments
    __ mov(ebx, Operand(ebp, 0));

    // Get the new.target and function from the frame.
    __ mov(edx, Operand(ebx, EntryFrameConstants::kNewTargetArgOffset));
    __ mov(edi, Operand(ebx, EntryFrameConstants::kFunctionArgOffset));

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }
  __ ret(kPointerSize);  // Remove receiver.
}
361
// Entry trampoline for ordinary calls from C++ into JS.
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, /* is_construct */ false);
}
365
// Entry trampoline for [[Construct]] calls from C++ into JS.
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, /* is_construct */ true);
}
369
370 // static
Generate_ResumeGeneratorTrampoline(MacroAssembler * masm)371 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
372 // ----------- S t a t e -------------
373 // -- eax : the value to pass to the generator
374 // -- ebx : the JSGeneratorObject to resume
375 // -- edx : the resume mode (tagged)
376 // -- esp[0] : return address
377 // -----------------------------------
378 __ AssertGeneratorObject(ebx);
379
380 // Store input value into generator object.
381 __ mov(FieldOperand(ebx, JSGeneratorObject::kInputOrDebugPosOffset), eax);
382 __ RecordWriteField(ebx, JSGeneratorObject::kInputOrDebugPosOffset, eax, ecx,
383 kDontSaveFPRegs);
384
385 // Store resume mode into generator object.
386 __ mov(FieldOperand(ebx, JSGeneratorObject::kResumeModeOffset), edx);
387
388 // Load suspended function and context.
389 __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
390 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
391
392 // Flood function if we are stepping.
393 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
394 Label stepping_prepared;
395 ExternalReference last_step_action =
396 ExternalReference::debug_last_step_action_address(masm->isolate());
397 STATIC_ASSERT(StepFrame > StepIn);
398 __ cmpb(Operand::StaticVariable(last_step_action), Immediate(StepIn));
399 __ j(greater_equal, &prepare_step_in_if_stepping);
400
401 // Flood function if we need to continue stepping in the suspended generator.
402 ExternalReference debug_suspended_generator =
403 ExternalReference::debug_suspended_generator_address(masm->isolate());
404 __ cmp(ebx, Operand::StaticVariable(debug_suspended_generator));
405 __ j(equal, &prepare_step_in_suspended_generator);
406 __ bind(&stepping_prepared);
407
408 // Pop return address.
409 __ PopReturnAddressTo(eax);
410
411 // Push receiver.
412 __ Push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));
413
414 // ----------- S t a t e -------------
415 // -- eax : return address
416 // -- ebx : the JSGeneratorObject to resume
417 // -- edx : the resume mode (tagged)
418 // -- edi : generator function
419 // -- esi : generator context
420 // -- esp[0] : generator receiver
421 // -----------------------------------
422
423 // Push holes for arguments to generator function. Since the parser forced
424 // context allocation for any variables in generators, the actual argument
425 // values have already been copied into the context and these dummy values
426 // will never be used.
427 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
428 __ mov(ecx,
429 FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
430 {
431 Label done_loop, loop;
432 __ bind(&loop);
433 __ sub(ecx, Immediate(Smi::FromInt(1)));
434 __ j(carry, &done_loop, Label::kNear);
435 __ PushRoot(Heap::kTheHoleValueRootIndex);
436 __ jmp(&loop);
437 __ bind(&done_loop);
438 }
439
440 // Dispatch on the kind of generator object.
441 Label old_generator;
442 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
443 __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kFunctionDataOffset));
444 __ CmpObjectType(ecx, BYTECODE_ARRAY_TYPE, ecx);
445 __ j(not_equal, &old_generator);
446
447 // New-style (ignition/turbofan) generator object
448 {
449 __ PushReturnAddressFrom(eax);
450 __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
451 __ mov(eax,
452 FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
453 // We abuse new.target both to indicate that this is a resume call and to
454 // pass in the generator object. In ordinary calls, new.target is always
455 // undefined because generator functions are non-constructable.
456 __ mov(edx, ebx);
457 __ jmp(FieldOperand(edi, JSFunction::kCodeEntryOffset));
458 }
459
460 // Old-style (full-codegen) generator object
461 __ bind(&old_generator);
462 {
463 // Enter a new JavaScript frame, and initialize its slots as they were when
464 // the generator was suspended.
465 FrameScope scope(masm, StackFrame::MANUAL);
466 __ PushReturnAddressFrom(eax); // Return address.
467 __ Push(ebp); // Caller's frame pointer.
468 __ Move(ebp, esp);
469 __ Push(esi); // Callee's context.
470 __ Push(edi); // Callee's JS Function.
471
472 // Restore the operand stack.
473 __ mov(eax, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
474 {
475 Label done_loop, loop;
476 __ Move(ecx, Smi::kZero);
477 __ bind(&loop);
478 __ cmp(ecx, FieldOperand(eax, FixedArray::kLengthOffset));
479 __ j(equal, &done_loop, Label::kNear);
480 __ Push(FieldOperand(eax, ecx, times_half_pointer_size,
481 FixedArray::kHeaderSize));
482 __ add(ecx, Immediate(Smi::FromInt(1)));
483 __ jmp(&loop);
484 __ bind(&done_loop);
485 }
486
487 // Reset operand stack so we don't leak.
488 __ mov(FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset),
489 Immediate(masm->isolate()->factory()->empty_fixed_array()));
490
491 // Resume the generator function at the continuation.
492 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
493 __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
494 __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
495 __ SmiUntag(ecx);
496 __ lea(edx, FieldOperand(edx, ecx, times_1, Code::kHeaderSize));
497 __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
498 Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
499 __ mov(eax, ebx); // Continuation expects generator object in eax.
500 __ jmp(edx);
501 }
502
503 __ bind(&prepare_step_in_if_stepping);
504 {
505 FrameScope scope(masm, StackFrame::INTERNAL);
506 __ Push(ebx);
507 __ Push(edx);
508 __ Push(edi);
509 __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
510 __ Pop(edx);
511 __ Pop(ebx);
512 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
513 }
514 __ jmp(&stepping_prepared);
515
516 __ bind(&prepare_step_in_suspended_generator);
517 {
518 FrameScope scope(masm, StackFrame::INTERNAL);
519 __ Push(ebx);
520 __ Push(edx);
521 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
522 __ Pop(edx);
523 __ Pop(ebx);
524 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
525 }
526 __ jmp(&stepping_prepared);
527 }
528
// Tears down the interpreter frame and removes the receiver + arguments the
// caller pushed. Both scratch registers are clobbered.
static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
                                  Register scratch2) {
  Register args_count = scratch1;
  Register return_pc = scratch2;

  // Get the arguments + receiver count, in bytes, from the on-stack
  // bytecode array's parameter-size field.
  __ mov(args_count,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ mov(args_count,
         FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ leave();

  // Drop receiver + arguments, keeping the return address on top.
  __ pop(return_pc);
  __ add(esp, args_count);
  __ push(return_pc);
}
548
549 // Generate code for entering a JS function with the interpreter.
550 // On entry to the function the receiver and arguments have been pushed on the
551 // stack left to right. The actual argument count matches the formal parameter
552 // count expected by the function.
553 //
554 // The live registers are:
555 // o edi: the JS function object being called
556 // o edx: the new target
557 // o esi: our context
558 // o ebp: the caller's frame pointer
559 // o esp: stack pointer (pointing to return address)
560 //
561 // The function builds an interpreter frame. See InterpreterFrameConstants in
562 // frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS function.
  __ push(edx);  // Callee's new target.

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  Label load_debug_bytecode_array, bytecode_array_loaded;
  __ cmp(FieldOperand(eax, SharedFunctionInfo::kDebugInfoOffset),
         Immediate(DebugInfo::uninitialized()));
  __ j(not_equal, &load_debug_bytecode_array);
  __ mov(kInterpreterBytecodeArrayRegister,
         FieldOperand(eax, SharedFunctionInfo::kFunctionDataOffset));
  __ bind(&bytecode_array_loaded);

  // Check whether we should continue to use the interpreter: the shared
  // code must still be this very trampoline.
  Label switch_to_different_code_kind;
  __ Move(ecx, masm->CodeObject());  // Self-reference to this code.
  __ cmp(ecx, FieldOperand(eax, SharedFunctionInfo::kCodeOffset));
  __ j(not_equal, &switch_to_different_code_kind);

  // Increment invocation count for the function.
  __ EmitLoadTypeFeedbackVector(ecx);
  __ add(FieldOperand(ecx,
                      TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
                          TypeFeedbackVector::kHeaderSize),
         Immediate(Smi::FromInt(1)));

  // Check function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     eax);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Push bytecode array.
  __ push(kInterpreterBytecodeArrayRegister);
  // Push Smi tagged initial bytecode array offset.
  __ push(Immediate(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag)));

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size (in bytes) from the BytecodeArray object.
    __ mov(ebx, FieldOperand(kInterpreterBytecodeArrayRegister,
                             BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ mov(ecx, esp);
    __ sub(ecx, ebx);
    ExternalReference stack_limit =
        ExternalReference::address_of_real_stack_limit(masm->isolate());
    __ cmp(ecx, Operand::StaticVariable(stack_limit));
    __ j(above_equal, &ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ mov(eax, Immediate(masm->isolate()->factory()->undefined_value()));
    __ jmp(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(eax);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ sub(ebx, Immediate(kPointerSize));
    __ j(greater_equal, &loop_header);
  }

  // Load accumulator, bytecode offset and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ mov(kInterpreterBytecodeOffsetRegister,
         Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ mov(kInterpreterDispatchTableRegister,
         Immediate(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function: load the first
  // bytecode, then index the dispatch table with it.
  __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ mov(ebx, Operand(kInterpreterDispatchTableRegister, ebx,
                      times_pointer_size, 0));
  __ call(ebx);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in eax.
  LeaveInterpreterFrame(masm, ebx, ecx);
  __ ret(0);

  // Load debug copy of the bytecode array.
  __ bind(&load_debug_bytecode_array);
  Register debug_info = kInterpreterBytecodeArrayRegister;
  __ mov(debug_info, FieldOperand(eax, SharedFunctionInfo::kDebugInfoOffset));
  __ mov(kInterpreterBytecodeArrayRegister,
         FieldOperand(debug_info, DebugInfo::kDebugBytecodeArrayIndex));
  __ jmp(&bytecode_array_loaded);

  // If the shared code is no longer this entry trampoline, then the underlying
  // function has been switched to a different kind of code and we heal the
  // closure by switching the code entry field over to the new code as well.
  __ bind(&switch_to_different_code_kind);
  __ pop(edx);  // Callee's new target.
  __ pop(edi);  // Callee's JS function.
  __ pop(esi);  // Callee's context.
  __ leave();   // Leave the frame so we can tail call.
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kCodeOffset));
  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
  __ mov(FieldOperand(edi, JSFunction::kCodeEntryOffset), ecx);
  __ RecordWriteCodeEntryField(edi, ecx, ebx);
  __ jmp(ecx);
}
687
// Jumps to |stack_overflow| if pushing |num_args| (optionally +1 for the
// receiver) slots would cross the real stack limit. On fall-through,
// scratch1 holds the required size in bytes (callers may rely on this);
// scratch2 is clobbered.
static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                        Register scratch1, Register scratch2,
                                        Label* stack_overflow,
                                        bool include_receiver = false) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  ExternalReference real_stack_limit =
      ExternalReference::address_of_real_stack_limit(masm->isolate());
  __ mov(scratch1, Operand::StaticVariable(real_stack_limit));
  // Make scratch2 the space we have left. The stack might already be overflowed
  // here which will cause scratch2 to become negative.
  __ mov(scratch2, esp);
  __ sub(scratch2, scratch1);
  // Make scratch1 the space we need for the array when it is unrolled onto the
  // stack.
  __ mov(scratch1, num_args);
  if (include_receiver) {
    __ add(scratch1, Immediate(1));
  }
  __ shl(scratch1, kPointerSizeLog2);
  // Check if the arguments will overflow the stack.
  __ cmp(scratch2, scratch1);
  __ j(less_equal, stack_overflow);  // Signed comparison.
}
713
// Pushes the arguments between start_address (inclusive) and array_limit
// (exclusive) onto the stack, walking downwards in memory so the last
// argument ends up deepest. Modifies start_address.
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register array_limit,
                                         Register start_address) {
  // ----------- S t a t e -------------
  //  -- start_address : Pointer to the last argument in the args array.
  //  -- array_limit : Pointer to one before the first argument in the
  //                   args array.
  // -----------------------------------
  Label loop_header, loop_check;
  __ jmp(&loop_check);
  __ bind(&loop_header);
  __ Push(Operand(start_address, 0));
  __ sub(start_address, Immediate(kPointerSize));
  __ bind(&loop_check);
  __ cmp(start_address, array_limit);
  __ j(greater, &loop_header, Label::kNear);
}
731
732 // static
// static
// Pushes the interpreter's argument array onto the machine stack and
// tail-calls the Call (or CallFunction) builtin.
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode,
    CallableType function_type) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- ebx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  //  -- edi : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;
  // Compute the expected number of arguments.
  __ mov(ecx, eax);
  __ add(ecx, Immediate(1));  // Add one for receiver.

  // Add a stack check before pushing the arguments. We need an extra register
  // to perform a stack check. So push it onto the stack temporarily. This
  // might cause stack overflow, but it will be detected by the check.
  __ Push(edi);
  Generate_StackOverflowCheck(masm, ecx, edx, edi, &stack_overflow);
  __ Pop(edi);

  // Pop return address to allow tail-call after pushing arguments.
  __ Pop(edx);

  // Find the address of the last argument: first arg minus
  // (argc + 1) * kPointerSize.
  __ shl(ecx, kPointerSizeLog2);
  __ neg(ecx);
  __ add(ecx, ebx);
  Generate_InterpreterPushArgs(masm, ecx, ebx);

  // Call the target.
  __ Push(edx);  // Re-push return address.

  if (function_type == CallableType::kJSFunction) {
    __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                      tail_call_mode),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(function_type, CallableType::kAny);
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                              tail_call_mode),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    // Pop the temporary registers, so that return address is on top of stack.
    __ Pop(edi);

    __ TailCallRuntime(Runtime::kThrowStackOverflow);

    // This should be unreachable.
    __ int3();
  }
}
789
790 namespace {
791
792 // This function modifies start_addr, and only reads the contents of num_args
793 // register. scratch1 and scratch2 are used as temporary registers. Their
794 // original values are restored after the use.
// If receiver_in_args is true the receiver was pushed together with the
// arguments, so num_args + 1 values are copied; otherwise the receiver slot
// is zeroed out. num_slots_above_ret_addr is the number of temporary values
// the caller pushed above the return address; they are moved along with it.
Generate_InterpreterPushArgsAndReturnAddress(MacroAssembler * masm,Register num_args,Register start_addr,Register scratch1,Register scratch2,bool receiver_in_args,int num_slots_above_ret_addr,Label * stack_overflow)795 void Generate_InterpreterPushArgsAndReturnAddress(
796     MacroAssembler* masm, Register num_args, Register start_addr,
797     Register scratch1, Register scratch2, bool receiver_in_args,
798     int num_slots_above_ret_addr, Label* stack_overflow) {
799   // We have to move return address and the temporary registers above it
800   // before we can copy arguments onto the stack. To achieve this:
801   // Step 1: Increment the stack pointer by num_args + 1 (for receiver).
802   // Step 2: Move the return address and values above it to the top of stack.
803   // Step 3: Copy the arguments into the correct locations.
804   //  current stack    =====>    required stack layout
805   // |             |            | scratch1      | (2) <-- esp(1)
806   // |             |            | ....          | (2)
807   // |             |            | scratch-n     | (2)
808   // |             |            | return addr   | (2)
809   // |             |            | arg N         | (3)
810   // | scratch1    | <-- esp    | ....          |
811   // | ....        |            | arg 1         |
812   // | scratch-n   |            | arg 0         |
813   // | return addr |            | receiver slot |
814
815   // Check for stack overflow before we increment the stack pointer.
816   Generate_StackOverflowCheck(masm, num_args, scratch1, scratch2,
817                               stack_overflow, true);
818
819   // Step 1 - Update the stack pointer. scratch1 already contains the required
820   // increment to the stack. i.e. num_args + 1 stack slots. This is computed in
821   // the Generate_StackOverflowCheck.
822
823 #ifdef _MSC_VER
824   // TODO(mythria): Move it to macro assembler.
825   // In windows, we cannot increment the stack size by more than one page
826   // (minimum page size is 4KB) without accessing at least one byte on the
827   // page. Check this:
828   // https://msdn.microsoft.com/en-us/library/aa227153(v=vs.60).aspx.
829   const int page_size = 4 * 1024;
830   Label check_offset, update_stack_pointer;
831   __ bind(&check_offset);
832   __ cmp(scratch1, page_size);
833   __ j(less, &update_stack_pointer);
834   __ sub(esp, Immediate(page_size));
835   // Just to touch the page, before we increment further.
836   __ mov(Operand(esp, 0), Immediate(0));
837   __ sub(scratch1, Immediate(page_size));
838   __ jmp(&check_offset);
839   __ bind(&update_stack_pointer);
840 #endif
841
842   __ sub(esp, scratch1);
843
844   // Step 2: move return_address and slots above it to the correct locations.
845   // Move from top to bottom, otherwise we may overwrite when num_args = 0 or 1,
846   // basically when the source and destination overlap. We at least need one
847   // extra slot for receiver, so no extra checks are required to avoid copy.
848   for (int i = 0; i < num_slots_above_ret_addr + 1; i++) {
849     __ mov(scratch1,
850            Operand(esp, num_args, times_pointer_size, (i + 1) * kPointerSize));
851     __ mov(Operand(esp, i * kPointerSize), scratch1);
852   }
853
854   // Step 3 copy arguments to correct locations.
855   if (receiver_in_args) {
856     __ mov(scratch1, num_args);
857     __ add(scratch1, Immediate(1));
858   } else {
859     // Slot meant for receiver contains return address. Reset it so that
860     // we will not incorrectly interpret return address as an object.
861     __ mov(Operand(esp, num_args, times_pointer_size,
862                    (num_slots_above_ret_addr + 1) * kPointerSize),
863            Immediate(0));
864     __ mov(scratch1, num_args);
865   }
866
// Copy the arguments downwards from start_addr into their final stack slots,
// counting scratch1 (the number of values to copy) down to zero.
867   Label loop_header, loop_check;
868   __ jmp(&loop_check);
869   __ bind(&loop_header);
870   __ mov(scratch2, Operand(start_addr, 0));
871   __ mov(Operand(esp, scratch1, times_pointer_size,
872                  num_slots_above_ret_addr * kPointerSize),
873          scratch2);
874   __ sub(start_addr, Immediate(kPointerSize));
875   __ sub(scratch1, Immediate(1));
876   __ bind(&loop_check);
877   __ cmp(scratch1, Immediate(0));
878   __ j(greater, &loop_header, Label::kNear);
879 }
880
881 }  // end anonymous namespace
882
883 // static
Generate_InterpreterPushArgsAndConstructImpl(MacroAssembler * masm,CallableType construct_type)884 void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
885     MacroAssembler* masm, CallableType construct_type) {
886   // ----------- S t a t e -------------
887   //  -- eax : the number of arguments (not including the receiver)
888   //  -- edx : the new target
889   //  -- edi : the constructor
890   //  -- ebx : allocation site feedback (if available or undefined)
891   //  -- ecx : the address of the first argument to be pushed. Subsequent
892   //           arguments should be consecutive above this, in the same order as
893   //           they are to be pushed onto the stack.
894   // -----------------------------------
895   Label stack_overflow;
896   // We need two scratch registers. Push edi and edx onto stack.
897   __ Push(edi);
898   __ Push(edx);
899
900   // Push arguments and move return address to the top of stack.
901   // The eax register is readonly. The ecx register will be modified. The edx
902   // and edi registers will be modified but restored to their original values.
// Two slots (edi and edx, pushed above) sit above the return address, hence
// the num_slots_above_ret_addr argument of 2.
903   Generate_InterpreterPushArgsAndReturnAddress(masm, eax, ecx, edx, edi, false,
904                                                2, &stack_overflow);
905
906   // Restore edi and edx
907   __ Pop(edx);
908   __ Pop(edi);
909
910   __ AssertUndefinedOrAllocationSite(ebx);
911   if (construct_type == CallableType::kJSFunction) {
912     // Tail call to the function-specific construct stub (still in the caller
913     // context at this point).
914     __ AssertFunction(edi);
915
// Load the construct stub's entry point (past the Code header) into ecx and
// jump to it; eax/edx/edi are left untouched for the stub.
916     __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
917     __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
918     __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
919     __ jmp(ecx);
920   } else {
921     DCHECK_EQ(construct_type, CallableType::kAny);
922
923     // Call the constructor with unmodified eax, edi, edx values.
924     __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
925   }
926
927   __ bind(&stack_overflow);
928   {
929     // Pop the temporary registers, so that return address is on top of stack.
930     __ Pop(edx);
931     __ Pop(edi);
932
933     __ TailCallRuntime(Runtime::kThrowStackOverflow);
934
935     // This should be unreachable.
936     __ int3();
937   }
938 }
939
940 // static
Generate_InterpreterPushArgsAndConstructArray(MacroAssembler * masm)941 void Builtins::Generate_InterpreterPushArgsAndConstructArray(
942     MacroAssembler* masm) {
943   // ----------- S t a t e -------------
944   //  -- eax : the number of arguments (not including the receiver)
945   //  -- edx : the target to call checked to be Array function.
946   //  -- ebx : the allocation site feedback
947   //  -- ecx : the address of the first argument to be pushed. Subsequent
948   //           arguments should be consecutive above this, in the same order as
949   //           they are to be pushed onto the stack.
950   // -----------------------------------
951   Label stack_overflow;
952   // We need two scratch registers. Register edi is available, push edx onto
953   // stack.
954   __ Push(edx);
955
956   // Push arguments and move return address to the top of stack.
957   // The eax register is readonly. The ecx register will be modified. The edx
958   // and edi registers will be modified but restored to their original values.
// receiver_in_args is true here: the receiver was pushed together with the
// arguments. One slot (edx, pushed above) sits above the return address.
959   Generate_InterpreterPushArgsAndReturnAddress(masm, eax, ecx, edx, edi, true,
960                                                1, &stack_overflow);
961
962   // Restore edx.
963   __ Pop(edx);
964
965   // Array constructor expects constructor in edi. It is same as edx here.
966   __ Move(edi, edx);
967
968   ArrayConstructorStub stub(masm->isolate());
969   __ TailCallStub(&stub);
970
971   __ bind(&stack_overflow);
972   {
973     // Pop the temporary registers, so that return address is on top of stack.
974     __ Pop(edx);
975
976     __ TailCallRuntime(Runtime::kThrowStackOverflow);
977
978     // This should be unreachable.
979     __ int3();
980   }
981 }
982
Generate_InterpreterEnterBytecode(MacroAssembler * masm)983 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
984   // Set the return address to the correct point in the interpreter entry
985   // trampoline.
// Push (trampoline code start + recorded pc offset) as the return address,
// so code that returns from here lands inside the InterpreterEntryTrampoline.
986   Smi* interpreter_entry_return_pc_offset(
987       masm->isolate()->heap()->interpreter_entry_return_pc_offset());
988   DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
989   __ LoadHeapObject(ebx,
990                     masm->isolate()->builtins()->InterpreterEntryTrampoline());
991   __ add(ebx, Immediate(interpreter_entry_return_pc_offset->value() +
992                         Code::kHeaderSize - kHeapObjectTag));
993   __ push(ebx);
994
995   // Initialize the dispatch table register.
996   __ mov(kInterpreterDispatchTableRegister,
997          Immediate(ExternalReference::interpreter_dispatch_table_address(
998              masm->isolate())));
999
1000   // Get the bytecode array pointer from the frame.
1001   __ mov(kInterpreterBytecodeArrayRegister,
1002          Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1003
1004   if (FLAG_debug_code) {
1005     // Check function data field is actually a BytecodeArray object.
1006     __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
1007     __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
1008                      ebx);
1009     __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1010   }
1011
1012   // Get the target bytecode offset from the frame.
1013   __ mov(kInterpreterBytecodeOffsetRegister,
1014          Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1015   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1016
1017   // Dispatch to the target bytecode.
// Load the current bytecode, index into the dispatch table with it, and jump
// to the corresponding handler.
1018   __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
1019                           kInterpreterBytecodeOffsetRegister, times_1, 0));
1020   __ mov(ebx, Operand(kInterpreterDispatchTableRegister, ebx,
1021                       times_pointer_size, 0));
1022   __ jmp(ebx);
1023 }
1024
Generate_InterpreterEnterBytecodeAdvance(MacroAssembler * masm)1025 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
1026   // Advance the current bytecode offset stored within the given interpreter
1027   // stack frame. This simulates what all bytecode handlers do upon completion
1028   // of the underlying operation.
1029   __ mov(ebx, Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1030   __ mov(edx, Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
// Reload the context register from the frame for the runtime call below.
1031   __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
1032   {
1033     FrameScope scope(masm, StackFrame::INTERNAL);
1034     __ Push(kInterpreterAccumulatorRegister);
1035     __ Push(ebx);  // First argument is the bytecode array.
1036     __ Push(edx);  // Second argument is the bytecode offset.
1037     __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset);
1038     __ Move(edx, eax);  // Result is the new bytecode offset.
1039     __ Pop(kInterpreterAccumulatorRegister);
1040   }
// Store the advanced offset back into the frame, then re-enter the
// interpreter dispatch loop at that offset.
1041   __ mov(Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp), edx);
1042
1043   Generate_InterpreterEnterBytecode(masm);
1044 }
1045
Generate_InterpreterEnterBytecodeDispatch(MacroAssembler * masm)1046 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
// Re-enter the interpreter at the bytecode offset recorded in the frame.
1047   Generate_InterpreterEnterBytecode(masm);
1048 }
1049
Generate_CompileLazy(MacroAssembler * masm)1050 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
1051   // ----------- S t a t e -------------
1052   //  -- eax : argument count (preserved for callee)
1053   //  -- edx : new target (preserved for callee)
1054   //  -- edi : target function (preserved for callee)
1055   // -----------------------------------
1056   // First lookup code, maybe we don't need to compile!
1057   Label gotta_call_runtime, gotta_call_runtime_no_stack;
1058   Label try_shared;
1059   Label loop_top, loop_bottom;
1060
1061   Register closure = edi;
1062   Register new_target = edx;
1063   Register argument_count = eax;
1064
// Save the registers we must preserve for the callee; they are also reused
// as scratch below.
1065   __ push(argument_count);
1066   __ push(new_target);
1067   __ push(closure);
1068
1069   Register map = argument_count;
1070   Register index = ebx;
1071   __ mov(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1072   __ mov(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
1073   __ mov(index, FieldOperand(map, FixedArray::kLengthOffset));
1074   __ cmp(index, Immediate(Smi::FromInt(2)));
1075   __ j(less, &gotta_call_runtime);
1076
1077   // Find literals.
1078   // edx : native context
1079   // ebx : length / index
1080   // eax : optimized code map
1081   // stack[0] : new target
1082   // stack[4] : closure
1083   Register native_context = edx;
1084   __ mov(native_context, NativeContextOperand());
1085
// Walk the optimized code map from the end towards the start, looking for
// an entry whose context matches the current native context.
1086   __ bind(&loop_top);
1087   Register temp = edi;
1088
1089   // Does the native context match?
1090   __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
1091                             SharedFunctionInfo::kOffsetToPreviousContext));
1092   __ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
1093   __ cmp(temp, native_context);
1094   __ j(not_equal, &loop_bottom);
1095   // OSR id set to none?
1096   __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
1097                             SharedFunctionInfo::kOffsetToPreviousOsrAstId));
1098   const int bailout_id = BailoutId::None().ToInt();
1099   __ cmp(temp, Immediate(Smi::FromInt(bailout_id)));
1100   __ j(not_equal, &loop_bottom);
1101   // Literals available?
1102   __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
1103                             SharedFunctionInfo::kOffsetToPreviousLiterals));
1104   __ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
1105   __ JumpIfSmi(temp, &gotta_call_runtime);
1106
1107   // Save the literals in the closure.
1108   __ mov(ecx, Operand(esp, 0));
1109   __ mov(FieldOperand(ecx, JSFunction::kLiteralsOffset), temp);
1110   __ push(index);
1111   __ RecordWriteField(ecx, JSFunction::kLiteralsOffset, temp, index,
1112                       kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
1113   __ pop(index);
1114
1115   // Code available?
1116   Register entry = ecx;
1117   __ mov(entry, FieldOperand(map, index, times_half_pointer_size,
1118                              SharedFunctionInfo::kOffsetToPreviousCachedCode));
1119   __ mov(entry, FieldOperand(entry, WeakCell::kValueOffset));
1120   __ JumpIfSmi(entry, &try_shared);
1121
1122   // Found literals and code. Get them into the closure and return.
1123   __ pop(closure);
1124   // Store code entry in the closure.
1125   __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
1126   __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
1127   __ RecordWriteCodeEntryField(closure, entry, eax);
1128
1129   // Link the closure into the optimized function list.
1130   // ecx : code entry
1131   // edx : native context
1132   // edi : closure
1133   __ mov(ebx,
1134          ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
1135   __ mov(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), ebx);
1136   __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, ebx, eax,
1137                       kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
1138   const int function_list_offset =
1139       Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
1140   __ mov(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST),
1141          closure);
1142   // Save closure before the write barrier.
1143   __ mov(ebx, closure);
1144   __ RecordWriteContextSlot(native_context, function_list_offset, closure, eax,
1145                             kDontSaveFPRegs);
1146   __ mov(closure, ebx);
// Restore the preserved registers and jump straight into the cached code.
1147   __ pop(new_target);
1148   __ pop(argument_count);
1149   __ jmp(entry);
1150
1151   __ bind(&loop_bottom);
1152   __ sub(index, Immediate(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
1153   __ cmp(index, Immediate(Smi::FromInt(1)));
1154   __ j(greater, &loop_top);
1155
1156   // We found neither literals nor code.
1157   __ jmp(&gotta_call_runtime);
1158
// The per-context entry had literals but no code; fall back to the shared
// function info's code, unless the function is marked for tier-up or the
// shared code is a builtin.
1159   __ bind(&try_shared);
1160   __ pop(closure);
1161   __ pop(new_target);
1162   __ pop(argument_count);
1163   __ mov(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1164   // Is the shared function marked for tier up?
1165   __ test_b(FieldOperand(entry, SharedFunctionInfo::kMarkedForTierUpByteOffset),
1166             Immediate(1 << SharedFunctionInfo::kMarkedForTierUpBitWithinByte));
1167   __ j(not_zero, &gotta_call_runtime_no_stack);
1168   // Is the full code valid?
1169   __ mov(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
1170   __ mov(ebx, FieldOperand(entry, Code::kFlagsOffset));
1171   __ and_(ebx, Code::KindField::kMask);
1172   __ shr(ebx, Code::KindField::kShift);
1173   __ cmp(ebx, Immediate(Code::BUILTIN));
1174   __ j(equal, &gotta_call_runtime_no_stack);
1175   // Yes, install the full code.
1176   __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
1177   __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
1178   __ RecordWriteCodeEntryField(closure, entry, ebx);
1179   __ jmp(entry);
1180
1181   __ bind(&gotta_call_runtime);
1182   __ pop(closure);
1183   __ pop(new_target);
1184   __ pop(argument_count);
1185   __ bind(&gotta_call_runtime_no_stack);
1186
1187   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1188 }
1189
Generate_CompileBaseline(MacroAssembler * masm)1190 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
// Compile via the runtime and tail-call the code object it returns.
1191   GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
1192 }
1193
Generate_CompileOptimized(MacroAssembler * masm)1194 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
// Compile (non-concurrently) via the runtime and tail-call the result.
1195   GenerateTailCallToReturnedCode(masm,
1196                                  Runtime::kCompileOptimized_NotConcurrent);
1197 }
1198
Generate_CompileOptimizedConcurrent(MacroAssembler * masm)1199 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
// Kick off concurrent compilation via the runtime; tail-call the returned code.
1200   GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
1201 }
1202
Generate_InstantiateAsmJs(MacroAssembler * masm)1203 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1204   // ----------- S t a t e -------------
1205   //  -- eax : argument count (preserved for callee)
1206   //  -- edx : new target (preserved for callee)
1207   //  -- edi : target function (preserved for callee)
1208   // -----------------------------------
1209   Label failed;
1210   {
1211     FrameScope scope(masm, StackFrame::INTERNAL);
1212     // Preserve argument count for later compare.
1213     __ mov(ecx, eax);
1214     // Push the number of arguments to the callee.
1215     __ SmiTag(eax);
1216     __ push(eax);
1217     // Push a copy of the target function and the new target.
1218     __ push(edi);
1219     __ push(edx);
1220
1221     // The function.
1222     __ push(edi);
1223     // Copy arguments from caller (stdlib, foreign, heap).
// Push up to three caller-provided arguments, padding missing ones with
// undefined. ecx holds the actual argument count; the case j == 3 needs no
// count check since at most three arguments are consumed.
1224     Label args_done;
1225     for (int j = 0; j < 4; ++j) {
1226       Label over;
1227       if (j < 3) {
1228         __ cmp(ecx, Immediate(j));
1229         __ j(not_equal, &over, Label::kNear);
1230       }
1231       for (int i = j - 1; i >= 0; --i) {
1232         __ Push(Operand(
1233             ebp, StandardFrameConstants::kCallerSPOffset + i * kPointerSize));
1234       }
1235       for (int i = 0; i < 3 - j; ++i) {
1236         __ PushRoot(Heap::kUndefinedValueRootIndex);
1237       }
1238       if (j < 3) {
1239         __ jmp(&args_done, Label::kNear);
1240         __ bind(&over);
1241       }
1242     }
1243     __ bind(&args_done);
1244
1245     // Call runtime, on success unwind frame, and parent frame.
1246     __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
1247     // A smi 0 is returned on failure, an object on success.
1248     __ JumpIfSmi(eax, &failed, Label::kNear);
1249
// Success: drop the saved target and new target copies, re-load the smi
// argument count, and leave the frame.
1250     __ Drop(2);
1251     __ Pop(ecx);
1252     __ SmiUntag(ecx);
1253     scope.GenerateLeaveFrame();
1254
// Remove the caller's arguments (ecx arguments plus the receiver) from the
// stack before returning.
1255     __ PopReturnAddressTo(ebx);
1256     __ inc(ecx);
1257     __ lea(esp, Operand(esp, ecx, times_pointer_size, 0));
1258     __ PushReturnAddressFrom(ebx);
1259     __ ret(0);
1260
1261     __ bind(&failed);
1262     // Restore target function and new target.
1263     __ pop(edx);
1264     __ pop(edi);
1265     __ pop(eax);
1266     __ SmiUntag(eax);
1267   }
1268   // On failure, tail call back to regular js.
1269   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1270 }
1271
GenerateMakeCodeYoungAgainCommon(MacroAssembler * masm)1272 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
1273   // For now, we are relying on the fact that make_code_young doesn't do any
1274   // garbage collection which allows us to save/restore the registers without
1275   // worrying about which of them contain pointers. We also don't build an
1276   // internal frame to make the code faster, since we shouldn't have to do stack
1277   // crawls in MakeCodeYoung. This seems a bit fragile.
1278
1279   // Re-execute the code that was patched back to the young age when
1280   // the stub returns.
// Rewind the on-stack return address by 5 bytes so the patched sequence is
// re-executed on return. NOTE(review): 5 presumably equals the ia32 call
// instruction length (cf. Assembler::kCallInstructionLength) — confirm.
1281   __ sub(Operand(esp, 0), Immediate(5));
1282   __ pushad();
// Load the (adjusted) return address; pushad pushed 8 registers above it.
1283   __ mov(eax, Operand(esp, 8 * kPointerSize));
1284   {
1285     FrameScope scope(masm, StackFrame::MANUAL);
1286     __ PrepareCallCFunction(2, ebx);
// C call: get_make_code_young_function(return_address, isolate).
1287     __ mov(Operand(esp, 1 * kPointerSize),
1288            Immediate(ExternalReference::isolate_address(masm->isolate())));
1289     __ mov(Operand(esp, 0), eax);
1290     __ CallCFunction(
1291         ExternalReference::get_make_code_young_function(masm->isolate()), 2);
1292   }
1293   __ popad();
1294   __ ret(0);
1295 }
1296
// The even- and odd-marking variants of each Make*CodeYoungAgain builtin are
// identical: both delegate to GenerateMakeCodeYoungAgainCommon.
1297 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
1298   void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
1299       MacroAssembler* masm) {                                 \
1300     GenerateMakeCodeYoungAgainCommon(masm);                   \
1301   }                                                           \
1302   void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
1303       MacroAssembler* masm) {                                 \
1304     GenerateMakeCodeYoungAgainCommon(masm);                   \
1305   }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)1306 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
1307 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1308
1309 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
1310   // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
1311   // that make_code_young doesn't do any garbage collection which allows us to
1312   // save/restore the registers without worrying about which of them contain
1313   // pointers.
1314   __ pushad();
// Load the return address (pushad pushed 8 registers above it) and back it
// up to the start of the call instruction that entered this stub.
1315   __ mov(eax, Operand(esp, 8 * kPointerSize));
1316   __ sub(eax, Immediate(Assembler::kCallInstructionLength));
1317   {  // NOLINT
1318     FrameScope scope(masm, StackFrame::MANUAL);
1319     __ PrepareCallCFunction(2, ebx);
// C call: get_mark_code_as_executed_function(call_address, isolate).
1320     __ mov(Operand(esp, 1 * kPointerSize),
1321            Immediate(ExternalReference::isolate_address(masm->isolate())));
1322     __ mov(Operand(esp, 0), eax);
1323     __ CallCFunction(
1324         ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
1325         2);
1326   }
1327   __ popad();
1328
1329   // Perform prologue operations usually performed by the young code stub.
1330   __ pop(eax);   // Pop return address into scratch register.
1331   __ push(ebp);  // Caller's frame pointer.
1332   __ mov(ebp, esp);
1333   __ push(esi);  // Callee's context.
1334   __ push(edi);  // Callee's JS Function.
1335   __ push(eax);  // Push return address after frame prologue.
1336
1337   // Jump to point after the code-age stub.
1338   __ ret(0);
1339 }
1340
Generate_MarkCodeAsExecutedTwice(MacroAssembler * masm)1341 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
// Same behavior as making the code young again.
1342   GenerateMakeCodeYoungAgainCommon(masm);
1343 }
1344
Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler * masm)1345 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
// Same behavior as marking the code as executed once.
1346   Generate_MarkCodeAsExecutedOnce(masm);
1347 }
1348
Generate_NotifyStubFailureHelper(MacroAssembler * masm,SaveFPRegsMode save_doubles)1349 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
1350                                              SaveFPRegsMode save_doubles) {
// Calls Runtime::kNotifyStubFailure with all registers preserved, drops the
// state slot, and returns to the continuation already on the stack.
1351   // Enter an internal frame.
1352   {
1353     FrameScope scope(masm, StackFrame::INTERNAL);
1354
1355     // Preserve registers across notification, this is important for compiled
1356     // stubs that tail call the runtime on deopts passing their parameters in
1357     // registers.
1358     __ pushad();
1359     __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
1360     __ popad();
1361     // Tear down internal frame.
1362   }
1363
1364   __ pop(MemOperand(esp, 0));  // Ignore state offset
1365   __ ret(0);  // Return to IC Miss stub, continuation still on stack.
1366 }
1367
Generate_NotifyStubFailure(MacroAssembler * masm)1368 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
// Stub-failure notification without saving FP registers.
1369   Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
1370 }
1371
Generate_NotifyStubFailureSaveDoubles(MacroAssembler * masm)1372 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
// Stub-failure notification that also saves/restores FP registers.
1373   Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
1374 }
1375
Generate_NotifyDeoptimizedHelper(MacroAssembler * masm,Deoptimizer::BailoutType type)1376 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1377                                              Deoptimizer::BailoutType type) {
// Reports the deoptimization to the runtime, then inspects the full-codegen
// bailout state left on the stack to decide how many slots to drop on return
// (and whether to reload the accumulator/TOS value into eax).
1378   {
1379     FrameScope scope(masm, StackFrame::INTERNAL);
1380
1381     // Pass deoptimization type to the runtime system.
1382     __ push(Immediate(Smi::FromInt(static_cast<int>(type))));
1383     __ CallRuntime(Runtime::kNotifyDeoptimized);
1384
1385     // Tear down internal frame.
1386   }
1387
1388   // Get the full codegen state from the stack and untag it.
1389   __ mov(ecx, Operand(esp, 1 * kPointerSize));
1390   __ SmiUntag(ecx);
1391
1392   // Switch on the state.
// Fixed: "&not_..." had been corrupted to "\u00ac_..." by HTML-entity
// decoding of "&not"; restored the address-of-label operands.
1393   Label not_no_registers, not_tos_eax;
1394   __ cmp(ecx, static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS));
1395   __ j(not_equal, &not_no_registers, Label::kNear);
1396   __ ret(1 * kPointerSize);  // Remove state.
1397
1398   __ bind(&not_no_registers);
1399   DCHECK_EQ(kInterpreterAccumulatorRegister.code(), eax.code());
1400   __ mov(eax, Operand(esp, 2 * kPointerSize));
1401   __ cmp(ecx, static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER));
1402   __ j(not_equal, &not_tos_eax, Label::kNear);
1403   __ ret(2 * kPointerSize);  // Remove state, eax.
1404
1405   __ bind(&not_tos_eax);
1406   __ Abort(kNoCasesLeft);
1407 }
1408
Generate_NotifyDeoptimized(MacroAssembler * masm)1409 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
// Eager deoptimization notification.
1410   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1411 }
1412
Generate_NotifySoftDeoptimized(MacroAssembler * masm)1413 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
// Soft deoptimization notification.
1414   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1415 }
1416
Generate_NotifyLazyDeoptimized(MacroAssembler * masm)1417 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
// Lazy deoptimization notification.
1418   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1419 }
1420
1421 // static
Generate_FunctionPrototypeApply(MacroAssembler * masm)1422 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1423   // ----------- S t a t e -------------
1424   //  -- eax     : argc
1425   //  -- esp[0]  : return address
1426   //  -- esp[4]  : argArray
1427   //  -- esp[8]  : thisArg
1428   //  -- esp[12] : receiver
1429   // -----------------------------------
1430
1431   // 1. Load receiver into edi, argArray into eax (if present), remove all
1432   // arguments from the stack (including the receiver), and push thisArg (if
1433   // present) instead.
1434   {
1435     Label no_arg_array, no_this_arg;
// edx holds thisArg and ebx holds argArray; both default to undefined.
1436     __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
1437     __ mov(ebx, edx);
1438     __ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize));
1439     __ test(eax, eax);
1440     __ j(zero, &no_this_arg, Label::kNear);
1441     {
1442       __ mov(edx, Operand(esp, eax, times_pointer_size, 0));
1443       __ cmp(eax, Immediate(1));
1444       __ j(equal, &no_arg_array, Label::kNear);
1445       __ mov(ebx, Operand(esp, eax, times_pointer_size, -kPointerSize));
1446       __ bind(&no_arg_array);
1447     }
1448     __ bind(&no_this_arg);
1449     __ PopReturnAddressTo(ecx);
1450     __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1451     __ Push(edx);
1452     __ PushReturnAddressFrom(ecx);
1453     __ Move(eax, ebx);
1454   }
1455
1456   // ----------- S t a t e -------------
1457   //  -- eax    : argArray
1458   //  -- edi    : receiver
1459   //  -- esp[0] : return address
1460   //  -- esp[4] : thisArg
1461   // -----------------------------------
1462
1463   // 2. Make sure the receiver is actually callable.
1464   Label receiver_not_callable;
1465   __ JumpIfSmi(edi, &receiver_not_callable, Label::kNear);
1466   __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
1467   __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
1468             Immediate(1 << Map::kIsCallable));
1469   __ j(zero, &receiver_not_callable, Label::kNear);
1470
1471   // 3. Tail call with no arguments if argArray is null or undefined.
1472   Label no_arguments;
1473   __ JumpIfRoot(eax, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
1474   __ JumpIfRoot(eax, Heap::kUndefinedValueRootIndex, &no_arguments,
1475                 Label::kNear);
1476
1477   // 4a. Apply the receiver to the given argArray (passing undefined for
1478   // new.target).
1479   __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
1480   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1481
1482   // 4b. The argArray is either null or undefined, so we tail call without any
1483   // arguments to the receiver.
1484   __ bind(&no_arguments);
1485   {
1486     __ Set(eax, 0);
1487     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1488   }
1489
1490   // 4c. The receiver is not callable, throw an appropriate TypeError.
1491   __ bind(&receiver_not_callable);
1492   {
// Store the receiver in the first stack slot so the runtime can report it.
1493     __ mov(Operand(esp, kPointerSize), edi);
1494     __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1495   }
1496 }
1497
1498 // static
Generate_FunctionPrototypeCall(MacroAssembler * masm)1499 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1500   // Stack Layout:
1501   // esp[0]           : Return address
1502   // esp[4]           : Argument n
1503   // esp[8]           : Argument n-1
1504   //  ...
1505   // esp[4 * n]       : Argument 1
1506   // esp[4 * (n + 1)] : Receiver (callable to call)
1507   //
1508   // eax contains the number of arguments, n, not counting the receiver.
1509   //
1510   // 1. Make sure we have at least one argument.
1511   {
1512     Label done;
1513     __ test(eax, eax);
1514     __ j(not_zero, &done, Label::kNear);
1515     __ PopReturnAddressTo(ebx);
1516     __ PushRoot(Heap::kUndefinedValueRootIndex);
1517     __ PushReturnAddressFrom(ebx);
1518     __ inc(eax);
1519     __ bind(&done);
1520   }
1521
1522   // 2. Get the callable to call (passed as receiver) from the stack.
1523   __ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize));
1524
1525   // 3. Shift arguments and return address one slot down on the stack
1526   //    (overwriting the original receiver). Adjust argument count to make
1527   //    the original first argument the new receiver.
1528   {
1529     Label loop;
1530     __ mov(ecx, eax);
1531     __ bind(&loop);
1532     __ mov(ebx, Operand(esp, ecx, times_pointer_size, 0));
1533     __ mov(Operand(esp, ecx, times_pointer_size, kPointerSize), ebx);
1534     __ dec(ecx);
1535     __ j(not_sign, &loop);  // While non-negative (to copy return address).
1536     __ pop(ebx);            // Discard copy of return address.
1537     __ dec(eax);            // One fewer argument (first argument is new receiver).
1538   }
1539
1540   // 4. Call the callable.
1541   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1542 }
1543
Generate_ReflectApply(MacroAssembler * masm)1544 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1545   // ----------- S t a t e -------------
1546   //  -- eax     : argc
1547   //  -- esp[0]  : return address
1548   //  -- esp[4]  : argumentsList
1549   //  -- esp[8]  : thisArgument
1550   //  -- esp[12] : target
1551   //  -- esp[16] : receiver
1552   // -----------------------------------
1553
1554   // 1. Load target into edi (if present), argumentsList into eax (if present),
1555   // remove all arguments from the stack (including the receiver), and push
1556   // thisArgument (if present) instead.
1557   {
1558     Label done;
1559     __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
1560     __ mov(edx, edi);
1561     __ mov(ebx, edi);
// Depending on argc: >= 1 loads target (edi), >= 2 loads thisArgument (edx),
// >= 3 loads argumentsList (ebx); anything missing stays undefined.
1562     __ cmp(eax, Immediate(1));
1563     __ j(below, &done, Label::kNear);
1564     __ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
1565     __ j(equal, &done, Label::kNear);
1566     __ mov(edx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
1567     __ cmp(eax, Immediate(3));
1568     __ j(below, &done, Label::kNear);
1569     __ mov(ebx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
1570     __ bind(&done);
1571     __ PopReturnAddressTo(ecx);
1572     __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1573     __ Push(edx);
1574     __ PushReturnAddressFrom(ecx);
1575     __ Move(eax, ebx);
1576   }
1577
1578   // ----------- S t a t e -------------
1579   //  -- eax    : argumentsList
1580   //  -- edi    : target
1581   //  -- esp[0] : return address
1582   //  -- esp[4] : thisArgument
1583   // -----------------------------------
1584
1585   // 2. Make sure the target is actually callable.
1586   Label target_not_callable;
1587   __ JumpIfSmi(edi, &target_not_callable, Label::kNear);
1588   __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
1589   __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
1590             Immediate(1 << Map::kIsCallable));
1591   __ j(zero, &target_not_callable, Label::kNear);
1592
1593   // 3a. Apply the target to the given argumentsList (passing undefined for
1594   // new.target).
1595   __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
1596   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1597
1598   // 3b. The target is not callable, throw an appropriate TypeError.
1599   __ bind(&target_not_callable);
1600   {
// Store the target in the first stack slot so the runtime can report it.
1601     __ mov(Operand(esp, kPointerSize), edi);
1602     __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1603   }
1604 }
1605
// ES6 section 26.1.2 Reflect.construct ( target, argumentsList [, newTarget ] )
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax     : argc
  //  -- esp[0]  : return address
  //  -- esp[4]  : new.target (optional)
  //  -- esp[8]  : argumentsList
  //  -- esp[12] : target
  //  -- esp[16] : receiver
  // -----------------------------------

  // 1. Load target into edi (if present), argumentsList into eax (if present),
  // new.target into edx (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label done;
    // Default target, new.target and argumentsList to undefined; arguments
    // that were not passed keep these defaults.
    __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
    __ mov(edx, edi);
    __ mov(ebx, edi);
    __ cmp(eax, Immediate(1));
    __ j(below, &done, Label::kNear);
    // argc >= 1: load target; new.target defaults to target per the spec.
    __ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
    __ mov(edx, edi);
    __ j(equal, &done, Label::kNear);
    // argc >= 2: load argumentsList (kept in ebx until the stack is dropped).
    __ mov(ebx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
    __ cmp(eax, Immediate(3));
    __ j(below, &done, Label::kNear);
    // argc >= 3: load the explicit new.target, overriding the default.
    __ mov(edx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
    __ bind(&done);
    // Drop all arguments including the receiver, then push undefined as the
    // receiver for the construct call.
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(ecx);
    __ Move(eax, ebx);
  }

  // ----------- S t a t e -------------
  //  -- eax    : argumentsList
  //  -- edx    : new.target
  //  -- edi    : target
  //  -- esp[0] : return address
  //  -- esp[4] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(edi, &target_not_constructor, Label::kNear);
  __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsConstructor));
  __ j(zero, &target_not_constructor, Label::kNear);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(edx, &new_target_not_constructor, Label::kNear);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsConstructor));
  __ j(zero, &new_target_not_constructor, Label::kNear);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    // Overwrite the receiver slot with the offending value so the runtime can
    // include it in the error message.
    __ mov(Operand(esp, kPointerSize), edi);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ mov(Operand(esp, kPointerSize), edx);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
1683
// Entry point for the InternalArray function when invoked as a normal call;
// validates the initial map in debug mode and tail-calls the constructor stub.
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : argc
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the InternalArray function.
  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, edi);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray function should be a map.
    __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // A Smi check catches both a NULL pointer and a Smi in one test, since
    // both have the low tag bits clear.
    __ test(ebx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(ebx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
1711
// Entry point for the Array function when invoked as a normal call; validates
// the initial map in debug mode and tail-calls the Array constructor stub.
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : argc
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, edi);
  // The stub expects new.target in edx; for a plain call it equals the
  // function itself.
  __ mov(edx, edi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array function should be a map.
    __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // A Smi check catches both a NULL pointer and a Smi in one test, since
    // both have the low tag bits clear.
    __ test(ebx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ebx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  // No AllocationSite available; pass undefined as the feedback element.
  __ mov(ebx, masm->isolate()->factory()->undefined_value());
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
1740
// static
// Implements Math.max / Math.min over a variable number of arguments: walks
// the arguments right-to-left, converting each to a number as needed, and
// keeps the running extremum in edx (tagged) / xmm0 (double).
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- eax                 : number of arguments
  //  -- edi                 : function
  //  -- esi                 : context
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------
  // For kMin we keep the smaller value, so we swap when the accumulator is
  // *above* the parameter (and vice versa for kMax).
  Condition const cc = (kind == MathMaxMinKind::kMin) ? below : above;
  // The identity element: Math.min starts from +Infinity, Math.max from
  // -Infinity.
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  // Register whose sign bit decides the -0 vs. +0 tie-break below.
  XMMRegister const reg = (kind == MathMaxMinKind::kMin) ? xmm1 : xmm0;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in edx and the double value in xmm0.
  __ LoadRoot(edx, root_index);
  __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
  // ecx is the loop counter, running from argc down to zero.
  __ Move(ecx, eax);

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ test(ecx, ecx);
    __ j(zero, &done_loop);

    // Load the next parameter tagged value into ebx.
    __ mov(ebx, Operand(esp, ecx, times_pointer_size, 0));

    // Load the double value of the parameter into xmm1, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(ebx, &convert_smi);
    __ JumpIfRoot(FieldOperand(ebx, HeapObject::kMapOffset),
                  Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      FrameScope scope(masm, StackFrame::MANUAL);
      // Smi-tag argc and the loop counter so the GC treats them as valid
      // tagged values while they are spilled across the call.
      __ SmiTag(eax);
      __ SmiTag(ecx);
      __ EnterBuiltinFrame(esi, edi, eax);
      __ Push(ecx);
      __ Push(edx);
      __ mov(eax, ebx);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mov(ebx, eax);
      __ Pop(edx);
      __ Pop(ecx);
      __ LeaveBuiltinFrame(esi, edi, eax);
      __ SmiUntag(ecx);
      __ SmiUntag(eax);
      {
        // Restore the double accumulator value (xmm0), which the call above
        // may have clobbered, from the preserved tagged value in edx.
        Label restore_smi, done_restore;
        __ JumpIfSmi(edx, &restore_smi, Label::kNear);
        __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
        __ jmp(&done_restore, Label::kNear);
        __ bind(&restore_smi);
        __ SmiUntag(edx);
        __ Cvtsi2sd(xmm0, edx);
        __ SmiTag(edx);
        __ bind(&done_restore);
      }
    }
    // ToNumber may have returned a Smi or a HeapNumber; re-dispatch.
    __ jmp(&convert);
    __ bind(&convert_number);
    __ movsd(xmm1, FieldOperand(ebx, HeapNumber::kValueOffset));
    __ jmp(&done_convert, Label::kNear);
    __ bind(&convert_smi);
    __ SmiUntag(ebx);
    __ Cvtsi2sd(xmm1, ebx);
    __ SmiTag(ebx);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left hand
    // side (xmm0) and the next parameter value on the right hand side (xmm1).
    Label compare_equal, compare_nan, compare_swap, done_compare;
    __ ucomisd(xmm0, xmm1);
    // ucomisd sets the parity flag when either operand is NaN.
    __ j(parity_even, &compare_nan, Label::kNear);
    __ j(cc, &done_compare, Label::kNear);
    __ j(equal, &compare_equal, Label::kNear);

    // Result is on the right hand side.
    __ bind(&compare_swap);
    __ movaps(xmm0, xmm1);
    __ mov(edx, ebx);
    __ jmp(&done_compare, Label::kNear);

    // At least one side is NaN, which means that the result will be NaN too.
    __ bind(&compare_nan);
    __ LoadRoot(edx, Heap::kNanValueRootIndex);
    __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
    __ jmp(&done_compare, Label::kNear);

    // Left and right hand side are equal, check for -0 vs. +0: movmskpd
    // extracts the sign bit, which distinguishes -0 (sign set) from +0.
    __ bind(&compare_equal);
    __ Push(edi);  // Preserve function in edi.
    __ movmskpd(edi, reg);
    __ test(edi, Immediate(1));
    __ Pop(edi);
    __ j(not_zero, &compare_swap);

    __ bind(&done_compare);
    __ dec(ecx);
    __ jmp(&loop);
  }

  __ bind(&done_loop);
  // Drop all arguments including the receiver and return the accumulator.
  __ PopReturnAddressTo(ecx);
  __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
  __ PushReturnAddressFrom(ecx);
  __ mov(eax, edx);
  __ Ret();
}
1859
// static
// Implements Number(value) when called as a plain function (not as a
// constructor): converts the first argument to a number and returns it.
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax                 : number of arguments
  //  -- edi                 : constructor function
  //  -- esi                 : context
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  // 1. Load the first argument into ebx.
  Label no_arguments;
  {
    __ test(eax, eax);
    __ j(zero, &no_arguments, Label::kNear);
    __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
  }

  // 2a. Convert the first argument to a number.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    // Smi-tag argc so it survives the call as a valid tagged value.
    __ SmiTag(eax);
    __ EnterBuiltinFrame(esi, edi, eax);
    __ mov(eax, ebx);
    __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(esi, edi, ebx);  // Argc popped to ebx.
    __ SmiUntag(ebx);
  }

  {
    // Drop all arguments including the receiver.
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, ebx, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(ecx);
    __ Ret();
  }

  // 2b. No arguments, return +0 (already in eax).
  __ bind(&no_arguments);
  __ ret(1 * kPointerSize);
}
1902
// static
// Implements 'new Number(value)': converts the first argument to a number and
// wraps it in a JSValue, falling back to the runtime for subclass
// instantiation or allocation failure.
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax                 : number of arguments
  //  -- edi                 : constructor function
  //  -- edx                 : new target
  //  -- esi                 : context
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Store argc in ecx as a Smi; it is preserved across the calls below and
  // used at the end to drop the arguments.
  __ mov(ecx, eax);
  __ SmiTag(ecx);

  // 2. Load the first argument into ebx, defaulting to +0 when absent.
  {
    Label no_arguments, done;
    __ test(eax, eax);
    __ j(zero, &no_arguments, Label::kNear);
    __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
    __ jmp(&done, Label::kNear);
    __ bind(&no_arguments);
    __ Move(ebx, Smi::kZero);
    __ bind(&done);
  }

  // 3. Make sure ebx is a number.
  {
    Label done_convert;
    __ JumpIfSmi(ebx, &done_convert);
    __ CompareRoot(FieldOperand(ebx, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(equal, &done_convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterBuiltinFrame(esi, edi, ecx);
      // Preserve new.target (edx) across the ToNumber call.
      __ Push(edx);
      __ Move(eax, ebx);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ Move(ebx, eax);
      __ Pop(edx);
      __ LeaveBuiltinFrame(esi, edi, ecx);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ. If so, this is a subclass
  // construction and must go through the generic new-object path.
  Label drop_frame_and_ret, done_alloc, new_object;
  __ cmp(edx, edi);
  __ j(not_equal, &new_object);

  // 5. Allocate a JSValue wrapper for the number. esi is used as a scratch
  // register here and restored below on the slow path.
  __ AllocateJSValue(eax, edi, ebx, esi, &done_alloc);
  __ jmp(&drop_frame_and_ret);

  __ bind(&done_alloc);
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));  // Restore esi.

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ EnterBuiltinFrame(esi, edi, ecx);
    __ Push(ebx);  // the first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    // Store the converted number directly into the new wrapper's value slot.
    __ Pop(FieldOperand(eax, JSValue::kValueOffset));
    __ LeaveBuiltinFrame(esi, edi, ecx);
  }

  __ bind(&drop_frame_and_ret);
  {
    // Drop all arguments including the receiver. esi is clobbered here but
    // is dead at the return.
    __ PopReturnAddressTo(esi);
    __ SmiUntag(ecx);
    __ lea(esp, Operand(esp, ecx, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(esi);
    __ Ret();
  }
}
1988
// static
// Implements String(value) when called as a plain function (not as a
// constructor): returns the argument if it is already a string, renders a
// descriptive string for symbols, and converts anything else via ToString.
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax                 : number of arguments
  //  -- edi                 : constructor function
  //  -- esi                 : context
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  // 1. Load the first argument into eax.
  Label no_arguments;
  {
    __ mov(ebx, eax);  // Store argc in ebx.
    __ test(eax, eax);
    __ j(zero, &no_arguments, Label::kNear);
    __ mov(eax, Operand(esp, eax, times_pointer_size, 0));
  }

  // 2a. At least one argument, return eax if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label drop_frame_and_ret, to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(eax, &to_string, Label::kNear);
    // Symbols are the first non-string instance type, so a single compare
    // splits strings (below) / symbols (equal) / everything else (above).
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    __ j(above, &to_string, Label::kNear);
    __ j(equal, &symbol_descriptive_string, Label::kNear);
    __ jmp(&drop_frame_and_ret, Label::kNear);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(eax, Heap::kempty_stringRootIndex);
    __ ret(1 * kPointerSize);
  }

  // 3a. Convert eax to a string.
  __ bind(&to_string);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    // Smi-tag argc so it survives the call as a valid tagged value.
    __ SmiTag(ebx);
    __ EnterBuiltinFrame(esi, edi, ebx);
    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(esi, edi, ebx);
    __ SmiUntag(ebx);
  }
  __ jmp(&drop_frame_and_ret, Label::kNear);

  // 3b. Convert symbol in eax to a string.
  __ bind(&symbol_descriptive_string);
  {
    // Replace all arguments with the symbol and tail-call the runtime.
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, ebx, times_pointer_size, kPointerSize));
    __ Push(eax);
    __ PushReturnAddressFrom(ecx);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }

  __ bind(&drop_frame_and_ret);
  {
    // Drop all arguments including the receiver.
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, ebx, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(ecx);
    __ Ret();
  }
}
2059
// static
// Implements 'new String(value)': converts the first argument to a string and
// wraps it in a JSValue, falling back to the runtime for subclass
// instantiation or allocation failure.
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax                 : number of arguments
  //  -- edi                 : constructor function
  //  -- edx                 : new target
  //  -- esi                 : context
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Keep argc in ebx; it is preserved (Smi-tagged) across the calls below.
  __ mov(ebx, eax);

  // 2. Load the first argument into eax, defaulting to the empty string.
  {
    Label no_arguments, done;
    __ test(ebx, ebx);
    __ j(zero, &no_arguments, Label::kNear);
    __ mov(eax, Operand(esp, ebx, times_pointer_size, 0));
    __ jmp(&done, Label::kNear);
    __ bind(&no_arguments);
    __ LoadRoot(eax, Heap::kempty_stringRootIndex);
    __ bind(&done);
  }

  // 3. Make sure eax is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(eax, &convert, Label::kNear);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ecx);
    __ j(below, &done_convert);
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(ebx);
      __ EnterBuiltinFrame(esi, edi, ebx);
      // Preserve new.target (edx) across the ToString call.
      __ Push(edx);
      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
      __ Pop(edx);
      __ LeaveBuiltinFrame(esi, edi, ebx);
      __ SmiUntag(ebx);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ. If so, this is a subclass
  // construction and must go through the generic new-object path.
  Label drop_frame_and_ret, done_alloc, new_object;
  __ cmp(edx, edi);
  __ j(not_equal, &new_object);

  // 5. Allocate a JSValue wrapper for the string.
  // AllocateJSValue can't handle src == dst register. Reuse esi and restore it
  // as needed after the call.
  __ mov(esi, eax);
  __ AllocateJSValue(eax, edi, esi, ecx, &done_alloc);
  __ jmp(&drop_frame_and_ret);

  __ bind(&done_alloc);
  {
    // Restore eax to the first argument and esi to the context.
    __ mov(eax, esi);
    __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  }

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(ebx);
    __ EnterBuiltinFrame(esi, edi, ebx);
    __ Push(eax);  // the first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    // Store the converted string directly into the new wrapper's value slot.
    __ Pop(FieldOperand(eax, JSValue::kValueOffset));
    __ LeaveBuiltinFrame(esi, edi, ebx);
    __ SmiUntag(ebx);
  }

  __ bind(&drop_frame_and_ret);
  {
    // Drop all arguments including the receiver.
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, ebx, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(ecx);
    __ Ret();
  }
}
2151
// Builds an arguments-adaptor frame: saves the caller's frame pointer, pushes
// the adaptor sentinel, the function, and the Smi-tagged argument count.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(ebp);
  __ mov(ebp, esp);

  // Store the arguments adaptor context sentinel.
  __ push(Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Push the function on the stack.
  __ push(edi);

  // Preserve the number of arguments on the stack. Must preserve eax,
  // ebx and ecx because these registers are used when copying the
  // arguments and the receiver.
  // lea(edi, [eax + eax]) Smi-tags eax into edi without touching flags
  // or the preserved registers (Smi tag is one bit, value shifted left by 1).
  STATIC_ASSERT(kSmiTagSize == 1);
  __ lea(edi, Operand(eax, eax, times_1, kSmiTag));
  __ push(edi);
}
2169
// Tears down an arguments-adaptor frame and removes the caller's actual
// arguments (plus the receiver) from the stack. Clobbers ebx and ecx.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack (stored as a Smi by
  // EnterArgumentsAdaptorFrame).
  __ mov(ebx, Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ leave();

  // Remove caller arguments from the stack. times_2 both untags the Smi count
  // and scales by the 4-byte slot size in one step (2 * Smi value = count * 4
  // bytes only because kPointerSize == 4 and kSmiTagSize == 1).
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ pop(ecx);
  __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize));  // 1 ~ receiver
  __ push(ecx);
}
2183
// static
// Shared tail of Function.prototype.apply / Reflect.apply / Reflect.construct:
// materializes argumentsList as a FixedArray, checks for stack overflow,
// pushes the elements as real stack arguments, and dispatches to Call or
// Construct depending on new.target.
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : argumentsList
  //  -- edi    : target
  //  -- edx    : new.target (checked to be constructor or undefined)
  //  -- esp[0] : return address.
  //  -- esp[4] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(eax, &create_runtime);

    // Load the map of argumentsList into ecx.
    __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));

    // Load native context into ebx.
    __ mov(ebx, NativeContextOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ cmp(ecx, ContextOperand(ebx, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ j(equal, &create_arguments);
    __ cmp(ecx, ContextOperand(ebx, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ j(equal, &create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CmpInstanceType(ecx, JS_ARRAY_TYPE);
    __ j(equal, &create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      // Preserve target and new.target across the runtime call.
      __ Push(edi);
      __ Push(edx);
      __ Push(eax);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(edx);
      __ Pop(edi);
      __ mov(ebx, FieldOperand(eax, FixedArray::kLengthOffset));
      __ SmiUntag(ebx);
    }
    __ jmp(&done_create);

    // Try to create the list from an arguments object. This fast path is only
    // valid when the length still matches the backing elements array (i.e.
    // the arguments object has not been resized).
    __ bind(&create_arguments);
    __ mov(ebx, FieldOperand(eax, JSArgumentsObject::kLengthOffset));
    __ mov(ecx, FieldOperand(eax, JSObject::kElementsOffset));
    __ cmp(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
    __ j(not_equal, &create_runtime);
    __ SmiUntag(ebx);
    __ mov(eax, ecx);
    __ jmp(&done_create);

    // Try to create the list from a JSArray object. Only packed or holey
    // object elements are safe to copy directly; holey-Smi would leak holes
    // as raw values, and double elements are unboxed.
    __ bind(&create_array);
    __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(ecx);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    __ cmp(ecx, Immediate(FAST_ELEMENTS));
    __ j(above, &create_runtime);
    __ cmp(ecx, Immediate(FAST_HOLEY_SMI_ELEMENTS));
    __ j(equal, &create_runtime);
    __ mov(ebx, FieldOperand(eax, JSArray::kLengthOffset));
    __ SmiUntag(ebx);
    __ mov(eax, FieldOperand(eax, JSArray::kElementsOffset));

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    ExternalReference real_stack_limit =
        ExternalReference::address_of_real_stack_limit(masm->isolate());
    __ mov(ecx, Operand::StaticVariable(real_stack_limit));
    // Make ecx the space we have left. The stack might already be overflowed
    // here which will cause ecx to become negative.
    __ neg(ecx);
    __ add(ecx, esp);
    __ sar(ecx, kPointerSizeLog2);
    // Check if the arguments will overflow the stack.
    __ cmp(ecx, ebx);
    __ j(greater, &done, Label::kNear);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- edi    : target
  //  -- eax    : args (a FixedArray built from argumentsList)
  //  -- ebx    : len (number of elements to push from args)
  //  -- edx    : new.target (checked to be constructor or undefined)
  //  -- esp[0] : return address.
  //  -- esp[4] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    // Park new.target in xmm0 to free edx for the return address; no GP
    // register is available here.
    __ movd(xmm0, edx);
    __ PopReturnAddressTo(edx);
    __ Move(ecx, Immediate(0));
    Label done, loop;
    __ bind(&loop);
    __ cmp(ecx, ebx);
    __ j(equal, &done, Label::kNear);
    __ Push(
        FieldOperand(eax, ecx, times_pointer_size, FixedArray::kHeaderSize));
    __ inc(ecx);
    __ jmp(&loop);
    __ bind(&done);
    __ PushReturnAddressFrom(edx);
    __ movd(edx, xmm0);
    __ Move(eax, ebx);
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(edx, Heap::kUndefinedValueRootIndex);
    __ j(equal, masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}
2313
2314 namespace {
2315
2316 // Drops top JavaScript frame and an arguments adaptor frame below it (if
2317 // present) preserving all the arguments prepared for current call.
2318 // Does nothing if debugger is currently active.
2319 // ES6 14.6.3. PrepareForTailCall
2320 //
2321 // Stack structure for the function g() tail calling f():
2322 //
2323 // ------- Caller frame: -------
2324 // | ...
2325 // | g()'s arg M
2326 // | ...
2327 // | g()'s arg 1
2328 // | g()'s receiver arg
2329 // | g()'s caller pc
2330 // ------- g()'s frame: -------
2331 // | g()'s caller fp <- fp
2332 // | g()'s context
2333 // | function pointer: g
2334 // | -------------------------
2335 // | ...
2336 // | ...
2337 // | f()'s arg N
2338 // | ...
2339 // | f()'s arg 1
2340 // | f()'s receiver arg
2341 // | f()'s caller pc <- sp
2342 // ----------------------
2343 //
// See the stack diagram above; drops g()'s frame (and a possible adaptor
// frame below it) so f()'s call replaces g()'s frame. No-op unless ES2015
// tail-call elimination is enabled.
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is enabled.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ movzx_b(scratch1,
             Operand::StaticVariable(is_tail_call_elimination_enabled));
  __ cmp(scratch1, Immediate(0));
  __ j(equal, &done, Label::kNear);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ cmp(Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset),
           Immediate(Smi::FromInt(StackFrame::STUB)));
    __ j(not_equal, &no_interpreter_frame, Label::kNear);
    // Skip over the stub frame so ebp points at the JavaScript frame.
    __ mov(ebp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ mov(scratch2, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &no_arguments_adaptor, Label::kNear);

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ mov(ebp, scratch2);
  __ mov(caller_args_count_reg,
         Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ jmp(&formal_parameter_count_loaded, Label::kNear);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count: with no adaptor frame, actual and
  // formal argument counts are the same.
  __ mov(scratch1, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(scratch1,
         FieldOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ mov(
      caller_args_count_reg,
      FieldOperand(scratch1, SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(caller_args_count_reg);

  __ bind(&formal_parameter_count_loaded);

  // Let the macro assembler shuffle the new arguments over the old frame.
  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3, ReturnAddressState::kOnStack, 0);
  __ bind(&done);
}
2402 } // namespace
2403
// static
// ES6 section 9.2.1 [[Call]] for a plain JSFunction: rejects class
// constructors, converts the receiver for sloppy-mode functions, optionally
// prepares a tail call, and invokes the function with argument adaption.
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(edi);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ test_b(FieldOperand(edx, SharedFunctionInfo::kFunctionKindByteOffset),
            Immediate(SharedFunctionInfo::kClassConstructorBitsWithinByte));
  __ j(not_zero, &class_constructor);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  // Native and strict-mode flags share a byte, so one test_b covers both.
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ test_b(FieldOperand(edx, SharedFunctionInfo::kNativeByteOffset),
            Immediate((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                      (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ j(not_zero, &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- eax : the number of arguments (not including the receiver)
    //  -- edx : the shared function info.
    //  -- edi : the function to call (checked to be a JSFunction)
    //  -- esi : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(ecx);
    } else {
      Label convert_to_object, convert_receiver;
      __ mov(ecx, Operand(esp, eax, times_pointer_size, kPointerSize));
      __ JumpIfSmi(ecx, &convert_to_object, Label::kNear);
      // All JSReceiver types are at the top of the instance-type range, so a
      // single above_equal check suffices.
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ebx);
      __ j(above_equal, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(ecx, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy, Label::kNear);
        __ JumpIfNotRoot(ecx, Heap::kNullValueRootIndex, &convert_to_object,
                         Label::kNear);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(ecx);
        }
        __ jmp(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        // Preserve argc (Smi-tagged), the function, and the context across
        // the ToObject call.
        __ SmiTag(eax);
        __ Push(eax);
        __ Push(edi);
        __ mov(eax, ecx);
        __ Push(esi);
        __ Call(masm->isolate()->builtins()->ToObject(),
                RelocInfo::CODE_TARGET);
        __ Pop(esi);
        __ mov(ecx, eax);
        __ Pop(edi);
        __ Pop(eax);
        __ SmiUntag(eax);
      }
      // Reload the shared function info, which the call above clobbered.
      __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    // Store the (possibly patched) receiver back into its stack slot.
    __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ecx);
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the shared function info.
  //  -- edi : the function to call (checked to be a JSFunction)
  //  -- esi : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, eax, ebx, ecx, edx);
    // Reload shared function info.
    __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  }

  // Invoke with the formal parameter count as the expected count; the
  // arguments adaptor kicks in if it differs from the actual count.
  __ mov(ebx,
         FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(ebx);
  ParameterCount actual(eax);
  ParameterCount expected(ebx);
  __ InvokeFunctionCode(edi, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());
  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ push(edi);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
2519
2520 namespace {
2521
// Pushes the elements of the [[BoundArguments]] FixedArray of the
// JSBoundFunction in edi onto the stack, between the receiver and the
// arguments already pushed by the caller, and increases eax by the number
// of bound arguments.  edx (new.target, when constructing) is preserved.
// Falls through directly when there are no bound arguments.
Generate_PushBoundArguments(MacroAssembler * masm)2522 void Generate_PushBoundArguments(MacroAssembler* masm) {
2523   // ----------- S t a t e -------------
2524   //  -- eax : the number of arguments (not including the receiver)
2525   //  -- edx : new.target (only in case of [[Construct]])
2526   //  -- edi : target (checked to be a JSBoundFunction)
2527   // -----------------------------------
2528 
2529   // Load [[BoundArguments]] into ecx and length of that into ebx.
2530   Label no_bound_arguments;
2531   __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
2532   __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
2533   __ SmiUntag(ebx);
2534   __ test(ebx, ebx);
2535   __ j(zero, &no_bound_arguments);
2536   {
2537     // ----------- S t a t e -------------
2538     //  -- eax  : the number of arguments (not including the receiver)
2539     //  -- edx  : new.target (only in case of [[Construct]])
2540     //  -- edi  : target (checked to be a JSBoundFunction)
2541     //  -- ecx  : the [[BoundArguments]] (implemented as FixedArray)
2542     //  -- ebx  : the number of [[BoundArguments]]
2543     // -----------------------------------
2544 
2545     // Reserve stack space for the [[BoundArguments]].  This is done up
2546     // front so a stack overflow is detected before any data is moved.
2546     {
2547       Label done;
2548       __ lea(ecx, Operand(ebx, times_pointer_size, 0));
2549       __ sub(esp, ecx);
2550       // Check the stack for overflow. We are not trying to catch interruptions
2551       // (i.e. debug break and preemption) here, so check the "real stack
2552       // limit".
2553       __ CompareRoot(esp, ecx, Heap::kRealStackLimitRootIndex);
2554       __ j(greater, &done, Label::kNear);  // Signed comparison.
2555       // Restore the stack pointer.
2556       __ lea(esp, Operand(esp, ebx, times_pointer_size, 0));
2557       {
2558         FrameScope scope(masm, StackFrame::MANUAL);
2559         __ EnterFrame(StackFrame::INTERNAL);
2560         __ CallRuntime(Runtime::kThrowStackOverflow);
2561       }
2562       __ bind(&done);
2563     }
2564 
2565     // Adjust effective number of arguments to include return address.
2566     __ inc(eax);
2567 
2568     // Relocate arguments and return address down the stack.
2569     {
2570       Label loop;
2571       __ Set(ecx, 0);
2572       __ lea(ebx, Operand(esp, ebx, times_pointer_size, 0));
2573       __ bind(&loop);
       // xmm0 is used as the copy temporary: every general-purpose
       // register is live at this point.
2574       __ movd(xmm0, Operand(ebx, ecx, times_pointer_size, 0));
2575       __ movd(Operand(esp, ecx, times_pointer_size, 0), xmm0);
2576       __ inc(ecx);
2577       __ cmp(ecx, eax);
2578       __ j(less, &loop);
2579     }
2580 
2581     // Copy [[BoundArguments]] to the stack (below the arguments).
2582     {
2583       Label loop;
2584       __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
2585       __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
2586       __ SmiUntag(ebx);
2587       __ bind(&loop);
2588       __ dec(ebx);
2589       __ movd(xmm0, FieldOperand(ecx, ebx, times_pointer_size,
2590                                  FixedArray::kHeaderSize));
2591       __ movd(Operand(esp, eax, times_pointer_size, 0), xmm0);
       // lea (unlike inc/add) does not touch EFLAGS, so the j(greater)
       // below still tests the flags produced by the dec(ebx) above.
2592       __ lea(eax, Operand(eax, 1));
2593       __ j(greater, &loop);
2594     }
2595 
2596     // Adjust effective number of arguments (eax contains the number of
2597     // arguments from the call plus return address plus the number of
2598     // [[BoundArguments]]), so we need to subtract one for the return address.
2599     __ dec(eax);
2600   }
2601   __ bind(&no_bound_arguments);
2602 }
2603
2604 } // namespace
2605
2606 // static
// Calls a JSBoundFunction: overwrites the receiver slot with [[BoundThis]],
// pushes the [[BoundArguments]], then tail-calls the Call builtin with the
// [[BoundTargetFunction]] as the new target in edi.
Generate_CallBoundFunctionImpl(MacroAssembler * masm,TailCallMode tail_call_mode)2607 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
2608                                               TailCallMode tail_call_mode) {
2609   // ----------- S t a t e -------------
2610   //  -- eax : the number of arguments (not including the receiver)
2611   //  -- edi : the function to call (checked to be a JSBoundFunction)
2612   // -----------------------------------
2613   __ AssertBoundFunction(edi);
2614 
2615   if (tail_call_mode == TailCallMode::kAllow) {
2616     PrepareForTailCall(masm, eax, ebx, ecx, edx);
2617   }
2618 
2619   // Patch the receiver to [[BoundThis]].
2620   __ mov(ebx, FieldOperand(edi, JSBoundFunction::kBoundThisOffset));
2621   __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ebx);
2622 
2623   // Push the [[BoundArguments]] onto the stack.
2624   Generate_PushBoundArguments(masm);
2625 
2626   // Call the [[BoundTargetFunction]] via the Call builtin.  The builtin's
2626   // Code object is loaded through an isolate external reference, then we
2626   // jump past the Code header to its first instruction.
2627   __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2628   __ mov(ecx, Operand::StaticVariable(ExternalReference(
2629                   Builtins::kCall_ReceiverIsAny, masm->isolate())));
2630   __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
2631   __ jmp(ecx);
2632 }
2633
2634 // static
// ES6 7.3.12 Call(F, V, [argumentsList]) dispatcher: routes any callable
// target in edi to the appropriate specialized builtin (JSFunction,
// JSBoundFunction, JSProxy, or the call-as-function delegate) and throws
// for non-callable targets.
Generate_Call(MacroAssembler * masm,ConvertReceiverMode mode,TailCallMode tail_call_mode)2635 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2636                              TailCallMode tail_call_mode) {
2637   // ----------- S t a t e -------------
2638   //  -- eax : the number of arguments (not including the receiver)
2639   //  -- edi : the target to call (can be any Object).
2640   // -----------------------------------
2641 
2642   Label non_callable, non_function, non_smi;
2643   __ JumpIfSmi(edi, &non_callable);
2644   // NOTE(review): no visible jump targets non_smi in this chunk; the bind
2644   // is retained as a no-op re-entry point.
2644   __ bind(&non_smi);
2645   // CmpObjectType leaves the map of edi in ecx; the instance-type and
2645   // bit-field checks below reuse it.
2645   __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2646   __ j(equal, masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
2647        RelocInfo::CODE_TARGET);
2648   __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
2649   __ j(equal, masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
2650        RelocInfo::CODE_TARGET);
2651 
2652   // Check if target has a [[Call]] internal method.
2653   __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
2654             Immediate(1 << Map::kIsCallable));
2655   __ j(zero, &non_callable);
2656 
2657   __ CmpInstanceType(ecx, JS_PROXY_TYPE);
2658   __ j(not_equal, &non_function);
2659 
2660   // 0. Prepare for tail call if necessary.
2661   if (tail_call_mode == TailCallMode::kAllow) {
2662     PrepareForTailCall(masm, eax, ebx, ecx, edx);
2663   }
2664 
2665   // 1. Runtime fallback for Proxy [[Call]].
2666   __ PopReturnAddressTo(ecx);
2667   __ Push(edi);
2668   __ PushReturnAddressFrom(ecx);
2669   // Increase the arguments size to include the pushed function and the
2670   // existing receiver on the stack.
2671   __ add(eax, Immediate(2));
2672   // Tail-call to the runtime.
2673   __ JumpToExternalReference(
2674       ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
2675 
2676   // 2. Call to something else, which might have a [[Call]] internal method (if
2677   // not we raise an exception).
2678   __ bind(&non_function);
2679   // Overwrite the original receiver with the (original) target.
2680   __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
2681   // Let the "call_as_function_delegate" take care of the rest.
2682   __ LoadGlobalFunction(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, edi);
2683   __ Jump(masm->isolate()->builtins()->CallFunction(
2684               ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
2685           RelocInfo::CODE_TARGET);
2686 
2687   // 3. Call to something that is not callable.
2688   __ bind(&non_callable);
2689   {
2690     FrameScope scope(masm, StackFrame::INTERNAL);
2691     __ Push(edi);
2692     __ CallRuntime(Runtime::kThrowCalledNonCallable);
2693   }
2694 }
2695
2696 // static
// Constructs a JSFunction in edi by tail-calling its function-specific
// construct stub, loaded from the SharedFunctionInfo.
Generate_ConstructFunction(MacroAssembler * masm)2697 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2698   // ----------- S t a t e -------------
2699   //  -- eax : the number of arguments (not including the receiver)
2700   //  -- edx : the new target (checked to be a constructor)
2701   //  -- edi : the constructor to call (checked to be a JSFunction)
2702   // -----------------------------------
2703   __ AssertFunction(edi);
2704 
2705   // Calling convention for function specific ConstructStubs require
2706   // ebx to contain either an AllocationSite or undefined.
2707   __ LoadRoot(ebx, Heap::kUndefinedValueRootIndex);
2708 
2709   // Tail call to the function-specific construct stub (still in the caller
2710   // context at this point).  Jump past the Code header to the first
2710   // instruction of the stub.
2711   __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2712   __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
2713   __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
2714   __ jmp(ecx);
2715 }
2716
2717 // static
// Constructs a JSBoundFunction in edi: pushes [[BoundArguments]], fixes up
// new.target if it was the bound function itself, then tail-calls the
// Construct builtin on the [[BoundTargetFunction]].
Generate_ConstructBoundFunction(MacroAssembler * masm)2718 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2719   // ----------- S t a t e -------------
2720   //  -- eax : the number of arguments (not including the receiver)
2721   //  -- edx : the new target (checked to be a constructor)
2722   //  -- edi : the constructor to call (checked to be a JSBoundFunction)
2723   // -----------------------------------
2724   __ AssertBoundFunction(edi);
2725 
2726   // Push the [[BoundArguments]] onto the stack.
2727   Generate_PushBoundArguments(masm);
2728 
2729   // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2730   {
2731     Label done;
2732     __ cmp(edi, edx);
2733     __ j(not_equal, &done, Label::kNear);
2734     __ mov(edx, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2735     __ bind(&done);
2736   }
2737 
2738   // Construct the [[BoundTargetFunction]] via the Construct builtin.
2739   __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2740   __ mov(ecx, Operand::StaticVariable(
2741                   ExternalReference(Builtins::kConstruct, masm->isolate())));
2742   __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
2743   __ jmp(ecx);
2744 }
2745
2746 // static
// Constructs a JSProxy in edi by tail-calling into the runtime: pushes the
// proxy and new.target as extra arguments below the return address and
// jumps to Runtime::kJSProxyConstruct.
Generate_ConstructProxy(MacroAssembler * masm)2747 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
2748   // ----------- S t a t e -------------
2749   //  -- eax : the number of arguments (not including the receiver)
2750   //  -- edi : the constructor to call (checked to be a JSProxy)
2751   //  -- edx : the new target (either the same as the constructor or
2752   //           the JSFunction on which new was invoked initially)
2753   // -----------------------------------
2754 
2755   // Call into the Runtime for Proxy [[Construct]].
2756   __ PopReturnAddressTo(ecx);
2757   __ Push(edi);
2758   __ Push(edx);
2759   __ PushReturnAddressFrom(ecx);
2760   // Include the pushed new_target, constructor and the receiver.
2761   __ add(eax, Immediate(3));
2762   // Tail-call to the runtime.
2763   __ JumpToExternalReference(
2764       ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
2765 }
2766
2767 // static
// ES6 7.3.13 Construct(F, [argumentsList], [newTarget]) dispatcher: routes
// the target in edi to the matching Construct* builtin and throws for
// targets without a [[Construct]] internal method.
Generate_Construct(MacroAssembler * masm)2768 void Builtins::Generate_Construct(MacroAssembler* masm) {
2769   // ----------- S t a t e -------------
2770   //  -- eax : the number of arguments (not including the receiver)
2771   //  -- edx : the new target (either the same as the constructor or
2772   //           the JSFunction on which new was invoked initially)
2773   //  -- edi : the constructor to call (can be any Object)
2774   // -----------------------------------
2775 
2776   // Check if target is a Smi.
2777   Label non_constructor;
2778   __ JumpIfSmi(edi, &non_constructor, Label::kNear);
2779 
2780   // Dispatch based on instance type.  CmpObjectType leaves the map of edi
2780   // in ecx for the subsequent checks.
2781   __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2782   __ j(equal, masm->isolate()->builtins()->ConstructFunction(),
2783        RelocInfo::CODE_TARGET);
2784 
2785   // Check if target has a [[Construct]] internal method.
2786   __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
2787             Immediate(1 << Map::kIsConstructor));
2788   __ j(zero, &non_constructor, Label::kNear);
2789 
2790   // Only dispatch to bound functions after checking whether they are
2791   // constructors.
2792   __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
2793   __ j(equal, masm->isolate()->builtins()->ConstructBoundFunction(),
2794        RelocInfo::CODE_TARGET);
2795 
2796   // Only dispatch to proxies after checking whether they are constructors.
2797   __ CmpInstanceType(ecx, JS_PROXY_TYPE);
2798   __ j(equal, masm->isolate()->builtins()->ConstructProxy(),
2799        RelocInfo::CODE_TARGET);
2800 
2801   // Called Construct on an exotic Object with a [[Construct]] internal method.
2802   {
2803     // Overwrite the original receiver with the (original) target.
2804     __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
2805     // Let the "call_as_constructor_delegate" take care of the rest.
2806     __ LoadGlobalFunction(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, edi);
2807     __ Jump(masm->isolate()->builtins()->CallFunction(),
2808             RelocInfo::CODE_TARGET);
2809   }
2810 
2811   // Called Construct on an Object that doesn't have a [[Construct]] internal
2812   // method.
2813   __ bind(&non_constructor);
2814   __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
2815           RelocInfo::CODE_TARGET);
2816 }
2817
2818 // static
// Allocation stub: smi-tags the requested size in edx, pushes it as the
// runtime argument (below the return address) and tail-calls
// Runtime::kAllocateInNewSpace.
Generate_AllocateInNewSpace(MacroAssembler * masm)2819 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
2820   // ----------- S t a t e -------------
2821   //  -- edx    : requested object size (untagged)
2822   //  -- esp[0] : return address
2823   // -----------------------------------
2824   __ SmiTag(edx);
2825   __ PopReturnAddressTo(ecx);
2826   __ Push(edx);
2827   __ PushReturnAddressFrom(ecx);
2828   // esi (context register) is set to Smi zero — presumably the "no
2828   // context" value for this runtime call; TODO(review): confirm.
2828   __ Move(esi, Smi::kZero);
2829   __ TailCallRuntime(Runtime::kAllocateInNewSpace);
2830 }
2831
2832 // static
// Allocation stub: like Generate_AllocateInNewSpace, but additionally
// pushes an OLD_SPACE target-space flag and tail-calls
// Runtime::kAllocateInTargetSpace.
Generate_AllocateInOldSpace(MacroAssembler * masm)2833 void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
2834   // ----------- S t a t e -------------
2835   //  -- edx    : requested object size (untagged)
2836   //  -- esp[0] : return address
2837   // -----------------------------------
2838   __ SmiTag(edx);
2839   __ PopReturnAddressTo(ecx);
2840   __ Push(edx);
2841   __ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
2842   __ PushReturnAddressFrom(ecx);
2843   // esi (context register) is set to Smi zero — presumably the "no
2843   // context" value for this runtime call; TODO(review): confirm.
2843   __ Move(esi, Smi::kZero);
2844   __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
2845 }
2846
2847 // static
// Abort stub: pushes the smi message id in edx as the runtime argument and
// tail-calls Runtime::kAbort (does not return).
Generate_Abort(MacroAssembler * masm)2848 void Builtins::Generate_Abort(MacroAssembler* masm) {
2849   // ----------- S t a t e -------------
2850   //  -- edx    : message_id as Smi
2851   //  -- esp[0] : return address
2852   // -----------------------------------
2853   __ PopReturnAddressTo(ecx);
2854   __ Push(edx);
2855   __ PushReturnAddressFrom(ecx);
2856   // esi (context register) is set to Smi zero — presumably the "no
2856   // context" value for this runtime call; TODO(review): confirm.
2856   __ Move(esi, Smi::kZero);
2857   __ TailCallRuntime(Runtime::kAbort);
2858 }
2859
// Adapts the actual argument count (eax) to the callee's expected count
// (ebx) before invoking the callee's code: copies arguments into a new
// adaptor frame, padding missing ones with undefined, or skips adaptation
// entirely for the kDontAdaptArgumentsSentinel.
//
// Fix: the label address-of expressions were corrupted by HTML-entity
// mangling — "__ bind(©);" et al. were originally "__ bind(&copy);"
// ("&copy;" is the HTML entity for the copyright sign).  Restored the
// "&copy" label references at the four affected sites.
Generate_ArgumentsAdaptorTrampoline(MacroAssembler * masm)2860 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2861   // ----------- S t a t e -------------
2862   //  -- eax : actual number of arguments
2863   //  -- ebx : expected number of arguments
2864   //  -- edx : new target (passed through to callee)
2865   //  -- edi : function (passed through to callee)
2866   // -----------------------------------
2867 
2868   Label invoke, dont_adapt_arguments, stack_overflow;
2869   __ IncrementCounter(masm->isolate()->counters()->arguments_adaptors(), 1);
2870 
2871   Label enough, too_few;
2872   __ cmp(eax, ebx);
2873   __ j(less, &too_few);
2874   __ cmp(ebx, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
2875   __ j(equal, &dont_adapt_arguments);
2876 
2877   {  // Enough parameters: Actual >= expected.
2878     __ bind(&enough);
2879     EnterArgumentsAdaptorFrame(masm);
2880     // edi is used as a scratch register. It should be restored from the frame
2881     // when needed.
2882     Generate_StackOverflowCheck(masm, ebx, ecx, edi, &stack_overflow);
2883 
2884     // Copy receiver and all expected arguments, from highest caller slot
2884     // downwards.
2885     const int offset = StandardFrameConstants::kCallerSPOffset;
2886     __ lea(edi, Operand(ebp, eax, times_4, offset));
2887     __ mov(eax, -1);  // account for receiver
2888 
2889     Label copy;
2890     __ bind(&copy);
2891     __ inc(eax);
2892     __ push(Operand(edi, 0));
2893     __ sub(edi, Immediate(kPointerSize));
2894     __ cmp(eax, ebx);
2895     __ j(less, &copy);
2896     // eax now contains the expected number of arguments.
2897     __ jmp(&invoke);
2898   }
2899 
2900   {  // Too few parameters: Actual < expected.
2901     __ bind(&too_few);
2902     EnterArgumentsAdaptorFrame(masm);
2903     // edi is used as a scratch register. It should be restored from the frame
2904     // when needed.
2905     Generate_StackOverflowCheck(masm, ebx, ecx, edi, &stack_overflow);
2906 
2907     // Remember expected arguments in ecx (restored into eax after the fill
2907     // loop below clobbers eax as a counter).
2908     __ mov(ecx, ebx);
2909 
2910     // Copy receiver and all actual arguments.
2911     const int offset = StandardFrameConstants::kCallerSPOffset;
2912     __ lea(edi, Operand(ebp, eax, times_4, offset));
2913     // ebx = expected - actual.
2914     __ sub(ebx, eax);
2915     // eax = -actual - 1
2916     __ neg(eax);
2917     __ sub(eax, Immediate(1));
2918 
2919     Label copy;
2920     __ bind(&copy);
2921     __ inc(eax);
2922     __ push(Operand(edi, 0));
2923     __ sub(edi, Immediate(kPointerSize));
2924     __ test(eax, eax);
2925     __ j(not_zero, &copy);
2926 
2927     // Fill remaining expected arguments with undefined values.
2928     Label fill;
2929     __ bind(&fill);
2930     __ inc(eax);
2931     __ push(Immediate(masm->isolate()->factory()->undefined_value()));
2932     __ cmp(eax, ebx);
2933     __ j(less, &fill);
2934 
2935     // Restore expected arguments.
2936     __ mov(eax, ecx);
2937   }
2938 
2939   // Call the entry point.
2940   __ bind(&invoke);
2941   // Restore function pointer.
2942   __ mov(edi, Operand(ebp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
2943   // eax : expected number of arguments
2944   // edx : new target (passed through to callee)
2945   // edi : function (passed through to callee)
2946   __ mov(ecx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
2947   __ call(ecx);
2948 
2949   // Store offset of return address for deoptimizer.
2950   masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2951 
2952   // Leave frame and return.
2953   LeaveArgumentsAdaptorFrame(masm);
2954   __ ret(0);
2955 
2956   // -------------------------------------------
2957   // Dont adapt arguments.
2958   // -------------------------------------------
2959   __ bind(&dont_adapt_arguments);
2960   __ mov(ecx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
2961   __ jmp(ecx);
2962 
2963   __ bind(&stack_overflow);
2964   {
2965     FrameScope frame(masm, StackFrame::MANUAL);
2966     __ CallRuntime(Runtime::kThrowStackOverflow);
2967     __ int3();
2968   }
2969 }
2970
// Walks the receiver's (hidden) prototype chain looking for a constructor
// whose signature matches that of function_template_info; jumps to
// receiver_check_failed if none is found before the chain runs out of
// hidden prototypes.  NOTE: the `receiver` register is clobbered — during
// the walk it holds maps and prototypes rather than the original receiver.
CompatibleReceiverCheck(MacroAssembler * masm,Register receiver,Register function_template_info,Register scratch0,Register scratch1,Label * receiver_check_failed)2971 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
2972                                     Register function_template_info,
2973                                     Register scratch0, Register scratch1,
2974                                     Label* receiver_check_failed) {
2975   // If there is no signature, return the holder.
2976   __ CompareRoot(FieldOperand(function_template_info,
2977                               FunctionTemplateInfo::kSignatureOffset),
2978                  Heap::kUndefinedValueRootIndex);
2979   Label receiver_check_passed;
2980   __ j(equal, &receiver_check_passed, Label::kNear);
2981 
2982   // Walk the prototype chain.
2983   __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
2984   Label prototype_loop_start;
2985   __ bind(&prototype_loop_start);
2986 
2987   // Get the constructor, if any.
2988   __ GetMapConstructor(scratch0, scratch0, scratch1);
2989   __ CmpInstanceType(scratch1, JS_FUNCTION_TYPE);
2990   Label next_prototype;
2991   __ j(not_equal, &next_prototype, Label::kNear);
2992 
2993   // Get the constructor's signature.
2994   __ mov(scratch0,
2995          FieldOperand(scratch0, JSFunction::kSharedFunctionInfoOffset));
2996   __ mov(scratch0,
2997          FieldOperand(scratch0, SharedFunctionInfo::kFunctionDataOffset));
2998 
2999   // Loop through the chain of inheriting function templates.
3000   Label function_template_loop;
3001   __ bind(&function_template_loop);
3002 
3003   // If the signatures match, we have a compatible receiver.
3004   __ cmp(scratch0, FieldOperand(function_template_info,
3005                                 FunctionTemplateInfo::kSignatureOffset));
3006   __ j(equal, &receiver_check_passed, Label::kNear);
3007 
3008   // If the current type is not a FunctionTemplateInfo, load the next prototype
3009   // in the chain.
3010   __ JumpIfSmi(scratch0, &next_prototype, Label::kNear);
3011   __ CmpObjectType(scratch0, FUNCTION_TEMPLATE_INFO_TYPE, scratch1);
3012   __ j(not_equal, &next_prototype, Label::kNear);
3013 
3014   // Otherwise load the parent function template and iterate.
3015   __ mov(scratch0,
3016          FieldOperand(scratch0, FunctionTemplateInfo::kParentTemplateOffset));
3017   __ jmp(&function_template_loop, Label::kNear);
3018 
3019   // Load the next prototype.  Only hidden prototypes are followed; a map
3019   // without the HasHiddenPrototype bit ends the walk with a failure.
3020   __ bind(&next_prototype);
3021   __ mov(receiver, FieldOperand(receiver, HeapObject::kMapOffset));
3022   __ test(FieldOperand(receiver, Map::kBitField3Offset),
3023           Immediate(Map::HasHiddenPrototype::kMask));
3024   __ j(zero, receiver_check_failed);
3025 
3026   __ mov(receiver, FieldOperand(receiver, Map::kPrototypeOffset));
3027   __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
3028   // Iterate.
3029   __ jmp(&prototype_loop_start, Label::kNear);
3030 
3031   __ bind(&receiver_check_passed);
3032 }
3033
// Fast path for API calls: verifies the receiver is compatible with the
// callee's FunctionTemplateInfo signature, then jumps straight to the fast
// handler code; on failure, drops the arguments and throws IllegalInvocation.
Generate_HandleFastApiCall(MacroAssembler * masm)3034 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
3035   // ----------- S t a t e -------------
3036   //  -- eax                : number of arguments (not including the receiver)
3037   //  -- edi                : callee
3038   //  -- esi                : context
3039   //  -- esp[0]             : return address
3040   //  -- esp[4]             : last argument
3041   //  -- ...
3042   //  -- esp[eax * 4]       : first argument
3043   //  -- esp[(eax + 1) * 4] : receiver
3044   // -----------------------------------
3045 
3046   // Load the FunctionTemplateInfo.
3047   __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
3048   __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kFunctionDataOffset));
3049 
3050   // Do the compatible receiver check.  eax is saved across the check
3050   // because it is handed to CompatibleReceiverCheck as a scratch register.
3051   Label receiver_check_failed;
3052   __ mov(ecx, Operand(esp, eax, times_pointer_size, kPCOnStackSize));
3053   __ Push(eax);
3054   CompatibleReceiverCheck(masm, ecx, ebx, edx, eax, &receiver_check_failed);
3055   __ Pop(eax);
3056   // Get the callback offset from the FunctionTemplateInfo, and jump to the
3057   // beginning of the code.
3058   __ mov(edx, FieldOperand(ebx, FunctionTemplateInfo::kCallCodeOffset));
3059   __ mov(edx, FieldOperand(edx, CallHandlerInfo::kFastHandlerOffset));
3060   __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
3061   __ jmp(edx);
3062 
3063   // Compatible receiver check failed: pop return address, arguments and
3064   // receiver and throw an Illegal Invocation exception.
3065   __ bind(&receiver_check_failed);
3066   __ Pop(eax);
3067   __ PopReturnAddressTo(ebx);
3068   // (eax + 1) * kPointerSize bytes cover the arguments plus the receiver.
3068   __ lea(eax, Operand(eax, times_pointer_size, 1 * kPointerSize));
3069   __ add(esp, eax);
3070   __ PushReturnAddressFrom(ebx);
3071   {
3072     FrameScope scope(masm, StackFrame::INTERNAL);
3073     __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
3074   }
3075 }
3076
// Shared body for the on-stack-replacement builtins: compiles the current
// frame's function for OSR, then redirects the return address to the OSR
// entry point inside the newly compiled code (or simply returns if no OSR
// code was produced).  has_handler_frame indicates an extra handler frame
// sits between ebp and the JavaScript frame (the bytecode/interpreter case).
Generate_OnStackReplacementHelper(MacroAssembler * masm,bool has_handler_frame)3077 static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
3078                                               bool has_handler_frame) {
3079   // Lookup the function in the JavaScript frame.
3080   if (has_handler_frame) {
3081     __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3082     __ mov(eax, Operand(eax, JavaScriptFrameConstants::kFunctionOffset));
3083   } else {
3084     __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3085   }
3086 
3087   {
3088     FrameScope scope(masm, StackFrame::INTERNAL);
3089     // Pass function as argument.
3090     __ push(eax);
3091     __ CallRuntime(Runtime::kCompileForOnStackReplacement);
3092   }
3093 
3094   Label skip;
3095   // If the code object is null, just return to the caller.
3096   __ cmp(eax, Immediate(0));
3097   __ j(not_equal, &skip, Label::kNear);
3098   __ ret(0);
3099 
3100   __ bind(&skip);
3101 
3102   // Drop any potential handler frame that is be sitting on top of the actual
3103   // JavaScript frame. This is the case then OSR is triggered from bytecode.
3104   if (has_handler_frame) {
3105     __ leave();
3106   }
3107 
3108   // Load deoptimization data from the code object.
3109   __ mov(ebx, Operand(eax, Code::kDeoptimizationDataOffset - kHeapObjectTag));
3110 
3111   // Load the OSR entrypoint offset from the deoptimization data.
3112   __ mov(ebx, Operand(ebx, FixedArray::OffsetOfElementAt(
3113                                DeoptimizationInputData::kOsrPcOffsetIndex) -
3114                                kHeapObjectTag));
3115   __ SmiUntag(ebx);
3116 
3117   // Compute the target address = code_obj + header_size + osr_offset
3118   __ lea(eax, Operand(eax, ebx, times_1, Code::kHeaderSize - kHeapObjectTag));
3119 
3120   // Overwrite the return address on the stack.
3121   __ mov(Operand(esp, 0), eax);
3122 
3123   // And "return" to the OSR entry point of the function.
3124   __ ret(0);
3125 }
3126
// OSR entry from full-codegen: no handler frame on top of the JS frame.
Generate_OnStackReplacement(MacroAssembler * masm)3127 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
3128   Generate_OnStackReplacementHelper(masm, false);
3129 }
3130
// OSR entry from the interpreter: a handler frame sits above the JS frame.
Generate_InterpreterOnStackReplacement(MacroAssembler * masm)3131 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
3132   Generate_OnStackReplacementHelper(masm, true);
3133 }
3134
3135 #undef __
3136 } // namespace internal
3137 } // namespace v8
3138
3139 #endif // V8_TARGET_ARCH_IA32
3140