// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

5 #include "src/v8.h"
6 
7 #if V8_TARGET_ARCH_X87
8 
9 #include "src/code-factory.h"
10 #include "src/code-stubs.h"
11 #include "src/codegen.h"
12 #include "src/compiler.h"
13 #include "src/debug.h"
14 #include "src/full-codegen.h"
15 #include "src/ic/ic.h"
16 #include "src/isolate-inl.h"
17 #include "src/parser.h"
18 #include "src/scopes.h"
19 
20 namespace v8 {
21 namespace internal {
22 
23 #define __ ACCESS_MASM(masm_)
24 
25 
// Records the position of an inlined smi-check jump so the IC machinery can
// later patch it in place (the jc/jnc emitted here is rewritten to jz/jnz,
// see the note above EmitJump).  EmitPatchInfo encodes the distance back to
// the patch site in the immediate of a dummy "test eax, <delta>" instruction;
// a single nop signals that no patchable smi check was inlined.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    // A bound patch site must be accompanied by emitted patch info, and
    // vice versa.
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // Tests the smi tag of |reg| and jumps to |target| when it is NOT a smi.
  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  // Tests the smi tag of |reg| and jumps to |target| when it IS a smi.
  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  // Emits the patch descriptor: a "test eax, <delta>" whose immediate is the
  // distance from the patch site to here (asserted to fit in one byte), or a
  // single nop when no patch site was emitted.
  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      DCHECK(is_uint8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  // Only the carry conditions are patchable; at most one jump per site.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;  // Whether EmitPatchInfo has recorded the site.
#endif
};
80 
81 
// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them.  The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called (i.e. ourselves)
//   o esi: our context
//   o ebp: our caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-x87.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  // Allocate the exception handler table for this function up front.
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  // The profiling counter cell starts at the full interrupt budget; it is
  // decremented at back edges and returns (see EmitProfilingCounterDecrement).
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  // Break into the debugger at entry when --stop-at=<function name> matches.
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    // +1 for return address.
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));

    // Only an undefined receiver is replaced.
    __ cmp(ecx, isolate()->factory()->undefined_value());
    __ j(not_equal, &ok, Label::kNear);

    __ mov(ecx, GlobalObjectOperand());
    __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalProxyOffset));

    __ mov(Operand(esp, receiver_offset), ecx);

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!info->function()->is_generator() || locals_count == 0);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      // For large frames, check against the real stack limit before touching
      // the stack so a genuine overflow is reported instead of faulting.
      if (locals_count >= 128) {
        Label ok;
        __ mov(ecx, esp);
        __ sub(ecx, Immediate(locals_count * kPointerSize));
        ExternalReference stack_limit =
            ExternalReference::address_of_real_stack_limit(isolate());
        __ cmp(ecx, Operand::StaticVariable(stack_limit));
        __ j(above_equal, &ok, Label::kNear);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      // Fill all locals with undefined, batching pushes in groups of
      // kMaxPushes per loop iteration to bound generated code size.
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(ecx, loop_iterations);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(eax);
        }
        __ dec(ecx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i  = 0; i < remaining; i++) {
        __ push(eax);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    // Argument to NewContext is the function, which is still in edi.
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ push(edi);
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(edi);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    // The calls above clobber edi, so the function must be reloaded from the
    // frame if it is needed later.
    function_in_register = false;
    // Context is returned in eax.  It replaces the context passed to us.
    // It's saved in the stack and kept live in esi.
    __ mov(esi, eax);
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(esi, context_offset, eax, ebx,
                                    kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ push(edi);
    } else {
      __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ lea(edx,
           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
    __ push(edx);
    __ push(Immediate(Smi::FromInt(num_parameters)));
    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      ExternalReference stack_limit
          = ExternalReference::address_of_stack_limit(isolate());
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      __ j(above_equal, &ok, Label::kNear);
      __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}
320 
321 
// Resets the accumulator register (eax) to the smi zero.
void FullCodeGenerator::ClearAccumulator() {
  __ Move(eax, Immediate(Smi::FromInt(0)));
}
325 
326 
// Subtracts |delta| (as a smi) from the profiling counter cell.  Clobbers
// ebx; the subsequent flags are used by callers to test for budget exhaustion.
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}
332 
333 
EmitProfilingCounterReset()334 void FullCodeGenerator::EmitProfilingCounterReset() {
335   int reset_value = FLAG_interrupt_budget;
336   __ mov(ebx, Immediate(profiling_counter_));
337   __ mov(FieldOperand(ebx, Cell::kValueOffset),
338          Immediate(Smi::FromInt(reset_value)));
339 }
340 
341 
// Emitted at each loop back edge: decrements the profiling counter by a
// weight derived from the back-edge distance and, when the counter goes
// non-positive, calls the InterruptCheck builtin before resetting it.
void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  // Larger loop bodies consume more of the budget per iteration, clamped to
  // [1, kMaxBackEdgeWeight].
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
369 
370 
// Emits the function's return sequence.  The first call binds the common
// return label and emits the full sequence; later calls simply jump to it.
// The return value is expected in (and preserved through) eax.
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    // Common return label
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ j(positive, &ok, Label::kNear);
    // Preserve the return value in eax across the interrupt check.
    __ push(eax);
    __ call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(eax);
    EmitProfilingCounterReset();
    __ bind(&ok);
#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    SetSourcePosition(function()->end_position() - 1);
    __ RecordJSReturn();
    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.
    __ mov(esp, ebp);
    int no_frame_start = masm_->pc_offset();
    __ pop(ebp);

    // Pop the arguments and the receiver in a single ret.
    int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
    __ Ret(arguments_bytes, ecx);
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    DCHECK(Assembler::kJSReturnSequenceLength <=
           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
    info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
  }
}
422 
423 
// Effect context: a variable reference has no side effects, so nothing is
// emitted.
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}
427 
428 
// Accumulator context: load the variable's value into the result register.
void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}
433 
434 
Plug(Variable * var) const435 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
436   DCHECK(var->IsStackAllocated() || var->IsContextSlot());
437   MemOperand operand = codegen()->VarOperand(var, result_register());
438   // Memory operands can be pushed directly.
439   __ push(operand);
440 }
441 
442 
// Test context: load the variable into the accumulator and branch on it.
void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
449 
450 
// Root-list values are never plugged on this port.
void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}
454 
455 
// Root-list values are never plugged on this port.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}
460 
461 
// Root-list values are never plugged on this port.
void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}
466 
467 
// Root-list values are never plugged on this port.
void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}
471 
472 
// Effect context: a literal has no side effects, so nothing is emitted.
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}
475 
476 
Plug(Handle<Object> lit) const477 void FullCodeGenerator::AccumulatorValueContext::Plug(
478     Handle<Object> lit) const {
479   if (lit->IsSmi()) {
480     __ SafeMove(result_register(), Immediate(lit));
481   } else {
482     __ Move(result_register(), Immediate(lit));
483   }
484 }
485 
486 
Plug(Handle<Object> lit) const487 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
488   if (lit->IsSmi()) {
489     __ SafePush(Immediate(lit));
490   } else {
491     __ push(Immediate(lit));
492   }
493 }
494 
495 
// Test context: the truthiness of most literals is known statically, so
// branch directly to the true or false label instead of emitting a runtime
// test.  Only literals whose truthiness cannot be decided here (e.g. heap
// numbers) fall through to the generic DoTest path.
void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    // Statically falsy values.
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    // Statically truthy values (objects are always truthy here).
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    // Strings: only the empty string is falsy.
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    // Smis: only zero is falsy.
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}
524 
525 
// Effect context: the value in |reg| is not needed; just pop |count| slots.
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}
531 
532 
// Accumulator context: pop |count| slots and leave |reg|'s value in the
// result register.
void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}
540 
541 
DropAndPlug(int count,Register reg) const542 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
543                                                        Register reg) const {
544   DCHECK(count > 0);
545   if (count > 1) __ Drop(count - 1);
546   __ mov(Operand(esp, 0), reg);
547 }
548 
549 
// Test context: pop |count| slots, move |reg| into the accumulator, and
// branch on its boolean value.
void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
559 
560 
// Effect context: the boolean outcome is not needed, so both labels must be
// the same join point; just bind it.
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}
566 
567 
Plug(Label * materialize_true,Label * materialize_false) const568 void FullCodeGenerator::AccumulatorValueContext::Plug(
569     Label* materialize_true,
570     Label* materialize_false) const {
571   Label done;
572   __ bind(materialize_true);
573   __ mov(result_register(), isolate()->factory()->true_value());
574   __ jmp(&done, Label::kNear);
575   __ bind(materialize_false);
576   __ mov(result_register(), isolate()->factory()->false_value());
577   __ bind(&done);
578 }
579 
580 
Plug(Label * materialize_true,Label * materialize_false) const581 void FullCodeGenerator::StackValueContext::Plug(
582     Label* materialize_true,
583     Label* materialize_false) const {
584   Label done;
585   __ bind(materialize_true);
586   __ push(Immediate(isolate()->factory()->true_value()));
587   __ jmp(&done, Label::kNear);
588   __ bind(materialize_false);
589   __ push(Immediate(isolate()->factory()->false_value()));
590   __ bind(&done);
591 }
592 
593 
// Test context: control has already been routed to the test's own labels, so
// the materialization labels must be those labels and no code is needed.
void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}
599 
600 
// Effect context: a boolean constant has no side effects; emit nothing.
void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}
603 
604 
Plug(bool flag) const605 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
606   Handle<Object> value = flag
607       ? isolate()->factory()->true_value()
608       : isolate()->factory()->false_value();
609   __ mov(result_register(), value);
610 }
611 
612 
Plug(bool flag) const613 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
614   Handle<Object> value = flag
615       ? isolate()->factory()->true_value()
616       : isolate()->factory()->false_value();
617   __ push(Immediate(value));
618 }
619 
620 
Plug(bool flag) const621 void FullCodeGenerator::TestContext::Plug(bool flag) const {
622   codegen()->PrepareForBailoutBeforeSplit(condition(),
623                                           true,
624                                           true_label_,
625                                           false_label_);
626   if (flag) {
627     if (true_label_ != fall_through_) __ jmp(true_label_);
628   } else {
629     if (false_label_ != fall_through_) __ jmp(false_label_);
630   }
631 }
632 
633 
DoTest(Expression * condition,Label * if_true,Label * if_false,Label * fall_through)634 void FullCodeGenerator::DoTest(Expression* condition,
635                                Label* if_true,
636                                Label* if_false,
637                                Label* fall_through) {
638   Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
639   CallIC(ic, condition->test_id());
640   __ test(result_register(), result_register());
641   // The stub returns nonzero for true.
642   Split(not_zero, if_true, if_false, fall_through);
643 }
644 
645 
Split(Condition cc,Label * if_true,Label * if_false,Label * fall_through)646 void FullCodeGenerator::Split(Condition cc,
647                               Label* if_true,
648                               Label* if_false,
649                               Label* fall_through) {
650   if (if_false == fall_through) {
651     __ j(cc, if_true);
652   } else if (if_true == fall_through) {
653     __ j(NegateCondition(cc), if_false);
654   } else {
655     __ j(cc, if_true);
656     __ jmp(if_false);
657   }
658 }
659 
660 
StackOperand(Variable * var)661 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
662   DCHECK(var->IsStackAllocated());
663   // Offset is negative because higher indexes are at lower addresses.
664   int offset = -var->index() * kPointerSize;
665   // Adjust by a (parameter or local) base offset.
666   if (var->IsParameter()) {
667     offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
668   } else {
669     offset += JavaScriptFrameConstants::kLocal0Offset;
670   }
671   return Operand(ebp, offset);
672 }
673 
674 
VarOperand(Variable * var,Register scratch)675 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
676   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
677   if (var->IsContextSlot()) {
678     int context_chain_length = scope()->ContextChainLength(var->scope());
679     __ LoadContext(scratch, context_chain_length);
680     return ContextOperand(scratch, var->index());
681   } else {
682     return StackOperand(var);
683   }
684 }
685 
686 
GetVar(Register dest,Variable * var)687 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
688   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
689   MemOperand location = VarOperand(var, dest);
690   __ mov(dest, location);
691 }
692 
693 
// Stores |src| into |var|'s slot.  |scratch0| and |scratch1| must be distinct
// from |src| and from each other; for context slots they are consumed by the
// write barrier.
void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    // esi holds the context and must not be clobbered by the barrier.
    DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}
712 
713 
// Records a bailout point for |expr| with the result on top of stack, used
// when a test is about to split control flow.  When |should_normalize| is
// set, the bailout re-entry path compares the materialized value against
// true and re-splits to |if_true|/|if_false|; the normal path skips that.
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}
732 
733 
// Debug-mode sanity check that a declaration is being emitted in the correct
// (non-with, non-catch) context.  Clobbers ebx.
void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}
746 
747 
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  // These modes require the slot to start out holding the hole so that
  // use-before-initialization can be detected.
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // Global declarations are accumulated as (name, initial value) pairs
      // and declared in one batch later.
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(), zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      // Stack-allocated slot: only hole-initialization needs code.
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case Variable::CONTEXT:
      // Context slot: only hole-initialization needs code.
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      // Dynamic lookup: declare the slot via the runtime.  Arguments pushed:
      // context, name, attributes, initial value.
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ push(esi);
      __ push(Immediate(variable->name()));
      // VariableDeclaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ push(Immediate(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ push(Immediate(isolate()->factory()->the_hole_value()));
      } else {
        __ push(Immediate(Smi::FromInt(0)));  // Indicates no initial value.
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}
808 
809 
VisitFunctionDeclaration(FunctionDeclaration * declaration)810 void FullCodeGenerator::VisitFunctionDeclaration(
811     FunctionDeclaration* declaration) {
812   VariableProxy* proxy = declaration->proxy();
813   Variable* variable = proxy->var();
814   switch (variable->location()) {
815     case Variable::UNALLOCATED: {
816       globals_->Add(variable->name(), zone());
817       Handle<SharedFunctionInfo> function =
818           Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
819       // Check for stack-overflow exception.
820       if (function.is_null()) return SetStackOverflow();
821       globals_->Add(function, zone());
822       break;
823     }
824 
825     case Variable::PARAMETER:
826     case Variable::LOCAL: {
827       Comment cmnt(masm_, "[ FunctionDeclaration");
828       VisitForAccumulatorValue(declaration->fun());
829       __ mov(StackOperand(variable), result_register());
830       break;
831     }
832 
833     case Variable::CONTEXT: {
834       Comment cmnt(masm_, "[ FunctionDeclaration");
835       EmitDebugCheckDeclarationContext(variable);
836       VisitForAccumulatorValue(declaration->fun());
837       __ mov(ContextOperand(esi, variable->index()), result_register());
838       // We know that we have written a function, which is not a smi.
839       __ RecordWriteContextSlot(esi, Context::SlotOffset(variable->index()),
840                                 result_register(), ecx, kDontSaveFPRegs,
841                                 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
842       PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
843       break;
844     }
845 
846     case Variable::LOOKUP: {
847       Comment cmnt(masm_, "[ FunctionDeclaration");
848       __ push(esi);
849       __ push(Immediate(variable->name()));
850       __ push(Immediate(Smi::FromInt(NONE)));
851       VisitForStackValue(declaration->fun());
852       __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
853       break;
854     }
855   }
856 }
857 
858 
VisitModuleDeclaration(ModuleDeclaration * declaration)859 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
860   Variable* variable = declaration->proxy()->var();
861   DCHECK(variable->location() == Variable::CONTEXT);
862   DCHECK(variable->interface()->IsFrozen());
863 
864   Comment cmnt(masm_, "[ ModuleDeclaration");
865   EmitDebugCheckDeclarationContext(variable);
866 
867   // Load instance object.
868   __ LoadContext(eax, scope_->ContextChainLength(scope_->GlobalScope()));
869   __ mov(eax, ContextOperand(eax, variable->interface()->Index()));
870   __ mov(eax, ContextOperand(eax, Context::EXTENSION_INDEX));
871 
872   // Assign it.
873   __ mov(ContextOperand(esi, variable->index()), eax);
874   // We know that we have written a module, which is not a smi.
875   __ RecordWriteContextSlot(esi, Context::SlotOffset(variable->index()), eax,
876                             ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
877                             OMIT_SMI_CHECK);
878   PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
879 
880   // Traverse into body.
881   Visit(declaration->module());
882 }
883 
884 
VisitImportDeclaration(ImportDeclaration * declaration)885 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
886   VariableProxy* proxy = declaration->proxy();
887   Variable* variable = proxy->var();
888   switch (variable->location()) {
889     case Variable::UNALLOCATED:
890       // TODO(rossberg)
891       break;
892 
893     case Variable::CONTEXT: {
894       Comment cmnt(masm_, "[ ImportDeclaration");
895       EmitDebugCheckDeclarationContext(variable);
896       // TODO(rossberg)
897       break;
898     }
899 
900     case Variable::PARAMETER:
901     case Variable::LOCAL:
902     case Variable::LOOKUP:
903       UNREACHABLE();
904   }
905 }
906 
907 
VisitExportDeclaration(ExportDeclaration * declaration)908 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
909   // TODO(rossberg)
910 }
911 
912 
DeclareGlobals(Handle<FixedArray> pairs)913 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
914   // Call the runtime to declare the globals.
915   __ push(esi);  // The context is the first argument.
916   __ Push(pairs);
917   __ Push(Smi::FromInt(DeclareGlobalsFlags()));
918   __ CallRuntime(Runtime::kDeclareGlobals, 3);
919   // Return value is ignored.
920 }
921 
922 
DeclareModules(Handle<FixedArray> descriptions)923 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
924   // Call the runtime to declare the modules.
925   __ Push(descriptions);
926   __ CallRuntime(Runtime::kDeclareModules, 1);
927   // Return value is ignored.
928 }
929 
930 
VisitSwitchStatement(SwitchStatement * stmt)931 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
932   Comment cmnt(masm_, "[ SwitchStatement");
933   Breakable nested_statement(this, stmt);
934   SetStatementPosition(stmt);
935 
936   // Keep the switch value on the stack until a case matches.
937   VisitForStackValue(stmt->tag());
938   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
939 
940   ZoneList<CaseClause*>* clauses = stmt->cases();
941   CaseClause* default_clause = NULL;  // Can occur anywhere in the list.
942 
943   Label next_test;  // Recycled for each test.
944   // Compile all the tests with branches to their bodies.
945   for (int i = 0; i < clauses->length(); i++) {
946     CaseClause* clause = clauses->at(i);
947     clause->body_target()->Unuse();
948 
949     // The default is not a test, but remember it as final fall through.
950     if (clause->is_default()) {
951       default_clause = clause;
952       continue;
953     }
954 
955     Comment cmnt(masm_, "[ Case comparison");
956     __ bind(&next_test);
957     next_test.Unuse();
958 
959     // Compile the label expression.
960     VisitForAccumulatorValue(clause->label());
961 
962     // Perform the comparison as if via '==='.
963     __ mov(edx, Operand(esp, 0));  // Switch value.
964     bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
965     JumpPatchSite patch_site(masm_);
966     if (inline_smi_code) {
967       Label slow_case;
968       __ mov(ecx, edx);
969       __ or_(ecx, eax);
970       patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
971 
972       __ cmp(edx, eax);
973       __ j(not_equal, &next_test);
974       __ Drop(1);  // Switch value is no longer needed.
975       __ jmp(clause->body_target());
976       __ bind(&slow_case);
977     }
978 
979     // Record position before stub call for type feedback.
980     SetSourcePosition(clause->position());
981     Handle<Code> ic =
982         CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
983     CallIC(ic, clause->CompareId());
984     patch_site.EmitPatchInfo();
985 
986     Label skip;
987     __ jmp(&skip, Label::kNear);
988     PrepareForBailout(clause, TOS_REG);
989     __ cmp(eax, isolate()->factory()->true_value());
990     __ j(not_equal, &next_test);
991     __ Drop(1);
992     __ jmp(clause->body_target());
993     __ bind(&skip);
994 
995     __ test(eax, eax);
996     __ j(not_equal, &next_test);
997     __ Drop(1);  // Switch value is no longer needed.
998     __ jmp(clause->body_target());
999   }
1000 
1001   // Discard the test value and jump to the default if present, otherwise to
1002   // the end of the statement.
1003   __ bind(&next_test);
1004   __ Drop(1);  // Switch value is no longer needed.
1005   if (default_clause == NULL) {
1006     __ jmp(nested_statement.break_label());
1007   } else {
1008     __ jmp(default_clause->body_target());
1009   }
1010 
1011   // Compile all the case bodies.
1012   for (int i = 0; i < clauses->length(); i++) {
1013     Comment cmnt(masm_, "[ Case body");
1014     CaseClause* clause = clauses->at(i);
1015     __ bind(clause->body_target());
1016     PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1017     VisitStatements(clause->statements());
1018   }
1019 
1020   __ bind(nested_statement.break_label());
1021   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1022 }
1023 
1024 
VisitForInStatement(ForInStatement * stmt)1025 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1026   Comment cmnt(masm_, "[ ForInStatement");
1027   int slot = stmt->ForInFeedbackSlot();
1028 
1029   SetStatementPosition(stmt);
1030 
1031   Label loop, exit;
1032   ForIn loop_statement(this, stmt);
1033   increment_loop_depth();
1034 
1035   // Get the object to enumerate over. If the object is null or undefined, skip
1036   // over the loop.  See ECMA-262 version 5, section 12.6.4.
1037   VisitForAccumulatorValue(stmt->enumerable());
1038   __ cmp(eax, isolate()->factory()->undefined_value());
1039   __ j(equal, &exit);
1040   __ cmp(eax, isolate()->factory()->null_value());
1041   __ j(equal, &exit);
1042 
1043   PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1044 
1045   // Convert the object to a JS object.
1046   Label convert, done_convert;
1047   __ JumpIfSmi(eax, &convert, Label::kNear);
1048   __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
1049   __ j(above_equal, &done_convert, Label::kNear);
1050   __ bind(&convert);
1051   __ push(eax);
1052   __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1053   __ bind(&done_convert);
1054   __ push(eax);
1055 
1056   // Check for proxies.
1057   Label call_runtime, use_cache, fixed_array;
1058   STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1059   __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
1060   __ j(below_equal, &call_runtime);
1061 
1062   // Check cache validity in generated code. This is a fast case for
1063   // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1064   // guarantee cache validity, call the runtime system to check cache
1065   // validity or get the property names in a fixed array.
1066   __ CheckEnumCache(&call_runtime);
1067 
1068   __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
1069   __ jmp(&use_cache, Label::kNear);
1070 
1071   // Get the set of properties to enumerate.
1072   __ bind(&call_runtime);
1073   __ push(eax);
1074   __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1075   __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
1076          isolate()->factory()->meta_map());
1077   __ j(not_equal, &fixed_array);
1078 
1079 
1080   // We got a map in register eax. Get the enumeration cache from it.
1081   Label no_descriptors;
1082   __ bind(&use_cache);
1083 
1084   __ EnumLength(edx, eax);
1085   __ cmp(edx, Immediate(Smi::FromInt(0)));
1086   __ j(equal, &no_descriptors);
1087 
1088   __ LoadInstanceDescriptors(eax, ecx);
1089   __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
1090   __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1091 
1092   // Set up the four remaining stack slots.
1093   __ push(eax);  // Map.
1094   __ push(ecx);  // Enumeration cache.
1095   __ push(edx);  // Number of valid entries for the map in the enum cache.
1096   __ push(Immediate(Smi::FromInt(0)));  // Initial index.
1097   __ jmp(&loop);
1098 
1099   __ bind(&no_descriptors);
1100   __ add(esp, Immediate(kPointerSize));
1101   __ jmp(&exit);
1102 
1103   // We got a fixed array in register eax. Iterate through that.
1104   Label non_proxy;
1105   __ bind(&fixed_array);
1106 
1107   // No need for a write barrier, we are storing a Smi in the feedback vector.
1108   __ LoadHeapObject(ebx, FeedbackVector());
1109   __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(slot)),
1110          Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1111 
1112   __ mov(ebx, Immediate(Smi::FromInt(1)));  // Smi indicates slow check
1113   __ mov(ecx, Operand(esp, 0 * kPointerSize));  // Get enumerated object
1114   STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1115   __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
1116   __ j(above, &non_proxy);
1117   __ Move(ebx, Immediate(Smi::FromInt(0)));  // Zero indicates proxy
1118   __ bind(&non_proxy);
1119   __ push(ebx);  // Smi
1120   __ push(eax);  // Array
1121   __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
1122   __ push(eax);  // Fixed array length (as smi).
1123   __ push(Immediate(Smi::FromInt(0)));  // Initial index.
1124 
1125   // Generate code for doing the condition check.
1126   PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1127   __ bind(&loop);
1128   __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
1129   __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
1130   __ j(above_equal, loop_statement.break_label());
1131 
1132   // Get the current entry of the array into register ebx.
1133   __ mov(ebx, Operand(esp, 2 * kPointerSize));
1134   __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));
1135 
1136   // Get the expected map from the stack or a smi in the
1137   // permanent slow case into register edx.
1138   __ mov(edx, Operand(esp, 3 * kPointerSize));
1139 
1140   // Check if the expected map still matches that of the enumerable.
1141   // If not, we may have to filter the key.
1142   Label update_each;
1143   __ mov(ecx, Operand(esp, 4 * kPointerSize));
1144   __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
1145   __ j(equal, &update_each, Label::kNear);
1146 
1147   // For proxies, no filtering is done.
1148   // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1149   DCHECK(Smi::FromInt(0) == 0);
1150   __ test(edx, edx);
1151   __ j(zero, &update_each);
1152 
1153   // Convert the entry to a string or null if it isn't a property
1154   // anymore. If the property has been removed while iterating, we
1155   // just skip it.
1156   __ push(ecx);  // Enumerable.
1157   __ push(ebx);  // Current entry.
1158   __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1159   __ test(eax, eax);
1160   __ j(equal, loop_statement.continue_label());
1161   __ mov(ebx, eax);
1162 
1163   // Update the 'each' property or variable from the possibly filtered
1164   // entry in register ebx.
1165   __ bind(&update_each);
1166   __ mov(result_register(), ebx);
1167   // Perform the assignment as if via '='.
1168   { EffectContext context(this);
1169     EmitAssignment(stmt->each());
1170   }
1171 
1172   // Generate code for the body of the loop.
1173   Visit(stmt->body());
1174 
1175   // Generate code for going to the next element by incrementing the
1176   // index (smi) stored on top of the stack.
1177   __ bind(loop_statement.continue_label());
1178   __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
1179 
1180   EmitBackEdgeBookkeeping(stmt, &loop);
1181   __ jmp(&loop);
1182 
1183   // Remove the pointers stored on the stack.
1184   __ bind(loop_statement.break_label());
1185   __ add(esp, Immediate(5 * kPointerSize));
1186 
1187   // Exit and decrement the loop depth.
1188   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1189   __ bind(&exit);
1190   decrement_loop_depth();
1191 }
1192 
1193 
VisitForOfStatement(ForOfStatement * stmt)1194 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1195   Comment cmnt(masm_, "[ ForOfStatement");
1196   SetStatementPosition(stmt);
1197 
1198   Iteration loop_statement(this, stmt);
1199   increment_loop_depth();
1200 
1201   // var iterator = iterable[Symbol.iterator]();
1202   VisitForEffect(stmt->assign_iterator());
1203 
1204   // Loop entry.
1205   __ bind(loop_statement.continue_label());
1206 
1207   // result = iterator.next()
1208   VisitForEffect(stmt->next_result());
1209 
1210   // if (result.done) break;
1211   Label result_not_done;
1212   VisitForControl(stmt->result_done(),
1213                   loop_statement.break_label(),
1214                   &result_not_done,
1215                   &result_not_done);
1216   __ bind(&result_not_done);
1217 
1218   // each = result.value
1219   VisitForEffect(stmt->assign_each());
1220 
1221   // Generate code for the body of the loop.
1222   Visit(stmt->body());
1223 
1224   // Check stack before looping.
1225   PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1226   EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1227   __ jmp(loop_statement.continue_label());
1228 
1229   // Exit and decrement the loop depth.
1230   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1231   __ bind(loop_statement.break_label());
1232   decrement_loop_depth();
1233 }
1234 
1235 
EmitNewClosure(Handle<SharedFunctionInfo> info,bool pretenure)1236 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1237                                        bool pretenure) {
1238   // Use the fast case closure allocation code that allocates in new
1239   // space for nested functions that don't need literals cloning. If
1240   // we're running with the --always-opt or the --prepare-always-opt
1241   // flag, we need to use the runtime function so that the new function
1242   // we are creating here gets a chance to have its code optimized and
1243   // doesn't just get a copy of the existing unoptimized code.
1244   if (!FLAG_always_opt &&
1245       !FLAG_prepare_always_opt &&
1246       !pretenure &&
1247       scope()->is_function_scope() &&
1248       info->num_literals() == 0) {
1249     FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
1250     __ mov(ebx, Immediate(info));
1251     __ CallStub(&stub);
1252   } else {
1253     __ push(esi);
1254     __ push(Immediate(info));
1255     __ push(Immediate(pretenure
1256                       ? isolate()->factory()->true_value()
1257                       : isolate()->factory()->false_value()));
1258     __ CallRuntime(Runtime::kNewClosure, 3);
1259   }
1260   context()->Plug(eax);
1261 }
1262 
1263 
VisitVariableProxy(VariableProxy * expr)1264 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1265   Comment cmnt(masm_, "[ VariableProxy");
1266   EmitVariableLoad(expr);
1267 }
1268 
1269 
EmitLoadHomeObject(SuperReference * expr)1270 void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
1271   Comment cnmt(masm_, "[ SuperReference ");
1272 
1273   __ mov(LoadDescriptor::ReceiverRegister(),
1274          Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1275 
1276   Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
1277   __ mov(LoadDescriptor::NameRegister(), home_object_symbol);
1278 
1279   CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
1280 
1281   __ cmp(eax, isolate()->factory()->undefined_value());
1282   Label done;
1283   __ j(not_equal, &done);
1284   __ CallRuntime(Runtime::kThrowNonMethodError, 0);
1285   __ bind(&done);
1286 }
1287 
1288 
EmitLoadGlobalCheckExtensions(VariableProxy * proxy,TypeofState typeof_state,Label * slow)1289 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1290                                                       TypeofState typeof_state,
1291                                                       Label* slow) {
1292   Register context = esi;
1293   Register temp = edx;
1294 
1295   Scope* s = scope();
1296   while (s != NULL) {
1297     if (s->num_heap_slots() > 0) {
1298       if (s->calls_sloppy_eval()) {
1299         // Check that extension is NULL.
1300         __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
1301                Immediate(0));
1302         __ j(not_equal, slow);
1303       }
1304       // Load next context in chain.
1305       __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1306       // Walk the rest of the chain without clobbering esi.
1307       context = temp;
1308     }
1309     // If no outer scope calls eval, we do not need to check more
1310     // context extensions.  If we have reached an eval scope, we check
1311     // all extensions from this point.
1312     if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1313     s = s->outer_scope();
1314   }
1315 
1316   if (s != NULL && s->is_eval_scope()) {
1317     // Loop up the context chain.  There is no frame effect so it is
1318     // safe to use raw labels here.
1319     Label next, fast;
1320     if (!context.is(temp)) {
1321       __ mov(temp, context);
1322     }
1323     __ bind(&next);
1324     // Terminate at native context.
1325     __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
1326            Immediate(isolate()->factory()->native_context_map()));
1327     __ j(equal, &fast, Label::kNear);
1328     // Check that extension is NULL.
1329     __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1330     __ j(not_equal, slow);
1331     // Load next context in chain.
1332     __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1333     __ jmp(&next);
1334     __ bind(&fast);
1335   }
1336 
1337   // All extension objects were empty and it is safe to use a global
1338   // load IC call.
1339   __ mov(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1340   __ mov(LoadDescriptor::NameRegister(), proxy->var()->name());
1341   if (FLAG_vector_ics) {
1342     __ mov(VectorLoadICDescriptor::SlotRegister(),
1343            Immediate(Smi::FromInt(proxy->VariableFeedbackSlot())));
1344   }
1345 
1346   ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
1347       ? NOT_CONTEXTUAL
1348       : CONTEXTUAL;
1349 
1350   CallLoadIC(mode);
1351 }
1352 
1353 
ContextSlotOperandCheckExtensions(Variable * var,Label * slow)1354 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1355                                                                 Label* slow) {
1356   DCHECK(var->IsContextSlot());
1357   Register context = esi;
1358   Register temp = ebx;
1359 
1360   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1361     if (s->num_heap_slots() > 0) {
1362       if (s->calls_sloppy_eval()) {
1363         // Check that extension is NULL.
1364         __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
1365                Immediate(0));
1366         __ j(not_equal, slow);
1367       }
1368       __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1369       // Walk the rest of the chain without clobbering esi.
1370       context = temp;
1371     }
1372   }
1373   // Check that last extension is NULL.
1374   __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1375   __ j(not_equal, slow);
1376 
1377   // This function is used only for loads, not stores, so it's safe to
1378   // return an esi-based operand (the write barrier cannot be allowed to
1379   // destroy the esi register).
1380   return ContextOperand(context, var->index());
1381 }
1382 
1383 
EmitDynamicLookupFastCase(VariableProxy * proxy,TypeofState typeof_state,Label * slow,Label * done)1384 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1385                                                   TypeofState typeof_state,
1386                                                   Label* slow,
1387                                                   Label* done) {
1388   // Generate fast-case code for variables that might be shadowed by
1389   // eval-introduced variables.  Eval is used a lot without
1390   // introducing variables.  In those cases, we do not want to
1391   // perform a runtime call for all variables in the scope
1392   // containing the eval.
1393   Variable* var = proxy->var();
1394   if (var->mode() == DYNAMIC_GLOBAL) {
1395     EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
1396     __ jmp(done);
1397   } else if (var->mode() == DYNAMIC_LOCAL) {
1398     Variable* local = var->local_if_not_shadowed();
1399     __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
1400     if (local->mode() == LET || local->mode() == CONST ||
1401         local->mode() == CONST_LEGACY) {
1402       __ cmp(eax, isolate()->factory()->the_hole_value());
1403       __ j(not_equal, done);
1404       if (local->mode() == CONST_LEGACY) {
1405         __ mov(eax, isolate()->factory()->undefined_value());
1406       } else {  // LET || CONST
1407         __ push(Immediate(var->name()));
1408         __ CallRuntime(Runtime::kThrowReferenceError, 1);
1409       }
1410     }
1411     __ jmp(done);
1412   }
1413 }
1414 
1415 
EmitVariableLoad(VariableProxy * proxy)1416 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1417   // Record position before possible IC call.
1418   SetSourcePosition(proxy->position());
1419   Variable* var = proxy->var();
1420 
1421   // Three cases: global variables, lookup variables, and all other types of
1422   // variables.
1423   switch (var->location()) {
1424     case Variable::UNALLOCATED: {
1425       Comment cmnt(masm_, "[ Global variable");
1426       __ mov(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1427       __ mov(LoadDescriptor::NameRegister(), var->name());
1428       if (FLAG_vector_ics) {
1429         __ mov(VectorLoadICDescriptor::SlotRegister(),
1430                Immediate(Smi::FromInt(proxy->VariableFeedbackSlot())));
1431       }
1432       CallLoadIC(CONTEXTUAL);
1433       context()->Plug(eax);
1434       break;
1435     }
1436 
1437     case Variable::PARAMETER:
1438     case Variable::LOCAL:
1439     case Variable::CONTEXT: {
1440       Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1441                                                : "[ Stack variable");
1442       if (var->binding_needs_init()) {
1443         // var->scope() may be NULL when the proxy is located in eval code and
1444         // refers to a potential outside binding. Currently those bindings are
1445         // always looked up dynamically, i.e. in that case
1446         //     var->location() == LOOKUP.
1447         // always holds.
1448         DCHECK(var->scope() != NULL);
1449 
1450         // Check if the binding really needs an initialization check. The check
1451         // can be skipped in the following situation: we have a LET or CONST
1452         // binding in harmony mode, both the Variable and the VariableProxy have
1453         // the same declaration scope (i.e. they are both in global code, in the
1454         // same function or in the same eval code) and the VariableProxy is in
1455         // the source physically located after the initializer of the variable.
1456         //
1457         // We cannot skip any initialization checks for CONST in non-harmony
1458         // mode because const variables may be declared but never initialized:
1459         //   if (false) { const x; }; var y = x;
1460         //
1461         // The condition on the declaration scopes is a conservative check for
1462         // nested functions that access a binding and are called before the
1463         // binding is initialized:
1464         //   function() { f(); let x = 1; function f() { x = 2; } }
1465         //
1466         bool skip_init_check;
1467         if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1468           skip_init_check = false;
1469         } else {
1470           // Check that we always have valid source position.
1471           DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1472           DCHECK(proxy->position() != RelocInfo::kNoPosition);
1473           skip_init_check = var->mode() != CONST_LEGACY &&
1474               var->initializer_position() < proxy->position();
1475         }
1476 
1477         if (!skip_init_check) {
1478           // Let and const need a read barrier.
1479           Label done;
1480           GetVar(eax, var);
1481           __ cmp(eax, isolate()->factory()->the_hole_value());
1482           __ j(not_equal, &done, Label::kNear);
1483           if (var->mode() == LET || var->mode() == CONST) {
1484             // Throw a reference error when using an uninitialized let/const
1485             // binding in harmony mode.
1486             __ push(Immediate(var->name()));
1487             __ CallRuntime(Runtime::kThrowReferenceError, 1);
1488           } else {
1489             // Uninitalized const bindings outside of harmony mode are unholed.
1490             DCHECK(var->mode() == CONST_LEGACY);
1491             __ mov(eax, isolate()->factory()->undefined_value());
1492           }
1493           __ bind(&done);
1494           context()->Plug(eax);
1495           break;
1496         }
1497       }
1498       context()->Plug(var);
1499       break;
1500     }
1501 
1502     case Variable::LOOKUP: {
1503       Comment cmnt(masm_, "[ Lookup variable");
1504       Label done, slow;
1505       // Generate code for loading from variables potentially shadowed
1506       // by eval-introduced variables.
1507       EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
1508       __ bind(&slow);
1509       __ push(esi);  // Context.
1510       __ push(Immediate(var->name()));
1511       __ CallRuntime(Runtime::kLoadLookupSlot, 2);
1512       __ bind(&done);
1513       context()->Plug(eax);
1514       break;
1515     }
1516   }
1517 }
1518 
1519 
VisitRegExpLiteral(RegExpLiteral * expr)1520 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1521   Comment cmnt(masm_, "[ RegExpLiteral");
1522   Label materialized;
1523   // Registers will be used as follows:
1524   // edi = JS function.
1525   // ecx = literals array.
1526   // ebx = regexp literal.
1527   // eax = regexp literal clone.
1528   __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1529   __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
1530   int literal_offset =
1531       FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1532   __ mov(ebx, FieldOperand(ecx, literal_offset));
1533   __ cmp(ebx, isolate()->factory()->undefined_value());
1534   __ j(not_equal, &materialized, Label::kNear);
1535 
1536   // Create regexp literal using runtime function
1537   // Result will be in eax.
1538   __ push(ecx);
1539   __ push(Immediate(Smi::FromInt(expr->literal_index())));
1540   __ push(Immediate(expr->pattern()));
1541   __ push(Immediate(expr->flags()));
1542   __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1543   __ mov(ebx, eax);
1544 
1545   __ bind(&materialized);
1546   int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1547   Label allocated, runtime_allocate;
1548   __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
1549   __ jmp(&allocated);
1550 
1551   __ bind(&runtime_allocate);
1552   __ push(ebx);
1553   __ push(Immediate(Smi::FromInt(size)));
1554   __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1555   __ pop(ebx);
1556 
1557   __ bind(&allocated);
1558   // Copy the content into the newly allocated memory.
1559   // (Unroll copy loop once for better throughput).
1560   for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1561     __ mov(edx, FieldOperand(ebx, i));
1562     __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
1563     __ mov(FieldOperand(eax, i), edx);
1564     __ mov(FieldOperand(eax, i + kPointerSize), ecx);
1565   }
1566   if ((size % (2 * kPointerSize)) != 0) {
1567     __ mov(edx, FieldOperand(ebx, size - kPointerSize));
1568     __ mov(FieldOperand(eax, size - kPointerSize), edx);
1569   }
1570   context()->Plug(eax);
1571 }
1572 
1573 
EmitAccessor(Expression * expression)1574 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1575   if (expression == NULL) {
1576     __ push(Immediate(isolate()->factory()->null_value()));
1577   } else {
1578     VisitForStackValue(expression);
1579   }
1580 }
1581 
1582 
// Materializes an object literal.  The boilerplate is cloned either via a
// runtime call (deep/doubled/large literals, or when serializing) or via
// FastCloneShallowObjectStub; afterwards each non-compile-time property is
// stored into the clone.  Result ends up in eax, or on top of the stack
// once any property store has forced it to be saved.
VisitObjectLiteral(ObjectLiteral * expr)1583 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1584   Comment cmnt(masm_, "[ ObjectLiteral");
1585 
1586   expr->BuildConstantProperties(isolate());
1587   Handle<FixedArray> constant_properties = expr->constant_properties();
       // Flags tell the runtime/stub how the boilerplate may be cloned.
1588   int flags = expr->fast_elements()
1589       ? ObjectLiteral::kFastElements
1590       : ObjectLiteral::kNoFlags;
1591   flags |= expr->has_function()
1592       ? ObjectLiteral::kHasFunction
1593       : ObjectLiteral::kNoFlags;
       // constant_properties stores key/value pairs, hence the division by 2.
1594   int properties_count = constant_properties->length() / 2;
       // Slow path: anything the shallow-clone stub cannot handle goes to the
       // runtime (doubles, nested literals, serializer, too many properties).
1595   if (expr->may_store_doubles() || expr->depth() > 1 ||
1596       masm()->serializer_enabled() ||
1597       flags != ObjectLiteral::kFastElements ||
1598       properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1599     __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1600     __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
1601     __ push(Immediate(Smi::FromInt(expr->literal_index())));
1602     __ push(Immediate(constant_properties));
1603     __ push(Immediate(Smi::FromInt(flags)));
1604     __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1605   } else {
         // Fast path: pass literals array, index, properties and flags in
         // registers per the FastCloneShallowObjectStub calling convention.
1606     __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1607     __ mov(eax, FieldOperand(edi, JSFunction::kLiteralsOffset));
1608     __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1609     __ mov(ecx, Immediate(constant_properties));
1610     __ mov(edx, Immediate(Smi::FromInt(flags)));
1611     FastCloneShallowObjectStub stub(isolate(), properties_count);
1612     __ CallStub(&stub);
1613   }
1614 
1615   // If result_saved is true the result is on top of the stack.  If
1616   // result_saved is false the result is in eax.
1617   bool result_saved = false;
1618 
1619   // Mark all computed expressions that are bound to a key that
1620   // is shadowed by a later occurrence of the same key. For the
1621   // marked expressions, no store code is emitted.
1622   expr->CalculateEmitStore(zone());
1623 
1624   AccessorTable accessor_table(zone());
1625   for (int i = 0; i < expr->properties()->length(); i++) {
1626     ObjectLiteral::Property* property = expr->properties()->at(i);
         // Compile-time values are already part of the cloned boilerplate.
1627     if (property->IsCompileTimeValue()) continue;
1628 
1629     Literal* key = property->key();
1630     Expression* value = property->value();
1631     if (!result_saved) {
1632       __ push(eax);  // Save result on the stack
1633       result_saved = true;
1634     }
1635     switch (property->kind()) {
1636       case ObjectLiteral::Property::CONSTANT:
1637         UNREACHABLE();
1638       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1639         DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
1640         // Fall through.
1641       case ObjectLiteral::Property::COMPUTED:
           // Internalized-string keys can use the (patchable) store IC.
1642         if (key->value()->IsInternalizedString()) {
1643           if (property->emit_store()) {
1644             VisitForAccumulatorValue(value);
1645             DCHECK(StoreDescriptor::ValueRegister().is(eax));
1646             __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
1647             __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
1648             CallStoreIC(key->LiteralFeedbackId());
1649             PrepareForBailoutForId(key->id(), NO_REGISTERS);
1650           } else {
               // Shadowed property: evaluate for side effects only.
1651             VisitForEffect(value);
1652           }
1653           break;
1654         }
           // Generic key: fall back to Runtime::kSetProperty.
1655         __ push(Operand(esp, 0));  // Duplicate receiver.
1656         VisitForStackValue(key);
1657         VisitForStackValue(value);
1658         if (property->emit_store()) {
1659           __ push(Immediate(Smi::FromInt(SLOPPY)));  // Language mode (always SLOPPY here).
1660           __ CallRuntime(Runtime::kSetProperty, 4);
1661         } else {
1662           __ Drop(3);
1663         }
1664         break;
1665       case ObjectLiteral::Property::PROTOTYPE:
1666         __ push(Operand(esp, 0));  // Duplicate receiver.
1667         VisitForStackValue(value);
1668         if (property->emit_store()) {
1669           __ CallRuntime(Runtime::kSetPrototype, 2);
1670         } else {
1671           __ Drop(2);
1672         }
1673         break;
           // Getters/setters are collected first so a matching pair can be
           // defined with a single runtime call below.
1674       case ObjectLiteral::Property::GETTER:
1675         accessor_table.lookup(key)->second->getter = value;
1676         break;
1677       case ObjectLiteral::Property::SETTER:
1678         accessor_table.lookup(key)->second->setter = value;
1679         break;
1680     }
1681   }
1682 
1683   // Emit code to define accessors, using only a single call to the runtime for
1684   // each pair of corresponding getters and setters.
1685   for (AccessorTable::Iterator it = accessor_table.begin();
1686        it != accessor_table.end();
1687        ++it) {
1688     __ push(Operand(esp, 0));  // Duplicate receiver.
1689     VisitForStackValue(it->first);
1690     EmitAccessor(it->second->getter);
1691     EmitAccessor(it->second->setter);
1692     __ push(Immediate(Smi::FromInt(NONE)));
1693     __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1694   }
1695 
1696   if (expr->has_function()) {
1697     DCHECK(result_saved);
1698     __ push(Operand(esp, 0));
1699     __ CallRuntime(Runtime::kToFastProperties, 1);
1700   }
1701 
1702   if (result_saved) {
1703     context()->PlugTOS();
1704   } else {
1705     context()->Plug(eax);
1706   }
1707 }
1708 
1709 
// Materializes an array literal: clone the boilerplate (runtime call for
// deep/large literals, FastCloneShallowArrayStub otherwise), then evaluate
// and store every non-compile-time element into the clone.
VisitArrayLiteral(ArrayLiteral * expr)1710 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1711   Comment cmnt(masm_, "[ ArrayLiteral");
1712 
1713   expr->BuildConstantElements(isolate());
1714   int flags = expr->depth() == 1
1715       ? ArrayLiteral::kShallowElements
1716       : ArrayLiteral::kNoFlags;
1717 
1718   ZoneList<Expression*>* subexprs = expr->values();
1719   int length = subexprs->length();
       // constant_elements is a pair: [0] elements kind (smi), [1] the values.
1720   Handle<FixedArray> constant_elements = expr->constant_elements();
1721   DCHECK_EQ(2, constant_elements->length());
1722   ElementsKind constant_elements_kind =
1723       static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1724   bool has_constant_fast_elements =
1725       IsFastObjectElementsKind(constant_elements_kind);
1726   Handle<FixedArrayBase> constant_elements_values(
1727       FixedArrayBase::cast(constant_elements->get(1)));
1728 
1729   AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1730   if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1731     // If the only customer of allocation sites is transitioning, then
1732     // we can turn it off if we don't have anywhere else to transition to.
1733     allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1734   }
1735 
       // Slow path: nested or over-long literals go to the runtime.
1736   if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
1737     __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1738     __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
1739     __ push(Immediate(Smi::FromInt(expr->literal_index())));
1740     __ push(Immediate(constant_elements));
1741     __ push(Immediate(Smi::FromInt(flags)));
1742     __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1743   } else {
         // Fast path: stub expects literals array in eax, index in ebx,
         // constant elements in ecx.
1744     __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1745     __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
1746     __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1747     __ mov(ecx, Immediate(constant_elements));
1748     FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1749     __ CallStub(&stub);
1750   }
1751 
1752   bool result_saved = false;  // Is the result saved to the stack?
1753 
1754   // Emit code to evaluate all the non-constant subexpressions and to store
1755   // them into the newly cloned array.
1756   for (int i = 0; i < length; i++) {
1757     Expression* subexpr = subexprs->at(i);
1758     // If the subexpression is a literal or a simple materialized literal it
1759     // is already set in the cloned array.
1760     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1761 
1762     if (!result_saved) {
         // Stack layout while storing: [literal index][array literal].
1763       __ push(eax);  // array literal.
1764       __ push(Immediate(Smi::FromInt(expr->literal_index())));
1765       result_saved = true;
1766     }
1767     VisitForAccumulatorValue(subexpr);
1768 
1769     if (IsFastObjectElementsKind(constant_elements_kind)) {
1770       // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
1771       // cannot transition and don't need to call the runtime stub.
1772       int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1773       __ mov(ebx, Operand(esp, kPointerSize));  // Copy of array literal.
1774       __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
1775       // Store the subexpression value in the array's elements.
1776       __ mov(FieldOperand(ebx, offset), result_register());
1777       // Update the write barrier for the array store.
1778       __ RecordWriteField(ebx, offset, result_register(), ecx, kDontSaveFPRegs,
1779                           EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1780     } else {
1781       // Store the subexpression value in the array's elements.
         // Stub reads array literal and index from the stack; element index
         // is passed as a smi in ecx.
1782       __ mov(ecx, Immediate(Smi::FromInt(i)));
1783       StoreArrayLiteralElementStub stub(isolate());
1784       __ CallStub(&stub);
1785     }
1786 
1787     PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1788   }
1789 
1790   if (result_saved) {
1791     __ add(esp, Immediate(kPointerSize));  // literal index
1792     context()->PlugTOS();
1793   } else {
1794     context()->Plug(eax);
1795   }
1796 }
1797 
1798 
// Compiles an assignment expression.  Dispatches on the kind of target
// (variable, named property, keyed property); for compound assignments
// (e.g. +=) first loads the current value, applies the binary op, then
// stores.  Emits bailout points after each load and after the binary op.
VisitAssignment(Assignment * expr)1799 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1800   DCHECK(expr->target()->IsValidReferenceExpression());
1801 
1802   Comment cmnt(masm_, "[ Assignment");
1803 
1804   // Left-hand side can only be a property, a global or a (parameter or local)
1805   // slot.
1806   enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1807   LhsKind assign_type = VARIABLE;
1808   Property* property = expr->target()->AsProperty();
1809   if (property != NULL) {
1810     assign_type = (property->key()->IsPropertyName())
1811         ? NAMED_PROPERTY
1812         : KEYED_PROPERTY;
1813   }
1814 
1815   // Evaluate LHS expression.
1816   switch (assign_type) {
1817     case VARIABLE:
1818       // Nothing to do here.
1819       break;
1820     case NAMED_PROPERTY:
1821       if (expr->is_compound()) {
1822         // We need the receiver both on the stack and in the register.
1823         VisitForStackValue(property->obj());
1824         __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
1825       } else {
1826         VisitForStackValue(property->obj());
1827       }
1828       break;
1829     case KEYED_PROPERTY: {
1830       if (expr->is_compound()) {
           // Compound: receiver and key are needed both on the stack (for the
           // later store) and in the load IC's registers (for the load).
1831         VisitForStackValue(property->obj());
1832         VisitForStackValue(property->key());
1833         __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
1834         __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
1835       } else {
1836         VisitForStackValue(property->obj());
1837         VisitForStackValue(property->key());
1838       }
1839       break;
1840     }
1841   }
1842 
1843   // For compound assignments we need another deoptimization point after the
1844   // variable/property load.
1845   if (expr->is_compound()) {
1846     AccumulatorValueContext result_context(this);
1847     { AccumulatorValueContext left_operand_context(this);
1848       switch (assign_type) {
1849         case VARIABLE:
1850           EmitVariableLoad(expr->target()->AsVariableProxy());
1851           PrepareForBailout(expr->target(), TOS_REG);
1852           break;
1853         case NAMED_PROPERTY:
1854           EmitNamedPropertyLoad(property);
1855           PrepareForBailoutForId(property->LoadId(), TOS_REG);
1856           break;
1857         case KEYED_PROPERTY:
1858           EmitKeyedPropertyLoad(property);
1859           PrepareForBailoutForId(property->LoadId(), TOS_REG);
1860           break;
1861       }
1862     }
1863 
1864     Token::Value op = expr->binary_op();
1865     __ push(eax);  // Left operand goes on the stack.
1866     VisitForAccumulatorValue(expr->value());
1867 
       // The RHS value may be overwritten in place by the binary op if it is
       // a freshly created (non-aliased) object.
1868     OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1869         ? OVERWRITE_RIGHT
1870         : NO_OVERWRITE;
       // +1 so the position points inside the operator, not at the target.
1871     SetSourcePosition(expr->position() + 1);
1872     if (ShouldInlineSmiCase(op)) {
1873       EmitInlineSmiBinaryOp(expr->binary_operation(),
1874                             op,
1875                             mode,
1876                             expr->target(),
1877                             expr->value());
1878     } else {
1879       EmitBinaryOp(expr->binary_operation(), op, mode);
1880     }
1881 
1882     // Deoptimization point in case the binary operation may have side effects.
1883     PrepareForBailout(expr->binary_operation(), TOS_REG);
1884   } else {
1885     VisitForAccumulatorValue(expr->value());
1886   }
1887 
1888   // Record source position before possible IC call.
1889   SetSourcePosition(expr->position());
1890 
1891   // Store the value.
1892   switch (assign_type) {
1893     case VARIABLE:
1894       EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1895                              expr->op());
1896       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1897       context()->Plug(eax);
1898       break;
1899     case NAMED_PROPERTY:
1900       EmitNamedPropertyAssignment(expr);
1901       break;
1902     case KEYED_PROPERTY:
1903       EmitKeyedPropertyAssignment(expr);
1904       break;
1905   }
1906 }
1907 
1908 
// Compiles a yield expression inside a generator.  kInitial/kSuspend save
// the continuation offset and context into the generator object and return
// to the caller; kFinal closes the generator; kDelegating (yield*) runs the
// full iteration protocol against the delegate iterator.
VisitYield(Yield * expr)1909 void FullCodeGenerator::VisitYield(Yield* expr) {
1910   Comment cmnt(masm_, "[ Yield");
1911   // Evaluate yielded value first; the initial iterator definition depends on
1912   // this.  It stays on the stack while we update the iterator.
1913   VisitForStackValue(expr->expression());
1914 
1915   switch (expr->yield_kind()) {
1916     case Yield::kSuspend:
1917       // Pop value from top-of-stack slot; box result into result register.
1918       EmitCreateIteratorResult(false);
1919       __ push(result_register());
1920       // Fall through.
1921     case Yield::kInitial: {
1922       Label suspend, continuation, post_runtime, resume;
1923 
1924       __ jmp(&suspend);
1925 
         // `continuation` marks where execution resumes after the generator is
         // re-entered; its code offset is stored in the generator object below.
1926       __ bind(&continuation);
1927       __ jmp(&resume);
1928 
1929       __ bind(&suspend);
1930       VisitForAccumulatorValue(expr->generator_object());
1931       DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1932       __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
1933              Immediate(Smi::FromInt(continuation.pos())));
1934       __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
1935       __ mov(ecx, esi);
1936       __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
1937                           kDontSaveFPRegs);
         // If the operand stack is empty (esp at the expression-stack base)
         // we can skip the runtime call that copies it out.
1938       __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
1939       __ cmp(esp, ebx);
1940       __ j(equal, &post_runtime);
1941       __ push(eax);  // generator object
1942       __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1943       __ mov(context_register(),
1944              Operand(ebp, StandardFrameConstants::kContextOffset));
1945       __ bind(&post_runtime);
1946       __ pop(result_register());
1947       EmitReturnSequence();
1948 
1949       __ bind(&resume);
1950       context()->Plug(result_register());
1951       break;
1952     }
1953 
1954     case Yield::kFinal: {
         // return-from-generator: mark closed, box {value, done: true}.
1955       VisitForAccumulatorValue(expr->generator_object());
1956       __ mov(FieldOperand(result_register(),
1957                           JSGeneratorObject::kContinuationOffset),
1958              Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
1959       // Pop value from top-of-stack slot, box result into result register.
1960       EmitCreateIteratorResult(true);
1961       EmitUnwindBeforeReturn();
1962       EmitReturnSequence();
1963       break;
1964     }
1965 
1966     case Yield::kDelegating: {
1967       VisitForStackValue(expr->generator_object());
1968 
1969       // Initial stack layout is as follows:
1970       // [sp + 1 * kPointerSize] iter
1971       // [sp + 0 * kPointerSize] g
1972 
1973       Label l_catch, l_try, l_suspend, l_continuation, l_resume;
1974       Label l_next, l_call, l_loop;
1975       Register load_receiver = LoadDescriptor::ReceiverRegister();
1976       Register load_name = LoadDescriptor::NameRegister();
1977 
1978       // Initial send value is undefined.
1979       __ mov(eax, isolate()->factory()->undefined_value());
1980       __ jmp(&l_next);
1981 
1982       // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
1983       __ bind(&l_catch);
         // Patch this handler's code offset into the function's handler table.
1984       handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
1985       __ mov(load_name, isolate()->factory()->throw_string());  // "throw"
1986       __ push(load_name);                                       // "throw"
1987       __ push(Operand(esp, 2 * kPointerSize));                  // iter
1988       __ push(eax);                                             // exception
1989       __ jmp(&l_call);
1990 
1991       // try { received = %yield result }
1992       // Shuffle the received result above a try handler and yield it without
1993       // re-boxing.
1994       __ bind(&l_try);
1995       __ pop(eax);                                       // result
1996       __ PushTryHandler(StackHandler::CATCH, expr->index());
1997       const int handler_size = StackHandlerConstants::kSize;
1998       __ push(eax);                                      // result
1999       __ jmp(&l_suspend);
2000       __ bind(&l_continuation);
2001       __ jmp(&l_resume);
2002       __ bind(&l_suspend);
         // Stack here: [result][try handler][g][iter]; fetch g past both.
2003       const int generator_object_depth = kPointerSize + handler_size;
2004       __ mov(eax, Operand(esp, generator_object_depth));
2005       __ push(eax);                                      // g
2006       DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2007       __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
2008              Immediate(Smi::FromInt(l_continuation.pos())));
2009       __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
2010       __ mov(ecx, esi);
2011       __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
2012                           kDontSaveFPRegs);
2013       __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2014       __ mov(context_register(),
2015              Operand(ebp, StandardFrameConstants::kContextOffset));
2016       __ pop(eax);                                       // result
2017       EmitReturnSequence();
2018       __ bind(&l_resume);                                // received in eax
2019       __ PopTryHandler();
2020 
2021       // receiver = iter; f = iter.next; arg = received;
2022       __ bind(&l_next);
2023 
2024       __ mov(load_name, isolate()->factory()->next_string());
2025       __ push(load_name);                           // "next"
2026       __ push(Operand(esp, 2 * kPointerSize));      // iter
2027       __ push(eax);                                 // received
2028 
2029       // result = receiver[f](arg);
2030       __ bind(&l_call);
2031       __ mov(load_receiver, Operand(esp, kPointerSize));
2032       if (FLAG_vector_ics) {
2033         __ mov(VectorLoadICDescriptor::SlotRegister(),
2034                Immediate(Smi::FromInt(expr->KeyedLoadFeedbackSlot())));
2035       }
2036       Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2037       CallIC(ic, TypeFeedbackId::None());
2038       __ mov(edi, eax);
2039       __ mov(Operand(esp, 2 * kPointerSize), edi);
2040       CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2041       __ CallStub(&stub);
2042 
2043       __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2044       __ Drop(1);  // The function is still on the stack; drop it.
2045 
2046       // if (!result.done) goto l_try;
2047       __ bind(&l_loop);
2048       __ push(eax);                                      // save result
2049       __ Move(load_receiver, eax);                       // result
2050       __ mov(load_name,
2051              isolate()->factory()->done_string());       // "done"
2052       if (FLAG_vector_ics) {
2053         __ mov(VectorLoadICDescriptor::SlotRegister(),
2054                Immediate(Smi::FromInt(expr->DoneFeedbackSlot())));
2055       }
2056       CallLoadIC(NOT_CONTEXTUAL);                        // result.done in eax
2057       Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2058       CallIC(bool_ic);
2059       __ test(eax, eax);
2060       __ j(zero, &l_try);
2061 
2062       // result.value
2063       __ pop(load_receiver);                              // result
2064       __ mov(load_name,
2065              isolate()->factory()->value_string());       // "value"
2066       if (FLAG_vector_ics) {
2067         __ mov(VectorLoadICDescriptor::SlotRegister(),
2068                Immediate(Smi::FromInt(expr->ValueFeedbackSlot())));
2069       }
2070       CallLoadIC(NOT_CONTEXTUAL);                         // result.value in eax
2071       context()->DropAndPlug(2, eax);                     // drop iter and g
2072       break;
2073     }
2074   }
2075 }
2076 
2077 
// Resumes a suspended generator: rebuilds the generator's JS frame (receiver,
// argument holes, saved context/function), then either jumps straight back to
// the stored continuation offset (fast path, NEXT with empty operand stack)
// or calls the runtime to restore the operand stack and handlers.  Closed or
// running generators are handled via the runtime (iterator result / throw).
EmitGeneratorResume(Expression * generator,Expression * value,JSGeneratorObject::ResumeMode resume_mode)2078 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2079     Expression *value,
2080     JSGeneratorObject::ResumeMode resume_mode) {
2081   // The value stays in eax, and is ultimately read by the resumed generator, as
2082   // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2083   // is read to throw the value when the resumed generator is already closed.
2084   // ebx will hold the generator object until the activation has been resumed.
2085   VisitForStackValue(generator);
2086   VisitForAccumulatorValue(value);
2087   __ pop(ebx);
2088 
2089   // Check generator state.
       // continuation < 0 means executing, == 0 means closed, > 0 suspended.
2090   Label wrong_state, closed_state, done;
2091   STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2092   STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2093   __ cmp(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
2094          Immediate(Smi::FromInt(0)));
2095   __ j(equal, &closed_state);
2096   __ j(less, &wrong_state);
2097 
2098   // Load suspended function and context.
2099   __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
2100   __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
2101 
2102   // Push receiver.
2103   __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));
2104 
2105   // Push holes for arguments to generator function.
2106   __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2107   __ mov(edx,
2108          FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2109   __ mov(ecx, isolate()->factory()->the_hole_value());
2110   Label push_argument_holes, push_frame;
2111   __ bind(&push_argument_holes);
       // edx holds a smi count; subtracting smi 1 sets carry when it was 0.
2112   __ sub(edx, Immediate(Smi::FromInt(1)));
2113   __ j(carry, &push_frame);
2114   __ push(ecx);
2115   __ jmp(&push_argument_holes);
2116 
2117   // Enter a new JavaScript frame, and initialize its slots as they were when
2118   // the generator was suspended.
2119   Label resume_frame;
2120   __ bind(&push_frame);
       // call pushes the return address that a later `ret` in the generator
       // will use to come back to `done`.
2121   __ call(&resume_frame);
2122   __ jmp(&done);
2123   __ bind(&resume_frame);
2124   __ push(ebp);  // Caller's frame pointer.
2125   __ mov(ebp, esp);
2126   __ push(esi);  // Callee's context.
2127   __ push(edi);  // Callee's JS Function.
2128 
2129   // Load the operand stack size.
2130   __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
2131   __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
2132   __ SmiUntag(edx);
2133 
2134   // If we are sending a value and there is no operand stack, we can jump back
2135   // in directly.
2136   if (resume_mode == JSGeneratorObject::NEXT) {
2137     Label slow_resume;
2138     __ cmp(edx, Immediate(0));
2139     __ j(not_zero, &slow_resume);
2140     __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
         // Target = code entry + stored continuation byte offset.
2141     __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
2142     __ SmiUntag(ecx);
2143     __ add(edx, ecx);
2144     __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
2145            Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2146     __ jmp(edx);
2147     __ bind(&slow_resume);
2148   }
2149 
2150   // Otherwise, we push holes for the operand stack and call the runtime to fix
2151   // up the stack and the handlers.
2152   Label push_operand_holes, call_resume;
2153   __ bind(&push_operand_holes);
2154   __ sub(edx, Immediate(1));
2155   __ j(carry, &call_resume);
2156   __ push(ecx);
2157   __ jmp(&push_operand_holes);
2158   __ bind(&call_resume);
2159   __ push(ebx);
2160   __ push(result_register());
2161   __ Push(Smi::FromInt(resume_mode));
2162   __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2163   // Not reached: the runtime call returns elsewhere.
2164   __ Abort(kGeneratorFailedToResume);
2165 
2166   // Reach here when generator is closed.
2167   __ bind(&closed_state);
2168   if (resume_mode == JSGeneratorObject::NEXT) {
2169     // Return completed iterator result when generator is closed.
2170     __ push(Immediate(isolate()->factory()->undefined_value()));
2171     // Pop value from top-of-stack slot; box result into result register.
2172     EmitCreateIteratorResult(true);
2173   } else {
2174     // Throw the provided value.
2175     __ push(eax);
2176     __ CallRuntime(Runtime::kThrow, 1);
2177   }
2178   __ jmp(&done);
2179 
2180   // Throw error if we attempt to operate on a running generator.
2181   __ bind(&wrong_state);
2182   __ push(ebx);
2183   __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2184 
2185   __ bind(&done);
2186   context()->Plug(result_register());
2187 }
2188 
2189 
// Allocates a JSIteratorResult-shaped object {value: <TOS>, done: <done>}
// in new space (runtime fallback on allocation failure).  Pops the value
// from the stack; leaves the result object in eax.
EmitCreateIteratorResult(bool done)2190 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2191   Label gc_required;
2192   Label allocated;
2193 
       // map + properties + elements + value + done = 5 words; checked against
       // the actual iterator_result_map instance size below.
2194   const int instance_size = 5 * kPointerSize;
2195   DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2196             instance_size);
2197 
2198   __ Allocate(instance_size, eax, ecx, edx, &gc_required, TAG_OBJECT);
2199   __ jmp(&allocated);
2200 
2201   __ bind(&gc_required);
2202   __ Push(Smi::FromInt(instance_size));
2203   __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2204   __ mov(context_register(),
2205          Operand(ebp, StandardFrameConstants::kContextOffset));
2206 
2207   __ bind(&allocated);
       // Fetch the iterator result map from the native context.
2208   __ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2209   __ mov(ebx, FieldOperand(ebx, GlobalObject::kNativeContextOffset));
2210   __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
2211   __ pop(ecx);
2212   __ mov(edx, isolate()->factory()->ToBoolean(done));
2213   __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
2214   __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
2215          isolate()->factory()->empty_fixed_array());
2216   __ mov(FieldOperand(eax, JSObject::kElementsOffset),
2217          isolate()->factory()->empty_fixed_array());
2218   __ mov(FieldOperand(eax, JSGeneratorObject::kResultValuePropertyOffset), ecx);
2219   __ mov(FieldOperand(eax, JSGeneratorObject::kResultDonePropertyOffset), edx);
2220 
2221   // Only the value field needs a write barrier, as the other values are in the
2222   // root set.
2223   __ RecordWriteField(eax, JSGeneratorObject::kResultValuePropertyOffset, ecx,
2224                       edx, kDontSaveFPRegs);
2225 }
2226 
2227 
EmitNamedPropertyLoad(Property * prop)2228 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2229   SetSourcePosition(prop->position());
2230   Literal* key = prop->key()->AsLiteral();
2231   DCHECK(!key->value()->IsSmi());
2232   __ mov(LoadDescriptor::NameRegister(), Immediate(key->value()));
2233   if (FLAG_vector_ics) {
2234     __ mov(VectorLoadICDescriptor::SlotRegister(),
2235            Immediate(Smi::FromInt(prop->PropertyFeedbackSlot())));
2236     CallLoadIC(NOT_CONTEXTUAL);
2237   } else {
2238     CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2239   }
2240 }
2241 
2242 
// Emits a named load from a super reference via Runtime::kLoadFromSuper,
// passing (home object, this, name) on the stack.
EmitNamedSuperPropertyLoad(Property * prop)2243 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2244   SetSourcePosition(prop->position());
2245   Literal* key = prop->key()->AsLiteral();
2246   DCHECK(!key->value()->IsSmi());
2247   DCHECK(prop->IsSuperAccess());
2248 
2249   SuperReference* super_ref = prop->obj()->AsSuperReference();
       // EmitLoadHomeObject leaves the home object in eax.
2250   EmitLoadHomeObject(super_ref);
2251   __ push(eax);
2252   VisitForStackValue(super_ref->this_var());
2253   __ push(Immediate(key->value()));
2254   __ CallRuntime(Runtime::kLoadFromSuper, 3);
2255 }
2256 
2257 
EmitKeyedPropertyLoad(Property * prop)2258 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2259   SetSourcePosition(prop->position());
2260   Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2261   if (FLAG_vector_ics) {
2262     __ mov(VectorLoadICDescriptor::SlotRegister(),
2263            Immediate(Smi::FromInt(prop->PropertyFeedbackSlot())));
2264     CallIC(ic);
2265   } else {
2266     CallIC(ic, prop->PropertyFeedbackId());
2267   }
2268 }
2269 
2270 
// Emits the inline smi fast path for a binary operation plus a patchable
// fallback to the BinaryOpIC stub.  On entry the left operand is on the
// stack and the right operand is in eax; the result is plugged from eax.
// The JumpPatchSite lets the IC later patch the smi check from
// "jump-if-smi" to "jump-if-not-smi" once type feedback arrives.
EmitInlineSmiBinaryOp(BinaryOperation * expr,Token::Value op,OverwriteMode mode,Expression * left,Expression * right)2271 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2272                                               Token::Value op,
2273                                               OverwriteMode mode,
2274                                               Expression* left,
2275                                               Expression* right) {
2276   // Do combined smi check of the operands. Left operand is on the
2277   // stack. Right operand is in eax.
2278   Label smi_case, done, stub_call;
2279   __ pop(edx);
2280   __ mov(ecx, eax);
       // OR-ing left and right: the result is a smi iff both tags are 0.
2281   __ or_(eax, edx);
2282   JumpPatchSite patch_site(masm_);
2283   patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
2284 
2285   __ bind(&stub_call);
2286   __ mov(eax, ecx);
2287   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2288   CallIC(code, expr->BinaryOperationFeedbackId());
2289   patch_site.EmitPatchInfo();
2290   __ jmp(&done, Label::kNear);
2291 
2292   // Smi case.
       // Here left is in edx, right in ecx; eax holds left as the working copy.
2293   __ bind(&smi_case);
2294   __ mov(eax, edx);  // Copy left operand in case of a stub call.
2295 
2296   switch (op) {
2297     case Token::SAR:
2298       __ SmiUntag(ecx);
2299       __ sar_cl(eax);  // No checks of result necessary
         // Clear the tag bit shifted in from the left operand's payload.
2300       __ and_(eax, Immediate(~kSmiTagMask));
2301       break;
2302     case Token::SHL: {
2303       Label result_ok;
2304       __ SmiUntag(eax);
2305       __ SmiUntag(ecx);
2306       __ shl_cl(eax);
2307       // Check that the *signed* result fits in a smi.
2308       __ cmp(eax, 0xc0000000);
2309       __ j(positive, &result_ok);
         // Overflow: restore the (tagged) right operand and go to the stub.
2310       __ SmiTag(ecx);
2311       __ jmp(&stub_call);
2312       __ bind(&result_ok);
2313       __ SmiTag(eax);
2314       break;
2315     }
2316     case Token::SHR: {
2317       Label result_ok;
2318       __ SmiUntag(eax);
2319       __ SmiUntag(ecx);
2320       __ shr_cl(eax);
         // Unsigned result must fit in a (positive) smi: top two bits clear.
2321       __ test(eax, Immediate(0xc0000000));
2322       __ j(zero, &result_ok);
2323       __ SmiTag(ecx);
2324       __ jmp(&stub_call);
2325       __ bind(&result_ok);
2326       __ SmiTag(eax);
2327       break;
2328     }
2329     case Token::ADD:
2330       __ add(eax, ecx);
2331       __ j(overflow, &stub_call);
2332       break;
2333     case Token::SUB:
2334       __ sub(eax, ecx);
2335       __ j(overflow, &stub_call);
2336       break;
2337     case Token::MUL: {
2338       __ SmiUntag(eax);
2339       __ imul(eax, ecx);
2340       __ j(overflow, &stub_call);
2341       __ test(eax, eax);
2342       __ j(not_zero, &done, Label::kNear);
         // Zero result: -0 cannot be a smi; detect it via the operand signs.
2343       __ mov(ebx, edx);
2344       __ or_(ebx, ecx);
2345       __ j(negative, &stub_call);
2346       break;
2347     }
       // Bitwise ops on valid smis always produce valid smis.
2348     case Token::BIT_OR:
2349       __ or_(eax, ecx);
2350       break;
2351     case Token::BIT_AND:
2352       __ and_(eax, ecx);
2353       break;
2354     case Token::BIT_XOR:
2355       __ xor_(eax, ecx);
2356       break;
2357     default:
2358       UNREACHABLE();
2359   }
2360 
2361   __ bind(&done);
2362   context()->Plug(eax);
2363 }
2364 
2365 
EmitBinaryOp(BinaryOperation * expr,Token::Value op,OverwriteMode mode)2366 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2367                                      Token::Value op,
2368                                      OverwriteMode mode) {
2369   __ pop(edx);
2370   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2371   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
2372   CallIC(code, expr->BinaryOperationFeedbackId());
2373   patch_site.EmitPatchInfo();
2374   context()->Plug(eax);
2375 }
2376 
2377 
// Emits a store of the value already held in eax into the reference
// expression |expr| (a variable, named property, or keyed property),
// leaving the value in eax as the result.
void FullCodeGenerator::EmitAssignment(Expression* expr) {
  DCHECK(expr->IsValidReferenceExpression());

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      // Effect context: the assignment itself produces no extra value; eax
      // is plugged below.
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(eax);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ Move(StoreDescriptor::ReceiverRegister(), eax);
      __ pop(StoreDescriptor::ValueRegister());  // Restore value.
      __ mov(StoreDescriptor::NameRegister(),
             prop->key()->AsLiteral()->value());
      CallStoreIC();
      break;
    }
    case KEYED_PROPERTY: {
      __ push(eax);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Move(StoreDescriptor::NameRegister(), eax);
      __ pop(StoreDescriptor::ReceiverRegister());  // Receiver.
      __ pop(StoreDescriptor::ValueRegister());     // Restore value.
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
      CallIC(ic);
      break;
    }
  }
  // The assigned value remains in eax as the expression result.
  context()->Plug(eax);
}
2424 
2425 
// Stores eax into the variable slot |location| and, for context slots,
// emits the write barrier required for the store.
void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ mov(location, eax);
  if (var->IsContextSlot()) {
    // Copy the value so eax survives the barrier (edx/ebx are clobbered).
    // NOTE(review): callers compute |location| via VarOperand(var, ecx), so
    // ecx is assumed to hold the context object here — confirm at call sites.
    __ mov(edx, eax);
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
  }
}
2435 
2436 
// Stores eax into variable |var|.  |op| distinguishes ordinary assignments
// (Token::ASSIGN) from the initializing stores (INIT_LET, INIT_CONST,
// INIT_CONST_LEGACY), which have different hole-check semantics.
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(StoreDescriptor::NameRegister(), var->name());
    __ mov(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
    CallStoreIC();

  } else if (op == Token::INIT_CONST_LEGACY) {
    // Const initializers need a write barrier.
    DCHECK(!var->IsParameter());  // No const parameters.
    if (var->IsLookupSlot()) {
      // Runtime call: value, context, and variable name.
      __ push(eax);
      __ push(esi);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
    } else {
      DCHECK(var->IsStackLocal() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, ecx);
      __ mov(edx, location);
      // Legacy const may only be initialized once: skip the store when the
      // slot no longer holds the hole.
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ j(not_equal, &skip, Label::kNear);
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ bind(&skip);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, ecx);
    __ mov(edx, location);
    // A slot still holding the hole means the let binding is uninitialized:
    // throw a ReferenceError instead of storing.
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(not_equal, &assign, Label::kNear);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
    __ bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ push(eax);  // Value.
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ push(Immediate(Smi::FromInt(strict_mode())));
      __ CallRuntime(Runtime::kStoreLookupSlot, 4);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      MemOperand location = VarOperand(var, ecx);
      if (generate_debug_code_ && op == Token::INIT_LET) {
        // Check for an uninitialized let binding.
        __ mov(edx, location);
        __ cmp(edx, isolate()->factory()->the_hole_value());
        __ Check(equal, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }
  }
  // Non-initializing assignments to consts are ignored.
}
2502 
2503 
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  // eax    : value
  // esp[0] : receiver

  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
  __ pop(StoreDescriptor::ReceiverRegister());
  // Attach the assignment's feedback id so the IC can record type feedback.
  CallStoreIC(expr->AssignmentFeedbackId());
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  // The stored value remains in eax as the expression result.
  context()->Plug(eax);
}
2521 
2522 
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  // eax               : value
  // esp[0]            : key
  // esp[kPointerSize] : receiver

  __ pop(StoreDescriptor::NameRegister());  // Key.
  __ pop(StoreDescriptor::ReceiverRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(eax));
  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
  CallIC(ic, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  // The stored value remains in eax as the expression result.
  context()->Plug(eax);
}
2540 
2541 
// Emits a property load (obj.name or obj[key]) and plugs the result (eax)
// into the current expression context.
void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    if (!expr->IsSuperAccess()) {
      // Named load: receiver in the load IC's receiver register.
      VisitForAccumulatorValue(expr->obj());
      __ Move(LoadDescriptor::ReceiverRegister(), result_register());
      EmitNamedPropertyLoad(expr);
    } else {
      // super.name loads go through a dedicated path.
      EmitNamedSuperPropertyLoad(expr);
    }
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(eax);
  } else {
    // Keyed load: object on the stack, key in the accumulator.
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    __ pop(LoadDescriptor::ReceiverRegister());                  // Object.
    __ Move(LoadDescriptor::NameRegister(), result_register());  // Key.
    EmitKeyedPropertyLoad(expr);
    context()->Plug(eax);
  }
}
2565 
2566 
CallIC(Handle<Code> code,TypeFeedbackId ast_id)2567 void FullCodeGenerator::CallIC(Handle<Code> code,
2568                                TypeFeedbackId ast_id) {
2569   ic_total_count_++;
2570   __ call(code, RelocInfo::CODE_TARGET, ast_id);
2571 }
2572 
2573 
2574 // Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  // A bare variable reference is a function-style call; a property access
  // is a method-style call whose receiver is the property's object.
  CallICState::CallType call_type =
      callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
  // Get the target function.
  if (call_type == CallICState::FUNCTION) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ push(Immediate(isolate()->factory()->undefined_value()));
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    __ push(Operand(esp, 0));
    __ mov(Operand(esp, kPointerSize), eax);
  }

  EmitCall(expr, call_type);
}
2603 
2604 
// Emits a super.name(...) method call: the target is looked up through the
// home object via %LoadFromSuper, then invoked with `this` as receiver.
void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  SuperReference* super_ref = callee->AsProperty()->obj()->AsSuperReference();
  EmitLoadHomeObject(super_ref);
  __ push(eax);  // home_object
  VisitForAccumulatorValue(super_ref->this_var());
  __ push(eax);  // this
  __ push(Operand(esp, kPointerSize));  // home_object again (runtime args).
  __ push(eax);                         // this again.
  __ push(Immediate(key->value()));     // Property name.
  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - home_object <-- LoadFromSuper will pop here and below.
  //  - this (receiver)
  //  - key
  __ CallRuntime(Runtime::kLoadFromSuper, 3);

  // Replace home_object with target function.
  __ mov(Operand(esp, kPointerSize), eax);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr, CallICState::METHOD);
}
2639 
2640 
2641 // Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver (on top of the stack) using the
  // keyed load IC; key is in the accumulator.
  DCHECK(callee->IsProperty());
  __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(LoadDescriptor::NameRegister(), eax);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  __ push(Operand(esp, 0));
  __ mov(Operand(esp, kPointerSize), eax);

  // Keyed property calls are always method-style calls.
  EmitCall(expr, CallICState::METHOD);
}
2662 
2663 
// Common tail for IC-based calls: target function and receiver are already
// on the stack; evaluates the arguments and invokes the CallIC stub.
void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }

  // Record source position of the IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic = CallIC::initialize_stub(
      isolate(), arg_count, call_type);
  __ Move(edx, Immediate(Smi::FromInt(expr->CallFeedbackSlot())));  // Slot.
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));  // Function.
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);

  RecordJSReturnSite(expr);

  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));

  // Drop the function from the stack; the call result stays in eax.
  context()->DropAndPlug(1, eax);
}
2691 
2692 
// Pushes the extra arguments for %ResolvePossiblyDirectEval and calls it
// (the function to resolve was already pushed by the caller).  Per the
// caller's contract, the runtime returns the resolved function in eax and
// the receiver in edx.
void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ push(Operand(esp, arg_count * kPointerSize));
  } else {
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }

  // Push the enclosing function.
  __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  // Push the receiver of the enclosing function.
  __ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize));
  // Push the language mode.
  __ push(Immediate(Smi::FromInt(strict_mode())));

  // Push the start position of the scope the calls resides in.
  __ push(Immediate(Smi::FromInt(scope()->start_position())));

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
}
2714 
2715 
// Dispatches a call expression to the right emission strategy: possible
// direct eval, global call, lookup-slot call, property call, or call to an
// arbitrary expression.  Every path records the JS return site.
void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function.  Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());

  if (call_type == Call::POSSIBLY_EVAL_CALL) {
    // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
    // to resolve the function we need to call and the receiver of the call.
    // Then we call the resolved function using the given arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();
    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      // Reserved receiver slot.
      __ push(Immediate(isolate()->factory()->undefined_value()));
      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ push(Operand(esp, (arg_count + 1) * kPointerSize));
      EmitResolvePossiblyDirectEval(arg_count);

      // The runtime call returns a pair of values in eax (function) and
      // edx (receiver). Touch up the stack with the right values.
      __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
      __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
    }
    // Record source position for debugger.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, eax);

  } else if (call_type == Call::GLOBAL_CALL) {
    EmitCallWithLoadIC(expr);

  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
    // Call to a lookup slot (dynamically introduced variable).
    VariableProxy* proxy = callee->AsVariableProxy();
    Label slow, done;
    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed by
      // eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
    }
    __ bind(&slow);
    // Call the runtime to find the function to call (returned in eax) and
    // the object holding it (returned in edx).
    __ push(context_register());
    __ push(Immediate(proxy->name()));
    __ CallRuntime(Runtime::kLoadLookupSlot, 2);
    __ push(eax);  // Function.
    __ push(edx);  // Receiver.

    // If fast case code has been generated, emit code to push the function
    // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ jmp(&call, Label::kNear);
      __ bind(&done);
      // Push function.
      __ push(eax);
      // The receiver is implicitly the global receiver. Indicate this by
      // passing the hole to the call function stub.
      __ push(Immediate(isolate()->factory()->undefined_value()));
      __ bind(&call);
    }

    // The receiver is either the global receiver or an object found by
    // LoadContextSlot.
    EmitCall(expr);

  } else if (call_type == Call::PROPERTY_CALL) {
    Property* property = callee->AsProperty();
    bool is_named_call = property->key()->IsPropertyName();
    // Named super.x() calls are handled in EmitSuperCallWithLoadIC.
    if (property->IsSuperAccess() && is_named_call) {
      EmitSuperCallWithLoadIC(expr);
    } else {
      {
        PreservePositionScope scope(masm()->positions_recorder());
        VisitForStackValue(property->obj());
      }
      if (is_named_call) {
        EmitCallWithLoadIC(expr);
      } else {
        EmitKeyedCallWithLoadIC(expr, property->key());
      }
    }
  } else {
    DCHECK(call_type == Call::OTHER_CALL);
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    // Push undefined as the receiver.
    __ push(Immediate(isolate()->factory()->undefined_value()));
    // Emit function call.
    EmitCall(expr);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  DCHECK(expr->return_is_recorded_);
#endif
}
2834 
2835 
// Emits a 'new' expression: evaluates constructor and arguments, then
// invokes the construct stub with call-target feedback recording.
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  if (FLAG_pretenuring_call_new) {
    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
    // The allocation-site slot must immediately follow the call feedback
    // slot.
    DCHECK(expr->AllocationSiteFeedbackSlot() ==
           expr->CallNewFeedbackSlot() + 1);
  }

  // Feedback vector in ebx, slot index (as a smi) in edx.
  __ LoadHeapObject(ebx, FeedbackVector());
  __ mov(edx, Immediate(Smi::FromInt(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
  __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  // The new object is returned in eax.
  context()->Plug(eax);
}
2877 
2878 
EmitIsSmi(CallRuntime * expr)2879 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2880   ZoneList<Expression*>* args = expr->arguments();
2881   DCHECK(args->length() == 1);
2882 
2883   VisitForAccumulatorValue(args->at(0));
2884 
2885   Label materialize_true, materialize_false;
2886   Label* if_true = NULL;
2887   Label* if_false = NULL;
2888   Label* fall_through = NULL;
2889   context()->PrepareTest(&materialize_true, &materialize_false,
2890                          &if_true, &if_false, &fall_through);
2891 
2892   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2893   __ test(eax, Immediate(kSmiTagMask));
2894   Split(zero, if_true, if_false, fall_through);
2895 
2896   context()->Plug(if_true, if_false);
2897 }
2898 
2899 
EmitIsNonNegativeSmi(CallRuntime * expr)2900 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2901   ZoneList<Expression*>* args = expr->arguments();
2902   DCHECK(args->length() == 1);
2903 
2904   VisitForAccumulatorValue(args->at(0));
2905 
2906   Label materialize_true, materialize_false;
2907   Label* if_true = NULL;
2908   Label* if_false = NULL;
2909   Label* fall_through = NULL;
2910   context()->PrepareTest(&materialize_true, &materialize_false,
2911                          &if_true, &if_false, &fall_through);
2912 
2913   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2914   __ test(eax, Immediate(kSmiTagMask | 0x80000000));
2915   Split(zero, if_true, if_false, fall_through);
2916 
2917   context()->Plug(if_true, if_false);
2918 }
2919 
2920 
// %_IsObject(arg): false for smis and undetectable objects; true for null
// and for objects whose instance type lies in the non-callable spec-object
// range.
void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  // null counts as an object.
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, if_true);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset));
  __ test(ecx, Immediate(1 << Map::kIsUndetectable));
  __ j(not_zero, if_false);
  // Range check on the instance type.
  __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ cmp(ecx, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  __ j(below, if_false);
  __ cmp(ecx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(below_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2951 
2952 
EmitIsSpecObject(CallRuntime * expr)2953 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2954   ZoneList<Expression*>* args = expr->arguments();
2955   DCHECK(args->length() == 1);
2956 
2957   VisitForAccumulatorValue(args->at(0));
2958 
2959   Label materialize_true, materialize_false;
2960   Label* if_true = NULL;
2961   Label* if_false = NULL;
2962   Label* fall_through = NULL;
2963   context()->PrepareTest(&materialize_true, &materialize_false,
2964                          &if_true, &if_false, &fall_through);
2965 
2966   __ JumpIfSmi(eax, if_false);
2967   __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);
2968   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2969   Split(above_equal, if_true, if_false, fall_through);
2970 
2971   context()->Plug(if_true, if_false);
2972 }
2973 
2974 
EmitIsUndetectableObject(CallRuntime * expr)2975 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2976   ZoneList<Expression*>* args = expr->arguments();
2977   DCHECK(args->length() == 1);
2978 
2979   VisitForAccumulatorValue(args->at(0));
2980 
2981   Label materialize_true, materialize_false;
2982   Label* if_true = NULL;
2983   Label* if_false = NULL;
2984   Label* fall_through = NULL;
2985   context()->PrepareTest(&materialize_true, &materialize_false,
2986                          &if_true, &if_false, &fall_through);
2987 
2988   __ JumpIfSmi(eax, if_false);
2989   __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2990   __ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset));
2991   __ test(ebx, Immediate(1 << Map::kIsUndetectable));
2992   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2993   Split(not_zero, if_true, if_false, fall_through);
2994 
2995   context()->Plug(if_true, if_false);
2996 }
2997 
2998 
// Tests whether a String wrapper object can use the default valueOf: no own
// "valueOf" descriptor and the unmodified String prototype.  Caches a
// positive answer in a map bit so the descriptor scan runs at most once per
// map.
void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(eax);

  // Check whether this map has already been checked to be safe for default
  // valueOf.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ebx, Map::kBitField2Offset),
            1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ j(not_zero, &skip_lookup);

  // Check for fast case object. Return false for slow case objects.
  __ mov(ecx, FieldOperand(eax, JSObject::kPropertiesOffset));
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ cmp(ecx, isolate()->factory()->hash_table_map());
  __ j(equal, if_false);

  // Look for valueOf string in the descriptor array, and indicate false if
  // found. Since we omit an enumeration index check, if it is added via a
  // transition that shares its descriptor array, this is a false positive.
  Label entry, loop, done;

  // Skip loop if no descriptors are valid.
  __ NumberOfOwnDescriptors(ecx, ebx);
  __ cmp(ecx, 0);
  __ j(equal, &done);

  __ LoadInstanceDescriptors(ebx, ebx);
  // ebx: descriptor array.
  // ecx: valid entries in the descriptor array.
  // Calculate the end of the descriptor array.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kPointerSize == 4);
  __ imul(ecx, ecx, DescriptorArray::kDescriptorSize);
  __ lea(ecx, Operand(ebx, ecx, times_4, DescriptorArray::kFirstOffset));
  // Calculate location of the first key name.
  __ add(ebx, Immediate(DescriptorArray::kFirstOffset));
  // Loop through all the keys in the descriptor array. If one of these is the
  // internalized string "valueOf" the result is false.
  __ jmp(&entry);
  __ bind(&loop);
  __ mov(edx, FieldOperand(ebx, 0));
  __ cmp(edx, isolate()->factory()->value_of_string());
  __ j(equal, if_false);
  // Advance to the next descriptor's key.
  __ add(ebx, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
  __ bind(&entry);
  __ cmp(ebx, ecx);
  __ j(not_equal, &loop);

  __ bind(&done);

  // Reload map as register ebx was used as temporary above.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));

  // Set the bit in the map to indicate that there is no local valueOf field.
  __ or_(FieldOperand(ebx, Map::kBitField2Offset),
         Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));

  __ bind(&skip_lookup);

  // If a valueOf property is not found on the object check that its
  // prototype is the un-modified String prototype. If not result is false.
  __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  __ JumpIfSmi(ecx, if_false);
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(edx,
         FieldOperand(edx, GlobalObject::kNativeContextOffset));
  __ cmp(ecx,
         ContextOperand(edx,
                        Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3088 
3089 
EmitIsFunction(CallRuntime * expr)3090 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3091   ZoneList<Expression*>* args = expr->arguments();
3092   DCHECK(args->length() == 1);
3093 
3094   VisitForAccumulatorValue(args->at(0));
3095 
3096   Label materialize_true, materialize_false;
3097   Label* if_true = NULL;
3098   Label* if_false = NULL;
3099   Label* fall_through = NULL;
3100   context()->PrepareTest(&materialize_true, &materialize_false,
3101                          &if_true, &if_false, &fall_through);
3102 
3103   __ JumpIfSmi(eax, if_false);
3104   __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
3105   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3106   Split(equal, if_true, if_false, fall_through);
3107 
3108   context()->Plug(if_true, if_false);
3109 }
3110 
3111 
// %_IsMinusZero(arg): true iff the argument is a heap number whose bit
// pattern is that of -0.0 (exponent word 0x80000000, mantissa word 0).
void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Anything that is not a heap number (including smis) is not -0.
  Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
  __ CheckMap(eax, map, if_false, DO_SMI_CHECK);
  // Check if the exponent half is 0x80000000. Comparing against 1 and
  // checking for overflow is the shortest possible encoding.
  __ cmp(FieldOperand(eax, HeapNumber::kExponentOffset), Immediate(0x1));
  __ j(no_overflow, if_false);
  // The mantissa half must be exactly zero.
  __ cmp(FieldOperand(eax, HeapNumber::kMantissaOffset), Immediate(0x0));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3137 
3138 
3139 
// Emits code for the %_IsArray intrinsic: tests whether the argument in eax
// is a JSArray and routes the result through the expression context.
void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // Evaluate the argument into eax.
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Smis are never arrays.
  __ JumpIfSmi(eax, if_false);
  // Compare the instance type with JS_ARRAY_TYPE (map clobbers ebx).
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3160 
3161 
// Emits code for the %_IsRegExp intrinsic: tests whether the argument in eax
// is a JSRegExp and routes the result through the expression context.
void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // Evaluate the argument into eax.
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Smis are never regexps.
  __ JumpIfSmi(eax, if_false);
  // Compare the instance type with JS_REGEXP_TYPE (map clobbers ebx).
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3182 
3183 
3184 
// Emits code for the %_IsConstructCall intrinsic: inspects the calling
// frame's marker to decide whether the current function was invoked as a
// constructor (via 'new'). Takes no arguments.
void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  // (An adaptor frame is interposed when actual and formal argument counts
  // differ; the construct marker lives in the frame below it.)
  Label check_frame_marker;
  __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &check_frame_marker);
  __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
         Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3214 
3215 
// Emits code for the %_ObjectEquals intrinsic: compares its two arguments by
// identity (raw pointer/smi equality, no type coercion).
void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  // First argument goes to the stack, second to the accumulator (eax).
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Pop the first argument into ebx and compare with eax.
  __ pop(ebx);
  __ cmp(eax, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3238 
3239 
// Emits code for the %_Arguments intrinsic: reads one element of the current
// function's actual arguments via ArgumentsAccessStub. The single argument
// is the (smi) index of the requested element.
void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // ArgumentsAccessStub expects the key in edx and the formal
  // parameter count in eax.
  VisitForAccumulatorValue(args->at(0));
  __ mov(edx, eax);
  __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  // Result is left in eax by the stub.
  context()->Plug(eax);
}
3253 
3254 
// Emits code for the %_ArgumentsLength intrinsic: produces the number of
// actual arguments as a smi. Defaults to the formal parameter count and is
// overridden with the adaptor frame's length when the caller went through an
// arguments adaptor (i.e. actual != formal count).
void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);

  Label exit;
  // Get the number of formal parameters.
  __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &exit);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  // The result must be a smi in either path.
  __ AssertSmi(eax);
  context()->Plug(eax);
}
3276 
3277 
// Emits code for the %_ClassOf intrinsic: computes the class name string of
// the argument. Functions yield "Function", spec objects with a non-function
// constructor yield "Object", other spec objects read the instance class
// name from their constructor's SharedFunctionInfo, and non-objects (smis
// and low instance types) yield null.
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  // Evaluate the argument into eax.
  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(eax, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
  // Map is now in eax.
  __ j(below, &null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ j(equal, &function);

  __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  __ j(equal, &function);
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  __ mov(eax, FieldOperand(eax, Map::kConstructorOffset));
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &non_function_constructor);

  // eax now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ mov(eax, isolate()->factory()->Function_string());
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ mov(eax, isolate()->factory()->Object_string());
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ mov(eax, isolate()->factory()->null_value());

  // All done.
  __ bind(&done);

  context()->Plug(eax);
}
3337 
3338 
// Emits code for the %_SubString intrinsic: pushes (string, from, to) on the
// stack and delegates the actual substring work to SubStringStub.
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  // Stub leaves the result string in eax.
  context()->Plug(eax);
}
3350 
3351 
// Emits code for the %_RegExpExec intrinsic: pushes its four arguments on
// the stack and delegates the match to RegExpExecStub.
void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  // Stub leaves the result in eax.
  context()->Plug(eax);
}
3364 
3365 
// Emits code for the %_ValueOf intrinsic: if the argument is a JSValue
// wrapper, unwraps it to its boxed value; otherwise returns the argument
// unchanged (smis and non-wrapper objects pass through).
void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(eax, &done, Label::kNear);
  // If the object is not a value type, return the object.
  __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
  __ j(not_equal, &done, Label::kNear);
  // Unwrap: load the wrapped primitive out of the JSValue.
  __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(eax);
}
3383 
3384 
// Emits code for the %_DateField intrinsic: reads field |index| of a JSDate.
// Field 0 (the time value) is always read directly; other cached fields are
// read inline only when the date-cache stamp is still current, falling back
// to a C call otherwise. Throws if the receiver is not a JSDate.
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  // The field index must be a compile-time smi literal.
  DCHECK_NE(NULL, args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label runtime, done, not_date_object;
  Register object = eax;
  Register result = eax;
  Register scratch = ecx;

  // The receiver must be a JSDate heap object.
  __ JumpIfSmi(object, &not_date_object);
  __ CmpObjectType(object, JS_DATE_TYPE, scratch);
  __ j(not_equal, &not_date_object);

  if (index->value() == 0) {
    // Field 0 is the date's time value; it is always valid.
    __ mov(result, FieldOperand(object, JSDate::kValueOffset));
    __ jmp(&done);
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      // Cached fields are only valid while the object's stamp matches the
      // isolate-wide date cache stamp.
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ mov(scratch, Operand::StaticVariable(stamp));
      __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
      __ j(not_equal, &runtime, Label::kNear);
      __ mov(result, FieldOperand(object, JSDate::kValueOffset +
                                          kPointerSize * index->value()));
      __ jmp(&done);
    }
    // Slow path: call the C date-field function with (object, index).
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch);
    __ mov(Operand(esp, 0), object);
    __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ jmp(&done);
  }

  // Non-date receiver: throw a TypeError via the runtime.
  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  __ bind(&done);
  context()->Plug(result);
}
3428 
3429 
// Emits code for the %_OneByteSeqStringSetChar intrinsic: stores a character
// code (value) at position (index) in a sequential one-byte string, in
// place. Arguments: index, value, string. Returns the string.
void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string

  // Pop in reverse push order: value was pushed second, index first.
  __ pop(value);
  __ pop(index);

  if (FLAG_debug_code) {
    // Both index and value must arrive as smis.
    __ test(value, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ test(index, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
  }

  __ SmiUntag(value);
  __ SmiUntag(index);

  if (FLAG_debug_code) {
    // Verify string type and index bounds (debug builds only).
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
  }

  // One byte per character: scale factor times_1.
  __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
           value);
  context()->Plug(string);
}
3464 
3465 
// Emits code for the %_TwoByteSeqStringSetChar intrinsic: stores a character
// code (value) at position (index) in a sequential two-byte string, in
// place. Arguments: index, value, string. Returns the string.
void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  // Pop in reverse push order: value was pushed second, index first.
  __ pop(value);
  __ pop(index);

  if (FLAG_debug_code) {
    // Both index and value must arrive as smis.
    __ test(value, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ test(index, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    // The check helper wants an untagged index; retag afterwards because the
    // two-byte store below uses the smi-tagged index directly.
    __ SmiUntag(index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
    __ SmiTag(index);
  }

  __ SmiUntag(value);
  // No need to untag a smi for two-byte addressing.
  // (Smi tag shift of 1 equals the times_2 scaling a two-byte store needs.)
  __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
           value);
  context()->Plug(string);
}
3497 
3498 
// Emits code for the %_MathPow intrinsic on x87: pushes base and exponent
// and calls the slow-path runtime implementation (no inline pow on this
// target).
void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  __ CallRuntime(Runtime::kMathPowSlow, 2);
  context()->Plug(eax);
}
3509 
3510 
// Emits code for the %_SetValueOf intrinsic: if the first argument is a
// JSValue wrapper, stores the second argument as its boxed value (with a
// write barrier); otherwise it is a no-op. Returns the value.
void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));  // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(ebx);  // eax = value. ebx = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(ebx, &done, Label::kNear);

  // If the object is not a value type, return the value.
  __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
  __ j(not_equal, &done, Label::kNear);

  // Store the value.
  __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);

  // Update the write barrier.  Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(edx, eax);
  __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);

  __ bind(&done);
  // In all paths the value is still in eax.
  context()->Plug(eax);
}
3538 
3539 
// Emits code for the %_NumberToString intrinsic: converts the number in eax
// to a string via NumberToStringStub.
void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(args->length(), 1);

  // Load the argument into eax and call the stub.
  VisitForAccumulatorValue(args->at(0));

  NumberToStringStub stub(isolate());
  __ CallStub(&stub);
  // Stub leaves the result string in eax.
  context()->Plug(eax);
}
3551 
3552 
// Emits code for the %_StringCharFromCode intrinsic: converts a character
// code in eax into a one-character string. The fast path handles cached
// single characters; the generator's slow path covers the rest.
void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // Evaluate the character code into eax.
  VisitForAccumulatorValue(args->at(0));

  Label done;
  // Generator takes the code in eax and produces the result in ebx.
  StringCharFromCodeGenerator generator(eax, ebx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  // Slow case emitted out of line; NopRuntimeCallHelper means no extra
  // save/restore work is needed around the runtime call.
  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(ebx);
}
3570 
3571 
// Emits code for the %_StringCharCodeAt intrinsic: loads the character code
// at (index) of (string). Out-of-range indices produce NaN; a non-smi index
// produces undefined to signal that conversion is needed, matching the
// String.prototype.charCodeAt contract.
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // String goes to the stack, index to the accumulator.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register result = edx;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ Move(result, Immediate(isolate()->factory()->nan_value()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ Move(result, Immediate(isolate()->factory()->undefined_value()));
  __ jmp(&done);

  // Out-of-line slow path emitted by the generator.
  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
3616 
3617 
// Emits code for the %_StringCharAt intrinsic: loads the one-character
// string at (index) of (string). Out-of-range indices produce the empty
// string; a non-smi index produces smi zero to signal that conversion is
// needed, matching the String.prototype.charAt contract.
void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // String goes to the stack, index to the accumulator.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register scratch = edx;
  Register result = eax;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ Move(result, Immediate(isolate()->factory()->empty_string()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Move(result, Immediate(Smi::FromInt(0)));
  __ jmp(&done);

  // Out-of-line slow path emitted by the generator.
  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
3664 
3665 
// Emits code for the %_StringAdd intrinsic: concatenates two strings via
// StringAddStub with both operands type-checked by the stub.
void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  // Left operand to the stack, right operand to eax.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  // Stub expects the left operand in edx and the right in eax.
  __ pop(edx);
  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
  context()->Plug(eax);
}
3677 
3678 
// Emits code for the %_StringCompare intrinsic: pushes both strings and
// delegates the comparison to StringCompareStub.
void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub(isolate());
  __ CallStub(&stub);
  // Stub leaves the comparison result in eax.
  context()->Plug(eax);
}
3690 
3691 
// Emits code for the %_CallFunction intrinsic: calls the last argument as a
// function with the preceding arguments (receiver + args) already pushed on
// the stack. Non-function callees (including proxies) fall back to the
// Runtime::kCall path.
void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  // Push receiver and the call arguments; the callee itself goes to eax.
  for (int i = 0; i < arg_count + 1; ++i) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &runtime);

  // InvokeFunction requires the function in edi. Move it in there.
  __ mov(edi, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(edi, count, CALL_FUNCTION, NullCallWrapper());
  // Restore the context register after the call.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  // Slow path: push the callee back and let the runtime perform the call.
  __ bind(&runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(eax);
}
3722 
3723 
// Emits code for the %_RegExpConstructResult intrinsic: builds a regexp
// match result object via RegExpConstructResultStub from three arguments
// (length, index, input).
void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpConstructResultStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  // Pop the two stacked arguments into the registers the stub expects
  // (third argument is already in eax).
  __ pop(ebx);
  __ pop(ecx);
  __ CallStub(&stub);
  context()->Plug(eax);
}
3737 
3738 
// Emits code for the %_GetFromCache intrinsic: looks up (key) in the
// JSFunctionResultCache identified by the literal (cache_id). The fast path
// probes only the cache's finger entry; on a miss the lookup (and cache
// update) is done by Runtime::kGetFromCache.
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());

  // The cache id must be a compile-time smi literal.
  DCHECK_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    // Unknown cache id: abort in debug-like configurations and produce
    // undefined so codegen can still complete.
    __ Abort(kAttemptToUseUndefinedCache);
    __ mov(eax, isolate()->factory()->undefined_value());
    context()->Plug(eax);
    return;
  }

  // Evaluate the key into eax.
  VisitForAccumulatorValue(args->at(1));

  Register key = eax;
  Register cache = ebx;
  Register tmp = ecx;
  // Load the cache FixedArray from the native context:
  // global object -> native context -> caches array -> cache[cache_id].
  __ mov(cache, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
  __ mov(cache,
         FieldOperand(cache, GlobalObject::kNativeContextOffset));
  __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ mov(cache,
         FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
  // tmp now holds finger offset as a smi.
  __ cmp(key, FixedArrayElementOperand(cache, tmp));
  __ j(not_equal, &not_found);

  // Hit: the cached value sits one element after the key.
  __ mov(eax, FixedArrayElementOperand(cache, tmp, 1));
  __ jmp(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ push(cache);
  __ push(key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(eax);
}
3786 
3787 
// Emits code for the %_HasCachedArrayIndex intrinsic: tests whether the
// string argument's hash field contains a cached array index (the
// "contains" mask bits being clear indicates a cached index is present).
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // Evaluate the argument into eax.
  VisitForAccumulatorValue(args->at(0));

  // The intrinsic is only valid on strings.
  __ AssertString(eax);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Zero masked bits => an array index is cached in the hash field.
  __ test(FieldOperand(eax, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3810 
3811 
// Emits code for the %_GetCachedArrayIndex intrinsic: extracts the array
// index cached in the string argument's hash field as a smi. The caller is
// expected to have checked %_HasCachedArrayIndex first.
void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  // The intrinsic is only valid on strings.
  __ AssertString(eax);

  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
  // Decode the index bits of the hash field into a smi in eax.
  __ IndexFromHash(eax, eax);

  context()->Plug(eax);
}
3824 
3825 
EmitFastOneByteArrayJoin(CallRuntime * expr)3826 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
3827   Label bailout, done, one_char_separator, long_separator,
3828       non_trivial_array, not_size_one_array, loop,
3829       loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3830 
3831   ZoneList<Expression*>* args = expr->arguments();
3832   DCHECK(args->length() == 2);
3833   // We will leave the separator on the stack until the end of the function.
3834   VisitForStackValue(args->at(1));
3835   // Load this to eax (= array)
3836   VisitForAccumulatorValue(args->at(0));
3837   // All aliases of the same register have disjoint lifetimes.
3838   Register array = eax;
3839   Register elements = no_reg;  // Will be eax.
3840 
3841   Register index = edx;
3842 
3843   Register string_length = ecx;
3844 
3845   Register string = esi;
3846 
3847   Register scratch = ebx;
3848 
3849   Register array_length = edi;
3850   Register result_pos = no_reg;  // Will be edi.
3851 
3852   // Separator operand is already pushed.
3853   Operand separator_operand = Operand(esp, 2 * kPointerSize);
3854   Operand result_operand = Operand(esp, 1 * kPointerSize);
3855   Operand array_length_operand = Operand(esp, 0);
3856   __ sub(esp, Immediate(2 * kPointerSize));
3857   __ cld();
3858   // Check that the array is a JSArray
3859   __ JumpIfSmi(array, &bailout);
3860   __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3861   __ j(not_equal, &bailout);
3862 
3863   // Check that the array has fast elements.
3864   __ CheckFastElements(scratch, &bailout);
3865 
3866   // If the array has length zero, return the empty string.
3867   __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
3868   __ SmiUntag(array_length);
3869   __ j(not_zero, &non_trivial_array);
3870   __ mov(result_operand, isolate()->factory()->empty_string());
3871   __ jmp(&done);
3872 
3873   // Save the array length.
3874   __ bind(&non_trivial_array);
3875   __ mov(array_length_operand, array_length);
3876 
3877   // Save the FixedArray containing array's elements.
3878   // End of array's live range.
3879   elements = array;
3880   __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
3881   array = no_reg;
3882 
3883 
3884   // Check that all array elements are sequential one-byte strings, and
3885   // accumulate the sum of their lengths, as a smi-encoded value.
3886   __ Move(index, Immediate(0));
3887   __ Move(string_length, Immediate(0));
3888   // Loop condition: while (index < length).
3889   // Live loop registers: index, array_length, string,
3890   //                      scratch, string_length, elements.
3891   if (generate_debug_code_) {
3892     __ cmp(index, array_length);
3893     __ Assert(less, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
3894   }
3895   __ bind(&loop);
3896   __ mov(string, FieldOperand(elements,
3897                               index,
3898                               times_pointer_size,
3899                               FixedArray::kHeaderSize));
3900   __ JumpIfSmi(string, &bailout);
3901   __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
3902   __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3903   __ and_(scratch, Immediate(
3904       kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3905   __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
3906   __ j(not_equal, &bailout);
3907   __ add(string_length,
3908          FieldOperand(string, SeqOneByteString::kLengthOffset));
3909   __ j(overflow, &bailout);
3910   __ add(index, Immediate(1));
3911   __ cmp(index, array_length);
3912   __ j(less, &loop);
3913 
3914   // If array_length is 1, return elements[0], a string.
3915   __ cmp(array_length, 1);
3916   __ j(not_equal, &not_size_one_array);
3917   __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
3918   __ mov(result_operand, scratch);
3919   __ jmp(&done);
3920 
3921   __ bind(&not_size_one_array);
3922 
3923   // End of array_length live range.
3924   result_pos = array_length;
3925   array_length = no_reg;
3926 
3927   // Live registers:
3928   // string_length: Sum of string lengths, as a smi.
3929   // elements: FixedArray of strings.
3930 
3931   // Check that the separator is a flat one-byte string.
3932   __ mov(string, separator_operand);
3933   __ JumpIfSmi(string, &bailout);
3934   __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
3935   __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3936   __ and_(scratch, Immediate(
3937       kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3938   __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
3939   __ j(not_equal, &bailout);
3940 
3941   // Add (separator length times array_length) - separator length
3942   // to string_length.
3943   __ mov(scratch, separator_operand);
3944   __ mov(scratch, FieldOperand(scratch, SeqOneByteString::kLengthOffset));
3945   __ sub(string_length, scratch);  // May be negative, temporarily.
3946   __ imul(scratch, array_length_operand);
3947   __ j(overflow, &bailout);
3948   __ add(string_length, scratch);
3949   __ j(overflow, &bailout);
3950 
3951   __ shr(string_length, 1);
3952   // Live registers and stack values:
3953   //   string_length
3954   //   elements
3955   __ AllocateOneByteString(result_pos, string_length, scratch, index, string,
3956                            &bailout);
3957   __ mov(result_operand, result_pos);
3958   __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
3959 
3960 
3961   __ mov(string, separator_operand);
3962   __ cmp(FieldOperand(string, SeqOneByteString::kLengthOffset),
3963          Immediate(Smi::FromInt(1)));
3964   __ j(equal, &one_char_separator);
3965   __ j(greater, &long_separator);
3966 
3967 
3968   // Empty separator case
3969   __ mov(index, Immediate(0));
3970   __ jmp(&loop_1_condition);
3971   // Loop condition: while (index < length).
3972   __ bind(&loop_1);
3973   // Each iteration of the loop concatenates one string to the result.
3974   // Live values in registers:
3975   //   index: which element of the elements array we are adding to the result.
3976   //   result_pos: the position to which we are currently copying characters.
3977   //   elements: the FixedArray of strings we are joining.
3978 
3979   // Get string = array[index].
3980   __ mov(string, FieldOperand(elements, index,
3981                               times_pointer_size,
3982                               FixedArray::kHeaderSize));
3983   __ mov(string_length,
3984          FieldOperand(string, String::kLengthOffset));
3985   __ shr(string_length, 1);
3986   __ lea(string,
3987          FieldOperand(string, SeqOneByteString::kHeaderSize));
3988   __ CopyBytes(string, result_pos, string_length, scratch);
3989   __ add(index, Immediate(1));
3990   __ bind(&loop_1_condition);
3991   __ cmp(index, array_length_operand);
3992   __ j(less, &loop_1);  // End while (index < length).
3993   __ jmp(&done);
3994 
3995 
3996 
3997   // One-character separator case
3998   __ bind(&one_char_separator);
3999   // Replace separator with its one-byte character value.
4000   __ mov_b(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
4001   __ mov_b(separator_operand, scratch);
4002 
4003   __ Move(index, Immediate(0));
4004   // Jump into the loop after the code that copies the separator, so the first
4005   // element is not preceded by a separator
4006   __ jmp(&loop_2_entry);
4007   // Loop condition: while (index < length).
4008   __ bind(&loop_2);
4009   // Each iteration of the loop concatenates one string to the result.
4010   // Live values in registers:
4011   //   index: which element of the elements array we are adding to the result.
4012   //   result_pos: the position to which we are currently copying characters.
4013 
4014   // Copy the separator character to the result.
4015   __ mov_b(scratch, separator_operand);
4016   __ mov_b(Operand(result_pos, 0), scratch);
4017   __ inc(result_pos);
4018 
4019   __ bind(&loop_2_entry);
4020   // Get string = array[index].
4021   __ mov(string, FieldOperand(elements, index,
4022                               times_pointer_size,
4023                               FixedArray::kHeaderSize));
4024   __ mov(string_length,
4025          FieldOperand(string, String::kLengthOffset));
4026   __ shr(string_length, 1);
4027   __ lea(string,
4028          FieldOperand(string, SeqOneByteString::kHeaderSize));
4029   __ CopyBytes(string, result_pos, string_length, scratch);
4030   __ add(index, Immediate(1));
4031 
4032   __ cmp(index, array_length_operand);
4033   __ j(less, &loop_2);  // End while (index < length).
4034   __ jmp(&done);
4035 
4036 
4037   // Long separator case (separator is more than one character).
4038   __ bind(&long_separator);
4039 
4040   __ Move(index, Immediate(0));
4041   // Jump into the loop after the code that copies the separator, so the first
4042   // element is not preceded by a separator
4043   __ jmp(&loop_3_entry);
4044   // Loop condition: while (index < length).
4045   __ bind(&loop_3);
4046   // Each iteration of the loop concatenates one string to the result.
4047   // Live values in registers:
4048   //   index: which element of the elements array we are adding to the result.
4049   //   result_pos: the position to which we are currently copying characters.
4050 
4051   // Copy the separator to the result.
4052   __ mov(string, separator_operand);
4053   __ mov(string_length,
4054          FieldOperand(string, String::kLengthOffset));
4055   __ shr(string_length, 1);
4056   __ lea(string,
4057          FieldOperand(string, SeqOneByteString::kHeaderSize));
4058   __ CopyBytes(string, result_pos, string_length, scratch);
4059 
4060   __ bind(&loop_3_entry);
4061   // Get string = array[index].
4062   __ mov(string, FieldOperand(elements, index,
4063                               times_pointer_size,
4064                               FixedArray::kHeaderSize));
4065   __ mov(string_length,
4066          FieldOperand(string, String::kLengthOffset));
4067   __ shr(string_length, 1);
4068   __ lea(string,
4069          FieldOperand(string, SeqOneByteString::kHeaderSize));
4070   __ CopyBytes(string, result_pos, string_length, scratch);
4071   __ add(index, Immediate(1));
4072 
4073   __ cmp(index, array_length_operand);
4074   __ j(less, &loop_3);  // End while (index < length).
4075   __ jmp(&done);
4076 
4077 
4078   __ bind(&bailout);
4079   __ mov(result_operand, isolate()->factory()->undefined_value());
4080   __ bind(&done);
4081   __ mov(eax, result_operand);
4082   // Drop temp values from the stack, and restore context register.
4083   __ add(esp, Immediate(3 * kPointerSize));
4084 
4085   __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4086   context()->Plug(eax);
4087 }
4088 
4089 
EmitDebugIsActive(CallRuntime * expr)4090 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4091   DCHECK(expr->arguments()->length() == 0);
4092   ExternalReference debug_is_active =
4093       ExternalReference::debug_is_active_address(isolate());
4094   __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
4095   __ SmiTag(eax);
4096   context()->Plug(eax);
4097 }
4098 
4099 
VisitCallRuntime(CallRuntime * expr)4100 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4101   if (expr->function() != NULL &&
4102       expr->function()->intrinsic_type == Runtime::INLINE) {
4103     Comment cmnt(masm_, "[ InlineRuntimeCall");
4104     EmitInlineRuntimeCall(expr);
4105     return;
4106   }
4107 
4108   Comment cmnt(masm_, "[ CallRuntime");
4109   ZoneList<Expression*>* args = expr->arguments();
4110 
4111   if (expr->is_jsruntime()) {
4112     // Push the builtins object as receiver.
4113     __ mov(eax, GlobalObjectOperand());
4114     __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));
4115 
4116     // Load the function from the receiver.
4117     __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
4118     __ mov(LoadDescriptor::NameRegister(), Immediate(expr->name()));
4119     if (FLAG_vector_ics) {
4120       __ mov(VectorLoadICDescriptor::SlotRegister(),
4121              Immediate(Smi::FromInt(expr->CallRuntimeFeedbackSlot())));
4122       CallLoadIC(NOT_CONTEXTUAL);
4123     } else {
4124       CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4125     }
4126 
4127     // Push the target function under the receiver.
4128     __ push(Operand(esp, 0));
4129     __ mov(Operand(esp, kPointerSize), eax);
4130 
4131     // Code common for calls using the IC.
4132     ZoneList<Expression*>* args = expr->arguments();
4133     int arg_count = args->length();
4134     for (int i = 0; i < arg_count; i++) {
4135       VisitForStackValue(args->at(i));
4136     }
4137 
4138     // Record source position of the IC call.
4139     SetSourcePosition(expr->position());
4140     CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4141     __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
4142     __ CallStub(&stub);
4143     // Restore context register.
4144     __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4145     context()->DropAndPlug(1, eax);
4146 
4147   } else {
4148     // Push the arguments ("left-to-right").
4149     int arg_count = args->length();
4150     for (int i = 0; i < arg_count; i++) {
4151       VisitForStackValue(args->at(i));
4152     }
4153 
4154     // Call the C runtime function.
4155     __ CallRuntime(expr->function(), arg_count);
4156 
4157     context()->Plug(eax);
4158   }
4159 }
4160 
4161 
// Emits code for the unary operators delete, void, ! and typeof.
// Any other operator reaching this visitor is a bug (UNREACHABLE).
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        // delete obj.key / delete obj[key]: evaluate receiver and key,
        // then invoke the DELETE builtin with the current strictness.
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ push(Immediate(Smi::FromInt(strict_mode())));
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        DCHECK(strict_mode() == SLOPPY || var->is_this());
        if (var->IsUnallocated()) {
          // Global variable: go through the DELETE builtin in sloppy mode.
          __ push(GlobalObjectOperand());
          __ push(Immediate(var->name()));
          __ push(Immediate(Smi::FromInt(SLOPPY)));
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(eax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false.  'this' is
          // not really a variable, though we implement it as one.  The
          // subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ push(context_register());
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      // 'void expr' evaluates expr only for side effects; result is
      // always undefined.
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        // Labels are swapped: a false subexpression materializes true.
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      // Evaluate in a typeof context (no reference error for unresolvable
      // identifiers), then let the runtime compute the typeof string.
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(eax);
      break;
    }

    default:
      UNREACHABLE();
  }
}
4274 
4275 
// Emits code for prefix/postfix ++ and --.  The target may be a variable,
// a named property, or a keyed property.  When ShouldInlineSmiCase() is
// true a patchable smi fast path is emitted (see JumpPatchSite); non-smi
// operands are converted with ToNumberStub and handled by the BinaryOpIC.
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ push(Immediate(Smi::FromInt(0)));
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the register.
      VisitForStackValue(prop->obj());
      __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForStackValue(prop->key());
      __ mov(LoadDescriptor::ReceiverRegister(),
             Operand(esp, kPointerSize));                       // Object.
      __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));  // Key.
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(eax);
            break;
          case NAMED_PROPERTY:
            __ mov(Operand(esp, kPointerSize), eax);
            break;
          case KEYED_PROPERTY:
            __ mov(Operand(esp, 2 * kPointerSize), eax);
            break;
        }
      }
    }

    // Smi +1/-1 is done with tagged arithmetic: add/sub Smi::FromInt(1).
    if (expr->op() == Token::INC) {
      __ add(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
    __ j(no_overflow, &done, Label::kNear);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ add(eax, Immediate(Smi::FromInt(1)));
    }
    __ jmp(&stub_call, Label::kNear);
    __ bind(&slow);
  }
  // Non-smi operand: convert it to a number before the increment.
  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
      }
    }
  }

  // Record position before stub call.
  SetSourcePosition(expr->position());

  // Call stub for +1/-1.  Left operand in edx, right (the constant 1)
  // in eax, as expected by the BinaryOpIC.
  __ bind(&stub_call);
  __ mov(edx, eax);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), expr->binary_op(),
                                              NO_OVERWRITE).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(eax);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(eax);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::NameRegister(),
             prop->key()->AsLiteral()->value());
      __ pop(StoreDescriptor::ReceiverRegister());
      CallStoreIC(expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ pop(StoreDescriptor::NameRegister());
      __ pop(StoreDescriptor::ReceiverRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
      CallIC(ic, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        // Result is on the stack
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
  }
}
4463 
4464 
// Loads the value of an expression for use by typeof.  Unlike a normal
// load, unresolvable global and lookup-slot variables must not throw a
// reference error (regular/NoReferenceError load paths are used below).
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  DCHECK(!context()->IsEffect());
  DCHECK(!context()->IsTest());

  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "[ Global variable");
    __ mov(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
    __ mov(LoadDescriptor::NameRegister(), Immediate(proxy->name()));
    if (FLAG_vector_ics) {
      __ mov(VectorLoadICDescriptor::SlotRegister(),
             Immediate(Smi::FromInt(proxy->VariableFeedbackSlot())));
    }
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallLoadIC(NOT_CONTEXTUAL);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(eax);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Comment cmnt(masm_, "[ Lookup slot");
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);

    // Slow case: do a full runtime lookup that yields undefined rather
    // than throwing for an unresolvable name.
    __ bind(&slow);
    __ push(esi);
    __ push(Immediate(proxy->name()));
    __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(eax);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}
4504 
4505 
EmitLiteralCompareTypeof(Expression * expr,Expression * sub_expr,Handle<String> check)4506 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4507                                                  Expression* sub_expr,
4508                                                  Handle<String> check) {
4509   Label materialize_true, materialize_false;
4510   Label* if_true = NULL;
4511   Label* if_false = NULL;
4512   Label* fall_through = NULL;
4513   context()->PrepareTest(&materialize_true, &materialize_false,
4514                          &if_true, &if_false, &fall_through);
4515 
4516   { AccumulatorValueContext context(this);
4517     VisitForTypeofValue(sub_expr);
4518   }
4519   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4520 
4521   Factory* factory = isolate()->factory();
4522   if (String::Equals(check, factory->number_string())) {
4523     __ JumpIfSmi(eax, if_true);
4524     __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
4525            isolate()->factory()->heap_number_map());
4526     Split(equal, if_true, if_false, fall_through);
4527   } else if (String::Equals(check, factory->string_string())) {
4528     __ JumpIfSmi(eax, if_false);
4529     __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
4530     __ j(above_equal, if_false);
4531     // Check for undetectable objects => false.
4532     __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
4533               1 << Map::kIsUndetectable);
4534     Split(zero, if_true, if_false, fall_through);
4535   } else if (String::Equals(check, factory->symbol_string())) {
4536     __ JumpIfSmi(eax, if_false);
4537     __ CmpObjectType(eax, SYMBOL_TYPE, edx);
4538     Split(equal, if_true, if_false, fall_through);
4539   } else if (String::Equals(check, factory->boolean_string())) {
4540     __ cmp(eax, isolate()->factory()->true_value());
4541     __ j(equal, if_true);
4542     __ cmp(eax, isolate()->factory()->false_value());
4543     Split(equal, if_true, if_false, fall_through);
4544   } else if (String::Equals(check, factory->undefined_string())) {
4545     __ cmp(eax, isolate()->factory()->undefined_value());
4546     __ j(equal, if_true);
4547     __ JumpIfSmi(eax, if_false);
4548     // Check for undetectable objects => true.
4549     __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
4550     __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
4551     __ test(ecx, Immediate(1 << Map::kIsUndetectable));
4552     Split(not_zero, if_true, if_false, fall_through);
4553   } else if (String::Equals(check, factory->function_string())) {
4554     __ JumpIfSmi(eax, if_false);
4555     STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4556     __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
4557     __ j(equal, if_true);
4558     __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
4559     Split(equal, if_true, if_false, fall_through);
4560   } else if (String::Equals(check, factory->object_string())) {
4561     __ JumpIfSmi(eax, if_false);
4562     __ cmp(eax, isolate()->factory()->null_value());
4563     __ j(equal, if_true);
4564     __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
4565     __ j(below, if_false);
4566     __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4567     __ j(above, if_false);
4568     // Check for undetectable objects => false.
4569     __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
4570               1 << Map::kIsUndetectable);
4571     Split(zero, if_true, if_false, fall_through);
4572   } else {
4573     if (if_false != fall_through) __ jmp(if_false);
4574   }
4575   context()->Plug(if_true, if_false);
4576 }
4577 
4578 
// Emits code for binary comparison operators (==, ===, <, in,
// instanceof, ...).  Literal comparisons against typeof strings and
// null/undefined are dispatched to fast paths by TryLiteralCompare.
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      // The stub returns 0 for true.
      Split(zero, if_true, if_false, fall_through);
      break;
    }

    default: {
      // Generic comparison: left operand in edx, right in eax.
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ pop(edx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        // Fast path: if both operands are smis, compare them directly.
        Label slow_case;
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}
4651 
4652 
EmitLiteralCompareNil(CompareOperation * expr,Expression * sub_expr,NilValue nil)4653 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4654                                               Expression* sub_expr,
4655                                               NilValue nil) {
4656   Label materialize_true, materialize_false;
4657   Label* if_true = NULL;
4658   Label* if_false = NULL;
4659   Label* fall_through = NULL;
4660   context()->PrepareTest(&materialize_true, &materialize_false,
4661                          &if_true, &if_false, &fall_through);
4662 
4663   VisitForAccumulatorValue(sub_expr);
4664   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4665 
4666   Handle<Object> nil_value = nil == kNullValue
4667       ? isolate()->factory()->null_value()
4668       : isolate()->factory()->undefined_value();
4669   if (expr->op() == Token::EQ_STRICT) {
4670     __ cmp(eax, nil_value);
4671     Split(equal, if_true, if_false, fall_through);
4672   } else {
4673     Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4674     CallIC(ic, expr->CompareOperationFeedbackId());
4675     __ test(eax, eax);
4676     Split(not_zero, if_true, if_false, fall_through);
4677   }
4678   context()->Plug(if_true, if_false);
4679 }
4680 
4681 
// Loads the currently executing function from the standard frame slot
// into eax and plugs it into the expression context.
void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(eax);
}
4686 
4687 
// The register used to pass expression results between generated
// code fragments on this port: eax.
Register FullCodeGenerator::result_register() {
  return eax;
}
4691 
4692 
// The register holding the current context on this port: esi.
Register FullCodeGenerator::context_register() {
  return esi;
}
4696 
4697 
// Stores |value| into the frame slot |frame_offset| bytes from ebp.
// The offset must be pointer-size aligned (checked).
void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}
4702 
4703 
// Loads slot |context_index| of the current context (esi) into |dst|.
void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}
4707 
4708 
// Pushes the closure argument used when allocating a new context for the
// current declaration scope.  The choice depends on the kind of scope;
// see the per-branch comments below.
void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.  Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ push(Immediate(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    __ push(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    // Function scope: the closure is the function in the current frame.
    DCHECK(declaration_scope->is_function_scope());
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}
4728 
4729 
4730 // ----------------------------------------------------------------------------
4731 // Non-local control flow support.
4732 
// Saves state on the stack before entering a finally block: the cooked
// return address (as a smi-encoded delta from this code object), the
// result register, and the isolate's pending-message state.
// ExitFinallyBlock restores all of this in the reverse order.
void FullCodeGenerator::EnterFinallyBlock() {
  // Cook return address on top of stack (smi encoded Code* delta).
  // edx is used as scratch throughout, so it must not alias the result
  // register (checked below).
  DCHECK(!result_register().is(edx));
  __ pop(edx);
  __ sub(edx, Immediate(masm_->CodeObject()));
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  __ SmiTag(edx);
  __ push(edx);

  // Store result register while executing finally block.
  __ push(result_register());

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_obj));
  __ push(edx);

  // Smi-tag the boolean flag so it is a valid tagged stack value.
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(edx, Operand::StaticVariable(has_pending_message));
  __ SmiTag(edx);
  __ push(edx);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_script));
  __ push(edx);
}
4763 
4764 
// Restores the state saved by EnterFinallyBlock() — pending-message state,
// result register, and the cooked return address — popping in exactly the
// reverse of the push order there, then jumps back to the uncooked address.
void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(edx));
  // Restore pending message from stack.
  __ pop(edx);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(Operand::StaticVariable(pending_message_script), edx);

  // Untag the flag that EnterFinallyBlock smi-tagged before writing it back.
  __ pop(edx);
  __ SmiUntag(edx);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(Operand::StaticVariable(has_pending_message), edx);

  __ pop(edx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(Operand::StaticVariable(pending_message_obj), edx);

  // Restore result register from stack.
  __ pop(result_register());

  // Uncook return address: turn the smi-encoded Code* delta back into an
  // absolute address and resume execution there.
  __ pop(edx);
  __ SmiUntag(edx);
  __ add(edx, Immediate(masm_->CodeObject()));
  __ jmp(edx);
}
4793 
4794 
4795 #undef __
4796 
4797 #define __ ACCESS_MASM(masm())
4798 
// Emits code to leave a try-finally scope on a non-local exit (break,
// continue, return): unwinds the stack down to the try handler, restores
// the context, pops the handler, and calls the finally code.  Resets
// *stack_depth / *context_length to zero since the unwinding is done here,
// and returns the enclosing nested statement for the caller to continue
// unwinding.
FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ mov(esi, Operand(esp, StackHandlerConstants::kContextOffset));
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  }
  __ PopTryHandler();
  __ call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}
4821 
4822 #undef __
4823 
4824 
// Byte patterns used by BackEdgeTable::PatchAt/GetBackEdgeState below to
// flip a compiled back-edge check between its two states.
static const byte kJnsInstruction = 0x79;  // Opcode of the short "jns" jump.
static const byte kJnsOffset = 0x11;       // Its rel8 displacement (skips the call sequence).
static const byte kNopByteOne = 0x66;      // Operand-size prefix; together with
static const byte kNopByteTwo = 0x90;      // nop this forms a two-byte nop.
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;  // Opcode of "call rel32" (DCHECK only).
#endif
4832 
4833 
PatchAt(Code * unoptimized_code,Address pc,BackEdgeState target_state,Code * replacement_code)4834 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4835                             Address pc,
4836                             BackEdgeState target_state,
4837                             Code* replacement_code) {
4838   Address call_target_address = pc - kIntSize;
4839   Address jns_instr_address = call_target_address - 3;
4840   Address jns_offset_address = call_target_address - 2;
4841 
4842   switch (target_state) {
4843     case INTERRUPT:
4844       //     sub <profiling_counter>, <delta>  ;; Not changed
4845       //     jns ok
4846       //     call <interrupt stub>
4847       //   ok:
4848       *jns_instr_address = kJnsInstruction;
4849       *jns_offset_address = kJnsOffset;
4850       break;
4851     case ON_STACK_REPLACEMENT:
4852     case OSR_AFTER_STACK_CHECK:
4853       //     sub <profiling_counter>, <delta>  ;; Not changed
4854       //     nop
4855       //     nop
4856       //     call <on-stack replacment>
4857       //   ok:
4858       *jns_instr_address = kNopByteOne;
4859       *jns_offset_address = kNopByteTwo;
4860       break;
4861   }
4862 
4863   Assembler::set_target_address_at(call_target_address,
4864                                    unoptimized_code,
4865                                    replacement_code->entry());
4866   unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4867       unoptimized_code, call_target_address, replacement_code);
4868 }
4869 
4870 
GetBackEdgeState(Isolate * isolate,Code * unoptimized_code,Address pc)4871 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4872     Isolate* isolate,
4873     Code* unoptimized_code,
4874     Address pc) {
4875   Address call_target_address = pc - kIntSize;
4876   Address jns_instr_address = call_target_address - 3;
4877   DCHECK_EQ(kCallInstruction, *(call_target_address - 1));
4878 
4879   if (*jns_instr_address == kJnsInstruction) {
4880     DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
4881     DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
4882               Assembler::target_address_at(call_target_address,
4883                                            unoptimized_code));
4884     return INTERRUPT;
4885   }
4886 
4887   DCHECK_EQ(kNopByteOne, *jns_instr_address);
4888   DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));
4889 
4890   if (Assembler::target_address_at(call_target_address, unoptimized_code) ==
4891       isolate->builtins()->OnStackReplacement()->entry()) {
4892     return ON_STACK_REPLACEMENT;
4893   }
4894 
4895   DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
4896             Assembler::target_address_at(call_target_address,
4897                                          unoptimized_code));
4898   return OSR_AFTER_STACK_CHECK;
4899 }
4900 
4901 
4902 } }  // namespace v8::internal
4903 
4904 #endif  // V8_TARGET_ARCH_X87
4905