// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

#include "src/arm/code-stubs-arm.h"
#include "src/arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


// A patch site is a location in the code that can be patched. This class has
// methods to emit the patchable code and a method, EmitPatchInfo, to record a
// marker back to the patchable code. The marker is a cmp rx, #yyy instruction,
// where x * 0x00000fff + yyy (the raw 12-bit immediate value) is the delta
// from the pc to the first instruction of the patchable code.
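// For example (hypothetical numbers): a delta of 5000 instructions would be
// recorded as cmp r1, #905, since 1 * 0xfff + 905 == 5000.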
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(eq, target);  // Always taken before patched.
  }

  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    // Block literal pool emission whilst recording patch site information.
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right.  The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e., ourselves)
//   o r3: the new target value
//   o cp: our context
//   o pp: our caller's constant pool pointer (if enabled)
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->literal()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ldr(r2, MemOperand(sp, receiver_offset));
    __ AssertNotSmi(r2);
    __ CompareObjectType(r2, r2, no_reg, FIRST_JS_RECEIVER_TYPE);
    __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ sub(r9, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
        __ cmp(r9, Operand(r2));
        __ b(hs, &ok);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
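      // For example (hypothetical count): locals_count == 70 with
      // kMaxPushes == 32 emits two iterations of the 32-push loop below,
      // followed by 6 unrolled pushes for the remainder.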
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(r9);
        }
        // Continue loop if not done.
        __ sub(r2, r2, Operand(1), SetCC);
        __ b(&loop_header, ne);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(r9);
      }
    }
  }

  bool function_in_register_r1 = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in r1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(r1);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(r3);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(r1);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(r3);  // Restore new target.
      }
    }
    function_in_register_r1 = false;
    // Context is returned in r0.  It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, r0);
    __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ str(r0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), r0, r2,
                                    kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding the function and new target are both trashed if we
  // bail out here. But since that can happen only when the new target is not
  // used and we allocate a context, the value of |function_in_register_r1| is
  // correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_r1) {
      __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as such.
    }
    SetVar(this_function_var, r1, r0, r2);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, r3, r0, r2);
  }

  // Possibly allocate RestParameters
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;

    __ mov(RestParamAccessDescriptor::parameter_count(),
           Operand(Smi::FromInt(num_parameters)));
    __ add(RestParamAccessDescriptor::parameter_pointer(), fp,
           Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(RestParamAccessDescriptor::rest_parameter_index(),
           Operand(Smi::FromInt(rest_index)));
    function_in_register_r1 = false;

    RestParamAccessStub stub(isolate());
    __ CallStub(&stub);

    SetVar(rest_param, r0, r1, r2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    DCHECK(r1.is(ArgumentsAccessNewDescriptor::function()));
    if (!function_in_register_r1) {
      // Load this again, if it's used by the local context below.
      __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ mov(ArgumentsAccessNewDescriptor::parameter_count(),
           Operand(Smi::FromInt(num_parameters)));
    __ add(ArgumentsAccessNewDescriptor::parameter_pointer(), fp,
           Operand(StandardFrameConstants::kCallerSPOffset + offset));

    // Arguments to ArgumentsAccessStub:
    //   function, parameter pointer, parameter count.
    // The stub will rewrite parameter pointer and parameter count if the
    // previous stack frame was an arguments adapter frame.
    bool is_unmapped = is_strict(language_mode()) || !has_simple_parameters();
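    // (Mapped arguments are only used for sloppy-mode functions with simple
    // parameter lists; all other cases get an unmapped arguments object.)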
    ArgumentsAccessStub::Type type = ArgumentsAccessStub::ComputeType(
        is_unmapped, literal()->has_duplicate_parameters());
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, r0, r1, r2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    VisitForEffect(scope()->GetIllegalRedeclaration());

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope()->declarations());
    }

    // Assert that the declarations do not use ICs. Otherwise the debugger
    // won't be able to redirect a PC at an IC to the correct IC in newly
    // recompiled code.
    DCHECK_EQ(0, ic_total_count_);

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmp(sp, Operand(ip));
      __ b(hs, &ok);
      Handle<Code> stack_check = isolate()->builtins()->StackCheck();
      PredictableCodeSizeScope predictable(masm_);
      predictable.ExpectSize(
          masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
      __ Call(stack_check, RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(literal()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emit the constant pool, so it doesn't get emitted in the middle
  // of the back edge table.
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ mov(r0, Operand(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r2, Operand(profiling_counter_));
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


#ifdef CAN_USE_ARMV7_INSTRUCTIONS
static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
#else
static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
#endif


void FullCodeGenerator::EmitProfilingCounterReset() {
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  PredictableCodeSizeScope predictable_code_size_scope(
      masm_, kProfileCounterResetSequenceLength);
  Label start;
  __ bind(&start);
  int reset_value = FLAG_interrupt_budget;
  __ mov(r2, Operand(profiling_counter_));
  // The mov instruction above can be anywhere from 1 to 3 instructions (on
  // ARMv7) or 1 to 5 instructions (on ARMv6), depending on whether an
  // extended constant pool is used; nops are inserted below to compensate.
  int expected_instr_count =
      (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
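  // On ARMv7, for example, the fixed sequence is 5 instructions: the
  // counter-cell mov above is padded with nops to 3 instructions, and the
  // mov/str pair below supplies the remaining 2.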
  DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
  while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
    __ nop();
  }
  __ mov(r3, Operand(Smi::FromInt(reset_value)));
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
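  // For example (hypothetical numbers): a back edge 1200 bytes from its
  // target with a code size multiplier of 100 yields a weight of
  // Min(kMaxBackEdgeWeight, Max(1, 12)).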
  EmitProfilingCounterDecrement(weight);
  __ b(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ b(pl, &ok);
    __ push(r0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(r0);
    EmitProfilingCounterReset();
    __ bind(&ok);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(literal());
      // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
      PredictableCodeSizeScope predictable(masm_, -1);
      __ LeaveFrame(StackFrame::JAVA_SCRIPT);
      { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
        __ add(sp, sp, Operand(sp_delta));
        __ Jump(lr);
      }
    }
  }
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ str(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(eq, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
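  // For example (hypothetical layout, kPointerSize == 4): with two
  // parameters, parameter index 0 resolves to fp + 12 and index 1 to fp + 8,
  // while locals sit at kLocal0Offset below fp, one word lower per index.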
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ str(r0, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ str(r0, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
      } else {
        __ mov(r0, Operand(Smi::FromInt(0)));  // Indicates no initial value.
      }
      __ Push(r2, r0);
      __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                r2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r2, Operand(variable->name()));
      __ Push(r2);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ mov(r1, Operand(pairs));
  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(r1, r0);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orr(r2, r1, r0);
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);

      __ cmp(r1, r0);
      __ b(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
                                             strength(language_mode())).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    __ b(ne, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ cmp(r0, Operand::Zero());
    __ b(ne, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, &exit);
  Register null_value = r5;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r0, null_value);
  __ b(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_JS_RECEIVER_TYPE);
  __ b(ge, &done_convert);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(r0);

  // Check for proxies.
  Label call_runtime;
  __ CompareObjectType(r0, r1, r1, JS_PROXY_TYPE);
  __ b(eq, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r1, r0);
  __ cmp(r1, Operand(Smi::FromInt(0)));
  __ b(eq, &no_descriptors);

  __ LoadInstanceDescriptors(r0, r2);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r0);  // Map.
  __ mov(r0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register r0. Iterate through that.
  __ bind(&fixed_array);

  __ EmitLoadTypeFeedbackVector(r1);
  __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  int vector_index = SmiFromSlot(slot)->value();
  __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(vector_index)));
  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi(1) indicates slow check
  __ Push(r1, r0);  // Smi and array
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ Push(r1, r0);  // Fixed array length (as smi) and initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
  __ cmp(r0, r1);  // Compare to the array length.
  __ b(hs, loop_statement.break_label());

  // Get the current entry of the array into register r3.
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));
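  // (A smi on 32-bit ARM is the integer shifted left by one, so the smi key
  // likely only needs one further left shift here to become a byte offset
  // into the 4-byte-element array.)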

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r2.
  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r4, Operand(r2));
  __ b(eq, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(r1);  // Enumerable.
  __ push(r3);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ mov(r3, Operand(r0));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r3.
  __ bind(&update_each);
  __ mov(result_register(), r3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  __ push(r0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ mov(r2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ Push(info);
    __ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
                             : Runtime::kNewClosure);
  }
  context()->Plug(r0);
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
  __ mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ ldr(StoreDescriptor::ValueRegister(),
         MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), r0);
  __ mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ ldr(StoreDescriptor::ValueRegister(),
         MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = r1;
  Register temp = r2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ b(eq, &fast);
    // Check that extension is "the hole".
    __ ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use the normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = r3;
  Register temp = r4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      __ ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is "the hole".
  __ ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
      } else {  // LET || CONST
        __ b(ne, done);
        __ mov(r0, Operand(var->name()));
        __ push(r0);
        __ CallRuntime(Runtime::kThrowReferenceError);
      }
    }
    __ jmp(done);
  }
}
1376 
1377 
EmitGlobalVariableLoad(VariableProxy * proxy,TypeofMode typeof_mode)1378 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1379                                                TypeofMode typeof_mode) {
1380   Variable* var = proxy->var();
1381   DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1382          (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1383   __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
1384   __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1385   __ mov(LoadDescriptor::SlotRegister(),
1386          Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1387   CallLoadIC(typeof_mode);
1388 }
1389 
1390 
1391 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1392                                          TypeofMode typeof_mode) {
1393   // Record position before possible IC call.
1394   SetExpressionPosition(proxy);
1395   PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1396   Variable* var = proxy->var();
1397 
1398   // Three cases: global variables, lookup variables, and all other types of
1399   // variables.
1400   switch (var->location()) {
1401     case VariableLocation::GLOBAL:
1402     case VariableLocation::UNALLOCATED: {
1403       Comment cmnt(masm_, "[ Global variable");
1404       EmitGlobalVariableLoad(proxy, typeof_mode);
1405       context()->Plug(r0);
1406       break;
1407     }
1408 
1409     case VariableLocation::PARAMETER:
1410     case VariableLocation::LOCAL:
1411     case VariableLocation::CONTEXT: {
1412       DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1413       Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1414                                                : "[ Stack variable");
1415       if (NeedsHoleCheckForLoad(proxy)) {
1416         // Let and const need a read barrier.
1417         GetVar(r0, var);
1418         __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1419         if (var->mode() == LET || var->mode() == CONST) {
1420           // Throw a reference error when using an uninitialized let/const
1421           // binding in harmony mode.
1422           Label done;
1423           __ b(ne, &done);
1424           __ mov(r0, Operand(var->name()));
1425           __ push(r0);
1426           __ CallRuntime(Runtime::kThrowReferenceError);
1427           __ bind(&done);
1428         } else {
1429           // Uninitialized legacy const bindings read as undefined, not the hole.
1430           DCHECK(var->mode() == CONST_LEGACY);
1431           __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1432         }
1433         context()->Plug(r0);
1434         break;
1435       }
1436       context()->Plug(var);
1437       break;
1438     }
1439 
1440     case VariableLocation::LOOKUP: {
1441       Comment cmnt(masm_, "[ Lookup variable");
1442       Label done, slow;
1443       // Generate code for loading from variables potentially shadowed
1444       // by eval-introduced variables.
1445       EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1446       __ bind(&slow);
1447       __ mov(r1, Operand(var->name()));
1448       __ Push(cp, r1);  // Context and name.
1449       Runtime::FunctionId function_id =
1450           typeof_mode == NOT_INSIDE_TYPEOF
1451               ? Runtime::kLoadLookupSlot
1452               : Runtime::kLoadLookupSlotNoReferenceError;
1453       __ CallRuntime(function_id);
1454       __ bind(&done);
1455       context()->Plug(r0);
1456     }
1457   }
1458 }
1459 
1460 
1461 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1462   Comment cmnt(masm_, "[ RegExpLiteral");
1463   __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1464   __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1465   __ mov(r1, Operand(expr->pattern()));
1466   __ mov(r0, Operand(Smi::FromInt(expr->flags())));
1467   FastCloneRegExpStub stub(isolate());
1468   __ CallStub(&stub);
1469   context()->Plug(r0);
1470 }
1471 
1472 
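// Pushes the function for one half of an accessor pair onto the stack, or
// null if that half is absent, installing a home object first when the
// accessor function references super.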
1473 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1474   Expression* expression = (property == NULL) ? NULL : property->value();
1475   if (expression == NULL) {
1476     __ LoadRoot(r1, Heap::kNullValueRootIndex);
1477     __ push(r1);
1478   } else {
1479     VisitForStackValue(expression);
1480     if (NeedsHomeObject(expression)) {
1481       DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1482              property->kind() == ObjectLiteral::Property::SETTER);
1483       int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1484       EmitSetHomeObject(expression, offset, property->GetSlot());
1485     }
1486   }
1487 }
1488 
1489 
1490 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1491   Comment cmnt(masm_, "[ ObjectLiteral");
1492 
1493   Handle<FixedArray> constant_properties = expr->constant_properties();
1494   __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1495   __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1496   __ mov(r1, Operand(constant_properties));
1497   int flags = expr->ComputeFlags();
1498   __ mov(r0, Operand(Smi::FromInt(flags)));
1499   if (MustCreateObjectLiteralWithRuntime(expr)) {
1500     __ Push(r3, r2, r1, r0);
1501     __ CallRuntime(Runtime::kCreateObjectLiteral);
1502   } else {
1503     FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1504     __ CallStub(&stub);
1505   }
1506   PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1507 
1508   // If result_saved is true the result is on top of the stack.  If
1509   // result_saved is false the result is in r0.
1510   bool result_saved = false;
1511 
1512   AccessorTable accessor_table(zone());
1513   int property_index = 0;
1514   for (; property_index < expr->properties()->length(); property_index++) {
1515     ObjectLiteral::Property* property = expr->properties()->at(property_index);
1516     if (property->is_computed_name()) break;
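    // A computed name ends the "static" part of the literal; the remaining
    // properties are handled by the second loop below.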
1517     if (property->IsCompileTimeValue()) continue;
1518 
1519     Literal* key = property->key()->AsLiteral();
1520     Expression* value = property->value();
1521     if (!result_saved) {
1522       __ push(r0);  // Save result on stack
1523       result_saved = true;
1524     }
1525     switch (property->kind()) {
1526       case ObjectLiteral::Property::CONSTANT:
1527         UNREACHABLE();
1528       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1529         DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1530         // Fall through.
1531       case ObjectLiteral::Property::COMPUTED:
1532         // It is safe to use [[Put]] here because the boilerplate already
1533         // contains computed properties with an uninitialized value.
1534         if (key->value()->IsInternalizedString()) {
1535           if (property->emit_store()) {
1536             VisitForAccumulatorValue(value);
1537             DCHECK(StoreDescriptor::ValueRegister().is(r0));
1538             __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1539             __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1540             EmitLoadStoreICSlot(property->GetSlot(0));
1541             CallStoreIC();
1542             PrepareForBailoutForId(key->id(), NO_REGISTERS);
1543 
1544             if (NeedsHomeObject(value)) {
1545               EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1546             }
1547           } else {
1548             VisitForEffect(value);
1549           }
1550           break;
1551         }
1552         // Duplicate receiver on stack.
1553         __ ldr(r0, MemOperand(sp));
1554         __ push(r0);
1555         VisitForStackValue(key);
1556         VisitForStackValue(value);
1557         if (property->emit_store()) {
1558           if (NeedsHomeObject(value)) {
1559             EmitSetHomeObject(value, 2, property->GetSlot());
1560           }
1561           __ mov(r0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes
1562           __ push(r0);
1563           __ CallRuntime(Runtime::kSetProperty);
1564         } else {
1565           __ Drop(3);
1566         }
1567         break;
1568       case ObjectLiteral::Property::PROTOTYPE:
1569         // Duplicate receiver on stack.
1570         __ ldr(r0, MemOperand(sp));
1571         __ push(r0);
1572         VisitForStackValue(value);
1573         DCHECK(property->emit_store());
1574         __ CallRuntime(Runtime::kInternalSetPrototype);
1575         PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1576                                NO_REGISTERS);
1577         break;
1578 
1579       case ObjectLiteral::Property::GETTER:
1580         if (property->emit_store()) {
1581           accessor_table.lookup(key)->second->getter = property;
1582         }
1583         break;
1584       case ObjectLiteral::Property::SETTER:
1585         if (property->emit_store()) {
1586           accessor_table.lookup(key)->second->setter = property;
1587         }
1588         break;
1589     }
1590   }
1591 
1592   // Emit code to define accessors, using only a single call to the runtime for
1593   // each pair of corresponding getters and setters.
1594   for (AccessorTable::Iterator it = accessor_table.begin();
1595        it != accessor_table.end();
1596        ++it) {
1597     __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
1598     __ push(r0);
1599     VisitForStackValue(it->first);
1600     EmitAccessor(it->second->getter);
1601     EmitAccessor(it->second->setter);
1602     __ mov(r0, Operand(Smi::FromInt(NONE)));
1603     __ push(r0);
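    // Stack: literal (receiver), key, getter, setter, attributes (NONE).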
1604     __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked);
1605   }
1606 
1607   // Object literals have two parts. The "static" part on the left contains no
1608   // computed property names, and so we can compute its map ahead of time; see
1609   // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1610   // starts with the first computed property name, and continues with all
1611   // properties to its right.  All the code from above initializes the static
1612   // component of the object literal, and arranges for the map of the result to
1613   // reflect the static order in which the keys appear. For the dynamic
1614   // properties, we compile them into a series of "SetOwnProperty" runtime
1615   // calls. This will preserve insertion order.
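  // For example, in { a: 1, [b]: 2, c: 3 } the static part is { a: 1 }, and
  // the dynamic part consists of [b]: 2 and c: 3.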
1616   for (; property_index < expr->properties()->length(); property_index++) {
1617     ObjectLiteral::Property* property = expr->properties()->at(property_index);
1618 
1619     Expression* value = property->value();
1620     if (!result_saved) {
1621       __ push(r0);  // Save result on the stack
1622       result_saved = true;
1623     }
1624 
1625     __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
1626     __ push(r0);
1627 
1628     if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1629       DCHECK(!property->is_computed_name());
1630       VisitForStackValue(value);
1631       DCHECK(property->emit_store());
1632       __ CallRuntime(Runtime::kInternalSetPrototype);
1633       PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1634                              NO_REGISTERS);
1635     } else {
1636       EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1637       VisitForStackValue(value);
1638       if (NeedsHomeObject(value)) {
1639         EmitSetHomeObject(value, 2, property->GetSlot());
1640       }
1641 
1642       switch (property->kind()) {
1643         case ObjectLiteral::Property::CONSTANT:
1644         case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1645         case ObjectLiteral::Property::COMPUTED:
1646           if (property->emit_store()) {
1647             __ mov(r0, Operand(Smi::FromInt(NONE)));
1648             __ push(r0);
1649             __ CallRuntime(Runtime::kDefineDataPropertyUnchecked);
1650           } else {
1651             __ Drop(3);
1652           }
1653           break;
1654 
1655         case ObjectLiteral::Property::PROTOTYPE:
1656           UNREACHABLE();
1657           break;
1658 
1659         case ObjectLiteral::Property::GETTER:
1660           __ mov(r0, Operand(Smi::FromInt(NONE)));
1661           __ push(r0);
1662           __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked);
1663           break;
1664 
1665         case ObjectLiteral::Property::SETTER:
1666           __ mov(r0, Operand(Smi::FromInt(NONE)));
1667           __ push(r0);
1668           __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked);
1669           break;
1670       }
1671     }
1672   }
1673 
1674   if (expr->has_function()) {
1675     DCHECK(result_saved);
1676     __ ldr(r0, MemOperand(sp));
1677     __ push(r0);
1678     __ CallRuntime(Runtime::kToFastProperties);
1679   }
1680 
1681   if (result_saved) {
1682     context()->PlugTOS();
1683   } else {
1684     context()->Plug(r0);
1685   }
1686 }
1687 
1688 
1689 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1690   Comment cmnt(masm_, "[ ArrayLiteral");
1691 
1692   Handle<FixedArray> constant_elements = expr->constant_elements();
1693   bool has_fast_elements =
1694       IsFastObjectElementsKind(expr->constant_elements_kind());
1695   Handle<FixedArrayBase> constant_elements_values(
1696       FixedArrayBase::cast(constant_elements->get(1)));
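  // constant_elements is a pair of the boilerplate's elements kind and its
  // constant values; index 1 holds the values.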
1697 
1698   AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1699   if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1700     // With pretenuring disabled, allocation sites only track element-kind
1701     // transitions, and fast object elements have none left to make.
1702     allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1703   }
1704 
1705   __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1706   __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1707   __ mov(r1, Operand(constant_elements));
1708   if (MustCreateArrayLiteralWithRuntime(expr)) {
1709     __ mov(r0, Operand(Smi::FromInt(expr->ComputeFlags())));
1710     __ Push(r3, r2, r1, r0);
1711     __ CallRuntime(Runtime::kCreateArrayLiteral);
1712   } else {
1713     FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1714     __ CallStub(&stub);
1715   }
1716   PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1717 
1718   bool result_saved = false;  // Is the result saved to the stack?
1719   ZoneList<Expression*>* subexprs = expr->values();
1720   int length = subexprs->length();
1721 
1722   // Emit code to evaluate all the non-constant subexpressions and to store
1723   // them into the newly cloned array.
1724   int array_index = 0;
1725   for (; array_index < length; array_index++) {
1726     Expression* subexpr = subexprs->at(array_index);
1727     if (subexpr->IsSpread()) break;
1728 
1729     // If the subexpression is a literal or a simple materialized literal it
1730     // is already set in the cloned array.
1731     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1732 
1733     if (!result_saved) {
1734       __ push(r0);
1735       result_saved = true;
1736     }
1737     VisitForAccumulatorValue(subexpr);
1738 
1739     __ mov(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
1740     __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1741     EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1742     Handle<Code> ic =
1743         CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1744     CallIC(ic);
1745 
1746     PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1747   }
1748 
1749   // If the array literal contains spread expressions, it has two parts. The
1750   // first part is the "static" array, which has a literal index and was
1751   // handled above. The second part starts at the first spread expression
1752   // (inclusive), and its elements are appended to the array. Note that the
1753   // number of elements an iterable produces is unknown ahead of time.
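  // For example, in [a, b, ...xs, c] the elements a and b are stored into the
  // clone above, while ...xs and c are appended one at a time below.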
1754   if (array_index < length && result_saved) {
1755     __ Pop(r0);
1756     result_saved = false;
1757   }
1758   for (; array_index < length; array_index++) {
1759     Expression* subexpr = subexprs->at(array_index);
1760 
1761     __ Push(r0);
1762     if (subexpr->IsSpread()) {
1763       VisitForStackValue(subexpr->AsSpread()->expression());
1764       __ InvokeBuiltin(Context::CONCAT_ITERABLE_TO_ARRAY_BUILTIN_INDEX,
1765                        CALL_FUNCTION);
1766     } else {
1767       VisitForStackValue(subexpr);
1768       __ CallRuntime(Runtime::kAppendElement);
1769     }
1770 
1771     PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1772   }
1773 
1774   if (result_saved) {
1775     context()->PlugTOS();
1776   } else {
1777     context()->Plug(r0);
1778   }
1779 }
1780 
1781 
1782 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1783   DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1784 
1785   Comment cmnt(masm_, "[ Assignment");
1786   SetExpressionPosition(expr, INSERT_BREAK);
1787 
1788   Property* property = expr->target()->AsProperty();
1789   LhsKind assign_type = Property::GetAssignType(property);
1790 
1791   // Evaluate LHS expression.
1792   switch (assign_type) {
1793     case VARIABLE:
1794       // Nothing to do here.
1795       break;
1796     case NAMED_PROPERTY:
1797       if (expr->is_compound()) {
1798         // We need the receiver both on the stack and in the register.
1799         VisitForStackValue(property->obj());
1800         __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1801       } else {
1802         VisitForStackValue(property->obj());
1803       }
1804       break;
1805     case NAMED_SUPER_PROPERTY:
1806       VisitForStackValue(
1807           property->obj()->AsSuperPropertyReference()->this_var());
1808       VisitForAccumulatorValue(
1809           property->obj()->AsSuperPropertyReference()->home_object());
1810       __ Push(result_register());
1811       if (expr->is_compound()) {
1812         const Register scratch = r1;
1813         __ ldr(scratch, MemOperand(sp, kPointerSize));
1814         __ Push(scratch);
1815         __ Push(result_register());
1816       }
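      // Stack for the compound case: this, home_object, this, home_object
      // (top). The top pair feeds the named-super load; the bottom pair is
      // left for the store.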
1817       break;
1818     case KEYED_SUPER_PROPERTY:
1819       VisitForStackValue(
1820           property->obj()->AsSuperPropertyReference()->this_var());
1821       VisitForStackValue(
1822           property->obj()->AsSuperPropertyReference()->home_object());
1823       VisitForAccumulatorValue(property->key());
1824       __ Push(result_register());
1825       if (expr->is_compound()) {
1826         const Register scratch = r1;
1827         __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
1828         __ Push(scratch);
1829         __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
1830         __ Push(scratch);
1831         __ Push(result_register());
1832       }
1833       break;
1834     case KEYED_PROPERTY:
1835       if (expr->is_compound()) {
1836         VisitForStackValue(property->obj());
1837         VisitForStackValue(property->key());
1838         __ ldr(LoadDescriptor::ReceiverRegister(),
1839                MemOperand(sp, 1 * kPointerSize));
1840         __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1841       } else {
1842         VisitForStackValue(property->obj());
1843         VisitForStackValue(property->key());
1844       }
1845       break;
1846   }
1847 
1848   // For compound assignments we need another deoptimization point after the
1849   // variable/property load.
1850   if (expr->is_compound()) {
1851     { AccumulatorValueContext context(this);
1852       switch (assign_type) {
1853         case VARIABLE:
1854           EmitVariableLoad(expr->target()->AsVariableProxy());
1855           PrepareForBailout(expr->target(), TOS_REG);
1856           break;
1857         case NAMED_PROPERTY:
1858           EmitNamedPropertyLoad(property);
1859           PrepareForBailoutForId(property->LoadId(), TOS_REG);
1860           break;
1861         case NAMED_SUPER_PROPERTY:
1862           EmitNamedSuperPropertyLoad(property);
1863           PrepareForBailoutForId(property->LoadId(), TOS_REG);
1864           break;
1865         case KEYED_SUPER_PROPERTY:
1866           EmitKeyedSuperPropertyLoad(property);
1867           PrepareForBailoutForId(property->LoadId(), TOS_REG);
1868           break;
1869         case KEYED_PROPERTY:
1870           EmitKeyedPropertyLoad(property);
1871           PrepareForBailoutForId(property->LoadId(), TOS_REG);
1872           break;
1873       }
1874     }
1875 
1876     Token::Value op = expr->binary_op();
1877     __ push(r0);  // Left operand goes on the stack.
1878     VisitForAccumulatorValue(expr->value());
1879 
1880     AccumulatorValueContext context(this);
1881     if (ShouldInlineSmiCase(op)) {
1882       EmitInlineSmiBinaryOp(expr->binary_operation(),
1883                             op,
1884                             expr->target(),
1885                             expr->value());
1886     } else {
1887       EmitBinaryOp(expr->binary_operation(), op);
1888     }
1889 
1890     // Deoptimization point in case the binary operation may have side effects.
1891     PrepareForBailout(expr->binary_operation(), TOS_REG);
1892   } else {
1893     VisitForAccumulatorValue(expr->value());
1894   }
1895 
1896   SetExpressionPosition(expr);
1897 
1898   // Store the value.
1899   switch (assign_type) {
1900     case VARIABLE:
1901       EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1902                              expr->op(), expr->AssignmentSlot());
1903       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1904       context()->Plug(r0);
1905       break;
1906     case NAMED_PROPERTY:
1907       EmitNamedPropertyAssignment(expr);
1908       break;
1909     case NAMED_SUPER_PROPERTY:
1910       EmitNamedSuperPropertyStore(property);
1911       context()->Plug(r0);
1912       break;
1913     case KEYED_SUPER_PROPERTY:
1914       EmitKeyedSuperPropertyStore(property);
1915       context()->Plug(r0);
1916       break;
1917     case KEYED_PROPERTY:
1918       EmitKeyedPropertyAssignment(expr);
1919       break;
1920   }
1921 }
1922 
1923 
1924 void FullCodeGenerator::VisitYield(Yield* expr) {
1925   Comment cmnt(masm_, "[ Yield");
1926   SetExpressionPosition(expr);
1927 
1928   // Evaluate yielded value first; the initial iterator definition depends on
1929   // this.  It stays on the stack while we update the iterator.
1930   VisitForStackValue(expr->expression());
1931 
1932   switch (expr->yield_kind()) {
1933     case Yield::kSuspend:
1934       // Pop value from top-of-stack slot; box result into result register.
1935       EmitCreateIteratorResult(false);
1936       __ push(result_register());
1937       // Fall through.
1938     case Yield::kInitial: {
1939       Label suspend, continuation, post_runtime, resume;
1940 
1941       __ jmp(&suspend);
1942       __ bind(&continuation);
1943       __ RecordGeneratorContinuation();
1944       __ jmp(&resume);
1945 
1946       __ bind(&suspend);
1947       VisitForAccumulatorValue(expr->generator_object());
1948       DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1949       __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
1950       __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
1951       __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
1952       __ mov(r1, cp);
1953       __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
1954                           kLRHasBeenSaved, kDontSaveFPRegs);
1955       __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1956       __ cmp(sp, r1);
1957       __ b(eq, &post_runtime);
1958       __ push(r0);  // generator object
1959       __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1960       __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1961       __ bind(&post_runtime);
1962       __ pop(result_register());
1963       EmitReturnSequence();
1964 
1965       __ bind(&resume);
1966       context()->Plug(result_register());
1967       break;
1968     }
1969 
1970     case Yield::kFinal: {
1971       VisitForAccumulatorValue(expr->generator_object());
1972       __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
1973       __ str(r1, FieldMemOperand(result_register(),
1974                                  JSGeneratorObject::kContinuationOffset));
1975       // Pop value from top-of-stack slot, box result into result register.
1976       EmitCreateIteratorResult(true);
1977       EmitUnwindBeforeReturn();
1978       EmitReturnSequence();
1979       break;
1980     }
1981 
1982     case Yield::kDelegating: {
1983       VisitForStackValue(expr->generator_object());
1984 
1985       // Initial stack layout is as follows:
1986       // [sp + 1 * kPointerSize] iter
1987       // [sp + 0 * kPointerSize] g
1988 
1989       Label l_catch, l_try, l_suspend, l_continuation, l_resume;
1990       Label l_next, l_call, l_loop;
1991       Register load_receiver = LoadDescriptor::ReceiverRegister();
1992       Register load_name = LoadDescriptor::NameRegister();
1993 
1994       // Initial send value is undefined.
1995       __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
1996       __ b(&l_next);
1997 
1998       // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
1999       __ bind(&l_catch);
2000       __ LoadRoot(load_name, Heap::kthrow_stringRootIndex);  // "throw"
2001       __ ldr(r3, MemOperand(sp, 1 * kPointerSize));          // iter
2002       __ Push(load_name, r3, r0);                       // "throw", iter, except
2003       __ jmp(&l_call);
2004 
2005       // try { received = %yield result }
2006       // Shuffle the received result above a try handler and yield it without
2007       // re-boxing.
2008       __ bind(&l_try);
2009       __ pop(r0);                                        // result
2010       int handler_index = NewHandlerTableEntry();
2011       EnterTryBlock(handler_index, &l_catch);
2012       const int try_block_size = TryCatch::kElementCount * kPointerSize;
2013       __ push(r0);                                       // result
2014 
2015       __ jmp(&l_suspend);
2016       __ bind(&l_continuation);
2017       __ RecordGeneratorContinuation();
2018       __ jmp(&l_resume);
2019 
2020       __ bind(&l_suspend);
2021       const int generator_object_depth = kPointerSize + try_block_size;
2022       __ ldr(r0, MemOperand(sp, generator_object_depth));
2023       __ push(r0);                                       // g
2024       __ Push(Smi::FromInt(handler_index));              // handler-index
2025       DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2026       __ mov(r1, Operand(Smi::FromInt(l_continuation.pos())));
2027       __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2028       __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2029       __ mov(r1, cp);
2030       __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2031                           kLRHasBeenSaved, kDontSaveFPRegs);
2032       __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2033       __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2034       __ pop(r0);                                      // result
2035       EmitReturnSequence();
2036       __ bind(&l_resume);                              // received in r0
2037       ExitTryBlock(handler_index);
2038 
2039       // receiver = iter; f = 'next'; arg = received;
2040       __ bind(&l_next);
2041 
2042       __ LoadRoot(load_name, Heap::knext_stringRootIndex);  // "next"
2043       __ ldr(r3, MemOperand(sp, 1 * kPointerSize));         // iter
2044       __ Push(load_name, r3, r0);                      // "next", iter, received
2045 
2046       // result = receiver[f](arg);
2047       __ bind(&l_call);
2048       __ ldr(load_receiver, MemOperand(sp, kPointerSize));
2049       __ ldr(load_name, MemOperand(sp, 2 * kPointerSize));
2050       __ mov(LoadDescriptor::SlotRegister(),
2051              Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2052       Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
2053       CallIC(ic, TypeFeedbackId::None());
2054       __ mov(r1, r0);
2055       __ str(r1, MemOperand(sp, 2 * kPointerSize));
2056       SetCallPosition(expr);
2057       __ mov(r0, Operand(1));
2058       __ Call(
2059           isolate()->builtins()->Call(ConvertReceiverMode::kNotNullOrUndefined),
2060           RelocInfo::CODE_TARGET);
2061 
2062       __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2063       __ Drop(1);  // The function is still on the stack; drop it.
2064 
2065       // if (!result.done) goto l_try;
2066       __ bind(&l_loop);
2067       __ Move(load_receiver, r0);
2068 
2069       __ push(load_receiver);                               // save result
2070       __ LoadRoot(load_name, Heap::kdone_stringRootIndex);  // "done"
2071       __ mov(LoadDescriptor::SlotRegister(),
2072              Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2073       CallLoadIC(NOT_INSIDE_TYPEOF);  // r0=result.done
2074       Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2075       CallIC(bool_ic);
2076       __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
2077       __ b(ne, &l_try);
2078 
2079       // result.value
2080       __ pop(load_receiver);                                 // result
2081       __ LoadRoot(load_name, Heap::kvalue_stringRootIndex);  // "value"
2082       __ mov(LoadDescriptor::SlotRegister(),
2083              Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2084       CallLoadIC(NOT_INSIDE_TYPEOF);                         // r0=result.value
2085       context()->DropAndPlug(2, r0);                         // drop iter and g
2086       break;
2087     }
2088   }
2089 }
2090 
2091 
2092 void FullCodeGenerator::EmitGeneratorResume(
2093     Expression* generator, Expression* value,
2094     JSGeneratorObject::ResumeMode resume_mode) {
2095   // The value stays in r0, and is ultimately read by the resumed generator, as
2096   // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2097   // is read to throw the value when the resumed generator is already closed.
2098   // r1 will hold the generator object until the activation has been resumed.
2099   VisitForStackValue(generator);
2100   VisitForAccumulatorValue(value);
2101   __ pop(r1);
2102 
2103   // Load suspended function and context.
2104   __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
2105   __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
2106 
2107   // Load receiver and store as the first argument.
2108   __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
2109   __ push(r2);
2110 
2111   // Push holes for the rest of the arguments to the generator function.
2112   __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2113   __ ldr(r3,
2114          FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
2115   __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
2116   Label push_argument_holes, push_frame;
2117   __ bind(&push_argument_holes);
2118   __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
2119   __ b(mi, &push_frame);
2120   __ push(r2);
2121   __ jmp(&push_argument_holes);
2122 
2123   // Enter a new JavaScript frame, and initialize its slots as they were when
2124   // the generator was suspended.
2125   Label resume_frame, done;
2126   __ bind(&push_frame);
2127   __ bl(&resume_frame);
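  // The bl above leaves the address of the following jmp in lr; it becomes
  // the return address of the resumed frame, so returning from the generator
  // lands on the branch to |done|.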
2128   __ jmp(&done);
2129   __ bind(&resume_frame);
2130   // lr = return address.
2131   // fp = caller's frame pointer.
2132   // pp = caller's constant pool (if FLAG_enable_embedded_constant_pool),
2133   // cp = callee's context,
2134   // r4 = callee's JS function.
2135   __ PushFixedFrame(r4);
2136   // Adjust FP to point to saved FP.
2137   __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2138 
2139   // Load the operand stack size.
2140   __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
2141   __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
2142   __ SmiUntag(r3);
2143 
2144   // If we are sending a value and there is no operand stack, we can jump back
2145   // in directly.
2146   if (resume_mode == JSGeneratorObject::NEXT) {
2147     Label slow_resume;
2148     __ cmp(r3, Operand(0));
2149     __ b(ne, &slow_resume);
2150     __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
2151 
2152     { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2153       if (FLAG_enable_embedded_constant_pool) {
2154         // Load the new code object's constant pool pointer.
2155         __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
2156       }
2157 
2158       __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2159       __ SmiUntag(r2);
2160       __ add(r3, r3, r2);
2161       __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2162       __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2163       __ Jump(r3);
2164     }
2165     __ bind(&slow_resume);
2166   }
2167 
2168   // Otherwise, we push holes for the operand stack and call the runtime to fix
2169   // up the stack and the handlers.
2170   Label push_operand_holes, call_resume;
2171   __ bind(&push_operand_holes);
2172   __ sub(r3, r3, Operand(1), SetCC);
2173   __ b(mi, &call_resume);
2174   __ push(r2);
2175   __ b(&push_operand_holes);
2176   __ bind(&call_resume);
2177   DCHECK(!result_register().is(r1));
2178   __ Push(r1, result_register());
2179   __ Push(Smi::FromInt(resume_mode));
2180   __ CallRuntime(Runtime::kResumeJSGeneratorObject);
2181   // Not reached: the runtime call returns elsewhere.
2182   __ stop("not-reached");
2183 
2184   __ bind(&done);
2185   context()->Plug(result_register());
2186 }
2187 
2188 
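// Materializes a JSIteratorResult in r0: the value is popped from the stack
// and |done| is stored as a literal boolean. Allocation falls back to the
// runtime when new space is exhausted.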
2189 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2190   Label allocate, done_allocate;
2191 
2192   __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &allocate, TAG_OBJECT);
2193   __ b(&done_allocate);
2194 
2195   __ bind(&allocate);
2196   __ Push(Smi::FromInt(JSIteratorResult::kSize));
2197   __ CallRuntime(Runtime::kAllocateInNewSpace);
2198 
2199   __ bind(&done_allocate);
2200   __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r1);
2201   __ pop(r2);
2202   __ LoadRoot(r3,
2203               done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
2204   __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
2205   __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2206   __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2207   __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
2208   __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset));
2209   __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset));
2210 }
2211 
2212 
2213 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2214   SetExpressionPosition(prop);
2215   Literal* key = prop->key()->AsLiteral();
2216   DCHECK(!prop->IsSuperAccess());
2217 
2218   __ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2219   __ mov(LoadDescriptor::SlotRegister(),
2220          Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2221   CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2222 }
2223 
2224 
2225 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2226   // Stack: receiver, home_object.
2227   SetExpressionPosition(prop);
2228   Literal* key = prop->key()->AsLiteral();
2229   DCHECK(!key->value()->IsSmi());
2230   DCHECK(prop->IsSuperAccess());
2231 
2232   __ Push(key->value());
2233   __ Push(Smi::FromInt(language_mode()));
2234   __ CallRuntime(Runtime::kLoadFromSuper);
2235 }
2236 
2237 
2238 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2239   SetExpressionPosition(prop);
2240   Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2241   __ mov(LoadDescriptor::SlotRegister(),
2242          Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2243   CallIC(ic);
2244 }
2245 
2246 
2247 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2248   // Stack: receiver, home_object, key.
2249   SetExpressionPosition(prop);
2250   __ Push(Smi::FromInt(language_mode()));
2251   __ CallRuntime(Runtime::kLoadKeyedFromSuper);
2252 }
2253 
2254 
2255 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2256                                               Token::Value op,
2257                                               Expression* left_expr,
2258                                               Expression* right_expr) {
2259   Label done, smi_case, stub_call;
2260 
2261   Register scratch1 = r2;
2262   Register scratch2 = r3;
2263 
2264   // Get the arguments.
2265   Register left = r1;
2266   Register right = r0;
2267   __ pop(left);
2268 
2269   // Perform combined smi check on both operands.
2270   __ orr(scratch1, left, Operand(right));
2271   STATIC_ASSERT(kSmiTag == 0);
2272   JumpPatchSite patch_site(masm_);
2273   patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2274 
2275   __ bind(&stub_call);
2276   Handle<Code> code =
2277       CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2278   CallIC(code, expr->BinaryOperationFeedbackId());
2279   patch_site.EmitPatchInfo();
2280   __ jmp(&done);
2281 
2282   __ bind(&smi_case);
2283   // Smi case. This code works the same way as the smi-smi case in the type
2284   // recording binary operation stub.
2285   switch (op) {
2286     case Token::SAR:
2287       __ GetLeastBitsFromSmi(scratch1, right, 5);
2288       __ mov(right, Operand(left, ASR, scratch1));
2289       __ bic(right, right, Operand(kSmiTagMask));
2290       break;
2291     case Token::SHL: {
2292       __ SmiUntag(scratch1, left);
2293       __ GetLeastBitsFromSmi(scratch2, right, 5);
2294       __ mov(scratch1, Operand(scratch1, LSL, scratch2));
2295       __ TrySmiTag(right, scratch1, &stub_call);
2296       break;
2297     }
2298     case Token::SHR: {
2299       __ SmiUntag(scratch1, left);
2300       __ GetLeastBitsFromSmi(scratch2, right, 5);
2301       __ mov(scratch1, Operand(scratch1, LSR, scratch2));
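      // An unsigned result with either of its top two bits set cannot be
      // represented as a 31-bit Smi, so defer to the stub.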
2302       __ tst(scratch1, Operand(0xc0000000));
2303       __ b(ne, &stub_call);
2304       __ SmiTag(right, scratch1);
2305       break;
2306     }
2307     case Token::ADD:
2308       __ add(scratch1, left, Operand(right), SetCC);
2309       __ b(vs, &stub_call);
2310       __ mov(right, scratch1);
2311       break;
2312     case Token::SUB:
2313       __ sub(scratch1, left, Operand(right), SetCC);
2314       __ b(vs, &stub_call);
2315       __ mov(right, scratch1);
2316       break;
2317     case Token::MUL: {
2318       __ SmiUntag(ip, right);
2319       __ smull(scratch1, scratch2, left, ip);
2320       __ mov(ip, Operand(scratch1, ASR, 31));
2321       __ cmp(ip, Operand(scratch2));
2322       __ b(ne, &stub_call);
2323       __ cmp(scratch1, Operand::Zero());
2324       __ mov(right, Operand(scratch1), LeaveCC, ne);
2325       __ b(ne, &done);
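      // The product is zero: the sign of left + right distinguishes +0
      // (representable) from -0 (not a Smi), which must go to the stub.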
2326       __ add(scratch2, right, Operand(left), SetCC);
2327       __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
2328       __ b(mi, &stub_call);
2329       break;
2330     }
2331     case Token::BIT_OR:
2332       __ orr(right, left, Operand(right));
2333       break;
2334     case Token::BIT_AND:
2335       __ and_(right, left, Operand(right));
2336       break;
2337     case Token::BIT_XOR:
2338       __ eor(right, left, Operand(right));
2339       break;
2340     default:
2341       UNREACHABLE();
2342   }
2343 
2344   __ bind(&done);
2345   context()->Plug(r0);
2346 }
2347 
2348 
2349 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2350   // Constructor is in r0.
2351   DCHECK(lit != NULL);
2352   __ push(r0);
2353 
2354   // No access check is needed here since the constructor is created by the
2355   // class literal.
2356   Register scratch = r1;
2357   __ ldr(scratch,
2358          FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset));
2359   __ push(scratch);
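  // Stack: constructor, prototype (top). Static members are installed on the
  // constructor, instance members on the prototype.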
2360 
2361   for (int i = 0; i < lit->properties()->length(); i++) {
2362     ObjectLiteral::Property* property = lit->properties()->at(i);
2363     Expression* value = property->value();
2364 
2365     if (property->is_static()) {
2366       __ ldr(scratch, MemOperand(sp, kPointerSize));  // constructor
2367     } else {
2368       __ ldr(scratch, MemOperand(sp, 0));  // prototype
2369     }
2370     __ push(scratch);
2371     EmitPropertyKey(property, lit->GetIdForProperty(i));
2372 
2373     // The static "prototype" property is read-only. The non-computed
2374     // property name case is handled in the parser. Since this is the only
2375     // case where an own read-only property must be checked for, we special
2376     // case it here rather than checking every property.
2377     if (property->is_static() && property->is_computed_name()) {
2378       __ CallRuntime(Runtime::kThrowIfStaticPrototype);
2379       __ push(r0);
2380     }
2381 
2382     VisitForStackValue(value);
2383     if (NeedsHomeObject(value)) {
2384       EmitSetHomeObject(value, 2, property->GetSlot());
2385     }
2386 
2387     switch (property->kind()) {
2388       case ObjectLiteral::Property::CONSTANT:
2389       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2390       case ObjectLiteral::Property::PROTOTYPE:
2391         UNREACHABLE();
2392       case ObjectLiteral::Property::COMPUTED:
2393         __ CallRuntime(Runtime::kDefineClassMethod);
2394         break;
2395 
2396       case ObjectLiteral::Property::GETTER:
2397         __ mov(r0, Operand(Smi::FromInt(DONT_ENUM)));
2398         __ push(r0);
2399         __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked);
2400         break;
2401 
2402       case ObjectLiteral::Property::SETTER:
2403         __ mov(r0, Operand(Smi::FromInt(DONT_ENUM)));
2404         __ push(r0);
2405         __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked);
2406         break;
2407 
2408       default:
2409         UNREACHABLE();
2410     }
2411   }
2412 
2413   // Set both the prototype and constructor to have fast properties, and also
2414   // freeze them in strong mode.
2415   __ CallRuntime(Runtime::kFinalizeClassDefinition);
2416 }
2417 
2418 
2419 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2420   __ pop(r1);
2421   Handle<Code> code =
2422       CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2423   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
2424   CallIC(code, expr->BinaryOperationFeedbackId());
2425   patch_site.EmitPatchInfo();
2426   context()->Plug(r0);
2427 }
2428 
2429 
2430 void FullCodeGenerator::EmitAssignment(Expression* expr,
2431                                        FeedbackVectorSlot slot) {
2432   DCHECK(expr->IsValidReferenceExpressionOrThis());
2433 
2434   Property* prop = expr->AsProperty();
2435   LhsKind assign_type = Property::GetAssignType(prop);
2436 
2437   switch (assign_type) {
2438     case VARIABLE: {
2439       Variable* var = expr->AsVariableProxy()->var();
2440       EffectContext context(this);
2441       EmitVariableAssignment(var, Token::ASSIGN, slot);
2442       break;
2443     }
2444     case NAMED_PROPERTY: {
2445       __ push(r0);  // Preserve value.
2446       VisitForAccumulatorValue(prop->obj());
2447       __ Move(StoreDescriptor::ReceiverRegister(), r0);
2448       __ pop(StoreDescriptor::ValueRegister());  // Restore value.
2449       __ mov(StoreDescriptor::NameRegister(),
2450              Operand(prop->key()->AsLiteral()->value()));
2451       EmitLoadStoreICSlot(slot);
2452       CallStoreIC();
2453       break;
2454     }
2455     case NAMED_SUPER_PROPERTY: {
2456       __ Push(r0);
2457       VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2458       VisitForAccumulatorValue(
2459           prop->obj()->AsSuperPropertyReference()->home_object());
2460       // stack: value, this; r0: home_object
2461       Register scratch = r2;
2462       Register scratch2 = r3;
2463       __ mov(scratch, result_register());              // home_object
2464       __ ldr(r0, MemOperand(sp, kPointerSize));        // value
2465       __ ldr(scratch2, MemOperand(sp, 0));             // this
2466       __ str(scratch2, MemOperand(sp, kPointerSize));  // this
2467       __ str(scratch, MemOperand(sp, 0));              // home_object
2468       // stack: this, home_object; r0: value
2469       EmitNamedSuperPropertyStore(prop);
2470       break;
2471     }
2472     case KEYED_SUPER_PROPERTY: {
2473       __ Push(r0);
2474       VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2475       VisitForStackValue(
2476           prop->obj()->AsSuperPropertyReference()->home_object());
2477       VisitForAccumulatorValue(prop->key());
2478       Register scratch = r2;
2479       Register scratch2 = r3;
2480       __ ldr(scratch2, MemOperand(sp, 2 * kPointerSize));  // value
2481       // stack: value, this, home_object; r0: key, r3: value
2482       __ ldr(scratch, MemOperand(sp, kPointerSize));  // this
2483       __ str(scratch, MemOperand(sp, 2 * kPointerSize));
2484       __ ldr(scratch, MemOperand(sp, 0));  // home_object
2485       __ str(scratch, MemOperand(sp, kPointerSize));
2486       __ str(r0, MemOperand(sp, 0));
2487       __ Move(r0, scratch2);
2488       // stack: this, home_object, key; r0: value.
2489       EmitKeyedSuperPropertyStore(prop);
2490       break;
2491     }
2492     case KEYED_PROPERTY: {
2493       __ push(r0);  // Preserve value.
2494       VisitForStackValue(prop->obj());
2495       VisitForAccumulatorValue(prop->key());
2496       __ Move(StoreDescriptor::NameRegister(), r0);
2497       __ Pop(StoreDescriptor::ValueRegister(),
2498              StoreDescriptor::ReceiverRegister());
2499       EmitLoadStoreICSlot(slot);
2500       Handle<Code> ic =
2501           CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2502       CallIC(ic);
2503       break;
2504     }
2505   }
2506   context()->Plug(r0);
2507 }
2508 
2509 
2510 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2511     Variable* var, MemOperand location) {
2512   __ str(result_register(), location);
2513   if (var->IsContextSlot()) {
2514     // RecordWrite may destroy all its register arguments.
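    // Callers computed |location| with VarOperand(var, r1), so for a context
    // slot r1 still holds the context object needed by the write barrier.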
2515     __ mov(r3, result_register());
2516     int offset = Context::SlotOffset(var->index());
2517     __ RecordWriteContextSlot(
2518         r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2519   }
2520 }
2521 
2522 
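// Stores the value in the result register into |var|, dispatching on the
// variable's mode and on whether |op| is an initializing assignment, and
// emitting the hole checks and errors the language mode requires.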
2523 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2524                                                FeedbackVectorSlot slot) {
2525   if (var->IsUnallocated()) {
2526     // Global var, const, or let.
2527     __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2528     __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2529     EmitLoadStoreICSlot(slot);
2530     CallStoreIC();
2531 
2532   } else if (var->mode() == LET && op != Token::INIT) {
2533     // Non-initializing assignment to let variable needs a write barrier.
2534     DCHECK(!var->IsLookupSlot());
2535     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2536     Label assign;
2537     MemOperand location = VarOperand(var, r1);
2538     __ ldr(r3, location);
2539     __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2540     __ b(ne, &assign);
2541     __ mov(r3, Operand(var->name()));
2542     __ push(r3);
2543     __ CallRuntime(Runtime::kThrowReferenceError);
2544     // Perform the assignment.
2545     __ bind(&assign);
2546     EmitStoreToStackLocalOrContextSlot(var, location);
2547 
2548   } else if (var->mode() == CONST && op != Token::INIT) {
2549     // Assignment to const variable needs a write barrier.
2550     DCHECK(!var->IsLookupSlot());
2551     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2552     Label const_error;
2553     MemOperand location = VarOperand(var, r1);
2554     __ ldr(r3, location);
2555     __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2556     __ b(ne, &const_error);
2557     __ mov(r3, Operand(var->name()));
2558     __ push(r3);
2559     __ CallRuntime(Runtime::kThrowReferenceError);
2560     __ bind(&const_error);
2561     __ CallRuntime(Runtime::kThrowConstAssignError);
2562 
2563   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2564     // Initializing assignment to const {this} needs a write barrier.
2565     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2566     Label uninitialized_this;
2567     MemOperand location = VarOperand(var, r1);
2568     __ ldr(r3, location);
2569     __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2570     __ b(eq, &uninitialized_this);
2571     __ mov(r0, Operand(var->name()));
2572     __ Push(r0);
2573     __ CallRuntime(Runtime::kThrowReferenceError);
2574     __ bind(&uninitialized_this);
2575     EmitStoreToStackLocalOrContextSlot(var, location);
2576 
2577   } else if (!var->is_const_mode() ||
2578              (var->mode() == CONST && op == Token::INIT)) {
2579     if (var->IsLookupSlot()) {
2580       // Assignment to var.
2581       __ push(r0);  // Value.
2582       __ mov(r1, Operand(var->name()));
2583       __ mov(r0, Operand(Smi::FromInt(language_mode())));
2584       __ Push(cp, r1, r0);  // Context, name, language mode.
2585       __ CallRuntime(Runtime::kStoreLookupSlot);
2586     } else {
2587       // Assignment to var or initializing assignment to let/const in harmony
2588       // mode.
2589       DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2590       MemOperand location = VarOperand(var, r1);
2591       if (generate_debug_code_ && var->mode() == LET && op == Token::INIT) {
2592         // Check for an uninitialized let binding.
2593         __ ldr(r2, location);
2594         __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2595         __ Check(eq, kLetBindingReInitialization);
2596       }
2597       EmitStoreToStackLocalOrContextSlot(var, location);
2598     }
2599 
2600   } else if (var->mode() == CONST_LEGACY && op == Token::INIT) {
2601     // Const initializers need a write barrier.
2602     DCHECK(!var->IsParameter());  // No const parameters.
2603     if (var->IsLookupSlot()) {
2604       __ push(r0);
2605       __ mov(r0, Operand(var->name()));
2606       __ Push(cp, r0);  // Context and name.
2607       __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot);
2608     } else {
2609       DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2610       Label skip;
2611       MemOperand location = VarOperand(var, r1);
2612       __ ldr(r2, location);
2613       __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2614       __ b(ne, &skip);
2615       EmitStoreToStackLocalOrContextSlot(var, location);
2616       __ bind(&skip);
2617     }
2618 
2619   } else {
2620     DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2621     if (is_strict(language_mode())) {
2622       __ CallRuntime(Runtime::kThrowConstAssignError);
2623     }
2624     // Silently ignore store in sloppy mode.
2625   }
2626 }
2627 
2628 
2629 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2630   // Assignment to a property, using a named store IC.
2631   Property* prop = expr->target()->AsProperty();
2632   DCHECK(prop != NULL);
2633   DCHECK(prop->key()->IsLiteral());
2634 
2635   __ mov(StoreDescriptor::NameRegister(),
2636          Operand(prop->key()->AsLiteral()->value()));
2637   __ pop(StoreDescriptor::ReceiverRegister());
2638   EmitLoadStoreICSlot(expr->AssignmentSlot());
2639   CallStoreIC();
2640 
2641   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2642   context()->Plug(r0);
2643 }
2644 
2645 
2646 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2647   // Assignment to named property of super.
2648   // r0 : value
2649   // stack : receiver ('this'), home_object
2650   DCHECK(prop != NULL);
2651   Literal* key = prop->key()->AsLiteral();
2652   DCHECK(key != NULL);
2653 
2654   __ Push(key->value());
2655   __ Push(r0);
2656   __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2657                                              : Runtime::kStoreToSuper_Sloppy));
2658 }
2659 
2660 
2661 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2662   // Assignment to keyed property of super.
2663   // r0 : value
2664   // stack : receiver ('this'), home_object, key
2665   DCHECK(prop != NULL);
2666 
2667   __ Push(r0);
2668   __ CallRuntime((is_strict(language_mode())
2669                       ? Runtime::kStoreKeyedToSuper_Strict
2670                       : Runtime::kStoreKeyedToSuper_Sloppy));
2671 }
2672 
2673 
2674 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2675   // Assignment to a property, using a keyed store IC.
2676   __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2677   DCHECK(StoreDescriptor::ValueRegister().is(r0));
2678 
2679   Handle<Code> ic =
2680       CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2681   EmitLoadStoreICSlot(expr->AssignmentSlot());
2682   CallIC(ic);
2683 
2684   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2685   context()->Plug(r0);
2686 }
2687 
2688 
2689 void FullCodeGenerator::VisitProperty(Property* expr) {
2690   Comment cmnt(masm_, "[ Property");
2691   SetExpressionPosition(expr);
2692 
2693   Expression* key = expr->key();
2694 
2695   if (key->IsPropertyName()) {
2696     if (!expr->IsSuperAccess()) {
2697       VisitForAccumulatorValue(expr->obj());
2698       __ Move(LoadDescriptor::ReceiverRegister(), r0);
2699       EmitNamedPropertyLoad(expr);
2700     } else {
2701       VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2702       VisitForStackValue(
2703           expr->obj()->AsSuperPropertyReference()->home_object());
2704       EmitNamedSuperPropertyLoad(expr);
2705     }
2706   } else {
2707     if (!expr->IsSuperAccess()) {
2708       VisitForStackValue(expr->obj());
2709       VisitForAccumulatorValue(expr->key());
2710       __ Move(LoadDescriptor::NameRegister(), r0);
2711       __ pop(LoadDescriptor::ReceiverRegister());
2712       EmitKeyedPropertyLoad(expr);
2713     } else {
2714       VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2715       VisitForStackValue(
2716           expr->obj()->AsSuperPropertyReference()->home_object());
2717       VisitForStackValue(expr->key());
2718       EmitKeyedSuperPropertyLoad(expr);
2719     }
2720   }
2721   PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2722   context()->Plug(r0);
2723 }
2724 
2725 
2726 void FullCodeGenerator::CallIC(Handle<Code> code,
2727                                TypeFeedbackId ast_id) {
2728   ic_total_count_++;
2729   // All calls must have a predictable size in full-codegen code to ensure that
2730   // the debugger can patch them correctly.
2731   __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2732           NEVER_INLINE_TARGET_ADDRESS);
2733 }
2734 
2735 
2736 // Code common for calls using the IC.
2737 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2738   Expression* callee = expr->expression();
2739 
2740   // Get the target function.
2741   ConvertReceiverMode convert_mode;
2742   if (callee->IsVariableProxy()) {
2743     { StackValueContext context(this);
2744       EmitVariableLoad(callee->AsVariableProxy());
2745       PrepareForBailout(callee, NO_REGISTERS);
2746     }
2747     // Push undefined as receiver. This is patched in the method prologue if it
2748     // is a sloppy mode method.
2749     __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2750     __ push(ip);
2751     convert_mode = ConvertReceiverMode::kNullOrUndefined;
2752   } else {
2753     // Load the function from the receiver.
2754     DCHECK(callee->IsProperty());
2755     DCHECK(!callee->AsProperty()->IsSuperAccess());
2756     __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2757     EmitNamedPropertyLoad(callee->AsProperty());
2758     PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2759     // Push the target function under the receiver.
2760     __ ldr(ip, MemOperand(sp, 0));
2761     __ push(ip);
2762     __ str(r0, MemOperand(sp, kPointerSize));
2763     convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2764   }
2765 
2766   EmitCall(expr, convert_mode);
2767 }
2768 
2769 
void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());
  SetExpressionPosition(prop);

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  const Register scratch = r1;
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
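  // Note: 'this' (now in r0) is pushed twice below. The lower copy stays on
  // the stack as the receiver of the eventual method call; the upper copy is
  // consumed, together with home_object, key and language_mode, by the
  // Runtime::kLoadFromSuper call (see the stack layout comment below).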
  __ Push(r0);
  __ Push(r0);
  __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
  __ Push(scratch);
  __ Push(key->value());
  __ Push(Smi::FromInt(language_mode()));

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  //  - language_mode
  __ CallRuntime(Runtime::kLoadFromSuper);

  // Replace home_object with target function.
  __ str(r0, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
  __ Move(LoadDescriptor::NameRegister(), r0);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  __ ldr(ip, MemOperand(sp, 0));
  __ push(ip);
  __ str(r0, MemOperand(sp, kPointerSize));

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}


void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetExpressionPosition(prop);
  // Load the function from the receiver.
  const Register scratch = r1;
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  __ Push(r0);
  __ Push(r0);
  __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
  __ Push(scratch);
  VisitForStackValue(prop->key());
  __ Push(Smi::FromInt(language_mode()));

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  //  - language_mode
  __ CallRuntime(Runtime::kLoadKeyedFromSuper);

  // Replace home_object with target function.
  __ str(r0, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
  SetCallPosition(expr);
  Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, mode).code();
  __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);

  RecordJSReturnSite(expr);
  // Restore context register.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, r0);
}


void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // r4: copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ ldr(r4, MemOperand(sp, arg_count * kPointerSize));
  } else {
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
  }

  // r3: the function of the enclosing frame.
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  // r2: language mode.
  __ mov(r2, Operand(Smi::FromInt(language_mode())));

  // r1: the start position of the scope the call resides in.
  __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));

  // Do the runtime call.
  __ Push(r4, r3, r2, r1);
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
}


// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;
    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in r0)
    // and the object holding it (returned in r1).
    DCHECK(!context_register().is(r2));
    __ mov(r2, Operand(callee->name()));
    __ Push(context_register(), r2);
    __ CallRuntime(Runtime::kLoadLookupSlot);
    __ Push(r0, r1);  // Function, receiver.
    PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ b(&call);
      __ bind(&done);
      // Push function.
      __ push(r0);
      // The receiver is implicitly the global receiver. Indicate this
      // by passing undefined to the call function stub.
      __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
      __ push(r1);
      __ bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ push(r2);  // Reserved receiver slot.
  }
}


void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
  // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
  // to resolve the function we need to call.  Then we call the resolved
  // function using the given arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  PushCalleeAndWithBaseObject(expr);

  // Push the arguments.
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Push a copy of the function (found below the arguments) and
  // resolve eval.
  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ push(r1);
  EmitResolvePossiblyDirectEval(arg_count);

  // Touch up the stack with the resolved function.
  __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));

  PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);

  // Record source position for debugger.
  SetCallPosition(expr);
  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ mov(r0, Operand(arg_count));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  RecordJSReturnSite(expr);
  // Restore context register.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, r0);
}


void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into r1 and r0.
  __ mov(r0, Operand(arg_count));
  __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  __ EmitLoadTypeFeedbackVector(r2);
  __ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));

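  // At this point the stub's inputs have been set up by the code above: argc
  // in r0, the constructor in r1, the type feedback vector in r2 and the
  // feedback slot (as a smi) in r3.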
  CallConstructStub stub(isolate());
  __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  // Restore context register.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->Plug(r0);
}


void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  SuperCallReference* super_call_ref =
      expr->expression()->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  // Push the super constructor target on the stack (may be null,
  // but the Construct builtin can deal with that properly).
  VisitForAccumulatorValue(super_call_ref->this_function_var());
  __ AssertFunction(result_register());
  __ ldr(result_register(),
         FieldMemOperand(result_register(), HeapObject::kMapOffset));
  __ ldr(result_register(),
         FieldMemOperand(result_register(), Map::kPrototypeOffset));
  __ Push(result_register());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load new target into r3.
  VisitForAccumulatorValue(super_call_ref->new_target_var());
  __ mov(r3, result_register());

  // Load function and argument count into r1 and r0.
  __ mov(r0, Operand(arg_count));
  __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));

  __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);

  RecordJSReturnSite(expr);

  // Restore context register.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->Plug(r0);
}


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ SmiTst(r0);
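  // SmiTst sets the condition flags from r0 & kSmiTagMask; since kSmiTag is
  // zero, the eq condition below holds exactly when r0 is a smi.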
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, FIRST_JS_RECEIVER_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsSimdValue(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, SIMD128_VALUE_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r2, FIRST_FUNCTION_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(hs, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
  __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
  __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
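  // In the IEEE 754 double encoding, -0 is the only value whose upper
  // (sign/exponent) word is 0x80000000 while the lower (mantissa) word is
  // zero. The second cmp below is predicated on eq, so the eq condition
  // survives only if both words match.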
  __ cmp(r2, Operand(0x80000000));
  __ cmp(r1, Operand(0x00000000), eq);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_TYPED_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_PROXY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ pop(r1);
  __ cmp(r0, r1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // ArgumentsAccessStub expects the key in r1 and the formal
  // parameter count in r0.
  VisitForAccumulatorValue(args->at(0));
  __ mov(r1, r0);
  __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);

  // Get the number of formal parameters.
  __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
  __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq);
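  // The load above is predicated on eq, so for an ordinary (non-adaptor)
  // frame r0 still holds the formal parameter count loaded earlier.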

  context()->Plug(r0);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(r0, &null);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ CompareObjectType(r0, r0, r1, FIRST_JS_RECEIVER_TYPE);
  // Map is now in r0.
  __ b(lt, &null);

  // Return 'Function' for JSFunction objects.
  __ cmp(r1, Operand(JS_FUNCTION_TYPE));
  __ b(eq, &function);

  // Check if the constructor in the map is a JS function.
  Register instance_type = r2;
  __ GetMapConstructor(r0, r0, r1, instance_type);
  __ cmp(instance_type, Operand(JS_FUNCTION_TYPE));
  __ b(ne, &non_function_constructor);

  // r0 now contains the constructor function. Grab the
  // instance class name from there.
  __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ b(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(r0, Heap::kFunction_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(r0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(r0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(r0);
}


void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(r0, &done);
  // If the object is not a value type, return the object.
  __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
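  // The load below is predicated on eq: only for JSValue wrappers is r0
  // replaced with the wrapped value; otherwise the object itself is returned.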
  __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq);

  __ bind(&done);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = nullptr;
  Label* if_false = nullptr;
  Label* fall_through = nullptr;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_DATE_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = r0;
  Register index = r1;
  Register value = r2;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value);
    __ Check(eq, kNonSmiValue);
    __ SmiTst(index);
    __ Check(eq, kNonSmiIndex);
    __ SmiUntag(index, index);
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ add(ip,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
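  // The index is still a smi (the element index shifted left by kSmiTagSize);
  // the LSR in the address computation untags it on the fly.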
  __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
  context()->Plug(string);
}


void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = r0;
  Register index = r1;
  Register value = r2;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value);
    __ Check(eq, kNonSmiValue);
    __ SmiTst(index);
    __ Check(eq, kNonSmiIndex);
    __ SmiUntag(index, index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ add(ip,
         string,
         Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
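  // For a two-byte string the byte offset of element i is i * 2, which is
  // exactly the smi encoding of i, so the tagged index can be used directly.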
  __ strh(value, MemOperand(ip, index));
  context()->Plug(string);
}


void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));  // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(r1);  // r0 = value. r1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(r1, &done);

  // If the object is not a value type, return the value.
  __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
  __ b(ne, &done);

  // Store the value.
  __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
  // Update the write barrier.  Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(r2, r0);
  __ RecordWriteField(
      r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitToInteger(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());

  // Load the argument into r0 and convert it.
  VisitForAccumulatorValue(args->at(0));

  // Convert the object to an integer.
  Label done_convert;
  __ JumpIfSmi(r0, &done_convert);
  __ Push(r0);
  __ CallRuntime(Runtime::kToInteger);
  __ bind(&done_convert);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitToName(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());

  // Load the argument into r0 and convert it.
  VisitForAccumulatorValue(args->at(0));

  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
  __ CompareObjectType(r0, r1, r1, LAST_NAME_TYPE);
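  // ls (unsigned lower-or-same) holds when the instance type is at most
  // LAST_NAME_TYPE, i.e. the argument is already a name (string or symbol).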
  __ b(ls, &done_convert);
  __ bind(&convert);
  __ Push(r0);
  __ CallRuntime(Runtime::kToName);
  __ bind(&done_convert);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(r0, r1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(r1);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register result = r3;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register scratch = r3;
  Register result = r0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ mov(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
  // Move target to r1.
  int const argc = args->length() - 2;
  __ ldr(r1, MemOperand(sp, (argc + 1) * kPointerSize));
  // Call the target.
  __ mov(r0, Operand(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  // Restore context register.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  // Discard the function left on TOS.
  context()->DropAndPlug(1, r0);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
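  // eq holds when all bits of kContainsCachedArrayIndexMask are clear, which
  // is the hash field encoding for "contains a cached array index".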
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(r0);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ IndexFromHash(r0, r0);

  context()->Plug(r0);
}


void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(r0);
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldr(r0, FieldMemOperand(r0, Map::kPrototypeOffset));
  context()->Plug(r0);
}


void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator, non_trivial_array,
      not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register have disjoint lifetimes.
  Register array = r0;
  Register elements = no_reg;  // Will be r0.
  Register result = no_reg;  // Will be r0.
  Register separator = r1;
  Register array_length = r2;
  Register result_pos = no_reg;  // Will be r2.
  Register string_length = r3;
  Register string = r4;
  Register element = r5;
  Register elements_end = r6;
  Register scratch = r9;

  // Separator operand is on the stack.
  __ pop(separator);

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE);
  __ b(ne, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(scratch, array_length, &bailout);

  // If the array has length zero, return the empty string.
  __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length, SetCC);
  __ b(ne, &non_trivial_array);
  __ LoadRoot(r0, Heap::kempty_stringRootIndex);
  __ b(&done);

  __ bind(&non_trivial_array);

  // Get the FixedArray containing array's elements.
  elements = array;
  __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.

  // Check that all array elements are sequential one-byte strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ mov(string_length, Operand::Zero());
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  //   elements: Fixed array of strings.
  //   array_length: Length of the fixed array of strings (not smi).
  //   separator: Separator string.
  //   string_length: Accumulated sum of string lengths (smi).
  //   element: Current array element.
  //   elements_end: Array end.
  if (generate_debug_code_) {
    __ cmp(array_length, Operand::Zero());
    __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
  }
  __ bind(&loop);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ JumpIfSmi(string, &bailout);
  __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);
  __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ add(string_length, string_length, Operand(scratch), SetCC);
  __ b(vs, &bailout);
  __ cmp(element, elements_end);
  __ b(lt, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, Operand(1));
  __ b(ne, &not_size_one_array);
  __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ b(&done);

  __ bind(&not_size_one_array);

  // Live values in registers:
  //   separator: Separator string.
  //   array_length: Length of the array.
  //   string_length: Sum of string lengths (smi).
  //   elements: FixedArray of strings.

  // Check that the separator is a flat one-byte string.
  __ JumpIfSmi(separator, &bailout);
  __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string. array_length is
  // not a smi but the other values are, so the result is a smi.
  __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ sub(string_length, string_length, Operand(scratch));
  __ smull(scratch, ip, array_length, scratch);
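  // smull produces the full 64-bit product separator_length * array_length:
  // the low word lands in scratch, the high word in ip.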
  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
  // zero.
  __ cmp(ip, Operand::Zero());
  __ b(ne, &bailout);
  __ tst(scratch, Operand(0x80000000));
  __ b(ne, &bailout);
  __ add(string_length, string_length, Operand(scratch), SetCC);
  __ b(vs, &bailout);
  __ SmiUntag(string_length);

  // Bailout for large object allocations.
  __ cmp(string_length, Operand(Page::kMaxRegularHeapObjectSize));
  __ b(gt, &bailout);

  // Get first element in the array to free up the elements register to be used
  // for the result.
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  result = elements;  // End of live range for elements.
  elements = no_reg;
  // Live values in registers:
  //   element: First array element
  //   separator: Separator string
  //   string_length: Length of result string (not smi)
  //   array_length: Length of the array.
  __ AllocateOneByteString(result, string_length, scratch,
                           string,        // used as scratch
                           elements_end,  // used as scratch
                           &bailout);
  // Prepare for looping. Set up elements_end to end of the array. Set
  // result_pos to the position of the result where to write the first
  // character.
  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  result_pos = array_length;  // End of live range for array_length.
  array_length = no_reg;
  __ add(result_pos,
         result,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));

  // Check the length of the separator.
  __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ cmp(scratch, Operand(Smi::FromInt(1)));
  __ b(eq, &one_char_separator);
  __ b(gt, &long_separator);

  // Empty separator case
  __ bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &empty_separator_loop);  // End while (element < elements_end).
  DCHECK(result.is(r0));
  __ b(&done);

  // One-character separator case
  __ bind(&one_char_separator);
  // Replace separator with its one-byte character value.
  __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator one-byte char (in lower byte).

  // Copy the separator character to the result.
  __ strb(separator, MemOperand(result_pos, 1, PostIndex));

  // Copy next array element to the result.
  __ bind(&one_char_separator_loop_entry);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &one_char_separator_loop);  // End while (element < elements_end).
  DCHECK(result.is(r0));
  __ b(&done);

  // Long separator case (separator is more than one character). Entry is at
  // the label long_separator below.
  __ bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         separator,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);

  __ bind(&long_separator);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &long_separator_loop);  // End while (element < elements_end).
  DCHECK(result.is(r0));
  __ b(&done);

  __ bind(&bailout);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ mov(ip, Operand(debug_is_active));
  __ ldrb(r0, MemOperand(ip));
  __ SmiTag(r0);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &runtime, TAG_OBJECT);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r1);
  __ pop(r3);
  __ pop(r2);
  __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
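  // Initialize the five in-object fields of the JSIteratorResult: map, empty
  // properties and elements, then the 'value' and 'done' fields popped above.
  // The STATIC_ASSERT below checks that these five words are the whole object.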
  __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
  __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset));
  __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ b(&done);

  __ bind(&runtime);
  __ CallRuntime(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push undefined as the receiver.
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ push(r0);

  __ LoadNativeContextSlot(expr->context_index(), r0);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ mov(r0, Operand(arg_count));
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    Comment cmnt(masm_, "[ CallRuntime");
    EmitLoadJSRuntimeFunction(expr);

    // Push the target function under the receiver.
    __ ldr(ip, MemOperand(sp, 0));
    __ push(ip);
    __ str(r0, MemOperand(sp, kPointerSize));

    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
    EmitCallJSRuntimeFunction(expr);

    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    context()->DropAndPlug(1, r0);

  } else {
    const Runtime::Function* function = expr->function();
    switch (function->function_id) {
#define CALL_INTRINSIC_GENERATOR(Name)     \
  case Runtime::kInline##Name: {           \
    Comment cmnt(masm_, "[ Inline" #Name); \
    return Emit##Name(expr);               \
  }
      FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
      default: {
        Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
        // Push the arguments ("left-to-right").
        for (int i = 0; i < arg_count; i++) {
          VisitForStackValue(args->at(i));
        }

        // Call the C runtime function.
        PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
        __ CallRuntime(expr->function(), arg_count);
        context()->Plug(r0);
      }
    }
  }
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ CallRuntime(is_strict(language_mode())
                           ? Runtime::kDeleteProperty_Strict
                           : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(r0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          __ LoadGlobalObject(r2);
          __ mov(r1, Operand(var->name()));
          __ Push(r2, r1);
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(r0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          DCHECK(!context_register().is(r2));
          __ mov(r2, Operand(var->name()));
          __ Push(context_register(), r2);
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(r0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(r0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(r0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ mov(r3, r0);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(r0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


VisitCountOperation(CountOperation * expr)4205 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4206   DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4207 
4208   Comment cmnt(masm_, "[ CountOperation");
4209 
4210   Property* prop = expr->expression()->AsProperty();
4211   LhsKind assign_type = Property::GetAssignType(prop);
4212 
4213   // Evaluate expression and get value.
4214   if (assign_type == VARIABLE) {
4215     DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4216     AccumulatorValueContext context(this);
4217     EmitVariableLoad(expr->expression()->AsVariableProxy());
4218   } else {
4219     // Reserve space for result of postfix operation.
4220     if (expr->is_postfix() && !context()->IsEffect()) {
4221       __ mov(ip, Operand(Smi::FromInt(0)));
4222       __ push(ip);
4223     }
4224     switch (assign_type) {
4225       case NAMED_PROPERTY: {
4226         // Put the object both on the stack and in the register.
4227         VisitForStackValue(prop->obj());
4228         __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4229         EmitNamedPropertyLoad(prop);
4230         break;
4231       }
4232 
4233       case NAMED_SUPER_PROPERTY: {
4234         VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4235         VisitForAccumulatorValue(
4236             prop->obj()->AsSuperPropertyReference()->home_object());
4237         __ Push(result_register());
4238         const Register scratch = r1;
4239         __ ldr(scratch, MemOperand(sp, kPointerSize));
4240         __ Push(scratch);
4241         __ Push(result_register());
4242         EmitNamedSuperPropertyLoad(prop);
4243         break;
4244       }
4245 
4246       case KEYED_SUPER_PROPERTY: {
4247         VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4248         VisitForStackValue(
4249             prop->obj()->AsSuperPropertyReference()->home_object());
4250         VisitForAccumulatorValue(prop->key());
4251         __ Push(result_register());
4252         const Register scratch = r1;
4253         __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
4254         __ Push(scratch);
4255         __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
4256         __ Push(scratch);
4257         __ Push(result_register());
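        // The [this, home_object, key] triple is now duplicated on top of
        // the stack: the load below consumes one copy and the store emitted
        // after the count operation uses the other.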
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ ldr(LoadDescriptor::ReceiverRegister(),
               MemOperand(sp, 1 * kPointerSize));
        __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(r0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
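        // e.g. for 'o.x++' used as a value, the receiver sits on top with
        // the placeholder below it, so the old value is written to the
        // placeholder slot at sp + kPointerSize.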
        switch (assign_type) {
          case VARIABLE:
            __ push(r0);
            break;
          case NAMED_PROPERTY:
            __ str(r0, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
            __ str(r0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ str(r0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ str(r0, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }

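    // Smis keep a clear low tag bit, so Smi::FromInt(count_value) is just
    // count_value << 1 and can be added directly to the tagged word; the
    // V flag catches overflow out of the 31-bit smi payload.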
    __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
    __ b(vc, &done);
    // Call stub. Undo operation first.
    __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
    __ jmp(&stub_call);
    __ bind(&slow);
  }
  if (!is_strong(language_mode())) {
    ToNumberStub convert_stub(isolate());
    __ CallStub(&convert_stub);
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(r0);
          break;
        case NAMED_PROPERTY:
          __ str(r0, MemOperand(sp, kPointerSize));
          break;
        case NAMED_SUPER_PROPERTY:
          __ str(r0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ str(r0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_SUPER_PROPERTY:
          __ str(r0, MemOperand(sp, 3 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(r1, r0);
  __ mov(r0, Operand(Smi::FromInt(count_value)));

  SetExpressionPosition(expr);

  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD,
                                              strength(language_mode())).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  if (is_strong(language_mode())) {
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }
  // Store the value returned in r0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(r0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(r0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::NameRegister(),
             Operand(prop->key()->AsLiteral()->value()));
      __ pop(StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(expr->CountSlot());
      CallStoreIC();
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ Pop(StoreDescriptor::ReceiverRegister(),
             StoreDescriptor::NameRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      EmitLoadStoreICSlot(expr->CountSlot());
      CallIC(ic);
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
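  // Fast path for 'typeof x == "<literal>"' comparisons: instead of
  // materializing the type string, emit a direct check for each known
  // literal, e.g. a heap-number map check for "number".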
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(r0, if_true);
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
    Split(lt, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ CompareRoot(r0, Heap::kTrueValueRootIndex);
    __ b(eq, if_true);
    __ CompareRoot(r0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
    __ b(eq, if_true);
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => true.
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(ne, if_true, if_false, fall_through);

  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(r0, if_false);
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ and_(r1, r1,
            Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    __ cmp(r1, Operand(1 << Map::kIsCallable));
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareRoot(r0, Heap::kNullValueRootIndex);
    __ b(eq, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CompareObjectType(r0, r0, r1, FIRST_JS_RECEIVER_TYPE);
    __ b(lt, if_false);
    // Check for callable or undetectable objects => false.
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(eq, if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)   \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(r0, if_false);                                 \
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));    \
    __ CompareRoot(r0, Heap::k##Type##MapRootIndex);            \
    Split(eq, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
    // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetExpressionPosition(expr);

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ CallRuntime(Runtime::kHasProperty);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(r0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      __ pop(r1);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(r0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cond = CompareIC::ComputeCondition(op);
      __ pop(r1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
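        // A smi has a clear low tag bit, so if the OR of both operands
        // still has the bit clear, both operands are smis and one check
        // covers the pair.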
        __ orr(r2, r0, Operand(r1));
        patch_site.EmitJumpIfNotSmi(r2, &slow_case);
        __ cmp(r1, r0);
        Split(cond, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(
                            isolate(), op, strength(language_mode())).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ cmp(r0, Operand::Zero());
      Split(cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
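  // Fast path for comparisons against a nil literal: 'x === null' and
  // 'x === undefined' reduce to a single root comparison, while sloppy
  // 'x == null' goes through the CompareNilIC below.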
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(r1, nil_value);
    __ cmp(r0, r1);
    Split(eq, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ CompareRoot(r0, Heap::kTrueValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(r0);
}


Register FullCodeGenerator::result_register() {
  return r0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ ldr(dst, ContextMemOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip);
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ ldr(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(ip);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(r1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook return address in link register to stack (smi encoded Code* delta).
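  // (Cooking keeps a code-relative offset rather than a raw return address
  // on the stack, so a GC that moves the code object while the finally
  // block runs cannot leave a stale pointer behind; the smi tag makes the
  // value look like ordinary tagged data to the collector.)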
  __ sub(r1, lr, Operand(masm_->CodeObject()));
  __ SmiTag(r1);

  // Store the cooked return address while executing the finally block.
  __ push(r1);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ ldr(r1, MemOperand(ip));
  __ push(r1);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(r1));
  // Restore pending message from stack.
  __ pop(r1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ str(r1, MemOperand(ip));

  // Restore the cooked return address from the stack.
  __ pop(r1);

  // Restore the result register, then uncook the return address and return.
  __ pop(result_register());
  __ SmiUntag(r1);
  __ add(pc, r1, Operand(masm_->CodeObject()));
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(r1));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
  __ mov(ip, Operand(pending_message_obj));
  __ str(r1, MemOperand(ip));
}


void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorSlot slot) {
  DCHECK(!slot.IsInvalid());
  __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
         Operand(SmiFromSlot(slot)));
}


#undef __


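// Given the pc immediately after the 'blx ip' of a back-edge check, walk
// backwards to the first instruction of the sequence that loaded the target
// address into ip. The exact shape depends on the constant pool mode and on
// ARMv6 vs. ARMv7; see the DCHECKs below for each recognized sequence.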
static Address GetInterruptImmediateLoadAddress(Address pc) {
  Address load_address = pc - 2 * Assembler::kInstrSize;
  if (!FLAG_enable_embedded_constant_pool) {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
  } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
    // This is an extended constant pool lookup.
    if (CpuFeatures::IsSupported(ARMv7)) {
      load_address -= 2 * Assembler::kInstrSize;
      DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
      DCHECK(Assembler::IsMovT(
          Memory::int32_at(load_address + Assembler::kInstrSize)));
    } else {
      load_address -= 4 * Assembler::kInstrSize;
      DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + Assembler::kInstrSize)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
    }
  } else if (CpuFeatures::IsSupported(ARMv7) &&
             Assembler::IsMovT(Memory::int32_at(load_address))) {
    // This is a movw / movt immediate load.
    load_address -= Assembler::kInstrSize;
    DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
  } else if (!CpuFeatures::IsSupported(ARMv7) &&
             Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
    // This is a mov / orr immediate load.
    load_address -= 3 * Assembler::kInstrSize;
    DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
    DCHECK(Assembler::IsOrrImmed(
        Memory::int32_at(load_address + Assembler::kInstrSize)));
    DCHECK(Assembler::IsOrrImmed(
        Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
  } else {
    // This is a small constant pool lookup.
    DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
  }
  return load_address;
}


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
  Isolate* isolate = unoptimized_code->GetIsolate();
  CodePatcher patcher(isolate, branch_address, 1);
  switch (target_state) {
    case INTERRUPT:
    {
      //  <decrement profiling counter>
      //   bpl ok
      //   ; load interrupt stub address into ip - either of (for ARMv7):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   movw ip, #imm     |   movw ip, #imm
      //                          |   movt ip, #imm     |   movt ip, #imm
      //                          |   ldr  ip, [pp, ip]
      //   ; or (for ARMv6):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   mov ip, #imm      |   mov ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //   blx ip
      //  <reset profiling counter>
      //  ok-label

      // Calculate the branch offset to the ok-label: the difference between
      // the branch address and |pc| (which points at <blx ip>) plus the
      // kProfileCounterResetSequenceLength bytes of the reset sequence.
      int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
                          kProfileCounterResetSequenceLength;
      patcher.masm()->b(branch_offset, pl);
      break;
    }
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //  <decrement profiling counter>
      //   mov r0, r0 (NOP)
      //   ; load on-stack replacement address into ip - either of (for ARMv7):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   movw ip, #imm     |   movw ip, #imm
      //                          |   movt ip, #imm     |   movt ip, #imm
      //                          |   ldr  ip, [pp, ip]
      //   ; or (for ARMv6):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   mov ip, #imm      |   mov ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //   blx ip
      //  <reset profiling counter>
      //  ok-label
      patcher.masm()->nop();
      break;
  }

  // Replace the call address.
  Assembler::set_target_address_at(isolate, pc_immediate_load_address,
                                   unoptimized_code, replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));

  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
  Address interrupt_address = Assembler::target_address_at(
      pc_immediate_load_address, unoptimized_code);

  if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
    DCHECK(interrupt_address ==
           isolate->builtins()->InterruptCheck()->entry());
    return INTERRUPT;
  }

  DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));

  if (interrupt_address ==
      isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }

  DCHECK(interrupt_address ==
         isolate->builtins()->OsrAfterStackCheck()->entry());
  return OSR_AFTER_STACK_CHECK;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM