// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM64

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

#include "src/arm64/code-stubs-arm64.h"
#include "src/arm64/frames-arm64.h"
#include "src/arm64/macro-assembler-arm64.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

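// Records the location and register of an inlined smi check so that the IC
// machinery can later patch the check into a real smi-tag test (see
// PatchInlinedSmiCode in ic-arm64.cc).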
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    if (patch_site_.is_bound()) {
      DCHECK(info_emitted_);
    } else {
      DCHECK(reg_.IsNone());
    }
  }

  void EmitJumpIfNotSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
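    // Bit 0 of xzr is always clear, so the tbz below always branches; once
    // smi feedback is available, the patcher rewrites the instruction to
    // test bit 0 of |reg| instead, turning it into a real smi-tag check.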
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    DCHECK(!reg.Is(csp));
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbz(xzr, 0, target);   // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    DCHECK(!reg.Is(csp));
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbnz(xzr, 0, target);  // Never taken before patched.
  }

  void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
    UseScratchRegisterScope temps(masm_);
    Register temp = temps.AcquireX();
    __ Orr(temp, reg1, reg2);
    EmitJumpIfNotSmi(temp, target);
  }

  void EmitPatchInfo() {
    Assembler::BlockPoolsScope scope(masm_);
    InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
#ifdef DEBUG
    info_emitted_ = true;
#endif
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
  Register reg_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   - x1: the JS function object being called (i.e. ourselves).
//   - x3: the new target value.
//   - cp: our context.
//   - fp: our caller's frame pointer.
//   - jssp: stack pointer.
//   - lr: return address.
//
// The function builds a JS frame. See JavaScriptFrameConstants in
// frames-arm64.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ Function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->literal()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ Debug("stop-at", __LINE__, BREAK);
  }
#endif

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kXRegSize;
    __ Peek(x10, receiver_offset);
    __ AssertNotSmi(x10);
    __ CompareObjectType(x10, x10, x11, FIRST_JS_RECEIVER_TYPE);
    __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack.
  // The MANUAL indicates that the scope shouldn't actually generate code
  // to set up the frame because we do it manually below.
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  // This call emits the following sequence in a way that can be patched for
  // code ageing support:
  //  Push(lr, fp, cp, x1);
  //  Add(fp, jssp, 2 * kPointerSize);
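  //
  // The sequence must keep a fixed size so that the code-ageing machinery
  // can later patch it in place.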
  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  // Reserve space on the stack for locals.
  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);

    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        DCHECK(jssp.Is(__ StackPointer()));
        __ Sub(x10, jssp, locals_count * kPointerSize);
        __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
        __ B(hs, &ok);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ Bind(&ok);
      }
      __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
      if (FLAG_optimize_for_size) {
        __ PushMultipleTimes(x10, locals_count);
      } else {
        const int kMaxPushes = 32;
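        // Larger frames use a counted loop so that code size stays bounded;
        // for example, 100 locals emit a three-iteration loop of 32 pushes
        // followed by four trailing pushes.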
        if (locals_count >= kMaxPushes) {
          int loop_iterations = locals_count / kMaxPushes;
          __ Mov(x2, loop_iterations);
          Label loop_header;
          __ Bind(&loop_header);
          // Do pushes.
          __ PushMultipleTimes(x10, kMaxPushes);
          __ Subs(x2, x2, 1);
          __ B(ne, &loop_header);
        }
        int remaining = locals_count % kMaxPushes;
        // Emit the remaining pushes.
        __ PushMultipleTimes(x10, remaining);
      }
    }
  }

  bool function_in_register_x1 = true;

  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in x1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ Mov(x10, Operand(info->scope()->GetScopeInfo(info->isolate())));
      __ Push(x1, x10);
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ Push(x3);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ Push(x1);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ Pop(x3);  // Restore new target.
      }
    }
    function_in_register_x1 = false;
    // Context is returned in x0.  It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ Mov(cp, x0);
    __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ Ldr(x10, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ Str(x10, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, static_cast<int>(target.offset()), x10,
                                    x11, kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding the function and the new target are both trashed
  // if we bail out here. But since that can only happen when the new target
  // is not used and we allocate a context, the value of
  // |function_in_register_x1| is still correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_x1) {
      __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as such.
    }
    SetVar(this_function_var, x1, x0, x2);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, x3, x0, x2);
  }

  // Possibly allocate a rest parameter array.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Mov(RestParamAccessDescriptor::parameter_count(),
           Smi::FromInt(num_parameters));
    __ Add(RestParamAccessDescriptor::parameter_pointer(), fp,
           StandardFrameConstants::kCallerSPOffset + offset);
    __ Mov(RestParamAccessDescriptor::rest_parameter_index(),
           Smi::FromInt(rest_index));

    function_in_register_x1 = false;

    RestParamAccessStub stub(isolate());
    __ CallStub(&stub);

    SetVar(rest_param, x0, x1, x2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    DCHECK(x1.is(ArgumentsAccessNewDescriptor::function()));
    if (!function_in_register_x1) {
      // Load this again, if it's used by the local context below.
      __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Mov(ArgumentsAccessNewDescriptor::parameter_count(),
           Smi::FromInt(num_parameters));
    __ Add(ArgumentsAccessNewDescriptor::parameter_pointer(), fp,
           StandardFrameConstants::kCallerSPOffset + offset);

    // Arguments to ArgumentsAccessStub:
    //   function, parameter pointer, parameter count.
    // The stub will rewrite parameter pointer and parameter count if the
    // previous stack frame was an arguments adapter frame.
    bool is_unmapped = is_strict(language_mode()) || !has_simple_parameters();
    ArgumentsAccessStub::Type type = ArgumentsAccessStub::ComputeType(
        is_unmapped, literal()->has_duplicate_parameters());
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, x0, x1, x2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    VisitForEffect(scope()->GetIllegalRedeclaration());

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope()->declarations());
    }

    // Assert that the declarations do not use ICs. Otherwise the debugger
    // won't be able to redirect a PC at an IC to the correct IC in newly
    // recompiled code.
    DCHECK_EQ(0, ic_total_count_);

    {
      Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      DCHECK(jssp.Is(__ StackPointer()));
      __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
      __ B(hs, &ok);
      PredictableCodeSizeScope predictable(masm_,
                                           Assembler::kCallSizeWithRelocation);
      __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ Bind(&ok);
    }

    {
      Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(literal()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emission of the pools, so they don't get emitted in the middle
  // of the back edge table.
  masm()->CheckVeneerPool(true, false);
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ Mov(x0, Smi::FromInt(0));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ Mov(x2, Operand(profiling_counter_));
  __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
  __ Subs(x3, x3, Smi::FromInt(delta));
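  // Subs leaves the condition flags set; callers branch on 'pl' to skip the
  // interrupt check while the counter is still non-negative.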
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ Mov(x2, Operand(profiling_counter_));
  __ Mov(x3, Smi::FromInt(reset_value));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  DCHECK(jssp.Is(__ StackPointer()));
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockPoolsScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
  // to reduce the absolute error due to the integer division. To do that,
  // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to
  // the result).
  int distance =
      static_cast<int>(masm_->SizeOfCodeGeneratedSince(back_edge_target) +
                       kCodeSizeMultiplier / 2);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
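  // For example, a back edge 3 * kCodeSizeMultiplier bytes away yields a
  // weight of 3 (assuming that is below kMaxBackEdgeWeight).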
  EmitProfilingCounterDecrement(weight);
  __ B(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ Bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");

  if (return_label_.is_bound()) {
    __ B(&return_label_);

  } else {
    __ Bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in x0.
      __ Push(result_register());
      __ CallRuntime(Runtime::kTraceExit);
      DCHECK(x0.Is(result_register()));
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ B(pl, &ok);
    __ Push(x0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ Pop(x0);
    EmitProfilingCounterReset();
    __ Bind(&ok);

    SetReturnPosition(literal());
    const Register& current_sp = __ StackPointer();
    // Nothing ensures 16-byte alignment here.
    DCHECK(!current_sp.Is(csp));
    __ Mov(current_sp, fp);
    __ Ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
    // Drop the arguments and receiver and return.
    // TODO(all): This implementation is overkill as it supports 2**31+1
    // arguments, consider how to improve it without creating a security
    // hole.
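    // The ldr_pcrel below loads the 64-bit literal emitted by dc64 three
    // instructions ahead (past the Add and the Ret); the literal holds the
    // byte size of the arguments plus receiver to drop.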
    __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
    __ Add(current_sp, current_sp, ip0);
    __ Ret();
    int32_t arg_count = info_->scope()->num_parameters() + 1;
    __ dc64(kXRegSize * arg_count);
  }
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ Push(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  // Root values have no side effects.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ Mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ Mov(result_register(), Operand(lit));
  __ Push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ Poke(reg, 0);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Mov(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ Bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ Bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(x10, Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(x10, Heap::kFalseValueRootIndex);
  __ Bind(&done);
  __ Push(x10);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(x10, value_root_index);
  __ Push(x10);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) {
      __ B(true_label_);
    }
  } else {
    if (false_label_ != fall_through_) {
      __ B(false_label_);
    }
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(eq, if_true, if_false, fall_through);
}


// If (cond), branch to if_true.
// If (!cond), branch to if_false.
// fall_through is used as an optimization in cases where only one branch
// instruction is necessary.
void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ B(cond, if_true);
  } else if (if_true == fall_through) {
    DCHECK(if_false != fall_through);
    __ B(NegateCondition(cond), if_false);
  } else {
    __ B(cond, if_true);
    __ B(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kXRegSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ Ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!AreAliased(src, scratch0, scratch1));
  MemOperand location = VarOperand(var, scratch0);
  __ Str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    // scratch0 contains the correct context.
    __ RecordWriteContextSlot(scratch0, static_cast<int>(location.offset()),
                              src, scratch1, kLRHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  // TODO(all): Investigate to see if there is something to work on here.
  Label skip;
  if (should_normalize) {
    __ B(&skip);
  }
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ CompareRoot(x0, Heap::kTrueValueRootIndex);
    Split(eq, if_true, if_false, NULL);
    __ Bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
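  // Lexical (and legacy const) bindings start out holed so that accesses
  // before initialization can be detected.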
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;

  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ Mov(x2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
        __ Push(x2, x0);
      } else {
        // Pushing 0 (xzr) indicates no initial value.
        __ Push(x2, xzr);
      }
      __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ Function Declaration");
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ Function Declaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                x2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Function Declaration");
      __ Mov(x2, Operand(variable->name()));
      __ Push(x2);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Mov(x11, Operand(pairs));
  Register flags = xzr;
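  // Smi::FromInt(0) is the zero tagged pointer, so this tests whether any
  // declaration flags are set; when they are all clear, xzr pushes the zero
  // smi directly without a scratch register.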
  if (Smi::FromInt(DeclareGlobalsFlags())) {
    flags = x10;
    __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
  }
  __ Push(x11, flags);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ Bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ Peek(x1, 0);   // Switch value.

    JumpPatchSite patch_site(masm_);
    if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
      Label slow_case;
      patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
      __ Cmp(x1, x0);
      __ B(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ B(clause->body_target());
      __ Bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
                                             strength(language_mode())).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ B(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
    __ Drop(1);
    __ B(clause->body_target());
    __ Bind(&skip);

    __ Cbnz(x0, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ B(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ Bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ B(nested_statement.break_label());
  } else {
    __ B(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ Bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ Bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // TODO(all): This visitor probably needs better comments and a revisit.

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
  Register null_value = x15;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Cmp(x0, null_value);
  __ B(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(x0, &convert);
  __ JumpIfObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE, &done_convert, ge);
  __ Bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ Bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ Push(x0);

  // Check for proxies.
  Label call_runtime;
  __ JumpIfObjectType(x0, x10, x11, JS_PROXY_TYPE, &call_runtime, eq);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ B(&use_cache);

  // Get the set of properties to enumerate.
  __ Bind(&call_runtime);
  __ Push(x0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array, no_descriptors;
  __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);

  // We got a map in register x0. Get the enumeration cache from it.
  __ Bind(&use_cache);

  __ EnumLengthUntagged(x1, x0);
  __ Cbz(x1, &no_descriptors);

  __ LoadInstanceDescriptors(x0, x2);
  __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
  __ Ldr(x2,
         FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ SmiTag(x1);
  // Map, enumeration cache, enum cache length, zero (both last as smis).
  __ Push(x0, x2, x1, xzr);
  __ B(&loop);

  __ Bind(&no_descriptors);
  __ Drop(1);
  __ B(&exit);

  // We got a fixed array in register x0. Iterate through that.
  __ Bind(&fixed_array);

  __ EmitLoadTypeFeedbackVector(x1);
  __ Mov(x10, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  int vector_index = SmiFromSlot(slot)->value();
  __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(vector_index)));
  __ Mov(x1, Smi::FromInt(1));  // Smi(1) indicates slow check.
  __ Ldr(x2, FieldMemOperand(x0, FixedArray::kLengthOffset));
  // Smi and array, fixed array length (as smi) and initial index.
  __ Push(x1, x0, x2, xzr);

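  // From here on the loop owns five stack slots (top to bottom): the index,
  // the length, the enum cache or fixed array, the cached map or smi(1), and
  // the enumerable object itself; breaking out of the loop drops all five.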
1132   // Generate code for doing the condition check.
1133   __ Bind(&loop);
1134   SetExpressionAsStatementPosition(stmt->each());
1135 
1136   // Load the current count to x0, load the length to x1.
1137   __ PeekPair(x0, x1, 0);
1138   __ Cmp(x0, x1);  // Compare to the array length.
1139   __ B(hs, loop_statement.break_label());
1140 
1141   // Get the current entry of the array into register r3.
1142   __ Peek(x10, 2 * kXRegSize);
1143   __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
1144   __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));
1145 
1146   // Get the expected map from the stack or a smi in the
1147   // permanent slow case into register x10.
1148   __ Peek(x2, 3 * kXRegSize);
1149 
1150   // Check if the expected map still matches that of the enumerable.
1151   // If not, we may have to filter the key.
1152   Label update_each;
1153   __ Peek(x1, 4 * kXRegSize);
1154   __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
1155   __ Cmp(x11, x2);
1156   __ B(eq, &update_each);
1157 
1158   // Convert the entry to a string or (smi) 0 if it isn't a property
1159   // any more. If the property has been removed while iterating, we
1160   // just skip it.
1161   __ Push(x1, x3);
1162   __ CallRuntime(Runtime::kForInFilter);
1163   PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1164   __ Mov(x3, x0);
1165   __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex,
1166                 loop_statement.continue_label());
1167 
1168   // Update the 'each' property or variable from the possibly filtered
1169   // entry in register x3.
1170   __ Bind(&update_each);
1171   __ Mov(result_register(), x3);
1172   // Perform the assignment as if via '='.
1173   { EffectContext context(this);
1174     EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1175     PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1176   }
1177 
1178   // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1179   PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1180   // Generate code for the body of the loop.
1181   Visit(stmt->body());
1182 
1183   // Generate code for going to the next element by incrementing
1184   // the index (smi) stored on top of the stack.
1185   __ Bind(loop_statement.continue_label());
1186   // TODO(all): We could use a callee saved register to avoid popping.
1187   __ Pop(x0);
1188   __ Add(x0, x0, Smi::FromInt(1));
1189   __ Push(x0);
1190 
1191   EmitBackEdgeBookkeeping(stmt, &loop);
1192   __ B(&loop);
1193 
1194   // Remove the pointers stored on the stack.
1195   __ Bind(loop_statement.break_label());
1196   __ Drop(5);
1197 
1198   // Exit and decrement the loop depth.
1199   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1200   __ Bind(&exit);
1201   decrement_loop_depth();
1202 }
1203 
1204 
EmitNewClosure(Handle<SharedFunctionInfo> info,bool pretenure)1205 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1206                                        bool pretenure) {
1207   // Use the fast case closure allocation code that allocates in new space for
1208   // nested functions that don't need literals cloning. If we're running with
1209   // the --always-opt or the --prepare-always-opt flag, we need to use the
1210   // runtime function so that the new function we are creating here gets a
1211   // chance to have its code optimized and doesn't just get a copy of the
1212   // existing unoptimized code.
1213   if (!FLAG_always_opt &&
1214       !FLAG_prepare_always_opt &&
1215       !pretenure &&
1216       scope()->is_function_scope() &&
1217       info->num_literals() == 0) {
1218     FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1219     __ Mov(x2, Operand(info));
1220     __ CallStub(&stub);
1221   } else {
1222     __ Push(info);
1223     __ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
1224                              : Runtime::kNewClosure);
1225   }
1226   context()->Plug(x0);
1227 }
1228 
1229 
EmitSetHomeObject(Expression * initializer,int offset,FeedbackVectorSlot slot)1230 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1231                                           FeedbackVectorSlot slot) {
1232   DCHECK(NeedsHomeObject(initializer));
1233   __ Peek(StoreDescriptor::ReceiverRegister(), 0);
1234   __ Mov(StoreDescriptor::NameRegister(),
1235          Operand(isolate()->factory()->home_object_symbol()));
1236   __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
1237   EmitLoadStoreICSlot(slot);
1238   CallStoreIC();
1239 }
1240 
1241 
EmitSetHomeObjectAccumulator(Expression * initializer,int offset,FeedbackVectorSlot slot)1242 void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1243                                                      int offset,
1244                                                      FeedbackVectorSlot slot) {
1245   DCHECK(NeedsHomeObject(initializer));
1246   __ Move(StoreDescriptor::ReceiverRegister(), x0);
1247   __ Mov(StoreDescriptor::NameRegister(),
1248          Operand(isolate()->factory()->home_object_symbol()));
1249   __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
1250   EmitLoadStoreICSlot(slot);
1251   CallStoreIC();
1252 }
1253 
1254 
EmitLoadGlobalCheckExtensions(VariableProxy * proxy,TypeofMode typeof_mode,Label * slow)1255 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1256                                                       TypeofMode typeof_mode,
1257                                                       Label* slow) {
1258   Register current = cp;
1259   Register next = x10;
1260   Register temp = x11;
1261 
1262   Scope* s = scope();
1263   while (s != NULL) {
1264     if (s->num_heap_slots() > 0) {
1265       if (s->calls_sloppy_eval()) {
1266         // Check that extension is "the hole".
1267         __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1268         __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1269       }
1270       // Load next context in chain.
1271       __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1272       // Walk the rest of the chain without clobbering cp.
1273       current = next;
1274     }
1275     // If no outer scope calls eval, we do not need to check more
1276     // context extensions.
1277     if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1278     s = s->outer_scope();
1279   }
1280 
1281   if (s->is_eval_scope()) {
1282     Label loop, fast;
1283     __ Mov(next, current);
1284 
1285     __ Bind(&loop);
1286     // Terminate at native context.
1287     __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1288     __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
1289     // Check that extension is "the hole".
1290     __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
1291     __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1292     // Load next context in chain.
1293     __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
1294     __ B(&loop);
1295     __ Bind(&fast);
1296   }
1297 
1298   // All extension objects were empty and it is safe to use a normal global
1299   // load machinery.
1300   EmitGlobalVariableLoad(proxy, typeof_mode);
1301 }
1302 
1303 
ContextSlotOperandCheckExtensions(Variable * var,Label * slow)1304 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1305                                                                 Label* slow) {
1306   DCHECK(var->IsContextSlot());
1307   Register context = cp;
1308   Register next = x10;
1309   Register temp = x11;
1310 
1311   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1312     if (s->num_heap_slots() > 0) {
1313       if (s->calls_sloppy_eval()) {
1314         // Check that extension is "the hole".
1315         __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1316         __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1317       }
1318       __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1319       // Walk the rest of the chain without clobbering cp.
1320       context = next;
1321     }
1322   }
1323   // Check that last extension is "the hole".
1324   __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1325   __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1326 
1327   // This function is used only for loads, not stores, so it's safe to
1328   // return an cp-based operand (the write barrier cannot be allowed to
1329   // destroy the cp register).
1330   return ContextMemOperand(context, var->index());
1331 }
1332 
1333 
EmitDynamicLookupFastCase(VariableProxy * proxy,TypeofMode typeof_mode,Label * slow,Label * done)1334 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1335                                                   TypeofMode typeof_mode,
1336                                                   Label* slow, Label* done) {
1337   // Generate fast-case code for variables that might be shadowed by
1338   // eval-introduced variables.  Eval is used a lot without
1339   // introducing variables.  In those cases, we do not want to
1340   // perform a runtime call for all variables in the scope
1341   // containing the eval.
1342   Variable* var = proxy->var();
1343   if (var->mode() == DYNAMIC_GLOBAL) {
1344     EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1345     __ B(done);
1346   } else if (var->mode() == DYNAMIC_LOCAL) {
1347     Variable* local = var->local_if_not_shadowed();
1348     __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
1349     if (local->mode() == LET || local->mode() == CONST ||
1350         local->mode() == CONST_LEGACY) {
1351       __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
1352       if (local->mode() == CONST_LEGACY) {
1353         __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1354       } else {  // LET || CONST
1355         __ Mov(x0, Operand(var->name()));
1356         __ Push(x0);
1357         __ CallRuntime(Runtime::kThrowReferenceError);
1358       }
1359     }
1360     __ B(done);
1361   }
1362 }
1363 
1364 
EmitGlobalVariableLoad(VariableProxy * proxy,TypeofMode typeof_mode)1365 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1366                                                TypeofMode typeof_mode) {
1367   Variable* var = proxy->var();
1368   DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1369          (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1370   __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
1371   __ Mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1372   __ Mov(LoadDescriptor::SlotRegister(),
1373          SmiFromSlot(proxy->VariableFeedbackSlot()));
1374   CallLoadIC(typeof_mode);
1375 }
1376 
1377 
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(x0);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (NeedsHoleCheckForLoad(proxy)) {
        // Let and const need a read barrier.
        GetVar(x0, var);
        Label done;
        __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          __ Mov(x0, Operand(var->name()));
          __ Push(x0);
          __ CallRuntime(Runtime::kThrowReferenceError);
          __ Bind(&done);
        } else {
          // Uninitialized legacy const bindings are unholed.
          DCHECK(var->mode() == CONST_LEGACY);
          __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
          __ Bind(&done);
        }
        context()->Plug(x0);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed by
      // eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ Bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ Mov(x1, Operand(var->name()));
      __ Push(cp, x1);  // Context and name.
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotNoReferenceError;
      __ CallRuntime(function_id);
      __ Bind(&done);
      context()->Plug(x0);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(expr->pattern()));
  __ Mov(x0, Smi::FromInt(expr->flags()));
  FastCloneRegExpStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    __ LoadRoot(x10, Heap::kNullValueRootIndex);
    __ Push(x10);
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}


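// Illustrative example (hypothetical source): an object literal is compiled
// in two phases. For
//
//   var o = {a: 1, get b() { return 2; }, [k]: 3};
//
// 'a' and the accessor 'b' belong to the "static" part handled by the first
// loop below; everything from the first computed name '[k]' onwards is the
// "dynamic" part handled by the second loop.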
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(constant_properties));
  int flags = expr->ComputeFlags();
  __ Mov(x0, Smi::FromInt(flags));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in x0.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      __ Push(x0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(x0));
            __ Mov(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ Peek(StoreDescriptor::ReceiverRegister(), 0);
            EmitLoadStoreICSlot(property->GetSlot(0));
            CallStoreIC();
            PrepareForBailoutForId(key->id(), NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        __ Peek(x0, 0);
        __ Push(x0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          __ Mov(x0, Smi::FromInt(SLOPPY));  // Language mode.
          __ Push(x0);
          __ CallRuntime(Runtime::kSetProperty);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        DCHECK(property->emit_store());
        // Duplicate receiver on stack.
        __ Peek(x0, 0);
        __ Push(x0);
        VisitForStackValue(value);
        __ CallRuntime(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                               NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime
  // for each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ Peek(x10, 0);  // Duplicate receiver.
    __ Push(x10);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ Mov(x10, Smi::FromInt(NONE));
    __ Push(x10);
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name and continues with all
  // properties to its right.  All the code from above initializes the static
  // component of the object literal and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This preserves insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      __ Push(x0);  // Save result on stack.
      result_saved = true;
    }

    __ Peek(x10, 0);  // Duplicate receiver.
    __ Push(x10);

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      __ CallRuntime(Runtime::kInternalSetPrototype);
      PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                             NO_REGISTERS);
    } else {
      EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
      VisitForStackValue(value);
      if (NeedsHomeObject(value)) {
        EmitSetHomeObject(value, 2, property->GetSlot());
      }

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            __ Mov(x0, Smi::FromInt(NONE));
            __ Push(x0);
            __ CallRuntime(Runtime::kDefineDataPropertyUnchecked);
          } else {
            __ Drop(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          __ Mov(x0, Smi::FromInt(NONE));
          __ Push(x0);
          __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked);
          break;

        case ObjectLiteral::Property::SETTER:
          __ Mov(x0, Smi::FromInt(NONE));
          __ Push(x0);
          __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked);
          break;
      }
    }
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ Peek(x0, 0);
    __ Push(x0);
    __ CallRuntime(Runtime::kToFastProperties);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }
}


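// Illustrative example (hypothetical source): array literals are split at
// the first spread. For
//
//   var a = [1, x, ...xs, y];
//
// '1' and 'x' are stored into the cloned boilerplate with keyed store ICs,
// while '...xs' and 'y' are appended one element at a time at runtime, since
// an iterable can produce any number of elements.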
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(constant_elements));
  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ Mov(x0, Smi::FromInt(expr->ComputeFlags()));
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    if (subexpr->IsSpread()) break;

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ Push(x0);
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    __ Mov(StoreDescriptor::NameRegister(), Smi::FromInt(array_index));
    __ Peek(StoreDescriptor::ReceiverRegister(), 0);
    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
    Handle<Code> ic =
        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
    CallIC(ic);

    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }

  // In case the array literal contains spread expressions it has two parts.
  // The first part is the "static" array with a literal index; it is handled
  // above. The second part starts at the first spread expression (inclusive),
  // and these elements get appended to the array. Note that the number of
  // elements an iterable produces is unknown ahead of time.
  if (array_index < length && result_saved) {
    __ Pop(x0);
    result_saved = false;
  }
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);

    __ Push(x0);
    if (subexpr->IsSpread()) {
      VisitForStackValue(subexpr->AsSpread()->expression());
      __ InvokeBuiltin(Context::CONCAT_ITERABLE_TO_ARRAY_BUILTIN_INDEX,
                       CALL_FUNCTION);
    } else {
      VisitForStackValue(subexpr);
      __ CallRuntime(Runtime::kAppendElement);
    }

    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }
}


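// Illustrative example (hypothetical source): the LHS kinds handled below
// correspond to forms such as
//
//   v = e;         // VARIABLE
//   o.x = e;       // NAMED_PROPERTY
//   o[i] = e;      // KEYED_PROPERTY
//   super.x = e;   // NAMED_SUPER_PROPERTY
//   super[i] = e;  // KEYED_SUPER_PROPERTY
//
// For compound assignments (e.g. 'o.x += e') the current value is loaded
// first, the binary op is applied, and the result is stored back.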
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");
  SetExpressionPosition(expr, INSERT_BREAK);

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ Peek(LoadDescriptor::ReceiverRegister(), 0);
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      __ Push(result_register());
      if (expr->is_compound()) {
        const Register scratch = x10;
        __ Peek(scratch, kPointerSize);
        __ Push(scratch, result_register());
      }
      break;
    case KEYED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(property->key());
      __ Push(result_register());
      if (expr->is_compound()) {
        const Register scratch1 = x10;
        const Register scratch2 = x11;
        __ Peek(scratch1, 2 * kPointerSize);
        __ Peek(scratch2, kPointerSize);
        __ Push(scratch1, scratch2, result_register());
      }
      break;
    case KEYED_PROPERTY:
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
        __ Peek(LoadDescriptor::NameRegister(), 0);
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ Push(x0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op(), expr->AssignmentSlot());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(x0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(x0);
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(x0);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetExpressionPosition(prop);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!prop->IsSuperAccess());

  __ Mov(LoadDescriptor::NameRegister(), Operand(key->value()));
  __ Mov(LoadDescriptor::SlotRegister(),
         SmiFromSlot(prop->PropertyFeedbackSlot()));
  CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
}


void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
  // Stack: receiver, home_object.
  SetExpressionPosition(prop);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  DCHECK(prop->IsSuperAccess());

  __ Push(key->value());
  __ Push(Smi::FromInt(language_mode()));
  __ CallRuntime(Runtime::kLoadFromSuper);
}


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetExpressionPosition(prop);
  // Call keyed load IC. It has arguments key and receiver in x0 and x1.
  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
  __ Mov(LoadDescriptor::SlotRegister(),
         SmiFromSlot(prop->PropertyFeedbackSlot()));
  CallIC(ic);
}


void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
  // Stack: receiver, home_object, key.
  SetExpressionPosition(prop);
  __ Push(Smi::FromInt(language_mode()));
  __ CallRuntime(Runtime::kLoadKeyedFromSuper);
}


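// Note on the smi fast paths below: on arm64 a smi is a 64-bit word with the
// 32-bit payload in the upper half (kSmiShift == 32, kSmiTag == 0), so for
// smis a and b:
//
//   tag(a) + tag(b) == tag(a + b)   // Adds/Subs work on tagged values
//   tag(a) & tag(b) == tag(a & b)   // as do the bitwise ops
//
// which is why most cases operate on the tagged values directly and bail to
// the stub only on overflow or when the result would not be a valid smi.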
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, both_smis, stub_call;

  // Get the arguments.
  Register left = x1;
  Register right = x0;
  Register result = x0;
  __ Pop(left);

  // Perform combined smi check on both operands.
  __ Orr(x10, left, right);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(x10, &both_smis);

  __ Bind(&stub_call);

  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
  {
    Assembler::BlockPoolsScope scope(masm_);
    CallIC(code, expr->BinaryOperationFeedbackId());
    patch_site.EmitPatchInfo();
  }
  __ B(&done);

  __ Bind(&both_smis);
  // Smi case. This code works in the same way as the smi-smi case in the type
  // recording binary operation stub, see
  // BinaryOpStub::GenerateSmiSmiOperation for comments.
  // TODO(all): That doesn't exist any more. Where are the comments?
  //
  // The set of operations that needs to be supported here is controlled by
  // FullCodeGenerator::ShouldInlineSmiCase().
  switch (op) {
    case Token::SAR:
      __ Ubfx(right, right, kSmiShift, 5);
      __ Asr(result, left, right);
      __ Bic(result, result, kSmiShiftMask);
      break;
    case Token::SHL:
      __ Ubfx(right, right, kSmiShift, 5);
      __ Lsl(result, left, right);
      break;
    case Token::SHR:
      // If `left >>> right` >= 0x80000000, the result is not representable in
      // a signed 32-bit smi.
      __ Ubfx(right, right, kSmiShift, 5);
      __ Lsr(x10, left, right);
      __ Tbnz(x10, kXSignBit, &stub_call);
      __ Bic(result, x10, kSmiShiftMask);
      break;
    case Token::ADD:
      __ Adds(x10, left, right);
      __ B(vs, &stub_call);
      __ Mov(result, x10);
      break;
    case Token::SUB:
      __ Subs(x10, left, right);
      __ B(vs, &stub_call);
      __ Mov(result, x10);
      break;
    case Token::MUL: {
      Label not_minus_zero, done;
      STATIC_ASSERT(static_cast<unsigned>(kSmiShift) == (kXRegSizeInBits / 2));
      STATIC_ASSERT(kSmiTag == 0);
      __ Smulh(x10, left, right);
      __ Cbnz(x10, &not_minus_zero);
      __ Eor(x11, left, right);
      __ Tbnz(x11, kXSignBit, &stub_call);
      __ Mov(result, x10);
      __ B(&done);
      __ Bind(&not_minus_zero);
      __ Cls(x11, x10);
      __ Cmp(x11, kXRegSizeInBits - kSmiShift);
      __ B(lt, &stub_call);
      __ SmiTag(result, x10);
      __ Bind(&done);
      break;
    }
    case Token::BIT_OR:
      __ Orr(result, left, right);
      break;
    case Token::BIT_AND:
      __ And(result, left, right);
      break;
    case Token::BIT_XOR:
      __ Eor(result, left, right);
      break;
    default:
      UNREACHABLE();
  }

  __ Bind(&done);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  __ Pop(x1);
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
  JumpPatchSite patch_site(masm_);    // Unbound, signals no inlined smi code.
  {
    Assembler::BlockPoolsScope scope(masm_);
    CallIC(code, expr->BinaryOperationFeedbackId());
    patch_site.EmitPatchInfo();
  }
  context()->Plug(x0);
}


void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
  // Constructor is in x0.
  DCHECK(lit != NULL);
  __ push(x0);

  // No access check is needed here since the constructor is created by the
  // class literal.
  Register scratch = x1;
  __ Ldr(scratch,
         FieldMemOperand(x0, JSFunction::kPrototypeOrInitialMapOffset));
  __ Push(scratch);

  for (int i = 0; i < lit->properties()->length(); i++) {
    ObjectLiteral::Property* property = lit->properties()->at(i);
    Expression* value = property->value();

    if (property->is_static()) {
      __ Peek(scratch, kPointerSize);  // constructor
    } else {
      __ Peek(scratch, 0);  // prototype
    }
    __ Push(scratch);
    EmitPropertyKey(property, lit->GetIdForProperty(i));

    // The static prototype property is read only. We handle the non-computed
    // property name case in the parser. Since this is the only case where we
    // need to check for an own read-only property, we special-case it here so
    // we do not need to do the check for every property.
    if (property->is_static() && property->is_computed_name()) {
      __ CallRuntime(Runtime::kThrowIfStaticPrototype);
      __ Push(x0);
    }

    VisitForStackValue(value);
    if (NeedsHomeObject(value)) {
      EmitSetHomeObject(value, 2, property->GetSlot());
    }

    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();
      case ObjectLiteral::Property::COMPUTED:
        __ CallRuntime(Runtime::kDefineClassMethod);
        break;

      case ObjectLiteral::Property::GETTER:
        __ Mov(x0, Smi::FromInt(DONT_ENUM));
        __ Push(x0);
        __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked);
        break;

      case ObjectLiteral::Property::SETTER:
        __ Mov(x0, Smi::FromInt(DONT_ENUM));
        __ Push(x0);
        __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked);
        break;

      default:
        UNREACHABLE();
    }
  }

  // Set both the prototype and constructor to have fast properties, and also
  // freeze them in strong mode.
  __ CallRuntime(Runtime::kFinalizeClassDefinition);
}


void FullCodeGenerator::EmitAssignment(Expression* expr,
                                       FeedbackVectorSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN, slot);
      break;
    }
    case NAMED_PROPERTY: {
      __ Push(x0);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
      // this copy.
      __ Mov(StoreDescriptor::ReceiverRegister(), x0);
      __ Pop(StoreDescriptor::ValueRegister());  // Restore value.
      __ Mov(StoreDescriptor::NameRegister(),
             Operand(prop->key()->AsLiteral()->value()));
      EmitLoadStoreICSlot(slot);
      CallStoreIC();
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      __ Push(x0);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      // stack: value, this; x0: home_object
      Register scratch = x10;
      Register scratch2 = x11;
      __ mov(scratch, result_register());  // home_object
      __ Peek(x0, kPointerSize);           // value
      __ Peek(scratch2, 0);                // this
      __ Poke(scratch2, kPointerSize);     // this
      __ Poke(scratch, 0);                 // home_object
      // stack: this, home_object; x0: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      __ Push(x0);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(prop->key());
      Register scratch = x10;
      Register scratch2 = x11;
      __ Peek(scratch2, 2 * kPointerSize);  // value
      // stack: value, this, home_object; x0: key, x11: value
      __ Peek(scratch, kPointerSize);  // this
      __ Poke(scratch, 2 * kPointerSize);
      __ Peek(scratch, 0);  // home_object
      __ Poke(scratch, kPointerSize);
      __ Poke(x0, 0);
      __ Move(x0, scratch2);
      // stack: this, home_object, key; x0: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      __ Push(x0);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Mov(StoreDescriptor::NameRegister(), x0);
      __ Pop(StoreDescriptor::ReceiverRegister(),
             StoreDescriptor::ValueRegister());
      EmitLoadStoreICSlot(slot);
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(x0);
}


void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ Str(result_register(), location);
  if (var->IsContextSlot()) {
    // RecordWrite may destroy all its register arguments.
    __ Mov(x10, result_register());
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(
        x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
  }
}


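// Illustrative example (hypothetical source): the assignment cases below
// correspond to forms such as
//
//   g = v;               // unallocated global -> store IC
//   let l; l = v;        // LET, op != INIT    -> hole check, then store
//   const c = v;         // CONST, Token::INIT -> plain store
//   const c = 1; c = v;  // CONST, op != INIT  -> throw
//
// with lookup-slot variables (e.g. targets of sloppy eval or 'with') going
// through Runtime::kStoreLookupSlot instead.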
void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackVectorSlot slot) {
  ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ Mov(StoreDescriptor::NameRegister(), Operand(var->name()));
    __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
    EmitLoadStoreICSlot(slot);
    CallStoreIC();

  } else if (var->mode() == LET && op != Token::INIT) {
    // Non-initializing assignment to let variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, x1);
    __ Ldr(x10, location);
    __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
    __ Mov(x10, Operand(var->name()));
    __ Push(x10);
    __ CallRuntime(Runtime::kThrowReferenceError);
    // Perform the assignment.
    __ Bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (var->mode() == CONST && op != Token::INIT) {
    // Assignment to const variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label const_error;
    MemOperand location = VarOperand(var, x1);
    __ Ldr(x10, location);
    __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &const_error);
    __ Mov(x10, Operand(var->name()));
    __ Push(x10);
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ Bind(&const_error);
    __ CallRuntime(Runtime::kThrowConstAssignError);

  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, x1);
    __ Ldr(x10, location);
    __ JumpIfRoot(x10, Heap::kTheHoleValueRootIndex, &uninitialized_this);
    __ Mov(x0, Operand(var->name()));
    __ Push(x0);
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() ||
             (var->mode() == CONST && op == Token::INIT)) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ Mov(x11, Operand(var->name()));
      __ Mov(x10, Smi::FromInt(language_mode()));
      // jssp[0]  : mode.
      // jssp[8]  : name.
      // jssp[16] : context.
      // jssp[24] : value.
      __ Push(x0, cp, x11, x10);
      __ CallRuntime(Runtime::kStoreLookupSlot);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      MemOperand location = VarOperand(var, x1);
      if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
        __ Ldr(x10, location);
        __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }

  } else if (var->mode() == CONST_LEGACY && op == Token::INIT) {
    // Const initializers need a write barrier.
    DCHECK(!var->IsParameter());  // No const parameters.
    if (var->IsLookupSlot()) {
      __ Mov(x1, Operand(var->name()));
      __ Push(x0, cp, x1);
      __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot);
    } else {
      DCHECK(var->IsStackLocal() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, x1);
      __ Ldr(x10, location);
      __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ Bind(&skip);
    }

  } else {
    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
    if (is_strict(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError);
    }
    // Silently ignore the store in sloppy mode.
  }
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  __ Mov(StoreDescriptor::NameRegister(),
         Operand(prop->key()->AsLiteral()->value()));
  __ Pop(StoreDescriptor::ReceiverRegister());
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallStoreIC();

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // x0 : value
  // stack : receiver ('this'), home_object
  DCHECK(prop != NULL);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(key != NULL);

  __ Push(key->value());
  __ Push(x0);
  __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
                                             : Runtime::kStoreToSuper_Sloppy));
}


void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
  // x0 : value
  // stack : receiver ('this'), home_object, key
  DCHECK(prop != NULL);

  __ Push(x0);
  __ CallRuntime((is_strict(language_mode())
                      ? Runtime::kStoreKeyedToSuper_Strict
                      : Runtime::kStoreKeyedToSuper_Sloppy));
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
  // Assignment to a property, using a keyed store IC.

  // TODO(all): Could we pass this in registers rather than on the stack?
  __ Pop(StoreDescriptor::NameRegister(), StoreDescriptor::ReceiverRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(x0));

  Handle<Code> ic =
      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallIC(ic);

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(x0);
}


void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  SetExpressionPosition(expr);
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    if (!expr->IsSuperAccess()) {
      VisitForAccumulatorValue(expr->obj());
      __ Move(LoadDescriptor::ReceiverRegister(), x0);
      EmitNamedPropertyLoad(expr);
    } else {
      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          expr->obj()->AsSuperPropertyReference()->home_object());
      EmitNamedSuperPropertyLoad(expr);
    }
  } else {
    if (!expr->IsSuperAccess()) {
      VisitForStackValue(expr->obj());
      VisitForAccumulatorValue(expr->key());
      __ Move(LoadDescriptor::NameRegister(), x0);
      __ Pop(LoadDescriptor::ReceiverRegister());
      EmitKeyedPropertyLoad(expr);
    } else {
      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          expr->obj()->AsSuperPropertyReference()->home_object());
      VisitForStackValue(expr->key());
      EmitKeyedSuperPropertyLoad(expr);
    }
  }
  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
  context()->Plug(x0);
}


void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId ast_id) {
  ic_total_count_++;
  // All calls must have a predictable size in full-codegen code to ensure
  // that the debugger can patch them correctly.
  __ Call(code, RelocInfo::CODE_TARGET, ast_id);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitCallWithLoadIC");
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if
    // it is a sloppy mode method.
    {
      UseScratchRegisterScope temps(masm_);
      Register temp = temps.AcquireX();
      __ LoadRoot(temp, Heap::kUndefinedValueRootIndex);
      __ Push(temp);
    }
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ Peek(LoadDescriptor::ReceiverRegister(), 0);
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    __ Pop(x10);
    __ Push(x0, x10);
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}


void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitSuperCallWithLoadIC");
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());
  SetExpressionPosition(prop);

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());

  // Load the function from the receiver.
  const Register scratch = x10;
  SuperPropertyReference* super_ref =
      callee->AsProperty()->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  __ Push(x0);
  __ Peek(scratch, kPointerSize);
  __ Push(x0, scratch);
  __ Push(key->value());
  __ Push(Smi::FromInt(language_mode()));

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - language_mode
  __ CallRuntime(Runtime::kLoadFromSuper);

  // Replace home_object with target function.
  __ Poke(x0, kPointerSize);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  ASM_LOCATION("FullCodeGenerator::EmitKeyedCallWithLoadIC");
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ Peek(LoadDescriptor::ReceiverRegister(), 0);
  __ Move(LoadDescriptor::NameRegister(), x0);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  __ Pop(x10);
  __ Push(x0, x10);

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}


void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitKeyedSuperCallWithLoadIC");
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());
  SetExpressionPosition(prop);

  // Load the function from the receiver.
  const Register scratch = x10;
  SuperPropertyReference* super_ref =
      callee->AsProperty()->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  __ Push(x0);
  __ Peek(scratch, kPointerSize);
  __ Push(x0, scratch);
  VisitForStackValue(prop->key());
  __ Push(Smi::FromInt(language_mode()));

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  //  - language_mode
  __ CallRuntime(Runtime::kLoadKeyedFromSuper);

  // Replace home_object with target function.
  __ Poke(x0, kPointerSize);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  ASM_LOCATION("FullCodeGenerator::EmitCall");
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
  SetCallPosition(expr);

  Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, mode).code();
  __ Mov(x3, SmiFromSlot(expr->CallFeedbackICSlot()));
  __ Peek(x1, (arg_count + 1) * kXRegSize);
  // Don't assign a type feedback id to the IC, since type feedback is
  // provided by the vector above.
  CallIC(ic);

  RecordJSReturnSite(expr);
  // Restore context register.
  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, x0);
}


void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
  // Prepare to push a copy of the first argument or undefined if it doesn't
  // exist.
  if (arg_count > 0) {
    __ Peek(x9, arg_count * kXRegSize);
  } else {
    __ LoadRoot(x9, Heap::kUndefinedValueRootIndex);
  }

  __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  // Prepare to push the language mode.
  __ Mov(x11, Smi::FromInt(language_mode()));
  // Prepare to push the start position of the scope the call resides in.
  __ Mov(x12, Smi::FromInt(scope()->start_position()));

  // Push.
  __ Push(x9, x10, x11, x12);

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
}


// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;
    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);

    __ Bind(&slow);
    // Call the runtime to find the function to call (returned in x0)
    // and the object holding it (returned in x1).
    __ Mov(x10, Operand(callee->name()));
    __ Push(context_register(), x10);
    __ CallRuntime(Runtime::kLoadLookupSlot);
    __ Push(x0, x1);  // Function, receiver.
    PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ B(&call);
      __ Bind(&done);
      // Push function.
      // The receiver is implicitly the global receiver. Indicate this
      // by passing undefined to the call function stub.
      __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
      __ Push(x0, x1);
      __ Bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
    __ Push(x10);  // Reserved receiver slot.
  }
}


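// Illustrative example (hypothetical source): a possibly-direct eval call
// such as
//
//   function f(s) { return eval(s); }
//
// is compiled below: the callee and a receiver slot are pushed, the
// arguments follow, and kResolvePossiblyDirectEval decides at runtime
// whether this really is a direct call to the global 'eval' and which
// function to invoke.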
void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitPossiblyEvalCall");
  // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
  // to resolve the function we need to call.  Then we call the resolved
  // function using the given arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  PushCalleeAndWithBaseObject(expr);

  // Push the arguments.
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Push a copy of the function (found below the arguments) and
  // resolve eval.
  __ Peek(x10, (arg_count + 1) * kPointerSize);
  __ Push(x10);
  EmitResolvePossiblyDirectEval(arg_count);

  // Touch up the stack with the resolved function.
  __ Poke(x0, (arg_count + 1) * kPointerSize);

  PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);

  // Record source position for debugger.
  SetCallPosition(expr);

  // Call the evaluated function.
  __ Peek(x1, (arg_count + 1) * kXRegSize);
  __ Mov(x0, arg_count);
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  RecordJSReturnSite(expr);
  // Restore context register.
  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, x0);
}


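// Illustrative example (hypothetical source): for 'new F(a, b)' the code
// below pushes F and both arguments, loads the argument count into x0 and
// the constructor into x1, and lets CallConstructStub handle allocation and
// invocation while recording call targets for optimization.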
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into x1 and x0.
  __ Mov(x0, arg_count);
  __ Peek(x1, arg_count * kXRegSize);

  // Record call targets in unoptimized code.
  __ EmitLoadTypeFeedbackVector(x2);
  __ Mov(x3, SmiFromSlot(expr->CallNewFeedbackSlot()));

  CallConstructStub stub(isolate());
  __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  // Restore context register.
  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->Plug(x0);
}


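// Illustrative example (hypothetical source): this path compiles calls like
//
//   class B extends A {
//     constructor() { super(1, 2); }
//   }
//
// The super constructor A is the [[Prototype]] of the active function, read
// through its map; new.target is loaded into x3, and the Construct builtin
// does the rest.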
EmitSuperConstructorCall(Call * expr)2756 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2757   ASM_LOCATION("FullCodeGenerator::EmitSuperConstructorCall");
2758   SuperCallReference* super_call_ref =
2759       expr->expression()->AsSuperCallReference();
2760   DCHECK_NOT_NULL(super_call_ref);
2761 
2762   // Push the super constructor target on the stack (may be null,
2763   // but the Construct builtin can deal with that properly).
2764   VisitForAccumulatorValue(super_call_ref->this_function_var());
2765   __ AssertFunction(result_register());
2766   __ Ldr(result_register(),
2767          FieldMemOperand(result_register(), HeapObject::kMapOffset));
2768   __ Ldr(result_register(),
2769          FieldMemOperand(result_register(), Map::kPrototypeOffset));
2770   __ Push(result_register());
2771 
2772   // Push the arguments ("left-to-right") on the stack.
2773   ZoneList<Expression*>* args = expr->arguments();
2774   int arg_count = args->length();
2775   for (int i = 0; i < arg_count; i++) {
2776     VisitForStackValue(args->at(i));
2777   }
2778 
2779   // Call the construct call builtin that handles allocation and
2780   // constructor invocation.
2781   SetConstructCallPosition(expr);
2782 
2783   // Load new target into x3.
2784   VisitForAccumulatorValue(super_call_ref->new_target_var());
2785   __ Mov(x3, result_register());
2786 
2787   // Load function and argument count into x1 and x0.
2788   __ Mov(x0, arg_count);
2789   __ Peek(x1, arg_count * kXRegSize);
2790 
2791   __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2792 
2793   RecordJSReturnSite(expr);
2794 
2795   // Restore context register.
2796   __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2797   context()->Plug(x0);
2798 }
2799 
2800 
EmitIsSmi(CallRuntime * expr)2801 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2802   ZoneList<Expression*>* args = expr->arguments();
2803   DCHECK(args->length() == 1);
2804 
2805   VisitForAccumulatorValue(args->at(0));
2806 
2807   Label materialize_true, materialize_false;
2808   Label* if_true = NULL;
2809   Label* if_false = NULL;
2810   Label* fall_through = NULL;
2811   context()->PrepareTest(&materialize_true, &materialize_false,
2812                          &if_true, &if_false, &fall_through);
2813 
2814   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2815   __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
2816 
2817   context()->Plug(if_true, if_false);
2818 }
2819 
2820 
EmitIsJSReceiver(CallRuntime * expr)2821 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2822   ZoneList<Expression*>* args = expr->arguments();
2823   DCHECK(args->length() == 1);
2824 
2825   VisitForAccumulatorValue(args->at(0));
2826 
2827   Label materialize_true, materialize_false;
2828   Label* if_true = NULL;
2829   Label* if_false = NULL;
2830   Label* fall_through = NULL;
2831   context()->PrepareTest(&materialize_true, &materialize_false,
2832                          &if_true, &if_false, &fall_through);
2833 
2834   __ JumpIfSmi(x0, if_false);
2835   __ CompareObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE);
2836   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2837   Split(ge, if_true, if_false, fall_through);
2838 
2839   context()->Plug(if_true, if_false);
2840 }
2841 
2842 
2843 void FullCodeGenerator::EmitIsSimdValue(CallRuntime* expr) {
2844   ZoneList<Expression*>* args = expr->arguments();
2845   DCHECK(args->length() == 1);
2846 
2847   VisitForAccumulatorValue(args->at(0));
2848 
2849   Label materialize_true, materialize_false;
2850   Label* if_true = NULL;
2851   Label* if_false = NULL;
2852   Label* fall_through = NULL;
2853   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2854                          &if_false, &fall_through);
2855 
2856   __ JumpIfSmi(x0, if_false);
2857   __ CompareObjectType(x0, x10, x11, SIMD128_VALUE_TYPE);
2858   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2859   Split(eq, if_true, if_false, fall_through);
2860 
2861   context()->Plug(if_true, if_false);
2862 }
2863 
2864 
2865 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2866   ZoneList<Expression*>* args = expr->arguments();
2867   DCHECK(args->length() == 1);
2868 
2869   VisitForAccumulatorValue(args->at(0));
2870 
2871   Label materialize_true, materialize_false;
2872   Label* if_true = NULL;
2873   Label* if_false = NULL;
2874   Label* fall_through = NULL;
2875   context()->PrepareTest(&materialize_true, &materialize_false,
2876                          &if_true, &if_false, &fall_through);
2877 
2878   __ JumpIfSmi(x0, if_false);
2879   __ CompareObjectType(x0, x10, x11, FIRST_FUNCTION_TYPE);
2880   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2881   Split(hs, if_true, if_false, fall_through);
2882 
2883   context()->Plug(if_true, if_false);
2884 }
2885 
2886 
2887 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
2888   ZoneList<Expression*>* args = expr->arguments();
2889   DCHECK(args->length() == 1);
2890 
2891   VisitForAccumulatorValue(args->at(0));
2892 
2893   Label materialize_true, materialize_false;
2894   Label* if_true = NULL;
2895   Label* if_false = NULL;
2896   Label* fall_through = NULL;
2897   context()->PrepareTest(&materialize_true, &materialize_false,
2898                          &if_true, &if_false, &fall_through);
2899 
2900   // Only a HeapNumber can be -0.0, so return false if we have something else.
2901   __ JumpIfNotHeapNumber(x0, if_false, DO_SMI_CHECK);
2902 
2903   // Test the bit pattern.
2904   __ Ldr(x10, FieldMemOperand(x0, HeapNumber::kValueOffset));
2905   __ Cmp(x10, 1);   // Set V on 0x8000000000000000.
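  // Only -0.0 has the bit pattern 0x8000000000000000 (INT64_MIN), and
  // computing x10 - 1 overflows for exactly that value, so the V flag
  // identifies -0.0.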
2906 
2907   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2908   Split(vs, if_true, if_false, fall_through);
2909 
2910   context()->Plug(if_true, if_false);
2911 }
2912 
2913 
2914 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2915   ZoneList<Expression*>* args = expr->arguments();
2916   DCHECK(args->length() == 1);
2917 
2918   VisitForAccumulatorValue(args->at(0));
2919 
2920   Label materialize_true, materialize_false;
2921   Label* if_true = NULL;
2922   Label* if_false = NULL;
2923   Label* fall_through = NULL;
2924   context()->PrepareTest(&materialize_true, &materialize_false,
2925                          &if_true, &if_false, &fall_through);
2926 
2927   __ JumpIfSmi(x0, if_false);
2928   __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
2929   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2930   Split(eq, if_true, if_false, fall_through);
2931 
2932   context()->Plug(if_true, if_false);
2933 }
2934 
2935 
2936 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2937   ZoneList<Expression*>* args = expr->arguments();
2938   DCHECK(args->length() == 1);
2939 
2940   VisitForAccumulatorValue(args->at(0));
2941 
2942   Label materialize_true, materialize_false;
2943   Label* if_true = NULL;
2944   Label* if_false = NULL;
2945   Label* fall_through = NULL;
2946   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2947                          &if_false, &fall_through);
2948 
2949   __ JumpIfSmi(x0, if_false);
2950   __ CompareObjectType(x0, x10, x11, JS_TYPED_ARRAY_TYPE);
2951   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2952   Split(eq, if_true, if_false, fall_through);
2953 
2954   context()->Plug(if_true, if_false);
2955 }
2956 
2957 
2958 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2959   ZoneList<Expression*>* args = expr->arguments();
2960   DCHECK(args->length() == 1);
2961 
2962   VisitForAccumulatorValue(args->at(0));
2963 
2964   Label materialize_true, materialize_false;
2965   Label* if_true = NULL;
2966   Label* if_false = NULL;
2967   Label* fall_through = NULL;
2968   context()->PrepareTest(&materialize_true, &materialize_false,
2969                          &if_true, &if_false, &fall_through);
2970 
2971   __ JumpIfSmi(x0, if_false);
2972   __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
2973   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2974   Split(eq, if_true, if_false, fall_through);
2975 
2976   context()->Plug(if_true, if_false);
2977 }
2978 
2979 
2980 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2981   ZoneList<Expression*>* args = expr->arguments();
2982   DCHECK(args->length() == 1);
2983 
2984   VisitForAccumulatorValue(args->at(0));
2985 
2986   Label materialize_true, materialize_false;
2987   Label* if_true = NULL;
2988   Label* if_false = NULL;
2989   Label* fall_through = NULL;
2990   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2991                          &if_false, &fall_through);
2992 
2993   __ JumpIfSmi(x0, if_false);
2994   __ CompareObjectType(x0, x10, x11, JS_PROXY_TYPE);
2995   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2996   Split(eq, if_true, if_false, fall_through);
2997 
2998   context()->Plug(if_true, if_false);
2999 }
3000 
3001 
3002 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3003   ZoneList<Expression*>* args = expr->arguments();
3004   DCHECK(args->length() == 2);
3005 
3006   // Load the two objects into registers and perform the comparison.
3007   VisitForStackValue(args->at(0));
3008   VisitForAccumulatorValue(args->at(1));
3009 
3010   Label materialize_true, materialize_false;
3011   Label* if_true = NULL;
3012   Label* if_false = NULL;
3013   Label* fall_through = NULL;
3014   context()->PrepareTest(&materialize_true, &materialize_false,
3015                          &if_true, &if_false, &fall_through);
3016 
3017   __ Pop(x1);
3018   __ Cmp(x0, x1);
3019   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3020   Split(eq, if_true, if_false, fall_through);
3021 
3022   context()->Plug(if_true, if_false);
3023 }
3024 
3025 
3026 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3027   ZoneList<Expression*>* args = expr->arguments();
3028   DCHECK(args->length() == 1);
3029 
3030   // ArgumentsAccessStub expects the key in x1.
3031   VisitForAccumulatorValue(args->at(0));
3032   __ Mov(x1, x0);
3033   __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
3034   ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3035   __ CallStub(&stub);
3036   context()->Plug(x0);
3037 }
3038 
3039 
3040 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3041   DCHECK(expr->arguments()->length() == 0);
3042   Label exit;
3043   // Get the number of formal parameters.
3044   __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
3045 
3046   // Check if the calling frame is an arguments adaptor frame.
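  // An adaptor frame is inserted when the actual argument count differs
  // from the formal parameter count; it is recognized by the sentinel Smi
  // stored in its context slot.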
3047   __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3048   __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset));
3049   __ Cmp(x13, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3050   __ B(ne, &exit);
3051 
3052   // Arguments adaptor case: Read the arguments length from the
3053   // adaptor frame.
3054   __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset));
3055 
3056   __ Bind(&exit);
3057   context()->Plug(x0);
3058 }
3059 
3060 
3061 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3062   ASM_LOCATION("FullCodeGenerator::EmitClassOf");
3063   ZoneList<Expression*>* args = expr->arguments();
3064   DCHECK(args->length() == 1);
3065   Label done, null, function, non_function_constructor;
3066 
3067   VisitForAccumulatorValue(args->at(0));
3068 
3069   // If the object is not a JSReceiver, we return null.
3070   __ JumpIfSmi(x0, &null);
3071   STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3072   __ CompareObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE);
3073   // x10: object's map.
3074   // x11: object's type.
3075   __ B(lt, &null);
3076 
3077   // Return 'Function' for JSFunction objects.
3078   __ Cmp(x11, JS_FUNCTION_TYPE);
3079   __ B(eq, &function);
3080 
3081   // Check if the constructor in the map is a JS function.
3082   Register instance_type = x14;
3083   __ GetMapConstructor(x12, x10, x13, instance_type);
3084   __ Cmp(instance_type, JS_FUNCTION_TYPE);
3085   __ B(ne, &non_function_constructor);
3086 
3087   // x12 now contains the constructor function. Grab the
3088   // instance class name from there.
3089   __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
3090   __ Ldr(x0,
3091          FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
3092   __ B(&done);
3093 
3094   // Functions have class 'Function'.
3095   __ Bind(&function);
3096   __ LoadRoot(x0, Heap::kFunction_stringRootIndex);
3097   __ B(&done);
3098 
3099   // Objects with a non-function constructor have class 'Object'.
3100   __ Bind(&non_function_constructor);
3101   __ LoadRoot(x0, Heap::kObject_stringRootIndex);
3102   __ B(&done);
3103 
3104   // Non-JS objects have class null.
3105   __ Bind(&null);
3106   __ LoadRoot(x0, Heap::kNullValueRootIndex);
3107 
3108   // All done.
3109   __ Bind(&done);
3110 
3111   context()->Plug(x0);
3112 }
3113 
3114 
3115 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3116   ASM_LOCATION("FullCodeGenerator::EmitValueOf");
3117   ZoneList<Expression*>* args = expr->arguments();
3118   DCHECK(args->length() == 1);
3119   VisitForAccumulatorValue(args->at(0));  // Load the object.
3120 
3121   Label done;
3122   // If the object is a smi, return the object.
3123   __ JumpIfSmi(x0, &done);
3124   // If the object is not a value type, return the object.
3125   __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
3126   __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));
3127 
3128   __ Bind(&done);
3129   context()->Plug(x0);
3130 }
3131 
3132 
3133 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3134   ZoneList<Expression*>* args = expr->arguments();
3135   DCHECK_EQ(1, args->length());
3136 
3137   VisitForAccumulatorValue(args->at(0));
3138 
3139   Label materialize_true, materialize_false;
3140   Label* if_true = nullptr;
3141   Label* if_false = nullptr;
3142   Label* fall_through = nullptr;
3143   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3144                          &if_false, &fall_through);
3145 
3146   __ JumpIfSmi(x0, if_false);
3147   __ CompareObjectType(x0, x10, x11, JS_DATE_TYPE);
3148   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3149   Split(eq, if_true, if_false, fall_through);
3150 
3151   context()->Plug(if_true, if_false);
3152 }
3153 
3154 
3155 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3156   ZoneList<Expression*>* args = expr->arguments();
3157   DCHECK_EQ(3, args->length());
3158 
3159   Register string = x0;
3160   Register index = x1;
3161   Register value = x2;
3162   Register scratch = x10;
3163 
3164   VisitForStackValue(args->at(0));        // index
3165   VisitForStackValue(args->at(1));        // value
3166   VisitForAccumulatorValue(args->at(2));  // string
3167   __ Pop(value, index);
3168 
3169   if (FLAG_debug_code) {
3170     __ AssertSmi(value, kNonSmiValue);
3171     __ AssertSmi(index, kNonSmiIndex);
3172     static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3173     __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3174                                  one_byte_seq_type);
3175   }
3176 
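  // A tagged heap object pointer is the object's address plus
  // kHeapObjectTag, so adding the header size and subtracting the tag gives
  // the address of the first character; the untagged index then serves as a
  // plain byte offset for the store below.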
3177   __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3178   __ SmiUntag(value);
3179   __ SmiUntag(index);
3180   __ Strb(value, MemOperand(scratch, index));
3181   context()->Plug(string);
3182 }
3183 
3184 
3185 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3186   ZoneList<Expression*>* args = expr->arguments();
3187   DCHECK_EQ(3, args->length());
3188 
3189   Register string = x0;
3190   Register index = x1;
3191   Register value = x2;
3192   Register scratch = x10;
3193 
3194   VisitForStackValue(args->at(0));        // index
3195   VisitForStackValue(args->at(1));        // value
3196   VisitForAccumulatorValue(args->at(2));  // string
3197   __ Pop(value, index);
3198 
3199   if (FLAG_debug_code) {
3200     __ AssertSmi(value, kNonSmiValue);
3201     __ AssertSmi(index, kNonSmiIndex);
3202     static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3203     __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3204                                  two_byte_seq_type);
3205   }
3206 
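  // As in the one-byte case, but each character occupies two bytes, so the
  // untagged index is scaled by two (LSL #1) when forming the store address.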
3207   __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
3208   __ SmiUntag(value);
3209   __ SmiUntag(index);
3210   __ Strh(value, MemOperand(scratch, index, LSL, 1));
3211   context()->Plug(string);
3212 }
3213 
3214 
3215 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3216   ZoneList<Expression*>* args = expr->arguments();
3217   DCHECK(args->length() == 2);
3218   VisitForStackValue(args->at(0));  // Load the object.
3219   VisitForAccumulatorValue(args->at(1));  // Load the value.
3220   __ Pop(x1);
3221   // x0 = value.
3222   // x1 = object.
3223 
3224   Label done;
3225   // If the object is a smi, return the value.
3226   __ JumpIfSmi(x1, &done);
3227 
3228   // If the object is not a value type, return the value.
3229   __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done);
3230 
3231   // Store the value.
3232   __ Str(x0, FieldMemOperand(x1, JSValue::kValueOffset));
3233   // Update the write barrier. Save the value as it will be
3234   // overwritten by the write barrier code and is needed afterward.
3235   __ Mov(x10, x0);
3236   __ RecordWriteField(
3237       x1, JSValue::kValueOffset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
3238 
3239   __ Bind(&done);
3240   context()->Plug(x0);
3241 }
3242 
3243 
3244 void FullCodeGenerator::EmitToInteger(CallRuntime* expr) {
3245   ZoneList<Expression*>* args = expr->arguments();
3246   DCHECK_EQ(1, args->length());
3247 
3248   // Load the argument into x0 and convert it.
3249   VisitForAccumulatorValue(args->at(0));
3250 
3251   // Convert the object to an integer.
3252   Label done_convert;
3253   __ JumpIfSmi(x0, &done_convert);
3254   __ Push(x0);
3255   __ CallRuntime(Runtime::kToInteger);
3256   __ bind(&done_convert);
3257   context()->Plug(x0);
3258 }
3259 
3260 
3261 void FullCodeGenerator::EmitToName(CallRuntime* expr) {
3262   ZoneList<Expression*>* args = expr->arguments();
3263   DCHECK_EQ(1, args->length());
3264 
3265   // Load the argument into x0 and convert it.
3266   VisitForAccumulatorValue(args->at(0));
3267 
3268   Label convert, done_convert;
3269   __ JumpIfSmi(x0, &convert);
3270   STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
3271   __ JumpIfObjectType(x0, x1, x1, LAST_NAME_TYPE, &done_convert, ls);
3272   __ Bind(&convert);
3273   __ Push(x0);
3274   __ CallRuntime(Runtime::kToName);
3275   __ Bind(&done_convert);
3276   context()->Plug(x0);
3277 }
3278 
3279 
3280 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3281   ZoneList<Expression*>* args = expr->arguments();
3282   DCHECK(args->length() == 1);
3283 
3284   VisitForAccumulatorValue(args->at(0));
3285 
3286   Label done;
3287   Register code = x0;
3288   Register result = x1;
3289 
3290   StringCharFromCodeGenerator generator(code, result);
3291   generator.GenerateFast(masm_);
3292   __ B(&done);
3293 
3294   NopRuntimeCallHelper call_helper;
3295   generator.GenerateSlow(masm_, call_helper);
3296 
3297   __ Bind(&done);
3298   context()->Plug(result);
3299 }
3300 
3301 
3302 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3303   ZoneList<Expression*>* args = expr->arguments();
3304   DCHECK(args->length() == 2);
3305 
3306   VisitForStackValue(args->at(0));
3307   VisitForAccumulatorValue(args->at(1));
3308 
3309   Register object = x1;
3310   Register index = x0;
3311   Register result = x3;
3312 
3313   __ Pop(object);
3314 
3315   Label need_conversion;
3316   Label index_out_of_range;
3317   Label done;
3318   StringCharCodeAtGenerator generator(object,
3319                                       index,
3320                                       result,
3321                                       &need_conversion,
3322                                       &need_conversion,
3323                                       &index_out_of_range,
3324                                       STRING_INDEX_IS_NUMBER);
3325   generator.GenerateFast(masm_);
3326   __ B(&done);
3327 
3328   __ Bind(&index_out_of_range);
3329   // When the index is out of range, the spec requires us to return NaN.
3330   __ LoadRoot(result, Heap::kNanValueRootIndex);
3331   __ B(&done);
3332 
3333   __ Bind(&need_conversion);
3334   // Load the undefined value into the result register, which will
3335   // trigger conversion.
3336   __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3337   __ B(&done);
3338 
3339   NopRuntimeCallHelper call_helper;
3340   generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3341 
3342   __ Bind(&done);
3343   context()->Plug(result);
3344 }
3345 
3346 
3347 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3348   ZoneList<Expression*>* args = expr->arguments();
3349   DCHECK(args->length() == 2);
3350 
3351   VisitForStackValue(args->at(0));
3352   VisitForAccumulatorValue(args->at(1));
3353 
3354   Register object = x1;
3355   Register index = x0;
3356   Register result = x0;
3357 
3358   __ Pop(object);
3359 
3360   Label need_conversion;
3361   Label index_out_of_range;
3362   Label done;
3363   StringCharAtGenerator generator(object,
3364                                   index,
3365                                   x3,
3366                                   result,
3367                                   &need_conversion,
3368                                   &need_conversion,
3369                                   &index_out_of_range,
3370                                   STRING_INDEX_IS_NUMBER);
3371   generator.GenerateFast(masm_);
3372   __ B(&done);
3373 
3374   __ Bind(&index_out_of_range);
3375   // When the index is out of range, the spec requires us to return
3376   // the empty string.
3377   __ LoadRoot(result, Heap::kempty_stringRootIndex);
3378   __ B(&done);
3379 
3380   __ Bind(&need_conversion);
3381   // Move smi zero into the result register, which will trigger conversion.
3382   __ Mov(result, Smi::FromInt(0));
3383   __ B(&done);
3384 
3385   NopRuntimeCallHelper call_helper;
3386   generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3387 
3388   __ Bind(&done);
3389   context()->Plug(result);
3390 }
3391 
3392 
3393 void FullCodeGenerator::EmitCall(CallRuntime* expr) {
3394   ASM_LOCATION("FullCodeGenerator::EmitCall");
3395   ZoneList<Expression*>* args = expr->arguments();
3396   DCHECK_LE(2, args->length());
3397   // Push target, receiver and arguments onto the stack.
3398   for (Expression* const arg : *args) {
3399     VisitForStackValue(arg);
3400   }
3401   PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3402   // Move target to x1.
3403   int const argc = args->length() - 2;
3404   __ Peek(x1, (argc + 1) * kXRegSize);
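  // The stack now holds the target, the receiver and argc arguments, so
  // the target sits argc + 1 slots below the top.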
3405   // Call the target.
3406   __ Mov(x0, argc);
3407   __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
3408   // Restore context register.
3409   __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3410   // Discard the function left on TOS.
3411   context()->DropAndPlug(1, x0);
3412 }
3413 
3414 
3415 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3416   ZoneList<Expression*>* args = expr->arguments();
3417   VisitForAccumulatorValue(args->at(0));
3418 
3419   Label materialize_true, materialize_false;
3420   Label* if_true = NULL;
3421   Label* if_false = NULL;
3422   Label* fall_through = NULL;
3423   context()->PrepareTest(&materialize_true, &materialize_false,
3424                          &if_true, &if_false, &fall_through);
3425 
3426   __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3427   __ Tst(x10, String::kContainsCachedArrayIndexMask);
3428   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3429   Split(eq, if_true, if_false, fall_through);
3430 
3431   context()->Plug(if_true, if_false);
3432 }
3433 
3434 
3435 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3436   ZoneList<Expression*>* args = expr->arguments();
3437   DCHECK(args->length() == 1);
3438   VisitForAccumulatorValue(args->at(0));
3439 
3440   __ AssertString(x0);
3441 
3442   __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3443   __ IndexFromHash(x10, x0);
3444 
3445   context()->Plug(x0);
3446 }
3447 
3448 
3449 void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
3450   ZoneList<Expression*>* args = expr->arguments();
3451   DCHECK_EQ(1, args->length());
3452   VisitForAccumulatorValue(args->at(0));
3453   __ AssertFunction(x0);
3454   __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
3455   __ Ldr(x0, FieldMemOperand(x0, Map::kPrototypeOffset));
3456   context()->Plug(x0);
3457 }
3458 
3459 
3460 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
3461   ASM_LOCATION("FullCodeGenerator::EmitFastOneByteArrayJoin");
3462 
3463   ZoneList<Expression*>* args = expr->arguments();
3464   DCHECK(args->length() == 2);
3465   VisitForStackValue(args->at(1));
3466   VisitForAccumulatorValue(args->at(0));
3467 
3468   Register array = x0;
3469   Register result = x0;
3470   Register elements = x1;
3471   Register element = x2;
3472   Register separator = x3;
3473   Register array_length = x4;
3474   Register result_pos = x5;
3475   Register map = x6;
3476   Register string_length = x10;
3477   Register elements_end = x11;
3478   Register string = x12;
3479   Register scratch1 = x13;
3480   Register scratch2 = x14;
3481   Register scratch3 = x7;
3482   Register separator_length = x15;
3483 
3484   Label bailout, done, one_char_separator, long_separator,
3485       non_trivial_array, not_size_one_array, loop,
3486       empty_separator_loop, one_char_separator_loop,
3487       one_char_separator_loop_entry, long_separator_loop;
3488 
3489   // The separator operand is on the stack.
3490   __ Pop(separator);
3491 
3492   // Check that the array is a JSArray.
3493   __ JumpIfSmi(array, &bailout);
3494   __ JumpIfNotObjectType(array, map, scratch1, JS_ARRAY_TYPE, &bailout);
3495 
3496   // Check that the array has fast elements.
3497   __ CheckFastElements(map, scratch1, &bailout);
3498 
3499   // If the array has length zero, return the empty string.
3500   // Load and untag the length of the array.
3501   // It is an unsigned value, so we can skip sign extension.
3502   // We assume little endianness.
3503   __ Ldrsw(array_length,
3504            UntagSmiFieldMemOperand(array, JSArray::kLengthOffset));
3505   __ Cbnz(array_length, &non_trivial_array);
3506   __ LoadRoot(result, Heap::kempty_stringRootIndex);
3507   __ B(&done);
3508 
3509   __ Bind(&non_trivial_array);
3510   // Get the FixedArray containing array's elements.
3511   __ Ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3512 
3513   // Check that all array elements are sequential one-byte strings, and
3514   // accumulate the sum of their lengths.
3515   __ Mov(string_length, 0);
3516   __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
3517   __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3518   // Loop condition: while (element < elements_end).
3519   // Live values in registers:
3520   //   elements: Fixed array of strings.
3521   //   array_length: Length of the fixed array of strings (not smi)
3522   //   separator: Separator string
3523   //   string_length: Accumulated sum of string lengths (not smi).
3524   //   element: Current array element.
3525   //   elements_end: Array end.
3526   if (FLAG_debug_code) {
3527     __ Cmp(array_length, 0);
3528     __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
3529   }
3530   __ Bind(&loop);
3531   __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
3532   __ JumpIfSmi(string, &bailout);
3533   __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3534   __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3535   __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
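  // Accumulate the untagged string lengths; Adds sets the flags, so an
  // overflowing running total branches to the bailout below.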
3536   __ Ldrsw(scratch1,
3537            UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset));
3538   __ Adds(string_length, string_length, scratch1);
3539   __ B(vs, &bailout);
3540   __ Cmp(element, elements_end);
3541   __ B(lt, &loop);
3542 
3543   // If array_length is 1, return elements[0], a string.
3544   __ Cmp(array_length, 1);
3545   __ B(ne, &not_size_one_array);
3546   __ Ldr(result, FieldMemOperand(elements, FixedArray::kHeaderSize));
3547   __ B(&done);
3548 
3549   __ Bind(&not_size_one_array);
3550 
3551   // Live values in registers:
3552   //   separator: Separator string
3553   //   array_length: Length of the array (not smi).
3554   //   string_length: Sum of string lengths (not smi).
3555   //   elements: FixedArray of strings.
3556 
3557   // Check that the separator is a flat one-byte string.
3558   __ JumpIfSmi(separator, &bailout);
3559   __ Ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
3560   __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3561   __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
3562 
3563   // Add (separator length times array_length) - separator length to the
3564   // string_length to get the length of the result string.
3565   // Load the separator length as untagged.
3566   // We assume little endianness, and that the length is positive.
3567   __ Ldrsw(separator_length,
3568            UntagSmiFieldMemOperand(separator,
3569                                    SeqOneByteString::kLengthOffset));
3570   __ Sub(string_length, string_length, separator_length);
3571   __ Umaddl(string_length, array_length.W(), separator_length.W(),
3572             string_length);
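  // string_length now holds the sum of the element lengths plus
  // (array_length - 1) separators: the exact length of the result string.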
3573 
3574   // Bailout for large object allocations.
3575   __ Cmp(string_length, Page::kMaxRegularHeapObjectSize);
3576   __ B(gt, &bailout);
3577 
3578   // Get first element in the array.
3579   __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
3580   // Live values in registers:
3581   //   element: First array element
3582   //   separator: Separator string
3583   //   string_length: Length of result string (not smi)
3584   //   array_length: Length of the array (not smi).
3585   __ AllocateOneByteString(result, string_length, scratch1, scratch2, scratch3,
3586                            &bailout);
3587 
3588   // Prepare for looping. Set up elements_end to end of the array. Set
3589   // result_pos to the position of the result where to write the first
3590   // character.
3591   // TODO(all): useless unless AllocateOneByteString trashes the register.
3592   __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3593   __ Add(result_pos, result, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3594 
3595   // Check the length of the separator.
3596   __ Cmp(separator_length, 1);
3597   __ B(eq, &one_char_separator);
3598   __ B(gt, &long_separator);
3599 
3600   // Empty separator case
3601   __ Bind(&empty_separator_loop);
3602   // Live values in registers:
3603   //   result_pos: the position to which we are currently copying characters.
3604   //   element: Current array element.
3605   //   elements_end: Array end.
3606 
3607   // Copy next array element to the result.
3608   __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
3609   __ Ldrsw(string_length,
3610            UntagSmiFieldMemOperand(string, String::kLengthOffset));
3611   __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3612   __ CopyBytes(result_pos, string, string_length, scratch1);
3613   __ Cmp(element, elements_end);
3614   __ B(lt, &empty_separator_loop);  // End while (element < elements_end).
3615   __ B(&done);
3616 
3617   // One-character separator case
3618   __ Bind(&one_char_separator);
3619   // Replace separator with its one-byte character value.
3620   __ Ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
3621   // Jump into the loop after the code that copies the separator, so the first
3622   // element is not preceded by a separator.
3623   __ B(&one_char_separator_loop_entry);
3624 
3625   __ Bind(&one_char_separator_loop);
3626   // Live values in registers:
3627   //   result_pos: the position to which we are currently copying characters.
3628   //   element: Current array element.
3629   //   elements_end: Array end.
3630   //   separator: Single separator one-byte char (in lower byte).
3631 
3632   // Copy the separator character to the result.
3633   __ Strb(separator, MemOperand(result_pos, 1, PostIndex));
3634 
3635   // Copy next array element to the result.
3636   __ Bind(&one_char_separator_loop_entry);
3637   __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
3638   __ Ldrsw(string_length,
3639            UntagSmiFieldMemOperand(string, String::kLengthOffset));
3640   __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3641   __ CopyBytes(result_pos, string, string_length, scratch1);
3642   __ Cmp(element, elements_end);
3643   __ B(lt, &one_char_separator_loop);  // End while (element < elements_end).
3644   __ B(&done);
3645 
3646   // Long separator case (separator is more than one character). Entry is at the
3647   // label long_separator below.
3648   __ Bind(&long_separator_loop);
3649   // Live values in registers:
3650   //   result_pos: the position to which we are currently copying characters.
3651   //   element: Current array element.
3652   //   elements_end: Array end.
3653   //   separator: Separator string.
3654 
3655   // Copy the separator to the result.
3656   // TODO(all): hoist next two instructions.
3657   __ Ldrsw(string_length,
3658            UntagSmiFieldMemOperand(separator, String::kLengthOffset));
3659   __ Add(string, separator, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3660   __ CopyBytes(result_pos, string, string_length, scratch1);
3661 
3662   __ Bind(&long_separator);
3663   __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
3664   __ Ldrsw(string_length,
3665            UntagSmiFieldMemOperand(string, String::kLengthOffset));
3666   __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3667   __ CopyBytes(result_pos, string, string_length, scratch1);
3668   __ Cmp(element, elements_end);
3669   __ B(lt, &long_separator_loop);  // End while (element < elements_end).
3670   __ B(&done);
3671 
3672   __ Bind(&bailout);
3673   // Returning undefined will force slower code to handle it.
3674   __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3675   __ Bind(&done);
3676   context()->Plug(result);
3677 }
3678 
3679 
3680 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
3681   DCHECK(expr->arguments()->length() == 0);
3682   ExternalReference debug_is_active =
3683       ExternalReference::debug_is_active_address(isolate());
3684   __ Mov(x10, debug_is_active);
3685   __ Ldrb(x0, MemOperand(x10));
3686   __ SmiTag(x0);
3687   context()->Plug(x0);
3688 }
3689 
3690 
3691 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
3692   ZoneList<Expression*>* args = expr->arguments();
3693   DCHECK_EQ(2, args->length());
3694   VisitForStackValue(args->at(0));
3695   VisitForStackValue(args->at(1));
3696 
3697   Label runtime, done;
3698 
3699   Register result = x0;
3700   __ Allocate(JSIteratorResult::kSize, result, x10, x11, &runtime, TAG_OBJECT);
3701   Register map_reg = x1;
3702   Register result_value = x2;
3703   Register boolean_done = x3;
3704   Register empty_fixed_array = x4;
3705   Register untagged_result = x5;
3706   __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, map_reg);
3707   __ Pop(boolean_done);
3708   __ Pop(result_value);
3709   __ LoadRoot(empty_fixed_array, Heap::kEmptyFixedArrayRootIndex);
3710   STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
3711                 JSObject::kElementsOffset);
3712   STATIC_ASSERT(JSIteratorResult::kValueOffset + kPointerSize ==
3713                 JSIteratorResult::kDoneOffset);
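  // The asserts above guarantee that each pair of fields is adjacent, so
  // the object can be filled in with paired stores through its untagged
  // address.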
3714   __ ObjectUntag(untagged_result, result);
3715   __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
3716   __ Stp(empty_fixed_array, empty_fixed_array,
3717          MemOperand(untagged_result, JSObject::kPropertiesOffset));
3718   __ Stp(result_value, boolean_done,
3719          MemOperand(untagged_result, JSIteratorResult::kValueOffset));
3720   STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
3721   __ B(&done);
3722 
3723   __ Bind(&runtime);
3724   __ CallRuntime(Runtime::kCreateIterResultObject);
3725 
3726   __ Bind(&done);
3727   context()->Plug(x0);
3728 }
3729 
3730 
3731 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
3732   // Push undefined as the receiver.
3733   __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
3734   __ Push(x0);
3735 
3736   __ LoadNativeContextSlot(expr->context_index(), x0);
3737 }
3738 
3739 
3740 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
3741   ZoneList<Expression*>* args = expr->arguments();
3742   int arg_count = args->length();
3743 
3744   SetCallPosition(expr);
3745   __ Peek(x1, (arg_count + 1) * kPointerSize);
3746   __ Mov(x0, arg_count);
3747   __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
3748           RelocInfo::CODE_TARGET);
3749 }
3750 
3751 
3752 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3753   ZoneList<Expression*>* args = expr->arguments();
3754   int arg_count = args->length();
3755 
3756   if (expr->is_jsruntime()) {
3757     Comment cmnt(masm_, "[ CallRuntime");
3758     EmitLoadJSRuntimeFunction(expr);
3759 
3760     // Push the target function under the receiver.
3761     __ Pop(x10);
3762     __ Push(x0, x10);
3763 
3764     for (int i = 0; i < arg_count; i++) {
3765       VisitForStackValue(args->at(i));
3766     }
3767 
3768     PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3769     EmitCallJSRuntimeFunction(expr);
3770 
3771     // Restore context register.
3772     __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3773 
3774     context()->DropAndPlug(1, x0);
3775 
3776   } else {
3777     const Runtime::Function* function = expr->function();
3778     switch (function->function_id) {
3779 #define CALL_INTRINSIC_GENERATOR(Name)     \
3780   case Runtime::kInline##Name: {           \
3781     Comment cmnt(masm_, "[ Inline" #Name); \
3782     return Emit##Name(expr);               \
3783   }
3784       FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
3785 #undef CALL_INTRINSIC_GENERATOR
3786       default: {
3787         Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
3788         // Push the arguments ("left-to-right").
3789         for (int i = 0; i < arg_count; i++) {
3790           VisitForStackValue(args->at(i));
3791         }
3792 
3793         // Call the C runtime function.
3794         PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3795         __ CallRuntime(expr->function(), arg_count);
3796         context()->Plug(x0);
3797       }
3798     }
3799   }
3800 }
3801 
3802 
3803 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3804   switch (expr->op()) {
3805     case Token::DELETE: {
3806       Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3807       Property* property = expr->expression()->AsProperty();
3808       VariableProxy* proxy = expr->expression()->AsVariableProxy();
3809 
3810       if (property != NULL) {
3811         VisitForStackValue(property->obj());
3812         VisitForStackValue(property->key());
3813         __ CallRuntime(is_strict(language_mode())
3814                            ? Runtime::kDeleteProperty_Strict
3815                            : Runtime::kDeleteProperty_Sloppy);
3816         context()->Plug(x0);
3817       } else if (proxy != NULL) {
3818         Variable* var = proxy->var();
3819         // Delete of an unqualified identifier is disallowed in strict mode but
3820         // "delete this" is allowed.
3821         bool is_this = var->HasThisName(isolate());
3822         DCHECK(is_sloppy(language_mode()) || is_this);
3823         if (var->IsUnallocatedOrGlobalSlot()) {
3824           __ LoadGlobalObject(x12);
3825           __ Mov(x11, Operand(var->name()));
3826           __ Push(x12, x11);
3827           __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
3828           context()->Plug(x0);
3829         } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3830           // Result of deleting non-global, non-dynamic variables is false.
3831           // The subexpression does not have side effects.
3832           context()->Plug(is_this);
3833         } else {
3834           // Non-global variable.  Call the runtime to try to delete from the
3835           // context where the variable was introduced.
3836           __ Mov(x2, Operand(var->name()));
3837           __ Push(context_register(), x2);
3838           __ CallRuntime(Runtime::kDeleteLookupSlot);
3839           context()->Plug(x0);
3840         }
3841       } else {
3842         // Result of deleting non-property, non-variable reference is true.
3843         // The subexpression may have side effects.
3844         VisitForEffect(expr->expression());
3845         context()->Plug(true);
3846       }
3847       break;
3849     }
3850     case Token::VOID: {
3851       Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3852       VisitForEffect(expr->expression());
3853       context()->Plug(Heap::kUndefinedValueRootIndex);
3854       break;
3855     }
3856     case Token::NOT: {
3857       Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3858       if (context()->IsEffect()) {
3859         // Unary NOT has no side effects so it's only necessary to visit the
3860         // subexpression.  Match the optimizing compiler by not branching.
3861         VisitForEffect(expr->expression());
3862       } else if (context()->IsTest()) {
3863         const TestContext* test = TestContext::cast(context());
3864         // The labels are swapped for the recursive call.
3865         VisitForControl(expr->expression(),
3866                         test->false_label(),
3867                         test->true_label(),
3868                         test->fall_through());
3869         context()->Plug(test->true_label(), test->false_label());
3870       } else {
3871         DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
3872         // TODO(jbramley): This could be much more efficient using (for
3873         // example) the CSEL instruction.
3874         Label materialize_true, materialize_false, done;
3875         VisitForControl(expr->expression(),
3876                         &materialize_false,
3877                         &materialize_true,
3878                         &materialize_true);
3879 
3880         __ Bind(&materialize_true);
3881         PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3882         __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
3883         __ B(&done);
3884 
3885         __ Bind(&materialize_false);
3886         PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3887         __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
3888         __ B(&done);
3889 
3890         __ Bind(&done);
3891         if (context()->IsStackValue()) {
3892           __ Push(result_register());
3893         }
3894       }
3895       break;
3896     }
3897     case Token::TYPEOF: {
3898       Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3899       {
3900         AccumulatorValueContext context(this);
3901         VisitForTypeofValue(expr->expression());
3902       }
3903       __ Mov(x3, x0);
3904       TypeofStub typeof_stub(isolate());
3905       __ CallStub(&typeof_stub);
3906       context()->Plug(x0);
3907       break;
3908     }
3909     default:
3910       UNREACHABLE();
3911   }
3912 }
3913 
3914 
3915 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3916   DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
3917 
3918   Comment cmnt(masm_, "[ CountOperation");
3919 
3920   Property* prop = expr->expression()->AsProperty();
3921   LhsKind assign_type = Property::GetAssignType(prop);
3922 
3923   // Evaluate expression and get value.
3924   if (assign_type == VARIABLE) {
3925     DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3926     AccumulatorValueContext context(this);
3927     EmitVariableLoad(expr->expression()->AsVariableProxy());
3928   } else {
3929     // Reserve space for result of postfix operation.
3930     if (expr->is_postfix() && !context()->IsEffect()) {
3931       __ Push(xzr);
3932     }
3933     switch (assign_type) {
3934       case NAMED_PROPERTY: {
3935         // Put the object both on the stack and in the register.
3936         VisitForStackValue(prop->obj());
3937         __ Peek(LoadDescriptor::ReceiverRegister(), 0);
3938         EmitNamedPropertyLoad(prop);
3939         break;
3940       }
3941 
3942       case NAMED_SUPER_PROPERTY: {
3943         VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3944         VisitForAccumulatorValue(
3945             prop->obj()->AsSuperPropertyReference()->home_object());
3946         __ Push(result_register());
3947         const Register scratch = x10;
3948         __ Peek(scratch, kPointerSize);
3949         __ Push(scratch, result_register());
3950         EmitNamedSuperPropertyLoad(prop);
3951         break;
3952       }
3953 
3954       case KEYED_SUPER_PROPERTY: {
3955         VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3956         VisitForStackValue(
3957             prop->obj()->AsSuperPropertyReference()->home_object());
3958         VisitForAccumulatorValue(prop->key());
3959         __ Push(result_register());
3960         const Register scratch1 = x10;
3961         const Register scratch2 = x11;
3962         __ Peek(scratch1, 2 * kPointerSize);
3963         __ Peek(scratch2, kPointerSize);
3964         __ Push(scratch1, scratch2, result_register());
3965         EmitKeyedSuperPropertyLoad(prop);
3966         break;
3967       }
3968 
3969       case KEYED_PROPERTY: {
3970         VisitForStackValue(prop->obj());
3971         VisitForStackValue(prop->key());
3972         __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
3973         __ Peek(LoadDescriptor::NameRegister(), 0);
3974         EmitKeyedPropertyLoad(prop);
3975         break;
3976       }
3977 
3978       case VARIABLE:
3979         UNREACHABLE();
3980     }
3981   }
3982 
3983   // We need a second deoptimization point after loading the value
3984   // in case evaluating the property load may have a side effect.
3985   if (assign_type == VARIABLE) {
3986     PrepareForBailout(expr->expression(), TOS_REG);
3987   } else {
3988     PrepareForBailoutForId(prop->LoadId(), TOS_REG);
3989   }
3990 
3991   // Inline smi case if we are in a loop.
3992   Label stub_call, done;
3993   JumpPatchSite patch_site(masm_);
3994 
3995   int count_value = expr->op() == Token::INC ? 1 : -1;
3996   if (ShouldInlineSmiCase(expr->op())) {
3997     Label slow;
3998     patch_site.EmitJumpIfNotSmi(x0, &slow);
3999 
4000     // Save result for postfix expressions.
4001     if (expr->is_postfix()) {
4002       if (!context()->IsEffect()) {
4003         // Save the result on the stack. If we have a named or keyed property we
4004         // store the result under the receiver that is currently on top of the
4005         // stack.
4006         switch (assign_type) {
4007           case VARIABLE:
4008             __ Push(x0);
4009             break;
4010           case NAMED_PROPERTY:
4011             __ Poke(x0, kPointerSize);
4012             break;
4013           case NAMED_SUPER_PROPERTY:
4014             __ Poke(x0, kPointerSize * 2);
4015             break;
4016           case KEYED_PROPERTY:
4017             __ Poke(x0, kPointerSize * 2);
4018             break;
4019           case KEYED_SUPER_PROPERTY:
4020             __ Poke(x0, kPointerSize * 3);
4021             break;
4022         }
4023       }
4024     }
4025 
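    // Smis are tagged integers, so adding the tagged constant increments
    // the value directly; Adds sets V on smi overflow, in which case the
    // addition is undone and the generic stub is called instead.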
4026     __ Adds(x0, x0, Smi::FromInt(count_value));
4027     __ B(vc, &done);
4028     // Call stub. Undo operation first.
4029     __ Sub(x0, x0, Smi::FromInt(count_value));
4030     __ B(&stub_call);
4031     __ Bind(&slow);
4032   }
4033   if (!is_strong(language_mode())) {
4034     ToNumberStub convert_stub(isolate());
4035     __ CallStub(&convert_stub);
4036     PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4037   }
4038 
4039   // Save result for postfix expressions.
4040   if (expr->is_postfix()) {
4041     if (!context()->IsEffect()) {
4042       // Save the result on the stack. If we have a named or keyed property
4043       // we store the result under the receiver that is currently on top
4044       // of the stack.
4045       switch (assign_type) {
4046         case VARIABLE:
4047           __ Push(x0);
4048           break;
4049         case NAMED_PROPERTY:
4050           __ Poke(x0, kXRegSize);
4051           break;
4052         case NAMED_SUPER_PROPERTY:
4053           __ Poke(x0, 2 * kXRegSize);
4054           break;
4055         case KEYED_PROPERTY:
4056           __ Poke(x0, 2 * kXRegSize);
4057           break;
4058         case KEYED_SUPER_PROPERTY:
4059           __ Poke(x0, 3 * kXRegSize);
4060           break;
4061       }
4062     }
4063   }
4064 
4065   __ Bind(&stub_call);
4066   __ Mov(x1, x0);
4067   __ Mov(x0, Smi::FromInt(count_value));
4068 
4069   SetExpressionPosition(expr);
4070 
4071   {
4072     Assembler::BlockPoolsScope scope(masm_);
4073     Handle<Code> code =
4074         CodeFactory::BinaryOpIC(isolate(), Token::ADD,
4075                                 strength(language_mode())).code();
4076     CallIC(code, expr->CountBinOpFeedbackId());
4077     patch_site.EmitPatchInfo();
4078   }
4079   __ Bind(&done);
4080 
4081   if (is_strong(language_mode())) {
4082     PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4083   }
4084   // Store the value returned in x0.
4085   switch (assign_type) {
4086     case VARIABLE:
4087       if (expr->is_postfix()) {
4088         { EffectContext context(this);
4089           EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4090                                  Token::ASSIGN, expr->CountSlot());
4091           PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4092           context.Plug(x0);
4093         }
4094         // For all contexts except EffectContext, we have the result on
4095         // top of the stack.
4096         if (!context()->IsEffect()) {
4097           context()->PlugTOS();
4098         }
4099       } else {
4100         EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4101                                Token::ASSIGN, expr->CountSlot());
4102         PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4103         context()->Plug(x0);
4104       }
4105       break;
4106     case NAMED_PROPERTY: {
4107       __ Mov(StoreDescriptor::NameRegister(),
4108              Operand(prop->key()->AsLiteral()->value()));
4109       __ Pop(StoreDescriptor::ReceiverRegister());
4110       EmitLoadStoreICSlot(expr->CountSlot());
4111       CallStoreIC();
4112       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4113       if (expr->is_postfix()) {
4114         if (!context()->IsEffect()) {
4115           context()->PlugTOS();
4116         }
4117       } else {
4118         context()->Plug(x0);
4119       }
4120       break;
4121     }
4122     case NAMED_SUPER_PROPERTY: {
4123       EmitNamedSuperPropertyStore(prop);
4124       if (expr->is_postfix()) {
4125         if (!context()->IsEffect()) {
4126           context()->PlugTOS();
4127         }
4128       } else {
4129         context()->Plug(x0);
4130       }
4131       break;
4132     }
4133     case KEYED_SUPER_PROPERTY: {
4134       EmitKeyedSuperPropertyStore(prop);
4135       if (expr->is_postfix()) {
4136         if (!context()->IsEffect()) {
4137           context()->PlugTOS();
4138         }
4139       } else {
4140         context()->Plug(x0);
4141       }
4142       break;
4143     }
4144     case KEYED_PROPERTY: {
4145       __ Pop(StoreDescriptor::NameRegister());
4146       __ Pop(StoreDescriptor::ReceiverRegister());
4147       Handle<Code> ic =
4148           CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4149       EmitLoadStoreICSlot(expr->CountSlot());
4150       CallIC(ic);
4151       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4152       if (expr->is_postfix()) {
4153         if (!context()->IsEffect()) {
4154           context()->PlugTOS();
4155         }
4156       } else {
4157         context()->Plug(x0);
4158       }
4159       break;
4160     }
4161   }
4162 }
4163 
4164 
4165 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4166                                                  Expression* sub_expr,
4167                                                  Handle<String> check) {
4168   ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
4169   Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
4170   Label materialize_true, materialize_false;
4171   Label* if_true = NULL;
4172   Label* if_false = NULL;
4173   Label* fall_through = NULL;
4174   context()->PrepareTest(&materialize_true, &materialize_false,
4175                          &if_true, &if_false, &fall_through);
4176 
4177   { AccumulatorValueContext context(this);
4178     VisitForTypeofValue(sub_expr);
4179   }
4180   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4181 
4182   Factory* factory = isolate()->factory();
4183   if (String::Equals(check, factory->number_string())) {
4184     ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
4185     __ JumpIfSmi(x0, if_true);
4186     __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4187     __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
4188     Split(eq, if_true, if_false, fall_through);
4189   } else if (String::Equals(check, factory->string_string())) {
4190     ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
4191     __ JumpIfSmi(x0, if_false);
4192     __ CompareObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE);
4193     Split(lt, if_true, if_false, fall_through);
4194   } else if (String::Equals(check, factory->symbol_string())) {
4195     ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
4196     __ JumpIfSmi(x0, if_false);
4197     __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
4198     Split(eq, if_true, if_false, fall_through);
4199   } else if (String::Equals(check, factory->boolean_string())) {
4200     ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
4201     __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
4202     __ CompareRoot(x0, Heap::kFalseValueRootIndex);
4203     Split(eq, if_true, if_false, fall_through);
4204   } else if (String::Equals(check, factory->undefined_string())) {
4205     ASM_LOCATION(
4206         "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
4207     __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true);
4208     __ JumpIfSmi(x0, if_false);
4209     // Check for undetectable objects => true.
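  // (An undetectable object such as document.all reports "undefined".)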
4210     __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4211     __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4212     __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
4213                     fall_through);
4214   } else if (String::Equals(check, factory->function_string())) {
4215     ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
4216     __ JumpIfSmi(x0, if_false);
4217     __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4218     __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4219     __ And(x1, x1, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
4220     __ CompareAndSplit(x1, Operand(1 << Map::kIsCallable), eq, if_true,
4221                        if_false, fall_through);
4222   } else if (String::Equals(check, factory->object_string())) {
4223     ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
4224     __ JumpIfSmi(x0, if_false);
4225     __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
4226     STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
4227     __ JumpIfObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE, if_false, lt);
4228     // Check for callable or undetectable objects => false.
4229     __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
4230     __ TestAndSplit(x10, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable),
4231                     if_true, if_false, fall_through);
4232 // clang-format off
4233 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)   \
4234   } else if (String::Equals(check, factory->type##_string())) { \
4235     ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof " \
4236                  #type "_string");                              \
4237     __ JumpIfSmi(x0, if_true);                                  \
4238     __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));    \
4239     __ CompareRoot(x0, Heap::k##Type##MapRootIndex);            \
4240     Split(eq, if_true, if_false, fall_through);
4241   SIMD128_TYPES(SIMD128_TYPE)
4242 #undef SIMD128_TYPE
4243     // clang-format on
4244   } else {
4245     ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
4246     if (if_false != fall_through) __ B(if_false);
4247   }
4248   context()->Plug(if_true, if_false);
4249 }
4250 
4251 
4252 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4253   Comment cmnt(masm_, "[ CompareOperation");
4254   SetExpressionPosition(expr);
4255 
4256   // Try to generate an optimized comparison with a literal value.
4257   // TODO(jbramley): This only checks common values like NaN or undefined.
4258   // Should it also handle ARM64 immediate operands?
4259   if (TryLiteralCompare(expr)) {
4260     return;
4261   }
4262 
4263   // Assign labels according to context()->PrepareTest.
4264   Label materialize_true;
4265   Label materialize_false;
4266   Label* if_true = NULL;
4267   Label* if_false = NULL;
4268   Label* fall_through = NULL;
4269   context()->PrepareTest(&materialize_true, &materialize_false,
4270                          &if_true, &if_false, &fall_through);
4271 
4272   Token::Value op = expr->op();
4273   VisitForStackValue(expr->left());
4274   switch (op) {
4275     case Token::IN:
4276       VisitForStackValue(expr->right());
4277       __ CallRuntime(Runtime::kHasProperty);
4278       PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4279       __ CompareRoot(x0, Heap::kTrueValueRootIndex);
4280       Split(eq, if_true, if_false, fall_through);
4281       break;
4282 
4283     case Token::INSTANCEOF: {
4284       VisitForAccumulatorValue(expr->right());
4285       __ Pop(x1);
4286       InstanceOfStub stub(isolate());
4287       __ CallStub(&stub);
4288       PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4289       __ CompareRoot(x0, Heap::kTrueValueRootIndex);
4290       Split(eq, if_true, if_false, fall_through);
4291       break;
4292     }
4293 
4294     default: {
4295       VisitForAccumulatorValue(expr->right());
4296       Condition cond = CompareIC::ComputeCondition(op);
4297 
4298       // Pop the stack value.
4299       __ Pop(x1);
4300 
4301       JumpPatchSite patch_site(masm_);
4302       if (ShouldInlineSmiCase(op)) {
4303         Label slow_case;
4304         patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
4305         __ Cmp(x1, x0);
4306         Split(cond, if_true, if_false, NULL);
4307         __ Bind(&slow_case);
4308       }
4309 
4310       Handle<Code> ic = CodeFactory::CompareIC(
4311                             isolate(), op, strength(language_mode())).code();
4312       CallIC(ic, expr->CompareOperationFeedbackId());
4313       patch_site.EmitPatchInfo();
4314       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4315       __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
4316     }
4317   }
4318 
4319   // Convert the result of the comparison into one expected for this
4320   // expression's context.
4321   context()->Plug(if_true, if_false);
4322 }
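
// Rough sketch of the inlined fast path (assuming ShouldInlineSmiCase(op)
// holds and the patch site has been patched for smi feedback): for `a < b`
// with two smi operands, control falls through the smi check to the inline
// Cmp/Split and the CompareIC call is skipped entirely; non-smi operands
// branch to slow_case and take the IC path instead.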


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ CompareRoot(x0, nil_value);
    Split(eq, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ CompareRoot(x0, Heap::kTrueValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  }

  context()->Plug(if_true, if_false);
}
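
// Illustrative JS semantics: `x === null` takes the strict branch above and
// reduces to a single root comparison, whereas `x == null` goes through the
// CompareNilIC, which (per JS loose-equality rules) must also accept
// undefined and undetectable objects.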


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(x0);
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  SetExpressionPosition(expr);

  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  // TODO(jbramley): Tidy this up once the merge is done, using named registers
  // and suchlike. The implementation changes a little by bleeding_edge so I
  // don't want to spend too much time on it now.

  switch (expr->yield_kind()) {
    case Yield::kSuspend:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ Push(result_register());
      // Fall through.
    case Yield::kInitial: {
      Label suspend, continuation, post_runtime, resume;

      __ B(&suspend);
      // TODO(jbramley): This label is bound here because the following code
      // looks at its pos(). Is it possible to do something more efficient
      // here, perhaps using Adr?
      __ Bind(&continuation);
      __ RecordGeneratorContinuation();
      __ B(&resume);

      __ Bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      DCHECK((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
      __ Mov(x1, Smi::FromInt(continuation.pos()));
      __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
      __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
      __ Mov(x1, cp);
      __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
                          kLRHasBeenSaved, kDontSaveFPRegs);
      __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
      __ Cmp(__ StackPointer(), x1);
      __ B(eq, &post_runtime);
      __ Push(x0);  // generator object
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Bind(&post_runtime);
      __ Pop(result_register());
      EmitReturnSequence();

      __ Bind(&resume);
      context()->Plug(result_register());
      break;
    }

    case Yield::kFinal: {
      VisitForAccumulatorValue(expr->generator_object());
      __ Mov(x1, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
      __ Str(x1, FieldMemOperand(result_register(),
                                 JSGeneratorObject::kContinuationOffset));
      // Pop value from top-of-stack slot, box result into result register.
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }

    case Yield::kDelegating: {
      VisitForStackValue(expr->generator_object());

      // Initial stack layout is as follows:
      // [sp + 1 * kPointerSize] iter
      // [sp + 0 * kPointerSize] g

      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call, l_loop;
      Register load_receiver = LoadDescriptor::ReceiverRegister();
      Register load_name = LoadDescriptor::NameRegister();

      // Initial send value is undefined.
      __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
      __ B(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ Bind(&l_catch);
      __ LoadRoot(load_name, Heap::kthrow_stringRootIndex);  // "throw"
      __ Peek(x3, 1 * kPointerSize);                         // iter
      __ Push(load_name, x3, x0);                       // "throw", iter, except
      __ B(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ Bind(&l_try);
      __ Pop(x0);                                        // result
      int handler_index = NewHandlerTableEntry();
      EnterTryBlock(handler_index, &l_catch);
      const int try_block_size = TryCatch::kElementCount * kPointerSize;
      __ Push(x0);                                       // result

      __ B(&l_suspend);
      // TODO(jbramley): This label is bound here because the following code
      // looks at its pos(). Is it possible to do something more efficient
      // here, perhaps using Adr?
      __ Bind(&l_continuation);
      __ RecordGeneratorContinuation();
      __ B(&l_resume);

      __ Bind(&l_suspend);
      const int generator_object_depth = kPointerSize + try_block_size;
      __ Peek(x0, generator_object_depth);
      __ Push(x0);                                       // g
      __ Push(Smi::FromInt(handler_index));              // handler-index
      DCHECK((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos()));
      __ Mov(x1, Smi::FromInt(l_continuation.pos()));
      __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
      __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
      __ Mov(x1, cp);
      __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
                          kLRHasBeenSaved, kDontSaveFPRegs);
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
      __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Pop(x0);                                        // result
      EmitReturnSequence();
      __ Bind(&l_resume);                                // received in x0
      ExitTryBlock(handler_index);

      // receiver = iter; f = 'next'; arg = received;
      __ Bind(&l_next);

      __ LoadRoot(load_name, Heap::knext_stringRootIndex);  // "next"
      __ Peek(x3, 1 * kPointerSize);                        // iter
      __ Push(load_name, x3, x0);                      // "next", iter, received

      // result = receiver[f](arg);
      __ Bind(&l_call);
      __ Peek(load_receiver, 1 * kPointerSize);
      __ Peek(load_name, 2 * kPointerSize);
      __ Mov(LoadDescriptor::SlotRegister(),
             SmiFromSlot(expr->KeyedLoadFeedbackSlot()));
      Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
      CallIC(ic, TypeFeedbackId::None());
      __ Mov(x1, x0);
      __ Poke(x1, 2 * kPointerSize);
      SetCallPosition(expr);
      __ Mov(x0, 1);
      __ Call(
          isolate()->builtins()->Call(ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

      __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Drop(1);  // The function is still on the stack; drop it.

      // if (!result.done) goto l_try;
      __ Bind(&l_loop);
      __ Move(load_receiver, x0);

      __ Push(load_receiver);                               // save result
      __ LoadRoot(load_name, Heap::kdone_stringRootIndex);  // "done"
      __ Mov(LoadDescriptor::SlotRegister(),
             SmiFromSlot(expr->DoneFeedbackSlot()));
      CallLoadIC(NOT_INSIDE_TYPEOF);  // x0=result.done
      // The ToBooleanStub argument (result.done) is in x0.
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
      __ B(ne, &l_try);

      // result.value
      __ Pop(load_receiver);                                 // result
      __ LoadRoot(load_name, Heap::kvalue_stringRootIndex);  // "value"
      __ Mov(LoadDescriptor::SlotRegister(),
             SmiFromSlot(expr->ValueFeedbackSlot()));
      CallLoadIC(NOT_INSIDE_TYPEOF);                         // x0=result.value
      context()->DropAndPlug(2, x0);                         // drop iter and g
      break;
    }
  }
}
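
// The kDelegating case above is, in essence, the desugaring of `yield* iter`.
// An illustrative (and deliberately loose) JS sketch of the control flow the
// labels implement; feedback slots and handler bookkeeping are elided:
//
//   let received = undefined;
//   let result = iter["next"](received);        // l_next -> l_call
//   while (!result.done) {                      // l_loop
//     try {
//       received = yield result;                // l_try / l_suspend / l_resume
//     } catch (e) {
//       result = iter["throw"](e);              // l_catch -> l_call
//       continue;
//     }
//     result = iter["next"](received);          // l_next -> l_call
//   }
//   // The value of the whole expression is result.value.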


void FullCodeGenerator::EmitGeneratorResume(
    Expression* generator, Expression* value,
    JSGeneratorObject::ResumeMode resume_mode) {
  ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
  Register generator_object = x1;
  Register the_hole = x2;
  Register operand_stack_size = w3;
  Register function = x4;

  // The value stays in x0 and is ultimately read by the resumed generator, as
  // if CallRuntime(Runtime::kSuspendJSGeneratorObject) had returned it, or it
  // is read in order to throw the value when the resumed generator is already
  // closed. x1 holds the generator object until the activation has been
  // resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ Pop(generator_object);

  // Load suspended function and context.
  __ Ldr(cp, FieldMemOperand(generator_object,
                             JSGeneratorObject::kContextOffset));
  __ Ldr(function, FieldMemOperand(generator_object,
                                   JSGeneratorObject::kFunctionOffset));

  // Load receiver and store as the first argument.
  __ Ldr(x10, FieldMemOperand(generator_object,
                              JSGeneratorObject::kReceiverOffset));
  __ Push(x10);

  // Push holes for the rest of the arguments to the generator function.
  __ Ldr(x10, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));

  // The number of arguments is stored as an int32_t, and -1 is a marker
  // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), which would normally
  // require sign extension to handle correctly. However, we operate on 32-bit
  // W registers here, so no extension is required.
  __ Ldr(w10, FieldMemOperand(x10,
                              SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
  __ PushMultipleTimes(the_hole, w10);

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended.
  Label resume_frame, done;
  __ Bl(&resume_frame);
  __ B(&done);

  __ Bind(&resume_frame);
  __ Push(lr,           // Return address.
          fp,           // Caller's frame pointer.
          cp,           // Callee's context.
          function);    // Callee's JS Function.
  __ Add(fp, __ StackPointer(), kPointerSize * 2);

  // Load and untag the operand stack size.
  __ Ldr(x10, FieldMemOperand(generator_object,
                              JSGeneratorObject::kOperandStackOffset));
  __ Ldr(operand_stack_size,
         UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ Cbnz(operand_stack_size, &slow_resume);
    __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
    __ Ldrsw(x11,
             UntagSmiFieldMemOperand(generator_object,
                                     JSGeneratorObject::kContinuationOffset));
    __ Add(x10, x10, x11);
    __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
    __ Str(x12, FieldMemOperand(generator_object,
                                JSGeneratorObject::kContinuationOffset));
    __ Br(x10);

    __ Bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to fix
  // up the stack and the handlers.
  __ PushMultipleTimes(the_hole, operand_stack_size);

  __ Mov(x10, Smi::FromInt(resume_mode));
  __ Push(generator_object, result_register(), x10);
  __ CallRuntime(Runtime::kResumeJSGeneratorObject);
  // Not reached: the runtime call returns elsewhere.
  __ Unreachable();

  __ Bind(&done);
  context()->Plug(result_register());
}
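
// Illustrative fast path (assuming resume via `gen.next(v)`): when the
// suspended activation has an empty operand stack, the code above computes
// the continuation address from the code entry plus the recorded continuation
// offset and branches straight back into the generator body with `Br x10`;
// every other case falls through to Runtime::kResumeJSGeneratorObject.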


void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label allocate, done_allocate;

  // Allocate and populate an object with this form: { value: VAL, done: DONE }

  Register result = x0;
  __ Allocate(JSIteratorResult::kSize, result, x10, x11, &allocate, TAG_OBJECT);
  __ B(&done_allocate);

  __ Bind(&allocate);
  __ Push(Smi::FromInt(JSIteratorResult::kSize));
  __ CallRuntime(Runtime::kAllocateInNewSpace);

  __ Bind(&done_allocate);
  Register map_reg = x1;
  Register result_value = x2;
  Register boolean_done = x3;
  Register empty_fixed_array = x4;
  Register untagged_result = x5;
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, map_reg);
  __ Pop(result_value);
  __ LoadRoot(boolean_done,
              done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
  __ LoadRoot(empty_fixed_array, Heap::kEmptyFixedArrayRootIndex);
  STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
                JSObject::kElementsOffset);
  STATIC_ASSERT(JSIteratorResult::kValueOffset + kPointerSize ==
                JSIteratorResult::kDoneOffset);
  __ ObjectUntag(untagged_result, result);
  __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
  __ Stp(empty_fixed_array, empty_fixed_array,
         MemOperand(untagged_result, JSObject::kPropertiesOffset));
  __ Stp(result_value, boolean_done,
         MemOperand(untagged_result, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
}
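
// A sketch of the resulting object layout (five tagged words, matching the
// final STATIC_ASSERT); the two Stp stores rely on the field adjacency
// asserted above:
//
//   +0 * kPointerSize: map         <- map_reg
//   +1 * kPointerSize: properties  \ one Stp pair
//   +2 * kPointerSize: elements    / (both the empty fixed array)
//   +3 * kPointerSize: value       \ one Stp pair
//   +4 * kPointerSize: done        / (true/false root)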


// TODO(all): I don't like this method.
// It seems to me that in too many places x0 is used directly instead of
// this accessor. Also, this function is not suitable for all places where
// x0 should be abstracted (e.g. when used as an argument), but some places
// assume that the first argument register is x0 and use this function
// instead. Considering that most of the register allocation is hard-coded
// in FullCodeGen, that it is unlikely we will need to change it
// extensively, and that abstracting the allocation through functions would
// not yield any performance benefit, the existence of this function is
// debatable.
Register FullCodeGenerator::result_register() {
  return x0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
  __ Str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ Ldr(dst, ContextMemOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    DCHECK(kSmiTag == 0);
    __ LoadNativeContextSlot(Context::CLOSURE_INDEX, x10);
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ Push(x10);
}


void FullCodeGenerator::EnterFinallyBlock() {
  ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
  DCHECK(!result_register().is(x10));
  // Preserve the result register while executing the finally block.
  // Also cook the return address in lr: push it to the stack as a
  // smi-encoded delta from the code object.
  __ Sub(x10, lr, Operand(masm_->CodeObject()));
  __ SmiTag(x10);
  __ Push(result_register(), x10);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Mov(x10, pending_message_obj);
  __ Ldr(x10, MemOperand(x10));
  __ Push(x10);

  ClearPendingMessage();
}
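
// Why the return address is "cooked": a raw code address held on the stack
// would go stale if the GC moved the code object while the finally block
// runs. Illustrative arithmetic, following the instructions above and their
// inverses in ExitFinallyBlock:
//
//   cooked = SmiTag(lr - CodeObject())          // pushed on entry
//   target = SmiUntag(cooked) + CodeObject()    // branched to on exit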


void FullCodeGenerator::ExitFinallyBlock() {
  ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
  DCHECK(!result_register().is(x10));

  // Restore pending message from stack.
  __ Pop(x10);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Mov(x13, pending_message_obj);
  __ Str(x10, MemOperand(x13));

  // Restore result register and cooked return address from the stack.
  __ Pop(x10, result_register());

  // Uncook the return address (see EnterFinallyBlock).
  __ SmiUntag(x10);
  __ Add(x11, x10, Operand(masm_->CodeObject()));
  __ Br(x11);
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(x10));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
  __ Mov(x13, pending_message_obj);
  __ Str(x10, MemOperand(x13));
}


void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorSlot slot) {
  DCHECK(!slot.IsInvalid());
  __ Mov(VectorStoreICTrampolineDescriptor::SlotRegister(), SmiFromSlot(slot));
}


#undef __


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  // Turn the jump into a nop.
  Address branch_address = pc - 3 * kInstructionSize;
  Isolate* isolate = unoptimized_code->GetIsolate();
  PatchingAssembler patcher(isolate, branch_address, 1);

  DCHECK(Instruction::Cast(branch_address)
             ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
         (Instruction::Cast(branch_address)->IsCondBranchImm() &&
          Instruction::Cast(branch_address)->ImmPCOffset() ==
              6 * kInstructionSize));

  switch (target_state) {
    case INTERRUPT:
      //  <decrement profiling counter>
      //  .. .. .. ..       b.pl ok
      //  .. .. .. ..       ldr x16, pc+<interrupt stub address>
      //  .. .. .. ..       blr x16
      //  ... more instructions.
      //  ok-label
      // Jump offset is 6 instructions.
      patcher.b(6, pl);
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //  <decrement profiling counter>
      //  .. .. .. ..       mov x0, x0 (NOP)
      //  .. .. .. ..       ldr x16, pc+<on-stack replacement address>
      //  .. .. .. ..       blr x16
      patcher.nop(Assembler::INTERRUPT_CODE_NOP);
      break;
  }

  // Replace the call address.
  Instruction* load = Instruction::Cast(pc)->preceding(2);
  Address interrupt_address_pointer =
      reinterpret_cast<Address>(load) + load->ImmPCOffset();
  DCHECK((Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(
              isolate->builtins()->OnStackReplacement()->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(
              isolate->builtins()->InterruptCheck()->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(
              isolate->builtins()->OsrAfterStackCheck()->entry())));
  Memory::uint64_at(interrupt_address_pointer) =
      reinterpret_cast<uint64_t>(replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
}
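
// In summary, a back-edge site toggles between two one-instruction states
// (a sketch, following the sequences in the comments above):
//
//   INTERRUPT:               b.pl ok     ; skip the call while the counter
//                                        ; is still non-negative
//   ON_STACK_REPLACEMENT /
//   OSR_AFTER_STACK_CHECK:   mov x0, x0  ; INTERRUPT_CODE_NOP, always call
//
// with the subsequent `ldr x16, <literal>; blr x16` left in place and only
// the 64-bit literal-pool slot rewritten to the desired builtin's entry.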


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  // TODO(jbramley): There should be some extra assertions here (as in the ARM
  // back-end), but this function is gone in bleeding_edge so it might not
  // matter anyway.
  Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);

  if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
    Instruction* load = Instruction::Cast(pc)->preceding(2);
    uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
                                       load->ImmPCOffset());
    if (entry == reinterpret_cast<uint64_t>(
        isolate->builtins()->OnStackReplacement()->entry())) {
      return ON_STACK_REPLACEMENT;
    } else if (entry == reinterpret_cast<uint64_t>(
        isolate->builtins()->OsrAfterStackCheck()->entry())) {
      return OSR_AFTER_STACK_CHECK;
    } else {
      UNREACHABLE();
    }
  }

  return INTERRUPT;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM64