1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_ARM64
6
7 #include "src/full-codegen/full-codegen.h"
8 #include "src/ast/compile-time-value.h"
9 #include "src/ast/scopes.h"
10 #include "src/code-factory.h"
11 #include "src/code-stubs.h"
12 #include "src/codegen.h"
13 #include "src/compilation-info.h"
14 #include "src/compiler.h"
15 #include "src/debug/debug.h"
16 #include "src/ic/ic.h"
17
18 #include "src/arm64/code-stubs-arm64.h"
19 #include "src/arm64/frames-arm64.h"
20 #include "src/arm64/macro-assembler-arm64.h"
21
22 namespace v8 {
23 namespace internal {
24
25 #define __ ACCESS_MASM(masm())
26
27 class JumpPatchSite BASE_EMBEDDED {
28 public:
29 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
30 #ifdef DEBUG
31 info_emitted_ = false;
32 #endif
33 }
34
35 ~JumpPatchSite() {
36 if (patch_site_.is_bound()) {
37 DCHECK(info_emitted_);
38 } else {
39 DCHECK(reg_.IsNone());
40 }
41 }
42
43 void EmitJumpIfNotSmi(Register reg, Label* target) {
44 // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
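// (A note on the patching contract, inferred from the emitters in this class
// and hedged accordingly: before patching, the tbz/tbnz below tests bit 0 of
// xzr, so its outcome is fixed; the patcher is expected to rewrite it to test
// the smi tag bit of reg_, which EmitPatchInfo records for that purpose.)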
45 InstructionAccurateScope scope(masm_, 1);
46 DCHECK(!info_emitted_);
47 DCHECK(reg.Is64Bits());
48 DCHECK(!reg.Is(csp));
49 reg_ = reg;
50 __ bind(&patch_site_);
51 __ tbz(xzr, 0, target); // Always taken before patched.
52 }
53
54 void EmitJumpIfSmi(Register reg, Label* target) {
55 // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
56 InstructionAccurateScope scope(masm_, 1);
57 DCHECK(!info_emitted_);
58 DCHECK(reg.Is64Bits());
59 DCHECK(!reg.Is(csp));
60 reg_ = reg;
61 __ bind(&patch_site_);
62 __ tbnz(xzr, 0, target); // Never taken before patched.
63 }
64
65 void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
66 UseScratchRegisterScope temps(masm_);
67 Register temp = temps.AcquireX();
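// If either value has its smi tag bit (bit 0) set, the bitwise OR of the two
// has it set as well, so a single not-smi check on the combined value covers
// both registers.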
68 __ Orr(temp, reg1, reg2);
69 EmitJumpIfNotSmi(temp, target);
70 }
71
72 void EmitPatchInfo() {
73 Assembler::BlockPoolsScope scope(masm_);
74 InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
75 #ifdef DEBUG
76 info_emitted_ = true;
77 #endif
78 }
79
80 private:
81 MacroAssembler* masm() { return masm_; }
82 MacroAssembler* masm_;
83 Label patch_site_;
84 Register reg_;
85 #ifdef DEBUG
86 bool info_emitted_;
87 #endif
88 };
89
90
91 // Generate code for a JS function. On entry to the function the receiver
92 // and arguments have been pushed on the stack left to right. The actual
93 // argument count matches the formal parameter count expected by the
94 // function.
95 //
96 // The live registers are:
97 // - x1: the JS function object being called (i.e. ourselves).
98 // - x3: the new target value
99 // - cp: our context.
100 // - fp: our caller's frame pointer.
101 // - jssp: stack pointer.
102 // - lr: return address.
103 //
104 // The function builds a JS frame. See JavaScriptFrameConstants in
105 // frames-arm64.h for its layout.
106 void FullCodeGenerator::Generate() {
107 CompilationInfo* info = info_;
108 profiling_counter_ = isolate()->factory()->NewCell(
109 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
110 SetFunctionPosition(literal());
111 Comment cmnt(masm_, "[ Function compiled by full code generator");
112
113 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
114
115 if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
116 int receiver_offset = info->scope()->num_parameters() * kXRegSize;
117 __ Peek(x10, receiver_offset);
118 __ AssertNotSmi(x10);
119 __ CompareObjectType(x10, x10, x11, FIRST_JS_RECEIVER_TYPE);
120 __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
121 }
122
123 // Open a frame scope to indicate that there is a frame on the stack.
124 // The MANUAL indicates that the scope shouldn't actually generate code
125 // to set up the frame because we do it manually below.
126 FrameScope frame_scope(masm_, StackFrame::MANUAL);
127
128 // This call emits the following sequence in a way that can be patched for
129 // code ageing support:
130 // Push(lr, fp, cp, x1);
131 // Add(fp, jssp, 2 * kPointerSize);
132 info->set_prologue_offset(masm_->pc_offset());
133 __ Prologue(info->GeneratePreagedPrologue());
134
135 // Increment invocation count for the function.
136 {
137 Comment cmnt(masm_, "[ Increment invocation count");
138 __ Ldr(x11, FieldMemOperand(x1, JSFunction::kLiteralsOffset));
139 __ Ldr(x11, FieldMemOperand(x11, LiteralsArray::kFeedbackVectorOffset));
140 __ Ldr(x10, FieldMemOperand(x11, TypeFeedbackVector::kInvocationCountIndex *
141 kPointerSize +
142 TypeFeedbackVector::kHeaderSize));
143 __ Add(x10, x10, Operand(Smi::FromInt(1)));
144 __ Str(x10, FieldMemOperand(x11, TypeFeedbackVector::kInvocationCountIndex *
145 kPointerSize +
146 TypeFeedbackVector::kHeaderSize));
147 }
148
149 // Reserve space on the stack for locals.
150 { Comment cmnt(masm_, "[ Allocate locals");
151 int locals_count = info->scope()->num_stack_slots();
152 // Generators allocate locals, if any, in context slots.
153 DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
154 OperandStackDepthIncrement(locals_count);
155 if (locals_count > 0) {
156 if (locals_count >= 128) {
157 Label ok;
158 DCHECK(jssp.Is(__ StackPointer()));
159 __ Sub(x10, jssp, locals_count * kPointerSize);
160 __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
161 __ B(hs, &ok);
162 __ CallRuntime(Runtime::kThrowStackOverflow);
163 __ Bind(&ok);
164 }
165 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
166 if (FLAG_optimize_for_size) {
167 __ PushMultipleTimes(x10, locals_count);
168 } else {
169 const int kMaxPushes = 32;
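// Push undefined in batches of kMaxPushes inside a counted loop, then emit
// the remainder (fewer than kMaxPushes) as straight-line pushes below.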
170 if (locals_count >= kMaxPushes) {
171 int loop_iterations = locals_count / kMaxPushes;
172 __ Mov(x2, loop_iterations);
173 Label loop_header;
174 __ Bind(&loop_header);
175 // Do pushes.
176 __ PushMultipleTimes(x10, kMaxPushes);
177 __ Subs(x2, x2, 1);
178 __ B(ne, &loop_header);
179 }
180 int remaining = locals_count % kMaxPushes;
181 // Emit the remaining pushes.
182 __ PushMultipleTimes(x10, remaining);
183 }
184 }
185 }
186
187 bool function_in_register_x1 = true;
188
189 if (info->scope()->NeedsContext()) {
190 // Argument to NewContext is the function, which is still in x1.
191 Comment cmnt(masm_, "[ Allocate context");
192 bool need_write_barrier = true;
193 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
194 if (info->scope()->is_script_scope()) {
195 __ Mov(x10, Operand(info->scope()->scope_info()));
196 __ Push(x1, x10);
197 __ CallRuntime(Runtime::kNewScriptContext);
198 PrepareForBailoutForId(BailoutId::ScriptContext(),
199 BailoutState::TOS_REGISTER);
200 // The new target value is not used, so clobbering is safe.
201 DCHECK_NULL(info->scope()->new_target_var());
202 } else {
203 if (info->scope()->new_target_var() != nullptr) {
204 __ Push(x3); // Preserve new target.
205 }
206 if (slots <= FastNewFunctionContextStub::kMaximumSlots) {
207 FastNewFunctionContextStub stub(isolate());
208 __ Mov(FastNewFunctionContextDescriptor::SlotsRegister(), slots);
209 __ CallStub(&stub);
210 // Result of FastNewFunctionContextStub is always in new space.
211 need_write_barrier = false;
212 } else {
213 __ Push(x1);
214 __ CallRuntime(Runtime::kNewFunctionContext);
215 }
216 if (info->scope()->new_target_var() != nullptr) {
217 __ Pop(x3); // Restore new target.
218 }
219 }
220 function_in_register_x1 = false;
221 // Context is returned in x0. It replaces the context passed to us.
222 // It's saved on the stack and kept live in cp.
223 __ Mov(cp, x0);
224 __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
225 // Copy any necessary parameters into the context.
226 int num_parameters = info->scope()->num_parameters();
227 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
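// An index of -1 denotes the receiver, which is handled below like an extra
// leading parameter when it is context-allocated.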
228 for (int i = first_parameter; i < num_parameters; i++) {
229 Variable* var =
230 (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
231 if (var->IsContextSlot()) {
232 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
233 (num_parameters - 1 - i) * kPointerSize;
234 // Load parameter from stack.
235 __ Ldr(x10, MemOperand(fp, parameter_offset));
236 // Store it in the context.
237 MemOperand target = ContextMemOperand(cp, var->index());
238 __ Str(x10, target);
239
240 // Update the write barrier.
241 if (need_write_barrier) {
242 __ RecordWriteContextSlot(cp, static_cast<int>(target.offset()), x10,
243 x11, kLRHasBeenSaved, kDontSaveFPRegs);
244 } else if (FLAG_debug_code) {
245 Label done;
246 __ JumpIfInNewSpace(cp, &done);
247 __ Abort(kExpectedNewSpaceObject);
248 __ bind(&done);
249 }
250 }
251 }
252 }
253
254 // The registers holding the function and the new target are both trashed if we
255 // bail out here. But since that can happen only when the new target is not used
256 // and we allocate a context, the value of |function_in_register_x1| is correct.
257 PrepareForBailoutForId(BailoutId::FunctionContext(),
258 BailoutState::NO_REGISTERS);
259
260 // Possibly set up a local binding to the 'this function' variable, which is
261 // used in derived constructors with super calls.
262 Variable* this_function_var = info->scope()->this_function_var();
263 if (this_function_var != nullptr) {
264 Comment cmnt(masm_, "[ This function");
265 if (!function_in_register_x1) {
266 __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
267 // The write barrier clobbers the register again; keep it marked as such.
268 }
269 SetVar(this_function_var, x1, x0, x2);
270 }
271
272 // Possibly set up a local binding to the new target value.
273 Variable* new_target_var = info->scope()->new_target_var();
274 if (new_target_var != nullptr) {
275 Comment cmnt(masm_, "[ new.target");
276 SetVar(new_target_var, x3, x0, x2);
277 }
278
279 // Possibly allocate a rest parameter array.
280 Variable* rest_param = info->scope()->rest_parameter();
281 if (rest_param != nullptr) {
282 Comment cmnt(masm_, "[ Allocate rest parameter array");
283 if (!function_in_register_x1) {
284 __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
285 }
286 FastNewRestParameterStub stub(isolate());
287 __ CallStub(&stub);
288 function_in_register_x1 = false;
289 SetVar(rest_param, x0, x1, x2);
290 }
291
292 Variable* arguments = info->scope()->arguments();
293 if (arguments != NULL) {
294 // Function uses arguments object.
295 Comment cmnt(masm_, "[ Allocate arguments object");
296 if (!function_in_register_x1) {
297 // Load this again, if it's used by the local context below.
298 __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
299 }
300 if (is_strict(language_mode()) || !has_simple_parameters()) {
301 FastNewStrictArgumentsStub stub(isolate());
302 __ CallStub(&stub);
303 } else if (literal()->has_duplicate_parameters()) {
304 __ Push(x1);
305 __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
306 } else {
307 FastNewSloppyArgumentsStub stub(isolate());
308 __ CallStub(&stub);
309 }
310
311 SetVar(arguments, x0, x1, x2);
312 }
313
314 if (FLAG_trace) {
315 __ CallRuntime(Runtime::kTraceEnter);
316 }
317
318 // Visit the declarations and body.
319 PrepareForBailoutForId(BailoutId::FunctionEntry(),
320 BailoutState::NO_REGISTERS);
321 {
322 Comment cmnt(masm_, "[ Declarations");
323 VisitDeclarations(scope()->declarations());
324 }
325
326 // Assert that the declarations do not use ICs. Otherwise the debugger
327 // won't be able to redirect a PC at an IC to the correct IC in newly
328 // recompiled code.
329 DCHECK_EQ(0, ic_total_count_);
330
331 {
332 Comment cmnt(masm_, "[ Stack check");
333 PrepareForBailoutForId(BailoutId::Declarations(),
334 BailoutState::NO_REGISTERS);
335 Label ok;
336 DCHECK(jssp.Is(__ StackPointer()));
337 __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
338 __ B(hs, &ok);
339 PredictableCodeSizeScope predictable(masm_,
340 Assembler::kCallSizeWithRelocation);
341 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
342 __ Bind(&ok);
343 }
344
345 {
346 Comment cmnt(masm_, "[ Body");
347 DCHECK(loop_depth() == 0);
348 VisitStatements(literal()->body());
349 DCHECK(loop_depth() == 0);
350 }
351
352 // Always emit a 'return undefined' in case control fell off the end of
353 // the body.
354 { Comment cmnt(masm_, "[ return <undefined>;");
355 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
356 }
357 EmitReturnSequence();
358
359 // Force emission of the pools, so they don't get emitted in the middle
360 // of the back edge table.
361 masm()->CheckVeneerPool(true, false);
362 masm()->CheckConstPool(true, false);
363 }
364
365 void FullCodeGenerator::ClearAccumulator() { __ Mov(x0, Smi::kZero); }
366
367 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
368 __ Mov(x2, Operand(profiling_counter_));
369 __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
370 __ Subs(x3, x3, Smi::FromInt(delta));
371 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
372 }
373
374
375 void FullCodeGenerator::EmitProfilingCounterReset() {
376 int reset_value = FLAG_interrupt_budget;
377 __ Mov(x2, Operand(profiling_counter_));
378 __ Mov(x3, Smi::FromInt(reset_value));
379 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
380 }
381
382
383 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
384 Label* back_edge_target) {
385 DCHECK(jssp.Is(__ StackPointer()));
386 Comment cmnt(masm_, "[ Back edge bookkeeping");
387 // Block literal pools whilst emitting back edge code.
388 Assembler::BlockPoolsScope block_const_pool(masm_);
389 Label ok;
390
391 DCHECK(back_edge_target->is_bound());
392 // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
393 // to reduce the absolute error due to the integer division. To do that,
394 // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to
395 // the result).
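// (Illustrative only: with a kCodeSizeMultiplier of 100, a 270-byte back
// edge would yield (270 + 50) / 100 == 3 rather than the floored value 2.)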
396 int distance =
397 static_cast<int>(masm_->SizeOfCodeGeneratedSince(back_edge_target) +
398 kCodeSizeMultiplier / 2);
399 int weight = Min(kMaxBackEdgeWeight,
400 Max(1, distance / kCodeSizeMultiplier));
401 EmitProfilingCounterDecrement(weight);
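// EmitProfilingCounterDecrement uses a flag-setting Subs, so taking the 'pl'
// branch below (counter still non-negative) skips the interrupt; the
// InterruptCheck builtin is only reached once the budget goes negative.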
402 __ B(pl, &ok);
403 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
404
405 // Record a mapping of this PC offset to the OSR id. This is used to find
406 // the AST id from the unoptimized code in order to use it as a key into
407 // the deoptimization input data found in the optimized code.
408 RecordBackEdge(stmt->OsrEntryId());
409
410 EmitProfilingCounterReset();
411
412 __ Bind(&ok);
413 PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
414 // Record a mapping of the OSR id to this PC. This is used if the OSR
415 // entry becomes the target of a bailout. We don't expect it to be, but
416 // we want it to work if it is.
417 PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
418 }
419
420 void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
421 bool is_tail_call) {
422 // Pretend that the exit is a backwards jump to the entry.
423 int weight = 1;
424 if (info_->ShouldSelfOptimize()) {
425 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
426 } else {
427 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
428 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
429 }
430 EmitProfilingCounterDecrement(weight);
431 Label ok;
432 __ B(pl, &ok);
433 // Don't need to save result register if we are going to do a tail call.
434 if (!is_tail_call) {
435 __ Push(x0);
436 }
437 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
438 if (!is_tail_call) {
439 __ Pop(x0);
440 }
441 EmitProfilingCounterReset();
442 __ Bind(&ok);
443 }
444
445 void FullCodeGenerator::EmitReturnSequence() {
446 Comment cmnt(masm_, "[ Return sequence");
447
448 if (return_label_.is_bound()) {
449 __ B(&return_label_);
450
451 } else {
452 __ Bind(&return_label_);
453 if (FLAG_trace) {
454 // Push the return value on the stack as the parameter.
455 // Runtime::TraceExit returns its parameter in x0.
456 __ Push(result_register());
457 __ CallRuntime(Runtime::kTraceExit);
458 DCHECK(x0.Is(result_register()));
459 }
460 EmitProfilingCounterHandlingForReturnSequence(false);
461
462 SetReturnPosition(literal());
463 const Register& current_sp = __ StackPointer();
464 // Nothing ensures 16-byte alignment here.
465 DCHECK(!current_sp.Is(csp));
466 __ Mov(current_sp, fp);
467 __ Ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
468 // Drop the arguments and receiver and return.
469 // TODO(all): This implementation is overkill as it supports 2**31+1
470 // arguments, consider how to improve it without creating a security
471 // hole.
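// The drop amount is emitted as a 64-bit literal just after the Ret (the
// dc64 below); the pc-relative load three instructions ahead pulls it into
// ip0, so the argument count travels alongside the code.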
472 __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
473 __ Add(current_sp, current_sp, ip0);
474 __ Ret();
475 int32_t arg_count = info_->scope()->num_parameters() + 1;
476 __ dc64(kXRegSize * arg_count);
477 }
478 }
479
480 void FullCodeGenerator::RestoreContext() {
481 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
482 }
483
484 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
485 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
486 codegen()->GetVar(result_register(), var);
487 codegen()->PushOperand(result_register());
488 }
489
490
491 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
492 // Root values have no side effects.
493 }
494
495
496 void FullCodeGenerator::AccumulatorValueContext::Plug(
497 Heap::RootListIndex index) const {
498 __ LoadRoot(result_register(), index);
499 }
500
501
502 void FullCodeGenerator::StackValueContext::Plug(
503 Heap::RootListIndex index) const {
504 __ LoadRoot(result_register(), index);
505 codegen()->PushOperand(result_register());
506 }
507
508
509 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
510 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
511 false_label_);
512 if (index == Heap::kUndefinedValueRootIndex ||
513 index == Heap::kNullValueRootIndex ||
514 index == Heap::kFalseValueRootIndex) {
515 if (false_label_ != fall_through_) __ B(false_label_);
516 } else if (index == Heap::kTrueValueRootIndex) {
517 if (true_label_ != fall_through_) __ B(true_label_);
518 } else {
519 __ LoadRoot(result_register(), index);
520 codegen()->DoTest(this);
521 }
522 }
523
524
525 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
526 }
527
528
529 void FullCodeGenerator::AccumulatorValueContext::Plug(
530 Handle<Object> lit) const {
531 __ Mov(result_register(), Operand(lit));
532 }
533
534
535 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
536 // Immediates cannot be pushed directly.
537 __ Mov(result_register(), Operand(lit));
538 codegen()->PushOperand(result_register());
539 }
540
541
542 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
543 codegen()->PrepareForBailoutBeforeSplit(condition(),
544 true,
545 true_label_,
546 false_label_);
547 DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
548 !lit->IsUndetectable());
549 if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
550 lit->IsFalse(isolate())) {
551 if (false_label_ != fall_through_) __ B(false_label_);
552 } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
553 if (true_label_ != fall_through_) __ B(true_label_);
554 } else if (lit->IsString()) {
555 if (String::cast(*lit)->length() == 0) {
556 if (false_label_ != fall_through_) __ B(false_label_);
557 } else {
558 if (true_label_ != fall_through_) __ B(true_label_);
559 }
560 } else if (lit->IsSmi()) {
561 if (Smi::cast(*lit)->value() == 0) {
562 if (false_label_ != fall_through_) __ B(false_label_);
563 } else {
564 if (true_label_ != fall_through_) __ B(true_label_);
565 }
566 } else {
567 // For simplicity we always test the accumulator register.
568 __ Mov(result_register(), Operand(lit));
569 codegen()->DoTest(this);
570 }
571 }
572
573
574 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
575 Register reg) const {
576 DCHECK(count > 0);
577 if (count > 1) codegen()->DropOperands(count - 1);
578 __ Poke(reg, 0);
579 }
580
581
582 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
583 Label* materialize_false) const {
584 DCHECK(materialize_true == materialize_false);
585 __ Bind(materialize_true);
586 }
587
588
589 void FullCodeGenerator::AccumulatorValueContext::Plug(
590 Label* materialize_true,
591 Label* materialize_false) const {
592 Label done;
593 __ Bind(materialize_true);
594 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
595 __ B(&done);
596 __ Bind(materialize_false);
597 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
598 __ Bind(&done);
599 }
600
601
602 void FullCodeGenerator::StackValueContext::Plug(
603 Label* materialize_true,
604 Label* materialize_false) const {
605 Label done;
606 __ Bind(materialize_true);
607 __ LoadRoot(x10, Heap::kTrueValueRootIndex);
608 __ B(&done);
609 __ Bind(materialize_false);
610 __ LoadRoot(x10, Heap::kFalseValueRootIndex);
611 __ Bind(&done);
612 codegen()->PushOperand(x10);
613 }
614
615
616 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
617 Label* materialize_false) const {
618 DCHECK(materialize_true == true_label_);
619 DCHECK(materialize_false == false_label_);
620 }
621
622
623 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
624 Heap::RootListIndex value_root_index =
625 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
626 __ LoadRoot(result_register(), value_root_index);
627 }
628
629
630 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
631 Heap::RootListIndex value_root_index =
632 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
633 __ LoadRoot(x10, value_root_index);
634 codegen()->PushOperand(x10);
635 }
636
637
638 void FullCodeGenerator::TestContext::Plug(bool flag) const {
639 codegen()->PrepareForBailoutBeforeSplit(condition(),
640 true,
641 true_label_,
642 false_label_);
643 if (flag) {
644 if (true_label_ != fall_through_) {
645 __ B(true_label_);
646 }
647 } else {
648 if (false_label_ != fall_through_) {
649 __ B(false_label_);
650 }
651 }
652 }
653
654
655 void FullCodeGenerator::DoTest(Expression* condition,
656 Label* if_true,
657 Label* if_false,
658 Label* fall_through) {
659 Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
660 CallIC(ic, condition->test_id());
661 __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
662 Split(eq, if_true, if_false, fall_through);
663 }
664
665
666 // If (cond), branch to if_true.
667 // If (!cond), branch to if_false.
668 // fall_through is used as an optimization in cases where only one branch
669 // instruction is necessary.
670 void FullCodeGenerator::Split(Condition cond,
671 Label* if_true,
672 Label* if_false,
673 Label* fall_through) {
674 if (if_false == fall_through) {
675 __ B(cond, if_true);
676 } else if (if_true == fall_through) {
677 DCHECK(if_false != fall_through);
678 __ B(NegateCondition(cond), if_false);
679 } else {
680 __ B(cond, if_true);
681 __ B(if_false);
682 }
683 }
684
685
686 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
687 // Offset is negative because higher indexes are at lower addresses.
688 int offset = -var->index() * kXRegSize;
689 // Adjust by a (parameter or local) base offset.
690 if (var->IsParameter()) {
691 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
692 } else {
693 offset += JavaScriptFrameConstants::kLocal0Offset;
694 }
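// For a parameter this resolves to fp + (num_parameters + 1 - index) *
// kPointerSize, i.e. the caller-pushed argument slot; for a local it
// resolves to kLocal0Offset - index * kXRegSize, below the frame pointer.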
695 return MemOperand(fp, offset);
696 }
697
698
699 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
700 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
701 if (var->IsContextSlot()) {
702 int context_chain_length = scope()->ContextChainLength(var->scope());
703 __ LoadContext(scratch, context_chain_length);
704 return ContextMemOperand(scratch, var->index());
705 } else {
706 return StackOperand(var);
707 }
708 }
709
710
711 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
712 // Use destination as scratch.
713 MemOperand location = VarOperand(var, dest);
714 __ Ldr(dest, location);
715 }
716
717
718 void FullCodeGenerator::SetVar(Variable* var,
719 Register src,
720 Register scratch0,
721 Register scratch1) {
722 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
723 DCHECK(!AreAliased(src, scratch0, scratch1));
724 MemOperand location = VarOperand(var, scratch0);
725 __ Str(src, location);
726
727 // Emit the write barrier code if the location is in the heap.
728 if (var->IsContextSlot()) {
729 // scratch0 contains the correct context.
730 __ RecordWriteContextSlot(scratch0, static_cast<int>(location.offset()),
731 src, scratch1, kLRHasBeenSaved, kDontSaveFPRegs);
732 }
733 }
734
735
736 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
737 bool should_normalize,
738 Label* if_true,
739 Label* if_false) {
740 // Only prepare for bailouts before splits if we're in a test
741 // context. Otherwise, we let the Visit function deal with the
742 // preparation to avoid preparing with the same AST id twice.
743 if (!context()->IsTest()) return;
744
745 // TODO(all): Investigate to see if there is something to work on here.
746 Label skip;
747 if (should_normalize) {
748 __ B(&skip);
749 }
750 PrepareForBailout(expr, BailoutState::TOS_REGISTER);
751 if (should_normalize) {
752 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
753 Split(eq, if_true, if_false, NULL);
754 __ Bind(&skip);
755 }
756 }
757
758
759 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
760 // The variable in the declaration always resides in the current function
761 // context.
762 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
763 if (FLAG_debug_code) {
764 // Check that we're not inside a with or catch context.
765 __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
766 __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
767 __ Check(ne, kDeclarationInWithContext);
768 __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
769 __ Check(ne, kDeclarationInCatchContext);
770 }
771 }
772
773
774 void FullCodeGenerator::VisitVariableDeclaration(
775 VariableDeclaration* declaration) {
776 VariableProxy* proxy = declaration->proxy();
777 Variable* variable = proxy->var();
778 switch (variable->location()) {
779 case VariableLocation::UNALLOCATED: {
780 DCHECK(!variable->binding_needs_init());
781 FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
782 DCHECK(!slot.IsInvalid());
783 globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
784 globals_->Add(isolate()->factory()->undefined_value(), zone());
785 break;
786 }
787 case VariableLocation::PARAMETER:
788 case VariableLocation::LOCAL:
789 if (variable->binding_needs_init()) {
790 Comment cmnt(masm_, "[ VariableDeclaration");
791 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
792 __ Str(x10, StackOperand(variable));
793 }
794 break;
795
796 case VariableLocation::CONTEXT:
797 if (variable->binding_needs_init()) {
798 Comment cmnt(masm_, "[ VariableDeclaration");
799 EmitDebugCheckDeclarationContext(variable);
800 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
801 __ Str(x10, ContextMemOperand(cp, variable->index()));
802 // No write barrier since the_hole_value is in old space.
803 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
804 }
805 break;
806
807 case VariableLocation::LOOKUP: {
808 Comment cmnt(masm_, "[ VariableDeclaration");
809 DCHECK_EQ(VAR, variable->mode());
810 DCHECK(!variable->binding_needs_init());
811 __ Mov(x2, Operand(variable->name()));
812 __ Push(x2);
813 __ CallRuntime(Runtime::kDeclareEvalVar);
814 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
815 break;
816 }
817
818 case VariableLocation::MODULE:
819 UNREACHABLE();
820 }
821 }
822
823
824 void FullCodeGenerator::VisitFunctionDeclaration(
825 FunctionDeclaration* declaration) {
826 VariableProxy* proxy = declaration->proxy();
827 Variable* variable = proxy->var();
828 switch (variable->location()) {
829 case VariableLocation::UNALLOCATED: {
830 FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
831 DCHECK(!slot.IsInvalid());
832 globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
833 Handle<SharedFunctionInfo> function =
834 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
835 // Check for stack overflow exception.
836 if (function.is_null()) return SetStackOverflow();
837 globals_->Add(function, zone());
838 break;
839 }
840
841 case VariableLocation::PARAMETER:
842 case VariableLocation::LOCAL: {
843 Comment cmnt(masm_, "[ Function Declaration");
844 VisitForAccumulatorValue(declaration->fun());
845 __ Str(result_register(), StackOperand(variable));
846 break;
847 }
848
849 case VariableLocation::CONTEXT: {
850 Comment cmnt(masm_, "[ Function Declaration");
851 EmitDebugCheckDeclarationContext(variable);
852 VisitForAccumulatorValue(declaration->fun());
853 __ Str(result_register(), ContextMemOperand(cp, variable->index()));
854 int offset = Context::SlotOffset(variable->index());
855 // We know that we have written a function, which is not a smi.
856 __ RecordWriteContextSlot(cp,
857 offset,
858 result_register(),
859 x2,
860 kLRHasBeenSaved,
861 kDontSaveFPRegs,
862 EMIT_REMEMBERED_SET,
863 OMIT_SMI_CHECK);
864 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
865 break;
866 }
867
868 case VariableLocation::LOOKUP: {
869 Comment cmnt(masm_, "[ Function Declaration");
870 __ Mov(x2, Operand(variable->name()));
871 PushOperand(x2);
872 // Push initial value for function declaration.
873 VisitForStackValue(declaration->fun());
874 CallRuntimeWithOperands(Runtime::kDeclareEvalFunction);
875 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
876 break;
877 }
878
879 case VariableLocation::MODULE:
880 UNREACHABLE();
881 }
882 }
883
884
885 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
886 // Call the runtime to declare the globals.
887 __ Mov(x11, Operand(pairs));
888 Register flags = xzr;
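// Smi::FromInt(0) is the zero tagged value, so when DeclareGlobalsFlags() is
// zero, pushing xzr is equivalent to pushing the flags smi and saves a Mov.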
889 if (Smi::FromInt(DeclareGlobalsFlags())) {
890 flags = x10;
891 __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
892 }
893 __ EmitLoadTypeFeedbackVector(x12);
894 __ Push(x11, flags, x12);
895 __ CallRuntime(Runtime::kDeclareGlobals);
896 // Return value is ignored.
897 }
898
899
900 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
901 ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
902 Comment cmnt(masm_, "[ SwitchStatement");
903 Breakable nested_statement(this, stmt);
904 SetStatementPosition(stmt);
905
906 // Keep the switch value on the stack until a case matches.
907 VisitForStackValue(stmt->tag());
908 PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
909
910 ZoneList<CaseClause*>* clauses = stmt->cases();
911 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
912
913 Label next_test; // Recycled for each test.
914 // Compile all the tests with branches to their bodies.
915 for (int i = 0; i < clauses->length(); i++) {
916 CaseClause* clause = clauses->at(i);
917 clause->body_target()->Unuse();
918
919 // The default is not a test; remember it as the final fall-through.
920 if (clause->is_default()) {
921 default_clause = clause;
922 continue;
923 }
924
925 Comment cmnt(masm_, "[ Case comparison");
926 __ Bind(&next_test);
927 next_test.Unuse();
928
929 // Compile the label expression.
930 VisitForAccumulatorValue(clause->label());
931
932 // Perform the comparison as if via '==='.
933 __ Peek(x1, 0); // Switch value.
934
935 JumpPatchSite patch_site(masm_);
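// Fast path: if both the switch value and this label are smis, compare them
// directly; the JumpPatchSite lets the smi check be patched later, and
// non-smi inputs fall through to the CompareIC below.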
936 if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
937 Label slow_case;
938 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
939 __ Cmp(x1, x0);
940 __ B(ne, &next_test);
941 __ Drop(1); // Switch value is no longer needed.
942 __ B(clause->body_target());
943 __ Bind(&slow_case);
944 }
945
946 // Record position before stub call for type feedback.
947 SetExpressionPosition(clause);
948 Handle<Code> ic =
949 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
950 CallIC(ic, clause->CompareId());
951 patch_site.EmitPatchInfo();
952
953 Label skip;
954 __ B(&skip);
955 PrepareForBailout(clause, BailoutState::TOS_REGISTER);
956 __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
957 __ Drop(1);
958 __ B(clause->body_target());
959 __ Bind(&skip);
960
961 __ Cbnz(x0, &next_test);
962 __ Drop(1); // Switch value is no longer needed.
963 __ B(clause->body_target());
964 }
965
966 // Discard the test value and jump to the default if present, otherwise to
967 // the end of the statement.
968 __ Bind(&next_test);
969 DropOperands(1); // Switch value is no longer needed.
970 if (default_clause == NULL) {
971 __ B(nested_statement.break_label());
972 } else {
973 __ B(default_clause->body_target());
974 }
975
976 // Compile all the case bodies.
977 for (int i = 0; i < clauses->length(); i++) {
978 Comment cmnt(masm_, "[ Case body");
979 CaseClause* clause = clauses->at(i);
980 __ Bind(clause->body_target());
981 PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
982 VisitStatements(clause->statements());
983 }
984
985 __ Bind(nested_statement.break_label());
986 PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
987 }
988
989
990 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
991 ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
992 Comment cmnt(masm_, "[ ForInStatement");
993 SetStatementPosition(stmt, SKIP_BREAK);
994
995 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
996
997 // TODO(all): This visitor probably needs better comments and a revisit.
998
999 // Get the object to enumerate over.
1000 SetExpressionAsStatementPosition(stmt->enumerable());
1001 VisitForAccumulatorValue(stmt->enumerable());
1002 OperandStackDepthIncrement(5);
1003
1004 Label loop, exit;
1005 Iteration loop_statement(this, stmt);
1006 increment_loop_depth();
1007
1008 // If the object is null or undefined, skip over the loop, otherwise convert
1009 // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
1010 Label convert, done_convert;
1011 __ JumpIfSmi(x0, &convert);
1012 __ JumpIfObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE, &done_convert, ge);
1013 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, &exit);
1014 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
1015 __ Bind(&convert);
1016 __ Call(isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET);
1017 RestoreContext();
1018 __ Bind(&done_convert);
1019 PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
1020 __ Push(x0);
1021
1022 // Check cache validity in generated code. If we cannot guarantee cache
1023 // validity, call the runtime system to check cache validity or get the
1024 // property names in a fixed array. Note: Proxies never have an enum cache,
1025 // so will always take the slow path.
1026 Label call_runtime;
1027 __ CheckEnumCache(x0, x15, x10, x11, x12, x13, &call_runtime);
1028
1029 // The enum cache is valid. Load the map of the object being
1030 // iterated over and use the cache for the iteration.
1031 Label use_cache;
1032 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
1033 __ B(&use_cache);
1034
1035 // Get the set of properties to enumerate.
1036 __ Bind(&call_runtime);
1037 __ Push(x0); // Duplicate the enumerable object on the stack.
1038 __ CallRuntime(Runtime::kForInEnumerate);
1039 PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
1040
1041 // If we got a map from the runtime call, we can do a fast
1042 // modification check. Otherwise, we got a fixed array, and we have
1043 // to do a slow check.
1044 Label fixed_array, no_descriptors;
1045 __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
1046 __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);
1047
1048 // We got a map in register x0. Get the enumeration cache from it.
1049 __ Bind(&use_cache);
1050
1051 __ EnumLengthUntagged(x1, x0);
1052 __ Cbz(x1, &no_descriptors);
1053
1054 __ LoadInstanceDescriptors(x0, x2);
1055 __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
1056 __ Ldr(x2,
1057 FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1058
1059 // Set up the four remaining stack slots.
1060 __ SmiTag(x1);
1061 // Map, enumeration cache, enum cache length, zero (the last two as smis).
1062 __ Push(x0, x2, x1, xzr);
1063 __ B(&loop);
1064
1065 __ Bind(&no_descriptors);
1066 __ Drop(1);
1067 __ B(&exit);
1068
1069 // We got a fixed array in register x0. Iterate through that.
1070 __ Bind(&fixed_array);
1071
1072 __ Mov(x1, Smi::FromInt(1)); // Smi(1) indicates slow check.
1073 __ Ldr(x2, FieldMemOperand(x0, FixedArray::kLengthOffset));
1074 __ Push(x1, x0, x2); // Smi and array, fixed array length (as smi).
1075 PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
1076 __ Push(xzr); // Initial index.
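// Loop state on the stack, from the top: index (smi), length (smi), the enum
// cache or fixed array, the map or the Smi(1) slow-path marker, and the
// enumerable object itself. The Peeks in the loop below rely on this layout.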
1077
1078 // Generate code for doing the condition check.
1079 __ Bind(&loop);
1080 SetExpressionAsStatementPosition(stmt->each());
1081
1082 // Load the current count to x0, load the length to x1.
1083 __ PeekPair(x0, x1, 0);
1084 __ Cmp(x0, x1); // Compare to the array length.
1085 __ B(hs, loop_statement.break_label());
1086
1087 // Get the current entry of the array into register x0.
1088 __ Peek(x10, 2 * kXRegSize);
1089 __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
1090 __ Ldr(x0, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));
1091
1092 // Get the expected map (or, in the permanent slow case, the smi marker) from
1093 // the stack into register x2.
1094 __ Peek(x2, 3 * kXRegSize);
1095
1096 // Check if the expected map still matches that of the enumerable.
1097 // If not, we may have to filter the key.
1098 Label update_each;
1099 __ Peek(x1, 4 * kXRegSize);
1100 __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
1101 __ Cmp(x11, x2);
1102 __ B(eq, &update_each);
1103
1104 // We need to filter the key, record slow-path here.
1105 int const vector_index = SmiFromSlot(slot)->value();
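// Writing the megamorphic sentinel into the feedback slot records that this
// for-in hit the slow (filtered) path, presumably so the optimizing compilers
// do not assume the enum-cache fast case for it later.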
1106 __ EmitLoadTypeFeedbackVector(x3);
1107 __ Mov(x10, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1108 __ Str(x10, FieldMemOperand(x3, FixedArray::OffsetOfElementAt(vector_index)));
1109
1110 // x0 contains the key. The receiver in x1 is the second argument to the
1111 // ForInFilter. ForInFilter returns undefined if the receiver doesn't
1112 // have the key or returns the name-converted key.
1113 __ Call(isolate()->builtins()->ForInFilter(), RelocInfo::CODE_TARGET);
1114 RestoreContext();
1115 PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
1116 __ CompareRoot(result_register(), Heap::kUndefinedValueRootIndex);
1117 __ B(eq, loop_statement.continue_label());
1118
1119 // Update the 'each' property or variable from the possibly filtered
1120 // entry in register x0.
1121 __ Bind(&update_each);
1122 // Perform the assignment as if via '='.
1123 { EffectContext context(this);
1124 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1125 PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
1126 }
1127
1128 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1129 PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
1130 // Generate code for the body of the loop.
1131 Visit(stmt->body());
1132
1133 // Generate code for going to the next element by incrementing
1134 // the index (smi) stored on top of the stack.
1135 __ Bind(loop_statement.continue_label());
1136 PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
1137 // TODO(all): We could use a callee saved register to avoid popping.
1138 __ Pop(x0);
1139 __ Add(x0, x0, Smi::FromInt(1));
1140 __ Push(x0);
1141
1142 EmitBackEdgeBookkeeping(stmt, &loop);
1143 __ B(&loop);
1144
1145 // Remove the pointers stored on the stack.
1146 __ Bind(loop_statement.break_label());
1147 DropOperands(5);
1148
1149 // Exit and decrement the loop depth.
1150 PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
1151 __ Bind(&exit);
1152 decrement_loop_depth();
1153 }
1154
1155
1156 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1157 FeedbackVectorSlot slot) {
1158 DCHECK(NeedsHomeObject(initializer));
1159 __ Peek(StoreDescriptor::ReceiverRegister(), 0);
1160 __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
1161 CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1162 }
1163
1164
1165 void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1166 int offset,
1167 FeedbackVectorSlot slot) {
1168 DCHECK(NeedsHomeObject(initializer));
1169 __ Move(StoreDescriptor::ReceiverRegister(), x0);
1170 __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
1171 CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1172 }
1173
1174
1175 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1176 TypeofMode typeof_mode,
1177 Label* slow) {
1178 Register current = cp;
1179 Register next = x10;
1180 Register temp = x11;
1181
1182 int to_check = scope()->ContextChainLengthUntilOutermostSloppyEval();
1183 for (Scope* s = scope(); to_check > 0; s = s->outer_scope()) {
1184 if (!s->NeedsContext()) continue;
1185 if (s->calls_sloppy_eval()) {
1186 // Check that extension is "the hole".
1187 __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1188 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1189 }
1190 // Load next context in chain.
1191 __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1192 // Walk the rest of the chain without clobbering cp.
1193 current = next;
1194 to_check--;
1195 }
1196
1197 // All extension objects were empty and it is safe to use the normal global
1198 // load machinery.
1199 EmitGlobalVariableLoad(proxy, typeof_mode);
1200 }
1201
1202
1203 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1204 Label* slow) {
1205 DCHECK(var->IsContextSlot());
1206 Register context = cp;
1207 Register next = x10;
1208 Register temp = x11;
1209
1210 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1211 if (s->NeedsContext()) {
1212 if (s->calls_sloppy_eval()) {
1213 // Check that extension is "the hole".
1214 __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1215 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1216 }
1217 __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1218 // Walk the rest of the chain without clobbering cp.
1219 context = next;
1220 }
1221 }
1222 // Check that last extension is "the hole".
1223 __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1224 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1225
1226 // This function is used only for loads, not stores, so it's safe to
1227 // return a cp-based operand (the write barrier cannot be allowed to
1228 // destroy the cp register).
1229 return ContextMemOperand(context, var->index());
1230 }
1231
1232
1233 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1234 TypeofMode typeof_mode,
1235 Label* slow, Label* done) {
1236 // Generate fast-case code for variables that might be shadowed by
1237 // eval-introduced variables. Eval is used a lot without
1238 // introducing variables. In those cases, we do not want to
1239 // perform a runtime call for all variables in the scope
1240 // containing the eval.
1241 Variable* var = proxy->var();
1242 if (var->mode() == DYNAMIC_GLOBAL) {
1243 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1244 __ B(done);
1245 } else if (var->mode() == DYNAMIC_LOCAL) {
1246 Variable* local = var->local_if_not_shadowed();
1247 __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
1248 if (local->binding_needs_init()) {
1249 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
1250 __ Mov(x0, Operand(var->name()));
1251 __ Push(x0);
1252 __ CallRuntime(Runtime::kThrowReferenceError);
1253 } else {
1254 __ B(done);
1255 }
1256 }
1257 }
1258
1259 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1260 TypeofMode typeof_mode) {
1261 // Record position before possible IC call.
1262 SetExpressionPosition(proxy);
1263 PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
1264 Variable* var = proxy->var();
1265
1266 // Three cases: global variables, lookup variables, and all other types of
1267 // variables.
1268 switch (var->location()) {
1269 case VariableLocation::UNALLOCATED: {
1270 Comment cmnt(masm_, "Global variable");
1271 EmitGlobalVariableLoad(proxy, typeof_mode);
1272 context()->Plug(x0);
1273 break;
1274 }
1275
1276 case VariableLocation::PARAMETER:
1277 case VariableLocation::LOCAL:
1278 case VariableLocation::CONTEXT: {
1279 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1280 Comment cmnt(masm_, var->IsContextSlot()
1281 ? "Context variable"
1282 : "Stack variable");
1283 if (proxy->hole_check_mode() == HoleCheckMode::kRequired) {
1284 // Throw a reference error when using an uninitialized let/const
1285 // binding in harmony mode.
1286 Label done;
1287 GetVar(x0, var);
1288 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
1289 __ Mov(x0, Operand(var->name()));
1290 __ Push(x0);
1291 __ CallRuntime(Runtime::kThrowReferenceError);
1292 __ Bind(&done);
1293 context()->Plug(x0);
1294 break;
1295 }
1296 context()->Plug(var);
1297 break;
1298 }
1299
1300 case VariableLocation::LOOKUP: {
1301 Label done, slow;
1302 // Generate code for loading from variables potentially shadowed by
1303 // eval-introduced variables.
1304 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1305 __ Bind(&slow);
1306 Comment cmnt(masm_, "Lookup variable");
1307 __ Push(var->name());
1308 Runtime::FunctionId function_id =
1309 typeof_mode == NOT_INSIDE_TYPEOF
1310 ? Runtime::kLoadLookupSlot
1311 : Runtime::kLoadLookupSlotInsideTypeof;
1312 __ CallRuntime(function_id);
1313 __ Bind(&done);
1314 context()->Plug(x0);
1315 break;
1316 }
1317
1318 case VariableLocation::MODULE:
1319 UNREACHABLE();
1320 }
1321 }
1322
1323
1324 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1325 Expression* expression = (property == NULL) ? NULL : property->value();
1326 if (expression == NULL) {
1327 __ LoadRoot(x10, Heap::kNullValueRootIndex);
1328 PushOperand(x10);
1329 } else {
1330 VisitForStackValue(expression);
1331 if (NeedsHomeObject(expression)) {
1332 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1333 property->kind() == ObjectLiteral::Property::SETTER);
1334 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1335 EmitSetHomeObject(expression, offset, property->GetSlot());
1336 }
1337 }
1338 }
1339
1340
1341 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1342 Comment cmnt(masm_, "[ ObjectLiteral");
1343
1344 Handle<FixedArray> constant_properties = expr->constant_properties();
1345 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1346 __ Mov(x2, Smi::FromInt(expr->literal_index()));
1347 __ Mov(x1, Operand(constant_properties));
1348 int flags = expr->ComputeFlags();
1349 __ Mov(x0, Smi::FromInt(flags));
1350 if (MustCreateObjectLiteralWithRuntime(expr)) {
1351 __ Push(x3, x2, x1, x0);
1352 __ CallRuntime(Runtime::kCreateObjectLiteral);
1353 } else {
1354 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1355 __ CallStub(&stub);
1356 RestoreContext();
1357 }
1358 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1359
1360 // If result_saved is true the result is on top of the stack. If
1361 // result_saved is false the result is in x0.
1362 bool result_saved = false;
1363
1364 AccessorTable accessor_table(zone());
1365 int property_index = 0;
1366 for (; property_index < expr->properties()->length(); property_index++) {
1367 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1368 if (property->is_computed_name()) break;
1369 if (property->IsCompileTimeValue()) continue;
1370
1371 Literal* key = property->key()->AsLiteral();
1372 Expression* value = property->value();
1373 if (!result_saved) {
1374 PushOperand(x0); // Save result on stack
1375 result_saved = true;
1376 }
1377 switch (property->kind()) {
1378 case ObjectLiteral::Property::CONSTANT:
1379 UNREACHABLE();
1380 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1381 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1382 // Fall through.
1383 case ObjectLiteral::Property::COMPUTED:
1384 // It is safe to use [[Put]] here because the boilerplate already
1385 // contains computed properties with an uninitialized value.
1386 if (key->IsStringLiteral()) {
1387 DCHECK(key->IsPropertyName());
1388 if (property->emit_store()) {
1389 VisitForAccumulatorValue(value);
1390 DCHECK(StoreDescriptor::ValueRegister().is(x0));
1391 __ Peek(StoreDescriptor::ReceiverRegister(), 0);
1392 CallStoreIC(property->GetSlot(0), key->value());
1393 PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
1394
1395 if (NeedsHomeObject(value)) {
1396 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1397 }
1398 } else {
1399 VisitForEffect(value);
1400 }
1401 break;
1402 }
1403 __ Peek(x0, 0);
1404 PushOperand(x0);
1405 VisitForStackValue(key);
1406 VisitForStackValue(value);
1407 if (property->emit_store()) {
1408 if (NeedsHomeObject(value)) {
1409 EmitSetHomeObject(value, 2, property->GetSlot());
1410 }
1411 __ Mov(x0, Smi::FromInt(SLOPPY)); // Language mode
1412 PushOperand(x0);
1413 CallRuntimeWithOperands(Runtime::kSetProperty);
1414 } else {
1415 DropOperands(3);
1416 }
1417 break;
1418 case ObjectLiteral::Property::PROTOTYPE:
1419 DCHECK(property->emit_store());
1420 // Duplicate receiver on stack.
1421 __ Peek(x0, 0);
1422 PushOperand(x0);
1423 VisitForStackValue(value);
1424 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1425 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1426 BailoutState::NO_REGISTERS);
1427 break;
1428 case ObjectLiteral::Property::GETTER:
1429 if (property->emit_store()) {
1430 AccessorTable::Iterator it = accessor_table.lookup(key);
1431 it->second->bailout_id = expr->GetIdForPropertySet(property_index);
1432 it->second->getter = property;
1433 }
1434 break;
1435 case ObjectLiteral::Property::SETTER:
1436 if (property->emit_store()) {
1437 AccessorTable::Iterator it = accessor_table.lookup(key);
1438 it->second->bailout_id = expr->GetIdForPropertySet(property_index);
1439 it->second->setter = property;
1440 }
1441 break;
1442 }
1443 }
1444
1445 // Emit code to define accessors, using only a single call to the runtime for
1446 // each pair of corresponding getters and setters.
1447 for (AccessorTable::Iterator it = accessor_table.begin();
1448 it != accessor_table.end();
1449 ++it) {
1450 __ Peek(x10, 0); // Duplicate receiver.
1451 PushOperand(x10);
1452 VisitForStackValue(it->first);
1453 EmitAccessor(it->second->getter);
1454 EmitAccessor(it->second->setter);
1455 __ Mov(x10, Smi::FromInt(NONE));
1456 PushOperand(x10);
1457 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
1458 PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
1459 }
1460
1461 // Object literals have two parts. The "static" part on the left contains no
1462 // computed property names, and so we can compute its map ahead of time; see
1463 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1464 // starts with the first computed property name, and continues with all
1465 // properties to its right. All the code from above initializes the static
1466 // component of the object literal, and arranges for the map of the result to
1467 // reflect the static order in which the keys appear. For the dynamic
1468 // properties, we compile them into a series of "SetOwnProperty" runtime
1469 // calls. This will preserve insertion order.
1470 for (; property_index < expr->properties()->length(); property_index++) {
1471 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1472
1473 Expression* value = property->value();
1474 if (!result_saved) {
1475 PushOperand(x0); // Save result on stack
1476 result_saved = true;
1477 }
1478
1479 __ Peek(x10, 0); // Duplicate receiver.
1480 PushOperand(x10);
1481
1482 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1483 DCHECK(!property->is_computed_name());
1484 VisitForStackValue(value);
1485 DCHECK(property->emit_store());
1486 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1487 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1488 BailoutState::NO_REGISTERS);
1489 } else {
1490 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1491 VisitForStackValue(value);
1492 if (NeedsHomeObject(value)) {
1493 EmitSetHomeObject(value, 2, property->GetSlot());
1494 }
1495
1496 switch (property->kind()) {
1497 case ObjectLiteral::Property::CONSTANT:
1498 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1499 case ObjectLiteral::Property::COMPUTED:
1500 if (property->emit_store()) {
1501 PushOperand(Smi::FromInt(NONE));
1502 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1503 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
1504 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1505 BailoutState::NO_REGISTERS);
1506 } else {
1507 DropOperands(3);
1508 }
1509 break;
1510
1511 case ObjectLiteral::Property::PROTOTYPE:
1512 UNREACHABLE();
1513 break;
1514
1515 case ObjectLiteral::Property::GETTER:
1516 PushOperand(Smi::FromInt(NONE));
1517 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
1518 break;
1519
1520 case ObjectLiteral::Property::SETTER:
1521 PushOperand(Smi::FromInt(NONE));
1522 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
1523 break;
1524 }
1525 }
1526 }
1527
1528 if (result_saved) {
1529 context()->PlugTOS();
1530 } else {
1531 context()->Plug(x0);
1532 }
1533 }
1534
1535
1536 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1537 Comment cmnt(masm_, "[ ArrayLiteral");
1538
1539 Handle<FixedArray> constant_elements = expr->constant_elements();
1540 bool has_fast_elements =
1541 IsFastObjectElementsKind(expr->constant_elements_kind());
1542
1543 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1544 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1545 // If the only customer of allocation sites is transitioning, then
1546 // we can turn it off if we don't have anywhere else to transition to.
1547 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1548 }
1549
1550 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1551 __ Mov(x2, Smi::FromInt(expr->literal_index()));
1552 __ Mov(x1, Operand(constant_elements));
1553 if (MustCreateArrayLiteralWithRuntime(expr)) {
1554 __ Mov(x0, Smi::FromInt(expr->ComputeFlags()));
1555 __ Push(x3, x2, x1, x0);
1556 __ CallRuntime(Runtime::kCreateArrayLiteral);
1557 } else {
1558 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1559 __ CallStub(&stub);
1560 RestoreContext();
1561 }
1562 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1563
1564 bool result_saved = false; // Is the result saved to the stack?
1565 ZoneList<Expression*>* subexprs = expr->values();
1566 int length = subexprs->length();
1567
1568 // Emit code to evaluate all the non-constant subexpressions and to store
1569 // them into the newly cloned array.
1570 for (int array_index = 0; array_index < length; array_index++) {
1571 Expression* subexpr = subexprs->at(array_index);
1572 DCHECK(!subexpr->IsSpread());
1573
1574 // If the subexpression is a literal or a simple materialized literal it
1575 // is already set in the cloned array.
1576 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1577
1578 if (!result_saved) {
1579 PushOperand(x0);
1580 result_saved = true;
1581 }
1582 VisitForAccumulatorValue(subexpr);
1583
1584 __ Mov(StoreDescriptor::NameRegister(), Smi::FromInt(array_index));
1585 __ Peek(StoreDescriptor::ReceiverRegister(), 0);
1586 CallKeyedStoreIC(expr->LiteralFeedbackSlot());
1587
1588 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1589 BailoutState::NO_REGISTERS);
1590 }
1591
1592 if (result_saved) {
1593 context()->PlugTOS();
1594 } else {
1595 context()->Plug(x0);
1596 }
1597 }
1598
1599
1600 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1601 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1602
1603 Comment cmnt(masm_, "[ Assignment");
1604
1605 Property* property = expr->target()->AsProperty();
1606 LhsKind assign_type = Property::GetAssignType(property);
1607
1608 // Evaluate LHS expression.
1609 switch (assign_type) {
1610 case VARIABLE:
1611 // Nothing to do here.
1612 break;
1613 case NAMED_PROPERTY:
1614 if (expr->is_compound()) {
1615 // We need the receiver both on the stack and in the register.
1616 VisitForStackValue(property->obj());
1617 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
1618 } else {
1619 VisitForStackValue(property->obj());
1620 }
1621 break;
1622 case NAMED_SUPER_PROPERTY:
1623 VisitForStackValue(
1624 property->obj()->AsSuperPropertyReference()->this_var());
1625 VisitForAccumulatorValue(
1626 property->obj()->AsSuperPropertyReference()->home_object());
1627 PushOperand(result_register());
1628 if (expr->is_compound()) {
1629 const Register scratch = x10;
1630 __ Peek(scratch, kPointerSize);
1631 PushOperands(scratch, result_register());
1632 }
1633 break;
1634 case KEYED_SUPER_PROPERTY:
1635 VisitForStackValue(
1636 property->obj()->AsSuperPropertyReference()->this_var());
1637 VisitForStackValue(
1638 property->obj()->AsSuperPropertyReference()->home_object());
1639 VisitForAccumulatorValue(property->key());
1640 PushOperand(result_register());
1641 if (expr->is_compound()) {
1642 const Register scratch1 = x10;
1643 const Register scratch2 = x11;
1644 __ Peek(scratch1, 2 * kPointerSize);
1645 __ Peek(scratch2, kPointerSize);
1646 PushOperands(scratch1, scratch2, result_register());
1647 }
1648 break;
1649 case KEYED_PROPERTY:
1650 if (expr->is_compound()) {
1651 VisitForStackValue(property->obj());
1652 VisitForStackValue(property->key());
1653 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
1654 __ Peek(LoadDescriptor::NameRegister(), 0);
1655 } else {
1656 VisitForStackValue(property->obj());
1657 VisitForStackValue(property->key());
1658 }
1659 break;
1660 }
1661
1662 // For compound assignments we need another deoptimization point after the
1663 // variable/property load.
1664 if (expr->is_compound()) {
1665 { AccumulatorValueContext context(this);
1666 switch (assign_type) {
1667 case VARIABLE:
1668 EmitVariableLoad(expr->target()->AsVariableProxy());
1669 PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
1670 break;
1671 case NAMED_PROPERTY:
1672 EmitNamedPropertyLoad(property);
1673 PrepareForBailoutForId(property->LoadId(),
1674 BailoutState::TOS_REGISTER);
1675 break;
1676 case NAMED_SUPER_PROPERTY:
1677 EmitNamedSuperPropertyLoad(property);
1678 PrepareForBailoutForId(property->LoadId(),
1679 BailoutState::TOS_REGISTER);
1680 break;
1681 case KEYED_SUPER_PROPERTY:
1682 EmitKeyedSuperPropertyLoad(property);
1683 PrepareForBailoutForId(property->LoadId(),
1684 BailoutState::TOS_REGISTER);
1685 break;
1686 case KEYED_PROPERTY:
1687 EmitKeyedPropertyLoad(property);
1688 PrepareForBailoutForId(property->LoadId(),
1689 BailoutState::TOS_REGISTER);
1690 break;
1691 }
1692 }
1693
1694 Token::Value op = expr->binary_op();
1695 PushOperand(x0); // Left operand goes on the stack.
1696 VisitForAccumulatorValue(expr->value());
1697
1698 AccumulatorValueContext context(this);
1699 if (ShouldInlineSmiCase(op)) {
1700 EmitInlineSmiBinaryOp(expr->binary_operation(),
1701 op,
1702 expr->target(),
1703 expr->value());
1704 } else {
1705 EmitBinaryOp(expr->binary_operation(), op);
1706 }
1707
1708 // Deoptimization point in case the binary operation may have side effects.
1709 PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
1710 } else {
1711 VisitForAccumulatorValue(expr->value());
1712 }
1713
1714 SetExpressionPosition(expr);
1715
1716 // Store the value.
1717 switch (assign_type) {
1718 case VARIABLE: {
1719 VariableProxy* proxy = expr->target()->AsVariableProxy();
1720 EmitVariableAssignment(proxy->var(), expr->op(), expr->AssignmentSlot(),
1721 proxy->hole_check_mode());
1722 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
1723 context()->Plug(x0);
1724 break;
1725 }
1726 case NAMED_PROPERTY:
1727 EmitNamedPropertyAssignment(expr);
1728 break;
1729 case NAMED_SUPER_PROPERTY:
1730 EmitNamedSuperPropertyStore(property);
1731 context()->Plug(x0);
1732 break;
1733 case KEYED_SUPER_PROPERTY:
1734 EmitKeyedSuperPropertyStore(property);
1735 context()->Plug(x0);
1736 break;
1737 case KEYED_PROPERTY:
1738 EmitKeyedPropertyAssignment(expr);
1739 break;
1740 }
1741 }
1742
1743
1744 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1745 Token::Value op,
1746 Expression* left_expr,
1747 Expression* right_expr) {
1748 Label done, both_smis, stub_call;
1749
1750 // Get the arguments.
1751 Register left = x1;
1752 Register right = x0;
1753 Register result = x0;
1754 PopOperand(left);
1755
1756 // Perform combined smi check on both operands.
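// Smis have a zero tag bit, so OR-ing the operands and testing the tag bit of
// the result detects a non-smi in either operand with a single check.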
1757 __ Orr(x10, left, right);
1758 JumpPatchSite patch_site(masm_);
1759 patch_site.EmitJumpIfSmi(x10, &both_smis);
1760
1761 __ Bind(&stub_call);
1762
1763 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1764 {
1765 Assembler::BlockPoolsScope scope(masm_);
1766 CallIC(code, expr->BinaryOperationFeedbackId());
1767 patch_site.EmitPatchInfo();
1768 }
1769 __ B(&done);
1770
1771 __ Bind(&both_smis);
1772 // Smi case. This code works in the same way as the smi-smi case in the type
1773 // recording binary operation stub, see
1774 // BinaryOpStub::GenerateSmiSmiOperation for comments.
1775 // TODO(all): That doesn't exist any more. Where are the comments?
1776 //
1777 // The set of operations that needs to be supported here is controlled by
1778 // FullCodeGenerator::ShouldInlineSmiCase().
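// Note: in the shift cases below, Ubfx both untags the shift count and keeps
// only its low five bits, matching JavaScript's shift-count-modulo-32 rule.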
1779 switch (op) {
1780 case Token::SAR:
1781 __ Ubfx(right, right, kSmiShift, 5);
1782 __ Asr(result, left, right);
1783 __ Bic(result, result, kSmiShiftMask);
1784 break;
1785 case Token::SHL:
1786 __ Ubfx(right, right, kSmiShift, 5);
1787 __ Lsl(result, left, right);
1788 break;
1789 case Token::SHR:
1790 // If `left >>> right` >= 0x80000000, the result is not representable in a
1791 // signed 32-bit smi.
1792 __ Ubfx(right, right, kSmiShift, 5);
1793 __ Lsr(x10, left, right);
1794 __ Tbnz(x10, kXSignBit, &stub_call);
1795 __ Bic(result, x10, kSmiShiftMask);
1796 break;
1797 case Token::ADD:
1798 __ Adds(x10, left, right);
1799 __ B(vs, &stub_call);
1800 __ Mov(result, x10);
1801 break;
1802 case Token::SUB:
1803 __ Subs(x10, left, right);
1804 __ B(vs, &stub_call);
1805 __ Mov(result, x10);
1806 break;
1807 case Token::MUL: {
1808 Label not_minus_zero, done;
1809 STATIC_ASSERT(static_cast<unsigned>(kSmiShift) == (kXRegSizeInBits / 2));
1810 STATIC_ASSERT(kSmiTag == 0);
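// On arm64 a smi keeps its value in the upper 32 bits, so the 128-bit product
// of two smis has the untagged result in its high half, which Smulh extracts.
// A zero product still needs the minus-zero check below, and Cls verifies the
// result fits in a 32-bit smi before it is re-tagged.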
1811 __ Smulh(x10, left, right);
1812 __ Cbnz(x10, &not_minus_zero);
1813 __ Eor(x11, left, right);
1814 __ Tbnz(x11, kXSignBit, &stub_call);
1815 __ Mov(result, x10);
1816 __ B(&done);
1817 __ Bind(&not_minus_zero);
1818 __ Cls(x11, x10);
1819 __ Cmp(x11, kXRegSizeInBits - kSmiShift);
1820 __ B(lt, &stub_call);
1821 __ SmiTag(result, x10);
1822 __ Bind(&done);
1823 break;
1824 }
1825 case Token::BIT_OR:
1826 __ Orr(result, left, right);
1827 break;
1828 case Token::BIT_AND:
1829 __ And(result, left, right);
1830 break;
1831 case Token::BIT_XOR:
1832 __ Eor(result, left, right);
1833 break;
1834 default:
1835 UNREACHABLE();
1836 }
1837
1838 __ Bind(&done);
1839 context()->Plug(x0);
1840 }
1841
1842
1843 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
1844 PopOperand(x1);
1845 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1846 JumpPatchSite patch_site(masm_); // Unbound, signals no inlined smi code.
1847 {
1848 Assembler::BlockPoolsScope scope(masm_);
1849 CallIC(code, expr->BinaryOperationFeedbackId());
1850 patch_site.EmitPatchInfo();
1851 }
1852 context()->Plug(x0);
1853 }
1854
1855
1856 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
1857 for (int i = 0; i < lit->properties()->length(); i++) {
1858 ClassLiteral::Property* property = lit->properties()->at(i);
1859 Expression* value = property->value();
1860
1861 Register scratch = x1;
1862 if (property->is_static()) {
1863 __ Peek(scratch, kPointerSize); // constructor
1864 } else {
1865 __ Peek(scratch, 0); // prototype
1866 }
1867 PushOperand(scratch);
1868 EmitPropertyKey(property, lit->GetIdForProperty(i));
1869
1870 // The static 'prototype' property is read-only. We handle the non-computed
1871 // property name case in the parser. Since this is the only case where we
1872 // need to check for an own read-only property, we special-case it so we do
1873 // not have to do this check for every property.
1874 if (property->is_static() && property->is_computed_name()) {
1875 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
1876 __ Push(x0);
1877 }
1878
1879 VisitForStackValue(value);
1880 if (NeedsHomeObject(value)) {
1881 EmitSetHomeObject(value, 2, property->GetSlot());
1882 }
1883
1884 switch (property->kind()) {
1885 case ClassLiteral::Property::METHOD:
1886 PushOperand(Smi::FromInt(DONT_ENUM));
1887 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1888 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
1889 break;
1890
1891 case ClassLiteral::Property::GETTER:
1892 PushOperand(Smi::FromInt(DONT_ENUM));
1893 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
1894 break;
1895
1896 case ClassLiteral::Property::SETTER:
1897 PushOperand(Smi::FromInt(DONT_ENUM));
1898 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
1899 break;
1900
1901 case ClassLiteral::Property::FIELD:
1902 default:
1903 UNREACHABLE();
1904 }
1905 }
1906 }
1907
1908
1909 void FullCodeGenerator::EmitAssignment(Expression* expr,
1910 FeedbackVectorSlot slot) {
1911 DCHECK(expr->IsValidReferenceExpressionOrThis());
1912
1913 Property* prop = expr->AsProperty();
1914 LhsKind assign_type = Property::GetAssignType(prop);
1915
1916 switch (assign_type) {
1917 case VARIABLE: {
1918 VariableProxy* proxy = expr->AsVariableProxy();
1919 EffectContext context(this);
1920 EmitVariableAssignment(proxy->var(), Token::ASSIGN, slot,
1921 proxy->hole_check_mode());
1922 break;
1923 }
1924 case NAMED_PROPERTY: {
1925 PushOperand(x0); // Preserve value.
1926 VisitForAccumulatorValue(prop->obj());
1927 // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
1928 // this copy.
1929 __ Mov(StoreDescriptor::ReceiverRegister(), x0);
1930 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
1931 CallStoreIC(slot, prop->key()->AsLiteral()->value());
1932 break;
1933 }
1934 case NAMED_SUPER_PROPERTY: {
1935 PushOperand(x0);
1936 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
1937 VisitForAccumulatorValue(
1938 prop->obj()->AsSuperPropertyReference()->home_object());
1939 // stack: value, this; x0: home_object
1940 Register scratch = x10;
1941 Register scratch2 = x11;
1942 __ mov(scratch, result_register()); // home_object
1943 __ Peek(x0, kPointerSize); // value
1944 __ Peek(scratch2, 0); // this
1945 __ Poke(scratch2, kPointerSize); // this
1946 __ Poke(scratch, 0); // home_object
1947 // stack: this, home_object; x0: value
1948 EmitNamedSuperPropertyStore(prop);
1949 break;
1950 }
1951 case KEYED_SUPER_PROPERTY: {
1952 PushOperand(x0);
1953 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
1954 VisitForStackValue(
1955 prop->obj()->AsSuperPropertyReference()->home_object());
1956 VisitForAccumulatorValue(prop->key());
1957 Register scratch = x10;
1958 Register scratch2 = x11;
1959 __ Peek(scratch2, 2 * kPointerSize); // value
1960 // stack: value, this, home_object; x0: key, x11: value
1961 __ Peek(scratch, kPointerSize); // this
1962 __ Poke(scratch, 2 * kPointerSize);
1963 __ Peek(scratch, 0); // home_object
1964 __ Poke(scratch, kPointerSize);
1965 __ Poke(x0, 0);
1966 __ Move(x0, scratch2);
1967 // stack: this, home_object, key; x0: value.
1968 EmitKeyedSuperPropertyStore(prop);
1969 break;
1970 }
1971 case KEYED_PROPERTY: {
1972 PushOperand(x0); // Preserve value.
1973 VisitForStackValue(prop->obj());
1974 VisitForAccumulatorValue(prop->key());
1975 __ Mov(StoreDescriptor::NameRegister(), x0);
1976 PopOperands(StoreDescriptor::ReceiverRegister(),
1977 StoreDescriptor::ValueRegister());
1978 CallKeyedStoreIC(slot);
1979 break;
1980 }
1981 }
1982 context()->Plug(x0);
1983 }
1984
1985
1986 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
1987 Variable* var, MemOperand location) {
1988 __ Str(result_register(), location);
1989 if (var->IsContextSlot()) {
1990 // RecordWrite may destroy all its register arguments.
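// Storing a pointer into a heap-allocated context requires a write barrier so
// the incremental/generational GC can track the new reference.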
1991 __ Mov(x10, result_register());
1992 int offset = Context::SlotOffset(var->index());
1993 __ RecordWriteContextSlot(
1994 x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
1995 }
1996 }
1997
1998 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
1999 FeedbackVectorSlot slot,
2000 HoleCheckMode hole_check_mode) {
2001 ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
2002 if (var->IsUnallocated()) {
2003 // Global var, const, or let.
2004 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2005 CallStoreIC(slot, var->name());
2006
2007 } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
2008 DCHECK(!var->IsLookupSlot());
2009 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2010 MemOperand location = VarOperand(var, x1);
2011 // Perform an initialization check for lexically declared variables.
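// A lexical binding that still holds the hole has not been initialized yet
// (it is in its temporal dead zone), so assigning to it throws a ReferenceError.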
2012 if (var->binding_needs_init()) {
2013 Label assign;
2014 __ Ldr(x10, location);
2015 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
2016 __ Mov(x10, Operand(var->name()));
2017 __ Push(x10);
2018 __ CallRuntime(Runtime::kThrowReferenceError);
2019 __ Bind(&assign);
2020 }
2021 if (var->mode() != CONST) {
2022 EmitStoreToStackLocalOrContextSlot(var, location);
2023 } else if (var->throw_on_const_assignment(language_mode())) {
2024 __ CallRuntime(Runtime::kThrowConstAssignError);
2025 }
2026 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2027 // Initializing assignment to const {this} needs a write barrier.
2028 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2029 Label uninitialized_this;
2030 MemOperand location = VarOperand(var, x1);
2031 __ Ldr(x10, location);
2032 __ JumpIfRoot(x10, Heap::kTheHoleValueRootIndex, &uninitialized_this);
2033 __ Mov(x0, Operand(var->name()));
2034 __ Push(x0);
2035 __ CallRuntime(Runtime::kThrowReferenceError);
2036 __ bind(&uninitialized_this);
2037 EmitStoreToStackLocalOrContextSlot(var, location);
2038
2039 } else {
2040 DCHECK(var->mode() != CONST || op == Token::INIT);
2041 if (var->IsLookupSlot()) {
2042 // Assignment to var.
2043 __ Push(var->name());
2044 __ Push(x0);
2045 __ CallRuntime(is_strict(language_mode())
2046 ? Runtime::kStoreLookupSlot_Strict
2047 : Runtime::kStoreLookupSlot_Sloppy);
2048 } else {
2049 // Assignment to var or initializing assignment to let/const in harmony
2050 // mode.
2051 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2052 MemOperand location = VarOperand(var, x1);
2053 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
2054 __ Ldr(x10, location);
2055 __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
2056 __ Check(eq, kLetBindingReInitialization);
2057 }
2058 EmitStoreToStackLocalOrContextSlot(var, location);
2059 }
2060 }
2061 }
2062
2063
2064 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2065 ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
2066 // Assignment to a property, using a named store IC.
2067 Property* prop = expr->target()->AsProperty();
2068 DCHECK(prop != NULL);
2069 DCHECK(prop->key()->IsLiteral());
2070
2071 PopOperand(StoreDescriptor::ReceiverRegister());
2072 CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
2073
2074 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2075 context()->Plug(x0);
2076 }
2077
2078
2079 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2080 // Assignment to named property of super.
2081 // x0 : value
2082 // stack : receiver ('this'), home_object
2083 DCHECK(prop != NULL);
2084 Literal* key = prop->key()->AsLiteral();
2085 DCHECK(key != NULL);
2086
2087 PushOperand(key->value());
2088 PushOperand(x0);
2089 CallRuntimeWithOperands(is_strict(language_mode())
2090 ? Runtime::kStoreToSuper_Strict
2091 : Runtime::kStoreToSuper_Sloppy);
2092 }
2093
2094
2095 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2096 // Assignment to keyed property of super.
2097 // x0 : value
2098 // stack : receiver ('this'), home_object, key
2099 DCHECK(prop != NULL);
2100
2101 PushOperand(x0);
2102 CallRuntimeWithOperands(is_strict(language_mode())
2103 ? Runtime::kStoreKeyedToSuper_Strict
2104 : Runtime::kStoreKeyedToSuper_Sloppy);
2105 }
2106
2107
2108 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2109 ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
2110 // Assignment to a property, using a keyed store IC.
2111
2112 // TODO(all): Could we pass this in registers rather than on the stack?
2113 PopOperands(StoreDescriptor::NameRegister(),
2114 StoreDescriptor::ReceiverRegister());
2115 DCHECK(StoreDescriptor::ValueRegister().is(x0));
2116
2117 CallKeyedStoreIC(expr->AssignmentSlot());
2118
2119 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2120 context()->Plug(x0);
2121 }
2122
2123 // Code common for calls using the IC.
2124 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2125 ASM_LOCATION("FullCodeGenerator::EmitCallWithLoadIC");
2126 Expression* callee = expr->expression();
2127
2128 // Get the target function.
2129 ConvertReceiverMode convert_mode;
2130 if (callee->IsVariableProxy()) {
2131 { StackValueContext context(this);
2132 EmitVariableLoad(callee->AsVariableProxy());
2133 PrepareForBailout(callee, BailoutState::NO_REGISTERS);
2134 }
2135 // Push undefined as receiver. This is patched in the method prologue if it
2136 // is a sloppy mode method.
2137 {
2138 UseScratchRegisterScope temps(masm_);
2139 Register temp = temps.AcquireX();
2140 __ LoadRoot(temp, Heap::kUndefinedValueRootIndex);
2141 PushOperand(temp);
2142 }
2143 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2144 } else {
2145 // Load the function from the receiver.
2146 DCHECK(callee->IsProperty());
2147 DCHECK(!callee->AsProperty()->IsSuperAccess());
2148 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2149 EmitNamedPropertyLoad(callee->AsProperty());
2150 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2151 BailoutState::TOS_REGISTER);
2152 // Push the target function under the receiver.
2153 PopOperand(x10);
2154 PushOperands(x0, x10);
2155 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2156 }
2157
2158 EmitCall(expr, convert_mode);
2159 }
2160
2161
2162 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2163 ASM_LOCATION("FullCodeGenerator::EmitSuperCallWithLoadIC");
2164 Expression* callee = expr->expression();
2165 DCHECK(callee->IsProperty());
2166 Property* prop = callee->AsProperty();
2167 DCHECK(prop->IsSuperAccess());
2168 SetExpressionPosition(prop);
2169
2170 Literal* key = prop->key()->AsLiteral();
2171 DCHECK(!key->value()->IsSmi());
2172
2173 // Load the function from the receiver.
2174 const Register scratch = x10;
2175 SuperPropertyReference* super_ref =
2176 callee->AsProperty()->obj()->AsSuperPropertyReference();
2177 VisitForStackValue(super_ref->home_object());
2178 VisitForAccumulatorValue(super_ref->this_var());
2179 PushOperand(x0);
2180 __ Peek(scratch, kPointerSize);
2181 PushOperands(x0, scratch);
2182 PushOperand(key->value());
2183
2184 // Stack here:
2185 // - home_object
2186 // - this (receiver)
2187 // - this (receiver) <-- LoadFromSuper will pop here and below.
2188 // - home_object
2189 // - key
2190 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
2191 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2192
2193 // Replace home_object with target function.
2194 __ Poke(x0, kPointerSize);
2195
2196 // Stack here:
2197 // - target function
2198 // - this (receiver)
2199 EmitCall(expr);
2200 }
2201
2202
2203 // Code common for calls using the IC.
2204 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2205 Expression* key) {
2206 ASM_LOCATION("FullCodeGenerator::EmitKeyedCallWithLoadIC");
2207 // Load the key.
2208 VisitForAccumulatorValue(key);
2209
2210 Expression* callee = expr->expression();
2211
2212 // Load the function from the receiver.
2213 DCHECK(callee->IsProperty());
2214 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2215 __ Move(LoadDescriptor::NameRegister(), x0);
2216 EmitKeyedPropertyLoad(callee->AsProperty());
2217 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2218 BailoutState::TOS_REGISTER);
2219
2220 // Push the target function under the receiver.
2221 PopOperand(x10);
2222 PushOperands(x0, x10);
2223
2224 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2225 }
2226
2227
2228 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2229 ASM_LOCATION("FullCodeGenerator::EmitKeyedSuperCallWithLoadIC");
2230 Expression* callee = expr->expression();
2231 DCHECK(callee->IsProperty());
2232 Property* prop = callee->AsProperty();
2233 DCHECK(prop->IsSuperAccess());
2234 SetExpressionPosition(prop);
2235
2236 // Load the function from the receiver.
2237 const Register scratch = x10;
2238 SuperPropertyReference* super_ref =
2239 callee->AsProperty()->obj()->AsSuperPropertyReference();
2240 VisitForStackValue(super_ref->home_object());
2241 VisitForAccumulatorValue(super_ref->this_var());
2242 PushOperand(x0);
2243 __ Peek(scratch, kPointerSize);
2244 PushOperands(x0, scratch);
2245 VisitForStackValue(prop->key());
2246
2247 // Stack here:
2248 // - home_object
2249 // - this (receiver)
2250 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2251 // - home_object
2252 // - key
2253 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
2254 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2255
2256 // Replace home_object with target function.
2257 __ Poke(x0, kPointerSize);
2258
2259 // Stack here:
2260 // - target function
2261 // - this (receiver)
2262 EmitCall(expr);
2263 }
2264
2265
2266 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2267 ASM_LOCATION("FullCodeGenerator::EmitCall");
2268 // Load the arguments.
2269 ZoneList<Expression*>* args = expr->arguments();
2270 int arg_count = args->length();
2271 for (int i = 0; i < arg_count; i++) {
2272 VisitForStackValue(args->at(i));
2273 }
2274
2275 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
2276 SetCallPosition(expr, expr->tail_call_mode());
2277 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2278 if (FLAG_trace) {
2279 __ CallRuntime(Runtime::kTraceTailCall);
2280 }
2281 // Update profiling counters before the tail call since we will
2282 // not return to this function.
2283 EmitProfilingCounterHandlingForReturnSequence(true);
2284 }
2285 Handle<Code> code =
2286 CodeFactory::CallIC(isolate(), mode, expr->tail_call_mode()).code();
2287 __ Mov(x3, SmiFromSlot(expr->CallFeedbackICSlot()));
2288 __ Peek(x1, (arg_count + 1) * kXRegSize);
2289 __ Mov(x0, arg_count);
2290 CallIC(code);
2291 OperandStackDepthDecrement(arg_count + 1);
2292
2293 RecordJSReturnSite(expr);
2294 RestoreContext();
2295 context()->DropAndPlug(1, x0);
2296 }
2297
2298 void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
2299 int arg_count = expr->arguments()->length();
2300 ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
2301 // Prepare to push a copy of the first argument or undefined if it doesn't
2302 // exist.
2303 if (arg_count > 0) {
2304 __ Peek(x9, arg_count * kXRegSize);
2305 } else {
2306 __ LoadRoot(x9, Heap::kUndefinedValueRootIndex);
2307 }
2308
2309 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2310
2311 // Prepare to push the language mode.
2312 __ Mov(x11, Smi::FromInt(language_mode()));
2313 // Prepare to push the start position of the scope the call resides in.
2314 __ Mov(x12, Smi::FromInt(scope()->start_position()));
2315 // Prepare to push the source position of the eval call.
2316 __ Mov(x13, Smi::FromInt(expr->position()));
2317
2318 // Push.
2319 __ Push(x9, x10, x11, x12, x13);
2320
2321 // Do the runtime call.
2322 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2323 }
2324
2325
2326 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2327 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2328 VariableProxy* callee = expr->expression()->AsVariableProxy();
2329 if (callee->var()->IsLookupSlot()) {
2330 Label slow, done;
2331 SetExpressionPosition(callee);
2332 // Generate code for loading from variables potentially shadowed
2333 // by eval-introduced variables.
2334 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2335
2336 __ Bind(&slow);
2337 // Call the runtime to find the function to call (returned in x0)
2338 // and the object holding it (returned in x1).
2339 __ Push(callee->name());
2340 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2341 PushOperands(x0, x1); // Receiver, function.
2342 PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);
2343
2344 // If fast case code has been generated, emit code to push the
2345 // function and receiver and have the slow path jump around this
2346 // code.
2347 if (done.is_linked()) {
2348 Label call;
2349 __ B(&call);
2350 __ Bind(&done);
2351 // Push function.
2352 // The receiver is implicitly the global receiver. Indicate this
2353 // by passing undefined to the call function stub.
2354 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2355 __ Push(x0, x1);
2356 __ Bind(&call);
2357 }
2358 } else {
2359 VisitForStackValue(callee);
2360 // refEnv.WithBaseObject()
2361 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
2362 PushOperand(x10); // Reserved receiver slot.
2363 }
2364 }
2365
2366
2367 void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2368 ASM_LOCATION("FullCodeGenerator::EmitPossiblyEvalCall");
2369 // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval
2370 // to resolve the function we need to call. Then we call the resolved
2371 // function using the given arguments.
2372 ZoneList<Expression*>* args = expr->arguments();
2373 int arg_count = args->length();
2374
2375 PushCalleeAndWithBaseObject(expr);
2376
2377 // Push the arguments.
2378 for (int i = 0; i < arg_count; i++) {
2379 VisitForStackValue(args->at(i));
2380 }
2381
2382 // Push a copy of the function (found below the arguments) and
2383 // resolve eval.
2384 __ Peek(x10, (arg_count + 1) * kPointerSize);
2385 __ Push(x10);
2386 EmitResolvePossiblyDirectEval(expr);
2387
2388 // Touch up the stack with the resolved function.
2389 __ Poke(x0, (arg_count + 1) * kPointerSize);
2390
2391 PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);
2392
2393 // Record source position for debugger.
2394 SetCallPosition(expr);
2395
2396 // Call the evaluated function.
2397 Handle<Code> code = CodeFactory::CallIC(isolate(), ConvertReceiverMode::kAny,
2398 expr->tail_call_mode())
2399 .code();
2400 __ Mov(x3, SmiFromSlot(expr->CallFeedbackICSlot()));
2401 __ Peek(x1, (arg_count + 1) * kXRegSize);
2402 __ Mov(x0, arg_count);
2403 __ Call(code, RelocInfo::CODE_TARGET);
2404 OperandStackDepthDecrement(arg_count + 1);
2405 RecordJSReturnSite(expr);
2406 RestoreContext();
2407 context()->DropAndPlug(1, x0);
2408 }
2409
2410
2411 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2412 Comment cmnt(masm_, "[ CallNew");
2413 // According to ECMA-262, section 11.2.2, page 44, the function
2414 // expression in new calls must be evaluated before the
2415 // arguments.
2416
2417 // Push constructor on the stack. If it's not a function it's used as
2418 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2419 // ignored.
2420 DCHECK(!expr->expression()->IsSuperPropertyReference());
2421 VisitForStackValue(expr->expression());
2422
2423 // Push the arguments ("left-to-right") on the stack.
2424 ZoneList<Expression*>* args = expr->arguments();
2425 int arg_count = args->length();
2426 for (int i = 0; i < arg_count; i++) {
2427 VisitForStackValue(args->at(i));
2428 }
2429
2430 // Call the construct call builtin that handles allocation and
2431 // constructor invocation.
2432 SetConstructCallPosition(expr);
2433
2434 // Load function and argument count into x1 and x0.
2435 __ Mov(x0, arg_count);
2436 __ Peek(x1, arg_count * kXRegSize);
2437
2438 // Record call targets in unoptimized code.
2439 __ EmitLoadTypeFeedbackVector(x2);
2440 __ Mov(x3, SmiFromSlot(expr->CallNewFeedbackSlot()));
2441
2442 CallConstructStub stub(isolate());
2443 CallIC(stub.GetCode());
2444 OperandStackDepthDecrement(arg_count + 1);
2445 PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
2446 RestoreContext();
2447 context()->Plug(x0);
2448 }
2449
2450
2451 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2452 ASM_LOCATION("FullCodeGenerator::EmitSuperConstructorCall");
2453 SuperCallReference* super_call_ref =
2454 expr->expression()->AsSuperCallReference();
2455 DCHECK_NOT_NULL(super_call_ref);
2456
2457 // Push the super constructor target on the stack (may be null,
2458 // but the Construct builtin can deal with that properly).
2459 VisitForAccumulatorValue(super_call_ref->this_function_var());
2460 __ AssertFunction(result_register());
2461 __ Ldr(result_register(),
2462 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2463 __ Ldr(result_register(),
2464 FieldMemOperand(result_register(), Map::kPrototypeOffset));
2465 PushOperand(result_register());
2466
2467 // Push the arguments ("left-to-right") on the stack.
2468 ZoneList<Expression*>* args = expr->arguments();
2469 int arg_count = args->length();
2470 for (int i = 0; i < arg_count; i++) {
2471 VisitForStackValue(args->at(i));
2472 }
2473
2474 // Call the construct call builtin that handles allocation and
2475 // constructor invocation.
2476 SetConstructCallPosition(expr);
2477
2478 // Load new target into x3.
2479 VisitForAccumulatorValue(super_call_ref->new_target_var());
2480 __ Mov(x3, result_register());
2481
2482 // Load function and argument count into x1 and x0.
2483 __ Mov(x0, arg_count);
2484 __ Peek(x1, arg_count * kXRegSize);
2485
2486 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2487 OperandStackDepthDecrement(arg_count + 1);
2488
2489 RecordJSReturnSite(expr);
2490 RestoreContext();
2491 context()->Plug(x0);
2492 }
2493
2494
2495 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2496 ZoneList<Expression*>* args = expr->arguments();
2497 DCHECK(args->length() == 1);
2498
2499 VisitForAccumulatorValue(args->at(0));
2500
2501 Label materialize_true, materialize_false;
2502 Label* if_true = NULL;
2503 Label* if_false = NULL;
2504 Label* fall_through = NULL;
2505 context()->PrepareTest(&materialize_true, &materialize_false,
2506 &if_true, &if_false, &fall_through);
2507
2508 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2509 __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
2510
2511 context()->Plug(if_true, if_false);
2512 }
2513
2514
2515 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2516 ZoneList<Expression*>* args = expr->arguments();
2517 DCHECK(args->length() == 1);
2518
2519 VisitForAccumulatorValue(args->at(0));
2520
2521 Label materialize_true, materialize_false;
2522 Label* if_true = NULL;
2523 Label* if_false = NULL;
2524 Label* fall_through = NULL;
2525 context()->PrepareTest(&materialize_true, &materialize_false,
2526 &if_true, &if_false, &fall_through);
2527
2528 __ JumpIfSmi(x0, if_false);
2529 __ CompareObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE);
2530 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2531 Split(ge, if_true, if_false, fall_through);
2532
2533 context()->Plug(if_true, if_false);
2534 }
2535
2536
2537 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2538 ZoneList<Expression*>* args = expr->arguments();
2539 DCHECK(args->length() == 1);
2540
2541 VisitForAccumulatorValue(args->at(0));
2542
2543 Label materialize_true, materialize_false;
2544 Label* if_true = NULL;
2545 Label* if_false = NULL;
2546 Label* fall_through = NULL;
2547 context()->PrepareTest(&materialize_true, &materialize_false,
2548 &if_true, &if_false, &fall_through);
2549
2550 __ JumpIfSmi(x0, if_false);
2551 __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
2552 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2553 Split(eq, if_true, if_false, fall_through);
2554
2555 context()->Plug(if_true, if_false);
2556 }
2557
2558
2559 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2560 ZoneList<Expression*>* args = expr->arguments();
2561 DCHECK(args->length() == 1);
2562
2563 VisitForAccumulatorValue(args->at(0));
2564
2565 Label materialize_true, materialize_false;
2566 Label* if_true = NULL;
2567 Label* if_false = NULL;
2568 Label* fall_through = NULL;
2569 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2570 &if_false, &fall_through);
2571
2572 __ JumpIfSmi(x0, if_false);
2573 __ CompareObjectType(x0, x10, x11, JS_TYPED_ARRAY_TYPE);
2574 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2575 Split(eq, if_true, if_false, fall_through);
2576
2577 context()->Plug(if_true, if_false);
2578 }
2579
2580
2581 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2582 ZoneList<Expression*>* args = expr->arguments();
2583 DCHECK(args->length() == 1);
2584
2585 VisitForAccumulatorValue(args->at(0));
2586
2587 Label materialize_true, materialize_false;
2588 Label* if_true = NULL;
2589 Label* if_false = NULL;
2590 Label* fall_through = NULL;
2591 context()->PrepareTest(&materialize_true, &materialize_false,
2592 &if_true, &if_false, &fall_through);
2593
2594 __ JumpIfSmi(x0, if_false);
2595 __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
2596 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2597 Split(eq, if_true, if_false, fall_through);
2598
2599 context()->Plug(if_true, if_false);
2600 }
2601
2602
2603 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2604 ZoneList<Expression*>* args = expr->arguments();
2605 DCHECK(args->length() == 1);
2606
2607 VisitForAccumulatorValue(args->at(0));
2608
2609 Label materialize_true, materialize_false;
2610 Label* if_true = NULL;
2611 Label* if_false = NULL;
2612 Label* fall_through = NULL;
2613 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2614 &if_false, &fall_through);
2615
2616 __ JumpIfSmi(x0, if_false);
2617 __ CompareObjectType(x0, x10, x11, JS_PROXY_TYPE);
2618 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2619 Split(eq, if_true, if_false, fall_through);
2620
2621 context()->Plug(if_true, if_false);
2622 }
2623
2624
2625 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2626 ASM_LOCATION("FullCodeGenerator::EmitClassOf");
2627 ZoneList<Expression*>* args = expr->arguments();
2628 DCHECK(args->length() == 1);
2629 Label done, null, function, non_function_constructor;
2630
2631 VisitForAccumulatorValue(args->at(0));
2632
2633 // If the object is not a JSReceiver, we return null.
2634 __ JumpIfSmi(x0, &null);
2635 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2636 __ CompareObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE);
2637 // x10: object's map.
2638 // x11: object's type.
2639 __ B(lt, &null);
2640
2641 // Return 'Function' for JSFunction objects.
2642 __ Cmp(x11, FIRST_FUNCTION_TYPE);
2643 STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
2644 __ B(hs, &function);
2645
2646 // Check if the constructor in the map is a JS function.
2647 Register instance_type = x14;
2648 __ GetMapConstructor(x12, x10, x13, instance_type);
2649 __ Cmp(instance_type, JS_FUNCTION_TYPE);
2650 __ B(ne, &non_function_constructor);
2651
2652 // x12 now contains the constructor function. Grab the
2653 // instance class name from there.
2654 __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
2655 __ Ldr(x0,
2656 FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
2657 __ B(&done);
2658
2659 // Functions have class 'Function'.
2660 __ Bind(&function);
2661 __ LoadRoot(x0, Heap::kFunction_stringRootIndex);
2662 __ B(&done);
2663
2664 // Objects with a non-function constructor have class 'Object'.
2665 __ Bind(&non_function_constructor);
2666 __ LoadRoot(x0, Heap::kObject_stringRootIndex);
2667 __ B(&done);
2668
2669 // Non-JS objects have class null.
2670 __ Bind(&null);
2671 __ LoadRoot(x0, Heap::kNullValueRootIndex);
2672
2673 // All done.
2674 __ Bind(&done);
2675
2676 context()->Plug(x0);
2677 }
2678
2679
2680 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
2681 ZoneList<Expression*>* args = expr->arguments();
2682 DCHECK(args->length() == 2);
2683
2684 VisitForStackValue(args->at(0));
2685 VisitForAccumulatorValue(args->at(1));
2686
2687 Register object = x1;
2688 Register index = x0;
2689 Register result = x3;
2690
2691 PopOperand(object);
2692
2693 Label need_conversion;
2694 Label index_out_of_range;
2695 Label done;
2696 StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
2697 &need_conversion, &index_out_of_range);
2698 generator.GenerateFast(masm_);
2699 __ B(&done);
2700
2701 __ Bind(&index_out_of_range);
2702 // When the index is out of range, the spec requires us to return NaN.
2703 __ LoadRoot(result, Heap::kNanValueRootIndex);
2704 __ B(&done);
2705
2706 __ Bind(&need_conversion);
2707 // Load the undefined value into the result register, which will
2708 // trigger conversion.
2709 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2710 __ B(&done);
2711
2712 NopRuntimeCallHelper call_helper;
2713 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
2714
2715 __ Bind(&done);
2716 context()->Plug(result);
2717 }
2718
2719
2720 void FullCodeGenerator::EmitCall(CallRuntime* expr) {
2721 ASM_LOCATION("FullCodeGenerator::EmitCall");
2722 ZoneList<Expression*>* args = expr->arguments();
2723 DCHECK_LE(2, args->length());
2724 // Push target, receiver and arguments onto the stack.
2725 for (Expression* const arg : *args) {
2726 VisitForStackValue(arg);
2727 }
2728 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
2729 // Move target to x1.
2730 int const argc = args->length() - 2;
2731 __ Peek(x1, (argc + 1) * kXRegSize);
2732 // Call the target.
2733 __ Mov(x0, argc);
2734 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2735 OperandStackDepthDecrement(argc + 1);
2736 RestoreContext();
2737 // Discard the function left on TOS.
2738 context()->DropAndPlug(1, x0);
2739 }
2740
2741 void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
2742 ZoneList<Expression*>* args = expr->arguments();
2743 DCHECK_EQ(1, args->length());
2744 VisitForAccumulatorValue(args->at(0));
2745 __ AssertFunction(x0);
2746 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
2747 __ Ldr(x0, FieldMemOperand(x0, Map::kPrototypeOffset));
2748 context()->Plug(x0);
2749 }
2750
2751 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
2752 DCHECK(expr->arguments()->length() == 0);
2753 ExternalReference debug_is_active =
2754 ExternalReference::debug_is_active_address(isolate());
2755 __ Mov(x10, debug_is_active);
2756 __ Ldrb(x0, MemOperand(x10));
2757 __ SmiTag(x0);
2758 context()->Plug(x0);
2759 }
2760
2761
2762 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
2763 ZoneList<Expression*>* args = expr->arguments();
2764 DCHECK_EQ(2, args->length());
2765 VisitForStackValue(args->at(0));
2766 VisitForStackValue(args->at(1));
2767
2768 Label runtime, done;
2769
2770 Register result = x0;
2771 __ Allocate(JSIteratorResult::kSize, result, x10, x11, &runtime,
2772 NO_ALLOCATION_FLAGS);
2773 Register map_reg = x1;
2774 Register result_value = x2;
2775 Register boolean_done = x3;
2776 Register empty_fixed_array = x4;
2777 Register untagged_result = x5;
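// Build the JSIteratorResult inline: store its map, the empty fixed array for
// both properties and elements, and then the value/done pair. The Stp pairs
// rely on the adjacent field offsets checked by the STATIC_ASSERTs below.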
2778 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, map_reg);
2779 __ Pop(boolean_done);
2780 __ Pop(result_value);
2781 __ LoadRoot(empty_fixed_array, Heap::kEmptyFixedArrayRootIndex);
2782 STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
2783 JSObject::kElementsOffset);
2784 STATIC_ASSERT(JSIteratorResult::kValueOffset + kPointerSize ==
2785 JSIteratorResult::kDoneOffset);
2786 __ ObjectUntag(untagged_result, result);
2787 __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
2788 __ Stp(empty_fixed_array, empty_fixed_array,
2789 MemOperand(untagged_result, JSObject::kPropertiesOffset));
2790 __ Stp(result_value, boolean_done,
2791 MemOperand(untagged_result, JSIteratorResult::kValueOffset));
2792 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
2793 __ B(&done);
2794
2795 __ Bind(&runtime);
2796 CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
2797
2798 __ Bind(&done);
2799 context()->Plug(x0);
2800 }
2801
2802
2803 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
2804 // Push function.
2805 __ LoadNativeContextSlot(expr->context_index(), x0);
2806 PushOperand(x0);
2807
2808 // Push undefined as the receiver.
2809 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
2810 PushOperand(x0);
2811 }
2812
2813
2814 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
2815 ZoneList<Expression*>* args = expr->arguments();
2816 int arg_count = args->length();
2817
2818 SetCallPosition(expr);
2819 __ Peek(x1, (arg_count + 1) * kPointerSize);
2820 __ Mov(x0, arg_count);
2821 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
2822 RelocInfo::CODE_TARGET);
2823 OperandStackDepthDecrement(arg_count + 1);
2824 RestoreContext();
2825 }
2826
2827
2828 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
2829 switch (expr->op()) {
2830 case Token::DELETE: {
2831 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
2832 Property* property = expr->expression()->AsProperty();
2833 VariableProxy* proxy = expr->expression()->AsVariableProxy();
2834
2835 if (property != NULL) {
2836 VisitForStackValue(property->obj());
2837 VisitForStackValue(property->key());
2838 CallRuntimeWithOperands(is_strict(language_mode())
2839 ? Runtime::kDeleteProperty_Strict
2840 : Runtime::kDeleteProperty_Sloppy);
2841 context()->Plug(x0);
2842 } else if (proxy != NULL) {
2843 Variable* var = proxy->var();
2844 // Delete of an unqualified identifier is disallowed in strict mode but
2845 // "delete this" is allowed.
2846 bool is_this = var->is_this();
2847 DCHECK(is_sloppy(language_mode()) || is_this);
2848 if (var->IsUnallocated()) {
2849 __ LoadGlobalObject(x12);
2850 __ Mov(x11, Operand(var->name()));
2851 __ Push(x12, x11);
2852 __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
2853 context()->Plug(x0);
2854 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
2855 // Result of deleting non-global, non-dynamic variables is false.
2856 // The subexpression does not have side effects.
2857 context()->Plug(is_this);
2858 } else {
2859 // Non-global variable. Call the runtime to try to delete from the
2860 // context where the variable was introduced.
2861 __ Push(var->name());
2862 __ CallRuntime(Runtime::kDeleteLookupSlot);
2863 context()->Plug(x0);
2864 }
2865 } else {
2866 // Result of deleting non-property, non-variable reference is true.
2867 // The subexpression may have side effects.
2868 VisitForEffect(expr->expression());
2869 context()->Plug(true);
2870 }
2871 break;
2873 }
2874 case Token::VOID: {
2875 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
2876 VisitForEffect(expr->expression());
2877 context()->Plug(Heap::kUndefinedValueRootIndex);
2878 break;
2879 }
2880 case Token::NOT: {
2881 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
2882 if (context()->IsEffect()) {
2883 // Unary NOT has no side effects so it's only necessary to visit the
2884 // subexpression. Match the optimizing compiler by not branching.
2885 VisitForEffect(expr->expression());
2886 } else if (context()->IsTest()) {
2887 const TestContext* test = TestContext::cast(context());
2888 // The labels are swapped for the recursive call.
2889 VisitForControl(expr->expression(),
2890 test->false_label(),
2891 test->true_label(),
2892 test->fall_through());
2893 context()->Plug(test->true_label(), test->false_label());
2894 } else {
2895 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
2896 // TODO(jbramley): This could be much more efficient using (for
2897 // example) the CSEL instruction.
2898 Label materialize_true, materialize_false, done;
2899 VisitForControl(expr->expression(),
2900 &materialize_false,
2901 &materialize_true,
2902 &materialize_true);
2903 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
2904
2905 __ Bind(&materialize_true);
2906 PrepareForBailoutForId(expr->MaterializeTrueId(),
2907 BailoutState::NO_REGISTERS);
2908 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
2909 __ B(&done);
2910
2911 __ Bind(&materialize_false);
2912 PrepareForBailoutForId(expr->MaterializeFalseId(),
2913 BailoutState::NO_REGISTERS);
2914 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
2915 __ B(&done);
2916
2917 __ Bind(&done);
2918 if (context()->IsStackValue()) {
2919 __ Push(result_register());
2920 }
2921 }
2922 break;
2923 }
2924 case Token::TYPEOF: {
2925 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
2926 {
2927 AccumulatorValueContext context(this);
2928 VisitForTypeofValue(expr->expression());
2929 }
2930 __ Mov(x3, x0);
2931 __ Call(isolate()->builtins()->Typeof(), RelocInfo::CODE_TARGET);
2932 context()->Plug(x0);
2933 break;
2934 }
2935 default:
2936 UNREACHABLE();
2937 }
2938 }
2939
2940
2941 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
2942 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
2943
2944 Comment cmnt(masm_, "[ CountOperation");
2945
2946 Property* prop = expr->expression()->AsProperty();
2947 LhsKind assign_type = Property::GetAssignType(prop);
2948
2949 // Evaluate expression and get value.
2950 if (assign_type == VARIABLE) {
2951 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
2952 AccumulatorValueContext context(this);
2953 EmitVariableLoad(expr->expression()->AsVariableProxy());
2954 } else {
2955 // Reserve space for result of postfix operation.
2956 if (expr->is_postfix() && !context()->IsEffect()) {
2957 PushOperand(xzr);
2958 }
2959 switch (assign_type) {
2960 case NAMED_PROPERTY: {
2961 // Put the object both on the stack and in the register.
2962 VisitForStackValue(prop->obj());
2963 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2964 EmitNamedPropertyLoad(prop);
2965 break;
2966 }
2967
2968 case NAMED_SUPER_PROPERTY: {
2969 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2970 VisitForAccumulatorValue(
2971 prop->obj()->AsSuperPropertyReference()->home_object());
2972 PushOperand(result_register());
2973 const Register scratch = x10;
2974 __ Peek(scratch, kPointerSize);
2975 PushOperands(scratch, result_register());
2976 EmitNamedSuperPropertyLoad(prop);
2977 break;
2978 }
2979
2980 case KEYED_SUPER_PROPERTY: {
2981 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2982 VisitForStackValue(
2983 prop->obj()->AsSuperPropertyReference()->home_object());
2984 VisitForAccumulatorValue(prop->key());
2985 PushOperand(result_register());
2986 const Register scratch1 = x10;
2987 const Register scratch2 = x11;
2988 __ Peek(scratch1, 2 * kPointerSize);
2989 __ Peek(scratch2, kPointerSize);
2990 PushOperands(scratch1, scratch2, result_register());
2991 EmitKeyedSuperPropertyLoad(prop);
2992 break;
2993 }
2994
2995 case KEYED_PROPERTY: {
2996 VisitForStackValue(prop->obj());
2997 VisitForStackValue(prop->key());
2998 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
2999 __ Peek(LoadDescriptor::NameRegister(), 0);
3000 EmitKeyedPropertyLoad(prop);
3001 break;
3002 }
3003
3004 case VARIABLE:
3005 UNREACHABLE();
3006 }
3007 }
3008
3009 // We need a second deoptimization point after loading the value
3010 // in case evaluating the property load may have a side effect.
3011 if (assign_type == VARIABLE) {
3012 PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
3013 } else {
3014 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
3015 }
3016
3017 // Inline smi case if we are in a loop.
3018 Label stub_call, done;
3019 JumpPatchSite patch_site(masm_);
3020
3021 int count_value = expr->op() == Token::INC ? 1 : -1;
3022 if (ShouldInlineSmiCase(expr->op())) {
3023 Label slow;
3024 patch_site.EmitJumpIfNotSmi(x0, &slow);
3025
3026 // Save result for postfix expressions.
3027 if (expr->is_postfix()) {
3028 if (!context()->IsEffect()) {
3029 // Save the result on the stack. If we have a named or keyed property we
3030 // store the result under the receiver that is currently on top of the
3031 // stack.
3032 switch (assign_type) {
3033 case VARIABLE:
3034 __ Push(x0);
3035 break;
3036 case NAMED_PROPERTY:
3037 __ Poke(x0, kPointerSize);
3038 break;
3039 case NAMED_SUPER_PROPERTY:
3040 __ Poke(x0, kPointerSize * 2);
3041 break;
3042 case KEYED_PROPERTY:
3043 __ Poke(x0, kPointerSize * 2);
3044 break;
3045 case KEYED_SUPER_PROPERTY:
3046 __ Poke(x0, kPointerSize * 3);
3047 break;
3048 }
3049 }
3050 }
3051
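// Both the old value and the tagged count are smis, so Adds performs the
// increment/decrement directly on the tagged values; on overflow (V set) we
// undo the operation and fall back to the stub.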
3052 __ Adds(x0, x0, Smi::FromInt(count_value));
3053 __ B(vc, &done);
3054 // Call stub. Undo operation first.
3055 __ Sub(x0, x0, Smi::FromInt(count_value));
3056 __ B(&stub_call);
3057 __ Bind(&slow);
3058 }
3059
3060 // Convert old value into a number.
3061 __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
3062 RestoreContext();
3063 PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);
3064
3065 // Save result for postfix expressions.
3066 if (expr->is_postfix()) {
3067 if (!context()->IsEffect()) {
3068 // Save the result on the stack. If we have a named or keyed property
3069 // we store the result under the receiver that is currently on top
3070 // of the stack.
3071 switch (assign_type) {
3072 case VARIABLE:
3073 PushOperand(x0);
3074 break;
3075 case NAMED_PROPERTY:
3076 __ Poke(x0, kXRegSize);
3077 break;
3078 case NAMED_SUPER_PROPERTY:
3079 __ Poke(x0, 2 * kXRegSize);
3080 break;
3081 case KEYED_PROPERTY:
3082 __ Poke(x0, 2 * kXRegSize);
3083 break;
3084 case KEYED_SUPER_PROPERTY:
3085 __ Poke(x0, 3 * kXRegSize);
3086 break;
3087 }
3088 }
3089 }
3090
3091 __ Bind(&stub_call);
3092 __ Mov(x1, x0);
3093 __ Mov(x0, Smi::FromInt(count_value));
3094
3095 SetExpressionPosition(expr);
3096
3097 {
3098 Assembler::BlockPoolsScope scope(masm_);
3099 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
3100 CallIC(code, expr->CountBinOpFeedbackId());
3101 patch_site.EmitPatchInfo();
3102 }
3103 __ Bind(&done);
3104
3105 // Store the value returned in x0.
3106 switch (assign_type) {
3107 case VARIABLE: {
3108 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3109 if (expr->is_postfix()) {
3110 { EffectContext context(this);
3111 EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
3112 proxy->hole_check_mode());
3113 PrepareForBailoutForId(expr->AssignmentId(),
3114 BailoutState::TOS_REGISTER);
3115 context.Plug(x0);
3116 }
3117 // For all contexts except EffectContext we have the result on
3118 // top of the stack.
3119 if (!context()->IsEffect()) {
3120 context()->PlugTOS();
3121 }
3122 } else {
3123 EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
3124 proxy->hole_check_mode());
3125 PrepareForBailoutForId(expr->AssignmentId(),
3126 BailoutState::TOS_REGISTER);
3127 context()->Plug(x0);
3128 }
3129 break;
3130 }
3131 case NAMED_PROPERTY: {
3132 PopOperand(StoreDescriptor::ReceiverRegister());
3133 CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
3134 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3135 if (expr->is_postfix()) {
3136 if (!context()->IsEffect()) {
3137 context()->PlugTOS();
3138 }
3139 } else {
3140 context()->Plug(x0);
3141 }
3142 break;
3143 }
3144 case NAMED_SUPER_PROPERTY: {
3145 EmitNamedSuperPropertyStore(prop);
3146 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3147 if (expr->is_postfix()) {
3148 if (!context()->IsEffect()) {
3149 context()->PlugTOS();
3150 }
3151 } else {
3152 context()->Plug(x0);
3153 }
3154 break;
3155 }
3156 case KEYED_SUPER_PROPERTY: {
3157 EmitKeyedSuperPropertyStore(prop);
3158 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3159 if (expr->is_postfix()) {
3160 if (!context()->IsEffect()) {
3161 context()->PlugTOS();
3162 }
3163 } else {
3164 context()->Plug(x0);
3165 }
3166 break;
3167 }
3168 case KEYED_PROPERTY: {
3169 PopOperand(StoreDescriptor::NameRegister());
3170 PopOperand(StoreDescriptor::ReceiverRegister());
3171 CallKeyedStoreIC(expr->CountSlot());
3172 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3173 if (expr->is_postfix()) {
3174 if (!context()->IsEffect()) {
3175 context()->PlugTOS();
3176 }
3177 } else {
3178 context()->Plug(x0);
3179 }
3180 break;
3181 }
3182 }
3183 }
3184
3185
3186 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3187 Expression* sub_expr,
3188 Handle<String> check) {
3189 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
3190 Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
3191 Label materialize_true, materialize_false;
3192 Label* if_true = NULL;
3193 Label* if_false = NULL;
3194 Label* fall_through = NULL;
3195 context()->PrepareTest(&materialize_true, &materialize_false,
3196 &if_true, &if_false, &fall_through);
3197
3198 { AccumulatorValueContext context(this);
3199 VisitForTypeofValue(sub_expr);
3200 }
3201 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3202
3203 Factory* factory = isolate()->factory();
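  // Each branch below handles one literal typeof string, e.g.
  // (typeof x == "number") or (typeof x == "function"); strings that match
  // no branch fall through to the final else clause, which always jumps to
  // if_false.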
3204 if (String::Equals(check, factory->number_string())) {
3205 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
3206 __ JumpIfSmi(x0, if_true);
3207 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
3208 __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
3209 Split(eq, if_true, if_false, fall_through);
3210 } else if (String::Equals(check, factory->string_string())) {
3211 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
3212 __ JumpIfSmi(x0, if_false);
3213 __ CompareObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE);
3214 Split(lt, if_true, if_false, fall_through);
3215 } else if (String::Equals(check, factory->symbol_string())) {
3216 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
3217 __ JumpIfSmi(x0, if_false);
3218 __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
3219 Split(eq, if_true, if_false, fall_through);
3220 } else if (String::Equals(check, factory->boolean_string())) {
3221 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
3222 __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
3223 __ CompareRoot(x0, Heap::kFalseValueRootIndex);
3224 Split(eq, if_true, if_false, fall_through);
3225 } else if (String::Equals(check, factory->undefined_string())) {
3226 ASM_LOCATION(
3227 "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
3228 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_false);
3229 __ JumpIfSmi(x0, if_false);
3230 // Check for undetectable objects => true.
3231 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
3232 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
3233 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
3234 fall_through);
3235 } else if (String::Equals(check, factory->function_string())) {
3236 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
3237 __ JumpIfSmi(x0, if_false);
3238 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
3239 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
3240 __ And(x1, x1, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
3241 __ CompareAndSplit(x1, Operand(1 << Map::kIsCallable), eq, if_true,
3242 if_false, fall_through);
3243 } else if (String::Equals(check, factory->object_string())) {
3244 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
3245 __ JumpIfSmi(x0, if_false);
3246 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
3247 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3248 __ JumpIfObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE, if_false, lt);
3249 // Check for callable or undetectable objects => false.
3250 __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
3251 __ TestAndSplit(x10, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable),
3252 if_true, if_false, fall_through);
3253 // clang-format off
3254 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
3255 } else if (String::Equals(check, factory->type##_string())) { \
3256 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof " \
3257 #type "_string"); \
3258 __ JumpIfSmi(x0, if_true); \
3259 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset)); \
3260 __ CompareRoot(x0, Heap::k##Type##MapRootIndex); \
3261 Split(eq, if_true, if_false, fall_through);
3262 SIMD128_TYPES(SIMD128_TYPE)
3263 #undef SIMD128_TYPE
3264 // clang-format on
3265 } else {
3266 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
3267 if (if_false != fall_through) __ B(if_false);
3268 }
3269 context()->Plug(if_true, if_false);
3270 }
3271
3272
3273 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3274 Comment cmnt(masm_, "[ CompareOperation");
3275
3276 // Try to generate an optimized comparison with a literal value.
3277 // TODO(jbramley): This only checks common values like NaN or undefined.
3278 // Should it also handle ARM64 immediate operands?
3279 if (TryLiteralCompare(expr)) {
3280 return;
3281 }
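  // TryLiteralCompare covers comparisons against a literal typeof string and
  // against null/undefined; see EmitLiteralCompareTypeof and
  // EmitLiteralCompareNil in this file.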
3282
3283 // Assign labels according to context()->PrepareTest.
3284 Label materialize_true;
3285 Label materialize_false;
3286 Label* if_true = NULL;
3287 Label* if_false = NULL;
3288 Label* fall_through = NULL;
3289 context()->PrepareTest(&materialize_true, &materialize_false,
3290 &if_true, &if_false, &fall_through);
3291
3292 Token::Value op = expr->op();
3293 VisitForStackValue(expr->left());
3294 switch (op) {
3295 case Token::IN:
3296 VisitForStackValue(expr->right());
3297 SetExpressionPosition(expr);
3298 EmitHasProperty();
3299 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3300 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
3301 Split(eq, if_true, if_false, fall_through);
3302 break;
3303
3304 case Token::INSTANCEOF: {
3305 VisitForAccumulatorValue(expr->right());
3306 SetExpressionPosition(expr);
3307 PopOperand(x1);
3308 __ Call(isolate()->builtins()->InstanceOf(), RelocInfo::CODE_TARGET);
3309 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3310 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
3311 Split(eq, if_true, if_false, fall_through);
3312 break;
3313 }
3314
3315 default: {
3316 VisitForAccumulatorValue(expr->right());
3317 SetExpressionPosition(expr);
3318 Condition cond = CompareIC::ComputeCondition(op);
3319
3320 // Pop the stack value.
3321 PopOperand(x1);
3322
3323 JumpPatchSite patch_site(masm_);
3324 if (ShouldInlineSmiCase(op)) {
3325 Label slow_case;
3326 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
3327 __ Cmp(x1, x0);
3328 Split(cond, if_true, if_false, NULL);
3329 __ Bind(&slow_case);
3330 }
3331
3332 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
3333 CallIC(ic, expr->CompareOperationFeedbackId());
3334 patch_site.EmitPatchInfo();
3335 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3336 __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
3337 }
3338 }
3339
3340 // Convert the result of the comparison into one expected for this
3341 // expression's context.
3342 context()->Plug(if_true, if_false);
3343 }
3344
3345
3346 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
3347 Expression* sub_expr,
3348 NilValue nil) {
3349 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
3350 Label materialize_true, materialize_false;
3351 Label* if_true = NULL;
3352 Label* if_false = NULL;
3353 Label* fall_through = NULL;
3354 context()->PrepareTest(&materialize_true, &materialize_false,
3355 &if_true, &if_false, &fall_through);
3356
3357 VisitForAccumulatorValue(sub_expr);
3358 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3359
3360 if (expr->op() == Token::EQ_STRICT) {
3361 Heap::RootListIndex nil_value = nil == kNullValue ?
3362 Heap::kNullValueRootIndex :
3363 Heap::kUndefinedValueRootIndex;
3364 __ CompareRoot(x0, nil_value);
3365 Split(eq, if_true, if_false, fall_through);
3366 } else {
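    // Non-strict comparison against null/undefined: smis are never nil;
    // otherwise test the map's undetectable bit, which (this check assumes)
    // is set for the null and undefined oddballs as well as for genuinely
    // undetectable objects.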
3367 __ JumpIfSmi(x0, if_false);
3368 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
3369 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
3370 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
3371 fall_through);
3372 }
3373
3374 context()->Plug(if_true, if_false);
3375 }
3376
3377
3378 void FullCodeGenerator::VisitYield(Yield* expr) {
3379 Comment cmnt(masm_, "[ Yield");
3380 SetExpressionPosition(expr);
3381
3382 // Evaluate yielded value first; the initial iterator definition depends on
3383 // this. It stays on the stack while we update the iterator.
3384 VisitForStackValue(expr->expression());
3385
3386 // TODO(jbramley): Tidy this up once the merge is done, using named registers
3387 // and suchlike. The implementation changes a little by bleeding_edge so I
3388 // don't want to spend too much time on it now.
3389
3390 Label suspend, continuation, post_runtime, resume, exception;
3391
3392 __ B(&suspend);
3393 // TODO(jbramley): This label is bound here because the following code
3394 // looks at its pos(). Is it possible to do something more efficient here,
3395 // perhaps using Adr?
3396 __ Bind(&continuation);
3397 // When we arrive here, x0 holds the generator object.
3398 __ RecordGeneratorContinuation();
3399 __ Ldr(x1, FieldMemOperand(x0, JSGeneratorObject::kResumeModeOffset));
3400 __ Ldr(x0, FieldMemOperand(x0, JSGeneratorObject::kInputOrDebugPosOffset));
3401 STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn);
3402 STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn);
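  // The resume mode is dispatched with a single comparison against kReturn:
  // "lt" means kNext (plain resume), "gt" means kThrow (re-throw the input),
  // and falling through means kReturn (wrap the input in an iterator result
  // and unwind). The STATIC_ASSERTs above guarantee this ordering.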
3403 __ Cmp(x1, Operand(Smi::FromInt(JSGeneratorObject::kReturn)));
3404 __ B(lt, &resume);
3405 __ Push(result_register());
3406 __ B(gt, &exception);
3407 EmitCreateIteratorResult(true);
3408 EmitUnwindAndReturn();
3409
3410 __ Bind(&exception);
3411 __ CallRuntime(expr->rethrow_on_exception() ? Runtime::kReThrow
3412 : Runtime::kThrow);
3413
3414 __ Bind(&suspend);
3415 OperandStackDepthIncrement(1); // Not popped on this path.
3416 VisitForAccumulatorValue(expr->generator_object());
3417 DCHECK((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
3418 __ Mov(x1, Smi::FromInt(continuation.pos()));
3419 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
3420 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
3421 __ Mov(x1, cp);
3422 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
3423 kLRHasBeenSaved, kDontSaveFPRegs);
3424 __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
3425 __ Cmp(__ StackPointer(), x1);
3426 __ B(eq, &post_runtime);
3427 __ Push(x0); // generator object
3428 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
3429 RestoreContext();
3430 __ Bind(&post_runtime);
3431 PopOperand(result_register());
3432 EmitReturnSequence();
3433
3434 __ Bind(&resume);
3435 context()->Plug(result_register());
3436 }
3437
3438 void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
3439 OperandStackDepthIncrement(2);
3440 __ Push(reg1, reg2);
3441 }
3442
3443 void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
3444 Register reg3) {
3445 OperandStackDepthIncrement(3);
3446 __ Push(reg1, reg2, reg3);
3447 }
3448
3449 void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
3450 OperandStackDepthDecrement(2);
3451 __ Pop(reg1, reg2);
3452 }
3453
3454 void FullCodeGenerator::EmitOperandStackDepthCheck() {
3455 if (FLAG_debug_code) {
3456 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
3457 operand_stack_depth_ * kPointerSize;
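    // The distance from the frame pointer to the stack pointer should equal
    // the fixed frame size plus whatever the virtual operand stack currently
    // holds.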
3458 __ Sub(x0, fp, jssp);
3459 __ Cmp(x0, Operand(expected_diff));
3460 __ Assert(eq, kUnexpectedStackDepth);
3461 }
3462 }
3463
3464 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
3465 Label allocate, done_allocate;
3466
3467 // Allocate and populate an object with this form: { value: VAL, done: DONE }
3468
3469 Register result = x0;
3470 __ Allocate(JSIteratorResult::kSize, result, x10, x11, &allocate,
3471 NO_ALLOCATION_FLAGS);
3472 __ B(&done_allocate);
3473
3474 __ Bind(&allocate);
3475 __ Push(Smi::FromInt(JSIteratorResult::kSize));
3476 __ CallRuntime(Runtime::kAllocateInNewSpace);
3477
3478 __ Bind(&done_allocate);
3479 Register map_reg = x1;
3480 Register result_value = x2;
3481 Register boolean_done = x3;
3482 Register empty_fixed_array = x4;
3483 Register untagged_result = x5;
3484 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, map_reg);
3485 PopOperand(result_value);
3486 __ LoadRoot(boolean_done,
3487 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
3488 __ LoadRoot(empty_fixed_array, Heap::kEmptyFixedArrayRootIndex);
3489 STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
3490 JSObject::kElementsOffset);
3491 STATIC_ASSERT(JSIteratorResult::kValueOffset + kPointerSize ==
3492 JSIteratorResult::kDoneOffset);
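  // The STATIC_ASSERTs above ensure that each field pair is adjacent, so each
  // pair can be initialized with a single Stp (store pair) instruction below.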
3493 __ ObjectUntag(untagged_result, result);
3494 __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
3495 __ Stp(empty_fixed_array, empty_fixed_array,
3496 MemOperand(untagged_result, JSObject::kPropertiesOffset));
3497 __ Stp(result_value, boolean_done,
3498 MemOperand(untagged_result, JSIteratorResult::kValueOffset));
3499 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
3500 }
3501
3502
3503 // TODO(all): I don't like this method.
3504 // It seems to me that in too many places x0 is used in place of this.
3505 // Also, this function is not suitable for all places where x0 should be
3506 // abstracted (e.g. when used as an argument). But some places assume that the
3507 // first argument register is x0, and use this function instead.
3508 // Considering that most of the register allocation is hard-coded in the
3509 // FullCodeGen, that it is unlikely we will need to change it extensively, and
3510 // that abstracting the allocation through functions would not yield any
3511 // performance benefit, I think the existence of this function is debatable.
3512 Register FullCodeGenerator::result_register() {
3513 return x0;
3514 }
3515
3516
3517 Register FullCodeGenerator::context_register() {
3518 return cp;
3519 }
3520
3521 void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
3522 DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
3523 __ Ldr(value, MemOperand(fp, frame_offset));
3524 }
3525
3526 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
3527 DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
3528 __ Str(value, MemOperand(fp, frame_offset));
3529 }
3530
3531
3532 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
3533 __ Ldr(dst, ContextMemOperand(cp, context_index));
3534 }
3535
3536
3537 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
3538 DeclarationScope* closure_scope = scope()->GetClosureScope();
3539 if (closure_scope->is_script_scope() ||
3540 closure_scope->is_module_scope()) {
3541 // Contexts nested in the native context have a canonical empty function
3542 // as their closure, not the anonymous closure containing the global
3543 // code.
3544 DCHECK(kSmiTag == 0);
3545 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, x10);
3546 } else if (closure_scope->is_eval_scope()) {
3547 // Contexts created by a call to eval have the same closure as the
3548 // context calling eval, not the anonymous closure containing the eval
3549 // code. Fetch it from the context.
3550 __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
3551 } else {
3552 DCHECK(closure_scope->is_function_scope());
3553 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3554 }
3555 PushOperand(x10);
3556 }
3557
3558
3559 void FullCodeGenerator::EnterFinallyBlock() {
3560 ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
3561 DCHECK(!result_register().is(x10));
3562 // Store pending message while executing finally block.
3563 ExternalReference pending_message_obj =
3564 ExternalReference::address_of_pending_message_obj(isolate());
3565 __ Mov(x10, pending_message_obj);
3566 __ Ldr(x10, MemOperand(x10));
3567 PushOperand(x10);
3568
3569 ClearPendingMessage();
3570 }
3571
3572
3573 void FullCodeGenerator::ExitFinallyBlock() {
3574 ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
3575 DCHECK(!result_register().is(x10));
3576
3577 // Restore pending message from stack.
3578 PopOperand(x10);
3579 ExternalReference pending_message_obj =
3580 ExternalReference::address_of_pending_message_obj(isolate());
3581 __ Mov(x13, pending_message_obj);
3582 __ Str(x10, MemOperand(x13));
3583 }
3584
3585
3586 void FullCodeGenerator::ClearPendingMessage() {
3587 DCHECK(!result_register().is(x10));
3588 ExternalReference pending_message_obj =
3589 ExternalReference::address_of_pending_message_obj(isolate());
3590 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
3591 __ Mov(x13, pending_message_obj);
3592 __ Str(x10, MemOperand(x13));
3593 }
3594
3595
3596 void FullCodeGenerator::DeferredCommands::EmitCommands() {
3597 __ Pop(result_register(), x1); // Restore the accumulator and get the token.
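  // Each deferred command is guarded by a comparison against the token popped
  // into x1; commands whose token does not match are skipped.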
3598 for (DeferredCommand cmd : commands_) {
3599 Label skip;
3600 __ Cmp(x1, Operand(Smi::FromInt(cmd.token)));
3601 __ B(ne, &skip);
3602 switch (cmd.command) {
3603 case kReturn:
3604 codegen_->EmitUnwindAndReturn();
3605 break;
3606 case kThrow:
3607 __ Push(result_register());
3608 __ CallRuntime(Runtime::kReThrow);
3609 break;
3610 case kContinue:
3611 codegen_->EmitContinue(cmd.target);
3612 break;
3613 case kBreak:
3614 codegen_->EmitBreak(cmd.target);
3615 break;
3616 }
3617 __ bind(&skip);
3618 }
3619 }
3620
3621 #undef __
3622
3623
3624 void BackEdgeTable::PatchAt(Code* unoptimized_code,
3625 Address pc,
3626 BackEdgeState target_state,
3627 Code* replacement_code) {
3628 // Turn the jump into a nop.
3629 Address branch_address = pc - 3 * kInstructionSize;
3630 Isolate* isolate = unoptimized_code->GetIsolate();
3631 PatchingAssembler patcher(isolate, branch_address, 1);
3632
3633 DCHECK(Instruction::Cast(branch_address)
3634 ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
3635 (Instruction::Cast(branch_address)->IsCondBranchImm() &&
3636 Instruction::Cast(branch_address)->ImmPCOffset() ==
3637 6 * kInstructionSize));
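  // The patched slot sits three instructions before pc and, per the DCHECK
  // above, currently holds either the INTERRUPT_CODE_NOP or the conditional
  // branch; the sequences sketched below show both states.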
3638
3639 switch (target_state) {
3640 case INTERRUPT:
3641 // <decrement profiling counter>
3642 // .. .. .. .. b.pl ok
3643 // .. .. .. .. ldr x16, pc+<interrupt stub address>
3644 // .. .. .. .. blr x16
3645 // ... more instructions.
3646 // ok-label
3647 // Jump offset is 6 instructions.
3648 patcher.b(6, pl);
3649 break;
3650 case ON_STACK_REPLACEMENT:
3651 // <decrement profiling counter>
3652 // .. .. .. .. mov x0, x0 (NOP)
3653 // .. .. .. .. ldr x16, pc+<on-stack replacement address>
3654 // .. .. .. .. blr x16
3655 patcher.nop(Assembler::INTERRUPT_CODE_NOP);
3656 break;
3657 }
3658
3659 // Replace the call address.
3660 Instruction* load = Instruction::Cast(pc)->preceding(2);
3661 Address interrupt_address_pointer =
3662 reinterpret_cast<Address>(load) + load->ImmPCOffset();
3663   DCHECK((Memory::uint64_at(interrupt_address_pointer) ==
3664           reinterpret_cast<uint64_t>(
3665               isolate->builtins()->OnStackReplacement()->entry())) ||
3666          (Memory::uint64_at(interrupt_address_pointer) ==
3667           reinterpret_cast<uint64_t>(
3668               isolate->builtins()->InterruptCheck()->entry())));
3672 Memory::uint64_at(interrupt_address_pointer) =
3673 reinterpret_cast<uint64_t>(replacement_code->entry());
3674
3675 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
3676 unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
3677 }
3678
3679
3680 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
3681 Isolate* isolate,
3682 Code* unoptimized_code,
3683 Address pc) {
3684 // TODO(jbramley): There should be some extra assertions here (as in the ARM
3685 // back-end), but this function is gone in bleeding_edge so it might not
3686 // matter anyway.
3687 Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);
3688
3689 if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
3690 Instruction* load = Instruction::Cast(pc)->preceding(2);
3691 uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
3692 load->ImmPCOffset());
3693 if (entry == reinterpret_cast<uint64_t>(
3694 isolate->builtins()->OnStackReplacement()->entry())) {
3695 return ON_STACK_REPLACEMENT;
3696 } else {
3697 UNREACHABLE();
3698 }
3699 }
3700
3701 return INTERRUPT;
3702 }
3703
3704
3705 } // namespace internal
3706 } // namespace v8
3707
3708 #endif // V8_TARGET_ARCH_ARM64
3709