1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_ARM
6
7 #include "src/full-codegen/full-codegen.h"
8 #include "src/ast/compile-time-value.h"
9 #include "src/ast/scopes.h"
10 #include "src/code-factory.h"
11 #include "src/code-stubs.h"
12 #include "src/codegen.h"
13 #include "src/compilation-info.h"
14 #include "src/compiler.h"
15 #include "src/debug/debug.h"
16 #include "src/ic/ic.h"
17
18 #include "src/arm/code-stubs-arm.h"
19 #include "src/arm/macro-assembler-arm.h"
20
21 namespace v8 {
22 namespace internal {
23
24 #define __ ACCESS_MASM(masm())
25
26 // A patch site is a location in the code that can be patched. This class has
27 // a number of methods to emit the patchable code and the method EmitPatchInfo
28 // to record a marker back to the patchable code. This marker is a
29 // cmp rx, #yyy instruction, where x * 0x00000fff + yyy (yyy being the raw
30 // 12-bit immediate value) is the delta from the pc to the first instruction
31 // of the patchable code.
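// For example (illustrative only): a delta of 42 instructions is recorded by
// EmitPatchInfo as cmp r0, #42, since 42 / 0xfff == 0 and 42 % 0xfff == 42.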
32 class JumpPatchSite BASE_EMBEDDED {
33 public:
34  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
35 #ifdef DEBUG
36 info_emitted_ = false;
37 #endif
38 }
39
40  ~JumpPatchSite() {
41 DCHECK(patch_site_.is_bound() == info_emitted_);
42 }
43
44 // When initially emitting this, ensure that a jump is always generated to
45 // skip the inlined smi code.
46  void EmitJumpIfNotSmi(Register reg, Label* target) {
47 DCHECK(!patch_site_.is_bound() && !info_emitted_);
48 Assembler::BlockConstPoolScope block_const_pool(masm_);
49 __ bind(&patch_site_);
50 __ cmp(reg, Operand(reg));
51 __ b(eq, target); // Always taken before patched.
52 }
53
54 // When initially emitting this, ensure that a jump is never generated to
55 // skip the inlined smi code.
56  void EmitJumpIfSmi(Register reg, Label* target) {
57 DCHECK(!patch_site_.is_bound() && !info_emitted_);
58 Assembler::BlockConstPoolScope block_const_pool(masm_);
59 __ bind(&patch_site_);
60 __ cmp(reg, Operand(reg));
61 __ b(ne, target); // Never taken before patched.
62 }
63
64  void EmitPatchInfo() {
65 // Block literal pool emission whilst recording patch site information.
66 Assembler::BlockConstPoolScope block_const_pool(masm_);
67 if (patch_site_.is_bound()) {
68 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
69 Register reg;
70 reg.set_code(delta_to_patch_site / kOff12Mask);
71 __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
72 #ifdef DEBUG
73 info_emitted_ = true;
74 #endif
75 } else {
76 __ nop(); // Signals no inlined code.
77 }
78 }
79
80 private:
81  MacroAssembler* masm() { return masm_; }
82 MacroAssembler* masm_;
83 Label patch_site_;
84 #ifdef DEBUG
85 bool info_emitted_;
86 #endif
87 };
88
89
90 // Generate code for a JS function. On entry to the function the receiver
91 // and arguments have been pushed on the stack left to right. The actual
92 // argument count matches the formal parameter count expected by the
93 // function.
94 //
95 // The live registers are:
96 // o r1: the JS function object being called (i.e., ourselves)
97 // o r3: the new target value
98 // o cp: our context
99 // o pp: our caller's constant pool pointer (if enabled)
100 // o fp: our caller's frame pointer
101 // o sp: stack pointer
102 // o lr: return address
103 //
104 // The function builds a JS frame. Please see JavaScriptFrameConstants in
105 // frames-arm.h for its layout.
106 void FullCodeGenerator::Generate() {
107 CompilationInfo* info = info_;
108 profiling_counter_ = isolate()->factory()->NewCell(
109 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
110 SetFunctionPosition(literal());
111 Comment cmnt(masm_, "[ function compiled by full code generator");
112
113 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
114
115 if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
116 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
117 __ ldr(r2, MemOperand(sp, receiver_offset));
118 __ AssertNotSmi(r2);
119 __ CompareObjectType(r2, r2, no_reg, FIRST_JS_RECEIVER_TYPE);
120 __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
121 }
122
123 // Open a frame scope to indicate that there is a frame on the stack. The
124 // MANUAL indicates that the scope shouldn't actually generate code to set up
125 // the frame (that is done below).
126 FrameScope frame_scope(masm_, StackFrame::MANUAL);
127
128 info->set_prologue_offset(masm_->pc_offset());
129 __ Prologue(info->GeneratePreagedPrologue());
130
131 // Increment invocation count for the function.
132 {
133 Comment cmnt(masm_, "[ Increment invocation count");
134 __ ldr(r2, FieldMemOperand(r1, JSFunction::kLiteralsOffset));
135 __ ldr(r2, FieldMemOperand(r2, LiteralsArray::kFeedbackVectorOffset));
136 __ ldr(r9, FieldMemOperand(r2, TypeFeedbackVector::kInvocationCountIndex *
137 kPointerSize +
138 TypeFeedbackVector::kHeaderSize));
139 __ add(r9, r9, Operand(Smi::FromInt(1)));
140 __ str(r9, FieldMemOperand(r2, TypeFeedbackVector::kInvocationCountIndex *
141 kPointerSize +
142 TypeFeedbackVector::kHeaderSize));
143 }
144
145 { Comment cmnt(masm_, "[ Allocate locals");
146 int locals_count = info->scope()->num_stack_slots();
147 // Generators allocate locals, if any, in context slots.
148 DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
149 OperandStackDepthIncrement(locals_count);
150 if (locals_count > 0) {
151 if (locals_count >= 128) {
152 Label ok;
153 __ sub(r9, sp, Operand(locals_count * kPointerSize));
154 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
155 __ cmp(r9, Operand(r2));
156 __ b(hs, &ok);
157 __ CallRuntime(Runtime::kThrowStackOverflow);
158 __ bind(&ok);
159 }
160 __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
161 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
162 if (locals_count >= kMaxPushes) {
163 int loop_iterations = locals_count / kMaxPushes;
164 __ mov(r2, Operand(loop_iterations));
165 Label loop_header;
166 __ bind(&loop_header);
167 // Do pushes.
168 for (int i = 0; i < kMaxPushes; i++) {
169 __ push(r9);
170 }
171 // Continue loop if not done.
172 __ sub(r2, r2, Operand(1), SetCC);
173 __ b(&loop_header, ne);
174 }
175 int remaining = locals_count % kMaxPushes;
176 // Emit the remaining pushes.
177 for (int i = 0; i < remaining; i++) {
178 __ push(r9);
179 }
180 }
181 }
182
183 bool function_in_register_r1 = true;
184
185 // Possibly allocate a local context.
186 if (info->scope()->NeedsContext()) {
187 // Argument to NewContext is the function, which is still in r1.
188 Comment cmnt(masm_, "[ Allocate context");
189 bool need_write_barrier = true;
190 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
191 if (info->scope()->is_script_scope()) {
192 __ push(r1);
193 __ Push(info->scope()->scope_info());
194 __ CallRuntime(Runtime::kNewScriptContext);
195 PrepareForBailoutForId(BailoutId::ScriptContext(),
196 BailoutState::TOS_REGISTER);
197 // The new target value is not used, so clobbering is safe.
198 DCHECK_NULL(info->scope()->new_target_var());
199 } else {
200 if (info->scope()->new_target_var() != nullptr) {
201 __ push(r3); // Preserve new target.
202 }
203 if (slots <= FastNewFunctionContextStub::kMaximumSlots) {
204 FastNewFunctionContextStub stub(isolate());
205 __ mov(FastNewFunctionContextDescriptor::SlotsRegister(),
206 Operand(slots));
207 __ CallStub(&stub);
208 // Result of FastNewFunctionContextStub is always in new space.
209 need_write_barrier = false;
210 } else {
211 __ push(r1);
212 __ CallRuntime(Runtime::kNewFunctionContext);
213 }
214 if (info->scope()->new_target_var() != nullptr) {
215 __ pop(r3); // Restore new target.
216 }
217 }
218 function_in_register_r1 = false;
219 // Context is returned in r0. It replaces the context passed to us.
220 // It's saved in the stack and kept live in cp.
221 __ mov(cp, r0);
222 __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
223 // Copy any necessary parameters into the context.
224 int num_parameters = info->scope()->num_parameters();
225 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
226 for (int i = first_parameter; i < num_parameters; i++) {
227 Variable* var =
228 (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
229 if (var->IsContextSlot()) {
230 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
231 (num_parameters - 1 - i) * kPointerSize;
232 // Load parameter from stack.
233 __ ldr(r0, MemOperand(fp, parameter_offset));
234 // Store it in the context.
235 MemOperand target = ContextMemOperand(cp, var->index());
236 __ str(r0, target);
237
238 // Update the write barrier.
239 if (need_write_barrier) {
240 __ RecordWriteContextSlot(cp, target.offset(), r0, r2,
241 kLRHasBeenSaved, kDontSaveFPRegs);
242 } else if (FLAG_debug_code) {
243 Label done;
244 __ JumpIfInNewSpace(cp, r0, &done);
245 __ Abort(kExpectedNewSpaceObject);
246 __ bind(&done);
247 }
248 }
249 }
250 }
251
252 // The registers holding this function and the new target are both trashed if
253 // we bail out here. But since that can only happen when the new target is not
254 // used and we allocate a context, the value of |function_in_register_r1| is correct.
255 PrepareForBailoutForId(BailoutId::FunctionContext(),
256 BailoutState::NO_REGISTERS);
257
258 // Possibly set up a local binding to the this function which is used in
259 // derived constructors with super calls.
260 Variable* this_function_var = info->scope()->this_function_var();
261 if (this_function_var != nullptr) {
262 Comment cmnt(masm_, "[ This function");
263 if (!function_in_register_r1) {
264 __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
265 // The write barrier clobbers the register again, so keep it marked as such.
266 }
267 SetVar(this_function_var, r1, r0, r2);
268 }
269
270 // Possibly set up a local binding to the new target value.
271 Variable* new_target_var = info->scope()->new_target_var();
272 if (new_target_var != nullptr) {
273 Comment cmnt(masm_, "[ new.target");
274 SetVar(new_target_var, r3, r0, r2);
275 }
276
277 // Possibly allocate a rest parameter array.
278 Variable* rest_param = info->scope()->rest_parameter();
279 if (rest_param != nullptr) {
280 Comment cmnt(masm_, "[ Allocate rest parameter array");
281 if (!function_in_register_r1) {
282 __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
283 }
284 FastNewRestParameterStub stub(isolate());
285 __ CallStub(&stub);
286 function_in_register_r1 = false;
287 SetVar(rest_param, r0, r1, r2);
288 }
289
290 Variable* arguments = info->scope()->arguments();
291 if (arguments != NULL) {
292 // Function uses arguments object.
293 Comment cmnt(masm_, "[ Allocate arguments object");
294 if (!function_in_register_r1) {
295 // Load this again, if it's used by the local context below.
296 __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
297 }
298 if (is_strict(language_mode()) || !has_simple_parameters()) {
299 FastNewStrictArgumentsStub stub(isolate());
300 __ CallStub(&stub);
301 } else if (literal()->has_duplicate_parameters()) {
302 __ Push(r1);
303 __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
304 } else {
305 FastNewSloppyArgumentsStub stub(isolate());
306 __ CallStub(&stub);
307 }
308
309 SetVar(arguments, r0, r1, r2);
310 }
311
312 if (FLAG_trace) {
313 __ CallRuntime(Runtime::kTraceEnter);
314 }
315
316 // Visit the declarations and body.
317 PrepareForBailoutForId(BailoutId::FunctionEntry(),
318 BailoutState::NO_REGISTERS);
319 {
320 Comment cmnt(masm_, "[ Declarations");
321 VisitDeclarations(scope()->declarations());
322 }
323
324 // Assert that the declarations do not use ICs. Otherwise the debugger
325 // won't be able to redirect a PC at an IC to the correct IC in newly
326 // recompiled code.
327 DCHECK_EQ(0, ic_total_count_);
328
329 {
330 Comment cmnt(masm_, "[ Stack check");
331 PrepareForBailoutForId(BailoutId::Declarations(),
332 BailoutState::NO_REGISTERS);
333 Label ok;
334 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
335 __ cmp(sp, Operand(ip));
336 __ b(hs, &ok);
337 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
338 PredictableCodeSizeScope predictable(masm_);
339 predictable.ExpectSize(
340 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
341 __ Call(stack_check, RelocInfo::CODE_TARGET);
342 __ bind(&ok);
343 }
344
345 {
346 Comment cmnt(masm_, "[ Body");
347 DCHECK(loop_depth() == 0);
348 VisitStatements(literal()->body());
349 DCHECK(loop_depth() == 0);
350 }
351
352 // Always emit a 'return undefined' in case control fell off the end of
353 // the body.
354 { Comment cmnt(masm_, "[ return <undefined>;");
355 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
356 }
357 EmitReturnSequence();
358
359 // Force emit the constant pool, so it doesn't get emitted in the middle
360 // of the back edge table.
361 masm()->CheckConstPool(true, false);
362 }
363
364 void FullCodeGenerator::ClearAccumulator() { __ mov(r0, Operand(Smi::kZero)); }
365
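// Decrements the Smi profiling counter stored in the counter Cell and leaves
// the condition flags set (SetCC), so callers can branch on pl to skip the
// interrupt check while the budget has not yet been exhausted.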
366 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
367 __ mov(r2, Operand(profiling_counter_));
368 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
369 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
370 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
371 }
372
373
374 #ifdef CAN_USE_ARMV7_INSTRUCTIONS
375 static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
376 #else
377 static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
378 #endif
379
380
381 void FullCodeGenerator::EmitProfilingCounterReset() {
382 Assembler::BlockConstPoolScope block_const_pool(masm_);
383 PredictableCodeSizeScope predictable_code_size_scope(
384 masm_, kProfileCounterResetSequenceLength);
385 Label start;
386 __ bind(&start);
387 int reset_value = FLAG_interrupt_budget;
388 __ mov(r2, Operand(profiling_counter_));
389 // The mov instruction above can be anywhere from 1 to 3 instructions (for
390 // ARMv7) or from 1 to 5 instructions (for ARMv6), depending on whether it
391 // uses an extended constant pool - insert nops to compensate.
392 int expected_instr_count =
393 (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
394 DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
395 while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
396 __ nop();
397 }
398 __ mov(r3, Operand(Smi::FromInt(reset_value)));
399 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
400 }
401
402
403 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
404 Label* back_edge_target) {
405 Comment cmnt(masm_, "[ Back edge bookkeeping");
406 // Block literal pools whilst emitting back edge code.
407 Assembler::BlockConstPoolScope block_const_pool(masm_);
408 Label ok;
409
410 DCHECK(back_edge_target->is_bound());
411 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
412 int weight = Min(kMaxBackEdgeWeight,
413 Max(1, distance / kCodeSizeMultiplier));
414 EmitProfilingCounterDecrement(weight);
415 __ b(pl, &ok);
416 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
417
418 // Record a mapping of this PC offset to the OSR id. This is used to find
419 // the AST id from the unoptimized code in order to use it as a key into
420 // the deoptimization input data found in the optimized code.
421 RecordBackEdge(stmt->OsrEntryId());
422
423 EmitProfilingCounterReset();
424
425 __ bind(&ok);
426 PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
427 // Record a mapping of the OSR id to this PC. This is used if the OSR
428 // entry becomes the target of a bailout. We don't expect it to be, but
429 // we want it to work if it is.
430 PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
431 }
432
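// Treats the function exit as a back edge to the entry: decrements the
// profiling counter (weighted either by the self-optimization budget or by the
// size of the generated code), calls the InterruptCheck builtin if the budget
// is exhausted, and then resets the counter.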
433 void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
434 bool is_tail_call) {
435 // Pretend that the exit is a backwards jump to the entry.
436 int weight = 1;
437 if (info_->ShouldSelfOptimize()) {
438 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
439 } else {
440 int distance = masm_->pc_offset();
441 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
442 }
443 EmitProfilingCounterDecrement(weight);
444 Label ok;
445 __ b(pl, &ok);
446 // Don't need to save result register if we are going to do a tail call.
447 if (!is_tail_call) {
448 __ push(r0);
449 }
450 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
451 if (!is_tail_call) {
452 __ pop(r0);
453 }
454 EmitProfilingCounterReset();
455 __ bind(&ok);
456 }
457
458 void FullCodeGenerator::EmitReturnSequence() {
459 Comment cmnt(masm_, "[ Return sequence");
460 if (return_label_.is_bound()) {
461 __ b(&return_label_);
462 } else {
463 __ bind(&return_label_);
464 if (FLAG_trace) {
465 // Push the return value on the stack as the parameter.
466 // Runtime::TraceExit returns its parameter in r0.
467 __ push(r0);
468 __ CallRuntime(Runtime::kTraceExit);
469 }
470 EmitProfilingCounterHandlingForReturnSequence(false);
471
472 // Make sure that the constant pool is not emitted inside of the return
473 // sequence.
474 { Assembler::BlockConstPoolScope block_const_pool(masm_);
475 int32_t arg_count = info_->scope()->num_parameters() + 1;
476 int32_t sp_delta = arg_count * kPointerSize;
477 SetReturnPosition(literal());
478 // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
479 PredictableCodeSizeScope predictable(masm_, -1);
480 __ LeaveFrame(StackFrame::JAVA_SCRIPT);
481 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
482 __ add(sp, sp, Operand(sp_delta));
483 __ Jump(lr);
484 }
485 }
486 }
487 }
488
489 void FullCodeGenerator::RestoreContext() {
490 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
491 }
492
493 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
494 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
495 codegen()->GetVar(result_register(), var);
496 codegen()->PushOperand(result_register());
497 }
498
499
500 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
501 }
502
503
504 void FullCodeGenerator::AccumulatorValueContext::Plug(
505 Heap::RootListIndex index) const {
506 __ LoadRoot(result_register(), index);
507 }
508
509
510 void FullCodeGenerator::StackValueContext::Plug(
511 Heap::RootListIndex index) const {
512 __ LoadRoot(result_register(), index);
513 codegen()->PushOperand(result_register());
514 }
515
516
517 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
518 codegen()->PrepareForBailoutBeforeSplit(condition(),
519 true,
520 true_label_,
521 false_label_);
522 if (index == Heap::kUndefinedValueRootIndex ||
523 index == Heap::kNullValueRootIndex ||
524 index == Heap::kFalseValueRootIndex) {
525 if (false_label_ != fall_through_) __ b(false_label_);
526 } else if (index == Heap::kTrueValueRootIndex) {
527 if (true_label_ != fall_through_) __ b(true_label_);
528 } else {
529 __ LoadRoot(result_register(), index);
530 codegen()->DoTest(this);
531 }
532 }
533
534
535 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
536 }
537
538
539 void FullCodeGenerator::AccumulatorValueContext::Plug(
540 Handle<Object> lit) const {
541 __ mov(result_register(), Operand(lit));
542 }
543
544
545 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
546 // Immediates cannot be pushed directly.
547 __ mov(result_register(), Operand(lit));
548 codegen()->PushOperand(result_register());
549 }
550
551
552 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
553 codegen()->PrepareForBailoutBeforeSplit(condition(),
554 true,
555 true_label_,
556 false_label_);
557 DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
558 !lit->IsUndetectable());
559 if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
560 lit->IsFalse(isolate())) {
561 if (false_label_ != fall_through_) __ b(false_label_);
562 } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
563 if (true_label_ != fall_through_) __ b(true_label_);
564 } else if (lit->IsString()) {
565 if (String::cast(*lit)->length() == 0) {
566 if (false_label_ != fall_through_) __ b(false_label_);
567 } else {
568 if (true_label_ != fall_through_) __ b(true_label_);
569 }
570 } else if (lit->IsSmi()) {
571 if (Smi::cast(*lit)->value() == 0) {
572 if (false_label_ != fall_through_) __ b(false_label_);
573 } else {
574 if (true_label_ != fall_through_) __ b(true_label_);
575 }
576 } else {
577 // For simplicity we always test the accumulator register.
578 __ mov(result_register(), Operand(lit));
579 codegen()->DoTest(this);
580 }
581 }
582
583
584 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
585 Register reg) const {
586 DCHECK(count > 0);
587 if (count > 1) codegen()->DropOperands(count - 1);
588 __ str(reg, MemOperand(sp, 0));
589 }
590
591
592 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
593 Label* materialize_false) const {
594 DCHECK(materialize_true == materialize_false);
595 __ bind(materialize_true);
596 }
597
598
599 void FullCodeGenerator::AccumulatorValueContext::Plug(
600 Label* materialize_true,
601 Label* materialize_false) const {
602 Label done;
603 __ bind(materialize_true);
604 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
605 __ jmp(&done);
606 __ bind(materialize_false);
607 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
608 __ bind(&done);
609 }
610
611
612 void FullCodeGenerator::StackValueContext::Plug(
613 Label* materialize_true,
614 Label* materialize_false) const {
615 Label done;
616 __ bind(materialize_true);
617 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
618 __ jmp(&done);
619 __ bind(materialize_false);
620 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
621 __ bind(&done);
622 codegen()->PushOperand(ip);
623 }
624
625
626 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
627 Label* materialize_false) const {
628 DCHECK(materialize_true == true_label_);
629 DCHECK(materialize_false == false_label_);
630 }
631
632
633 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
634 Heap::RootListIndex value_root_index =
635 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
636 __ LoadRoot(result_register(), value_root_index);
637 }
638
639
640 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
641 Heap::RootListIndex value_root_index =
642 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
643 __ LoadRoot(ip, value_root_index);
644 codegen()->PushOperand(ip);
645 }
646
647
648 void FullCodeGenerator::TestContext::Plug(bool flag) const {
649 codegen()->PrepareForBailoutBeforeSplit(condition(),
650 true,
651 true_label_,
652 false_label_);
653 if (flag) {
654 if (true_label_ != fall_through_) __ b(true_label_);
655 } else {
656 if (false_label_ != fall_through_) __ b(false_label_);
657 }
658 }
659
660
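// Emits the ToBoolean IC for |condition| and turns the result into control
// flow: branches to |if_true| if the IC result is the true value, otherwise to
// |if_false|, using |fall_through| to avoid an unnecessary branch.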
661 void FullCodeGenerator::DoTest(Expression* condition,
662 Label* if_true,
663 Label* if_false,
664 Label* fall_through) {
665 Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
666 CallIC(ic, condition->test_id());
667 __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
668 Split(eq, if_true, if_false, fall_through);
669 }
670
671
672 void FullCodeGenerator::Split(Condition cond,
673 Label* if_true,
674 Label* if_false,
675 Label* fall_through) {
676 if (if_false == fall_through) {
677 __ b(cond, if_true);
678 } else if (if_true == fall_through) {
679 __ b(NegateCondition(cond), if_false);
680 } else {
681 __ b(cond, if_true);
682 __ b(if_false);
683 }
684 }
685
686
687 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
688 DCHECK(var->IsStackAllocated());
689 // Offset is negative because higher indexes are at lower addresses.
690 int offset = -var->index() * kPointerSize;
691 // Adjust by a (parameter or local) base offset.
692 if (var->IsParameter()) {
693 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
694 } else {
695 offset += JavaScriptFrameConstants::kLocal0Offset;
696 }
697 return MemOperand(fp, offset);
698 }
699
700
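// Returns a MemOperand addressing |var|: for context slots the enclosing
// context is first loaded into |scratch|, otherwise a frame-relative stack
// operand is produced via StackOperand.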
701 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
702 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
703 if (var->IsContextSlot()) {
704 int context_chain_length = scope()->ContextChainLength(var->scope());
705 __ LoadContext(scratch, context_chain_length);
706 return ContextMemOperand(scratch, var->index());
707 } else {
708 return StackOperand(var);
709 }
710 }
711
712
713 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
714 // Use destination as scratch.
715 MemOperand location = VarOperand(var, dest);
716 __ ldr(dest, location);
717 }
718
719
720 void FullCodeGenerator::SetVar(Variable* var,
721 Register src,
722 Register scratch0,
723 Register scratch1) {
724 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
725 DCHECK(!scratch0.is(src));
726 DCHECK(!scratch0.is(scratch1));
727 DCHECK(!scratch1.is(src));
728 MemOperand location = VarOperand(var, scratch0);
729 __ str(src, location);
730
731 // Emit the write barrier code if the location is in the heap.
732 if (var->IsContextSlot()) {
733 __ RecordWriteContextSlot(scratch0,
734 location.offset(),
735 src,
736 scratch1,
737 kLRHasBeenSaved,
738 kDontSaveFPRegs);
739 }
740 }
741
742
743 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
744 bool should_normalize,
745 Label* if_true,
746 Label* if_false) {
747 // Only prepare for bailouts before splits if we're in a test
748 // context. Otherwise, we let the Visit function deal with the
749 // preparation to avoid preparing with the same AST id twice.
750 if (!context()->IsTest()) return;
751
752 Label skip;
753 if (should_normalize) __ b(&skip);
754 PrepareForBailout(expr, BailoutState::TOS_REGISTER);
755 if (should_normalize) {
756 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
757 __ cmp(r0, ip);
758 Split(eq, if_true, if_false, NULL);
759 __ bind(&skip);
760 }
761 }
762
763
764 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
765 // The variable in the declaration always resides in the current function
766 // context.
767 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
768 if (FLAG_debug_code) {
769 // Check that we're not inside a with or catch context.
770 __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
771 __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
772 __ Check(ne, kDeclarationInWithContext);
773 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
774 __ Check(ne, kDeclarationInCatchContext);
775 }
776 }
777
778
779 void FullCodeGenerator::VisitVariableDeclaration(
780 VariableDeclaration* declaration) {
781 VariableProxy* proxy = declaration->proxy();
782 Variable* variable = proxy->var();
783 switch (variable->location()) {
784 case VariableLocation::UNALLOCATED: {
785 DCHECK(!variable->binding_needs_init());
786 FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
787 DCHECK(!slot.IsInvalid());
788 globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
789 globals_->Add(isolate()->factory()->undefined_value(), zone());
790 break;
791 }
792 case VariableLocation::PARAMETER:
793 case VariableLocation::LOCAL:
794 if (variable->binding_needs_init()) {
795 Comment cmnt(masm_, "[ VariableDeclaration");
796 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
797 __ str(r0, StackOperand(variable));
798 }
799 break;
800
801 case VariableLocation::CONTEXT:
802 if (variable->binding_needs_init()) {
803 Comment cmnt(masm_, "[ VariableDeclaration");
804 EmitDebugCheckDeclarationContext(variable);
805 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
806 __ str(r0, ContextMemOperand(cp, variable->index()));
807 // No write barrier since the_hole_value is in old space.
808 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
809 }
810 break;
811
812 case VariableLocation::LOOKUP: {
813 Comment cmnt(masm_, "[ VariableDeclaration");
814 DCHECK_EQ(VAR, variable->mode());
815 DCHECK(!variable->binding_needs_init());
816 __ mov(r2, Operand(variable->name()));
817 __ Push(r2);
818 __ CallRuntime(Runtime::kDeclareEvalVar);
819 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
820 break;
821 }
822
823 case VariableLocation::MODULE:
824 UNREACHABLE();
825 }
826 }
827
828
829 void FullCodeGenerator::VisitFunctionDeclaration(
830 FunctionDeclaration* declaration) {
831 VariableProxy* proxy = declaration->proxy();
832 Variable* variable = proxy->var();
833 switch (variable->location()) {
834 case VariableLocation::UNALLOCATED: {
835 FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
836 DCHECK(!slot.IsInvalid());
837 globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
838 Handle<SharedFunctionInfo> function =
839 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
840 // Check for stack-overflow exception.
841 if (function.is_null()) return SetStackOverflow();
842 globals_->Add(function, zone());
843 break;
844 }
845
846 case VariableLocation::PARAMETER:
847 case VariableLocation::LOCAL: {
848 Comment cmnt(masm_, "[ FunctionDeclaration");
849 VisitForAccumulatorValue(declaration->fun());
850 __ str(result_register(), StackOperand(variable));
851 break;
852 }
853
854 case VariableLocation::CONTEXT: {
855 Comment cmnt(masm_, "[ FunctionDeclaration");
856 EmitDebugCheckDeclarationContext(variable);
857 VisitForAccumulatorValue(declaration->fun());
858 __ str(result_register(), ContextMemOperand(cp, variable->index()));
859 int offset = Context::SlotOffset(variable->index());
860 // We know that we have written a function, which is not a smi.
861 __ RecordWriteContextSlot(cp,
862 offset,
863 result_register(),
864 r2,
865 kLRHasBeenSaved,
866 kDontSaveFPRegs,
867 EMIT_REMEMBERED_SET,
868 OMIT_SMI_CHECK);
869 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
870 break;
871 }
872
873 case VariableLocation::LOOKUP: {
874 Comment cmnt(masm_, "[ FunctionDeclaration");
875 __ mov(r2, Operand(variable->name()));
876 PushOperand(r2);
877 // Push initial value for function declaration.
878 VisitForStackValue(declaration->fun());
879 CallRuntimeWithOperands(Runtime::kDeclareEvalFunction);
880 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
881 break;
882 }
883
884 case VariableLocation::MODULE:
885 UNREACHABLE();
886 }
887 }
888
889
890 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
891 // Call the runtime to declare the globals.
892 __ mov(r1, Operand(pairs));
893 __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
894 __ EmitLoadTypeFeedbackVector(r2);
895 __ Push(r1, r0, r2);
896 __ CallRuntime(Runtime::kDeclareGlobals);
897 // Return value is ignored.
898 }
899
900
901 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
902 Comment cmnt(masm_, "[ SwitchStatement");
903 Breakable nested_statement(this, stmt);
904 SetStatementPosition(stmt);
905
906 // Keep the switch value on the stack until a case matches.
907 VisitForStackValue(stmt->tag());
908 PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
909
910 ZoneList<CaseClause*>* clauses = stmt->cases();
911 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
912
913 Label next_test; // Recycled for each test.
914 // Compile all the tests with branches to their bodies.
915 for (int i = 0; i < clauses->length(); i++) {
916 CaseClause* clause = clauses->at(i);
917 clause->body_target()->Unuse();
918
919 // The default is not a test, but remember it as final fall through.
920 if (clause->is_default()) {
921 default_clause = clause;
922 continue;
923 }
924
925 Comment cmnt(masm_, "[ Case comparison");
926 __ bind(&next_test);
927 next_test.Unuse();
928
929 // Compile the label expression.
930 VisitForAccumulatorValue(clause->label());
931
932 // Perform the comparison as if via '==='.
933 __ ldr(r1, MemOperand(sp, 0)); // Switch value.
934 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
935 JumpPatchSite patch_site(masm_);
936 if (inline_smi_code) {
937 Label slow_case;
938 __ orr(r2, r1, r0);
939 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
940
941 __ cmp(r1, r0);
942 __ b(ne, &next_test);
943 __ Drop(1); // Switch value is no longer needed.
944 __ b(clause->body_target());
945 __ bind(&slow_case);
946 }
947
948 // Record position before stub call for type feedback.
949 SetExpressionPosition(clause);
950 Handle<Code> ic =
951 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
952 CallIC(ic, clause->CompareId());
953 patch_site.EmitPatchInfo();
954
955 Label skip;
956 __ b(&skip);
957 PrepareForBailout(clause, BailoutState::TOS_REGISTER);
958 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
959 __ cmp(r0, ip);
960 __ b(ne, &next_test);
961 __ Drop(1);
962 __ jmp(clause->body_target());
963 __ bind(&skip);
964
965 __ cmp(r0, Operand::Zero());
966 __ b(ne, &next_test);
967 __ Drop(1); // Switch value is no longer needed.
968 __ b(clause->body_target());
969 }
970
971 // Discard the test value and jump to the default if present, otherwise to
972 // the end of the statement.
973 __ bind(&next_test);
974 DropOperands(1); // Switch value is no longer needed.
975 if (default_clause == NULL) {
976 __ b(nested_statement.break_label());
977 } else {
978 __ b(default_clause->body_target());
979 }
980
981 // Compile all the case bodies.
982 for (int i = 0; i < clauses->length(); i++) {
983 Comment cmnt(masm_, "[ Case body");
984 CaseClause* clause = clauses->at(i);
985 __ bind(clause->body_target());
986 PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
987 VisitStatements(clause->statements());
988 }
989
990 __ bind(nested_statement.break_label());
991 PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
992 }
993
994
995 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
996 Comment cmnt(masm_, "[ ForInStatement");
997 SetStatementPosition(stmt, SKIP_BREAK);
998
999 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1000
1001 // Get the object to enumerate over.
1002 SetExpressionAsStatementPosition(stmt->enumerable());
1003 VisitForAccumulatorValue(stmt->enumerable());
1004 OperandStackDepthIncrement(5);
1005
1006 Label loop, exit;
1007 Iteration loop_statement(this, stmt);
1008 increment_loop_depth();
1009
1010 // If the object is null or undefined, skip over the loop, otherwise convert
1011 // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
1012 Label convert, done_convert;
1013 __ JumpIfSmi(r0, &convert);
1014 __ CompareObjectType(r0, r1, r1, FIRST_JS_RECEIVER_TYPE);
1015 __ b(ge, &done_convert);
1016 __ CompareRoot(r0, Heap::kNullValueRootIndex);
1017 __ b(eq, &exit);
1018 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
1019 __ b(eq, &exit);
1020 __ bind(&convert);
1021 __ Call(isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET);
1022 RestoreContext();
1023 __ bind(&done_convert);
1024 PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
1025 __ push(r0);
1026
1027 // Check cache validity in generated code. If we cannot guarantee cache
1028 // validity, call the runtime system to check cache validity or get the
1029 // property names in a fixed array. Note: Proxies never have an enum cache,
1030 // so will always take the slow path.
1031 Label call_runtime;
1032 __ CheckEnumCache(&call_runtime);
1033
1034 // The enum cache is valid. Load the map of the object being
1035 // iterated over and use the cache for the iteration.
1036 Label use_cache;
1037 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
1038 __ b(&use_cache);
1039
1040 // Get the set of properties to enumerate.
1041 __ bind(&call_runtime);
1042 __ push(r0); // Duplicate the enumerable object on the stack.
1043 __ CallRuntime(Runtime::kForInEnumerate);
1044 PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
1045
1046 // If we got a map from the runtime call, we can do a fast
1047 // modification check. Otherwise, we got a fixed array, and we have
1048 // to do a slow check.
1049 Label fixed_array;
1050 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
1051 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
1052 __ cmp(r2, ip);
1053 __ b(ne, &fixed_array);
1054
1055 // We got a map in register r0. Get the enumeration cache from it.
1056 Label no_descriptors;
1057 __ bind(&use_cache);
1058
1059 __ EnumLength(r1, r0);
1060 __ cmp(r1, Operand(Smi::kZero));
1061 __ b(eq, &no_descriptors);
1062
1063 __ LoadInstanceDescriptors(r0, r2);
1064 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
1065 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1066
1067 // Set up the four remaining stack slots.
1068 __ push(r0); // Map.
1069 __ mov(r0, Operand(Smi::kZero));
1070 // Push enumeration cache, enumeration cache length (as smi) and zero.
1071 __ Push(r2, r1, r0);
1072 __ jmp(&loop);
1073
1074 __ bind(&no_descriptors);
1075 __ Drop(1);
1076 __ jmp(&exit);
1077
1078 // We got a fixed array in register r0. Iterate through that.
1079 __ bind(&fixed_array);
1080
1081 __ mov(r1, Operand(Smi::FromInt(1))); // Smi(1) indicates slow check
1082 __ Push(r1, r0); // Smi and array
1083 __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
1084 __ Push(r1); // Fixed array length (as smi).
1085 PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
1086 __ mov(r0, Operand(Smi::kZero));
1087 __ Push(r0); // Initial index.
1088
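// At this point the five for-in stack slots hold, from top to bottom: the
// current index, the cache/array length, the enum cache (or the fixed array of
// keys), the receiver map (or Smi(1) in the slow case), and the enumerable
// object itself.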
1089 // Generate code for doing the condition check.
1090 __ bind(&loop);
1091 SetExpressionAsStatementPosition(stmt->each());
1092
1093 // Load the current count to r0, load the length to r1.
1094 __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
1095 __ cmp(r0, r1); // Compare to the array length.
1096 __ b(hs, loop_statement.break_label());
1097
1098 // Get the current entry of the array into register r0.
1099 __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
1100 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1101 __ ldr(r0, MemOperand::PointerAddressFromSmiKey(r2, r0));
1102
1103 // Get the expected map from the stack or a smi in the
1104 // permanent slow case into register r2.
1105 __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
1106
1107 // Check if the expected map still matches that of the enumerable.
1108 // If not, we may have to filter the key.
1109 Label update_each;
1110 __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
1111 __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
1112 __ cmp(r4, Operand(r2));
1113 __ b(eq, &update_each);
1114
1115 // We need to filter the key, record slow-path here.
1116 int const vector_index = SmiFromSlot(slot)->value();
1117 __ EmitLoadTypeFeedbackVector(r3);
1118 __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1119 __ str(r2, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(vector_index)));
1120
1121 // r0 contains the key. The receiver in r1 is the second argument to the
1122 // ForInFilter. ForInFilter returns undefined if the receiver doesn't
1123 // have the key or returns the name-converted key.
1124 __ Call(isolate()->builtins()->ForInFilter(), RelocInfo::CODE_TARGET);
1125 RestoreContext();
1126 PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
1127 __ CompareRoot(result_register(), Heap::kUndefinedValueRootIndex);
1128 __ b(eq, loop_statement.continue_label());
1129
1130 // Update the 'each' property or variable from the possibly filtered
1131 // entry in register r0.
1132 __ bind(&update_each);
1133
1134 // Perform the assignment as if via '='.
1135 { EffectContext context(this);
1136 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1137 PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
1138 }
1139
1140 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1141 PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
1142 // Generate code for the body of the loop.
1143 Visit(stmt->body());
1144
1145 // Generate code for going to the next element by incrementing
1146 // the index (smi) stored on top of the stack.
1147 __ bind(loop_statement.continue_label());
1148 PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
1149 __ pop(r0);
1150 __ add(r0, r0, Operand(Smi::FromInt(1)));
1151 __ push(r0);
1152
1153 EmitBackEdgeBookkeeping(stmt, &loop);
1154 __ b(&loop);
1155
1156 // Remove the pointers stored on the stack.
1157 __ bind(loop_statement.break_label());
1158 DropOperands(5);
1159
1160 // Exit and decrement the loop depth.
1161 PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
1162 __ bind(&exit);
1163 decrement_loop_depth();
1164 }
1165
1166
1167 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1168 FeedbackVectorSlot slot) {
1169 DCHECK(NeedsHomeObject(initializer));
1170 __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1171 __ ldr(StoreDescriptor::ValueRegister(),
1172 MemOperand(sp, offset * kPointerSize));
1173 CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1174 }
1175
1176
1177 void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1178 int offset,
1179 FeedbackVectorSlot slot) {
1180 DCHECK(NeedsHomeObject(initializer));
1181 __ Move(StoreDescriptor::ReceiverRegister(), r0);
1182 __ ldr(StoreDescriptor::ValueRegister(),
1183 MemOperand(sp, offset * kPointerSize));
1184 CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1185 }
1186
1187
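// Loads a global variable for a proxy that may be shadowed by sloppy-eval
// declarations: walks the context chain up to the outermost sloppy eval,
// bailing out to |slow| if any intervening context has an extension object,
// and then falls back to the normal global load machinery.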
1188 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1189 TypeofMode typeof_mode,
1190 Label* slow) {
1191 Register current = cp;
1192 Register next = r1;
1193 Register temp = r2;
1194
1195 int to_check = scope()->ContextChainLengthUntilOutermostSloppyEval();
1196 for (Scope* s = scope(); to_check > 0; s = s->outer_scope()) {
1197 if (!s->NeedsContext()) continue;
1198 if (s->calls_sloppy_eval()) {
1199 // Check that extension is "the hole".
1200 __ ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1201 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1202 }
1203 // Load next context in chain.
1204 __ ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1205 // Walk the rest of the chain without clobbering cp.
1206 current = next;
1207 to_check--;
1208 }
1209
1210 // All extension objects were empty and it is safe to use a normal global
1211 // load machinery.
1212 EmitGlobalVariableLoad(proxy, typeof_mode);
1213 }
1214
1215
1216 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1217 Label* slow) {
1218 DCHECK(var->IsContextSlot());
1219 Register context = cp;
1220 Register next = r3;
1221 Register temp = r4;
1222
1223 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1224 if (s->NeedsContext()) {
1225 if (s->calls_sloppy_eval()) {
1226 // Check that extension is "the hole".
1227 __ ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1228 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1229 }
1230 __ ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1231 // Walk the rest of the chain without clobbering cp.
1232 context = next;
1233 }
1234 }
1235 // Check that last extension is "the hole".
1236 __ ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1237 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1238
1239 // This function is used only for loads, not stores, so it's safe to
1240 // return a cp-based operand (the write barrier cannot be allowed to
1241 // destroy the cp register).
1242 return ContextMemOperand(context, var->index());
1243 }
1244
1245
1246 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1247 TypeofMode typeof_mode,
1248 Label* slow, Label* done) {
1249 // Generate fast-case code for variables that might be shadowed by
1250 // eval-introduced variables. Eval is used a lot without
1251 // introducing variables. In those cases, we do not want to
1252 // perform a runtime call for all variables in the scope
1253 // containing the eval.
1254 Variable* var = proxy->var();
1255 if (var->mode() == DYNAMIC_GLOBAL) {
1256 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1257 __ jmp(done);
1258 } else if (var->mode() == DYNAMIC_LOCAL) {
1259 Variable* local = var->local_if_not_shadowed();
1260 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
1261 if (local->binding_needs_init()) {
1262 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1263 __ b(ne, done);
1264 __ mov(r0, Operand(var->name()));
1265 __ push(r0);
1266 __ CallRuntime(Runtime::kThrowReferenceError);
1267 } else {
1268 __ jmp(done);
1269 }
1270 }
1271 }
1272
1273 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1274 TypeofMode typeof_mode) {
1275 // Record position before possible IC call.
1276 SetExpressionPosition(proxy);
1277 PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
1278 Variable* var = proxy->var();
1279
1280 // Three cases: global variables, lookup variables, and all other types of
1281 // variables.
1282 switch (var->location()) {
1283 case VariableLocation::UNALLOCATED: {
1284 Comment cmnt(masm_, "[ Global variable");
1285 EmitGlobalVariableLoad(proxy, typeof_mode);
1286 context()->Plug(r0);
1287 break;
1288 }
1289
1290 case VariableLocation::PARAMETER:
1291 case VariableLocation::LOCAL:
1292 case VariableLocation::CONTEXT: {
1293 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1294 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1295 : "[ Stack variable");
1296 if (proxy->hole_check_mode() == HoleCheckMode::kRequired) {
1297 // Throw a reference error when using an uninitialized let/const
1298 // binding in harmony mode.
1299 Label done;
1300 GetVar(r0, var);
1301 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1302 __ b(ne, &done);
1303 __ mov(r0, Operand(var->name()));
1304 __ push(r0);
1305 __ CallRuntime(Runtime::kThrowReferenceError);
1306 __ bind(&done);
1307 context()->Plug(r0);
1308 break;
1309 }
1310 context()->Plug(var);
1311 break;
1312 }
1313
1314 case VariableLocation::LOOKUP: {
1315 Comment cmnt(masm_, "[ Lookup variable");
1316 Label done, slow;
1317 // Generate code for loading from variables potentially shadowed
1318 // by eval-introduced variables.
1319 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1320 __ bind(&slow);
1321 __ Push(var->name());
1322 Runtime::FunctionId function_id =
1323 typeof_mode == NOT_INSIDE_TYPEOF
1324 ? Runtime::kLoadLookupSlot
1325 : Runtime::kLoadLookupSlotInsideTypeof;
1326 __ CallRuntime(function_id);
1327 __ bind(&done);
1328 context()->Plug(r0);
1329 break;
1330 }
1331
1332 case VariableLocation::MODULE:
1333 UNREACHABLE();
1334 }
1335 }
1336
1337
1338 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1339 Expression* expression = (property == NULL) ? NULL : property->value();
1340 if (expression == NULL) {
1341 __ LoadRoot(r1, Heap::kNullValueRootIndex);
1342 PushOperand(r1);
1343 } else {
1344 VisitForStackValue(expression);
1345 if (NeedsHomeObject(expression)) {
1346 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1347 property->kind() == ObjectLiteral::Property::SETTER);
1348 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1349 EmitSetHomeObject(expression, offset, property->GetSlot());
1350 }
1351 }
1352 }
1353
1354
1355 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1356 Comment cmnt(masm_, "[ ObjectLiteral");
1357
1358 Handle<FixedArray> constant_properties = expr->constant_properties();
1359 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1360 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1361 __ mov(r1, Operand(constant_properties));
1362 int flags = expr->ComputeFlags();
1363 __ mov(r0, Operand(Smi::FromInt(flags)));
1364 if (MustCreateObjectLiteralWithRuntime(expr)) {
1365 __ Push(r3, r2, r1, r0);
1366 __ CallRuntime(Runtime::kCreateObjectLiteral);
1367 } else {
1368 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1369 __ CallStub(&stub);
1370 RestoreContext();
1371 }
1372 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1373
1374 // If result_saved is true the result is on top of the stack. If
1375 // result_saved is false the result is in r0.
1376 bool result_saved = false;
1377
1378 AccessorTable accessor_table(zone());
1379 int property_index = 0;
1380 for (; property_index < expr->properties()->length(); property_index++) {
1381 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1382 if (property->is_computed_name()) break;
1383 if (property->IsCompileTimeValue()) continue;
1384
1385 Literal* key = property->key()->AsLiteral();
1386 Expression* value = property->value();
1387 if (!result_saved) {
1388 PushOperand(r0); // Save result on stack
1389 result_saved = true;
1390 }
1391 switch (property->kind()) {
1392 case ObjectLiteral::Property::CONSTANT:
1393 UNREACHABLE();
1394 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1395 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1396 // Fall through.
1397 case ObjectLiteral::Property::COMPUTED:
1398 // It is safe to use [[Put]] here because the boilerplate already
1399 // contains computed properties with an uninitialized value.
1400 if (key->IsStringLiteral()) {
1401 DCHECK(key->IsPropertyName());
1402 if (property->emit_store()) {
1403 VisitForAccumulatorValue(value);
1404 DCHECK(StoreDescriptor::ValueRegister().is(r0));
1405 __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1406 CallStoreIC(property->GetSlot(0), key->value());
1407 PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
1408
1409 if (NeedsHomeObject(value)) {
1410 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1411 }
1412 } else {
1413 VisitForEffect(value);
1414 }
1415 break;
1416 }
1417 // Duplicate receiver on stack.
1418 __ ldr(r0, MemOperand(sp));
1419 PushOperand(r0);
1420 VisitForStackValue(key);
1421 VisitForStackValue(value);
1422 if (property->emit_store()) {
1423 if (NeedsHomeObject(value)) {
1424 EmitSetHomeObject(value, 2, property->GetSlot());
1425 }
1426 __ mov(r0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes
1427 PushOperand(r0);
1428 CallRuntimeWithOperands(Runtime::kSetProperty);
1429 } else {
1430 DropOperands(3);
1431 }
1432 break;
1433 case ObjectLiteral::Property::PROTOTYPE:
1434 // Duplicate receiver on stack.
1435 __ ldr(r0, MemOperand(sp));
1436 PushOperand(r0);
1437 VisitForStackValue(value);
1438 DCHECK(property->emit_store());
1439 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1440 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1441 BailoutState::NO_REGISTERS);
1442 break;
1443
1444 case ObjectLiteral::Property::GETTER:
1445 if (property->emit_store()) {
1446 AccessorTable::Iterator it = accessor_table.lookup(key);
1447 it->second->bailout_id = expr->GetIdForPropertySet(property_index);
1448 it->second->getter = property;
1449 }
1450 break;
1451 case ObjectLiteral::Property::SETTER:
1452 if (property->emit_store()) {
1453 AccessorTable::Iterator it = accessor_table.lookup(key);
1454 it->second->bailout_id = expr->GetIdForPropertySet(property_index);
1455 it->second->setter = property;
1456 }
1457 break;
1458 }
1459 }
1460
1461 // Emit code to define accessors, using only a single call to the runtime for
1462 // each pair of corresponding getters and setters.
1463 for (AccessorTable::Iterator it = accessor_table.begin();
1464 it != accessor_table.end();
1465 ++it) {
1466 __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
1467 PushOperand(r0);
1468 VisitForStackValue(it->first);
1469 EmitAccessor(it->second->getter);
1470 EmitAccessor(it->second->setter);
1471 __ mov(r0, Operand(Smi::FromInt(NONE)));
1472 PushOperand(r0);
1473 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
1474 PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
1475 }
1476
1477 // Object literals have two parts. The "static" part on the left contains no
1478 // computed property names, and so we can compute its map ahead of time; see
1479 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1480 // starts with the first computed property name, and continues with all
1481 // properties to its right. All the code from above initializes the static
1482 // component of the object literal, and arranges for the map of the result to
1483 // reflect the static order in which the keys appear. For the dynamic
1484 // properties, we compile them into a series of "SetOwnProperty" runtime
1485 // calls. This will preserve insertion order.
1486 for (; property_index < expr->properties()->length(); property_index++) {
1487 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1488
1489 Expression* value = property->value();
1490 if (!result_saved) {
1491 PushOperand(r0); // Save result on the stack
1492 result_saved = true;
1493 }
1494
1495 __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
1496 PushOperand(r0);
1497
1498 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1499 DCHECK(!property->is_computed_name());
1500 VisitForStackValue(value);
1501 DCHECK(property->emit_store());
1502 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1503 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1504 BailoutState::NO_REGISTERS);
1505 } else {
1506 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1507 VisitForStackValue(value);
1508 if (NeedsHomeObject(value)) {
1509 EmitSetHomeObject(value, 2, property->GetSlot());
1510 }
1511
1512 switch (property->kind()) {
1513 case ObjectLiteral::Property::CONSTANT:
1514 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1515 case ObjectLiteral::Property::COMPUTED:
1516 if (property->emit_store()) {
1517 PushOperand(Smi::FromInt(NONE));
1518 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1519 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
1520 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1521 BailoutState::NO_REGISTERS);
1522 } else {
1523 DropOperands(3);
1524 }
1525 break;
1526
1527 case ObjectLiteral::Property::PROTOTYPE:
1528 UNREACHABLE();
1529 break;
1530
1531 case ObjectLiteral::Property::GETTER:
1532 PushOperand(Smi::FromInt(NONE));
1533 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
1534 break;
1535
1536 case ObjectLiteral::Property::SETTER:
1537 PushOperand(Smi::FromInt(NONE));
1538 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
1539 break;
1540 }
1541 }
1542 }
1543
1544 if (result_saved) {
1545 context()->PlugTOS();
1546 } else {
1547 context()->Plug(r0);
1548 }
1549 }
1550
1551
1552 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1553 Comment cmnt(masm_, "[ ArrayLiteral");
1554
1555 Handle<FixedArray> constant_elements = expr->constant_elements();
1556 bool has_fast_elements =
1557 IsFastObjectElementsKind(expr->constant_elements_kind());
1558 Handle<FixedArrayBase> constant_elements_values(
1559 FixedArrayBase::cast(constant_elements->get(1)));
1560
1561 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1562 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1563     // If the only remaining customer of allocation sites is transitioning,
1564     // we can turn tracking off when there is nowhere left to transition to.
1565 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1566 }
1567
1568 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1569 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1570 __ mov(r1, Operand(constant_elements));
1571 if (MustCreateArrayLiteralWithRuntime(expr)) {
1572 __ mov(r0, Operand(Smi::FromInt(expr->ComputeFlags())));
1573 __ Push(r3, r2, r1, r0);
1574 __ CallRuntime(Runtime::kCreateArrayLiteral);
1575 } else {
1576 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1577 __ CallStub(&stub);
1578 RestoreContext();
1579 }
1580 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1581
1582 bool result_saved = false; // Is the result saved to the stack?
1583 ZoneList<Expression*>* subexprs = expr->values();
1584 int length = subexprs->length();
1585
1586 // Emit code to evaluate all the non-constant subexpressions and to store
1587 // them into the newly cloned array.
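  // For example, for [1, foo(), 3] only the foo() element is evaluated and
  // stored here; the constant elements 1 and 3 are already present in the
  // cloned boilerplate.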
1588 for (int array_index = 0; array_index < length; array_index++) {
1589 Expression* subexpr = subexprs->at(array_index);
1590 DCHECK(!subexpr->IsSpread());
1591
1592 // If the subexpression is a literal or a simple materialized literal it
1593 // is already set in the cloned array.
1594 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1595
1596 if (!result_saved) {
1597 PushOperand(r0);
1598 result_saved = true;
1599 }
1600 VisitForAccumulatorValue(subexpr);
1601
1602 __ mov(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
1603 __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1604 CallKeyedStoreIC(expr->LiteralFeedbackSlot());
1605
1606 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1607 BailoutState::NO_REGISTERS);
1608 }
1609
1610 if (result_saved) {
1611 context()->PlugTOS();
1612 } else {
1613 context()->Plug(r0);
1614 }
1615 }
1616
1617
1618 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1619 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1620
1621 Comment cmnt(masm_, "[ Assignment");
1622
1623 Property* property = expr->target()->AsProperty();
1624 LhsKind assign_type = Property::GetAssignType(property);
1625
1626 // Evaluate LHS expression.
1627 switch (assign_type) {
1628 case VARIABLE:
1629 // Nothing to do here.
1630 break;
1631 case NAMED_PROPERTY:
1632 if (expr->is_compound()) {
1633 // We need the receiver both on the stack and in the register.
1634 VisitForStackValue(property->obj());
1635 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1636 } else {
1637 VisitForStackValue(property->obj());
1638 }
1639 break;
1640 case NAMED_SUPER_PROPERTY:
1641 VisitForStackValue(
1642 property->obj()->AsSuperPropertyReference()->this_var());
1643 VisitForAccumulatorValue(
1644 property->obj()->AsSuperPropertyReference()->home_object());
1645 PushOperand(result_register());
1646 if (expr->is_compound()) {
1647 const Register scratch = r1;
1648 __ ldr(scratch, MemOperand(sp, kPointerSize));
1649 PushOperand(scratch);
1650 PushOperand(result_register());
1651 }
1652 break;
1653 case KEYED_SUPER_PROPERTY:
1654 VisitForStackValue(
1655 property->obj()->AsSuperPropertyReference()->this_var());
1656 VisitForStackValue(
1657 property->obj()->AsSuperPropertyReference()->home_object());
1658 VisitForAccumulatorValue(property->key());
1659 PushOperand(result_register());
1660 if (expr->is_compound()) {
1661 const Register scratch = r1;
1662 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
1663 PushOperand(scratch);
1664 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
1665 PushOperand(scratch);
1666 PushOperand(result_register());
1667 }
1668 break;
1669 case KEYED_PROPERTY:
1670 if (expr->is_compound()) {
1671 VisitForStackValue(property->obj());
1672 VisitForStackValue(property->key());
1673 __ ldr(LoadDescriptor::ReceiverRegister(),
1674 MemOperand(sp, 1 * kPointerSize));
1675 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1676 } else {
1677 VisitForStackValue(property->obj());
1678 VisitForStackValue(property->key());
1679 }
1680 break;
1681 }
1682
1683 // For compound assignments we need another deoptimization point after the
1684 // variable/property load.
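  // (For example, for o.x += 1 the value of o.x loaded below must be
  // reconstructible if we deoptimize during the addition.)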
1685 if (expr->is_compound()) {
1686 { AccumulatorValueContext context(this);
1687 switch (assign_type) {
1688 case VARIABLE:
1689 EmitVariableLoad(expr->target()->AsVariableProxy());
1690 PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
1691 break;
1692 case NAMED_PROPERTY:
1693 EmitNamedPropertyLoad(property);
1694 PrepareForBailoutForId(property->LoadId(),
1695 BailoutState::TOS_REGISTER);
1696 break;
1697 case NAMED_SUPER_PROPERTY:
1698 EmitNamedSuperPropertyLoad(property);
1699 PrepareForBailoutForId(property->LoadId(),
1700 BailoutState::TOS_REGISTER);
1701 break;
1702 case KEYED_SUPER_PROPERTY:
1703 EmitKeyedSuperPropertyLoad(property);
1704 PrepareForBailoutForId(property->LoadId(),
1705 BailoutState::TOS_REGISTER);
1706 break;
1707 case KEYED_PROPERTY:
1708 EmitKeyedPropertyLoad(property);
1709 PrepareForBailoutForId(property->LoadId(),
1710 BailoutState::TOS_REGISTER);
1711 break;
1712 }
1713 }
1714
1715 Token::Value op = expr->binary_op();
1716 PushOperand(r0); // Left operand goes on the stack.
1717 VisitForAccumulatorValue(expr->value());
1718
1719 AccumulatorValueContext context(this);
1720 if (ShouldInlineSmiCase(op)) {
1721 EmitInlineSmiBinaryOp(expr->binary_operation(),
1722 op,
1723 expr->target(),
1724 expr->value());
1725 } else {
1726 EmitBinaryOp(expr->binary_operation(), op);
1727 }
1728
1729 // Deoptimization point in case the binary operation may have side effects.
1730 PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
1731 } else {
1732 VisitForAccumulatorValue(expr->value());
1733 }
1734
1735 SetExpressionPosition(expr);
1736
1737 // Store the value.
1738 switch (assign_type) {
1739 case VARIABLE: {
1740 VariableProxy* proxy = expr->target()->AsVariableProxy();
1741 EmitVariableAssignment(proxy->var(), expr->op(), expr->AssignmentSlot(),
1742 proxy->hole_check_mode());
1743 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
1744 context()->Plug(r0);
1745 break;
1746 }
1747 case NAMED_PROPERTY:
1748 EmitNamedPropertyAssignment(expr);
1749 break;
1750 case NAMED_SUPER_PROPERTY:
1751 EmitNamedSuperPropertyStore(property);
1752 context()->Plug(r0);
1753 break;
1754 case KEYED_SUPER_PROPERTY:
1755 EmitKeyedSuperPropertyStore(property);
1756 context()->Plug(r0);
1757 break;
1758 case KEYED_PROPERTY:
1759 EmitKeyedPropertyAssignment(expr);
1760 break;
1761 }
1762 }
1763
1764
1765 void FullCodeGenerator::VisitYield(Yield* expr) {
1766 Comment cmnt(masm_, "[ Yield");
1767 SetExpressionPosition(expr);
1768
1769 // Evaluate yielded value first; the initial iterator definition depends on
1770 // this. It stays on the stack while we update the iterator.
1771 VisitForStackValue(expr->expression());
1772
1773 Label suspend, continuation, post_runtime, resume, exception;
1774
1775 __ jmp(&suspend);
1776 __ bind(&continuation);
1777 // When we arrive here, r0 holds the generator object.
1778 __ RecordGeneratorContinuation();
1779 __ ldr(r1, FieldMemOperand(r0, JSGeneratorObject::kResumeModeOffset));
1780 __ ldr(r0, FieldMemOperand(r0, JSGeneratorObject::kInputOrDebugPosOffset));
1781 STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn);
1782 STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn);
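  // Dispatch on the resume mode: kNext (< kReturn) resumes execution at the
  // bind(&resume) below, kReturn builds a done iterator result and returns,
  // and kThrow (> kReturn) takes the exception path.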
1783 __ cmp(r1, Operand(Smi::FromInt(JSGeneratorObject::kReturn)));
1784 __ b(lt, &resume);
1785 __ Push(result_register());
1786 __ b(gt, &exception);
1787 EmitCreateIteratorResult(true);
1788 EmitUnwindAndReturn();
1789
1790 __ bind(&exception);
1791 __ CallRuntime(expr->rethrow_on_exception() ? Runtime::kReThrow
1792 : Runtime::kThrow);
1793
1794 __ bind(&suspend);
1795 OperandStackDepthIncrement(1); // Not popped on this path.
1796 VisitForAccumulatorValue(expr->generator_object());
1797 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1798 __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
1799 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
1800 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
1801 __ mov(r1, cp);
1802 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
1803 kLRHasBeenSaved, kDontSaveFPRegs);
1804 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1805 __ cmp(sp, r1);
1806 __ b(eq, &post_runtime);
1807 __ push(r0); // generator object
1808 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1809 RestoreContext();
1810 __ bind(&post_runtime);
1811 PopOperand(result_register());
1812 EmitReturnSequence();
1813
1814 __ bind(&resume);
1815 context()->Plug(result_register());
1816 }
1817
1818 void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
1819 OperandStackDepthIncrement(2);
1820 __ Push(reg1, reg2);
1821 }
1822
1823 void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
1824 OperandStackDepthDecrement(2);
1825 __ Pop(reg1, reg2);
1826 }
1827
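// In debug builds, verify that the simulated operand stack depth matches the
// actual distance between the frame pointer and the stack pointer.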
1828 void FullCodeGenerator::EmitOperandStackDepthCheck() {
1829 if (FLAG_debug_code) {
1830 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1831 operand_stack_depth_ * kPointerSize;
1832 __ sub(r0, fp, sp);
1833 __ cmp(r0, Operand(expected_diff));
1834 __ Assert(eq, kUnexpectedStackDepth);
1835 }
1836 }
1837
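// Allocates a JSIteratorResult in new space, falling back to the runtime on
// allocation failure, and initializes it as { value: <popped operand>,
// done: <done> }.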
1838 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
1839 Label allocate, done_allocate;
1840
1841 __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &allocate,
1842 NO_ALLOCATION_FLAGS);
1843 __ b(&done_allocate);
1844
1845 __ bind(&allocate);
1846 __ Push(Smi::FromInt(JSIteratorResult::kSize));
1847 __ CallRuntime(Runtime::kAllocateInNewSpace);
1848
1849 __ bind(&done_allocate);
1850 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r1);
1851 PopOperand(r2);
1852 __ LoadRoot(r3,
1853 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1854 __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
1855 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
1856 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
1857 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
1858 __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset));
1859 __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset));
1860 }
1861
1862
1863 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1864 Token::Value op,
1865 Expression* left_expr,
1866 Expression* right_expr) {
1867 Label done, smi_case, stub_call;
1868
1869 Register scratch1 = r2;
1870 Register scratch2 = r3;
1871
1872 // Get the arguments.
1873 Register left = r1;
1874 Register right = r0;
1875 PopOperand(left);
1876
1877 // Perform combined smi check on both operands.
1878 __ orr(scratch1, left, Operand(right));
1879 STATIC_ASSERT(kSmiTag == 0);
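  // Since the smi tag is zero, the or'ed value is a smi iff both operands are
  // smis.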
1880 JumpPatchSite patch_site(masm_);
1881 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1882
1883 __ bind(&stub_call);
1884 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1885 CallIC(code, expr->BinaryOperationFeedbackId());
1886 patch_site.EmitPatchInfo();
1887 __ jmp(&done);
1888
1889 __ bind(&smi_case);
1890   // Smi case. This code works the same way as the smi-smi case in the
1891   // type-recording binary operation stub.
1892 switch (op) {
1893 case Token::SAR:
1894 __ GetLeastBitsFromSmi(scratch1, right, 5);
1895 __ mov(right, Operand(left, ASR, scratch1));
1896 __ bic(right, right, Operand(kSmiTagMask));
1897 break;
1898 case Token::SHL: {
1899 __ SmiUntag(scratch1, left);
1900 __ GetLeastBitsFromSmi(scratch2, right, 5);
1901 __ mov(scratch1, Operand(scratch1, LSL, scratch2));
1902 __ TrySmiTag(right, scratch1, &stub_call);
1903 break;
1904 }
1905 case Token::SHR: {
1906 __ SmiUntag(scratch1, left);
1907 __ GetLeastBitsFromSmi(scratch2, right, 5);
1908 __ mov(scratch1, Operand(scratch1, LSR, scratch2));
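      // The logical shift result must fit in a non-negative smi, so the top
      // two bits of the untagged value have to be clear.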
1909 __ tst(scratch1, Operand(0xc0000000));
1910 __ b(ne, &stub_call);
1911 __ SmiTag(right, scratch1);
1912 break;
1913 }
1914 case Token::ADD:
1915 __ add(scratch1, left, Operand(right), SetCC);
1916 __ b(vs, &stub_call);
1917 __ mov(right, scratch1);
1918 break;
1919 case Token::SUB:
1920 __ sub(scratch1, left, Operand(right), SetCC);
1921 __ b(vs, &stub_call);
1922 __ mov(right, scratch1);
1923 break;
1924 case Token::MUL: {
1925 __ SmiUntag(ip, right);
1926 __ smull(scratch1, scratch2, left, ip);
1927 __ mov(ip, Operand(scratch1, ASR, 31));
1928 __ cmp(ip, Operand(scratch2));
1929 __ b(ne, &stub_call);
1930 __ cmp(scratch1, Operand::Zero());
1931 __ mov(right, Operand(scratch1), LeaveCC, ne);
1932 __ b(ne, &done);
1933 __ add(scratch2, right, Operand(left), SetCC);
1934 __ mov(right, Operand(Smi::kZero), LeaveCC, pl);
1935 __ b(mi, &stub_call);
1936 break;
1937 }
1938 case Token::BIT_OR:
1939 __ orr(right, left, Operand(right));
1940 break;
1941 case Token::BIT_AND:
1942 __ and_(right, left, Operand(right));
1943 break;
1944 case Token::BIT_XOR:
1945 __ eor(right, left, Operand(right));
1946 break;
1947 default:
1948 UNREACHABLE();
1949 }
1950
1951 __ bind(&done);
1952 context()->Plug(r0);
1953 }
1954
1955
1956 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
1957 for (int i = 0; i < lit->properties()->length(); i++) {
1958 ClassLiteral::Property* property = lit->properties()->at(i);
1959 Expression* value = property->value();
1960
1961 Register scratch = r1;
1962 if (property->is_static()) {
1963 __ ldr(scratch, MemOperand(sp, kPointerSize)); // constructor
1964 } else {
1965 __ ldr(scratch, MemOperand(sp, 0)); // prototype
1966 }
1967 PushOperand(scratch);
1968 EmitPropertyKey(property, lit->GetIdForProperty(i));
1969
1970     // The static prototype property is read-only. We handle the non-computed
1971     // property name case in the parser. Since this is the only case where we
1972     // need to check for an own read-only property, we special-case it here so
1973     // that we do not need to do the check for every property.
1974 if (property->is_static() && property->is_computed_name()) {
1975 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
1976 __ push(r0);
1977 }
1978
1979 VisitForStackValue(value);
1980 if (NeedsHomeObject(value)) {
1981 EmitSetHomeObject(value, 2, property->GetSlot());
1982 }
1983
1984 switch (property->kind()) {
1985 case ClassLiteral::Property::METHOD:
1986 PushOperand(Smi::FromInt(DONT_ENUM));
1987 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1988 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
1989 break;
1990
1991 case ClassLiteral::Property::GETTER:
1992 PushOperand(Smi::FromInt(DONT_ENUM));
1993 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
1994 break;
1995
1996 case ClassLiteral::Property::SETTER:
1997 PushOperand(Smi::FromInt(DONT_ENUM));
1998 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
1999 break;
2000
2001 case ClassLiteral::Property::FIELD:
2002 default:
2003 UNREACHABLE();
2004 }
2005 }
2006 }
2007
2008
2009 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2010 PopOperand(r1);
2011 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2012 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2013 CallIC(code, expr->BinaryOperationFeedbackId());
2014 patch_site.EmitPatchInfo();
2015 context()->Plug(r0);
2016 }
2017
2018
2019 void FullCodeGenerator::EmitAssignment(Expression* expr,
2020 FeedbackVectorSlot slot) {
2021 DCHECK(expr->IsValidReferenceExpressionOrThis());
2022
2023 Property* prop = expr->AsProperty();
2024 LhsKind assign_type = Property::GetAssignType(prop);
2025
2026 switch (assign_type) {
2027 case VARIABLE: {
2028 VariableProxy* proxy = expr->AsVariableProxy();
2029 EffectContext context(this);
2030 EmitVariableAssignment(proxy->var(), Token::ASSIGN, slot,
2031 proxy->hole_check_mode());
2032 break;
2033 }
2034 case NAMED_PROPERTY: {
2035 PushOperand(r0); // Preserve value.
2036 VisitForAccumulatorValue(prop->obj());
2037 __ Move(StoreDescriptor::ReceiverRegister(), r0);
2038 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
2039 CallStoreIC(slot, prop->key()->AsLiteral()->value());
2040 break;
2041 }
2042 case NAMED_SUPER_PROPERTY: {
2043 PushOperand(r0);
2044 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2045 VisitForAccumulatorValue(
2046 prop->obj()->AsSuperPropertyReference()->home_object());
2047 // stack: value, this; r0: home_object
2048 Register scratch = r2;
2049 Register scratch2 = r3;
2050 __ mov(scratch, result_register()); // home_object
2051 __ ldr(r0, MemOperand(sp, kPointerSize)); // value
2052 __ ldr(scratch2, MemOperand(sp, 0)); // this
2053 __ str(scratch2, MemOperand(sp, kPointerSize)); // this
2054 __ str(scratch, MemOperand(sp, 0)); // home_object
2055 // stack: this, home_object; r0: value
2056 EmitNamedSuperPropertyStore(prop);
2057 break;
2058 }
2059 case KEYED_SUPER_PROPERTY: {
2060 PushOperand(r0);
2061 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2062 VisitForStackValue(
2063 prop->obj()->AsSuperPropertyReference()->home_object());
2064 VisitForAccumulatorValue(prop->key());
2065 Register scratch = r2;
2066 Register scratch2 = r3;
2067 __ ldr(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2068 // stack: value, this, home_object; r0: key, r3: value
2069 __ ldr(scratch, MemOperand(sp, kPointerSize)); // this
2070 __ str(scratch, MemOperand(sp, 2 * kPointerSize));
2071 __ ldr(scratch, MemOperand(sp, 0)); // home_object
2072 __ str(scratch, MemOperand(sp, kPointerSize));
2073 __ str(r0, MemOperand(sp, 0));
2074 __ Move(r0, scratch2);
2075 // stack: this, home_object, key; r0: value.
2076 EmitKeyedSuperPropertyStore(prop);
2077 break;
2078 }
2079 case KEYED_PROPERTY: {
2080 PushOperand(r0); // Preserve value.
2081 VisitForStackValue(prop->obj());
2082 VisitForAccumulatorValue(prop->key());
2083 __ Move(StoreDescriptor::NameRegister(), r0);
2084 PopOperands(StoreDescriptor::ValueRegister(),
2085 StoreDescriptor::ReceiverRegister());
2086 CallKeyedStoreIC(slot);
2087 break;
2088 }
2089 }
2090 context()->Plug(r0);
2091 }
2092
2093
2094 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2095 Variable* var, MemOperand location) {
2096 __ str(result_register(), location);
2097 if (var->IsContextSlot()) {
2098 // RecordWrite may destroy all its register arguments.
2099 __ mov(r3, result_register());
2100 int offset = Context::SlotOffset(var->index());
2101 __ RecordWriteContextSlot(
2102 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2103 }
2104 }
2105
2106 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2107 FeedbackVectorSlot slot,
2108 HoleCheckMode hole_check_mode) {
2109 if (var->IsUnallocated()) {
2110 // Global var, const, or let.
2111 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2112 CallStoreIC(slot, var->name());
2113
2114 } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
2115 DCHECK(!var->IsLookupSlot());
2116 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2117 MemOperand location = VarOperand(var, r1);
2118 // Perform an initialization check for lexically declared variables.
2119 if (hole_check_mode == HoleCheckMode::kRequired) {
2120 Label assign;
2121 __ ldr(r3, location);
2122 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2123 __ b(ne, &assign);
2124 __ mov(r3, Operand(var->name()));
2125 __ push(r3);
2126 __ CallRuntime(Runtime::kThrowReferenceError);
2127 __ bind(&assign);
2128 }
2129 if (var->mode() != CONST) {
2130 EmitStoreToStackLocalOrContextSlot(var, location);
2131 } else if (var->throw_on_const_assignment(language_mode())) {
2132 __ CallRuntime(Runtime::kThrowConstAssignError);
2133 }
2134 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2135 // Initializing assignment to const {this} needs a write barrier.
2136 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2137 Label uninitialized_this;
2138 MemOperand location = VarOperand(var, r1);
2139 __ ldr(r3, location);
2140 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2141 __ b(eq, &uninitialized_this);
2142 __ mov(r0, Operand(var->name()));
2143 __ Push(r0);
2144 __ CallRuntime(Runtime::kThrowReferenceError);
2145 __ bind(&uninitialized_this);
2146 EmitStoreToStackLocalOrContextSlot(var, location);
2147
2148 } else {
2149 DCHECK(var->mode() != CONST || op == Token::INIT);
2150 if (var->IsLookupSlot()) {
2151 // Assignment to var.
2152 __ Push(var->name());
2153 __ Push(r0);
2154 __ CallRuntime(is_strict(language_mode())
2155 ? Runtime::kStoreLookupSlot_Strict
2156 : Runtime::kStoreLookupSlot_Sloppy);
2157 } else {
2158 // Assignment to var or initializing assignment to let/const in harmony
2159 // mode.
2160 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2161 MemOperand location = VarOperand(var, r1);
2162 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
2163 // Check for an uninitialized let binding.
2164 __ ldr(r2, location);
2165 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2166 __ Check(eq, kLetBindingReInitialization);
2167 }
2168 EmitStoreToStackLocalOrContextSlot(var, location);
2169 }
2170 }
2171 }
2172
2173
2174 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2175 // Assignment to a property, using a named store IC.
2176 Property* prop = expr->target()->AsProperty();
2177 DCHECK(prop != NULL);
2178 DCHECK(prop->key()->IsLiteral());
2179
2180 PopOperand(StoreDescriptor::ReceiverRegister());
2181 CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
2182
2183 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2184 context()->Plug(r0);
2185 }
2186
2187
2188 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2189 // Assignment to named property of super.
2190 // r0 : value
2191 // stack : receiver ('this'), home_object
2192 DCHECK(prop != NULL);
2193 Literal* key = prop->key()->AsLiteral();
2194 DCHECK(key != NULL);
2195
2196 PushOperand(key->value());
2197 PushOperand(r0);
2198 CallRuntimeWithOperands(is_strict(language_mode())
2199 ? Runtime::kStoreToSuper_Strict
2200 : Runtime::kStoreToSuper_Sloppy);
2201 }
2202
2203
2204 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2205   // Assignment to keyed property of super.
2206 // r0 : value
2207 // stack : receiver ('this'), home_object, key
2208 DCHECK(prop != NULL);
2209
2210 PushOperand(r0);
2211 CallRuntimeWithOperands(is_strict(language_mode())
2212 ? Runtime::kStoreKeyedToSuper_Strict
2213 : Runtime::kStoreKeyedToSuper_Sloppy);
2214 }
2215
2216
2217 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2218 // Assignment to a property, using a keyed store IC.
2219 PopOperands(StoreDescriptor::ReceiverRegister(),
2220 StoreDescriptor::NameRegister());
2221 DCHECK(StoreDescriptor::ValueRegister().is(r0));
2222
2223 CallKeyedStoreIC(expr->AssignmentSlot());
2224
2225 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2226 context()->Plug(r0);
2227 }
2228
2229 // Code common for calls using the IC.
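// This handles calls such as f(x), where the callee is a plain variable, and
// o.f(x), where the callee is a (non-super) named property of the receiver.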
2230 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2231 Expression* callee = expr->expression();
2232
2233 // Get the target function.
2234 ConvertReceiverMode convert_mode;
2235 if (callee->IsVariableProxy()) {
2236 { StackValueContext context(this);
2237 EmitVariableLoad(callee->AsVariableProxy());
2238 PrepareForBailout(callee, BailoutState::NO_REGISTERS);
2239 }
2240 // Push undefined as receiver. This is patched in the method prologue if it
2241 // is a sloppy mode method.
2242 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2243 PushOperand(ip);
2244 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2245 } else {
2246 // Load the function from the receiver.
2247 DCHECK(callee->IsProperty());
2248 DCHECK(!callee->AsProperty()->IsSuperAccess());
2249 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2250 EmitNamedPropertyLoad(callee->AsProperty());
2251 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2252 BailoutState::TOS_REGISTER);
2253 // Push the target function under the receiver.
2254 __ ldr(ip, MemOperand(sp, 0));
2255 PushOperand(ip);
2256 __ str(r0, MemOperand(sp, kPointerSize));
2257 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2258 }
2259
2260 EmitCall(expr, convert_mode);
2261 }
2262
2263
2264 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2265 Expression* callee = expr->expression();
2266 DCHECK(callee->IsProperty());
2267 Property* prop = callee->AsProperty();
2268 DCHECK(prop->IsSuperAccess());
2269 SetExpressionPosition(prop);
2270
2271 Literal* key = prop->key()->AsLiteral();
2272 DCHECK(!key->value()->IsSmi());
2273 // Load the function from the receiver.
2274 const Register scratch = r1;
2275 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2276 VisitForStackValue(super_ref->home_object());
2277 VisitForAccumulatorValue(super_ref->this_var());
2278 PushOperand(r0);
2279 PushOperand(r0);
2280 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
2281 PushOperand(scratch);
2282 PushOperand(key->value());
2283
2284 // Stack here:
2285 // - home_object
2286 // - this (receiver)
2287 // - this (receiver) <-- LoadFromSuper will pop here and below.
2288 // - home_object
2289 // - key
2290 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
2291 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2292
2293 // Replace home_object with target function.
2294 __ str(r0, MemOperand(sp, kPointerSize));
2295
2296 // Stack here:
2297 // - target function
2298 // - this (receiver)
2299 EmitCall(expr);
2300 }
2301
2302
2303 // Code common for calls using the IC.
2304 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2305 Expression* key) {
2306 // Load the key.
2307 VisitForAccumulatorValue(key);
2308
2309 Expression* callee = expr->expression();
2310
2311 // Load the function from the receiver.
2312 DCHECK(callee->IsProperty());
2313 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2314 __ Move(LoadDescriptor::NameRegister(), r0);
2315 EmitKeyedPropertyLoad(callee->AsProperty());
2316 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2317 BailoutState::TOS_REGISTER);
2318
2319 // Push the target function under the receiver.
2320 __ ldr(ip, MemOperand(sp, 0));
2321 PushOperand(ip);
2322 __ str(r0, MemOperand(sp, kPointerSize));
2323
2324 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2325 }
2326
2327
2328 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2329 Expression* callee = expr->expression();
2330 DCHECK(callee->IsProperty());
2331 Property* prop = callee->AsProperty();
2332 DCHECK(prop->IsSuperAccess());
2333
2334 SetExpressionPosition(prop);
2335 // Load the function from the receiver.
2336 const Register scratch = r1;
2337 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2338 VisitForStackValue(super_ref->home_object());
2339 VisitForAccumulatorValue(super_ref->this_var());
2340 PushOperand(r0);
2341 PushOperand(r0);
2342 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
2343 PushOperand(scratch);
2344 VisitForStackValue(prop->key());
2345
2346 // Stack here:
2347 // - home_object
2348 // - this (receiver)
2349 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2350 // - home_object
2351 // - key
2352 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
2353 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2354
2355 // Replace home_object with target function.
2356 __ str(r0, MemOperand(sp, kPointerSize));
2357
2358 // Stack here:
2359 // - target function
2360 // - this (receiver)
2361 EmitCall(expr);
2362 }
2363
2364
2365 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2366 // Load the arguments.
2367 ZoneList<Expression*>* args = expr->arguments();
2368 int arg_count = args->length();
2369 for (int i = 0; i < arg_count; i++) {
2370 VisitForStackValue(args->at(i));
2371 }
2372
2373 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
2374 SetCallPosition(expr, expr->tail_call_mode());
2375 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2376 if (FLAG_trace) {
2377 __ CallRuntime(Runtime::kTraceTailCall);
2378 }
2379 // Update profiling counters before the tail call since we will
2380 // not return to this function.
2381 EmitProfilingCounterHandlingForReturnSequence(true);
2382 }
2383 Handle<Code> code =
2384 CodeFactory::CallIC(isolate(), mode, expr->tail_call_mode()).code();
2385 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
2386 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2387 __ mov(r0, Operand(arg_count));
2388 CallIC(code);
2389 OperandStackDepthDecrement(arg_count + 1);
2390
2391 RecordJSReturnSite(expr);
2392 RestoreContext();
2393 context()->DropAndPlug(1, r0);
2394 }
2395
2396 void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
2397 int arg_count = expr->arguments()->length();
2398 // r4: copy of the first argument or undefined if it doesn't exist.
2399 if (arg_count > 0) {
2400 __ ldr(r4, MemOperand(sp, arg_count * kPointerSize));
2401 } else {
2402 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2403 }
2404
2405 // r3: the receiver of the enclosing function.
2406 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2407
2408 // r2: language mode.
2409 __ mov(r2, Operand(Smi::FromInt(language_mode())));
2410
2411   // r1: the start position of the scope the call resides in.
2412 __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
2413
2414 // r0: the source position of the eval call.
2415 __ mov(r0, Operand(Smi::FromInt(expr->position())));
2416
2417 // Do the runtime call.
2418 __ Push(r4, r3, r2, r1, r0);
2419 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2420 }
2421
2422
2423 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
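// A lookup-slot callee arises, for example, when the call sits inside a
// 'with' block or next to a sloppy-mode direct eval, so the name may be
// shadowed by dynamically introduced bindings.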
2424 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2425 VariableProxy* callee = expr->expression()->AsVariableProxy();
2426 if (callee->var()->IsLookupSlot()) {
2427 Label slow, done;
2428 SetExpressionPosition(callee);
2429 // Generate code for loading from variables potentially shadowed
2430 // by eval-introduced variables.
2431 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2432
2433 __ bind(&slow);
2434     // Call the runtime to find the function to call (returned in r0)
2435     // and the object holding it (returned in r1).
2436 __ Push(callee->name());
2437 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2438 PushOperands(r0, r1); // Function, receiver.
2439 PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);
2440
2441 // If fast case code has been generated, emit code to push the
2442 // function and receiver and have the slow path jump around this
2443 // code.
2444 if (done.is_linked()) {
2445 Label call;
2446 __ b(&call);
2447 __ bind(&done);
2448 // Push function.
2449 __ push(r0);
2450       // The receiver is implicitly the global receiver. Indicate this
2451       // by passing undefined to the call function stub.
2452 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2453 __ push(r1);
2454 __ bind(&call);
2455 }
2456 } else {
2457 VisitForStackValue(callee);
2458 // refEnv.WithBaseObject()
2459 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2460 PushOperand(r2); // Reserved receiver slot.
2461 }
2462 }
2463
2464
2465 void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2466 // In a call to eval, we first call
2467 // Runtime_ResolvePossiblyDirectEval to resolve the function we need
2468 // to call. Then we call the resolved function using the given arguments.
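  // For a direct call to the builtin eval, the runtime returns a function
  // compiled from the source string; otherwise it returns the original
  // callee unchanged.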
2469 ZoneList<Expression*>* args = expr->arguments();
2470 int arg_count = args->length();
2471
2472 PushCalleeAndWithBaseObject(expr);
2473
2474 // Push the arguments.
2475 for (int i = 0; i < arg_count; i++) {
2476 VisitForStackValue(args->at(i));
2477 }
2478
2479 // Push a copy of the function (found below the arguments) and
2480 // resolve eval.
2481 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2482 __ push(r1);
2483 EmitResolvePossiblyDirectEval(expr);
2484
2485 // Touch up the stack with the resolved function.
2486 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2487
2488 PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);
2489
2490 // Record source position for debugger.
2491 SetCallPosition(expr);
2492 Handle<Code> code = CodeFactory::CallIC(isolate(), ConvertReceiverMode::kAny,
2493 expr->tail_call_mode())
2494 .code();
2495 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
2496 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2497 __ mov(r0, Operand(arg_count));
2498 __ Call(code, RelocInfo::CODE_TARGET);
2499 OperandStackDepthDecrement(arg_count + 1);
2500 RecordJSReturnSite(expr);
2501 RestoreContext();
2502 context()->DropAndPlug(1, r0);
2503 }
2504
2505
2506 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2507 Comment cmnt(masm_, "[ CallNew");
2508 // According to ECMA-262, section 11.2.2, page 44, the function
2509 // expression in new calls must be evaluated before the
2510 // arguments.
2511
2512 // Push constructor on the stack. If it's not a function it's used as
2513 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2514 // ignored.
2515 DCHECK(!expr->expression()->IsSuperPropertyReference());
2516 VisitForStackValue(expr->expression());
2517
2518 // Push the arguments ("left-to-right") on the stack.
2519 ZoneList<Expression*>* args = expr->arguments();
2520 int arg_count = args->length();
2521 for (int i = 0; i < arg_count; i++) {
2522 VisitForStackValue(args->at(i));
2523 }
2524
2525 // Call the construct call builtin that handles allocation and
2526 // constructor invocation.
2527 SetConstructCallPosition(expr);
2528
2529 // Load function and argument count into r1 and r0.
2530 __ mov(r0, Operand(arg_count));
2531 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2532
2533 // Record call targets in unoptimized code.
2534 __ EmitLoadTypeFeedbackVector(r2);
2535 __ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
2536
2537 CallConstructStub stub(isolate());
2538 CallIC(stub.GetCode());
2539 OperandStackDepthDecrement(arg_count + 1);
2540 PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
2541 RestoreContext();
2542 context()->Plug(r0);
2543 }
2544
2545
2546 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2547 SuperCallReference* super_call_ref =
2548 expr->expression()->AsSuperCallReference();
2549 DCHECK_NOT_NULL(super_call_ref);
2550
2551 // Push the super constructor target on the stack (may be null,
2552 // but the Construct builtin can deal with that properly).
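  // The target is the [[Prototype]] of the active function, loaded below via
  // its map.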
2553 VisitForAccumulatorValue(super_call_ref->this_function_var());
2554 __ AssertFunction(result_register());
2555 __ ldr(result_register(),
2556 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2557 __ ldr(result_register(),
2558 FieldMemOperand(result_register(), Map::kPrototypeOffset));
2559 PushOperand(result_register());
2560
2561 // Push the arguments ("left-to-right") on the stack.
2562 ZoneList<Expression*>* args = expr->arguments();
2563 int arg_count = args->length();
2564 for (int i = 0; i < arg_count; i++) {
2565 VisitForStackValue(args->at(i));
2566 }
2567
2568 // Call the construct call builtin that handles allocation and
2569 // constructor invocation.
2570 SetConstructCallPosition(expr);
2571
2572 // Load new target into r3.
2573 VisitForAccumulatorValue(super_call_ref->new_target_var());
2574 __ mov(r3, result_register());
2575
2576 // Load function and argument count into r1 and r0.
2577 __ mov(r0, Operand(arg_count));
2578 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2579
2580 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2581 OperandStackDepthDecrement(arg_count + 1);
2582
2583 RecordJSReturnSite(expr);
2584 RestoreContext();
2585 context()->Plug(r0);
2586 }
2587
2588
2589 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2590 ZoneList<Expression*>* args = expr->arguments();
2591 DCHECK(args->length() == 1);
2592
2593 VisitForAccumulatorValue(args->at(0));
2594
2595 Label materialize_true, materialize_false;
2596 Label* if_true = NULL;
2597 Label* if_false = NULL;
2598 Label* fall_through = NULL;
2599 context()->PrepareTest(&materialize_true, &materialize_false,
2600 &if_true, &if_false, &fall_through);
2601
2602 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2603 __ SmiTst(r0);
2604 Split(eq, if_true, if_false, fall_through);
2605
2606 context()->Plug(if_true, if_false);
2607 }
2608
2609
2610 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2611 ZoneList<Expression*>* args = expr->arguments();
2612 DCHECK(args->length() == 1);
2613
2614 VisitForAccumulatorValue(args->at(0));
2615
2616 Label materialize_true, materialize_false;
2617 Label* if_true = NULL;
2618 Label* if_false = NULL;
2619 Label* fall_through = NULL;
2620 context()->PrepareTest(&materialize_true, &materialize_false,
2621 &if_true, &if_false, &fall_through);
2622
2623 __ JumpIfSmi(r0, if_false);
2624 __ CompareObjectType(r0, r1, r1, FIRST_JS_RECEIVER_TYPE);
2625 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2626 Split(ge, if_true, if_false, fall_through);
2627
2628 context()->Plug(if_true, if_false);
2629 }
2630
2631
2632 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2633 ZoneList<Expression*>* args = expr->arguments();
2634 DCHECK(args->length() == 1);
2635
2636 VisitForAccumulatorValue(args->at(0));
2637
2638 Label materialize_true, materialize_false;
2639 Label* if_true = NULL;
2640 Label* if_false = NULL;
2641 Label* fall_through = NULL;
2642 context()->PrepareTest(&materialize_true, &materialize_false,
2643 &if_true, &if_false, &fall_through);
2644
2645 __ JumpIfSmi(r0, if_false);
2646 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
2647 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2648 Split(eq, if_true, if_false, fall_through);
2649
2650 context()->Plug(if_true, if_false);
2651 }
2652
2653
2654 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2655 ZoneList<Expression*>* args = expr->arguments();
2656 DCHECK(args->length() == 1);
2657
2658 VisitForAccumulatorValue(args->at(0));
2659
2660 Label materialize_true, materialize_false;
2661 Label* if_true = NULL;
2662 Label* if_false = NULL;
2663 Label* fall_through = NULL;
2664 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2665 &if_false, &fall_through);
2666
2667 __ JumpIfSmi(r0, if_false);
2668 __ CompareObjectType(r0, r1, r1, JS_TYPED_ARRAY_TYPE);
2669 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2670 Split(eq, if_true, if_false, fall_through);
2671
2672 context()->Plug(if_true, if_false);
2673 }
2674
2675
2676 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2677 ZoneList<Expression*>* args = expr->arguments();
2678 DCHECK(args->length() == 1);
2679
2680 VisitForAccumulatorValue(args->at(0));
2681
2682 Label materialize_true, materialize_false;
2683 Label* if_true = NULL;
2684 Label* if_false = NULL;
2685 Label* fall_through = NULL;
2686 context()->PrepareTest(&materialize_true, &materialize_false,
2687 &if_true, &if_false, &fall_through);
2688
2689 __ JumpIfSmi(r0, if_false);
2690 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
2691 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2692 Split(eq, if_true, if_false, fall_through);
2693
2694 context()->Plug(if_true, if_false);
2695 }
2696
2697
2698 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2699 ZoneList<Expression*>* args = expr->arguments();
2700 DCHECK(args->length() == 1);
2701
2702 VisitForAccumulatorValue(args->at(0));
2703
2704 Label materialize_true, materialize_false;
2705 Label* if_true = NULL;
2706 Label* if_false = NULL;
2707 Label* fall_through = NULL;
2708 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2709 &if_false, &fall_through);
2710
2711 __ JumpIfSmi(r0, if_false);
2712 __ CompareObjectType(r0, r1, r1, JS_PROXY_TYPE);
2713 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2714 Split(eq, if_true, if_false, fall_through);
2715
2716 context()->Plug(if_true, if_false);
2717 }
2718
2719
2720 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2721 ZoneList<Expression*>* args = expr->arguments();
2722 DCHECK(args->length() == 1);
2723 Label done, null, function, non_function_constructor;
2724
2725 VisitForAccumulatorValue(args->at(0));
2726
2727 // If the object is not a JSReceiver, we return null.
2728 __ JumpIfSmi(r0, &null);
2729 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2730 __ CompareObjectType(r0, r0, r1, FIRST_JS_RECEIVER_TYPE);
2731 // Map is now in r0.
2732 __ b(lt, &null);
2733
2734 // Return 'Function' for JSFunction and JSBoundFunction objects.
2735 __ cmp(r1, Operand(FIRST_FUNCTION_TYPE));
2736 STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
2737 __ b(hs, &function);
2738
2739 // Check if the constructor in the map is a JS function.
2740 Register instance_type = r2;
2741 __ GetMapConstructor(r0, r0, r1, instance_type);
2742 __ cmp(instance_type, Operand(JS_FUNCTION_TYPE));
2743 __ b(ne, &non_function_constructor);
2744
2745 // r0 now contains the constructor function. Grab the
2746 // instance class name from there.
2747 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
2748 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
2749 __ b(&done);
2750
2751 // Functions have class 'Function'.
2752 __ bind(&function);
2753 __ LoadRoot(r0, Heap::kFunction_stringRootIndex);
2754 __ jmp(&done);
2755
2756 // Objects with a non-function constructor have class 'Object'.
2757 __ bind(&non_function_constructor);
2758 __ LoadRoot(r0, Heap::kObject_stringRootIndex);
2759 __ jmp(&done);
2760
2761 // Non-JS objects have class null.
2762 __ bind(&null);
2763 __ LoadRoot(r0, Heap::kNullValueRootIndex);
2764
2765 // All done.
2766 __ bind(&done);
2767
2768 context()->Plug(r0);
2769 }
2770
2771
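// %_StringCharCodeAt(string, index): the generated fast path handles a string
// receiver with an in-range smi index; an out-of-range index produces NaN,
// and the remaining cases fall back to the slow path below.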
2772 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
2773 ZoneList<Expression*>* args = expr->arguments();
2774 DCHECK(args->length() == 2);
2775 VisitForStackValue(args->at(0));
2776 VisitForAccumulatorValue(args->at(1));
2777
2778 Register object = r1;
2779 Register index = r0;
2780 Register result = r3;
2781
2782 PopOperand(object);
2783
2784 Label need_conversion;
2785 Label index_out_of_range;
2786 Label done;
2787 StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
2788 &need_conversion, &index_out_of_range);
2789 generator.GenerateFast(masm_);
2790 __ jmp(&done);
2791
2792 __ bind(&index_out_of_range);
2793 // When the index is out of range, the spec requires us to return
2794 // NaN.
2795 __ LoadRoot(result, Heap::kNanValueRootIndex);
2796 __ jmp(&done);
2797
2798 __ bind(&need_conversion);
2799 // Load the undefined value into the result register, which will
2800 // trigger conversion.
2801 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2802 __ jmp(&done);
2803
2804 NopRuntimeCallHelper call_helper;
2805 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
2806
2807 __ bind(&done);
2808 context()->Plug(result);
2809 }
2810
2811
2812 void FullCodeGenerator::EmitCall(CallRuntime* expr) {
2813 ZoneList<Expression*>* args = expr->arguments();
2814 DCHECK_LE(2, args->length());
2815 // Push target, receiver and arguments onto the stack.
2816 for (Expression* const arg : *args) {
2817 VisitForStackValue(arg);
2818 }
2819 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
2820 // Move target to r1.
2821 int const argc = args->length() - 2;
2822 __ ldr(r1, MemOperand(sp, (argc + 1) * kPointerSize));
2823 // Call the target.
2824 __ mov(r0, Operand(argc));
2825 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2826 OperandStackDepthDecrement(argc + 1);
2827 RestoreContext();
2828 // Discard the function left on TOS.
2829 context()->DropAndPlug(1, r0);
2830 }
2831
2832 void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
2833 ZoneList<Expression*>* args = expr->arguments();
2834 DCHECK_EQ(1, args->length());
2835 VisitForAccumulatorValue(args->at(0));
2836 __ AssertFunction(r0);
2837 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
2838 __ ldr(r0, FieldMemOperand(r0, Map::kPrototypeOffset));
2839 context()->Plug(r0);
2840 }
2841
2842 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
2843 DCHECK(expr->arguments()->length() == 0);
2844 ExternalReference debug_is_active =
2845 ExternalReference::debug_is_active_address(isolate());
2846 __ mov(ip, Operand(debug_is_active));
2847 __ ldrb(r0, MemOperand(ip));
2848 __ SmiTag(r0);
2849 context()->Plug(r0);
2850 }
2851
2852
2853 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
2854 ZoneList<Expression*>* args = expr->arguments();
2855 DCHECK_EQ(2, args->length());
2856 VisitForStackValue(args->at(0));
2857 VisitForStackValue(args->at(1));
2858
2859 Label runtime, done;
2860
2861 __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &runtime,
2862 NO_ALLOCATION_FLAGS);
2863 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r1);
2864 __ pop(r3);
2865 __ pop(r2);
2866 __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
2867 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2868 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2869 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
2870 __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset));
2871 __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset));
2872 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
2873 __ b(&done);
2874
2875 __ bind(&runtime);
2876 CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
2877
2878 __ bind(&done);
2879 context()->Plug(r0);
2880 }
2881
2882
2883 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
2884 // Push function.
2885 __ LoadNativeContextSlot(expr->context_index(), r0);
2886 PushOperand(r0);
2887
2888 // Push undefined as the receiver.
2889 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2890 PushOperand(r0);
2891 }
2892
2893
2894 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
2895 ZoneList<Expression*>* args = expr->arguments();
2896 int arg_count = args->length();
2897
2898 SetCallPosition(expr);
2899 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2900 __ mov(r0, Operand(arg_count));
2901 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
2902 RelocInfo::CODE_TARGET);
2903 OperandStackDepthDecrement(arg_count + 1);
2904 RestoreContext();
2905 }
2906
2907
2908 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
2909 switch (expr->op()) {
2910 case Token::DELETE: {
2911 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
2912 Property* property = expr->expression()->AsProperty();
2913 VariableProxy* proxy = expr->expression()->AsVariableProxy();
2914
2915 if (property != NULL) {
2916 VisitForStackValue(property->obj());
2917 VisitForStackValue(property->key());
2918 CallRuntimeWithOperands(is_strict(language_mode())
2919 ? Runtime::kDeleteProperty_Strict
2920 : Runtime::kDeleteProperty_Sloppy);
2921 context()->Plug(r0);
2922 } else if (proxy != NULL) {
2923 Variable* var = proxy->var();
2924 // Delete of an unqualified identifier is disallowed in strict mode but
2925 // "delete this" is allowed.
2926 bool is_this = var->is_this();
2927 DCHECK(is_sloppy(language_mode()) || is_this);
2928 if (var->IsUnallocated()) {
2929 __ LoadGlobalObject(r2);
2930 __ mov(r1, Operand(var->name()));
2931 __ Push(r2, r1);
2932 __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
2933 context()->Plug(r0);
2934 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
2935 // Result of deleting non-global, non-dynamic variables is false.
2936 // The subexpression does not have side effects.
2937 context()->Plug(is_this);
2938 } else {
2939 // Non-global variable. Call the runtime to try to delete from the
2940 // context where the variable was introduced.
2941 __ Push(var->name());
2942 __ CallRuntime(Runtime::kDeleteLookupSlot);
2943 context()->Plug(r0);
2944 }
2945 } else {
2946 // Result of deleting non-property, non-variable reference is true.
2947 // The subexpression may have side effects.
2948 VisitForEffect(expr->expression());
2949 context()->Plug(true);
2950 }
2951 break;
2952 }
2953
2954 case Token::VOID: {
2955 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
2956 VisitForEffect(expr->expression());
2957 context()->Plug(Heap::kUndefinedValueRootIndex);
2958 break;
2959 }
2960
2961 case Token::NOT: {
2962 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
2963 if (context()->IsEffect()) {
2964 // Unary NOT has no side effects so it's only necessary to visit the
2965 // subexpression. Match the optimizing compiler by not branching.
2966 VisitForEffect(expr->expression());
2967 } else if (context()->IsTest()) {
2968 const TestContext* test = TestContext::cast(context());
2969 // The labels are swapped for the recursive call.
2970 VisitForControl(expr->expression(),
2971 test->false_label(),
2972 test->true_label(),
2973 test->fall_through());
2974 context()->Plug(test->true_label(), test->false_label());
2975 } else {
2976 // We handle value contexts explicitly rather than simply visiting
2977 // for control and plugging the control flow into the context,
2978 // because we need to prepare a pair of extra administrative AST ids
2979 // for the optimizing compiler.
2980 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
2981 Label materialize_true, materialize_false, done;
2982 VisitForControl(expr->expression(),
2983 &materialize_false,
2984 &materialize_true,
2985 &materialize_true);
2986 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
2987 __ bind(&materialize_true);
2988 PrepareForBailoutForId(expr->MaterializeTrueId(),
2989 BailoutState::NO_REGISTERS);
2990 __ LoadRoot(r0, Heap::kTrueValueRootIndex);
2991 if (context()->IsStackValue()) __ push(r0);
2992 __ jmp(&done);
2993 __ bind(&materialize_false);
2994 PrepareForBailoutForId(expr->MaterializeFalseId(),
2995 BailoutState::NO_REGISTERS);
2996 __ LoadRoot(r0, Heap::kFalseValueRootIndex);
2997 if (context()->IsStackValue()) __ push(r0);
2998 __ bind(&done);
2999 }
3000 break;
3001 }
3002
3003 case Token::TYPEOF: {
3004 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3005 {
3006 AccumulatorValueContext context(this);
3007 VisitForTypeofValue(expr->expression());
3008 }
3009 __ mov(r3, r0);
3010 __ Call(isolate()->builtins()->Typeof(), RelocInfo::CODE_TARGET);
3011 context()->Plug(r0);
3012 break;
3013 }
3014
3015 default:
3016 UNREACHABLE();
3017 }
3018 }
3019
3020
3021 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3022 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
3023
3024 Comment cmnt(masm_, "[ CountOperation");
3025
3026 Property* prop = expr->expression()->AsProperty();
3027 LhsKind assign_type = Property::GetAssignType(prop);
3028
3029 // Evaluate expression and get value.
3030 if (assign_type == VARIABLE) {
3031 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3032 AccumulatorValueContext context(this);
3033 EmitVariableLoad(expr->expression()->AsVariableProxy());
3034 } else {
3035 // Reserve space for result of postfix operation.
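    // For example, for o.x++ in a value context this slot will later hold the
    // original value of o.x, which is the result of the whole expression.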
3036 if (expr->is_postfix() && !context()->IsEffect()) {
3037 __ mov(ip, Operand(Smi::kZero));
3038 PushOperand(ip);
3039 }
3040 switch (assign_type) {
3041 case NAMED_PROPERTY: {
3042 // Put the object both on the stack and in the register.
3043 VisitForStackValue(prop->obj());
3044 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3045 EmitNamedPropertyLoad(prop);
3046 break;
3047 }
3048
3049 case NAMED_SUPER_PROPERTY: {
3050 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3051 VisitForAccumulatorValue(
3052 prop->obj()->AsSuperPropertyReference()->home_object());
3053 PushOperand(result_register());
3054 const Register scratch = r1;
3055 __ ldr(scratch, MemOperand(sp, kPointerSize));
3056 PushOperand(scratch);
3057 PushOperand(result_register());
3058 EmitNamedSuperPropertyLoad(prop);
3059 break;
3060 }
3061
3062 case KEYED_SUPER_PROPERTY: {
3063 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3064 VisitForStackValue(
3065 prop->obj()->AsSuperPropertyReference()->home_object());
3066 VisitForAccumulatorValue(prop->key());
3067 PushOperand(result_register());
3068 const Register scratch = r1;
3069 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
3070 PushOperand(scratch);
3071 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
3072 PushOperand(scratch);
3073 PushOperand(result_register());
3074 EmitKeyedSuperPropertyLoad(prop);
3075 break;
3076 }
3077
3078 case KEYED_PROPERTY: {
3079 VisitForStackValue(prop->obj());
3080 VisitForStackValue(prop->key());
3081 __ ldr(LoadDescriptor::ReceiverRegister(),
3082 MemOperand(sp, 1 * kPointerSize));
3083 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
3084 EmitKeyedPropertyLoad(prop);
3085 break;
3086 }
3087
3088 case VARIABLE:
3089 UNREACHABLE();
3090 }
3091 }
3092
3093   // We need a second deoptimization point after loading the value
3094   // in case evaluating the property load may have a side effect.
3095 if (assign_type == VARIABLE) {
3096 PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
3097 } else {
3098 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
3099 }
3100
3101 // Inline smi case if we are in a loop.
3102 Label stub_call, done;
3103 JumpPatchSite patch_site(masm_);
3104
3105 int count_value = expr->op() == Token::INC ? 1 : -1;
3106 if (ShouldInlineSmiCase(expr->op())) {
3107 Label slow;
3108 patch_site.EmitJumpIfNotSmi(r0, &slow);
3109
3110 // Save result for postfix expressions.
3111 if (expr->is_postfix()) {
3112 if (!context()->IsEffect()) {
3113 // Save the result on the stack. If we have a named or keyed property
3114 // we store the result under the receiver that is currently on top
3115 // of the stack.
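        // The reserved result slot lies below the operands pushed for the
        // property load, so the store offset skips one stack slot per
        // remaining operand.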
3116 switch (assign_type) {
3117 case VARIABLE:
3118 __ push(r0);
3119 break;
3120 case NAMED_PROPERTY:
3121 __ str(r0, MemOperand(sp, kPointerSize));
3122 break;
3123 case NAMED_SUPER_PROPERTY:
3124 __ str(r0, MemOperand(sp, 2 * kPointerSize));
3125 break;
3126 case KEYED_PROPERTY:
3127 __ str(r0, MemOperand(sp, 2 * kPointerSize));
3128 break;
3129 case KEYED_SUPER_PROPERTY:
3130 __ str(r0, MemOperand(sp, 3 * kPointerSize));
3131 break;
3132 }
3133 }
3134 }
3135
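    // Inline Smi increment/decrement: the addition sets the condition flags,
    // so the overflow (V) flag indicates that the result left the Smi range.
    // On overflow the addition is undone and the BinaryOpIC stub below
    // handles the operation instead.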
3136 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
3137 __ b(vc, &done);
3138 // Call stub. Undo operation first.
3139 __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
3140 __ jmp(&stub_call);
3141 __ bind(&slow);
3142 }
3143
3144 // Convert old value into a number.
3145 __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
3146 RestoreContext();
3147 PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);
3148
3149 // Save result for postfix expressions.
3150 if (expr->is_postfix()) {
3151 if (!context()->IsEffect()) {
3152 // Save the result on the stack. If we have a named or keyed property
3153 // we store the result under the receiver that is currently on top
3154 // of the stack.
3155 switch (assign_type) {
3156 case VARIABLE:
3157 PushOperand(r0);
3158 break;
3159 case NAMED_PROPERTY:
3160 __ str(r0, MemOperand(sp, kPointerSize));
3161 break;
3162 case NAMED_SUPER_PROPERTY:
3163 __ str(r0, MemOperand(sp, 2 * kPointerSize));
3164 break;
3165 case KEYED_PROPERTY:
3166 __ str(r0, MemOperand(sp, 2 * kPointerSize));
3167 break;
3168 case KEYED_SUPER_PROPERTY:
3169 __ str(r0, MemOperand(sp, 3 * kPointerSize));
3170 break;
3171 }
3172 }
3173 }
3174
3175
3176 __ bind(&stub_call);
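  // The Token::ADD BinaryOpIC expects its left operand (the old value) in r1
  // and its right operand (the Smi count value) in r0.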
3177 __ mov(r1, r0);
3178 __ mov(r0, Operand(Smi::FromInt(count_value)));
3179
3180 SetExpressionPosition(expr);
3181
3182 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
3183 CallIC(code, expr->CountBinOpFeedbackId());
3184 patch_site.EmitPatchInfo();
3185 __ bind(&done);
3186
3187 // Store the value returned in r0.
3188 switch (assign_type) {
3189 case VARIABLE: {
3190 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3191 if (expr->is_postfix()) {
3192 { EffectContext context(this);
3193 EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
3194 proxy->hole_check_mode());
3195 PrepareForBailoutForId(expr->AssignmentId(),
3196 BailoutState::TOS_REGISTER);
3197 context.Plug(r0);
3198 }
3199 // For all contexts except EffectContext we have the result on
3200 // top of the stack.
3201 if (!context()->IsEffect()) {
3202 context()->PlugTOS();
3203 }
3204 } else {
3205 EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
3206 proxy->hole_check_mode());
3207 PrepareForBailoutForId(expr->AssignmentId(),
3208 BailoutState::TOS_REGISTER);
3209 context()->Plug(r0);
3210 }
3211 break;
3212 }
3213 case NAMED_PROPERTY: {
3214 PopOperand(StoreDescriptor::ReceiverRegister());
3215 CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
3216 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3217 if (expr->is_postfix()) {
3218 if (!context()->IsEffect()) {
3219 context()->PlugTOS();
3220 }
3221 } else {
3222 context()->Plug(r0);
3223 }
3224 break;
3225 }
3226 case NAMED_SUPER_PROPERTY: {
3227 EmitNamedSuperPropertyStore(prop);
3228 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3229 if (expr->is_postfix()) {
3230 if (!context()->IsEffect()) {
3231 context()->PlugTOS();
3232 }
3233 } else {
3234 context()->Plug(r0);
3235 }
3236 break;
3237 }
3238 case KEYED_SUPER_PROPERTY: {
3239 EmitKeyedSuperPropertyStore(prop);
3240 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3241 if (expr->is_postfix()) {
3242 if (!context()->IsEffect()) {
3243 context()->PlugTOS();
3244 }
3245 } else {
3246 context()->Plug(r0);
3247 }
3248 break;
3249 }
3250 case KEYED_PROPERTY: {
3251 PopOperands(StoreDescriptor::ReceiverRegister(),
3252 StoreDescriptor::NameRegister());
3253 CallKeyedStoreIC(expr->CountSlot());
3254 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3255 if (expr->is_postfix()) {
3256 if (!context()->IsEffect()) {
3257 context()->PlugTOS();
3258 }
3259 } else {
3260 context()->Plug(r0);
3261 }
3262 break;
3263 }
3264 }
3265 }
3266
3267
3268 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3269 Expression* sub_expr,
3270 Handle<String> check) {
3271 Label materialize_true, materialize_false;
3272 Label* if_true = NULL;
3273 Label* if_false = NULL;
3274 Label* fall_through = NULL;
3275 context()->PrepareTest(&materialize_true, &materialize_false,
3276 &if_true, &if_false, &fall_through);
3277
3278 { AccumulatorValueContext context(this);
3279 VisitForTypeofValue(sub_expr);
3280 }
3281 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3282
3283 Factory* factory = isolate()->factory();
3284 if (String::Equals(check, factory->number_string())) {
3285 __ JumpIfSmi(r0, if_true);
3286 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
3287 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
3288 __ cmp(r0, ip);
3289 Split(eq, if_true, if_false, fall_through);
3290 } else if (String::Equals(check, factory->string_string())) {
3291 __ JumpIfSmi(r0, if_false);
3292 __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
3293 Split(lt, if_true, if_false, fall_through);
3294 } else if (String::Equals(check, factory->symbol_string())) {
3295 __ JumpIfSmi(r0, if_false);
3296 __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
3297 Split(eq, if_true, if_false, fall_through);
3298 } else if (String::Equals(check, factory->boolean_string())) {
3299 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
3300 __ b(eq, if_true);
3301 __ CompareRoot(r0, Heap::kFalseValueRootIndex);
3302 Split(eq, if_true, if_false, fall_through);
3303 } else if (String::Equals(check, factory->undefined_string())) {
3304 __ CompareRoot(r0, Heap::kNullValueRootIndex);
3305 __ b(eq, if_false);
3306 __ JumpIfSmi(r0, if_false);
3307 // Check for undetectable objects => true.
3308 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
3309 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
3310 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3311 Split(ne, if_true, if_false, fall_through);
3312
3313 } else if (String::Equals(check, factory->function_string())) {
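    // typeof evaluates to "function" only for objects whose map is callable
    // and not marked undetectable.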
3314 __ JumpIfSmi(r0, if_false);
3315 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
3316 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
3317 __ and_(r1, r1,
3318 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3319 __ cmp(r1, Operand(1 << Map::kIsCallable));
3320 Split(eq, if_true, if_false, fall_through);
3321 } else if (String::Equals(check, factory->object_string())) {
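    // typeof evaluates to "object" for null and for JS receivers that are
    // neither callable nor undetectable.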
3322 __ JumpIfSmi(r0, if_false);
3323 __ CompareRoot(r0, Heap::kNullValueRootIndex);
3324 __ b(eq, if_true);
3325 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3326 __ CompareObjectType(r0, r0, r1, FIRST_JS_RECEIVER_TYPE);
3327 __ b(lt, if_false);
3328 // Check for callable or undetectable objects => false.
3329 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
3330 __ tst(r1, Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3331 Split(eq, if_true, if_false, fall_through);
3332 // clang-format off
3333 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
3334 } else if (String::Equals(check, factory->type##_string())) { \
3335 __ JumpIfSmi(r0, if_false); \
3336 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); \
3337 __ CompareRoot(r0, Heap::k##Type##MapRootIndex); \
3338 Split(eq, if_true, if_false, fall_through);
3339 SIMD128_TYPES(SIMD128_TYPE)
3340 #undef SIMD128_TYPE
3341 // clang-format on
3342 } else {
3343 if (if_false != fall_through) __ jmp(if_false);
3344 }
3345 context()->Plug(if_true, if_false);
3346 }
3347
3348
3349 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3350 Comment cmnt(masm_, "[ CompareOperation");
3351
3352 // First we try a fast inlined version of the compare when one of
3353 // the operands is a literal.
3354 if (TryLiteralCompare(expr)) return;
3355
3356 // Always perform the comparison for its control flow. Pack the result
3357 // into the expression's context after the comparison is performed.
3358 Label materialize_true, materialize_false;
3359 Label* if_true = NULL;
3360 Label* if_false = NULL;
3361 Label* fall_through = NULL;
3362 context()->PrepareTest(&materialize_true, &materialize_false,
3363 &if_true, &if_false, &fall_through);
3364
3365 Token::Value op = expr->op();
3366 VisitForStackValue(expr->left());
3367 switch (op) {
3368 case Token::IN:
3369 VisitForStackValue(expr->right());
3370 SetExpressionPosition(expr);
3371 EmitHasProperty();
3372 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3373 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
3374 Split(eq, if_true, if_false, fall_through);
3375 break;
3376
3377 case Token::INSTANCEOF: {
3378 VisitForAccumulatorValue(expr->right());
3379 SetExpressionPosition(expr);
3380 PopOperand(r1);
3381 __ Call(isolate()->builtins()->InstanceOf(), RelocInfo::CODE_TARGET);
3382 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3383 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
3384 Split(eq, if_true, if_false, fall_through);
3385 break;
3386 }
3387
3388 default: {
3389 VisitForAccumulatorValue(expr->right());
3390 SetExpressionPosition(expr);
3391 Condition cond = CompareIC::ComputeCondition(op);
3392 PopOperand(r1);
3393
3394 bool inline_smi_code = ShouldInlineSmiCase(op);
3395 JumpPatchSite patch_site(masm_);
3396 if (inline_smi_code) {
3397 Label slow_case;
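        // Smis have a zero tag bit, so OR-ing the two operands and testing
        // the result for a Smi tag checks that both operands are Smis at
        // once.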
3398 __ orr(r2, r0, Operand(r1));
3399 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
3400 __ cmp(r1, r0);
3401 Split(cond, if_true, if_false, NULL);
3402 __ bind(&slow_case);
3403 }
3404
3405 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
3406 CallIC(ic, expr->CompareOperationFeedbackId());
3407 patch_site.EmitPatchInfo();
3408 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3409 __ cmp(r0, Operand::Zero());
3410 Split(cond, if_true, if_false, fall_through);
3411 }
3412 }
3413
3414 // Convert the result of the comparison into one expected for this
3415 // expression's context.
3416 context()->Plug(if_true, if_false);
3417 }
3418
3419
3420 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
3421 Expression* sub_expr,
3422 NilValue nil) {
3423 Label materialize_true, materialize_false;
3424 Label* if_true = NULL;
3425 Label* if_false = NULL;
3426 Label* fall_through = NULL;
3427 context()->PrepareTest(&materialize_true, &materialize_false,
3428 &if_true, &if_false, &fall_through);
3429
3430 VisitForAccumulatorValue(sub_expr);
3431 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3432 if (expr->op() == Token::EQ_STRICT) {
3433 Heap::RootListIndex nil_value = nil == kNullValue ?
3434 Heap::kNullValueRootIndex :
3435 Heap::kUndefinedValueRootIndex;
3436 __ LoadRoot(r1, nil_value);
3437 __ cmp(r0, r1);
3438 Split(eq, if_true, if_false, fall_through);
3439 } else {
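    // For non-strict equality, only null, undefined and undetectable objects
    // compare equal to null/undefined, so it suffices to test the
    // undetectable bit in the map.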
3440 __ JumpIfSmi(r0, if_false);
3441 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
3442 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
3443 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3444 Split(ne, if_true, if_false, fall_through);
3445 }
3446 context()->Plug(if_true, if_false);
3447 }
3448
3449
3450 Register FullCodeGenerator::result_register() {
3451 return r0;
3452 }
3453
3454
3455 Register FullCodeGenerator::context_register() {
3456 return cp;
3457 }
3458
3459 void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
3460 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
3461 __ ldr(value, MemOperand(fp, frame_offset));
3462 }
3463
3464 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
3465 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
3466 __ str(value, MemOperand(fp, frame_offset));
3467 }
3468
3469
3470 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
3471 __ ldr(dst, ContextMemOperand(cp, context_index));
3472 }
3473
3474
3475 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
3476 DeclarationScope* closure_scope = scope()->GetClosureScope();
3477 if (closure_scope->is_script_scope() ||
3478 closure_scope->is_module_scope()) {
3479 // Contexts nested in the native context have a canonical empty function
3480 // as their closure, not the anonymous closure containing the global
3481 // code.
3482 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip);
3483 } else if (closure_scope->is_eval_scope()) {
3484 // Contexts created by a call to eval have the same closure as the
3485 // context calling eval, not the anonymous closure containing the eval
3486 // code. Fetch it from the context.
3487 __ ldr(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX));
3488 } else {
3489 DCHECK(closure_scope->is_function_scope());
3490 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3491 }
3492 PushOperand(ip);
3493 }
3494
3495
3496 // ----------------------------------------------------------------------------
3497 // Non-local control flow support.
3498
3499 void FullCodeGenerator::EnterFinallyBlock() {
3500 DCHECK(!result_register().is(r1));
3501 // Store pending message while executing finally block.
3502 ExternalReference pending_message_obj =
3503 ExternalReference::address_of_pending_message_obj(isolate());
3504 __ mov(ip, Operand(pending_message_obj));
3505 __ ldr(r1, MemOperand(ip));
3506 PushOperand(r1);
3507
3508 ClearPendingMessage();
3509 }
3510
3511
3512 void FullCodeGenerator::ExitFinallyBlock() {
3513 DCHECK(!result_register().is(r1));
3514 // Restore pending message from stack.
3515 PopOperand(r1);
3516 ExternalReference pending_message_obj =
3517 ExternalReference::address_of_pending_message_obj(isolate());
3518 __ mov(ip, Operand(pending_message_obj));
3519 __ str(r1, MemOperand(ip));
3520 }
3521
3522
3523 void FullCodeGenerator::ClearPendingMessage() {
3524 DCHECK(!result_register().is(r1));
3525 ExternalReference pending_message_obj =
3526 ExternalReference::address_of_pending_message_obj(isolate());
3527 __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
3528 __ mov(ip, Operand(pending_message_obj));
3529 __ str(r1, MemOperand(ip));
3530 }
3531
3532
3533 void FullCodeGenerator::DeferredCommands::EmitCommands() {
3534 DCHECK(!result_register().is(r1));
3535 __ Pop(result_register()); // Restore the accumulator.
3536 __ Pop(r1); // Get the token.
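  // Each deferred command was recorded with a Smi token when control left a
  // try-finally block; dispatch to the command whose token matches the one
  // just popped.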
3537 for (DeferredCommand cmd : commands_) {
3538 Label skip;
3539 __ cmp(r1, Operand(Smi::FromInt(cmd.token)));
3540 __ b(ne, &skip);
3541 switch (cmd.command) {
3542 case kReturn:
3543 codegen_->EmitUnwindAndReturn();
3544 break;
3545 case kThrow:
3546 __ Push(result_register());
3547 __ CallRuntime(Runtime::kReThrow);
3548 break;
3549 case kContinue:
3550 codegen_->EmitContinue(cmd.target);
3551 break;
3552 case kBreak:
3553 codegen_->EmitBreak(cmd.target);
3554 break;
3555 }
3556 __ bind(&skip);
3557 }
3558 }
3559
3560 #undef __
3561
3562
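// Returns the address of the first instruction of the sequence that loads
// the interrupt (or on-stack replacement) builtin address into ip at a back
// edge. |pc| points just past the 'blx ip' call, so the load sequence ends
// at pc - 2 * kInstrSize; constant pool, movw/movt and mov/orr loads span
// different numbers of instructions, so back up accordingly.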
3563 static Address GetInterruptImmediateLoadAddress(Address pc) {
3564 Address load_address = pc - 2 * Assembler::kInstrSize;
3565 if (!FLAG_enable_embedded_constant_pool) {
3566 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
3567 } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
3568 // This is an extended constant pool lookup.
3569 if (CpuFeatures::IsSupported(ARMv7)) {
3570 load_address -= 2 * Assembler::kInstrSize;
3571 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
3572 DCHECK(Assembler::IsMovT(
3573 Memory::int32_at(load_address + Assembler::kInstrSize)));
3574 } else {
3575 load_address -= 4 * Assembler::kInstrSize;
3576 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
3577 DCHECK(Assembler::IsOrrImmed(
3578 Memory::int32_at(load_address + Assembler::kInstrSize)));
3579 DCHECK(Assembler::IsOrrImmed(
3580 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
3581 DCHECK(Assembler::IsOrrImmed(
3582 Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
3583 }
3584 } else if (CpuFeatures::IsSupported(ARMv7) &&
3585 Assembler::IsMovT(Memory::int32_at(load_address))) {
3586 // This is a movw / movt immediate load.
3587 load_address -= Assembler::kInstrSize;
3588 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
3589 } else if (!CpuFeatures::IsSupported(ARMv7) &&
3590 Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
3591 // This is a mov / orr immediate load.
3592 load_address -= 3 * Assembler::kInstrSize;
3593 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
3594 DCHECK(Assembler::IsOrrImmed(
3595 Memory::int32_at(load_address + Assembler::kInstrSize)));
3596 DCHECK(Assembler::IsOrrImmed(
3597 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
3598 } else {
3599 // This is a small constant pool lookup.
3600 DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
3601 }
3602 return load_address;
3603 }
3604
3605
3606 void BackEdgeTable::PatchAt(Code* unoptimized_code,
3607 Address pc,
3608 BackEdgeState target_state,
3609 Code* replacement_code) {
3610 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
3611 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
3612 Isolate* isolate = unoptimized_code->GetIsolate();
3613 CodePatcher patcher(isolate, branch_address, 1);
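  // For INTERRUPT, restore the conditional branch that skips the call while
  // the profiling counter has not yet gone negative; for
  // ON_STACK_REPLACEMENT, replace that branch with a nop so the call at the
  // back edge is always taken.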
3614 switch (target_state) {
3615 case INTERRUPT:
3616 {
3617 // <decrement profiling counter>
3618 // bpl ok
3619 // ; load interrupt stub address into ip - either of (for ARMv7):
3620 // ; <small cp load> | <extended cp load> | <immediate load>
3621 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
3622 // | movt ip, #imm | movw ip, #imm
3623 // | ldr ip, [pp, ip]
3624 // ; or (for ARMv6):
3625 // ; <small cp load> | <extended cp load> | <immediate load>
3626 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
3627 // | orr ip, ip, #imm | orr ip, ip, #imm
3628 // | orr ip, ip, #imm | orr ip, ip, #imm
3629 // | orr ip, ip, #imm | orr ip, ip, #imm
3630 // blx ip
3631 // <reset profiling counter>
3632 // ok-label
3633
3634 // Calculate the branch offset to the ok-label: the difference between
3635 // the branch address and |pc| (which points just past <blx ip>), plus
3636 // kProfileCounterResetSequenceLength bytes for the profiling counter reset code.
3637 int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
3638 kProfileCounterResetSequenceLength;
3639 patcher.masm()->b(branch_offset, pl);
3640 break;
3641 }
3642 case ON_STACK_REPLACEMENT:
3643 // <decrement profiling counter>
3644 // mov r0, r0 (NOP)
3645 // ; load on-stack replacement address into ip - either of (for ARMv7):
3646 // ; <small cp load> | <extended cp load> | <immediate load>
3647 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
3648 // | movt ip, #imm | movw ip, #imm
3649 // | ldr ip, [pp, ip]
3650 // ; or (for ARMv6):
3651 // ; <small cp load> | <extended cp load> | <immediate load>
3652 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
3653 // | orr ip, ip, #imm | orr ip, ip, #imm
3654 // | orr ip, ip, #imm | orr ip, ip, #imm
3655 // | orr ip, ip, #imm | orr ip, ip, #imm
3656 // blx ip
3657 // <reset profiling counter>
3658 // ok-label
3659 patcher.masm()->nop();
3660 break;
3661 }
3662
3663 // Replace the call address.
3664 Assembler::set_target_address_at(isolate, pc_immediate_load_address,
3665 unoptimized_code, replacement_code->entry());
3666
3667 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
3668 unoptimized_code, pc_immediate_load_address, replacement_code);
3669 }
3670
3671
3672 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
3673 Isolate* isolate,
3674 Code* unoptimized_code,
3675 Address pc) {
3676 DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));
3677
3678 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
3679 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
3680 #ifdef DEBUG
3681 Address interrupt_address = Assembler::target_address_at(
3682 pc_immediate_load_address, unoptimized_code);
3683 #endif
3684
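  // The instruction just before the ip load distinguishes the two states: a
  // conditional branch means the interrupt check is still active, while a
  // nop means the back edge has been patched for on-stack replacement.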
3685 if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
3686 DCHECK(interrupt_address ==
3687 isolate->builtins()->InterruptCheck()->entry());
3688 return INTERRUPT;
3689 }
3690
3691 DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));
3692
3693 DCHECK(interrupt_address ==
3694 isolate->builtins()->OnStackReplacement()->entry());
3695 return ON_STACK_REPLACEMENT;
3696 }
3697
3698
3699 } // namespace internal
3700 } // namespace v8
3701
3702 #endif // V8_TARGET_ARCH_ARM
3703