1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_S390
6
7 #include "src/full-codegen/full-codegen.h"
8 #include "src/ast/compile-time-value.h"
9 #include "src/ast/scopes.h"
10 #include "src/code-factory.h"
11 #include "src/code-stubs.h"
12 #include "src/codegen.h"
13 #include "src/compilation-info.h"
14 #include "src/compiler.h"
15 #include "src/debug/debug.h"
16 #include "src/ic/ic.h"
17
18 #include "src/s390/code-stubs-s390.h"
19 #include "src/s390/macro-assembler-s390.h"
20
21 namespace v8 {
22 namespace internal {
23
24 #define __ ACCESS_MASM(masm())
25
26 // A patch site is a location in the code that can be patched. This
27 // class has a number of methods to emit the code which is patchable and the
28 // method EmitPatchInfo to record a marker back to the patchable code. This
29 // marker is a cmpi rx, #yyy instruction, and x * 0x0000ffff + yyy (raw 16 bit
30 // immediate value is used) is the delta from the pc to the first instruction of
31 // the patchable code.
32 // See PatchInlinedSmiCode in ic-s390.cc for the code that patches it.
33 class JumpPatchSite BASE_EMBEDDED {
34 public:
35 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
36 #ifdef DEBUG
37 info_emitted_ = false;
38 #endif
39 }
40
41 ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); }
42
43 // When initially emitting this, ensure that a jump is always generated to
44 // skip the inlined smi code.
45 void EmitJumpIfNotSmi(Register reg, Label* target) {
46 DCHECK(!patch_site_.is_bound() && !info_emitted_);
47 __ bind(&patch_site_);
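// CmpP(reg, reg) always sets the EQ condition; PatchInlinedSmiCode later
// rewrites this site with a real smi test (TMLL).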
48 __ CmpP(reg, reg);
49 // Emit a nop to make room for patching on 31-bit, since the
50 // TestIfSmi sequence uses a 4-byte TMLL.
51 #ifndef V8_TARGET_ARCH_S390X
52 __ nop();
53 #endif
54 __ beq(target); // Always taken before patched.
55 }
56
57 // When initially emitting this, ensure that a jump is never generated to
58 // skip the inlined smi code.
59 void EmitJumpIfSmi(Register reg, Label* target) {
60 DCHECK(!patch_site_.is_bound() && !info_emitted_);
61 __ bind(&patch_site_);
62 __ CmpP(reg, reg);
63 // Emit a nop to make room for patching on 31-bit, since the
64 // TestIfSmi sequence uses a 4-byte TMLL.
65 #ifndef V8_TARGET_ARCH_S390X
66 __ nop();
67 #endif
68 __ bne(target); // Never taken before patched.
69 }
70
71 void EmitPatchInfo() {
72 if (patch_site_.is_bound()) {
73 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
74 DCHECK(is_int16(delta_to_patch_site));
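// Emit the cmpi marker described in the class comment: the 16-bit
// immediate records the distance back to the patch site.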
75 __ chi(r0, Operand(delta_to_patch_site));
76 #ifdef DEBUG
77 info_emitted_ = true;
78 #endif
79 } else {
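// No inline smi code was emitted; leave nops in place of the patch marker.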
80 __ nop();
81 __ nop();
82 }
83 }
84
85 private:
86 MacroAssembler* masm() { return masm_; }
87 MacroAssembler* masm_;
88 Label patch_site_;
89 #ifdef DEBUG
90 bool info_emitted_;
91 #endif
92 };
93
94 // Generate code for a JS function. On entry to the function the receiver
95 // and arguments have been pushed on the stack left to right. The actual
96 // argument count matches the formal parameter count expected by the
97 // function.
98 //
99 // The live registers are:
100 // o r3: the JS function object being called (i.e., ourselves)
101 // o r5: the new target value
102 // o cp: our context
103 // o fp: our caller's frame pointer
104 // o sp: stack pointer
105 // o lr: return address
106 // o ip: our own function entry (required by the prologue)
107 //
108 // The function builds a JS frame. Please see JavaScriptFrameConstants in
109 // frames-s390.h for its layout.
110 void FullCodeGenerator::Generate() {
111 CompilationInfo* info = info_;
112 profiling_counter_ = isolate()->factory()->NewCell(
113 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
114 SetFunctionPosition(literal());
115 Comment cmnt(masm_, "[ function compiled by full code generator");
116
117 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
118
119 if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
120 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
121 __ LoadP(r4, MemOperand(sp, receiver_offset), r0);
122 __ AssertNotSmi(r4);
123 __ CompareObjectType(r4, r4, no_reg, FIRST_JS_RECEIVER_TYPE);
124 __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
125 }
126
127 // Open a frame scope to indicate that there is a frame on the stack. The
128 // MANUAL indicates that the scope shouldn't actually generate code to set up
129 // the frame (that is done below).
130 FrameScope frame_scope(masm_, StackFrame::MANUAL);
131 int prologue_offset = masm_->pc_offset();
132
133 info->set_prologue_offset(prologue_offset);
134 __ Prologue(info->GeneratePreagedPrologue(), ip, prologue_offset);
135
136 // Increment invocation count for the function.
137 {
138 Comment cmnt(masm_, "[ Increment invocation count");
139 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
140 __ LoadP(r6, FieldMemOperand(r6, LiteralsArray::kFeedbackVectorOffset));
141 __ LoadP(r1, FieldMemOperand(r6, TypeFeedbackVector::kInvocationCountIndex *
142 kPointerSize +
143 TypeFeedbackVector::kHeaderSize));
144 __ AddSmiLiteral(r1, r1, Smi::FromInt(1), r0);
145 __ StoreP(r1,
146 FieldMemOperand(
147 r6, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
148 TypeFeedbackVector::kHeaderSize));
149 }
150
151 {
152 Comment cmnt(masm_, "[ Allocate locals");
153 int locals_count = info->scope()->num_stack_slots();
154 // Generators allocate locals, if any, in context slots.
155 DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
156 OperandStackDepthIncrement(locals_count);
157 if (locals_count > 0) {
158 if (locals_count >= 128) {
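// For large frames, make sure the new stack pointer would not dip below
// the real stack limit before writing the locals.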
159 Label ok;
160 __ AddP(ip, sp, Operand(-(locals_count * kPointerSize)));
161 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
162 __ CmpLogicalP(ip, r5);
163 __ bge(&ok, Label::kNear);
164 __ CallRuntime(Runtime::kThrowStackOverflow);
165 __ bind(&ok);
166 }
167 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
168 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
169 if (locals_count >= kMaxPushes) {
170 int loop_iterations = locals_count / kMaxPushes;
171 __ mov(r4, Operand(loop_iterations));
172 Label loop_header;
173 __ bind(&loop_header);
174 // Do pushes.
175 // TODO(joransiu): Use MVC for better performance
176 __ lay(sp, MemOperand(sp, -kMaxPushes * kPointerSize));
177 for (int i = 0; i < kMaxPushes; i++) {
178 __ StoreP(ip, MemOperand(sp, i * kPointerSize));
179 }
180 // Continue loop if not done.
181 __ BranchOnCount(r4, &loop_header);
182 }
183 int remaining = locals_count % kMaxPushes;
184 // Emit the remaining pushes.
185 // TODO(joransiu): Use MVC for better performance
186 if (remaining > 0) {
187 __ lay(sp, MemOperand(sp, -remaining * kPointerSize));
188 for (int i = 0; i < remaining; i++) {
189 __ StoreP(ip, MemOperand(sp, i * kPointerSize));
190 }
191 }
192 }
193 }
194
195 bool function_in_register_r3 = true;
196
197 // Possibly allocate a local context.
198 if (info->scope()->NeedsContext()) {
199 // Argument to NewContext is the function, which is still in r3.
200 Comment cmnt(masm_, "[ Allocate context");
201 bool need_write_barrier = true;
202 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
203 if (info->scope()->is_script_scope()) {
204 __ push(r3);
205 __ Push(info->scope()->scope_info());
206 __ CallRuntime(Runtime::kNewScriptContext);
207 PrepareForBailoutForId(BailoutId::ScriptContext(),
208 BailoutState::TOS_REGISTER);
209 // The new target value is not used; clobbering it is safe.
210 DCHECK_NULL(info->scope()->new_target_var());
211 } else {
212 if (info->scope()->new_target_var() != nullptr) {
213 __ push(r5); // Preserve new target.
214 }
215 if (slots <= FastNewFunctionContextStub::kMaximumSlots) {
216 FastNewFunctionContextStub stub(isolate());
217 __ mov(FastNewFunctionContextDescriptor::SlotsRegister(),
218 Operand(slots));
219 __ CallStub(&stub);
220 // Result of FastNewFunctionContextStub is always in new space.
221 need_write_barrier = false;
222 } else {
223 __ push(r3);
224 __ CallRuntime(Runtime::kNewFunctionContext);
225 }
226 if (info->scope()->new_target_var() != nullptr) {
227 __ pop(r5); // Preserve new target.
228 }
229 }
230 function_in_register_r3 = false;
231 // Context is returned in r2. It replaces the context passed to us.
232 // It's saved on the stack and kept live in cp.
233 __ LoadRR(cp, r2);
234 __ StoreP(r2, MemOperand(fp, StandardFrameConstants::kContextOffset));
235 // Copy any necessary parameters into the context.
236 int num_parameters = info->scope()->num_parameters();
237 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
238 for (int i = first_parameter; i < num_parameters; i++) {
239 Variable* var =
240 (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
241 if (var->IsContextSlot()) {
242 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
243 (num_parameters - 1 - i) * kPointerSize;
244 // Load parameter from stack.
245 __ LoadP(r2, MemOperand(fp, parameter_offset), r0);
246 // Store it in the context.
247 MemOperand target = ContextMemOperand(cp, var->index());
248 __ StoreP(r2, target);
249
250 // Update the write barrier.
251 if (need_write_barrier) {
252 __ RecordWriteContextSlot(cp, target.offset(), r2, r4,
253 kLRHasBeenSaved, kDontSaveFPRegs);
254 } else if (FLAG_debug_code) {
255 Label done;
256 __ JumpIfInNewSpace(cp, r2, &done);
257 __ Abort(kExpectedNewSpaceObject);
258 __ bind(&done);
259 }
260 }
261 }
262 }
263
264 // The registers holding the function and the new target are both trashed if
265 // we bail out here. But since that can happen only when the new target is not
266 // used and we allocate a context, the value of |function_in_register_r3| is correct.
267 PrepareForBailoutForId(BailoutId::FunctionContext(),
268 BailoutState::NO_REGISTERS);
269
270 // Possibly set up a local binding to the this function which is used in
271 // derived constructors with super calls.
272 Variable* this_function_var = info->scope()->this_function_var();
273 if (this_function_var != nullptr) {
274 Comment cmnt(masm_, "[ This function");
275 if (!function_in_register_r3) {
276 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
277 // The write barrier clobbers the register again, so keep it marked as such.
278 }
279 SetVar(this_function_var, r3, r2, r4);
280 }
281
282 // Possibly set up a local binding to the new target value.
283 Variable* new_target_var = info->scope()->new_target_var();
284 if (new_target_var != nullptr) {
285 Comment cmnt(masm_, "[ new.target");
286 SetVar(new_target_var, r5, r2, r4);
287 }
288
289 // Possibly allocate RestParameters
290 Variable* rest_param = info->scope()->rest_parameter();
291 if (rest_param != nullptr) {
292 Comment cmnt(masm_, "[ Allocate rest parameter array");
293
294 if (!function_in_register_r3) {
295 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
296 }
297 FastNewRestParameterStub stub(isolate());
298 __ CallStub(&stub);
299
300 function_in_register_r3 = false;
301 SetVar(rest_param, r2, r3, r4);
302 }
303
304 Variable* arguments = info->scope()->arguments();
305 if (arguments != NULL) {
306 // Function uses arguments object.
307 Comment cmnt(masm_, "[ Allocate arguments object");
308 if (!function_in_register_r3) {
309 // Load this again, if it's used by the local context below.
310 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
311 }
312 if (is_strict(language_mode()) || !has_simple_parameters()) {
313 FastNewStrictArgumentsStub stub(isolate());
314 __ CallStub(&stub);
315 } else if (literal()->has_duplicate_parameters()) {
316 __ Push(r3);
317 __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
318 } else {
319 FastNewSloppyArgumentsStub stub(isolate());
320 __ CallStub(&stub);
321 }
322
323 SetVar(arguments, r2, r3, r4);
324 }
325
326 if (FLAG_trace) {
327 __ CallRuntime(Runtime::kTraceEnter);
328 }
329
330 // Visit the declarations and body.
331 PrepareForBailoutForId(BailoutId::FunctionEntry(),
332 BailoutState::NO_REGISTERS);
333 {
334 Comment cmnt(masm_, "[ Declarations");
335 VisitDeclarations(scope()->declarations());
336 }
337
338 // Assert that the declarations do not use ICs. Otherwise the debugger
339 // won't be able to redirect a PC at an IC to the correct IC in newly
340 // recompiled code.
341 DCHECK_EQ(0, ic_total_count_);
342
343 {
344 Comment cmnt(masm_, "[ Stack check");
345 PrepareForBailoutForId(BailoutId::Declarations(),
346 BailoutState::NO_REGISTERS);
347 Label ok;
348 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
349 __ CmpLogicalP(sp, ip);
350 __ bge(&ok, Label::kNear);
351 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
352 __ bind(&ok);
353 }
354
355 {
356 Comment cmnt(masm_, "[ Body");
357 DCHECK(loop_depth() == 0);
358 VisitStatements(literal()->body());
359 DCHECK(loop_depth() == 0);
360 }
361
362 // Always emit a 'return undefined' in case control fell off the end of
363 // the body.
364 {
365 Comment cmnt(masm_, "[ return <undefined>;");
366 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
367 }
368 EmitReturnSequence();
369 }
370
371 void FullCodeGenerator::ClearAccumulator() {
372 __ LoadSmiLiteral(r2, Smi::kZero);
373 }
374
375 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
376 __ mov(r4, Operand(profiling_counter_));
377 intptr_t smi_delta = reinterpret_cast<intptr_t>(Smi::FromInt(delta));
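// When the general-instruction-extension facility is available and the
// (negated) delta fits in a signed 8-bit immediate, update the counter cell
// directly in memory instead of a load/modify/store sequence.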
378 if (CpuFeatures::IsSupported(GENERAL_INSTR_EXT) && is_int8(-smi_delta)) {
379 __ AddP(FieldMemOperand(r4, Cell::kValueOffset), Operand(-smi_delta));
380 __ LoadP(r5, FieldMemOperand(r4, Cell::kValueOffset));
381 } else {
382 __ LoadP(r5, FieldMemOperand(r4, Cell::kValueOffset));
383 __ SubSmiLiteral(r5, r5, Smi::FromInt(delta), r0);
384 __ StoreP(r5, FieldMemOperand(r4, Cell::kValueOffset));
385 }
386 }
387
388 void FullCodeGenerator::EmitProfilingCounterReset() {
389 int reset_value = FLAG_interrupt_budget;
390 __ mov(r4, Operand(profiling_counter_));
391 __ LoadSmiLiteral(r5, Smi::FromInt(reset_value));
392 __ StoreP(r5, FieldMemOperand(r4, Cell::kValueOffset));
393 }
394
395 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
396 Label* back_edge_target) {
397 Comment cmnt(masm_, "[ Back edge bookkeeping");
398 Label ok;
399
400 DCHECK(back_edge_target->is_bound());
401 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) +
402 kCodeSizeMultiplier / 2;
403 int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
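// Weight the interrupt check by the size of the loop body so that larger
// loops consume more of the interrupt budget per back edge.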
404 EmitProfilingCounterDecrement(weight);
405 {
406 // BackEdgeTable::PatchAt manipulates this sequence.
407 __ bge(&ok, Label::kNear);
408 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
409
410 // Record a mapping of this PC offset to the OSR id. This is used to find
411 // the AST id from the unoptimized code in order to use it as a key into
412 // the deoptimization input data found in the optimized code.
413 RecordBackEdge(stmt->OsrEntryId());
414 }
415 EmitProfilingCounterReset();
416
417 __ bind(&ok);
418 PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
419 // Record a mapping of the OSR id to this PC. This is used if the OSR
420 // entry becomes the target of a bailout. We don't expect it to be, but
421 // we want it to work if it is.
422 PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
423 }
424
425 void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
426 bool is_tail_call) {
427 // Pretend that the exit is a backwards jump to the entry.
428 int weight = 1;
429 if (info_->ShouldSelfOptimize()) {
430 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
431 } else {
432 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
433 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
434 }
435 EmitProfilingCounterDecrement(weight);
436 Label ok;
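// r5 holds the decremented counter value; fall through to the interrupt
// check once it goes negative.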
437 __ CmpP(r5, Operand::Zero());
438 __ bge(&ok);
439 // Don't need to save result register if we are going to do a tail call.
440 if (!is_tail_call) {
441 __ push(r2);
442 }
443 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
444 if (!is_tail_call) {
445 __ pop(r2);
446 }
447 EmitProfilingCounterReset();
448 __ bind(&ok);
449 }
450
451 void FullCodeGenerator::EmitReturnSequence() {
452 Comment cmnt(masm_, "[ Return sequence");
453 if (return_label_.is_bound()) {
454 __ b(&return_label_);
455 } else {
456 __ bind(&return_label_);
457 if (FLAG_trace) {
458 // Push the return value on the stack as the parameter.
459 // Runtime::TraceExit returns its parameter in r2.
460 __ push(r2);
461 __ CallRuntime(Runtime::kTraceExit);
462 }
463 EmitProfilingCounterHandlingForReturnSequence(false);
464
465 // Make sure that the constant pool is not emitted inside of the return
466 // sequence.
467 {
468 int32_t arg_count = info_->scope()->num_parameters() + 1;
469 int32_t sp_delta = arg_count * kPointerSize;
470 SetReturnPosition(literal());
471 __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta);
472
473 __ Ret();
474 }
475 }
476 }
477
478 void FullCodeGenerator::RestoreContext() {
479 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
480 }
481
482 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
483 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
484 codegen()->GetVar(result_register(), var);
485 codegen()->PushOperand(result_register());
486 }
487
488 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {}
489
490 void FullCodeGenerator::AccumulatorValueContext::Plug(
491 Heap::RootListIndex index) const {
492 __ LoadRoot(result_register(), index);
493 }
494
495 void FullCodeGenerator::StackValueContext::Plug(
496 Heap::RootListIndex index) const {
497 __ LoadRoot(result_register(), index);
498 codegen()->PushOperand(result_register());
499 }
500
501 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
502 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
503 false_label_);
504 if (index == Heap::kUndefinedValueRootIndex ||
505 index == Heap::kNullValueRootIndex ||
506 index == Heap::kFalseValueRootIndex) {
507 if (false_label_ != fall_through_) __ b(false_label_);
508 } else if (index == Heap::kTrueValueRootIndex) {
509 if (true_label_ != fall_through_) __ b(true_label_);
510 } else {
511 __ LoadRoot(result_register(), index);
512 codegen()->DoTest(this);
513 }
514 }
515
516 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {}
517
518 void FullCodeGenerator::AccumulatorValueContext::Plug(
519 Handle<Object> lit) const {
520 __ mov(result_register(), Operand(lit));
521 }
522
523 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
524 // Immediates cannot be pushed directly.
525 __ mov(result_register(), Operand(lit));
526 codegen()->PushOperand(result_register());
527 }
528
529 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
530 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
531 false_label_);
532 DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
533 !lit->IsUndetectable());
534 if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
535 lit->IsFalse(isolate())) {
536 if (false_label_ != fall_through_) __ b(false_label_);
537 } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
538 if (true_label_ != fall_through_) __ b(true_label_);
539 } else if (lit->IsString()) {
540 if (String::cast(*lit)->length() == 0) {
541 if (false_label_ != fall_through_) __ b(false_label_);
542 } else {
543 if (true_label_ != fall_through_) __ b(true_label_);
544 }
545 } else if (lit->IsSmi()) {
546 if (Smi::cast(*lit)->value() == 0) {
547 if (false_label_ != fall_through_) __ b(false_label_);
548 } else {
549 if (true_label_ != fall_through_) __ b(true_label_);
550 }
551 } else {
552 // For simplicity we always test the accumulator register.
553 __ mov(result_register(), Operand(lit));
554 codegen()->DoTest(this);
555 }
556 }
557
558 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
559 Register reg) const {
560 DCHECK(count > 0);
561 if (count > 1) codegen()->DropOperands(count - 1);
562 __ StoreP(reg, MemOperand(sp, 0));
563 }
564
565 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
566 Label* materialize_false) const {
567 DCHECK(materialize_true == materialize_false);
568 __ bind(materialize_true);
569 }
570
571 void FullCodeGenerator::AccumulatorValueContext::Plug(
572 Label* materialize_true, Label* materialize_false) const {
573 Label done;
574 __ bind(materialize_true);
575 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
576 __ b(&done, Label::kNear);
577 __ bind(materialize_false);
578 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
579 __ bind(&done);
580 }
581
582 void FullCodeGenerator::StackValueContext::Plug(
583 Label* materialize_true, Label* materialize_false) const {
584 Label done;
585 __ bind(materialize_true);
586 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
587 __ b(&done, Label::kNear);
588 __ bind(materialize_false);
589 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
590 __ bind(&done);
591 codegen()->PushOperand(ip);
592 }
593
594 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
595 Label* materialize_false) const {
596 DCHECK(materialize_true == true_label_);
597 DCHECK(materialize_false == false_label_);
598 }
599
600 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
601 Heap::RootListIndex value_root_index =
602 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
603 __ LoadRoot(result_register(), value_root_index);
604 }
605
606 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
607 Heap::RootListIndex value_root_index =
608 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
609 __ LoadRoot(ip, value_root_index);
610 codegen()->PushOperand(ip);
611 }
612
613 void FullCodeGenerator::TestContext::Plug(bool flag) const {
614 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
615 false_label_);
616 if (flag) {
617 if (true_label_ != fall_through_) __ b(true_label_);
618 } else {
619 if (false_label_ != fall_through_) __ b(false_label_);
620 }
621 }
622
623 void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
624 Label* if_false, Label* fall_through) {
625 Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
626 CallIC(ic, condition->test_id());
627 __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
628 Split(eq, if_true, if_false, fall_through);
629 }
630
631 void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false,
632 Label* fall_through) {
633 if (if_false == fall_through) {
634 __ b(cond, if_true);
635 } else if (if_true == fall_through) {
636 __ b(NegateCondition(cond), if_false);
637 } else {
638 __ b(cond, if_true);
639 __ b(if_false);
640 }
641 }
642
643 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
644 DCHECK(var->IsStackAllocated());
645 // Offset is negative because higher indexes are at lower addresses.
646 int offset = -var->index() * kPointerSize;
647 // Adjust by a (parameter or local) base offset.
648 if (var->IsParameter()) {
649 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
650 } else {
651 offset += JavaScriptFrameConstants::kLocal0Offset;
652 }
653 return MemOperand(fp, offset);
654 }
655
656 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
657 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
658 if (var->IsContextSlot()) {
659 int context_chain_length = scope()->ContextChainLength(var->scope());
660 __ LoadContext(scratch, context_chain_length);
661 return ContextMemOperand(scratch, var->index());
662 } else {
663 return StackOperand(var);
664 }
665 }
666
667 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
668 // Use destination as scratch.
669 MemOperand location = VarOperand(var, dest);
670 __ LoadP(dest, location, r0);
671 }
672
673 void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0,
674 Register scratch1) {
675 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
676 DCHECK(!scratch0.is(src));
677 DCHECK(!scratch0.is(scratch1));
678 DCHECK(!scratch1.is(src));
679 MemOperand location = VarOperand(var, scratch0);
680 __ StoreP(src, location);
681
682 // Emit the write barrier code if the location is in the heap.
683 if (var->IsContextSlot()) {
684 __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
685 kLRHasBeenSaved, kDontSaveFPRegs);
686 }
687 }
688
689 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
690 bool should_normalize,
691 Label* if_true,
692 Label* if_false) {
693 // Only prepare for bailouts before splits if we're in a test
694 // context. Otherwise, we let the Visit function deal with the
695 // preparation to avoid preparing with the same AST id twice.
696 if (!context()->IsTest()) return;
697
698 Label skip;
699 if (should_normalize) __ b(&skip);
700 PrepareForBailout(expr, BailoutState::TOS_REGISTER);
701 if (should_normalize) {
702 __ CompareRoot(r2, Heap::kTrueValueRootIndex);
703 Split(eq, if_true, if_false, NULL);
704 __ bind(&skip);
705 }
706 }
707
708 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
709 // The variable in the declaration always resides in the current function
710 // context.
711 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
712 if (FLAG_debug_code) {
713 // Check that we're not inside a with or catch context.
714 __ LoadP(r3, FieldMemOperand(cp, HeapObject::kMapOffset));
715 __ CompareRoot(r3, Heap::kWithContextMapRootIndex);
716 __ Check(ne, kDeclarationInWithContext);
717 __ CompareRoot(r3, Heap::kCatchContextMapRootIndex);
718 __ Check(ne, kDeclarationInCatchContext);
719 }
720 }
721
722 void FullCodeGenerator::VisitVariableDeclaration(
723 VariableDeclaration* declaration) {
724 VariableProxy* proxy = declaration->proxy();
725 Variable* variable = proxy->var();
726 switch (variable->location()) {
727 case VariableLocation::UNALLOCATED: {
728 DCHECK(!variable->binding_needs_init());
729 FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
730 DCHECK(!slot.IsInvalid());
731 globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
732 globals_->Add(isolate()->factory()->undefined_value(), zone());
733 break;
734 }
735 case VariableLocation::PARAMETER:
736 case VariableLocation::LOCAL:
737 if (variable->binding_needs_init()) {
738 Comment cmnt(masm_, "[ VariableDeclaration");
739 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
740 __ StoreP(ip, StackOperand(variable));
741 }
742 break;
743
744 case VariableLocation::CONTEXT:
745 if (variable->binding_needs_init()) {
746 Comment cmnt(masm_, "[ VariableDeclaration");
747 EmitDebugCheckDeclarationContext(variable);
748 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
749 __ StoreP(ip, ContextMemOperand(cp, variable->index()));
750 // No write barrier since the_hole_value is in old space.
751 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
752 }
753 break;
754
755 case VariableLocation::LOOKUP: {
756 Comment cmnt(masm_, "[ VariableDeclaration");
757 DCHECK_EQ(VAR, variable->mode());
758 DCHECK(!variable->binding_needs_init());
759 __ mov(r4, Operand(variable->name()));
760 __ Push(r4);
761 __ CallRuntime(Runtime::kDeclareEvalVar);
762 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
763 break;
764 }
765
766 case VariableLocation::MODULE:
767 UNREACHABLE();
768 }
769 }
770
771 void FullCodeGenerator::VisitFunctionDeclaration(
772 FunctionDeclaration* declaration) {
773 VariableProxy* proxy = declaration->proxy();
774 Variable* variable = proxy->var();
775 switch (variable->location()) {
776 case VariableLocation::UNALLOCATED: {
777 FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
778 DCHECK(!slot.IsInvalid());
779 globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
780 Handle<SharedFunctionInfo> function =
781 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
782 // Check for stack-overflow exception.
783 if (function.is_null()) return SetStackOverflow();
784 globals_->Add(function, zone());
785 break;
786 }
787
788 case VariableLocation::PARAMETER:
789 case VariableLocation::LOCAL: {
790 Comment cmnt(masm_, "[ FunctionDeclaration");
791 VisitForAccumulatorValue(declaration->fun());
792 __ StoreP(result_register(), StackOperand(variable));
793 break;
794 }
795
796 case VariableLocation::CONTEXT: {
797 Comment cmnt(masm_, "[ FunctionDeclaration");
798 EmitDebugCheckDeclarationContext(variable);
799 VisitForAccumulatorValue(declaration->fun());
800 __ StoreP(result_register(), ContextMemOperand(cp, variable->index()));
801 int offset = Context::SlotOffset(variable->index());
802 // We know that we have written a function, which is not a smi.
803 __ RecordWriteContextSlot(cp, offset, result_register(), r4,
804 kLRHasBeenSaved, kDontSaveFPRegs,
805 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
806 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
807 break;
808 }
809
810 case VariableLocation::LOOKUP: {
811 Comment cmnt(masm_, "[ FunctionDeclaration");
812 __ mov(r4, Operand(variable->name()));
813 PushOperand(r4);
814 // Push initial value for function declaration.
815 VisitForStackValue(declaration->fun());
816 CallRuntimeWithOperands(Runtime::kDeclareEvalFunction);
817 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
818 break;
819 }
820
821 case VariableLocation::MODULE:
822 UNREACHABLE();
823 }
824 }
825
826 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
827 // Call the runtime to declare the globals.
828 __ mov(r3, Operand(pairs));
829 __ LoadSmiLiteral(r2, Smi::FromInt(DeclareGlobalsFlags()));
830 __ EmitLoadTypeFeedbackVector(r4);
831 __ Push(r3, r2, r4);
832 __ CallRuntime(Runtime::kDeclareGlobals);
833 // Return value is ignored.
834 }
835
836 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
837 Comment cmnt(masm_, "[ SwitchStatement");
838 Breakable nested_statement(this, stmt);
839 SetStatementPosition(stmt);
840
841 // Keep the switch value on the stack until a case matches.
842 VisitForStackValue(stmt->tag());
843 PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
844
845 ZoneList<CaseClause*>* clauses = stmt->cases();
846 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
847
848 Label next_test; // Recycled for each test.
849 // Compile all the tests with branches to their bodies.
850 for (int i = 0; i < clauses->length(); i++) {
851 CaseClause* clause = clauses->at(i);
852 clause->body_target()->Unuse();
853
854 // The default is not a test, but remember it as the final fall-through.
855 if (clause->is_default()) {
856 default_clause = clause;
857 continue;
858 }
859
860 Comment cmnt(masm_, "[ Case comparison");
861 __ bind(&next_test);
862 next_test.Unuse();
863
864 // Compile the label expression.
865 VisitForAccumulatorValue(clause->label());
866
867 // Perform the comparison as if via '==='.
868 __ LoadP(r3, MemOperand(sp, 0)); // Switch value.
869 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
870 JumpPatchSite patch_site(masm_);
871 if (inline_smi_code) {
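// Fast path: if both the switch value and the label are smis, compare them
// directly; the JumpPatchSite lets the IC patch this test later.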
872 Label slow_case;
873 __ LoadRR(r4, r2);
874 __ OrP(r4, r3);
875 patch_site.EmitJumpIfNotSmi(r4, &slow_case);
876
877 __ CmpP(r3, r2);
878 __ bne(&next_test);
879 __ Drop(1); // Switch value is no longer needed.
880 __ b(clause->body_target());
881 __ bind(&slow_case);
882 }
883
884 // Record position before stub call for type feedback.
885 SetExpressionPosition(clause);
886 Handle<Code> ic =
887 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
888 CallIC(ic, clause->CompareId());
889 patch_site.EmitPatchInfo();
890
891 Label skip;
892 __ b(&skip);
893 PrepareForBailout(clause, BailoutState::TOS_REGISTER);
894 __ CompareRoot(r2, Heap::kTrueValueRootIndex);
895 __ bne(&next_test);
896 __ Drop(1);
897 __ b(clause->body_target());
898 __ bind(&skip);
899
900 __ CmpP(r2, Operand::Zero());
901 __ bne(&next_test);
902 __ Drop(1); // Switch value is no longer needed.
903 __ b(clause->body_target());
904 }
905
906 // Discard the test value and jump to the default if present, otherwise to
907 // the end of the statement.
908 __ bind(&next_test);
909 DropOperands(1); // Switch value is no longer needed.
910 if (default_clause == NULL) {
911 __ b(nested_statement.break_label());
912 } else {
913 __ b(default_clause->body_target());
914 }
915
916 // Compile all the case bodies.
917 for (int i = 0; i < clauses->length(); i++) {
918 Comment cmnt(masm_, "[ Case body");
919 CaseClause* clause = clauses->at(i);
920 __ bind(clause->body_target());
921 PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
922 VisitStatements(clause->statements());
923 }
924
925 __ bind(nested_statement.break_label());
926 PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
927 }
928
929 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
930 Comment cmnt(masm_, "[ ForInStatement");
931 SetStatementPosition(stmt, SKIP_BREAK);
932
933 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
934
935 // Get the object to enumerate over.
936 SetExpressionAsStatementPosition(stmt->enumerable());
937 VisitForAccumulatorValue(stmt->enumerable());
938 OperandStackDepthIncrement(5);
939
940 Label loop, exit;
941 Iteration loop_statement(this, stmt);
942 increment_loop_depth();
943
944 // If the object is null or undefined, skip over the loop, otherwise convert
945 // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
946 Label convert, done_convert;
947 __ JumpIfSmi(r2, &convert);
948 __ CompareObjectType(r2, r3, r3, FIRST_JS_RECEIVER_TYPE);
949 __ bge(&done_convert);
950 __ CompareRoot(r2, Heap::kNullValueRootIndex);
951 __ beq(&exit);
952 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
953 __ beq(&exit);
954 __ bind(&convert);
955 __ Call(isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET);
956 RestoreContext();
957 __ bind(&done_convert);
958 PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
959 __ push(r2);
960
961 // Check cache validity in generated code. If we cannot guarantee cache
962 // validity, call the runtime system to check cache validity or get the
963 // property names in a fixed array. Note: Proxies never have an enum cache,
964 // so they will always take the slow path.
965 Label call_runtime;
966 __ CheckEnumCache(&call_runtime);
967
968 // The enum cache is valid. Load the map of the object being
969 // iterated over and use the cache for the iteration.
970 Label use_cache;
971 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
972 __ b(&use_cache);
973
974 // Get the set of properties to enumerate.
975 __ bind(&call_runtime);
976 __ push(r2); // Duplicate the enumerable object on the stack.
977 __ CallRuntime(Runtime::kForInEnumerate);
978 PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
979
980 // If we got a map from the runtime call, we can do a fast
981 // modification check. Otherwise, we got a fixed array, and we have
982 // to do a slow check.
983 Label fixed_array;
984 __ LoadP(r4, FieldMemOperand(r2, HeapObject::kMapOffset));
985 __ CompareRoot(r4, Heap::kMetaMapRootIndex);
986 __ bne(&fixed_array);
987
988 // We got a map in register r2. Get the enumeration cache from it.
989 Label no_descriptors;
990 __ bind(&use_cache);
991
992 __ EnumLength(r3, r2);
993 __ CmpSmiLiteral(r3, Smi::kZero, r0);
994 __ beq(&no_descriptors, Label::kNear);
995
996 __ LoadInstanceDescriptors(r2, r4);
997 __ LoadP(r4, FieldMemOperand(r4, DescriptorArray::kEnumCacheOffset));
998 __ LoadP(r4,
999 FieldMemOperand(r4, DescriptorArray::kEnumCacheBridgeCacheOffset));
1000
1001 // Set up the four remaining stack slots.
1002 __ push(r2); // Map.
1003 __ LoadSmiLiteral(r2, Smi::kZero);
1004 // Push enumeration cache, enumeration cache length (as smi) and zero.
1005 __ Push(r4, r3, r2);
1006 __ b(&loop);
1007
1008 __ bind(&no_descriptors);
1009 __ Drop(1);
1010 __ b(&exit);
1011
1012 // We got a fixed array in register r2. Iterate through that.
1013 __ bind(&fixed_array);
1014
1015 __ LoadSmiLiteral(r3, Smi::FromInt(1)); // Smi(1) indicates slow check
1016 __ Push(r3, r2); // Smi and array
1017 __ LoadP(r3, FieldMemOperand(r2, FixedArray::kLengthOffset));
1018 __ Push(r3); // Fixed array length (as smi).
1019 PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
1020 __ LoadSmiLiteral(r2, Smi::kZero);
1021 __ Push(r2); // Initial index.
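// The for-in loop now expects five stack slots (from top): index, length,
// the fixed array (or enum cache), Smi(1) (or the map), and the enumerable.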
1022
1023 // Generate code for doing the condition check.
1024 __ bind(&loop);
1025 SetExpressionAsStatementPosition(stmt->each());
1026
1027 // Load the current count to r2, load the length to r3.
1028 __ LoadP(r2, MemOperand(sp, 0 * kPointerSize));
1029 __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
1030 __ CmpLogicalP(r2, r3); // Compare to the array length.
1031 __ bge(loop_statement.break_label());
1032
1033 // Get the current entry of the array into register r5.
1034 __ LoadP(r4, MemOperand(sp, 2 * kPointerSize));
1035 __ AddP(r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1036 __ SmiToPtrArrayOffset(r5, r2);
1037 __ LoadP(r5, MemOperand(r5, r4));
1038
1039 // Get the expected map (or, in the permanent slow case, a smi)
1040 // from the stack into register r4.
1041 __ LoadP(r4, MemOperand(sp, 3 * kPointerSize));
1042
1043 // Check if the expected map still matches that of the enumerable.
1044 // If not, we may have to filter the key.
1045 Label update_each;
1046 __ LoadP(r3, MemOperand(sp, 4 * kPointerSize));
1047 __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
1048 __ CmpP(r6, r4);
1049 __ beq(&update_each);
1050
1051 // We need to filter the key, record slow-path here.
1052 int const vector_index = SmiFromSlot(slot)->value();
1053 __ EmitLoadTypeFeedbackVector(r2);
1054 __ mov(r4, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1055 __ StoreP(
1056 r4, FieldMemOperand(r2, FixedArray::OffsetOfElementAt(vector_index)), r0);
1057
1058 // Convert the entry to a string or (smi) 0 if it isn't a property
1059 // any more. If the property has been removed while iterating, we
1060 // just skip it.
1061 __ Push(r3, r5); // Enumerable and current entry.
1062 __ CallRuntime(Runtime::kForInFilter);
1063 PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
1064 __ LoadRR(r5, r2);
1065 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
1066 __ CmpP(r2, r0);
1067 __ beq(loop_statement.continue_label());
1068
1069 // Update the 'each' property or variable from the possibly filtered
1070 // entry in register r5.
1071 __ bind(&update_each);
1072 __ LoadRR(result_register(), r5);
1073 // Perform the assignment as if via '='.
1074 {
1075 EffectContext context(this);
1076 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1077 PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
1078 }
1079
1080 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1081 PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
1082 // Generate code for the body of the loop.
1083 Visit(stmt->body());
1084
1085 // Generate code for going to the next element by incrementing
1086 // the index (smi) stored on top of the stack.
1087 __ bind(loop_statement.continue_label());
1088 PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
1089 __ pop(r2);
1090 __ AddSmiLiteral(r2, r2, Smi::FromInt(1), r0);
1091 __ push(r2);
1092
1093 EmitBackEdgeBookkeeping(stmt, &loop);
1094 __ b(&loop);
1095
1096 // Remove the pointers stored on the stack.
1097 __ bind(loop_statement.break_label());
1098 DropOperands(5);
1099
1100 // Exit and decrement the loop depth.
1101 PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
1102 __ bind(&exit);
1103 decrement_loop_depth();
1104 }
1105
1106 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1107 FeedbackVectorSlot slot) {
1108 DCHECK(NeedsHomeObject(initializer));
1109 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1110 __ LoadP(StoreDescriptor::ValueRegister(),
1111 MemOperand(sp, offset * kPointerSize));
1112 CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1113 }
1114
1115 void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1116 int offset,
1117 FeedbackVectorSlot slot) {
1118 DCHECK(NeedsHomeObject(initializer));
1119 __ Move(StoreDescriptor::ReceiverRegister(), r2);
1120 __ LoadP(StoreDescriptor::ValueRegister(),
1121 MemOperand(sp, offset * kPointerSize));
1122 CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1123 }
1124
1125 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1126 TypeofMode typeof_mode,
1127 Label* slow) {
1128 Register current = cp;
1129 Register next = r3;
1130 Register temp = r4;
1131
1132 int to_check = scope()->ContextChainLengthUntilOutermostSloppyEval();
1133 for (Scope* s = scope(); to_check > 0; s = s->outer_scope()) {
1134 if (!s->NeedsContext()) continue;
1135 if (s->calls_sloppy_eval()) {
1136 // Check that extension is "the hole".
1137 __ LoadP(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1138 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1139 }
1140 // Load next context in chain.
1141 __ LoadP(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1142 // Walk the rest of the chain without clobbering cp.
1143 current = next;
1144 to_check--;
1145 }
1146
1147 // All extension objects were empty and it is safe to use a normal global
1148 // load machinery.
1149 EmitGlobalVariableLoad(proxy, typeof_mode);
1150 }
1151
1152 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1153 Label* slow) {
1154 DCHECK(var->IsContextSlot());
1155 Register context = cp;
1156 Register next = r5;
1157 Register temp = r6;
1158
1159 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1160 if (s->NeedsContext()) {
1161 if (s->calls_sloppy_eval()) {
1162 // Check that extension is "the hole".
1163 __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1164 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1165 }
1166 __ LoadP(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1167 // Walk the rest of the chain without clobbering cp.
1168 context = next;
1169 }
1170 }
1171 // Check that last extension is "the hole".
1172 __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1173 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1174
1175 // This function is used only for loads, not stores, so it's safe to
1176 // return a cp-based operand (the write barrier cannot be allowed to
1177 // destroy the cp register).
1178 return ContextMemOperand(context, var->index());
1179 }
1180
1181 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1182 TypeofMode typeof_mode,
1183 Label* slow, Label* done) {
1184 // Generate fast-case code for variables that might be shadowed by
1185 // eval-introduced variables. Eval is used a lot without
1186 // introducing variables. In those cases, we do not want to
1187 // perform a runtime call for all variables in the scope
1188 // containing the eval.
1189 Variable* var = proxy->var();
1190 if (var->mode() == DYNAMIC_GLOBAL) {
1191 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1192 __ b(done);
1193 } else if (var->mode() == DYNAMIC_LOCAL) {
1194 Variable* local = var->local_if_not_shadowed();
1195 __ LoadP(r2, ContextSlotOperandCheckExtensions(local, slow));
1196 if (local->binding_needs_init()) {
1197 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
1198 __ bne(done);
1199 __ mov(r2, Operand(var->name()));
1200 __ push(r2);
1201 __ CallRuntime(Runtime::kThrowReferenceError);
1202 } else {
1203 __ b(done);
1204 }
1205 }
1206 }
1207
1208 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1209 TypeofMode typeof_mode) {
1210 // Record position before possible IC call.
1211 SetExpressionPosition(proxy);
1212 PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
1213 Variable* var = proxy->var();
1214
1215 // Three cases: global variables, lookup variables, and all other types of
1216 // variables.
1217 switch (var->location()) {
1218 case VariableLocation::UNALLOCATED: {
1219 Comment cmnt(masm_, "[ Global variable");
1220 EmitGlobalVariableLoad(proxy, typeof_mode);
1221 context()->Plug(r2);
1222 break;
1223 }
1224
1225 case VariableLocation::PARAMETER:
1226 case VariableLocation::LOCAL:
1227 case VariableLocation::CONTEXT: {
1228 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1229 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1230 : "[ Stack variable");
1231 if (proxy->hole_check_mode() == HoleCheckMode::kRequired) {
1232 // Throw a reference error when using an uninitialized let/const
1233 // binding in harmony mode.
1234 Label done;
1235 GetVar(r2, var);
1236 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
1237 __ bne(&done);
1238 __ mov(r2, Operand(var->name()));
1239 __ push(r2);
1240 __ CallRuntime(Runtime::kThrowReferenceError);
1241 __ bind(&done);
1242 context()->Plug(r2);
1243 break;
1244 }
1245 context()->Plug(var);
1246 break;
1247 }
1248
1249 case VariableLocation::LOOKUP: {
1250 Comment cmnt(masm_, "[ Lookup variable");
1251 Label done, slow;
1252 // Generate code for loading from variables potentially shadowed
1253 // by eval-introduced variables.
1254 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1255 __ bind(&slow);
1256 __ Push(var->name());
1257 Runtime::FunctionId function_id =
1258 typeof_mode == NOT_INSIDE_TYPEOF
1259 ? Runtime::kLoadLookupSlot
1260 : Runtime::kLoadLookupSlotInsideTypeof;
1261 __ CallRuntime(function_id);
1262 __ bind(&done);
1263 context()->Plug(r2);
1264 break;
1265 }
1266
1267 case VariableLocation::MODULE:
1268 UNREACHABLE();
1269 }
1270 }
1271
1272 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1273 Expression* expression = (property == NULL) ? NULL : property->value();
1274 if (expression == NULL) {
1275 __ LoadRoot(r3, Heap::kNullValueRootIndex);
1276 PushOperand(r3);
1277 } else {
1278 VisitForStackValue(expression);
1279 if (NeedsHomeObject(expression)) {
1280 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1281 property->kind() == ObjectLiteral::Property::SETTER);
1282 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1283 EmitSetHomeObject(expression, offset, property->GetSlot());
1284 }
1285 }
1286 }
1287
1288 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1289 Comment cmnt(masm_, "[ ObjectLiteral");
1290
1291 Handle<FixedArray> constant_properties = expr->constant_properties();
1292 __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1293 __ LoadSmiLiteral(r4, Smi::FromInt(expr->literal_index()));
1294 __ mov(r3, Operand(constant_properties));
1295 int flags = expr->ComputeFlags();
1296 __ LoadSmiLiteral(r2, Smi::FromInt(flags));
1297 if (MustCreateObjectLiteralWithRuntime(expr)) {
1298 __ Push(r5, r4, r3, r2);
1299 __ CallRuntime(Runtime::kCreateObjectLiteral);
1300 } else {
1301 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1302 __ CallStub(&stub);
1303 RestoreContext();
1304 }
1305 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1306
1307 // If result_saved is true the result is on top of the stack. If
1308 // result_saved is false the result is in r2.
1309 bool result_saved = false;
1310
1311 AccessorTable accessor_table(zone());
1312 int property_index = 0;
1313 for (; property_index < expr->properties()->length(); property_index++) {
1314 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1315 if (property->is_computed_name()) break;
1316 if (property->IsCompileTimeValue()) continue;
1317
1318 Literal* key = property->key()->AsLiteral();
1319 Expression* value = property->value();
1320 if (!result_saved) {
1321 PushOperand(r2); // Save result on stack
1322 result_saved = true;
1323 }
1324 switch (property->kind()) {
1325 case ObjectLiteral::Property::CONSTANT:
1326 UNREACHABLE();
1327 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1328 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1329 // Fall through.
1330 case ObjectLiteral::Property::COMPUTED:
1331 // It is safe to use [[Put]] here because the boilerplate already
1332 // contains computed properties with an uninitialized value.
1333 if (key->IsStringLiteral()) {
1334 DCHECK(key->IsPropertyName());
1335 if (property->emit_store()) {
1336 VisitForAccumulatorValue(value);
1337 DCHECK(StoreDescriptor::ValueRegister().is(r2));
1338 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1339 CallStoreIC(property->GetSlot(0), key->value());
1340 PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
1341
1342 if (NeedsHomeObject(value)) {
1343 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1344 }
1345 } else {
1346 VisitForEffect(value);
1347 }
1348 break;
1349 }
1350 // Duplicate receiver on stack.
1351 __ LoadP(r2, MemOperand(sp));
1352 PushOperand(r2);
1353 VisitForStackValue(key);
1354 VisitForStackValue(value);
1355 if (property->emit_store()) {
1356 if (NeedsHomeObject(value)) {
1357 EmitSetHomeObject(value, 2, property->GetSlot());
1358 }
1359 __ LoadSmiLiteral(r2, Smi::FromInt(SLOPPY)); // PropertyAttributes
1360 PushOperand(r2);
1361 CallRuntimeWithOperands(Runtime::kSetProperty);
1362 } else {
1363 DropOperands(3);
1364 }
1365 break;
1366 case ObjectLiteral::Property::PROTOTYPE:
1367 // Duplicate receiver on stack.
1368 __ LoadP(r2, MemOperand(sp));
1369 PushOperand(r2);
1370 VisitForStackValue(value);
1371 DCHECK(property->emit_store());
1372 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1373 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1374 BailoutState::NO_REGISTERS);
1375 break;
1376 case ObjectLiteral::Property::GETTER:
1377 if (property->emit_store()) {
1378 AccessorTable::Iterator it = accessor_table.lookup(key);
1379 it->second->bailout_id = expr->GetIdForPropertySet(property_index);
1380 it->second->getter = property;
1381 }
1382 break;
1383 case ObjectLiteral::Property::SETTER:
1384 if (property->emit_store()) {
1385 AccessorTable::Iterator it = accessor_table.lookup(key);
1386 it->second->bailout_id = expr->GetIdForPropertySet(property_index);
1387 it->second->setter = property;
1388 }
1389 break;
1390 }
1391 }
1392
1393 // Emit code to define accessors, using only a single call to the runtime for
1394 // each pair of corresponding getters and setters.
1395 for (AccessorTable::Iterator it = accessor_table.begin();
1396 it != accessor_table.end(); ++it) {
1397 __ LoadP(r2, MemOperand(sp)); // Duplicate receiver.
1398 PushOperand(r2);
1399 VisitForStackValue(it->first);
1400 EmitAccessor(it->second->getter);
1401 EmitAccessor(it->second->setter);
1402 __ LoadSmiLiteral(r2, Smi::FromInt(NONE));
1403 PushOperand(r2);
1404 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
1405 PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
1406 }
1407
1408 // Object literals have two parts. The "static" part on the left contains no
1409 // computed property names, and so we can compute its map ahead of time; see
1410 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1411 // starts with the first computed property name, and continues with all
1412 // properties to its right. All the code from above initializes the static
1413 // component of the object literal, and arranges for the map of the result to
1414 // reflect the static order in which the keys appear. For the dynamic
1415 // properties, we compile them into a series of "SetOwnProperty" runtime
1416 // calls. This will preserve insertion order.
1417 for (; property_index < expr->properties()->length(); property_index++) {
1418 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1419
1420 Expression* value = property->value();
1421 if (!result_saved) {
1422 PushOperand(r2); // Save result on the stack
1423 result_saved = true;
1424 }
1425
1426 __ LoadP(r2, MemOperand(sp)); // Duplicate receiver.
1427 PushOperand(r2);
1428
1429 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1430 DCHECK(!property->is_computed_name());
1431 VisitForStackValue(value);
1432 DCHECK(property->emit_store());
1433 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1434 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1435 BailoutState::NO_REGISTERS);
1436 } else {
1437 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1438 VisitForStackValue(value);
1439 if (NeedsHomeObject(value)) {
1440 EmitSetHomeObject(value, 2, property->GetSlot());
1441 }
1442
1443 switch (property->kind()) {
1444 case ObjectLiteral::Property::CONSTANT:
1445 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1446 case ObjectLiteral::Property::COMPUTED:
1447 if (property->emit_store()) {
1448 PushOperand(Smi::FromInt(NONE));
1449 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1450 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
1451 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1452 BailoutState::NO_REGISTERS);
1453 } else {
1454 DropOperands(3);
1455 }
1456 break;
1457
1458 case ObjectLiteral::Property::PROTOTYPE:
1459 UNREACHABLE();
1460 break;
1461
1462 case ObjectLiteral::Property::GETTER:
1463 PushOperand(Smi::FromInt(NONE));
1464 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
1465 break;
1466
1467 case ObjectLiteral::Property::SETTER:
1468 PushOperand(Smi::FromInt(NONE));
1469 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
1470 break;
1471 }
1472 }
1473 }
1474
1475 if (result_saved) {
1476 context()->PlugTOS();
1477 } else {
1478 context()->Plug(r2);
1479 }
1480 }
1481
1482 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1483 Comment cmnt(masm_, "[ ArrayLiteral");
1484
1485 Handle<FixedArray> constant_elements = expr->constant_elements();
1486 bool has_fast_elements =
1487 IsFastObjectElementsKind(expr->constant_elements_kind());
1488 Handle<FixedArrayBase> constant_elements_values(
1489 FixedArrayBase::cast(constant_elements->get(1)));
1490
1491 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1492 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1493    // If allocation site info is only used for element-kind transitions, we
1494    // can turn tracking off when there is no element kind left to transition to.
1495 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1496 }
1497
1498 __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1499 __ LoadSmiLiteral(r4, Smi::FromInt(expr->literal_index()));
1500 __ mov(r3, Operand(constant_elements));
1501 if (MustCreateArrayLiteralWithRuntime(expr)) {
1502 __ LoadSmiLiteral(r2, Smi::FromInt(expr->ComputeFlags()));
1503 __ Push(r5, r4, r3, r2);
1504 __ CallRuntime(Runtime::kCreateArrayLiteral);
1505 } else {
1506 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1507 __ CallStub(&stub);
1508 RestoreContext();
1509 }
1510 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1511
1512 bool result_saved = false; // Is the result saved to the stack?
1513 ZoneList<Expression*>* subexprs = expr->values();
1514 int length = subexprs->length();
1515
1516 // Emit code to evaluate all the non-constant subexpressions and to store
1517 // them into the newly cloned array.
1518 for (int array_index = 0; array_index < length; array_index++) {
1519 Expression* subexpr = subexprs->at(array_index);
1520 DCHECK(!subexpr->IsSpread());
1521 // If the subexpression is a literal or a simple materialized literal it
1522 // is already set in the cloned array.
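    // For example, in [1, 2, x] the constants 1 and 2 are already part of the
    // boilerplate; only 'x' is evaluated and stored by the keyed store below.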
1523 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1524
1525 if (!result_saved) {
1526 PushOperand(r2);
1527 result_saved = true;
1528 }
1529 VisitForAccumulatorValue(subexpr);
1530
1531 __ LoadSmiLiteral(StoreDescriptor::NameRegister(),
1532 Smi::FromInt(array_index));
1533 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1534 CallKeyedStoreIC(expr->LiteralFeedbackSlot());
1535
1536 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1537 BailoutState::NO_REGISTERS);
1538 }
1539
1540 if (result_saved) {
1541 context()->PlugTOS();
1542 } else {
1543 context()->Plug(r2);
1544 }
1545 }
1546
1547 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1548 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1549
1550 Comment cmnt(masm_, "[ Assignment");
1551
1552 Property* property = expr->target()->AsProperty();
1553 LhsKind assign_type = Property::GetAssignType(property);
1554
1555 // Evaluate LHS expression.
1556 switch (assign_type) {
1557 case VARIABLE:
1558 // Nothing to do here.
1559 break;
1560 case NAMED_PROPERTY:
1561 if (expr->is_compound()) {
1562 // We need the receiver both on the stack and in the register.
1563 VisitForStackValue(property->obj());
1564 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1565 } else {
1566 VisitForStackValue(property->obj());
1567 }
1568 break;
1569 case NAMED_SUPER_PROPERTY:
1570 VisitForStackValue(
1571 property->obj()->AsSuperPropertyReference()->this_var());
1572 VisitForAccumulatorValue(
1573 property->obj()->AsSuperPropertyReference()->home_object());
1574 PushOperand(result_register());
1575 if (expr->is_compound()) {
1576 const Register scratch = r3;
1577 __ LoadP(scratch, MemOperand(sp, kPointerSize));
1578 PushOperands(scratch, result_register());
1579 }
1580 break;
1581 case KEYED_SUPER_PROPERTY: {
1582 VisitForStackValue(
1583 property->obj()->AsSuperPropertyReference()->this_var());
1584 VisitForStackValue(
1585 property->obj()->AsSuperPropertyReference()->home_object());
1586 VisitForAccumulatorValue(property->key());
1587 PushOperand(result_register());
1588 if (expr->is_compound()) {
1589 const Register scratch1 = r4;
1590 const Register scratch2 = r3;
1591 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
1592 __ LoadP(scratch2, MemOperand(sp, 1 * kPointerSize));
1593 PushOperands(scratch1, scratch2, result_register());
1594 }
1595 break;
1596 }
1597 case KEYED_PROPERTY:
1598 if (expr->is_compound()) {
1599 VisitForStackValue(property->obj());
1600 VisitForStackValue(property->key());
1601 __ LoadP(LoadDescriptor::ReceiverRegister(),
1602 MemOperand(sp, 1 * kPointerSize));
1603 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1604 } else {
1605 VisitForStackValue(property->obj());
1606 VisitForStackValue(property->key());
1607 }
1608 break;
1609 }
1610
1611 // For compound assignments we need another deoptimization point after the
1612 // variable/property load.
1613 if (expr->is_compound()) {
1614 {
1615 AccumulatorValueContext context(this);
1616 switch (assign_type) {
1617 case VARIABLE:
1618 EmitVariableLoad(expr->target()->AsVariableProxy());
1619 PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
1620 break;
1621 case NAMED_PROPERTY:
1622 EmitNamedPropertyLoad(property);
1623 PrepareForBailoutForId(property->LoadId(),
1624 BailoutState::TOS_REGISTER);
1625 break;
1626 case NAMED_SUPER_PROPERTY:
1627 EmitNamedSuperPropertyLoad(property);
1628 PrepareForBailoutForId(property->LoadId(),
1629 BailoutState::TOS_REGISTER);
1630 break;
1631 case KEYED_SUPER_PROPERTY:
1632 EmitKeyedSuperPropertyLoad(property);
1633 PrepareForBailoutForId(property->LoadId(),
1634 BailoutState::TOS_REGISTER);
1635 break;
1636 case KEYED_PROPERTY:
1637 EmitKeyedPropertyLoad(property);
1638 PrepareForBailoutForId(property->LoadId(),
1639 BailoutState::TOS_REGISTER);
1640 break;
1641 }
1642 }
1643
1644 Token::Value op = expr->binary_op();
1645 PushOperand(r2); // Left operand goes on the stack.
1646 VisitForAccumulatorValue(expr->value());
1647
1648 AccumulatorValueContext context(this);
1649 if (ShouldInlineSmiCase(op)) {
1650 EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(),
1651 expr->value());
1652 } else {
1653 EmitBinaryOp(expr->binary_operation(), op);
1654 }
1655
1656    // Deoptimization point in case the binary operation has side effects.
1657 PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
1658 } else {
1659 VisitForAccumulatorValue(expr->value());
1660 }
1661
1662 SetExpressionPosition(expr);
1663
1664 // Store the value.
1665 switch (assign_type) {
1666 case VARIABLE: {
1667 VariableProxy* proxy = expr->target()->AsVariableProxy();
1668 EmitVariableAssignment(proxy->var(), expr->op(), expr->AssignmentSlot(),
1669 proxy->hole_check_mode());
1670 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
1671 context()->Plug(r2);
1672 break;
1673 }
1674 case NAMED_PROPERTY:
1675 EmitNamedPropertyAssignment(expr);
1676 break;
1677 case NAMED_SUPER_PROPERTY:
1678 EmitNamedSuperPropertyStore(property);
1679 context()->Plug(r2);
1680 break;
1681 case KEYED_SUPER_PROPERTY:
1682 EmitKeyedSuperPropertyStore(property);
1683 context()->Plug(r2);
1684 break;
1685 case KEYED_PROPERTY:
1686 EmitKeyedPropertyAssignment(expr);
1687 break;
1688 }
1689 }
1690
1691 void FullCodeGenerator::VisitYield(Yield* expr) {
1692 Comment cmnt(masm_, "[ Yield");
1693 SetExpressionPosition(expr);
1694
1695 // Evaluate yielded value first; the initial iterator definition depends on
1696 // this. It stays on the stack while we update the iterator.
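  // For example, for 'yield v' inside 'function* g() { ... }', the value of v
  // is pushed here, the generator is suspended at the continuation label
  // recorded below, and the argument passed to the next g().next(x) (or
  // .return()/.throw()) call arrives in r2 together with the resume mode.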
1697 VisitForStackValue(expr->expression());
1698
1699 Label suspend, continuation, post_runtime, resume, exception;
1700
1701 __ b(&suspend);
1702 __ bind(&continuation);
1703 // When we arrive here, r2 holds the generator object.
1704 __ RecordGeneratorContinuation();
1705 __ LoadP(r3, FieldMemOperand(r2, JSGeneratorObject::kResumeModeOffset));
1706 __ LoadP(r2, FieldMemOperand(r2, JSGeneratorObject::kInputOrDebugPosOffset));
1707 STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn);
1708 STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn);
1709 __ CmpSmiLiteral(r3, Smi::FromInt(JSGeneratorObject::kReturn), r0);
1710 __ blt(&resume);
1711 __ Push(result_register());
1712 __ bgt(&exception);
1713 EmitCreateIteratorResult(true);
1714 EmitUnwindAndReturn();
1715
1716 __ bind(&exception);
1717 __ CallRuntime(expr->rethrow_on_exception() ? Runtime::kReThrow
1718 : Runtime::kThrow);
1719
1720 __ bind(&suspend);
1721 OperandStackDepthIncrement(1); // Not popped on this path.
1722 VisitForAccumulatorValue(expr->generator_object());
1723 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1724 __ LoadSmiLiteral(r3, Smi::FromInt(continuation.pos()));
1725 __ StoreP(r3, FieldMemOperand(r2, JSGeneratorObject::kContinuationOffset),
1726 r0);
1727 __ StoreP(cp, FieldMemOperand(r2, JSGeneratorObject::kContextOffset), r0);
1728 __ LoadRR(r3, cp);
1729 __ RecordWriteField(r2, JSGeneratorObject::kContextOffset, r3, r4,
1730 kLRHasBeenSaved, kDontSaveFPRegs);
1731 __ AddP(r3, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1732 __ CmpP(sp, r3);
1733 __ beq(&post_runtime);
1734 __ push(r2); // generator object
1735 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1736 RestoreContext();
1737 __ bind(&post_runtime);
1738 PopOperand(result_register());
1739 EmitReturnSequence();
1740
1741 __ bind(&resume);
1742 context()->Plug(result_register());
1743 }
1744
1745 void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
1746 OperandStackDepthIncrement(2);
1747 __ Push(reg1, reg2);
1748 }
1749
1750 void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1751 Register reg3) {
1752 OperandStackDepthIncrement(3);
1753 __ Push(reg1, reg2, reg3);
1754 }
1755
1756 void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1757 Register reg3, Register reg4) {
1758 OperandStackDepthIncrement(4);
1759 __ Push(reg1, reg2, reg3, reg4);
1760 }
1761
1762 void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
1763 OperandStackDepthDecrement(2);
1764 __ Pop(reg1, reg2);
1765 }
1766
1767 void FullCodeGenerator::EmitOperandStackDepthCheck() {
1768 if (FLAG_debug_code) {
1769 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1770 operand_stack_depth_ * kPointerSize;
1771 __ SubP(r2, fp, sp);
1772 __ CmpP(r2, Operand(expected_diff));
1773 __ Assert(eq, kUnexpectedStackDepth);
1774 }
1775 }
1776
1777 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
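  // Builds the '{ value, done }' object required by the iterator protocol,
  // e.g. the object returned from each call to a generator's next() method.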
1778 Label allocate, done_allocate;
1779
1780 __ Allocate(JSIteratorResult::kSize, r2, r4, r5, &allocate,
1781 NO_ALLOCATION_FLAGS);
1782 __ b(&done_allocate);
1783
1784 __ bind(&allocate);
1785 __ Push(Smi::FromInt(JSIteratorResult::kSize));
1786 __ CallRuntime(Runtime::kAllocateInNewSpace);
1787
1788 __ bind(&done_allocate);
1789 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r3);
1790 PopOperand(r4);
1791 __ LoadRoot(r5,
1792 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1793 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
1794 __ StoreP(r3, FieldMemOperand(r2, HeapObject::kMapOffset), r0);
1795 __ StoreP(r6, FieldMemOperand(r2, JSObject::kPropertiesOffset), r0);
1796 __ StoreP(r6, FieldMemOperand(r2, JSObject::kElementsOffset), r0);
1797 __ StoreP(r4, FieldMemOperand(r2, JSIteratorResult::kValueOffset), r0);
1798 __ StoreP(r5, FieldMemOperand(r2, JSIteratorResult::kDoneOffset), r0);
1799 }
1800
1801 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1802 Token::Value op,
1803 Expression* left_expr,
1804 Expression* right_expr) {
1805 Label done, smi_case, stub_call;
1806
1807 Register scratch1 = r4;
1808 Register scratch2 = r5;
1809
1810 // Get the arguments.
1811 Register left = r3;
1812 Register right = r2;
1813 PopOperand(left);
1814
1815 // Perform combined smi check on both operands.
1816 __ LoadRR(scratch1, right);
1817 __ OrP(scratch1, left);
1818 STATIC_ASSERT(kSmiTag == 0);
1819 JumpPatchSite patch_site(masm_);
1820 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1821
1822 __ bind(&stub_call);
1823 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1824 CallIC(code, expr->BinaryOperationFeedbackId());
1825 patch_site.EmitPatchInfo();
1826 __ b(&done);
1827
1828 __ bind(&smi_case);
1829 // Smi case. This code works the same way as the smi-smi case in the type
1830 // recording binary operation stub.
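  // For example, for 'a + b' with both operands being smis, the tagged values
  // are combined directly below; on overflow or a non-smi operand we branch
  // back to the BinaryOpIC stub call above.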
1831 switch (op) {
1832 case Token::SAR:
1833 __ GetLeastBitsFromSmi(scratch1, right, 5);
1834 __ ShiftRightArithP(right, left, scratch1);
1835 __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize));
1836 break;
1837 case Token::SHL: {
1838 __ GetLeastBitsFromSmi(scratch2, right, 5);
1839 #if V8_TARGET_ARCH_S390X
1840 __ ShiftLeftP(right, left, scratch2);
1841 #else
1842 __ SmiUntag(scratch1, left);
1843 __ ShiftLeftP(scratch1, scratch1, scratch2);
1844 // Check that the *signed* result fits in a smi
1845 __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call);
1846 __ SmiTag(right, scratch1);
1847 #endif
1848 break;
1849 }
1850 case Token::SHR: {
1851 __ SmiUntag(scratch1, left);
1852 __ GetLeastBitsFromSmi(scratch2, right, 5);
1853 __ srl(scratch1, scratch2);
1854 // Unsigned shift is not allowed to produce a negative number.
1855 __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call);
1856 __ SmiTag(right, scratch1);
1857 break;
1858 }
1859 case Token::ADD: {
1860 __ AddP(scratch1, left, right);
1861 __ b(overflow, &stub_call);
1862 __ LoadRR(right, scratch1);
1863 break;
1864 }
1865 case Token::SUB: {
1866 __ SubP(scratch1, left, right);
1867 __ b(overflow, &stub_call);
1868 __ LoadRR(right, scratch1);
1869 break;
1870 }
1871 case Token::MUL: {
1872 Label mul_zero;
1873 #if V8_TARGET_ARCH_S390X
1874 // Remove tag from both operands.
1875 __ SmiUntag(ip, right);
1876 __ SmiUntag(scratch2, left);
1877 __ mr_z(scratch1, ip);
1878 // Check for overflowing the smi range - no overflow if higher 33 bits of
1879 // the result are identical.
1880 __ lr(ip, scratch2); // 32 bit load
1881 __ sra(ip, Operand(31));
1882 __ cr_z(ip, scratch1); // 32 bit compare
1883 __ bne(&stub_call);
1884 #else
1885 __ SmiUntag(ip, right);
1886 __ LoadRR(scratch2, left); // load into low order of reg pair
1887 __ mr_z(scratch1, ip); // R4:R5 = R5 * ip
1888 // Check for overflowing the smi range - no overflow if higher 33 bits of
1889 // the result are identical.
1890 __ TestIfInt32(scratch1, scratch2, ip);
1891 __ bne(&stub_call);
1892 #endif
1893 // Go slow on zero result to handle -0.
1894 __ chi(scratch2, Operand::Zero());
1895 __ beq(&mul_zero, Label::kNear);
1896 #if V8_TARGET_ARCH_S390X
1897 __ SmiTag(right, scratch2);
1898 #else
1899 __ LoadRR(right, scratch2);
1900 #endif
1901 __ b(&done);
1902      // The result must be -0 if a negative number was multiplied by 0.
1903      // We know one of the operands was zero.
1904 __ bind(&mul_zero);
1905 __ AddP(scratch2, right, left);
1906 __ CmpP(scratch2, Operand::Zero());
1907 __ blt(&stub_call);
1908 __ LoadSmiLiteral(right, Smi::kZero);
1909 break;
1910 }
1911 case Token::BIT_OR:
1912 __ OrP(right, left);
1913 break;
1914 case Token::BIT_AND:
1915 __ AndP(right, left);
1916 break;
1917 case Token::BIT_XOR:
1918 __ XorP(right, left);
1919 break;
1920 default:
1921 UNREACHABLE();
1922 }
1923
1924 __ bind(&done);
1925 context()->Plug(r2);
1926 }
1927
1928 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
1929 for (int i = 0; i < lit->properties()->length(); i++) {
1930 ClassLiteral::Property* property = lit->properties()->at(i);
1931 Expression* value = property->value();
1932
1933 Register scratch = r3;
1934 if (property->is_static()) {
1935 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // constructor
1936 } else {
1937 __ LoadP(scratch, MemOperand(sp, 0)); // prototype
1938 }
1939 PushOperand(scratch);
1940 EmitPropertyKey(property, lit->GetIdForProperty(i));
1941
1942    // The static 'prototype' property is read-only. The non-computed property
1943    // name case is handled in the parser. Since this is the only case where we
1944    // need to check for an own read-only property, we special-case it here so
1945    // the check is not emitted for every property.
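    // For example, 'class C { static [expr]() {} }' must throw a TypeError at
    // runtime if expr evaluates to the string "prototype".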
1946 if (property->is_static() && property->is_computed_name()) {
1947 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
1948 __ push(r2);
1949 }
1950
1951 VisitForStackValue(value);
1952 if (NeedsHomeObject(value)) {
1953 EmitSetHomeObject(value, 2, property->GetSlot());
1954 }
1955
1956 switch (property->kind()) {
1957 case ClassLiteral::Property::METHOD:
1958 PushOperand(Smi::FromInt(DONT_ENUM));
1959 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1960 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
1961 break;
1962
1963 case ClassLiteral::Property::GETTER:
1964 PushOperand(Smi::FromInt(DONT_ENUM));
1965 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
1966 break;
1967
1968 case ClassLiteral::Property::SETTER:
1969 PushOperand(Smi::FromInt(DONT_ENUM));
1970 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
1971 break;
1972
1973 case ClassLiteral::Property::FIELD:
1974 default:
1975 UNREACHABLE();
1976 }
1977 }
1978 }
1979
1980 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
1981 PopOperand(r3);
1982 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1983 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
1984 CallIC(code, expr->BinaryOperationFeedbackId());
1985 patch_site.EmitPatchInfo();
1986 context()->Plug(r2);
1987 }
1988
1989 void FullCodeGenerator::EmitAssignment(Expression* expr,
1990 FeedbackVectorSlot slot) {
1991 DCHECK(expr->IsValidReferenceExpressionOrThis());
1992
1993 Property* prop = expr->AsProperty();
1994 LhsKind assign_type = Property::GetAssignType(prop);
1995
1996 switch (assign_type) {
1997 case VARIABLE: {
1998 VariableProxy* proxy = expr->AsVariableProxy();
1999 EffectContext context(this);
2000 EmitVariableAssignment(proxy->var(), Token::ASSIGN, slot,
2001 proxy->hole_check_mode());
2002 break;
2003 }
2004 case NAMED_PROPERTY: {
2005 PushOperand(r2); // Preserve value.
2006 VisitForAccumulatorValue(prop->obj());
2007 __ Move(StoreDescriptor::ReceiverRegister(), r2);
2008 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
2009 CallStoreIC(slot, prop->key()->AsLiteral()->value());
2010 break;
2011 }
2012 case NAMED_SUPER_PROPERTY: {
2013 PushOperand(r2);
2014 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2015 VisitForAccumulatorValue(
2016 prop->obj()->AsSuperPropertyReference()->home_object());
2017 // stack: value, this; r2: home_object
2018 Register scratch = r4;
2019 Register scratch2 = r5;
2020 __ LoadRR(scratch, result_register()); // home_object
2021 __ LoadP(r2, MemOperand(sp, kPointerSize)); // value
2022 __ LoadP(scratch2, MemOperand(sp, 0)); // this
2023 __ StoreP(scratch2, MemOperand(sp, kPointerSize)); // this
2024 __ StoreP(scratch, MemOperand(sp, 0)); // home_object
2025 // stack: this, home_object; r2: value
2026 EmitNamedSuperPropertyStore(prop);
2027 break;
2028 }
2029 case KEYED_SUPER_PROPERTY: {
2030 PushOperand(r2);
2031 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2032 VisitForStackValue(
2033 prop->obj()->AsSuperPropertyReference()->home_object());
2034 VisitForAccumulatorValue(prop->key());
2035 Register scratch = r4;
2036 Register scratch2 = r5;
2037 __ LoadP(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2038      // stack: value, this, home_object; r2: key, r5: value
2039 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // this
2040 __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize));
2041 __ LoadP(scratch, MemOperand(sp, 0)); // home_object
2042 __ StoreP(scratch, MemOperand(sp, kPointerSize));
2043 __ StoreP(r2, MemOperand(sp, 0));
2044 __ Move(r2, scratch2);
2045 // stack: this, home_object, key; r2: value.
2046 EmitKeyedSuperPropertyStore(prop);
2047 break;
2048 }
2049 case KEYED_PROPERTY: {
2050 PushOperand(r2); // Preserve value.
2051 VisitForStackValue(prop->obj());
2052 VisitForAccumulatorValue(prop->key());
2053 __ Move(StoreDescriptor::NameRegister(), r2);
2054 PopOperands(StoreDescriptor::ValueRegister(),
2055 StoreDescriptor::ReceiverRegister());
2056 CallKeyedStoreIC(slot);
2057 break;
2058 }
2059 }
2060 context()->Plug(r2);
2061 }
2062
2063 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2064 Variable* var, MemOperand location) {
2065 __ StoreP(result_register(), location);
2066 if (var->IsContextSlot()) {
2067 // RecordWrite may destroy all its register arguments.
2068 __ LoadRR(r5, result_register());
2069 int offset = Context::SlotOffset(var->index());
2070 __ RecordWriteContextSlot(r3, offset, r5, r4, kLRHasBeenSaved,
2071 kDontSaveFPRegs);
2072 }
2073 }
2074
2075 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2076 FeedbackVectorSlot slot,
2077 HoleCheckMode hole_check_mode) {
2078 if (var->IsUnallocated()) {
2079 // Global var, const, or let.
2080 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2081 CallStoreIC(slot, var->name());
2082
2083 } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
2084 // Non-initializing assignment to let variable needs a write barrier.
2085 DCHECK(!var->IsLookupSlot());
2086 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2087 MemOperand location = VarOperand(var, r3);
2088 // Perform an initialization check for lexically declared variables.
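    // For example, in 'x = 1; let x;' the assignment runs while x still holds
    // the hole value, so the ReferenceError path below is taken.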
2089 if (hole_check_mode == HoleCheckMode::kRequired) {
2090 Label assign;
2091 __ LoadP(r5, location);
2092 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2093 __ bne(&assign);
2094 __ mov(r5, Operand(var->name()));
2095 __ push(r5);
2096 __ CallRuntime(Runtime::kThrowReferenceError);
2097 __ bind(&assign);
2098 }
2099 if (var->mode() != CONST) {
2100 EmitStoreToStackLocalOrContextSlot(var, location);
2101 } else if (var->throw_on_const_assignment(language_mode())) {
2102 __ CallRuntime(Runtime::kThrowConstAssignError);
2103 }
2104 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2105 // Initializing assignment to const {this} needs a write barrier.
2106 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2107 Label uninitialized_this;
2108 MemOperand location = VarOperand(var, r3);
2109 __ LoadP(r5, location);
2110 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2111 __ beq(&uninitialized_this);
2112 __ mov(r3, Operand(var->name()));
2113 __ push(r3);
2114 __ CallRuntime(Runtime::kThrowReferenceError);
2115 __ bind(&uninitialized_this);
2116 EmitStoreToStackLocalOrContextSlot(var, location);
2117 } else {
2118 DCHECK(var->mode() != CONST || op == Token::INIT);
2119 if (var->IsLookupSlot()) {
2120 // Assignment to var.
2121 __ Push(var->name());
2122 __ Push(r2);
2123 __ CallRuntime(is_strict(language_mode())
2124 ? Runtime::kStoreLookupSlot_Strict
2125 : Runtime::kStoreLookupSlot_Sloppy);
2126 } else {
2127 // Assignment to var or initializing assignment to let/const in harmony
2128 // mode.
2129 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2130 MemOperand location = VarOperand(var, r3);
2131 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
2132 // Check for an uninitialized let binding.
2133 __ LoadP(r4, location);
2134 __ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
2135 __ Check(eq, kLetBindingReInitialization);
2136 }
2137 EmitStoreToStackLocalOrContextSlot(var, location);
2138 }
2139 }
2140 }
2141
2142 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2143 // Assignment to a property, using a named store IC.
2144 Property* prop = expr->target()->AsProperty();
2145 DCHECK(prop != NULL);
2146 DCHECK(prop->key()->IsLiteral());
2147
2148 PopOperand(StoreDescriptor::ReceiverRegister());
2149 CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
2150
2151 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2152 context()->Plug(r2);
2153 }
2154
2155 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2156 // Assignment to named property of super.
2157 // r2 : value
2158 // stack : receiver ('this'), home_object
2159 DCHECK(prop != NULL);
2160 Literal* key = prop->key()->AsLiteral();
2161 DCHECK(key != NULL);
2162
2163 PushOperand(key->value());
2164 PushOperand(r2);
2165 CallRuntimeWithOperands((is_strict(language_mode())
2166 ? Runtime::kStoreToSuper_Strict
2167 : Runtime::kStoreToSuper_Sloppy));
2168 }
2169
2170 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2171  // Assignment to keyed property of super.
2172 // r2 : value
2173 // stack : receiver ('this'), home_object, key
2174 DCHECK(prop != NULL);
2175
2176 PushOperand(r2);
2177 CallRuntimeWithOperands((is_strict(language_mode())
2178 ? Runtime::kStoreKeyedToSuper_Strict
2179 : Runtime::kStoreKeyedToSuper_Sloppy));
2180 }
2181
2182 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2183 // Assignment to a property, using a keyed store IC.
2184 PopOperands(StoreDescriptor::ReceiverRegister(),
2185 StoreDescriptor::NameRegister());
2186 DCHECK(StoreDescriptor::ValueRegister().is(r2));
2187
2188 CallKeyedStoreIC(expr->AssignmentSlot());
2189
2190 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2191 context()->Plug(r2);
2192 }
2193
2194 // Code common for calls using the IC.
2195 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2196 Expression* callee = expr->expression();
2197
2198 // Get the target function.
2199 ConvertReceiverMode convert_mode;
2200 if (callee->IsVariableProxy()) {
2201 {
2202 StackValueContext context(this);
2203 EmitVariableLoad(callee->AsVariableProxy());
2204 PrepareForBailout(callee, BailoutState::NO_REGISTERS);
2205 }
2206 // Push undefined as receiver. This is patched in the method prologue if it
2207 // is a sloppy mode method.
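    // For example, for a plain call 'f()' the receiver slot pushed here holds
    // undefined; a sloppy-mode callee later sees the global proxy instead.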
2208 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2209 PushOperand(r1);
2210 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2211 } else {
2212 // Load the function from the receiver.
2213 DCHECK(callee->IsProperty());
2214 DCHECK(!callee->AsProperty()->IsSuperAccess());
2215 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2216 EmitNamedPropertyLoad(callee->AsProperty());
2217 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2218 BailoutState::TOS_REGISTER);
2219 // Push the target function under the receiver.
2220 __ LoadP(r1, MemOperand(sp, 0));
2221 PushOperand(r1);
2222 __ StoreP(r2, MemOperand(sp, kPointerSize));
2223 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2224 }
2225
2226 EmitCall(expr, convert_mode);
2227 }
2228
2229 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2230 Expression* callee = expr->expression();
2231 DCHECK(callee->IsProperty());
2232 Property* prop = callee->AsProperty();
2233 DCHECK(prop->IsSuperAccess());
2234 SetExpressionPosition(prop);
2235
2236 Literal* key = prop->key()->AsLiteral();
2237 DCHECK(!key->value()->IsSmi());
2238 // Load the function from the receiver.
2239 const Register scratch = r3;
2240 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2241 VisitForAccumulatorValue(super_ref->home_object());
2242 __ LoadRR(scratch, r2);
2243 VisitForAccumulatorValue(super_ref->this_var());
2244 PushOperands(scratch, r2, r2, scratch);
2245 PushOperand(key->value());
2246
2247 // Stack here:
2248 // - home_object
2249 // - this (receiver)
2250 // - this (receiver) <-- LoadFromSuper will pop here and below.
2251 // - home_object
2252 // - key
2253 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
2254 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2255
2256 // Replace home_object with target function.
2257 __ StoreP(r2, MemOperand(sp, kPointerSize));
2258
2259 // Stack here:
2260 // - target function
2261 // - this (receiver)
2262 EmitCall(expr);
2263 }
2264
2265 // Code common for calls using the IC.
2266 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) {
2267 // Load the key.
2268 VisitForAccumulatorValue(key);
2269
2270 Expression* callee = expr->expression();
2271
2272 // Load the function from the receiver.
2273 DCHECK(callee->IsProperty());
2274 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2275 __ Move(LoadDescriptor::NameRegister(), r2);
2276 EmitKeyedPropertyLoad(callee->AsProperty());
2277 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2278 BailoutState::TOS_REGISTER);
2279
2280 // Push the target function under the receiver.
2281 __ LoadP(ip, MemOperand(sp, 0));
2282 PushOperand(ip);
2283 __ StoreP(r2, MemOperand(sp, kPointerSize));
2284
2285 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2286 }
2287
2288 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2289 Expression* callee = expr->expression();
2290 DCHECK(callee->IsProperty());
2291 Property* prop = callee->AsProperty();
2292 DCHECK(prop->IsSuperAccess());
2293
2294 SetExpressionPosition(prop);
2295 // Load the function from the receiver.
2296 const Register scratch = r3;
2297 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2298 VisitForAccumulatorValue(super_ref->home_object());
2299 __ LoadRR(scratch, r2);
2300 VisitForAccumulatorValue(super_ref->this_var());
2301 PushOperands(scratch, r2, r2, scratch);
2302 VisitForStackValue(prop->key());
2303
2304 // Stack here:
2305 // - home_object
2306 // - this (receiver)
2307 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2308 // - home_object
2309 // - key
2310 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
2311 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2312
2313 // Replace home_object with target function.
2314 __ StoreP(r2, MemOperand(sp, kPointerSize));
2315
2316 // Stack here:
2317 // - target function
2318 // - this (receiver)
2319 EmitCall(expr);
2320 }
2321
2322 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2323 // Load the arguments.
2324 ZoneList<Expression*>* args = expr->arguments();
2325 int arg_count = args->length();
2326 for (int i = 0; i < arg_count; i++) {
2327 VisitForStackValue(args->at(i));
2328 }
2329
2330 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
2331 SetCallPosition(expr, expr->tail_call_mode());
2332 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2333 if (FLAG_trace) {
2334 __ CallRuntime(Runtime::kTraceTailCall);
2335 }
2336 // Update profiling counters before the tail call since we will
2337 // not return to this function.
2338 EmitProfilingCounterHandlingForReturnSequence(true);
2339 }
2340 Handle<Code> code =
2341 CodeFactory::CallIC(isolate(), mode, expr->tail_call_mode()).code();
2342 __ LoadSmiLiteral(r5, SmiFromSlot(expr->CallFeedbackICSlot()));
2343 __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2344 __ mov(r2, Operand(arg_count));
2345 CallIC(code);
2346 OperandStackDepthDecrement(arg_count + 1);
2347
2348 RecordJSReturnSite(expr);
2349 RestoreContext();
2350 context()->DropAndPlug(1, r2);
2351 }
2352
2353 void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
2354 int arg_count = expr->arguments()->length();
2355 // r6: copy of the first argument or undefined if it doesn't exist.
2356 if (arg_count > 0) {
2357 __ LoadP(r6, MemOperand(sp, arg_count * kPointerSize), r0);
2358 } else {
2359 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
2360 }
2361
2362 // r5: the receiver of the enclosing function.
2363 __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2364
2365 // r4: language mode.
2366 __ LoadSmiLiteral(r4, Smi::FromInt(language_mode()));
2367
2368  // r3: the start position of the scope the call resides in.
2369 __ LoadSmiLiteral(r3, Smi::FromInt(scope()->start_position()));
2370
2371 // r2: the source position of the eval call.
2372 __ LoadSmiLiteral(r2, Smi::FromInt(expr->position()));
2373
2374 // Do the runtime call.
2375 __ Push(r6, r5, r4, r3, r2);
2376 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2377 }
2378
2379 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2380 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2381 VariableProxy* callee = expr->expression()->AsVariableProxy();
2382 if (callee->var()->IsLookupSlot()) {
2383 Label slow, done;
2384 SetExpressionPosition(callee);
2385 // Generate code for loading from variables potentially shadowed by
2386 // eval-introduced variables.
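    // For example, in 'function f() { eval("function g() {}"); g(); }' the
    // call to g cannot be resolved statically, so the slow path below performs
    // a runtime lookup.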
2387 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2388
2389 __ bind(&slow);
2390 // Call the runtime to find the function to call (returned in r2) and
2391 // the object holding it (returned in r3).
2392 __ Push(callee->name());
2393 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2394 PushOperands(r2, r3); // Function, receiver.
2395 PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);
2396
2397 // If fast case code has been generated, emit code to push the function
2398 // and receiver and have the slow path jump around this code.
2399 if (done.is_linked()) {
2400 Label call;
2401 __ b(&call);
2402 __ bind(&done);
2403 // Push function.
2404 __ push(r2);
2405 // Pass undefined as the receiver, which is the WithBaseObject of a
2406 // non-object environment record. If the callee is sloppy, it will patch
2407 // it up to be the global receiver.
2408 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
2409 __ push(r3);
2410 __ bind(&call);
2411 }
2412 } else {
2413 VisitForStackValue(callee);
2414 // refEnv.WithBaseObject()
2415 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2416 PushOperand(r4); // Reserved receiver slot.
2417 }
2418 }
2419
2420 void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2421 // In a call to eval, we first call
2422 // Runtime_ResolvePossiblyDirectEval to resolve the function we need
2423 // to call. Then we call the resolved function using the given arguments.
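  // For example, for a direct call 'eval(src)' the resolve step determines
  // whether the callee really is the global eval function; if so, the source
  // is compiled in the caller's scope, otherwise the resolved function is
  // called like any other.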
2424 ZoneList<Expression*>* args = expr->arguments();
2425 int arg_count = args->length();
2426
2427 PushCalleeAndWithBaseObject(expr);
2428
2429 // Push the arguments.
2430 for (int i = 0; i < arg_count; i++) {
2431 VisitForStackValue(args->at(i));
2432 }
2433
2434 // Push a copy of the function (found below the arguments) and
2435 // resolve eval.
2436 __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2437 __ push(r3);
2438 EmitResolvePossiblyDirectEval(expr);
2439
2440 // Touch up the stack with the resolved function.
2441 __ StoreP(r2, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2442
2443 PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);
2444
2445 // Record source position for debugger.
2446 SetCallPosition(expr);
2447 Handle<Code> code = CodeFactory::CallIC(isolate(), ConvertReceiverMode::kAny,
2448 expr->tail_call_mode())
2449 .code();
2450 __ LoadSmiLiteral(r5, SmiFromSlot(expr->CallFeedbackICSlot()));
2451 __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2452 __ mov(r2, Operand(arg_count));
2453 __ Call(code, RelocInfo::CODE_TARGET);
2454 OperandStackDepthDecrement(arg_count + 1);
2455 RecordJSReturnSite(expr);
2456 RestoreContext();
2457 context()->DropAndPlug(1, r2);
2458 }
2459
2460 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2461 Comment cmnt(masm_, "[ CallNew");
2462 // According to ECMA-262, section 11.2.2, page 44, the function
2463 // expression in new calls must be evaluated before the
2464 // arguments.
2465
2466 // Push constructor on the stack. If it's not a function it's used as
2467 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2468 // ignored.
2469 DCHECK(!expr->expression()->IsSuperPropertyReference());
2470 VisitForStackValue(expr->expression());
2471
2472 // Push the arguments ("left-to-right") on the stack.
2473 ZoneList<Expression*>* args = expr->arguments();
2474 int arg_count = args->length();
2475 for (int i = 0; i < arg_count; i++) {
2476 VisitForStackValue(args->at(i));
2477 }
2478
2479 // Call the construct call builtin that handles allocation and
2480 // constructor invocation.
2481 SetConstructCallPosition(expr);
2482
2483 // Load function and argument count into r3 and r2.
2484 __ mov(r2, Operand(arg_count));
2485 __ LoadP(r3, MemOperand(sp, arg_count * kPointerSize), r0);
2486
2487 // Record call targets in unoptimized code.
2488 __ EmitLoadTypeFeedbackVector(r4);
2489 __ LoadSmiLiteral(r5, SmiFromSlot(expr->CallNewFeedbackSlot()));
2490
2491 CallConstructStub stub(isolate());
2492 CallIC(stub.GetCode());
2493 OperandStackDepthDecrement(arg_count + 1);
2494 PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
2495 RestoreContext();
2496 context()->Plug(r2);
2497 }
2498
2499 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2500 SuperCallReference* super_call_ref =
2501 expr->expression()->AsSuperCallReference();
2502 DCHECK_NOT_NULL(super_call_ref);
2503
2504 // Push the super constructor target on the stack (may be null,
2505 // but the Construct builtin can deal with that properly).
2506 VisitForAccumulatorValue(super_call_ref->this_function_var());
2507 __ AssertFunction(result_register());
2508 __ LoadP(result_register(),
2509 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2510 __ LoadP(result_register(),
2511 FieldMemOperand(result_register(), Map::kPrototypeOffset));
2512 PushOperand(result_register());
2513
2514 // Push the arguments ("left-to-right") on the stack.
2515 ZoneList<Expression*>* args = expr->arguments();
2516 int arg_count = args->length();
2517 for (int i = 0; i < arg_count; i++) {
2518 VisitForStackValue(args->at(i));
2519 }
2520
2521 // Call the construct call builtin that handles allocation and
2522 // constructor invocation.
2523 SetConstructCallPosition(expr);
2524
2525 // Load new target into r5.
2526 VisitForAccumulatorValue(super_call_ref->new_target_var());
2527 __ LoadRR(r5, result_register());
2528
2529  // Load function and argument count into r3 and r2.
2530 __ mov(r2, Operand(arg_count));
2531 __ LoadP(r3, MemOperand(sp, arg_count * kPointerSize));
2532
2533 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2534 OperandStackDepthDecrement(arg_count + 1);
2535
2536 RecordJSReturnSite(expr);
2537 RestoreContext();
2538 context()->Plug(r2);
2539 }
2540
2541 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2542 ZoneList<Expression*>* args = expr->arguments();
2543 DCHECK(args->length() == 1);
2544
2545 VisitForAccumulatorValue(args->at(0));
2546
2547 Label materialize_true, materialize_false, skip_lookup;
2548 Label* if_true = NULL;
2549 Label* if_false = NULL;
2550 Label* fall_through = NULL;
2551 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2552 &if_false, &fall_through);
2553
2554 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2555 __ TestIfSmi(r2);
2556 Split(eq, if_true, if_false, fall_through);
2557
2558 context()->Plug(if_true, if_false);
2559 }
2560
2561 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2562 ZoneList<Expression*>* args = expr->arguments();
2563 DCHECK(args->length() == 1);
2564
2565 VisitForAccumulatorValue(args->at(0));
2566
2567 Label materialize_true, materialize_false;
2568 Label* if_true = NULL;
2569 Label* if_false = NULL;
2570 Label* fall_through = NULL;
2571 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2572 &if_false, &fall_through);
2573
2574 __ JumpIfSmi(r2, if_false);
2575 __ CompareObjectType(r2, r3, r3, FIRST_JS_RECEIVER_TYPE);
2576 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2577 Split(ge, if_true, if_false, fall_through);
2578
2579 context()->Plug(if_true, if_false);
2580 }
2581
2582 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2583 ZoneList<Expression*>* args = expr->arguments();
2584 DCHECK(args->length() == 1);
2585
2586 VisitForAccumulatorValue(args->at(0));
2587
2588 Label materialize_true, materialize_false;
2589 Label* if_true = NULL;
2590 Label* if_false = NULL;
2591 Label* fall_through = NULL;
2592 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2593 &if_false, &fall_through);
2594
2595 __ JumpIfSmi(r2, if_false);
2596 __ CompareObjectType(r2, r3, r3, JS_ARRAY_TYPE);
2597 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2598 Split(eq, if_true, if_false, fall_through);
2599
2600 context()->Plug(if_true, if_false);
2601 }
2602
2603 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2604 ZoneList<Expression*>* args = expr->arguments();
2605 DCHECK(args->length() == 1);
2606
2607 VisitForAccumulatorValue(args->at(0));
2608
2609 Label materialize_true, materialize_false;
2610 Label* if_true = NULL;
2611 Label* if_false = NULL;
2612 Label* fall_through = NULL;
2613 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2614 &if_false, &fall_through);
2615
2616 __ JumpIfSmi(r2, if_false);
2617 __ CompareObjectType(r2, r3, r3, JS_TYPED_ARRAY_TYPE);
2618 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2619 Split(eq, if_true, if_false, fall_through);
2620
2621 context()->Plug(if_true, if_false);
2622 }
2623
2624 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2625 ZoneList<Expression*>* args = expr->arguments();
2626 DCHECK(args->length() == 1);
2627
2628 VisitForAccumulatorValue(args->at(0));
2629
2630 Label materialize_true, materialize_false;
2631 Label* if_true = NULL;
2632 Label* if_false = NULL;
2633 Label* fall_through = NULL;
2634 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2635 &if_false, &fall_through);
2636
2637 __ JumpIfSmi(r2, if_false);
2638 __ CompareObjectType(r2, r3, r3, JS_REGEXP_TYPE);
2639 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2640 Split(eq, if_true, if_false, fall_through);
2641
2642 context()->Plug(if_true, if_false);
2643 }
2644
2645 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2646 ZoneList<Expression*>* args = expr->arguments();
2647 DCHECK(args->length() == 1);
2648
2649 VisitForAccumulatorValue(args->at(0));
2650
2651 Label materialize_true, materialize_false;
2652 Label* if_true = NULL;
2653 Label* if_false = NULL;
2654 Label* fall_through = NULL;
2655 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2656 &if_false, &fall_through);
2657
2658 __ JumpIfSmi(r2, if_false);
2659 __ CompareObjectType(r2, r3, r3, JS_PROXY_TYPE);
2660 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2661 Split(eq, if_true, if_false, fall_through);
2662
2663 context()->Plug(if_true, if_false);
2664 }
2665
2666 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2667 ZoneList<Expression*>* args = expr->arguments();
2668 DCHECK(args->length() == 1);
2669 Label done, null, function, non_function_constructor;
2670
2671 VisitForAccumulatorValue(args->at(0));
2672
2673 // If the object is not a JSReceiver, we return null.
2674 __ JumpIfSmi(r2, &null);
2675 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2676 __ CompareObjectType(r2, r2, r3, FIRST_JS_RECEIVER_TYPE);
2677 // Map is now in r2.
2678 __ blt(&null);
2679
2680 // Return 'Function' for JSFunction and JSBoundFunction objects.
2681 __ CmpLogicalP(r3, Operand(FIRST_FUNCTION_TYPE));
2682 STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
2683 __ bge(&function);
2684
2685 // Check if the constructor in the map is a JS function.
2686 Register instance_type = r4;
2687 __ GetMapConstructor(r2, r2, r3, instance_type);
2688 __ CmpP(instance_type, Operand(JS_FUNCTION_TYPE));
2689 __ bne(&non_function_constructor, Label::kNear);
2690
2691 // r2 now contains the constructor function. Grab the
2692 // instance class name from there.
2693 __ LoadP(r2, FieldMemOperand(r2, JSFunction::kSharedFunctionInfoOffset));
2694 __ LoadP(r2,
2695 FieldMemOperand(r2, SharedFunctionInfo::kInstanceClassNameOffset));
2696 __ b(&done, Label::kNear);
2697
2698 // Functions have class 'Function'.
2699 __ bind(&function);
2700 __ LoadRoot(r2, Heap::kFunction_stringRootIndex);
2701 __ b(&done, Label::kNear);
2702
2703 // Objects with a non-function constructor have class 'Object'.
2704 __ bind(&non_function_constructor);
2705 __ LoadRoot(r2, Heap::kObject_stringRootIndex);
2706 __ b(&done, Label::kNear);
2707
2708 // Non-JS objects have class null.
2709 __ bind(&null);
2710 __ LoadRoot(r2, Heap::kNullValueRootIndex);
2711
2712 // All done.
2713 __ bind(&done);
2714
2715 context()->Plug(r2);
2716 }
2717
2718 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
2719 ZoneList<Expression*>* args = expr->arguments();
2720 DCHECK(args->length() == 2);
2721 VisitForStackValue(args->at(0));
2722 VisitForAccumulatorValue(args->at(1));
2723
2724 Register object = r3;
2725 Register index = r2;
2726 Register result = r5;
2727
2728 PopOperand(object);
2729
2730 Label need_conversion;
2731 Label index_out_of_range;
2732 Label done;
2733 StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
2734 &need_conversion, &index_out_of_range);
2735 generator.GenerateFast(masm_);
2736 __ b(&done);
2737
2738 __ bind(&index_out_of_range);
2739 // When the index is out of range, the spec requires us to return
2740 // NaN.
2741 __ LoadRoot(result, Heap::kNanValueRootIndex);
2742 __ b(&done);
2743
2744 __ bind(&need_conversion);
2745 // Load the undefined value into the result register, which will
2746 // trigger conversion.
2747 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2748 __ b(&done);
2749
2750 NopRuntimeCallHelper call_helper;
2751 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
2752
2753 __ bind(&done);
2754 context()->Plug(result);
2755 }
2756
2757 void FullCodeGenerator::EmitCall(CallRuntime* expr) {
2758 ZoneList<Expression*>* args = expr->arguments();
2759 DCHECK_LE(2, args->length());
2760 // Push target, receiver and arguments onto the stack.
2761 for (Expression* const arg : *args) {
2762 VisitForStackValue(arg);
2763 }
2764 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
2765 // Move target to r3.
2766 int const argc = args->length() - 2;
2767 __ LoadP(r3, MemOperand(sp, (argc + 1) * kPointerSize));
2768 // Call the target.
2769 __ mov(r2, Operand(argc));
2770 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2771 OperandStackDepthDecrement(argc + 1);
2772 RestoreContext();
2773 // Discard the function left on TOS.
2774 context()->DropAndPlug(1, r2);
2775 }
2776
2777 void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
2778 ZoneList<Expression*>* args = expr->arguments();
2779 DCHECK_EQ(1, args->length());
2780 VisitForAccumulatorValue(args->at(0));
2781 __ AssertFunction(r2);
2782 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
2783 __ LoadP(r2, FieldMemOperand(r2, Map::kPrototypeOffset));
2784 context()->Plug(r2);
2785 }
2786
2787 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
2788 DCHECK(expr->arguments()->length() == 0);
2789 ExternalReference debug_is_active =
2790 ExternalReference::debug_is_active_address(isolate());
2791 __ mov(ip, Operand(debug_is_active));
2792 __ LoadlB(r2, MemOperand(ip));
2793 __ SmiTag(r2);
2794 context()->Plug(r2);
2795 }
2796
2797 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
2798 ZoneList<Expression*>* args = expr->arguments();
2799 DCHECK_EQ(2, args->length());
2800 VisitForStackValue(args->at(0));
2801 VisitForStackValue(args->at(1));
2802
2803 Label runtime, done;
2804
2805 __ Allocate(JSIteratorResult::kSize, r2, r4, r5, &runtime,
2806 NO_ALLOCATION_FLAGS);
2807 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r3);
2808 __ Pop(r4, r5);
2809 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
2810 __ StoreP(r3, FieldMemOperand(r2, HeapObject::kMapOffset), r0);
2811 __ StoreP(r6, FieldMemOperand(r2, JSObject::kPropertiesOffset), r0);
2812 __ StoreP(r6, FieldMemOperand(r2, JSObject::kElementsOffset), r0);
2813 __ StoreP(r4, FieldMemOperand(r2, JSIteratorResult::kValueOffset), r0);
2814 __ StoreP(r5, FieldMemOperand(r2, JSIteratorResult::kDoneOffset), r0);
2815 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
2816 __ b(&done);
2817
2818 __ bind(&runtime);
2819 CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
2820
2821 __ bind(&done);
2822 context()->Plug(r2);
2823 }
2824
2825 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
2826 // Push function.
2827 __ LoadNativeContextSlot(expr->context_index(), r2);
2828 PushOperand(r2);
2829
2830 // Push undefined as the receiver.
2831 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2832 PushOperand(r2);
2833 }
2834
2835 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
2836 ZoneList<Expression*>* args = expr->arguments();
2837 int arg_count = args->length();
2838
2839 SetCallPosition(expr);
2840 __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2841 __ mov(r2, Operand(arg_count));
2842 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
2843 RelocInfo::CODE_TARGET);
2844 OperandStackDepthDecrement(arg_count + 1);
2845 RestoreContext();
2846 }
2847
2848 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
2849 switch (expr->op()) {
2850 case Token::DELETE: {
2851 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
2852 Property* property = expr->expression()->AsProperty();
2853 VariableProxy* proxy = expr->expression()->AsVariableProxy();
2854
2855 if (property != NULL) {
2856 VisitForStackValue(property->obj());
2857 VisitForStackValue(property->key());
2858 CallRuntimeWithOperands(is_strict(language_mode())
2859 ? Runtime::kDeleteProperty_Strict
2860 : Runtime::kDeleteProperty_Sloppy);
2861 context()->Plug(r2);
2862 } else if (proxy != NULL) {
2863 Variable* var = proxy->var();
2864 // Delete of an unqualified identifier is disallowed in strict mode but
2865 // "delete this" is allowed.
2866 bool is_this = var->is_this();
2867 DCHECK(is_sloppy(language_mode()) || is_this);
2868 if (var->IsUnallocated()) {
2869 __ LoadGlobalObject(r4);
2870 __ mov(r3, Operand(var->name()));
2871 __ Push(r4, r3);
2872 __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
2873 context()->Plug(r2);
2874 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
2875 // Result of deleting non-global, non-dynamic variables is false.
2876 // The subexpression does not have side effects.
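        // For example, 'delete localVar' simply evaluates to false here
        // without calling into the runtime.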
2877 context()->Plug(is_this);
2878 } else {
2879 // Non-global variable. Call the runtime to try to delete from the
2880 // context where the variable was introduced.
2881 __ Push(var->name());
2882 __ CallRuntime(Runtime::kDeleteLookupSlot);
2883 context()->Plug(r2);
2884 }
2885 } else {
2886 // Result of deleting non-property, non-variable reference is true.
2887 // The subexpression may have side effects.
2888 VisitForEffect(expr->expression());
2889 context()->Plug(true);
2890 }
2891 break;
2892 }
2893
2894 case Token::VOID: {
2895 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
2896 VisitForEffect(expr->expression());
2897 context()->Plug(Heap::kUndefinedValueRootIndex);
2898 break;
2899 }
2900
2901 case Token::NOT: {
2902 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
2903 if (context()->IsEffect()) {
2904 // Unary NOT has no side effects so it's only necessary to visit the
2905 // subexpression. Match the optimizing compiler by not branching.
2906 VisitForEffect(expr->expression());
2907 } else if (context()->IsTest()) {
2908 const TestContext* test = TestContext::cast(context());
2909 // The labels are swapped for the recursive call.
2910 VisitForControl(expr->expression(), test->false_label(),
2911 test->true_label(), test->fall_through());
2912 context()->Plug(test->true_label(), test->false_label());
2913 } else {
2914 // We handle value contexts explicitly rather than simply visiting
2915 // for control and plugging the control flow into the context,
2916 // because we need to prepare a pair of extra administrative AST ids
2917 // for the optimizing compiler.
2918 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
2919 Label materialize_true, materialize_false, done;
2920 VisitForControl(expr->expression(), &materialize_false,
2921 &materialize_true, &materialize_true);
2922 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
2923 __ bind(&materialize_true);
2924 PrepareForBailoutForId(expr->MaterializeTrueId(),
2925 BailoutState::NO_REGISTERS);
2926 __ LoadRoot(r2, Heap::kTrueValueRootIndex);
2927 if (context()->IsStackValue()) __ push(r2);
2928 __ b(&done);
2929 __ bind(&materialize_false);
2930 PrepareForBailoutForId(expr->MaterializeFalseId(),
2931 BailoutState::NO_REGISTERS);
2932 __ LoadRoot(r2, Heap::kFalseValueRootIndex);
2933 if (context()->IsStackValue()) __ push(r2);
2934 __ bind(&done);
2935 }
2936 break;
2937 }
2938
2939 case Token::TYPEOF: {
2940 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
2941 {
2942 AccumulatorValueContext context(this);
2943 VisitForTypeofValue(expr->expression());
2944 }
2945 __ LoadRR(r5, r2);
2946 __ Call(isolate()->builtins()->Typeof(), RelocInfo::CODE_TARGET);
2947 context()->Plug(r2);
2948 break;
2949 }
2950
2951 default:
2952 UNREACHABLE();
2953 }
2954 }
2955
2956 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
2957 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
2958
2959 Comment cmnt(masm_, "[ CountOperation");
2960
2961 Property* prop = expr->expression()->AsProperty();
2962 LhsKind assign_type = Property::GetAssignType(prop);
2963
2964 // Evaluate expression and get value.
2965 if (assign_type == VARIABLE) {
2966 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
2967 AccumulatorValueContext context(this);
2968 EmitVariableLoad(expr->expression()->AsVariableProxy());
2969 } else {
2970 // Reserve space for result of postfix operation.
2971 if (expr->is_postfix() && !context()->IsEffect()) {
2972 __ LoadSmiLiteral(ip, Smi::kZero);
2973 PushOperand(ip);
2974 }
2975 switch (assign_type) {
2976 case NAMED_PROPERTY: {
2977 // Put the object both on the stack and in the register.
2978 VisitForStackValue(prop->obj());
2979 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2980 EmitNamedPropertyLoad(prop);
2981 break;
2982 }
2983
2984 case NAMED_SUPER_PROPERTY: {
2985 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2986 VisitForAccumulatorValue(
2987 prop->obj()->AsSuperPropertyReference()->home_object());
2988 const Register scratch = r3;
2989 __ LoadP(scratch, MemOperand(sp, 0)); // this
2990 PushOperands(result_register(), scratch, result_register());
2991 EmitNamedSuperPropertyLoad(prop);
2992 break;
2993 }
2994
2995 case KEYED_SUPER_PROPERTY: {
2996 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2997 VisitForStackValue(
2998 prop->obj()->AsSuperPropertyReference()->home_object());
2999 VisitForAccumulatorValue(prop->key());
3000 const Register scratch1 = r3;
3001 const Register scratch2 = r4;
3002 __ LoadP(scratch1, MemOperand(sp, 1 * kPointerSize)); // this
3003 __ LoadP(scratch2, MemOperand(sp, 0 * kPointerSize)); // home object
3004 PushOperands(result_register(), scratch1, scratch2, result_register());
3005 EmitKeyedSuperPropertyLoad(prop);
3006 break;
3007 }
3008
3009 case KEYED_PROPERTY: {
3010 VisitForStackValue(prop->obj());
3011 VisitForStackValue(prop->key());
3012 __ LoadP(LoadDescriptor::ReceiverRegister(),
3013 MemOperand(sp, 1 * kPointerSize));
3014 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
3015 EmitKeyedPropertyLoad(prop);
3016 break;
3017 }
3018
3019 case VARIABLE:
3020 UNREACHABLE();
3021 }
3022 }
3023
3024 // We need a second deoptimization point after loading the value
3025 // in case evaluating the property load may have a side effect.
3026 if (assign_type == VARIABLE) {
3027 PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
3028 } else {
3029 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
3030 }
3031
3032 // Inline smi case if we are in a loop.
3033 Label stub_call, done;
3034 JumpPatchSite patch_site(masm_);
3035
3036 int count_value = expr->op() == Token::INC ? 1 : -1;
3037 if (ShouldInlineSmiCase(expr->op())) {
3038 Label slow;
3039 patch_site.EmitJumpIfNotSmi(r2, &slow);
3040
3041 // Save result for postfix expressions.
3042 if (expr->is_postfix()) {
3043 if (!context()->IsEffect()) {
3044 // Save the result on the stack. If we have a named or keyed property
3045 // we store the result under the receiver that is currently on top
3046 // of the stack.
3047 switch (assign_type) {
3048 case VARIABLE:
3049 __ push(r2);
3050 break;
3051 case NAMED_PROPERTY:
3052 __ StoreP(r2, MemOperand(sp, kPointerSize));
3053 break;
3054 case NAMED_SUPER_PROPERTY:
3055 __ StoreP(r2, MemOperand(sp, 2 * kPointerSize));
3056 break;
3057 case KEYED_PROPERTY:
3058 __ StoreP(r2, MemOperand(sp, 2 * kPointerSize));
3059 break;
3060 case KEYED_SUPER_PROPERTY:
3061 __ StoreP(r2, MemOperand(sp, 3 * kPointerSize));
3062 break;
3063 }
3064 }
3065 }
3066
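// Inline Smi fast path: add the Smi-tagged count value into a scratch
// register and commit it to r2 only when the addition did not overflow;
// on overflow r2 keeps the original value and we fall through to the
// BinaryOpIC stub below.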
3067 Register scratch1 = r3;
3068 Register scratch2 = r4;
3069 __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value));
3070 __ AddP(scratch2, r2, scratch1);
3071 __ LoadOnConditionP(nooverflow, r2, scratch2);
3072 __ b(nooverflow, &done);
3073 // Overflow: call the stub. r2 still holds the original Smi value.
3074 __ b(&stub_call);
3075 __ bind(&slow);
3076 }
3077
3078 // Convert old value into a number.
3079 __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
3080 RestoreContext();
3081 PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);
3082
3083 // Save result for postfix expressions.
3084 if (expr->is_postfix()) {
3085 if (!context()->IsEffect()) {
3086 // Save the result on the stack. If we have a named or keyed property
3087 // we store the result under the receiver that is currently on top
3088 // of the stack.
3089 switch (assign_type) {
3090 case VARIABLE:
3091 PushOperand(r2);
3092 break;
3093 case NAMED_PROPERTY:
3094 __ StoreP(r2, MemOperand(sp, kPointerSize));
3095 break;
3096 case NAMED_SUPER_PROPERTY:
3097 __ StoreP(r2, MemOperand(sp, 2 * kPointerSize));
3098 break;
3099 case KEYED_PROPERTY:
3100 __ StoreP(r2, MemOperand(sp, 2 * kPointerSize));
3101 break;
3102 case KEYED_SUPER_PROPERTY:
3103 __ StoreP(r2, MemOperand(sp, 3 * kPointerSize));
3104 break;
3105 }
3106 }
3107 }
3108
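// Generic path: the (possibly ToNumber-converted) old value goes to r3 and
// the Smi count value to r2, and the addition is performed by the
// BinaryOpIC. EmitPatchInfo records the inline patch site so the IC can
// later patch the Smi check above.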
3109 __ bind(&stub_call);
3110 __ LoadRR(r3, r2);
3111 __ LoadSmiLiteral(r2, Smi::FromInt(count_value));
3112
3113 SetExpressionPosition(expr);
3114
3115 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
3116 CallIC(code, expr->CountBinOpFeedbackId());
3117 patch_site.EmitPatchInfo();
3118 __ bind(&done);
3119
3120 // Store the value returned in r2.
3121 switch (assign_type) {
3122 case VARIABLE: {
3123 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3124 if (expr->is_postfix()) {
3125 {
3126 EffectContext context(this);
3127 EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
3128 proxy->hole_check_mode());
3129 PrepareForBailoutForId(expr->AssignmentId(),
3130 BailoutState::TOS_REGISTER);
3131 context.Plug(r2);
3132 }
3133 // For all contexts except EffectContext we have the result on
3134 // top of the stack.
3135 if (!context()->IsEffect()) {
3136 context()->PlugTOS();
3137 }
3138 } else {
3139 EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
3140 proxy->hole_check_mode());
3141 PrepareForBailoutForId(expr->AssignmentId(),
3142 BailoutState::TOS_REGISTER);
3143 context()->Plug(r2);
3144 }
3145 break;
3146 }
3147 case NAMED_PROPERTY: {
3148 PopOperand(StoreDescriptor::ReceiverRegister());
3149 CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
3150 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3151 if (expr->is_postfix()) {
3152 if (!context()->IsEffect()) {
3153 context()->PlugTOS();
3154 }
3155 } else {
3156 context()->Plug(r2);
3157 }
3158 break;
3159 }
3160 case NAMED_SUPER_PROPERTY: {
3161 EmitNamedSuperPropertyStore(prop);
3162 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3163 if (expr->is_postfix()) {
3164 if (!context()->IsEffect()) {
3165 context()->PlugTOS();
3166 }
3167 } else {
3168 context()->Plug(r2);
3169 }
3170 break;
3171 }
3172 case KEYED_SUPER_PROPERTY: {
3173 EmitKeyedSuperPropertyStore(prop);
3174 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3175 if (expr->is_postfix()) {
3176 if (!context()->IsEffect()) {
3177 context()->PlugTOS();
3178 }
3179 } else {
3180 context()->Plug(r2);
3181 }
3182 break;
3183 }
3184 case KEYED_PROPERTY: {
3185 PopOperands(StoreDescriptor::ReceiverRegister(),
3186 StoreDescriptor::NameRegister());
3187 CallKeyedStoreIC(expr->CountSlot());
3188 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3189 if (expr->is_postfix()) {
3190 if (!context()->IsEffect()) {
3191 context()->PlugTOS();
3192 }
3193 } else {
3194 context()->Plug(r2);
3195 }
3196 break;
3197 }
3198 }
3199 }
3200
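// Emits the inlined form of comparing `typeof <sub_expr>` against a string
// literal (e.g. `typeof x == "number"`), branching directly on the value's
// map and instance type instead of materializing the typeof string.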
3201 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3202 Expression* sub_expr,
3203 Handle<String> check) {
3204 Label materialize_true, materialize_false;
3205 Label* if_true = NULL;
3206 Label* if_false = NULL;
3207 Label* fall_through = NULL;
3208 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3209 &if_false, &fall_through);
3210
3211 {
3212 AccumulatorValueContext context(this);
3213 VisitForTypeofValue(sub_expr);
3214 }
3215 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3216
3217 Factory* factory = isolate()->factory();
3218 if (String::Equals(check, factory->number_string())) {
3219 __ JumpIfSmi(r2, if_true);
3220 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3221 __ CompareRoot(r2, Heap::kHeapNumberMapRootIndex);
3222 Split(eq, if_true, if_false, fall_through);
3223 } else if (String::Equals(check, factory->string_string())) {
3224 __ JumpIfSmi(r2, if_false);
3225 __ CompareObjectType(r2, r2, r3, FIRST_NONSTRING_TYPE);
3226 Split(lt, if_true, if_false, fall_through);
3227 } else if (String::Equals(check, factory->symbol_string())) {
3228 __ JumpIfSmi(r2, if_false);
3229 __ CompareObjectType(r2, r2, r3, SYMBOL_TYPE);
3230 Split(eq, if_true, if_false, fall_through);
3231 } else if (String::Equals(check, factory->boolean_string())) {
3232 __ CompareRoot(r2, Heap::kTrueValueRootIndex);
3233 __ beq(if_true);
3234 __ CompareRoot(r2, Heap::kFalseValueRootIndex);
3235 Split(eq, if_true, if_false, fall_through);
3236 } else if (String::Equals(check, factory->undefined_string())) {
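// typeof null is "object" and typeof of a Smi is "number", so both cases
// go straight to false here.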
3237 __ CompareRoot(r2, Heap::kNullValueRootIndex);
3238 __ beq(if_false);
3239 __ JumpIfSmi(r2, if_false);
3240 // Check for undetectable objects => true.
3241 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3242 __ tm(FieldMemOperand(r2, Map::kBitFieldOffset),
3243 Operand(1 << Map::kIsUndetectable));
3244 Split(ne, if_true, if_false, fall_through);
3245
3246 } else if (String::Equals(check, factory->function_string())) {
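// typeof reports "function" only for maps that are callable and not
// undetectable; undetectable callables report "undefined" instead.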
3247 __ JumpIfSmi(r2, if_false);
3248 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3249 __ LoadlB(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
3250 __ AndP(r3, r3,
3251 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3252 __ CmpP(r3, Operand(1 << Map::kIsCallable));
3253 Split(eq, if_true, if_false, fall_through);
3254 } else if (String::Equals(check, factory->object_string())) {
3255 __ JumpIfSmi(r2, if_false);
3256 __ CompareRoot(r2, Heap::kNullValueRootIndex);
3257 __ beq(if_true);
3258 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3259 __ CompareObjectType(r2, r2, r3, FIRST_JS_RECEIVER_TYPE);
3260 __ blt(if_false);
3261 __ tm(FieldMemOperand(r2, Map::kBitFieldOffset),
3262 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3263 Split(eq, if_true, if_false, fall_through);
3264 // clang-format off
3265 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
3266 } else if (String::Equals(check, factory->type##_string())) { \
3267 __ JumpIfSmi(r2, if_false); \
3268 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); \
3269 __ CompareRoot(r2, Heap::k##Type##MapRootIndex); \
3270 Split(eq, if_true, if_false, fall_through);
3271 SIMD128_TYPES(SIMD128_TYPE)
3272 #undef SIMD128_TYPE
3273 // clang-format on
3274 } else {
3275 if (if_false != fall_through) __ b(if_false);
3276 }
3277 context()->Plug(if_true, if_false);
3278 }
3279
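// Compiles comparison expressions. Literal comparisons (typeof checks and
// null/undefined checks) are handled by TryLiteralCompare; `in` and
// `instanceof` call out to builtins; all other operators go through the
// CompareIC, with an inline Smi fast path when both operands are Smis.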
3280 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3281 Comment cmnt(masm_, "[ CompareOperation");
3282
3283 // First we try a fast inlined version of the compare when one of
3284 // the operands is a literal.
3285 if (TryLiteralCompare(expr)) return;
3286
3287 // Always perform the comparison for its control flow. Pack the result
3288 // into the expression's context after the comparison is performed.
3289 Label materialize_true, materialize_false;
3290 Label* if_true = NULL;
3291 Label* if_false = NULL;
3292 Label* fall_through = NULL;
3293 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3294 &if_false, &fall_through);
3295
3296 Token::Value op = expr->op();
3297 VisitForStackValue(expr->left());
3298 switch (op) {
3299 case Token::IN:
3300 VisitForStackValue(expr->right());
3301 SetExpressionPosition(expr);
3302 EmitHasProperty();
3303 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3304 __ CompareRoot(r2, Heap::kTrueValueRootIndex);
3305 Split(eq, if_true, if_false, fall_through);
3306 break;
3307
3308 case Token::INSTANCEOF: {
3309 VisitForAccumulatorValue(expr->right());
3310 SetExpressionPosition(expr);
3311 PopOperand(r3);
3312 __ Call(isolate()->builtins()->InstanceOf(), RelocInfo::CODE_TARGET);
3313 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3314 __ CompareRoot(r2, Heap::kTrueValueRootIndex);
3315 Split(eq, if_true, if_false, fall_through);
3316 break;
3317 }
3318
3319 default: {
3320 VisitForAccumulatorValue(expr->right());
3321 SetExpressionPosition(expr);
3322 Condition cond = CompareIC::ComputeCondition(op);
3323 PopOperand(r3);
3324
3325 bool inline_smi_code = ShouldInlineSmiCase(op);
3326 JumpPatchSite patch_site(masm_);
3327 if (inline_smi_code) {
3328 Label slow_case;
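// OR the operands together: the result carries the Smi tag only if both
// operands are Smis, so a single tag test covers both.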
3329 __ LoadRR(r4, r3);
3330 __ OrP(r4, r2);
3331 patch_site.EmitJumpIfNotSmi(r4, &slow_case);
3332 __ CmpP(r3, r2);
3333 Split(cond, if_true, if_false, NULL);
3334 __ bind(&slow_case);
3335 }
3336
3337 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
3338 CallIC(ic, expr->CompareOperationFeedbackId());
3339 patch_site.EmitPatchInfo();
3340 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3341 __ CmpP(r2, Operand::Zero());
3342 Split(cond, if_true, if_false, fall_through);
3343 }
3344 }
3345
3346 // Convert the result of the comparison into one expected for this
3347 // expression's context.
3348 context()->Plug(if_true, if_false);
3349 }
3350
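// Emits the inlined comparison against null or undefined. For strict
// equality (`x === null`) only the exact oddball is accepted; for the
// sloppy form (`x == null`) the value compares equal when its map has the
// undetectable bit set, which covers null, undefined, and undetectable
// objects.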
3351 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
3352 Expression* sub_expr,
3353 NilValue nil) {
3354 Label materialize_true, materialize_false;
3355 Label* if_true = NULL;
3356 Label* if_false = NULL;
3357 Label* fall_through = NULL;
3358 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3359 &if_false, &fall_through);
3360
3361 VisitForAccumulatorValue(sub_expr);
3362 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3363 if (expr->op() == Token::EQ_STRICT) {
3364 Heap::RootListIndex nil_value = nil == kNullValue
3365 ? Heap::kNullValueRootIndex
3366 : Heap::kUndefinedValueRootIndex;
3367 __ CompareRoot(r2, nil_value);
3368 Split(eq, if_true, if_false, fall_through);
3369 } else {
3370 __ JumpIfSmi(r2, if_false);
3371 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3372 __ LoadlB(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
3373 __ AndP(r0, r3, Operand(1 << Map::kIsUndetectable));
3374 Split(ne, if_true, if_false, fall_through);
3375 }
3376 context()->Plug(if_true, if_false);
3377 }
3378 Register FullCodeGenerator::result_register() { return r2; }
3379
3380 Register FullCodeGenerator::context_register() { return cp; }
3381
3382 void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
3383 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
3384 __ LoadP(value, MemOperand(fp, frame_offset));
3385 }
3386
3387 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
3388 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
3389 __ StoreP(value, MemOperand(fp, frame_offset));
3390 }
3391
3392 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
3393 __ LoadP(dst, ContextMemOperand(cp, context_index), r0);
3394 }
3395
3396 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
3397 DeclarationScope* closure_scope = scope()->GetClosureScope();
3398 if (closure_scope->is_script_scope() || closure_scope->is_module_scope()) {
3399 // Contexts nested in the native context have a canonical empty function
3400 // as their closure, not the anonymous closure containing the global
3401 // code.
3402 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip);
3403 } else if (closure_scope->is_eval_scope()) {
3404 // Contexts created by a call to eval have the same closure as the
3405 // context calling eval, not the anonymous closure containing the eval
3406 // code. Fetch it from the context.
3407 __ LoadP(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX));
3408 } else {
3409 DCHECK(closure_scope->is_function_scope());
3410 __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3411 }
3412 PushOperand(ip);
3413 }
3414
3415 // ----------------------------------------------------------------------------
3416 // Non-local control flow support.
3417
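// A finally block may be entered via an abrupt completion (return, throw,
// break or continue). The pending message object is saved on entry and
// restored on exit so that a message produced before the finally block is
// not clobbered by code running inside it.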
3418 void FullCodeGenerator::EnterFinallyBlock() {
3419 DCHECK(!result_register().is(r3));
3420 // Store pending message while executing finally block.
3421 ExternalReference pending_message_obj =
3422 ExternalReference::address_of_pending_message_obj(isolate());
3423 __ mov(ip, Operand(pending_message_obj));
3424 __ LoadP(r3, MemOperand(ip));
3425 PushOperand(r3);
3426
3427 ClearPendingMessage();
3428 }
3429
3430 void FullCodeGenerator::ExitFinallyBlock() {
3431 DCHECK(!result_register().is(r3));
3432 // Restore pending message from stack.
3433 PopOperand(r3);
3434 ExternalReference pending_message_obj =
3435 ExternalReference::address_of_pending_message_obj(isolate());
3436 __ mov(ip, Operand(pending_message_obj));
3437 __ StoreP(r3, MemOperand(ip));
3438 }
3439
3440 void FullCodeGenerator::ClearPendingMessage() {
3441 DCHECK(!result_register().is(r3));
3442 ExternalReference pending_message_obj =
3443 ExternalReference::address_of_pending_message_obj(isolate());
3444 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
3445 __ mov(ip, Operand(pending_message_obj));
3446 __ StoreP(r3, MemOperand(ip));
3447 }
3448
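// Emitted at the end of a try-finally: r3 holds the token identifying how
// the finally block was entered, and each deferred command compares
// against its token to resume the corresponding completion (return,
// rethrow, break or continue).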
3449 void FullCodeGenerator::DeferredCommands::EmitCommands() {
3450 DCHECK(!result_register().is(r3));
3451 // Restore the accumulator (r2) and token (r3).
3452 __ Pop(r3, result_register());
3453 for (DeferredCommand cmd : commands_) {
3454 Label skip;
3455 __ CmpSmiLiteral(r3, Smi::FromInt(cmd.token), r0);
3456 __ bne(&skip);
3457 switch (cmd.command) {
3458 case kReturn:
3459 codegen_->EmitUnwindAndReturn();
3460 break;
3461 case kThrow:
3462 __ Push(result_register());
3463 __ CallRuntime(Runtime::kReThrow);
3464 break;
3465 case kContinue:
3466 codegen_->EmitContinue(cmd.target);
3467 break;
3468 case kBreak:
3469 codegen_->EmitBreak(cmd.target);
3470 break;
3471 }
3472 __ bind(&skip);
3473 }
3474 }
3475
3476 #undef __
3477
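// Encodings of the 4-byte BRC instruction at a back edge: condition 0xA
// (ge) for the interrupt check and condition 0x0 (never taken) once OSR is
// armed. The branch offset differs between 64-bit and 31-bit builds because
// the patched call sequence has a different length.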
3478 #if V8_TARGET_ARCH_S390X
3479 static const FourByteInstr kInterruptBranchInstruction = 0xA7A40011;
3480 static const FourByteInstr kOSRBranchInstruction = 0xA7040011;
3481 static const int16_t kBackEdgeBranchOffset = 0x11 * 2;
3482 #else
3483 static const FourByteInstr kInterruptBranchInstruction = 0xA7A4000D;
3484 static const FourByteInstr kOSRBranchInstruction = 0xA704000D;
3485 static const int16_t kBackEdgeBranchOffset = 0xD * 2;
3486 #endif
3487
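// A back edge is compiled as a conditional branch over a call to the
// InterruptCheck builtin. Patching to ON_STACK_REPLACEMENT turns the branch
// into a never-taken BRC and retargets the call at the OnStackReplacement
// builtin, so the call runs on every back edge; patching back to INTERRUPT
// restores the ge branch and the interrupt target.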
3488 void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc,
3489 BackEdgeState target_state,
3490 Code* replacement_code) {
3491 Address call_address = Assembler::target_address_from_return_address(pc);
3492 Address branch_address = call_address - 4;
3493 Isolate* isolate = unoptimized_code->GetIsolate();
3494 CodePatcher patcher(isolate, branch_address, 4);
3495
3496 switch (target_state) {
3497 case INTERRUPT: {
3498 // <decrement profiling counter>
3499 // bge <ok> ;; patched to GE BRC
3500 // brasrl r14, <interrupt stub address>
3501 // <reset profiling counter>
3502 // ok-label
3503 patcher.masm()->brc(ge, Operand(kBackEdgeBranchOffset));
3504 break;
3505 }
3506 case ON_STACK_REPLACEMENT:
3507 // <decrement profiling counter>
3508 // brc 0x0, <ok> ;; patched to NOP BRC
3509 // brasrl r14, <interrupt stub address>
3510 // <reset profiling counter>
3511 // ok-label ----- pc_after points here
3512 patcher.masm()->brc(CC_NOP, Operand(kBackEdgeBranchOffset));
3513 break;
3514 }
3515
3516 // Replace the stack check address in the mov sequence with the
3517 // entry address of the replacement code.
3518 Assembler::set_target_address_at(isolate, call_address, unoptimized_code,
3519 replacement_code->entry());
3520
3521 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
3522 unoptimized_code, call_address, replacement_code);
3523 }
3524
3525 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
3526 Isolate* isolate, Code* unoptimized_code, Address pc) {
3527 Address call_address = Assembler::target_address_from_return_address(pc);
3528 Address branch_address = call_address - 4;
3529 #ifdef DEBUG
3530 Address interrupt_address =
3531 Assembler::target_address_at(call_address, unoptimized_code);
3532 #endif
3533
3534 DCHECK(BRC == Instruction::S390OpcodeValue(branch_address));
3535 // For interrupt, we expect a branch greater than or equal
3536 // i.e. BRC 0xa, +XXXX (0xA7A4XXXX)
3537 FourByteInstr br_instr = Instruction::InstructionBits(
3538 reinterpret_cast<const byte*>(branch_address));
3539 if (kInterruptBranchInstruction == br_instr) {
3540 DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry());
3541 return INTERRUPT;
3542 }
3543
3544 // Expect BRC to be patched to NOP branch.
3545 // i.e. BRC 0x0, +XXXX (0xA704XXXX)
3546 USE(kOSRBranchInstruction);
3547 DCHECK(kOSRBranchInstruction == br_instr);
3548
3549 DCHECK(interrupt_address ==
3550 isolate->builtins()->OnStackReplacement()->entry());
3551 return ON_STACK_REPLACEMENT;
3552 }
3553
3554 } // namespace internal
3555 } // namespace v8
3556 #endif // V8_TARGET_ARCH_S390
3557