// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_IA32

#include "src/full-codegen/full-codegen.h"
#include "src/ast/compile-time-value.h"
#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compilation-info.h"
#include "src/compiler.h"
#include "src/debug/debug.h"
#include "src/ia32/frames-ia32.h"
#include "src/ic/ic.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

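// A patch site is a location in generated code that the inline cache
// machinery can later patch: the carry-based jumps emitted below start out
// statically taken or not taken and are rewritten into real smi checks
// (jz/jnz) once type feedback is available.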
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      DCHECK(is_uint8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
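  // Until then, the test instruction in the emitters above always clears CF,
  // so the carry-based jumps behave unconditionally, as noted there.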
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called (i.e. ourselves)
//   o edx: the new target value
//   o esi: our context
//   o ebp: our caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-ia32.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
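  // The profiling counter holds the remaining interrupt budget as a Smi in a
  // Cell. It is decremented on back edges and at returns; when it goes
  // negative, a call to the InterruptCheck builtin is emitted.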
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));
    __ AssertNotSmi(ecx);
    __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx);
    __ Assert(above_equal, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  // Increment invocation count for the function.
  {
    Comment cmnt(masm_, "[ Increment invocation count");
    __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ mov(ecx, FieldOperand(ecx, LiteralsArray::kFeedbackVectorOffset));
    __ add(FieldOperand(
               ecx, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
                        TypeFeedbackVector::kHeaderSize),
           Immediate(Smi::FromInt(1)));
  }

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
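      // With many locals, first check that the whole frame still fits below
      // the real stack limit before pushing them all.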
      if (locals_count >= 128) {
        Label ok;
        __ mov(ecx, esp);
        __ sub(ecx, Immediate(locals_count * kPointerSize));
        ExternalReference stack_limit =
            ExternalReference::address_of_real_stack_limit(isolate());
        __ cmp(ecx, Operand::StaticVariable(stack_limit));
        __ j(above_equal, &ok, Label::kNear);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(ecx, loop_iterations);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(eax);
        }
        __ dec(ecx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(eax);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->NeedsContext()) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    // Argument to NewContext is the function, which is still in edi.
    if (info->scope()->is_script_scope()) {
      __ push(edi);
      __ Push(info->scope()->scope_info());
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, so clobbering it is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(edx);  // Preserve new target.
      }
      if (slots <= FastNewFunctionContextStub::kMaximumSlots) {
        FastNewFunctionContextStub stub(isolate());
        __ mov(FastNewFunctionContextDescriptor::SlotsRegister(),
               Immediate(slots));
        __ CallStub(&stub);
        // Result of FastNewFunctionContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(edi);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(edx);  // Restore new target.
      }
    }
    function_in_register = false;
    // Context is returned in eax. It replaces the context passed to us.
    // It's saved in the stack and kept live in esi.
    __ mov(esi, eax);
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var =
          (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(esi,
                                    context_offset,
                                    eax,
                                    ebx,
                                    kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding this function and the new target are both trashed
  // if we bail out here. But since that can only happen when the new target
  // is not used and we allocate a context, the value of
  // |function_in_register| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // Possibly set up a local binding to the this function, which is used in
  // derived constructors with super calls.
  Variable* this_function_var = info->scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as
      // such.
    }
    SetVar(this_function_var, edi, ebx, ecx);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = info->scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, edx, ebx, ecx);
  }

  // Possibly allocate a rest parameter array.
  Variable* rest_param = info->scope()->rest_parameter();
  if (rest_param != nullptr) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register = false;
    SetVar(rest_param, eax, ebx, edx);
  }

  Variable* arguments = info->scope()->arguments();
  if (arguments != NULL) {
    // Arguments object must be allocated after the context object, in
    // case the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(edi);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(info->scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    ExternalReference stack_limit =
        ExternalReference::address_of_stack_limit(isolate());
    __ cmp(esp, Operand::StaticVariable(stack_limit));
    __ j(above_equal, &ok, Label::kNear);
    __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ Move(eax, Immediate(Smi::kZero));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ mov(ebx, Immediate(profiling_counter_));
  __ mov(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(reset_value)));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
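  // Weight the budget decrement by the size of the loop body (the code
  // distance back to the target), clamped to kMaxBackEdgeWeight, so larger
  // loops consume the interrupt budget faster.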
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ j(positive, &ok, Label::kNear);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(eax);
  }
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(eax);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    // Common return label.
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    SetReturnPosition(literal());
    __ leave();

    int arg_count = info_->scope()->num_parameters() + 1;
    int arguments_bytes = arg_count * kPointerSize;
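    // Ret drops the receiver and arguments off the caller's stack; ecx is
    // only used as a scratch register when the byte count does not fit the
    // immediate of a plain ret.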
    __ Ret(arguments_bytes, ecx);
  }
}

void FullCodeGenerator::RestoreContext() {
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  // Memory operands can be pushed directly.
  codegen()->PushOperand(operand);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Immediate(lit));
  } else {
    __ Move(result_register(), Immediate(lit));
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  codegen()->OperandStackDepthIncrement(1);
  if (lit->IsSmi()) {
    __ SafePush(Immediate(lit));
  } else {
    __ push(Immediate(lit));
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
         !lit->IsUndetectable());
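  // The literal's truthiness is known at compile time, so jump directly to
  // the matching label instead of emitting a runtime ToBoolean test.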
  if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
      lit->IsFalse(isolate())) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ mov(Operand(esp, 0), reg);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  codegen()->OperandStackDepthIncrement(1);
  Label done;
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ mov(result_register(), value);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  codegen()->OperandStackDepthIncrement(1);
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(equal, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
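  // Emit at most one jump: whichever label coincides with the fall-through
  // position is reached by simply falling through.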
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(ebp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ mov(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      DCHECK(!variable->binding_needs_init());
      FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;
    }
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case VariableLocation::CONTEXT:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      DCHECK_EQ(VAR, variable->mode());
      DCHECK(!variable->binding_needs_init());
      __ push(Immediate(variable->name()));
      __ CallRuntime(Runtime::kDeclareEvalVar);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(esi,
                                Context::SlotOffset(variable->index()),
                                result_register(),
                                ecx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      PushOperand(variable->name());
      VisitForStackValue(declaration->fun());
      CallRuntimeWithOperands(Runtime::kDeclareEvalFunction);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ EmitLoadTypeFeedbackVector(eax);
  __ Push(eax);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as the final fall-through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
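      // Or-ing the two values combines their tag bits: the result has a zero
      // smi tag only if both operands are smis.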
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ jmp(&skip, Label::kNear);
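    // Resume point for a deopt of the CompareIC: at this bailout the
    // comparison result is materialized as a boolean in eax, so test it
    // against true explicitly. Normal control flow jumps over this code.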
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);
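  // The loop below keeps five values on the stack (from deepest to
  // shallowest): the enumerable object, the expected map (or Smi(1) on the
  // slow path), the enum cache (or fixed array of keys), its length, and
  // the current index.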

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);
  __ bind(&convert);
  __ Call(isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET);
  RestoreContext();
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ push(eax);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime, use_cache, fixed_array;
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);

  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::kZero));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::kZero));  // Initial index.
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  __ bind(&fixed_array);

  __ push(Immediate(Smi::FromInt(1)));  // Smi(1) indicates slow check.
  __ push(eax);  // Array.
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ push(Immediate(Smi::kZero));  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register eax.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(eax, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ebx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ebx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(edx);
  __ mov(FieldOperand(edx, FixedArray::OffsetOfElementAt(vector_index)),
         Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));
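  // Writing the megamorphic sentinel into the feedback slot records that
  // this for-in took the slow, filtered path.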
1050
1051 // eax contains the key. The receiver in ebx is the second argument to the
1052 // ForInFilter. ForInFilter returns undefined if the receiver doesn't
1053 // have the key or returns the name-converted key.
1054 __ Call(isolate()->builtins()->ForInFilter(), RelocInfo::CODE_TARGET);
1055 RestoreContext();
1056 PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
1057 __ JumpIfRoot(result_register(), Heap::kUndefinedValueRootIndex,
1058 loop_statement.continue_label());
1059
1060 // Update the 'each' property or variable from the possibly filtered
1061 // entry in register eax.
1062 __ bind(&update_each);
1063 // Perform the assignment as if via '='.
1064 { EffectContext context(this);
1065 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1066 PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
1067 }
1068
1069 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1070 PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
1071 // Generate code for the body of the loop.
1072 Visit(stmt->body());
1073
1074 // Generate code for going to the next element by incrementing the
1075 // index (smi) stored on top of the stack.
1076 __ bind(loop_statement.continue_label());
1077 PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
1078 __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
1079
1080 EmitBackEdgeBookkeeping(stmt, &loop);
1081 __ jmp(&loop);
1082
1083 // Remove the pointers stored on the stack.
1084 __ bind(loop_statement.break_label());
1085 DropOperands(5);
1086
1087 // Exit and decrement the loop depth.
1088 PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
1089 __ bind(&exit);
1090 decrement_loop_depth();
1091 }
1092
1093
EmitSetHomeObject(Expression * initializer,int offset,FeedbackVectorSlot slot)1094 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1095 FeedbackVectorSlot slot) {
1096 DCHECK(NeedsHomeObject(initializer));
1097 __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
1098 __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
1099 CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1100 }
1101
1102
EmitSetHomeObjectAccumulator(Expression * initializer,int offset,FeedbackVectorSlot slot)1103 void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1104 int offset,
1105 FeedbackVectorSlot slot) {
1106 DCHECK(NeedsHomeObject(initializer));
1107 __ mov(StoreDescriptor::ReceiverRegister(), eax);
1108 __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
1109 CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1110 }
1111
1112
EmitLoadGlobalCheckExtensions(VariableProxy * proxy,TypeofMode typeof_mode,Label * slow)1113 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1114 TypeofMode typeof_mode,
1115 Label* slow) {
1116 Register context = esi;
1117 Register temp = edx;
1118
1119 int to_check = scope()->ContextChainLengthUntilOutermostSloppyEval();
1120 for (Scope* s = scope(); to_check > 0; s = s->outer_scope()) {
1121 if (!s->NeedsContext()) continue;
1122 if (s->calls_sloppy_eval()) {
1123 // Check that extension is "the hole".
1124 __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
1125 Heap::kTheHoleValueRootIndex, slow);
1126 }
1127 // Load next context in chain.
1128 __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1129 // Walk the rest of the chain without clobbering esi.
1130 context = temp;
1131 to_check--;
1132 }
1133
1134 // All extension objects were empty and it is safe to use a normal global
1135 // load machinery.
1136 EmitGlobalVariableLoad(proxy, typeof_mode);
1137 }
1138
1139
ContextSlotOperandCheckExtensions(Variable * var,Label * slow)1140 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1141 Label* slow) {
1142 DCHECK(var->IsContextSlot());
1143 Register context = esi;
1144 Register temp = ebx;
1145
1146 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1147 if (s->NeedsContext()) {
1148 if (s->calls_sloppy_eval()) {
1149 // Check that extension is "the hole".
1150 __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
1151 Heap::kTheHoleValueRootIndex, slow);
1152 }
1153 __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1154 // Walk the rest of the chain without clobbering esi.
1155 context = temp;
1156 }
1157 }
1158 // Check that last extension is "the hole".
1159 __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
1160 Heap::kTheHoleValueRootIndex, slow);
1161
1162 // This function is used only for loads, not stores, so it's safe to
1163 // return an esi-based operand (the write barrier cannot be allowed to
1164 // destroy the esi register).
1165 return ContextOperand(context, var->index());
1166 }
1167
1168
EmitDynamicLookupFastCase(VariableProxy * proxy,TypeofMode typeof_mode,Label * slow,Label * done)1169 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1170 TypeofMode typeof_mode,
1171 Label* slow, Label* done) {
1172 // Generate fast-case code for variables that might be shadowed by
1173 // eval-introduced variables. Eval is used a lot without
1174 // introducing variables. In those cases, we do not want to
1175 // perform a runtime call for all variables in the scope
1176 // containing the eval.
1177 Variable* var = proxy->var();
1178 if (var->mode() == DYNAMIC_GLOBAL) {
1179 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1180 __ jmp(done);
1181 } else if (var->mode() == DYNAMIC_LOCAL) {
1182 Variable* local = var->local_if_not_shadowed();
1183 __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
1184 if (local->binding_needs_init()) {
1185 __ cmp(eax, isolate()->factory()->the_hole_value());
1186 __ j(not_equal, done);
1187 __ push(Immediate(var->name()));
1188 __ CallRuntime(Runtime::kThrowReferenceError);
1189 } else {
1190 __ jmp(done);
1191 }
1192 }
1193 }
1194
EmitVariableLoad(VariableProxy * proxy,TypeofMode typeof_mode)1195 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1196 TypeofMode typeof_mode) {
1197 SetExpressionPosition(proxy);
1198 PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
1199 Variable* var = proxy->var();
1200
1201 // Three cases: global variables, lookup variables, and all other types of
1202 // variables.
1203 switch (var->location()) {
1204 case VariableLocation::UNALLOCATED: {
1205 Comment cmnt(masm_, "[ Global variable");
1206 EmitGlobalVariableLoad(proxy, typeof_mode);
1207 context()->Plug(eax);
1208 break;
1209 }
1210
1211 case VariableLocation::PARAMETER:
1212 case VariableLocation::LOCAL:
1213 case VariableLocation::CONTEXT: {
1214 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1215 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1216 : "[ Stack variable");
1217
1218 if (proxy->hole_check_mode() == HoleCheckMode::kRequired) {
1219 // Throw a reference error when using an uninitialized let/const
1220 // binding in harmony mode.
1221 Label done;
1222 GetVar(eax, var);
1223 __ cmp(eax, isolate()->factory()->the_hole_value());
1224 __ j(not_equal, &done, Label::kNear);
1225 __ push(Immediate(var->name()));
1226 __ CallRuntime(Runtime::kThrowReferenceError);
1227 __ bind(&done);
1228 context()->Plug(eax);
1229 break;
1230 }
1231 context()->Plug(var);
1232 break;
1233 }
1234
1235 case VariableLocation::LOOKUP: {
1236 Comment cmnt(masm_, "[ Lookup variable");
1237 Label done, slow;
1238 // Generate code for loading from variables potentially shadowed
1239 // by eval-introduced variables.
1240 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1241 __ bind(&slow);
1242 __ push(Immediate(var->name()));
1243 Runtime::FunctionId function_id =
1244 typeof_mode == NOT_INSIDE_TYPEOF
1245 ? Runtime::kLoadLookupSlot
1246 : Runtime::kLoadLookupSlotInsideTypeof;
1247 __ CallRuntime(function_id);
1248 __ bind(&done);
1249 context()->Plug(eax);
1250 break;
1251 }
1252
1253 case VariableLocation::MODULE:
1254 UNREACHABLE();
1255 }
1256 }
1257
1258
EmitAccessor(ObjectLiteralProperty * property)1259 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1260 Expression* expression = (property == NULL) ? NULL : property->value();
1261 if (expression == NULL) {
1262 PushOperand(isolate()->factory()->null_value());
1263 } else {
1264 VisitForStackValue(expression);
1265 if (NeedsHomeObject(expression)) {
1266 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1267 property->kind() == ObjectLiteral::Property::SETTER);
1268 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1269 EmitSetHomeObject(expression, offset, property->GetSlot());
1270 }
1271 }
1272 }
1273
1274
VisitObjectLiteral(ObjectLiteral * expr)1275 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1276 Comment cmnt(masm_, "[ ObjectLiteral");
1277
1278 Handle<FixedArray> constant_properties = expr->constant_properties();
1279 int flags = expr->ComputeFlags();
1280 // If any of the keys would store to the elements array, then we shouldn't
1281 // allow it.
1282 if (MustCreateObjectLiteralWithRuntime(expr)) {
1283 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1284 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1285 __ push(Immediate(constant_properties));
1286 __ push(Immediate(Smi::FromInt(flags)));
1287 __ CallRuntime(Runtime::kCreateObjectLiteral);
1288 } else {
1289 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1290 __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1291 __ mov(ecx, Immediate(constant_properties));
1292 __ mov(edx, Immediate(Smi::FromInt(flags)));
1293 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1294 __ CallStub(&stub);
1295 RestoreContext();
1296 }
1297 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1298
1299 // If result_saved is true the result is on top of the stack. If
1300 // result_saved is false the result is in eax.
1301 bool result_saved = false;
1302
1303 AccessorTable accessor_table(zone());
1304 int property_index = 0;
1305 for (; property_index < expr->properties()->length(); property_index++) {
1306 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1307 if (property->is_computed_name()) break;
1308 if (property->IsCompileTimeValue()) continue;
1309
1310 Literal* key = property->key()->AsLiteral();
1311 Expression* value = property->value();
1312 if (!result_saved) {
1313 PushOperand(eax); // Save result on the stack
1314 result_saved = true;
1315 }
1316 switch (property->kind()) {
1317 case ObjectLiteral::Property::CONSTANT:
1318 UNREACHABLE();
1319 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1320 DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
1321 // Fall through.
1322 case ObjectLiteral::Property::COMPUTED:
1323 // It is safe to use [[Put]] here because the boilerplate already
1324 // contains computed properties with an uninitialized value.
1325 if (key->IsStringLiteral()) {
1326 DCHECK(key->IsPropertyName());
1327 if (property->emit_store()) {
1328 VisitForAccumulatorValue(value);
1329 DCHECK(StoreDescriptor::ValueRegister().is(eax));
1330 __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
1331 CallStoreIC(property->GetSlot(0), key->value());
1332 PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
1333 if (NeedsHomeObject(value)) {
1334 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1335 }
1336 } else {
1337 VisitForEffect(value);
1338 }
1339 break;
1340 }
1341 PushOperand(Operand(esp, 0)); // Duplicate receiver.
1342 VisitForStackValue(key);
1343 VisitForStackValue(value);
1344 if (property->emit_store()) {
1345 if (NeedsHomeObject(value)) {
1346 EmitSetHomeObject(value, 2, property->GetSlot());
1347 }
1348 PushOperand(Smi::FromInt(SLOPPY)); // Language mode
1349 CallRuntimeWithOperands(Runtime::kSetProperty);
1350 } else {
1351 DropOperands(3);
1352 }
1353 break;
1354 case ObjectLiteral::Property::PROTOTYPE:
1355 PushOperand(Operand(esp, 0)); // Duplicate receiver.
1356 VisitForStackValue(value);
1357 DCHECK(property->emit_store());
1358 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1359 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1360 BailoutState::NO_REGISTERS);
1361 break;
1362 case ObjectLiteral::Property::GETTER:
1363 if (property->emit_store()) {
1364 AccessorTable::Iterator it = accessor_table.lookup(key);
1365 it->second->bailout_id = expr->GetIdForPropertySet(property_index);
1366 it->second->getter = property;
1367 }
1368 break;
1369 case ObjectLiteral::Property::SETTER:
1370 if (property->emit_store()) {
1371 AccessorTable::Iterator it = accessor_table.lookup(key);
1372 it->second->bailout_id = expr->GetIdForPropertySet(property_index);
1373 it->second->setter = property;
1374 }
1375 break;
1376 }
1377 }
1378
1379 // Emit code to define accessors, using only a single call to the runtime for
1380 // each pair of corresponding getters and setters.
1381 for (AccessorTable::Iterator it = accessor_table.begin();
1382 it != accessor_table.end();
1383 ++it) {
1384 PushOperand(Operand(esp, 0)); // Duplicate receiver.
1385 VisitForStackValue(it->first);
1386
1387 EmitAccessor(it->second->getter);
1388 EmitAccessor(it->second->setter);
1389
1390 PushOperand(Smi::FromInt(NONE));
1391 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
1392 PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
1393 }
1394
1395 // Object literals have two parts. The "static" part on the left contains no
1396 // computed property names, and so we can compute its map ahead of time; see
1397 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1398 // starts with the first computed property name, and continues with all
1399 // properties to its right. All the code from above initializes the static
1400 // component of the object literal, and arranges for the map of the result to
1401 // reflect the static order in which the keys appear. For the dynamic
1402 // properties, we compile them into a series of "SetOwnProperty" runtime
1403 // calls. This will preserve insertion order.
1404 for (; property_index < expr->properties()->length(); property_index++) {
1405 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1406
1407 Expression* value = property->value();
1408 if (!result_saved) {
1409 PushOperand(eax); // Save result on the stack
1410 result_saved = true;
1411 }
1412
1413 PushOperand(Operand(esp, 0)); // Duplicate receiver.
1414
1415 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1416 DCHECK(!property->is_computed_name());
1417 VisitForStackValue(value);
1418 DCHECK(property->emit_store());
1419 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1420 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1421 BailoutState::NO_REGISTERS);
1422 } else {
1423 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1424 VisitForStackValue(value);
1425 if (NeedsHomeObject(value)) {
1426 EmitSetHomeObject(value, 2, property->GetSlot());
1427 }
1428
1429 switch (property->kind()) {
1430 case ObjectLiteral::Property::CONSTANT:
1431 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1432 case ObjectLiteral::Property::COMPUTED:
1433 if (property->emit_store()) {
1434 PushOperand(Smi::FromInt(NONE));
1435 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1436 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
1437 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1438 BailoutState::NO_REGISTERS);
1439 } else {
1440 DropOperands(3);
1441 }
1442 break;
1443
1444 case ObjectLiteral::Property::PROTOTYPE:
1445 UNREACHABLE();
1446 break;
1447
1448 case ObjectLiteral::Property::GETTER:
1449 PushOperand(Smi::FromInt(NONE));
1450 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
1451 break;
1452
1453 case ObjectLiteral::Property::SETTER:
1454 PushOperand(Smi::FromInt(NONE));
1455 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
1456 break;
1457 }
1458 }
1459 }
1460
1461 if (result_saved) {
1462 context()->PlugTOS();
1463 } else {
1464 context()->Plug(eax);
1465 }
1466 }
1467
1468
VisitArrayLiteral(ArrayLiteral * expr)1469 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1470 Comment cmnt(masm_, "[ ArrayLiteral");
1471
1472 Handle<FixedArray> constant_elements = expr->constant_elements();
1473 bool has_constant_fast_elements =
1474 IsFastObjectElementsKind(expr->constant_elements_kind());
1475
1476 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1477 if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1478 // If the only customer of allocation sites is transitioning, then
1479 // we can turn it off if we don't have anywhere else to transition to.
1480 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1481 }
1482
1483 if (MustCreateArrayLiteralWithRuntime(expr)) {
1484 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1485 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1486 __ push(Immediate(constant_elements));
1487 __ push(Immediate(Smi::FromInt(expr->ComputeFlags())));
1488 __ CallRuntime(Runtime::kCreateArrayLiteral);
1489 } else {
1490 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1491 __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1492 __ mov(ecx, Immediate(constant_elements));
1493 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1494 __ CallStub(&stub);
1495 RestoreContext();
1496 }
1497 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1498
1499 bool result_saved = false; // Is the result saved to the stack?
1500 ZoneList<Expression*>* subexprs = expr->values();
1501 int length = subexprs->length();
1502
1503 // Emit code to evaluate all the non-constant subexpressions and to store
1504 // them into the newly cloned array.
1505 for (int array_index = 0; array_index < length; array_index++) {
1506 Expression* subexpr = subexprs->at(array_index);
1507 DCHECK(!subexpr->IsSpread());
1508
1509 // If the subexpression is a literal or a simple materialized literal it
1510 // is already set in the cloned array.
1511 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1512
1513 if (!result_saved) {
1514 PushOperand(eax); // array literal.
1515 result_saved = true;
1516 }
1517 VisitForAccumulatorValue(subexpr);
1518
1519 __ mov(StoreDescriptor::NameRegister(),
1520 Immediate(Smi::FromInt(array_index)));
1521 __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
1522 CallKeyedStoreIC(expr->LiteralFeedbackSlot());
1523 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1524 BailoutState::NO_REGISTERS);
1525 }
1526
1527 if (result_saved) {
1528 context()->PlugTOS();
1529 } else {
1530 context()->Plug(eax);
1531 }
1532 }
1533
1534
1535 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1536 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1537
1538 Comment cmnt(masm_, "[ Assignment");
1539
1540 Property* property = expr->target()->AsProperty();
1541 LhsKind assign_type = Property::GetAssignType(property);
1542
1543 // Evaluate LHS expression.
1544 switch (assign_type) {
1545 case VARIABLE:
1546 // Nothing to do here.
1547 break;
1548 case NAMED_SUPER_PROPERTY:
1549 VisitForStackValue(
1550 property->obj()->AsSuperPropertyReference()->this_var());
1551 VisitForAccumulatorValue(
1552 property->obj()->AsSuperPropertyReference()->home_object());
1553 PushOperand(result_register());
1554 if (expr->is_compound()) {
1555 PushOperand(MemOperand(esp, kPointerSize));
1556 PushOperand(result_register());
1557 }
1558 break;
1559 case NAMED_PROPERTY:
1560 if (expr->is_compound()) {
1561 // We need the receiver both on the stack and in the register.
1562 VisitForStackValue(property->obj());
1563 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
1564 } else {
1565 VisitForStackValue(property->obj());
1566 }
1567 break;
1568 case KEYED_SUPER_PROPERTY:
1569 VisitForStackValue(
1570 property->obj()->AsSuperPropertyReference()->this_var());
1571 VisitForStackValue(
1572 property->obj()->AsSuperPropertyReference()->home_object());
1573 VisitForAccumulatorValue(property->key());
1574 PushOperand(result_register());
1575 if (expr->is_compound()) {
1576 PushOperand(MemOperand(esp, 2 * kPointerSize));
1577 PushOperand(MemOperand(esp, 2 * kPointerSize));
1578 PushOperand(result_register());
1579 }
1580 break;
1581 case KEYED_PROPERTY: {
1582 if (expr->is_compound()) {
1583 VisitForStackValue(property->obj());
1584 VisitForStackValue(property->key());
1585 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
1586 __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
1587 } else {
1588 VisitForStackValue(property->obj());
1589 VisitForStackValue(property->key());
1590 }
1591 break;
1592 }
1593 }
1594
1595 // For compound assignments we need another deoptimization point after the
1596 // variable/property load.
1597 if (expr->is_compound()) {
1598 AccumulatorValueContext result_context(this);
1599 { AccumulatorValueContext left_operand_context(this);
1600 switch (assign_type) {
1601 case VARIABLE:
1602 EmitVariableLoad(expr->target()->AsVariableProxy());
1603 PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
1604 break;
1605 case NAMED_SUPER_PROPERTY:
1606 EmitNamedSuperPropertyLoad(property);
1607 PrepareForBailoutForId(property->LoadId(),
1608 BailoutState::TOS_REGISTER);
1609 break;
1610 case NAMED_PROPERTY:
1611 EmitNamedPropertyLoad(property);
1612 PrepareForBailoutForId(property->LoadId(),
1613 BailoutState::TOS_REGISTER);
1614 break;
1615 case KEYED_SUPER_PROPERTY:
1616 EmitKeyedSuperPropertyLoad(property);
1617 PrepareForBailoutForId(property->LoadId(),
1618 BailoutState::TOS_REGISTER);
1619 break;
1620 case KEYED_PROPERTY:
1621 EmitKeyedPropertyLoad(property);
1622 PrepareForBailoutForId(property->LoadId(),
1623 BailoutState::TOS_REGISTER);
1624 break;
1625 }
1626 }
1627
1628 Token::Value op = expr->binary_op();
1629 PushOperand(eax); // Left operand goes on the stack.
1630 VisitForAccumulatorValue(expr->value());
1631
1632 if (ShouldInlineSmiCase(op)) {
1633 EmitInlineSmiBinaryOp(expr->binary_operation(),
1634 op,
1635 expr->target(),
1636 expr->value());
1637 } else {
1638 EmitBinaryOp(expr->binary_operation(), op);
1639 }
1640
1641 // Deoptimization point in case the binary operation may have side effects.
1642 PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
1643 } else {
1644 VisitForAccumulatorValue(expr->value());
1645 }
1646
1647 SetExpressionPosition(expr);
1648
1649 // Store the value.
1650 switch (assign_type) {
1651 case VARIABLE: {
1652 VariableProxy* proxy = expr->target()->AsVariableProxy();
1653 EmitVariableAssignment(proxy->var(), expr->op(), expr->AssignmentSlot(),
1654 proxy->hole_check_mode());
1655 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
1656 context()->Plug(eax);
1657 break;
1658 }
1659 case NAMED_PROPERTY:
1660 EmitNamedPropertyAssignment(expr);
1661 break;
1662 case NAMED_SUPER_PROPERTY:
1663 EmitNamedSuperPropertyStore(property);
1664 context()->Plug(result_register());
1665 break;
1666 case KEYED_SUPER_PROPERTY:
1667 EmitKeyedSuperPropertyStore(property);
1668 context()->Plug(result_register());
1669 break;
1670 case KEYED_PROPERTY:
1671 EmitKeyedPropertyAssignment(expr);
1672 break;
1673 }
1674 }
1675
1676
1677 void FullCodeGenerator::VisitYield(Yield* expr) {
1678 Comment cmnt(masm_, "[ Yield");
1679 SetExpressionPosition(expr);
1680
1681 // Evaluate yielded value first; the initial iterator definition depends on
1682 // this. It stays on the stack while we update the iterator.
1683 VisitForStackValue(expr->expression());
1684
1685 Label suspend, continuation, post_runtime, resume, exception;
1686
1687 __ jmp(&suspend);
1688 __ bind(&continuation);
1689 // When we arrive here, eax holds the generator object.
1690 __ RecordGeneratorContinuation();
1691 __ mov(ebx, FieldOperand(eax, JSGeneratorObject::kResumeModeOffset));
1692 __ mov(eax, FieldOperand(eax, JSGeneratorObject::kInputOrDebugPosOffset));
1693 STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn);
1694 STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn);
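  // Three-way dispatch on the resume mode, relying on the enum order asserted
  // above: below kReturn (kNext) resumes execution; above kReturn (kThrow)
  // raises an exception; equal falls through to box the input in an iterator
  // result. The input value is pushed for both the return and throw paths.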
1695 __ cmp(ebx, Immediate(Smi::FromInt(JSGeneratorObject::kReturn)));
1696 __ j(less, &resume);
1697 __ Push(result_register());
1698 __ j(greater, &exception);
1699 EmitCreateIteratorResult(true);
1700 EmitUnwindAndReturn();
1701
1702 __ bind(&exception);
1703 __ CallRuntime(expr->rethrow_on_exception() ? Runtime::kReThrow
1704 : Runtime::kThrow);
1705
1706 __ bind(&suspend);
1707 OperandStackDepthIncrement(1); // Not popped on this path.
1708 VisitForAccumulatorValue(expr->generator_object());
1709 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1710 __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
1711 Immediate(Smi::FromInt(continuation.pos())));
1712 __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
1713 __ mov(ecx, esi);
1714 __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
1715 kDontSaveFPRegs);
1716 __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
1717 __ cmp(esp, ebx);
1718 __ j(equal, &post_runtime);
1719 __ push(eax); // generator object
1720 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1721 RestoreContext();
1722 __ bind(&post_runtime);
1723 PopOperand(result_register());
1724 EmitReturnSequence();
1725
1726 __ bind(&resume);
1727 context()->Plug(result_register());
1728 }
1729
1730 void FullCodeGenerator::PushOperand(MemOperand operand) {
1731 OperandStackDepthIncrement(1);
1732 __ Push(operand);
1733 }
1734
1735 void FullCodeGenerator::EmitOperandStackDepthCheck() {
1736 if (FLAG_debug_code) {
1737 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1738 operand_stack_depth_ * kPointerSize;
1739 __ mov(eax, ebp);
1740 __ sub(eax, esp);
1741 __ cmp(eax, Immediate(expected_diff));
1742 __ Assert(equal, kUnexpectedStackDepth);
1743 }
1744 }
1745
1746 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
1747 Label allocate, done_allocate;
1748
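  // Try to allocate the JSIteratorResult inline in new space; on failure,
  // fall back to a runtime allocation below.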
1749 __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &allocate,
1750 NO_ALLOCATION_FLAGS);
1751 __ jmp(&done_allocate, Label::kNear);
1752
1753 __ bind(&allocate);
1754 __ Push(Smi::FromInt(JSIteratorResult::kSize));
1755 __ CallRuntime(Runtime::kAllocateInNewSpace);
1756
1757 __ bind(&done_allocate);
1758 __ mov(ebx, NativeContextOperand());
1759 __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
1760 __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
1761 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
1762 isolate()->factory()->empty_fixed_array());
1763 __ mov(FieldOperand(eax, JSObject::kElementsOffset),
1764 isolate()->factory()->empty_fixed_array());
1765 __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
1766 __ mov(FieldOperand(eax, JSIteratorResult::kDoneOffset),
1767 isolate()->factory()->ToBoolean(done));
1768 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
1769 OperandStackDepthDecrement(1);
1770 }
1771
1772
1773 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1774 Token::Value op,
1775 Expression* left,
1776 Expression* right) {
1777 // Do combined smi check of the operands. Left operand is on the
1778 // stack. Right operand is in eax.
1779 Label smi_case, done, stub_call;
1780 PopOperand(edx);
1781 __ mov(ecx, eax);
1782 __ or_(eax, edx);
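  // kSmiTag is zero, so the OR of the two values has a clear low bit only if
  // both operands are smis; a single tag test thus covers both operands.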
1783 JumpPatchSite patch_site(masm_);
1784 patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
1785
1786 __ bind(&stub_call);
1787 __ mov(eax, ecx);
1788 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1789 CallIC(code, expr->BinaryOperationFeedbackId());
1790 patch_site.EmitPatchInfo();
1791 __ jmp(&done, Label::kNear);
1792
1793 // Smi case.
1794 __ bind(&smi_case);
1795 __ mov(eax, edx); // Copy left operand in case of a stub call.
1796
1797 switch (op) {
1798 case Token::SAR:
1799 __ SmiUntag(ecx);
1800 __ sar_cl(eax); // No overflow check of the result is necessary.
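  // The arithmetic shift leaves eax tagged, but a data bit may have been
  // shifted into the low (tag) position; clearing it leaves a valid smi.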
1801 __ and_(eax, Immediate(~kSmiTagMask));
1802 break;
1803 case Token::SHL: {
1804 Label result_ok;
1805 __ SmiUntag(eax);
1806 __ SmiUntag(ecx);
1807 __ shl_cl(eax);
1808 // Check that the *signed* result fits in a smi.
1809 __ cmp(eax, 0xc0000000);
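  // The cmp computes eax - 0xc0000000, i.e. eax + 2^30; the sign flag is
  // clear exactly when eax lies in the smi range [-2^30, 2^30).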
1810 __ j(positive, &result_ok);
1811 __ SmiTag(ecx);
1812 __ jmp(&stub_call);
1813 __ bind(&result_ok);
1814 __ SmiTag(eax);
1815 break;
1816 }
1817 case Token::SHR: {
1818 Label result_ok;
1819 __ SmiUntag(eax);
1820 __ SmiUntag(ecx);
1821 __ shr_cl(eax);
1822 __ test(eax, Immediate(0xc0000000));
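  // The logical shift result is interpreted as non-negative, so it only
  // fits in a smi if the top two bits are clear (i.e. it is below 2^30).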
1823 __ j(zero, &result_ok);
1824 __ SmiTag(ecx);
1825 __ jmp(&stub_call);
1826 __ bind(&result_ok);
1827 __ SmiTag(eax);
1828 break;
1829 }
1830 case Token::ADD:
1831 __ add(eax, ecx);
1832 __ j(overflow, &stub_call);
1833 break;
1834 case Token::SUB:
1835 __ sub(eax, ecx);
1836 __ j(overflow, &stub_call);
1837 break;
1838 case Token::MUL: {
1839 __ SmiUntag(eax);
1840 __ imul(eax, ecx);
1841 __ j(overflow, &stub_call);
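  // A zero product may really be -0 if either operand was negative; in that
  // case fall back to the stub, which can produce a heap number.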
1842 __ test(eax, eax);
1843 __ j(not_zero, &done, Label::kNear);
1844 __ mov(ebx, edx);
1845 __ or_(ebx, ecx);
1846 __ j(negative, &stub_call);
1847 break;
1848 }
1849 case Token::BIT_OR:
1850 __ or_(eax, ecx);
1851 break;
1852 case Token::BIT_AND:
1853 __ and_(eax, ecx);
1854 break;
1855 case Token::BIT_XOR:
1856 __ xor_(eax, ecx);
1857 break;
1858 default:
1859 UNREACHABLE();
1860 }
1861
1862 __ bind(&done);
1863 context()->Plug(eax);
1864 }
1865
1866
1867 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
1868 for (int i = 0; i < lit->properties()->length(); i++) {
1869 ClassLiteral::Property* property = lit->properties()->at(i);
1870 Expression* value = property->value();
1871
1872 if (property->is_static()) {
1873 PushOperand(Operand(esp, kPointerSize)); // constructor
1874 } else {
1875 PushOperand(Operand(esp, 0)); // prototype
1876 }
1877 EmitPropertyKey(property, lit->GetIdForProperty(i));
1878
1879 // The static prototype property is read-only. We handle the non-computed
1880 // property name case in the parser. Since this is the only case where we
1881 // need to check for an own read-only property, we special-case it here so
1882 // that we do not need to do the check for every property.
1883 if (property->is_static() && property->is_computed_name()) {
1884 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
1885 __ push(eax);
1886 }
1887
1888 VisitForStackValue(value);
1889 if (NeedsHomeObject(value)) {
1890 EmitSetHomeObject(value, 2, property->GetSlot());
1891 }
1892
1893 switch (property->kind()) {
1894 case ClassLiteral::Property::METHOD:
1895 PushOperand(Smi::FromInt(DONT_ENUM));
1896 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1897 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
1898 break;
1899
1900 case ClassLiteral::Property::GETTER:
1901 PushOperand(Smi::FromInt(DONT_ENUM));
1902 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
1903 break;
1904
1905 case ClassLiteral::Property::SETTER:
1906 PushOperand(Smi::FromInt(DONT_ENUM));
1907 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
1908 break;
1909
1910 case ClassLiteral::Property::FIELD:
1911 UNREACHABLE();
1912 break;
1913 }
1914 }
1915 }
1916
1917
1918 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
1919 PopOperand(edx);
1920 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1921 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
1922 CallIC(code, expr->BinaryOperationFeedbackId());
1923 patch_site.EmitPatchInfo();
1924 context()->Plug(eax);
1925 }
1926
1927
1928 void FullCodeGenerator::EmitAssignment(Expression* expr,
1929 FeedbackVectorSlot slot) {
1930 DCHECK(expr->IsValidReferenceExpressionOrThis());
1931
1932 Property* prop = expr->AsProperty();
1933 LhsKind assign_type = Property::GetAssignType(prop);
1934
1935 switch (assign_type) {
1936 case VARIABLE: {
1937 VariableProxy* proxy = expr->AsVariableProxy();
1938 EffectContext context(this);
1939 EmitVariableAssignment(proxy->var(), Token::ASSIGN, slot,
1940 proxy->hole_check_mode());
1941 break;
1942 }
1943 case NAMED_PROPERTY: {
1944 PushOperand(eax); // Preserve value.
1945 VisitForAccumulatorValue(prop->obj());
1946 __ Move(StoreDescriptor::ReceiverRegister(), eax);
1947 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
1948 CallStoreIC(slot, prop->key()->AsLiteral()->value());
1949 break;
1950 }
1951 case NAMED_SUPER_PROPERTY: {
1952 PushOperand(eax);
1953 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
1954 VisitForAccumulatorValue(
1955 prop->obj()->AsSuperPropertyReference()->home_object());
1956 // stack: value, this; eax: home_object
1957 Register scratch = ecx;
1958 Register scratch2 = edx;
1959 __ mov(scratch, result_register()); // home_object
1960 __ mov(eax, MemOperand(esp, kPointerSize)); // value
1961 __ mov(scratch2, MemOperand(esp, 0)); // this
1962 __ mov(MemOperand(esp, kPointerSize), scratch2); // this
1963 __ mov(MemOperand(esp, 0), scratch); // home_object
1964 // stack: this, home_object. eax: value
1965 EmitNamedSuperPropertyStore(prop);
1966 break;
1967 }
1968 case KEYED_SUPER_PROPERTY: {
1969 PushOperand(eax);
1970 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
1971 VisitForStackValue(
1972 prop->obj()->AsSuperPropertyReference()->home_object());
1973 VisitForAccumulatorValue(prop->key());
1974 Register scratch = ecx;
1975 Register scratch2 = edx;
1976 __ mov(scratch2, MemOperand(esp, 2 * kPointerSize)); // value
1977 // stack: value, this, home_object; eax: key, edx: value
1978 __ mov(scratch, MemOperand(esp, kPointerSize)); // this
1979 __ mov(MemOperand(esp, 2 * kPointerSize), scratch);
1980 __ mov(scratch, MemOperand(esp, 0)); // home_object
1981 __ mov(MemOperand(esp, kPointerSize), scratch);
1982 __ mov(MemOperand(esp, 0), eax);
1983 __ mov(eax, scratch2);
1984 // stack: this, home_object, key; eax: value.
1985 EmitKeyedSuperPropertyStore(prop);
1986 break;
1987 }
1988 case KEYED_PROPERTY: {
1989 PushOperand(eax); // Preserve value.
1990 VisitForStackValue(prop->obj());
1991 VisitForAccumulatorValue(prop->key());
1992 __ Move(StoreDescriptor::NameRegister(), eax);
1993 PopOperand(StoreDescriptor::ReceiverRegister()); // Receiver.
1994 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
1995 CallKeyedStoreIC(slot);
1996 break;
1997 }
1998 }
1999 context()->Plug(eax);
2000 }
2001
2002
2003 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2004 Variable* var, MemOperand location) {
2005 __ mov(location, eax);
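  // Stores into a context slot need a write barrier; ecx still holds the
  // context object set up by VarOperand in the caller.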
2006 if (var->IsContextSlot()) {
2007 __ mov(edx, eax);
2008 int offset = Context::SlotOffset(var->index());
2009 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
2010 }
2011 }
2012
2013 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2014 FeedbackVectorSlot slot,
2015 HoleCheckMode hole_check_mode) {
2016 if (var->IsUnallocated()) {
2017 // Global var, const, or let.
2018 __ mov(StoreDescriptor::ReceiverRegister(), NativeContextOperand());
2019 __ mov(StoreDescriptor::ReceiverRegister(),
2020 ContextOperand(StoreDescriptor::ReceiverRegister(),
2021 Context::EXTENSION_INDEX));
2022 CallStoreIC(slot, var->name());
2023
2024 } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
2025 DCHECK(!var->IsLookupSlot());
2026 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2027 MemOperand location = VarOperand(var, ecx);
2028 // Perform an initialization check for lexically declared variables.
2029 if (hole_check_mode == HoleCheckMode::kRequired) {
2030 Label assign;
2031 __ mov(edx, location);
2032 __ cmp(edx, isolate()->factory()->the_hole_value());
2033 __ j(not_equal, &assign, Label::kNear);
2034 __ push(Immediate(var->name()));
2035 __ CallRuntime(Runtime::kThrowReferenceError);
2036 __ bind(&assign);
2037 }
2038 if (var->mode() != CONST) {
2039 EmitStoreToStackLocalOrContextSlot(var, location);
2040 } else if (var->throw_on_const_assignment(language_mode())) {
2041 __ CallRuntime(Runtime::kThrowConstAssignError);
2042 }
2043 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2044 // Initializing assignment to const {this} needs a write barrier.
2045 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2046 Label uninitialized_this;
2047 MemOperand location = VarOperand(var, ecx);
2048 __ mov(edx, location);
2049 __ cmp(edx, isolate()->factory()->the_hole_value());
2050 __ j(equal, &uninitialized_this);
2051 __ push(Immediate(var->name()));
2052 __ CallRuntime(Runtime::kThrowReferenceError);
2053 __ bind(&uninitialized_this);
2054 EmitStoreToStackLocalOrContextSlot(var, location);
2055
2056 } else {
2057 DCHECK(var->mode() != CONST || op == Token::INIT);
2058 if (var->IsLookupSlot()) {
2059 // Assignment to var.
2060 __ Push(Immediate(var->name()));
2061 __ Push(eax);
2062 __ CallRuntime(is_strict(language_mode())
2063 ? Runtime::kStoreLookupSlot_Strict
2064 : Runtime::kStoreLookupSlot_Sloppy);
2065 } else {
2066 // Assignment to var or initializing assignment to let/const in harmony
2067 // mode.
2068 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2069 MemOperand location = VarOperand(var, ecx);
2070 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
2071 // Check for an uninitialized let binding.
2072 __ mov(edx, location);
2073 __ cmp(edx, isolate()->factory()->the_hole_value());
2074 __ Check(equal, kLetBindingReInitialization);
2075 }
2076 EmitStoreToStackLocalOrContextSlot(var, location);
2077 }
2078 }
2079 }
2080
2081
2082 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2083 // Assignment to a property, using a named store IC.
2084 // eax : value
2085 // esp[0] : receiver
2086 Property* prop = expr->target()->AsProperty();
2087 DCHECK(prop != NULL);
2088 DCHECK(prop->key()->IsLiteral());
2089
2090 PopOperand(StoreDescriptor::ReceiverRegister());
2091 CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
2092 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2093 context()->Plug(eax);
2094 }
2095
2096
2097 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2098 // Assignment to named property of super.
2099 // eax : value
2100 // stack : receiver ('this'), home_object
2101 DCHECK(prop != NULL);
2102 Literal* key = prop->key()->AsLiteral();
2103 DCHECK(key != NULL);
2104
2105 PushOperand(key->value());
2106 PushOperand(eax);
2107 CallRuntimeWithOperands(is_strict(language_mode())
2108 ? Runtime::kStoreToSuper_Strict
2109 : Runtime::kStoreToSuper_Sloppy);
2110 }
2111
2112
2113 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2114 // Assignment to keyed property of super.
2115 // eax : value
2116 // stack : receiver ('this'), home_object, key
2117
2118 PushOperand(eax);
2119 CallRuntimeWithOperands(is_strict(language_mode())
2120 ? Runtime::kStoreKeyedToSuper_Strict
2121 : Runtime::kStoreKeyedToSuper_Sloppy);
2122 }
2123
2124
2125 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2126 // Assignment to a property, using a keyed store IC.
2127 // eax : value
2128 // esp[0] : key
2129 // esp[kPointerSize] : receiver
2130
2131 PopOperand(StoreDescriptor::NameRegister()); // Key.
2132 PopOperand(StoreDescriptor::ReceiverRegister());
2133 DCHECK(StoreDescriptor::ValueRegister().is(eax));
2134 CallKeyedStoreIC(expr->AssignmentSlot());
2135 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2136 context()->Plug(eax);
2137 }
2138
2139 // Code common for calls using the IC.
2140 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2141 Expression* callee = expr->expression();
2142
2143 // Get the target function.
2144 ConvertReceiverMode convert_mode;
2145 if (callee->IsVariableProxy()) {
2146 { StackValueContext context(this);
2147 EmitVariableLoad(callee->AsVariableProxy());
2148 PrepareForBailout(callee, BailoutState::NO_REGISTERS);
2149 }
2150 // Push undefined as receiver. This is patched in the method prologue if it
2151 // is a sloppy mode method.
2152 PushOperand(isolate()->factory()->undefined_value());
2153 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2154 } else {
2155 // Load the function from the receiver.
2156 DCHECK(callee->IsProperty());
2157 DCHECK(!callee->AsProperty()->IsSuperAccess());
2158 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2159 EmitNamedPropertyLoad(callee->AsProperty());
2160 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2161 BailoutState::TOS_REGISTER);
2162 // Push the target function under the receiver.
2163 PushOperand(Operand(esp, 0));
2164 __ mov(Operand(esp, kPointerSize), eax);
2165 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2166 }
2167
2168 EmitCall(expr, convert_mode);
2169 }
2170
2171
2172 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2173 SetExpressionPosition(expr);
2174 Expression* callee = expr->expression();
2175 DCHECK(callee->IsProperty());
2176 Property* prop = callee->AsProperty();
2177 DCHECK(prop->IsSuperAccess());
2178
2179 Literal* key = prop->key()->AsLiteral();
2180 DCHECK(!key->value()->IsSmi());
2181 // Load the function from the receiver.
2182 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2183 VisitForStackValue(super_ref->home_object());
2184 VisitForAccumulatorValue(super_ref->this_var());
2185 PushOperand(eax);
2186 PushOperand(eax);
2187 PushOperand(Operand(esp, kPointerSize * 2));
2188 PushOperand(key->value());
2189 // Stack here:
2190 // - home_object
2191 // - this (receiver)
2192 // - this (receiver) <-- LoadFromSuper will pop here and below.
2193 // - home_object
2194 // - key
2195 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
2196 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2197
2198 // Replace home_object with target function.
2199 __ mov(Operand(esp, kPointerSize), eax);
2200
2201 // Stack here:
2202 // - target function
2203 // - this (receiver)
2204 EmitCall(expr);
2205 }
2206
2207
2208 // Code common for calls using the IC.
2209 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2210 Expression* key) {
2211 // Load the key.
2212 VisitForAccumulatorValue(key);
2213
2214 Expression* callee = expr->expression();
2215
2216 // Load the function from the receiver.
2217 DCHECK(callee->IsProperty());
2218 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2219 __ mov(LoadDescriptor::NameRegister(), eax);
2220 EmitKeyedPropertyLoad(callee->AsProperty());
2221 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2222 BailoutState::TOS_REGISTER);
2223
2224 // Push the target function under the receiver.
2225 PushOperand(Operand(esp, 0));
2226 __ mov(Operand(esp, kPointerSize), eax);
2227
2228 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2229 }
2230
2231
2232 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2233 Expression* callee = expr->expression();
2234 DCHECK(callee->IsProperty());
2235 Property* prop = callee->AsProperty();
2236 DCHECK(prop->IsSuperAccess());
2237
2238 SetExpressionPosition(prop);
2239 // Load the function from the receiver.
2240 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2241 VisitForStackValue(super_ref->home_object());
2242 VisitForAccumulatorValue(super_ref->this_var());
2243 PushOperand(eax);
2244 PushOperand(eax);
2245 PushOperand(Operand(esp, kPointerSize * 2));
2246 VisitForStackValue(prop->key());
2247 // Stack here:
2248 // - home_object
2249 // - this (receiver)
2250 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2251 // - home_object
2252 // - key
2253 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
2254 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2255
2256 // Replace home_object with target function.
2257 __ mov(Operand(esp, kPointerSize), eax);
2258
2259 // Stack here:
2260 // - target function
2261 // - this (receiver)
2262 EmitCall(expr);
2263 }
2264
2265
2266 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2267 // Load the arguments.
2268 ZoneList<Expression*>* args = expr->arguments();
2269 int arg_count = args->length();
2270 for (int i = 0; i < arg_count; i++) {
2271 VisitForStackValue(args->at(i));
2272 }
2273
2274 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
2275 SetCallPosition(expr, expr->tail_call_mode());
2276 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2277 if (FLAG_trace) {
2278 __ CallRuntime(Runtime::kTraceTailCall);
2279 }
2280 // Update profiling counters before the tail call since we will
2281 // not return to this function.
2282 EmitProfilingCounterHandlingForReturnSequence(true);
2283 }
2284 Handle<Code> code =
2285 CodeFactory::CallIC(isolate(), mode, expr->tail_call_mode()).code();
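  // The CallIC calling convention: feedback slot in edx, callee (loaded from
  // below the arguments) in edi, and the argument count in eax.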
2286 __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
2287 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2288 __ Move(eax, Immediate(arg_count));
2289 CallIC(code);
2290 OperandStackDepthDecrement(arg_count + 1);
2291
2292 RecordJSReturnSite(expr);
2293 RestoreContext();
2294 context()->DropAndPlug(1, eax);
2295 }
2296
2297 void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
2298 int arg_count = expr->arguments()->length();
2299 // Push a copy of the first argument, or undefined if it doesn't exist.
2300 if (arg_count > 0) {
2301 __ push(Operand(esp, arg_count * kPointerSize));
2302 } else {
2303 __ push(Immediate(isolate()->factory()->undefined_value()));
2304 }
2305
2306 // Push the enclosing function.
2307 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
2308
2309 // Push the language mode.
2310 __ push(Immediate(Smi::FromInt(language_mode())));
2311
2312 // Push the start position of the scope the call resides in.
2313 __ push(Immediate(Smi::FromInt(scope()->start_position())));
2314
2315 // Push the source position of the eval call.
2316 __ push(Immediate(Smi::FromInt(expr->position())));
2317
2318 // Do the runtime call.
2319 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2320 }
2321
2322
2323 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2324 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2325 VariableProxy* callee = expr->expression()->AsVariableProxy();
2326 if (callee->var()->IsLookupSlot()) {
2327 Label slow, done;
2328 SetExpressionPosition(callee);
2329 // Generate code for loading from variables potentially shadowed by
2330 // eval-introduced variables.
2331 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2332
2333 __ bind(&slow);
2334 // Call the runtime to find the function to call (returned in eax) and
2335 // the object holding it (returned in edx).
2336 __ Push(callee->name());
2337 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2338 PushOperand(eax); // Function.
2339 PushOperand(edx); // Receiver.
2340 PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);
2341
2342 // If fast case code has been generated, emit code to push the function
2343 // and receiver and have the slow path jump around this code.
2344 if (done.is_linked()) {
2345 Label call;
2346 __ jmp(&call, Label::kNear);
2347 __ bind(&done);
2348 // Push function.
2349 __ push(eax);
2350 // The receiver is implicitly the global receiver. Indicate this by
2351 // passing undefined to the call function stub.
2352 __ push(Immediate(isolate()->factory()->undefined_value()));
2353 __ bind(&call);
2354 }
2355 } else {
2356 VisitForStackValue(callee);
2357 // refEnv.WithBaseObject()
2358 PushOperand(isolate()->factory()->undefined_value());
2359 }
2360 }
2361
2362
2363 void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2364 // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval
2365 // to resolve the function we need to call. Then we call the resolved
2366 // function using the given arguments.
2367 ZoneList<Expression*>* args = expr->arguments();
2368 int arg_count = args->length();
2369
2370 PushCalleeAndWithBaseObject(expr);
2371
2372 // Push the arguments.
2373 for (int i = 0; i < arg_count; i++) {
2374 VisitForStackValue(args->at(i));
2375 }
2376
2377 // Push a copy of the function (found below the arguments) and
2378 // resolve eval.
2379 __ push(Operand(esp, (arg_count + 1) * kPointerSize));
2380 EmitResolvePossiblyDirectEval(expr);
2381
2382 // Touch up the stack with the resolved function.
2383 __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
2384
2385 PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);
2386
2387 SetCallPosition(expr);
2388 Handle<Code> code = CodeFactory::CallIC(isolate(), ConvertReceiverMode::kAny,
2389 expr->tail_call_mode())
2390 .code();
2391 __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
2392 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2393 __ Move(eax, Immediate(arg_count));
2394 __ call(code, RelocInfo::CODE_TARGET);
2395 OperandStackDepthDecrement(arg_count + 1);
2396 RecordJSReturnSite(expr);
2397 RestoreContext();
2398 context()->DropAndPlug(1, eax);
2399 }
2400
2401
2402 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2403 Comment cmnt(masm_, "[ CallNew");
2404 // According to ECMA-262, section 11.2.2, page 44, the function
2405 // expression in new calls must be evaluated before the
2406 // arguments.
2407
2408 // Push the constructor on the stack. If it's not a function, it's used as
2409 // the receiver for CALL_NON_FUNCTION; otherwise the value on the stack is
2410 // ignored.
2411 DCHECK(!expr->expression()->IsSuperPropertyReference());
2412 VisitForStackValue(expr->expression());
2413
2414 // Push the arguments ("left-to-right") on the stack.
2415 ZoneList<Expression*>* args = expr->arguments();
2416 int arg_count = args->length();
2417 for (int i = 0; i < arg_count; i++) {
2418 VisitForStackValue(args->at(i));
2419 }
2420
2421 // Call the construct call builtin that handles allocation and
2422 // constructor invocation.
2423 SetConstructCallPosition(expr);
2424
2425 // Load function and argument count into edi and eax.
2426 __ Move(eax, Immediate(arg_count));
2427 __ mov(edi, Operand(esp, arg_count * kPointerSize));
2428
2429 // Record call targets in unoptimized code.
2430 __ EmitLoadTypeFeedbackVector(ebx);
2431 __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));
2432
2433 CallConstructStub stub(isolate());
2434 CallIC(stub.GetCode());
2435 OperandStackDepthDecrement(arg_count + 1);
2436 PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
2437 RestoreContext();
2438 context()->Plug(eax);
2439 }
2440
2441
2442 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2443 SuperCallReference* super_call_ref =
2444 expr->expression()->AsSuperCallReference();
2445 DCHECK_NOT_NULL(super_call_ref);
2446
2447 // Push the super constructor target on the stack (may be null,
2448 // but the Construct builtin can deal with that properly).
2449 VisitForAccumulatorValue(super_call_ref->this_function_var());
2450 __ AssertFunction(result_register());
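  // The super constructor is the [[Prototype]] of the active function,
  // which is read through the function's map.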
2451 __ mov(result_register(),
2452 FieldOperand(result_register(), HeapObject::kMapOffset));
2453 PushOperand(FieldOperand(result_register(), Map::kPrototypeOffset));
2454
2455 // Push the arguments ("left-to-right") on the stack.
2456 ZoneList<Expression*>* args = expr->arguments();
2457 int arg_count = args->length();
2458 for (int i = 0; i < arg_count; i++) {
2459 VisitForStackValue(args->at(i));
2460 }
2461
2462 // Call the construct call builtin that handles allocation and
2463 // constructor invocation.
2464 SetConstructCallPosition(expr);
2465
2466 // Load new target into edx.
2467 VisitForAccumulatorValue(super_call_ref->new_target_var());
2468 __ mov(edx, result_register());
2469
2470 // Load function and argument count into edi and eax.
2471 __ Move(eax, Immediate(arg_count));
2472 __ mov(edi, Operand(esp, arg_count * kPointerSize));
2473
2474 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2475 OperandStackDepthDecrement(arg_count + 1);
2476
2477 RecordJSReturnSite(expr);
2478 RestoreContext();
2479 context()->Plug(eax);
2480 }
2481
2482
2483 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2484 ZoneList<Expression*>* args = expr->arguments();
2485 DCHECK(args->length() == 1);
2486
2487 VisitForAccumulatorValue(args->at(0));
2488
2489 Label materialize_true, materialize_false;
2490 Label* if_true = NULL;
2491 Label* if_false = NULL;
2492 Label* fall_through = NULL;
2493 context()->PrepareTest(&materialize_true, &materialize_false,
2494 &if_true, &if_false, &fall_through);
2495
2496 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
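  // kSmiTag is zero, so the tag-mask test sets the zero flag exactly for smis.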
2497 __ test(eax, Immediate(kSmiTagMask));
2498 Split(zero, if_true, if_false, fall_through);
2499
2500 context()->Plug(if_true, if_false);
2501 }
2502
2503
2504 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2505 ZoneList<Expression*>* args = expr->arguments();
2506 DCHECK(args->length() == 1);
2507
2508 VisitForAccumulatorValue(args->at(0));
2509
2510 Label materialize_true, materialize_false;
2511 Label* if_true = NULL;
2512 Label* if_false = NULL;
2513 Label* fall_through = NULL;
2514 context()->PrepareTest(&materialize_true, &materialize_false,
2515 &if_true, &if_false, &fall_through);
2516
2517 __ JumpIfSmi(eax, if_false);
2518 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ebx);
2519 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2520 Split(above_equal, if_true, if_false, fall_through);
2521
2522 context()->Plug(if_true, if_false);
2523 }
2524
2525
2526 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2527 ZoneList<Expression*>* args = expr->arguments();
2528 DCHECK(args->length() == 1);
2529
2530 VisitForAccumulatorValue(args->at(0));
2531
2532 Label materialize_true, materialize_false;
2533 Label* if_true = NULL;
2534 Label* if_false = NULL;
2535 Label* fall_through = NULL;
2536 context()->PrepareTest(&materialize_true, &materialize_false,
2537 &if_true, &if_false, &fall_through);
2538
2539 __ JumpIfSmi(eax, if_false);
2540 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
2541 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2542 Split(equal, if_true, if_false, fall_through);
2543
2544 context()->Plug(if_true, if_false);
2545 }
2546
2547
2548 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2549 ZoneList<Expression*>* args = expr->arguments();
2550 DCHECK(args->length() == 1);
2551
2552 VisitForAccumulatorValue(args->at(0));
2553
2554 Label materialize_true, materialize_false;
2555 Label* if_true = NULL;
2556 Label* if_false = NULL;
2557 Label* fall_through = NULL;
2558 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2559 &if_false, &fall_through);
2560
2561 __ JumpIfSmi(eax, if_false);
2562 __ CmpObjectType(eax, JS_TYPED_ARRAY_TYPE, ebx);
2563 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2564 Split(equal, if_true, if_false, fall_through);
2565
2566 context()->Plug(if_true, if_false);
2567 }
2568
2569
2570 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2571 ZoneList<Expression*>* args = expr->arguments();
2572 DCHECK(args->length() == 1);
2573
2574 VisitForAccumulatorValue(args->at(0));
2575
2576 Label materialize_true, materialize_false;
2577 Label* if_true = NULL;
2578 Label* if_false = NULL;
2579 Label* fall_through = NULL;
2580 context()->PrepareTest(&materialize_true, &materialize_false,
2581 &if_true, &if_false, &fall_through);
2582
2583 __ JumpIfSmi(eax, if_false);
2584 __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
2585 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2586 Split(equal, if_true, if_false, fall_through);
2587
2588 context()->Plug(if_true, if_false);
2589 }
2590
2591
2592 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2593 ZoneList<Expression*>* args = expr->arguments();
2594 DCHECK(args->length() == 1);
2595
2596 VisitForAccumulatorValue(args->at(0));
2597
2598 Label materialize_true, materialize_false;
2599 Label* if_true = NULL;
2600 Label* if_false = NULL;
2601 Label* fall_through = NULL;
2602 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2603 &if_false, &fall_through);
2604
2605 __ JumpIfSmi(eax, if_false);
2606 __ CmpObjectType(eax, JS_PROXY_TYPE, ebx);
2607 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2608 Split(equal, if_true, if_false, fall_through);
2609
2610 context()->Plug(if_true, if_false);
2611 }
2612
2613
2614 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2615 ZoneList<Expression*>* args = expr->arguments();
2616 DCHECK(args->length() == 1);
2617 Label done, null, function, non_function_constructor;
2618
2619 VisitForAccumulatorValue(args->at(0));
2620
2621 // If the object is not a JSReceiver, we return null.
2622 __ JumpIfSmi(eax, &null, Label::kNear);
2623 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2624 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, eax);
2625 __ j(below, &null, Label::kNear);
2626
2627 // Return 'Function' for JSFunction and JSBoundFunction objects.
2628 __ CmpInstanceType(eax, FIRST_FUNCTION_TYPE);
2629 STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
2630 __ j(above_equal, &function, Label::kNear);
2631
2632 // Check if the constructor in the map is a JS function.
2633 __ GetMapConstructor(eax, eax, ebx);
2634 __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
2635 __ j(not_equal, &non_function_constructor, Label::kNear);
2636
2637 // eax now contains the constructor function. Grab the
2638 // instance class name from there.
2639 __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
2640 __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
2641 __ jmp(&done, Label::kNear);
2642
2643 // Non-JS objects have class null.
2644 __ bind(&null);
2645 __ mov(eax, isolate()->factory()->null_value());
2646 __ jmp(&done, Label::kNear);
2647
2648 // Functions have class 'Function'.
2649 __ bind(&function);
2650 __ mov(eax, isolate()->factory()->Function_string());
2651 __ jmp(&done, Label::kNear);
2652
2653 // Objects with a non-function constructor have class 'Object'.
2654 __ bind(&non_function_constructor);
2655 __ mov(eax, isolate()->factory()->Object_string());
2656
2657 // All done.
2658 __ bind(&done);
2659
2660 context()->Plug(eax);
2661 }
2662
2663
2664 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
2665 ZoneList<Expression*>* args = expr->arguments();
2666 DCHECK(args->length() == 2);
2667
2668 VisitForStackValue(args->at(0));
2669 VisitForAccumulatorValue(args->at(1));
2670
2671 Register object = ebx;
2672 Register index = eax;
2673 Register result = edx;
2674
2675 PopOperand(object);
2676
2677 Label need_conversion;
2678 Label index_out_of_range;
2679 Label done;
2680 StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
2681 &need_conversion, &index_out_of_range);
2682 generator.GenerateFast(masm_);
2683 __ jmp(&done);
2684
2685 __ bind(&index_out_of_range);
2686 // When the index is out of range, the spec requires us to return
2687 // NaN.
2688 __ Move(result, Immediate(isolate()->factory()->nan_value()));
2689 __ jmp(&done);
2690
2691 __ bind(&need_conversion);
2692 // Move the undefined value into the result register, which will
2693 // trigger conversion.
2694 __ Move(result, Immediate(isolate()->factory()->undefined_value()));
2695 __ jmp(&done);
2696
2697 NopRuntimeCallHelper call_helper;
2698 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
2699
2700 __ bind(&done);
2701 context()->Plug(result);
2702 }
2703
2704
2705 void FullCodeGenerator::EmitCall(CallRuntime* expr) {
2706 ZoneList<Expression*>* args = expr->arguments();
2707 DCHECK_LE(2, args->length());
2708 // Push target, receiver and arguments onto the stack.
2709 for (Expression* const arg : *args) {
2710 VisitForStackValue(arg);
2711 }
2712 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
2713 // Move target to edi.
2714 int const argc = args->length() - 2;
2715 __ mov(edi, Operand(esp, (argc + 1) * kPointerSize));
2716 // Call the target.
2717 __ mov(eax, Immediate(argc));
2718 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2719 OperandStackDepthDecrement(argc + 1);
2720 RestoreContext();
2721 // Discard the function left on TOS.
2722 context()->DropAndPlug(1, eax);
2723 }
2724
2725 void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
2726 ZoneList<Expression*>* args = expr->arguments();
2727 DCHECK_EQ(1, args->length());
2728 VisitForAccumulatorValue(args->at(0));
2729 __ AssertFunction(eax);
2730 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
2731 __ mov(eax, FieldOperand(eax, Map::kPrototypeOffset));
2732 context()->Plug(eax);
2733 }
2734
2735 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
2736 DCHECK(expr->arguments()->length() == 0);
2737 ExternalReference debug_is_active =
2738 ExternalReference::debug_is_active_address(isolate());
2739 __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
2740 __ SmiTag(eax);
2741 context()->Plug(eax);
2742 }
2743
2744
2745 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
2746 ZoneList<Expression*>* args = expr->arguments();
2747 DCHECK_EQ(2, args->length());
2748 VisitForStackValue(args->at(0));
2749 VisitForStackValue(args->at(1));
2750
2751 Label runtime, done;
2752
2753 __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &runtime,
2754 NO_ALLOCATION_FLAGS);
2755 __ mov(ebx, NativeContextOperand());
2756 __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
2757 __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
2758 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
2759 isolate()->factory()->empty_fixed_array());
2760 __ mov(FieldOperand(eax, JSObject::kElementsOffset),
2761 isolate()->factory()->empty_fixed_array());
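  // The value was pushed first and done second, so done is popped first.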
2762 __ pop(FieldOperand(eax, JSIteratorResult::kDoneOffset));
2763 __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
2764 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
2765 __ jmp(&done, Label::kNear);
2766
2767 __ bind(&runtime);
2768 CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
2769
2770 __ bind(&done);
2771 context()->Plug(eax);
2772 }
2773
2774
2775 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
2776 // Push function.
2777 __ LoadGlobalFunction(expr->context_index(), eax);
2778 PushOperand(eax);
2779
2780 // Push undefined as receiver.
2781 PushOperand(isolate()->factory()->undefined_value());
2782 }
2783
2784
2785 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
2786 ZoneList<Expression*>* args = expr->arguments();
2787 int arg_count = args->length();
2788
2789 SetCallPosition(expr);
2790 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2791 __ Set(eax, arg_count);
2792 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
2793 RelocInfo::CODE_TARGET);
2794 OperandStackDepthDecrement(arg_count + 1);
2795 RestoreContext();
2796 }
2797
2798
2799 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
2800 switch (expr->op()) {
2801 case Token::DELETE: {
2802 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
2803 Property* property = expr->expression()->AsProperty();
2804 VariableProxy* proxy = expr->expression()->AsVariableProxy();
2805
2806 if (property != NULL) {
2807 VisitForStackValue(property->obj());
2808 VisitForStackValue(property->key());
2809 CallRuntimeWithOperands(is_strict(language_mode())
2810 ? Runtime::kDeleteProperty_Strict
2811 : Runtime::kDeleteProperty_Sloppy);
2812 context()->Plug(eax);
2813 } else if (proxy != NULL) {
2814 Variable* var = proxy->var();
2815 // Delete of an unqualified identifier is disallowed in strict mode but
2816 // "delete this" is allowed.
2817 bool is_this = var->is_this();
2818 DCHECK(is_sloppy(language_mode()) || is_this);
2819 if (var->IsUnallocated()) {
2820 __ mov(eax, NativeContextOperand());
2821 __ push(ContextOperand(eax, Context::EXTENSION_INDEX));
2822 __ push(Immediate(var->name()));
2823 __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
2824 context()->Plug(eax);
2825 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
2826 // Result of deleting non-global variables is false. 'this' is
2827 // not really a variable, though we implement it as one. The
2828 // subexpression does not have side effects.
2829 context()->Plug(is_this);
2830 } else {
2831 // Non-global variable. Call the runtime to try to delete from the
2832 // context where the variable was introduced.
2833 __ Push(var->name());
2834 __ CallRuntime(Runtime::kDeleteLookupSlot);
2835 context()->Plug(eax);
2836 }
2837 } else {
2838 // Result of deleting non-property, non-variable reference is true.
2839 // The subexpression may have side effects.
2840 VisitForEffect(expr->expression());
2841 context()->Plug(true);
2842 }
2843 break;
2844 }
2845
2846 case Token::VOID: {
2847 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
2848 VisitForEffect(expr->expression());
2849 context()->Plug(isolate()->factory()->undefined_value());
2850 break;
2851 }
2852
2853 case Token::NOT: {
2854 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
2855 if (context()->IsEffect()) {
2856 // Unary NOT has no side effects so it's only necessary to visit the
2857 // subexpression. Match the optimizing compiler by not branching.
2858 VisitForEffect(expr->expression());
2859 } else if (context()->IsTest()) {
2860 const TestContext* test = TestContext::cast(context());
2861 // The labels are swapped for the recursive call.
2862 VisitForControl(expr->expression(),
2863 test->false_label(),
2864 test->true_label(),
2865 test->fall_through());
2866 context()->Plug(test->true_label(), test->false_label());
2867 } else {
2868 // We handle value contexts explicitly rather than simply visiting
2869 // for control and plugging the control flow into the context,
2870 // because we need to prepare a pair of extra administrative AST ids
2871 // for the optimizing compiler.
2872 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
2873 Label materialize_true, materialize_false, done;
2874 VisitForControl(expr->expression(),
2875 &materialize_false,
2876 &materialize_true,
2877 &materialize_true);
2878 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
2879 __ bind(&materialize_true);
2880 PrepareForBailoutForId(expr->MaterializeTrueId(),
2881 BailoutState::NO_REGISTERS);
2882 if (context()->IsAccumulatorValue()) {
2883 __ mov(eax, isolate()->factory()->true_value());
2884 } else {
2885 __ Push(isolate()->factory()->true_value());
2886 }
2887 __ jmp(&done, Label::kNear);
2888 __ bind(&materialize_false);
2889 PrepareForBailoutForId(expr->MaterializeFalseId(),
2890 BailoutState::NO_REGISTERS);
2891 if (context()->IsAccumulatorValue()) {
2892 __ mov(eax, isolate()->factory()->false_value());
2893 } else {
2894 __ Push(isolate()->factory()->false_value());
2895 }
2896 __ bind(&done);
2897 }
2898 break;
2899 }
2900
2901 case Token::TYPEOF: {
2902 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
2903 {
2904 AccumulatorValueContext context(this);
2905 VisitForTypeofValue(expr->expression());
2906 }
2907 __ mov(ebx, eax);
2908 __ Call(isolate()->builtins()->Typeof(), RelocInfo::CODE_TARGET);
2909 context()->Plug(eax);
2910 break;
2911 }
2912
2913 default:
2914 UNREACHABLE();
2915 }
2916 }
2917
2918
2919 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
2920 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
2921
2922 Comment cmnt(masm_, "[ CountOperation");
2923
2924 Property* prop = expr->expression()->AsProperty();
2925 LhsKind assign_type = Property::GetAssignType(prop);
2926
2927 // Evaluate expression and get value.
2928 if (assign_type == VARIABLE) {
2929 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
2930 AccumulatorValueContext context(this);
2931 EmitVariableLoad(expr->expression()->AsVariableProxy());
2932 } else {
2933 // Reserve space for result of postfix operation.
2934 if (expr->is_postfix() && !context()->IsEffect()) {
2935 PushOperand(Smi::kZero);
2936 }
2937 switch (assign_type) {
2938 case NAMED_PROPERTY: {
2939 // Put the object both on the stack and in the register.
2940 VisitForStackValue(prop->obj());
2941 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2942 EmitNamedPropertyLoad(prop);
2943 break;
2944 }
2945
2946 case NAMED_SUPER_PROPERTY: {
2947 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2948 VisitForAccumulatorValue(
2949 prop->obj()->AsSuperPropertyReference()->home_object());
2950 PushOperand(result_register());
2951 PushOperand(MemOperand(esp, kPointerSize));
2952 PushOperand(result_register());
2953 EmitNamedSuperPropertyLoad(prop);
2954 break;
2955 }
2956
2957 case KEYED_SUPER_PROPERTY: {
2958 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2959 VisitForStackValue(
2960 prop->obj()->AsSuperPropertyReference()->home_object());
2961 VisitForAccumulatorValue(prop->key());
2962 PushOperand(result_register());
2963 PushOperand(MemOperand(esp, 2 * kPointerSize));
2964 PushOperand(MemOperand(esp, 2 * kPointerSize));
2965 PushOperand(result_register());
2966 EmitKeyedSuperPropertyLoad(prop);
2967 break;
2968 }
2969
2970 case KEYED_PROPERTY: {
2971 VisitForStackValue(prop->obj());
2972 VisitForStackValue(prop->key());
2973 __ mov(LoadDescriptor::ReceiverRegister(),
2974 Operand(esp, kPointerSize)); // Object.
2975 __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0)); // Key.
2976 EmitKeyedPropertyLoad(prop);
2977 break;
2978 }
2979
2980 case VARIABLE:
2981 UNREACHABLE();
2982 }
2983 }
2984
2985 // We need a second deoptimization point after loading the value
2986 // in case evaluating the property load may have a side effect.
2987 if (assign_type == VARIABLE) {
2988 PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
2989 } else {
2990 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2991 }
2992
2993 // Inline smi case if we are in a loop.
2994 Label done, stub_call;
2995 JumpPatchSite patch_site(masm_);
2996 if (ShouldInlineSmiCase(expr->op())) {
2997 Label slow;
2998 patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);
2999
3000 // Save result for postfix expressions.
3001 if (expr->is_postfix()) {
3002 if (!context()->IsEffect()) {
3003 // Save the result on the stack. If we have a named or keyed property,
3004 // we store the result under the receiver that is currently on top of
3005 // the stack.
3006 switch (assign_type) {
3007 case VARIABLE:
3008 __ push(eax);
3009 break;
3010 case NAMED_PROPERTY:
3011 __ mov(Operand(esp, kPointerSize), eax);
3012 break;
3013 case NAMED_SUPER_PROPERTY:
3014 __ mov(Operand(esp, 2 * kPointerSize), eax);
3015 break;
3016 case KEYED_PROPERTY:
3017 __ mov(Operand(esp, 2 * kPointerSize), eax);
3018 break;
3019 case KEYED_SUPER_PROPERTY:
3020 __ mov(Operand(esp, 3 * kPointerSize), eax);
3021 break;
3022 }
3023 }
3024 }
3025
3026 if (expr->op() == Token::INC) {
3027 __ add(eax, Immediate(Smi::FromInt(1)));
3028 } else {
3029 __ sub(eax, Immediate(Smi::FromInt(1)));
3030 }
3031 __ j(no_overflow, &done, Label::kNear);
3032 // Call stub. Undo operation first.
3033 if (expr->op() == Token::INC) {
3034 __ sub(eax, Immediate(Smi::FromInt(1)));
3035 } else {
3036 __ add(eax, Immediate(Smi::FromInt(1)));
3037 }
3038 __ jmp(&stub_call, Label::kNear);
3039 __ bind(&slow);
3040 }
3041
3042 // Convert old value into a number.
3043 __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
3044 RestoreContext();
3045 PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);
3046
3047 // Save result for postfix expressions.
3048 if (expr->is_postfix()) {
3049 if (!context()->IsEffect()) {
3050 // Save the result on the stack. If we have a named or keyed property,
3051 // we store the result under the receiver that is currently on top of
3052 // the stack.
3053 switch (assign_type) {
3054 case VARIABLE:
3055 PushOperand(eax);
3056 break;
3057 case NAMED_PROPERTY:
3058 __ mov(Operand(esp, kPointerSize), eax);
3059 break;
3060 case NAMED_SUPER_PROPERTY:
3061 __ mov(Operand(esp, 2 * kPointerSize), eax);
3062 break;
3063 case KEYED_PROPERTY:
3064 __ mov(Operand(esp, 2 * kPointerSize), eax);
3065 break;
3066 case KEYED_SUPER_PROPERTY:
3067 __ mov(Operand(esp, 3 * kPointerSize), eax);
3068 break;
3069 }
3070 }
3071 }
3072
3073 SetExpressionPosition(expr);
3074
3075 // Call stub for +1/-1.
3076 __ bind(&stub_call);
3077 __ mov(edx, eax);
3078 __ mov(eax, Immediate(Smi::FromInt(1)));
3079 Handle<Code> code =
3080 CodeFactory::BinaryOpIC(isolate(), expr->binary_op()).code();
3081 CallIC(code, expr->CountBinOpFeedbackId());
3082 patch_site.EmitPatchInfo();
3083 __ bind(&done);
3084
3085 // Store the value returned in eax.
3086 switch (assign_type) {
3087 case VARIABLE: {
3088 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3089 if (expr->is_postfix()) {
3090 // Perform the assignment as if via '='.
3091 { EffectContext context(this);
3092 EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
3093 proxy->hole_check_mode());
3094 PrepareForBailoutForId(expr->AssignmentId(),
3095 BailoutState::TOS_REGISTER);
3096 context.Plug(eax);
3097 }
3098 // For all contexts except EffectContext, we have the result on
3099 // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
                               proxy->hole_check_mode());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(eax);
      }
      break;
    }
    case NAMED_PROPERTY: {
      PopOperand(StoreDescriptor::ReceiverRegister());
      CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      PopOperand(StoreDescriptor::NameRegister());
      PopOperand(StoreDescriptor::ReceiverRegister());
      CallKeyedStoreIC(expr->CountSlot());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        // Result is on the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

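  // The checked string is a compile-time constant, so only the test for
  // that one typeof result is emitted; an unknown string can never match.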
  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    Split(below, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, SYMBOL_TYPE, edx);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
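    // typeof null is "object", so null must not reach the undetectable
    // check that makes document.all-like objects report "undefined".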
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_false);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(eax, if_false);
    // Check for callable and not undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ and_(ecx, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
    __ cmp(ecx, 1 << Map::kIsCallable);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(eax, if_false);
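    // typeof null is "object".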
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, edx);
    __ j(below, if_false);
    // Check for callable or undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(zero, if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)   \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(eax, if_false);                                \
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),           \
           isolate()->factory()->type##_map());                 \
    Split(equal, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
// clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      PopOperand(edx);
      __ Call(isolate()->builtins()->InstanceOf(), RelocInfo::CODE_TARGET);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cc = CompareIC::ComputeCondition(op);
      PopOperand(edx);

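      // Inlined smi case: or the operands together to test both tag bits at
      // once; smi tagging preserves order, so the raw compare is correct.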
      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

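  // Strict equality admits only the nil value itself. Abstract equality
  // also admits the other nil and undetectable objects, all of which have
  // the undetectable bit set in their map.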
  Handle<Object> nil_value = nil == kNullValue
      ? isolate()->factory()->null_value()
      : isolate()->factory()->undefined_value();
  if (expr->op() == Token::EQ_STRICT) {
    __ cmp(eax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    __ JumpIfSmi(eax, if_false);
    __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(eax, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


Register FullCodeGenerator::result_register() {
  return eax;
}


Register FullCodeGenerator::context_register() {
  return esi;
}

void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(value, Operand(ebp, frame_offset));
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  DeclarationScope* closure_scope = scope()->GetClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ mov(eax, NativeContextOperand());
    PushOperand(ContextOperand(eax, Context::CLOSURE_INDEX));
  } else if (closure_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    PushOperand(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    PushOperand(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_obj));
  PushOperand(edx);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(edx));
  // Restore pending message from stack.
  PopOperand(edx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(edx));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}


void FullCodeGenerator::DeferredCommands::EmitCommands() {
  DCHECK(!result_register().is(edx));
  __ Pop(result_register());  // Restore the accumulator.
  __ Pop(edx);                // Get the token.
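  // Each deferred command was recorded under a unique smi token; run the
  // command whose token matches the one just popped and skip the rest.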
  for (DeferredCommand cmd : commands_) {
    Label skip;
    __ cmp(edx, Immediate(Smi::FromInt(cmd.token)));
    __ j(not_equal, &skip);
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}

#undef __


static const byte kJnsInstruction = 0x79;
static const byte kJnsOffset = 0x11;
static const byte kNopByteOne = 0x66;
static const byte kNopByteTwo = 0x90;
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;
#endif


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

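  // A back edge site is seven bytes: the two-byte jns (or two-byte nop)
  // followed by the one-byte call opcode and its 32-bit relative target.
  // Both the jump bytes and the call target are rewritten below.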
  switch (target_state) {
    case INTERRUPT:
      // sub <profiling_counter>, <delta>  ;; Not changed
      // jns ok
      // call <interrupt stub>
      // ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
      // sub <profiling_counter>, <delta>  ;; Not changed
      // nop
      // nop
      // call <on-stack replacement>
      // ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(unoptimized_code->GetIsolate(),
                                   call_target_address, unoptimized_code,
                                   replacement_code->entry());
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  DCHECK_EQ(kNopByteOne, *jns_instr_address);
  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));

  DCHECK_EQ(
      isolate->builtins()->OnStackReplacement()->entry(),
      Assembler::target_address_at(call_target_address, unoptimized_code));
  return ON_STACK_REPLACEMENT;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_IA32