1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/crankshaft/hydrogen.h"
6
7 #include <memory>
8 #include <sstream>
9
10 #include "src/allocation-site-scopes.h"
11 #include "src/ast/ast-numbering.h"
12 #include "src/ast/compile-time-value.h"
13 #include "src/ast/scopes.h"
14 #include "src/code-factory.h"
15 #include "src/crankshaft/hydrogen-bce.h"
16 #include "src/crankshaft/hydrogen-canonicalize.h"
17 #include "src/crankshaft/hydrogen-check-elimination.h"
18 #include "src/crankshaft/hydrogen-dce.h"
19 #include "src/crankshaft/hydrogen-dehoist.h"
20 #include "src/crankshaft/hydrogen-environment-liveness.h"
21 #include "src/crankshaft/hydrogen-escape-analysis.h"
22 #include "src/crankshaft/hydrogen-gvn.h"
23 #include "src/crankshaft/hydrogen-infer-representation.h"
24 #include "src/crankshaft/hydrogen-infer-types.h"
25 #include "src/crankshaft/hydrogen-load-elimination.h"
26 #include "src/crankshaft/hydrogen-mark-unreachable.h"
27 #include "src/crankshaft/hydrogen-osr.h"
28 #include "src/crankshaft/hydrogen-range-analysis.h"
29 #include "src/crankshaft/hydrogen-redundant-phi.h"
30 #include "src/crankshaft/hydrogen-removable-simulates.h"
31 #include "src/crankshaft/hydrogen-representation-changes.h"
32 #include "src/crankshaft/hydrogen-sce.h"
33 #include "src/crankshaft/hydrogen-store-elimination.h"
34 #include "src/crankshaft/hydrogen-uint32-analysis.h"
35 #include "src/crankshaft/lithium-allocator.h"
36 #include "src/crankshaft/typing.h"
37 #include "src/field-type.h"
38 #include "src/full-codegen/full-codegen.h"
39 #include "src/globals.h"
40 #include "src/ic/call-optimization.h"
41 #include "src/ic/ic.h"
// ic-inl.h is included for GetRootConstructor.
43 #include "src/ic/ic-inl.h"
44 #include "src/isolate-inl.h"
45 #include "src/runtime/runtime.h"
46
47 #if V8_TARGET_ARCH_IA32
48 #include "src/crankshaft/ia32/lithium-codegen-ia32.h" // NOLINT
49 #elif V8_TARGET_ARCH_X64
50 #include "src/crankshaft/x64/lithium-codegen-x64.h" // NOLINT
51 #elif V8_TARGET_ARCH_ARM64
52 #include "src/crankshaft/arm64/lithium-codegen-arm64.h" // NOLINT
53 #elif V8_TARGET_ARCH_ARM
54 #include "src/crankshaft/arm/lithium-codegen-arm.h" // NOLINT
55 #elif V8_TARGET_ARCH_PPC
56 #include "src/crankshaft/ppc/lithium-codegen-ppc.h" // NOLINT
57 #elif V8_TARGET_ARCH_MIPS
58 #include "src/crankshaft/mips/lithium-codegen-mips.h" // NOLINT
59 #elif V8_TARGET_ARCH_MIPS64
60 #include "src/crankshaft/mips64/lithium-codegen-mips64.h" // NOLINT
61 #elif V8_TARGET_ARCH_S390
62 #include "src/crankshaft/s390/lithium-codegen-s390.h" // NOLINT
63 #elif V8_TARGET_ARCH_X87
64 #include "src/crankshaft/x87/lithium-codegen-x87.h" // NOLINT
65 #else
66 #error Unsupported target architecture.
67 #endif
68
69 namespace v8 {
70 namespace internal {
71
// Crankshaft uses its own register configuration; alias the accessor for
// concise local use.
const auto GetRegConfig = RegisterConfiguration::Crankshaft;
73
74 class HOptimizedGraphBuilderWithPositions : public HOptimizedGraphBuilder {
75 public:
HOptimizedGraphBuilderWithPositions(CompilationInfo * info)76 explicit HOptimizedGraphBuilderWithPositions(CompilationInfo* info)
77 : HOptimizedGraphBuilder(info, true) {
78 SetSourcePosition(info->shared_info()->start_position());
79 }
80
81 #define DEF_VISIT(type) \
82 void Visit##type(type* node) override { \
83 SourcePosition old_position = SourcePosition::Unknown(); \
84 if (node->position() != kNoSourcePosition) { \
85 old_position = source_position(); \
86 SetSourcePosition(node->position()); \
87 } \
88 HOptimizedGraphBuilder::Visit##type(node); \
89 if (old_position.IsKnown()) { \
90 set_source_position(old_position); \
91 } \
92 }
93 EXPRESSION_NODE_LIST(DEF_VISIT)
94 #undef DEF_VISIT
95
96 #define DEF_VISIT(type) \
97 void Visit##type(type* node) override { \
98 SourcePosition old_position = SourcePosition::Unknown(); \
99 if (node->position() != kNoSourcePosition) { \
100 old_position = source_position(); \
101 SetSourcePosition(node->position()); \
102 } \
103 HOptimizedGraphBuilder::Visit##type(node); \
104 if (old_position.IsKnown()) { \
105 set_source_position(old_position); \
106 } \
107 }
108 STATEMENT_NODE_LIST(DEF_VISIT)
109 #undef DEF_VISIT
110
111 #define DEF_VISIT(type) \
112 void Visit##type(type* node) override { \
113 HOptimizedGraphBuilder::Visit##type(node); \
114 }
115 DECLARATION_NODE_LIST(DEF_VISIT)
116 #undef DEF_VISIT
117 };
118
PrepareJobImpl()119 HCompilationJob::Status HCompilationJob::PrepareJobImpl() {
120 if (!isolate()->use_crankshaft() ||
121 info()->shared_info()->dont_crankshaft()) {
122 // Crankshaft is entirely disabled.
123 return FAILED;
124 }
125
126 // Optimization requires a version of fullcode with deoptimization support.
127 // Recompile the unoptimized version of the code if the current version
128 // doesn't have deoptimization support already.
129 // Otherwise, if we are gathering compilation time and space statistics
130 // for hydrogen, gather baseline statistics for a fullcode compilation.
131 bool should_recompile = !info()->shared_info()->has_deoptimization_support();
132 if (should_recompile || FLAG_hydrogen_stats) {
133 base::ElapsedTimer timer;
134 if (FLAG_hydrogen_stats) {
135 timer.Start();
136 }
137 if (!Compiler::EnsureDeoptimizationSupport(info())) {
138 return FAILED;
139 }
140 if (FLAG_hydrogen_stats) {
141 isolate()->GetHStatistics()->IncrementFullCodeGen(timer.Elapsed());
142 }
143 }
144 DCHECK(info()->shared_info()->has_deoptimization_support());
145 DCHECK(!info()->shared_info()->never_compiled());
146
147 // Check the whitelist for Crankshaft.
148 if (!info()->shared_info()->PassesFilter(FLAG_hydrogen_filter)) {
149 return AbortOptimization(kHydrogenFilter);
150 }
151
152 Scope* scope = info()->scope();
153 if (LUnallocated::TooManyParameters(scope->num_parameters())) {
154 // Crankshaft would require too many Lithium operands.
155 return AbortOptimization(kTooManyParameters);
156 }
157
158 if (info()->is_osr() &&
159 LUnallocated::TooManyParametersOrStackSlots(scope->num_parameters(),
160 scope->num_stack_slots())) {
161 // Crankshaft would require too many Lithium operands.
162 return AbortOptimization(kTooManyParametersLocals);
163 }
164
165 if (IsGeneratorFunction(info()->shared_info()->kind())) {
166 // Crankshaft does not support generators.
167 return AbortOptimization(kGenerator);
168 }
169
170 if (FLAG_trace_hydrogen) {
171 isolate()->GetHTracer()->TraceCompilation(info());
172 }
173
174 // Optimization could have been disabled by the parser. Note that this check
175 // is only needed because the Hydrogen graph builder is missing some bailouts.
176 if (info()->shared_info()->optimization_disabled()) {
177 return AbortOptimization(
178 info()->shared_info()->disable_optimization_reason());
179 }
180
181 HOptimizedGraphBuilder* graph_builder =
182 (FLAG_hydrogen_track_positions || isolate()->is_profiling() ||
183 FLAG_trace_ic)
184 ? new (info()->zone()) HOptimizedGraphBuilderWithPositions(info())
185 : new (info()->zone()) HOptimizedGraphBuilder(info(), false);
186
187 // Type-check the function.
188 AstTyper(info()->isolate(), info()->zone(), info()->closure(),
189 info()->scope(), info()->osr_ast_id(), info()->literal(),
190 graph_builder->bounds())
191 .Run();
192
193 graph_ = graph_builder->CreateGraph();
194
195 if (isolate()->has_pending_exception()) {
196 return FAILED;
197 }
198
199 if (graph_ == NULL) return FAILED;
200
201 if (info()->dependencies()->HasAborted()) {
202 // Dependency has changed during graph creation. Let's try again later.
203 return RetryOptimization(kBailedOutDueToDependencyChange);
204 }
205
206 return SUCCEEDED;
207 }
208
ExecuteJobImpl()209 HCompilationJob::Status HCompilationJob::ExecuteJobImpl() {
210 DCHECK(graph_ != NULL);
211 BailoutReason bailout_reason = kNoReason;
212
213 if (graph_->Optimize(&bailout_reason)) {
214 chunk_ = LChunk::NewChunk(graph_);
215 if (chunk_ != NULL) return SUCCEEDED;
216 } else if (bailout_reason != kNoReason) {
217 info()->AbortOptimization(bailout_reason);
218 }
219
220 return FAILED;
221 }
222
FinalizeJobImpl()223 HCompilationJob::Status HCompilationJob::FinalizeJobImpl() {
224 DCHECK(chunk_ != NULL);
225 DCHECK(graph_ != NULL);
226 {
227 // Deferred handles reference objects that were accessible during
228 // graph creation. To make sure that we don't encounter inconsistencies
229 // between graph creation and code generation, we disallow accessing
230 // objects through deferred handles during the latter, with exceptions.
231 DisallowDeferredHandleDereference no_deferred_handle_deref;
232 Handle<Code> optimized_code = chunk_->Codegen();
233 if (optimized_code.is_null()) {
234 if (info()->bailout_reason() == kNoReason) {
235 return AbortOptimization(kCodeGenerationFailed);
236 }
237 return FAILED;
238 }
239 RegisterWeakObjectsInOptimizedCode(optimized_code);
240 info()->SetCode(optimized_code);
241 }
242 // Add to the weak list of optimized code objects.
243 info()->context()->native_context()->AddOptimizedCode(*info()->code());
244 return SUCCEEDED;
245 }
246
HBasicBlock(HGraph * graph)247 HBasicBlock::HBasicBlock(HGraph* graph)
248 : block_id_(graph->GetNextBlockID()),
249 graph_(graph),
250 phis_(4, graph->zone()),
251 first_(NULL),
252 last_(NULL),
253 end_(NULL),
254 loop_information_(NULL),
255 predecessors_(2, graph->zone()),
256 dominator_(NULL),
257 dominated_blocks_(4, graph->zone()),
258 last_environment_(NULL),
259 argument_count_(-1),
260 first_instruction_index_(-1),
261 last_instruction_index_(-1),
262 deleted_phis_(4, graph->zone()),
263 parent_loop_header_(NULL),
264 inlined_entry_block_(NULL),
265 is_inline_return_target_(false),
266 is_reachable_(true),
267 dominates_loop_successors_(false),
268 is_osr_entry_(false),
269 is_ordered_(false) { }
270
271
isolate() const272 Isolate* HBasicBlock::isolate() const {
273 return graph_->isolate();
274 }
275
276
MarkUnreachable()277 void HBasicBlock::MarkUnreachable() {
278 is_reachable_ = false;
279 }
280
281
AttachLoopInformation()282 void HBasicBlock::AttachLoopInformation() {
283 DCHECK(!IsLoopHeader());
284 loop_information_ = new(zone()) HLoopInformation(this, zone());
285 }
286
287
DetachLoopInformation()288 void HBasicBlock::DetachLoopInformation() {
289 DCHECK(IsLoopHeader());
290 loop_information_ = NULL;
291 }
292
293
AddPhi(HPhi * phi)294 void HBasicBlock::AddPhi(HPhi* phi) {
295 DCHECK(!IsStartBlock());
296 phis_.Add(phi, zone());
297 phi->SetBlock(this);
298 }
299
300
RemovePhi(HPhi * phi)301 void HBasicBlock::RemovePhi(HPhi* phi) {
302 DCHECK(phi->block() == this);
303 DCHECK(phis_.Contains(phi));
304 phi->Kill();
305 phis_.RemoveElement(phi);
306 phi->SetBlock(NULL);
307 }
308
309
AddInstruction(HInstruction * instr,SourcePosition position)310 void HBasicBlock::AddInstruction(HInstruction* instr, SourcePosition position) {
311 DCHECK(!IsStartBlock() || !IsFinished());
312 DCHECK(!instr->IsLinked());
313 DCHECK(!IsFinished());
314
315 if (position.IsKnown()) {
316 instr->set_position(position);
317 }
318 if (first_ == NULL) {
319 DCHECK(last_environment() != NULL);
320 DCHECK(!last_environment()->ast_id().IsNone());
321 HBlockEntry* entry = new(zone()) HBlockEntry();
322 entry->InitializeAsFirst(this);
323 if (position.IsKnown()) {
324 entry->set_position(position);
325 } else {
326 DCHECK(!FLAG_hydrogen_track_positions ||
327 !graph()->info()->IsOptimizing() || instr->IsAbnormalExit());
328 }
329 first_ = last_ = entry;
330 }
331 instr->InsertAfter(last_);
332 }
333
334
AddNewPhi(int merged_index)335 HPhi* HBasicBlock::AddNewPhi(int merged_index) {
336 if (graph()->IsInsideNoSideEffectsScope()) {
337 merged_index = HPhi::kInvalidMergedIndex;
338 }
339 HPhi* phi = new(zone()) HPhi(merged_index, zone());
340 AddPhi(phi);
341 return phi;
342 }
343
344
CreateSimulate(BailoutId ast_id,RemovableSimulate removable)345 HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
346 RemovableSimulate removable) {
347 DCHECK(HasEnvironment());
348 HEnvironment* environment = last_environment();
349 DCHECK(ast_id.IsNone() ||
350 ast_id == BailoutId::StubEntry() ||
351 environment->closure()->shared()->VerifyBailoutId(ast_id));
352
353 int push_count = environment->push_count();
354 int pop_count = environment->pop_count();
355
356 HSimulate* instr =
357 new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
358 #ifdef DEBUG
359 instr->set_closure(environment->closure());
360 #endif
361 // Order of pushed values: newest (top of stack) first. This allows
362 // HSimulate::MergeWith() to easily append additional pushed values
363 // that are older (from further down the stack).
364 for (int i = 0; i < push_count; ++i) {
365 instr->AddPushedValue(environment->ExpressionStackAt(i));
366 }
367 for (GrowableBitVector::Iterator it(environment->assigned_variables(),
368 zone());
369 !it.Done();
370 it.Advance()) {
371 int index = it.Current();
372 instr->AddAssignedValue(index, environment->Lookup(index));
373 }
374 environment->ClearHistory();
375 return instr;
376 }
377
378
Finish(HControlInstruction * end,SourcePosition position)379 void HBasicBlock::Finish(HControlInstruction* end, SourcePosition position) {
380 DCHECK(!IsFinished());
381 AddInstruction(end, position);
382 end_ = end;
383 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
384 it.Current()->RegisterPredecessor(this);
385 }
386 }
387
388
Goto(HBasicBlock * block,SourcePosition position,FunctionState * state,bool add_simulate)389 void HBasicBlock::Goto(HBasicBlock* block, SourcePosition position,
390 FunctionState* state, bool add_simulate) {
391 bool drop_extra = state != NULL &&
392 state->inlining_kind() == NORMAL_RETURN;
393
394 if (block->IsInlineReturnTarget()) {
395 HEnvironment* env = last_environment();
396 int argument_count = env->arguments_environment()->parameter_count();
397 AddInstruction(new(zone())
398 HLeaveInlined(state->entry(), argument_count),
399 position);
400 UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
401 }
402
403 if (add_simulate) AddNewSimulate(BailoutId::None(), position);
404 HGoto* instr = new(zone()) HGoto(block);
405 Finish(instr, position);
406 }
407
408
AddLeaveInlined(HValue * return_value,FunctionState * state,SourcePosition position)409 void HBasicBlock::AddLeaveInlined(HValue* return_value, FunctionState* state,
410 SourcePosition position) {
411 HBasicBlock* target = state->function_return();
412 bool drop_extra = state->inlining_kind() == NORMAL_RETURN;
413
414 DCHECK(target->IsInlineReturnTarget());
415 DCHECK(return_value != NULL);
416 HEnvironment* env = last_environment();
417 int argument_count = env->arguments_environment()->parameter_count();
418 AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
419 position);
420 UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
421 last_environment()->Push(return_value);
422 AddNewSimulate(BailoutId::None(), position);
423 HGoto* instr = new(zone()) HGoto(target);
424 Finish(instr, position);
425 }
426
427
SetInitialEnvironment(HEnvironment * env)428 void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
429 DCHECK(!HasEnvironment());
430 DCHECK(first() == NULL);
431 UpdateEnvironment(env);
432 }
433
434
UpdateEnvironment(HEnvironment * env)435 void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
436 last_environment_ = env;
437 graph()->update_maximum_environment_size(env->first_expression_index());
438 }
439
440
SetJoinId(BailoutId ast_id)441 void HBasicBlock::SetJoinId(BailoutId ast_id) {
442 int length = predecessors_.length();
443 DCHECK(length > 0);
444 for (int i = 0; i < length; i++) {
445 HBasicBlock* predecessor = predecessors_[i];
446 DCHECK(predecessor->end()->IsGoto());
447 HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
448 DCHECK(i != 0 ||
449 (predecessor->last_environment()->closure().is_null() ||
450 predecessor->last_environment()->closure()->shared()
451 ->VerifyBailoutId(ast_id)));
452 simulate->set_ast_id(ast_id);
453 predecessor->last_environment()->set_ast_id(ast_id);
454 }
455 }
456
457
Dominates(HBasicBlock * other) const458 bool HBasicBlock::Dominates(HBasicBlock* other) const {
459 HBasicBlock* current = other->dominator();
460 while (current != NULL) {
461 if (current == this) return true;
462 current = current->dominator();
463 }
464 return false;
465 }
466
467
EqualToOrDominates(HBasicBlock * other) const468 bool HBasicBlock::EqualToOrDominates(HBasicBlock* other) const {
469 if (this == other) return true;
470 return Dominates(other);
471 }
472
473
LoopNestingDepth() const474 int HBasicBlock::LoopNestingDepth() const {
475 const HBasicBlock* current = this;
476 int result = (current->IsLoopHeader()) ? 1 : 0;
477 while (current->parent_loop_header() != NULL) {
478 current = current->parent_loop_header();
479 result++;
480 }
481 return result;
482 }
483
484
PostProcessLoopHeader(IterationStatement * stmt)485 void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
486 DCHECK(IsLoopHeader());
487
488 SetJoinId(stmt->EntryId());
489 if (predecessors()->length() == 1) {
490 // This is a degenerated loop.
491 DetachLoopInformation();
492 return;
493 }
494
495 // Only the first entry into the loop is from outside the loop. All other
496 // entries must be back edges.
497 for (int i = 1; i < predecessors()->length(); ++i) {
498 loop_information()->RegisterBackEdge(predecessors()->at(i));
499 }
500 }
501
502
MarkSuccEdgeUnreachable(int succ)503 void HBasicBlock::MarkSuccEdgeUnreachable(int succ) {
504 DCHECK(IsFinished());
505 HBasicBlock* succ_block = end()->SuccessorAt(succ);
506
507 DCHECK(succ_block->predecessors()->length() == 1);
508 succ_block->MarkUnreachable();
509 }
510
511
RegisterPredecessor(HBasicBlock * pred)512 void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
513 if (HasPredecessor()) {
514 // Only loop header blocks can have a predecessor added after
515 // instructions have been added to the block (they have phis for all
516 // values in the environment, these phis may be eliminated later).
517 DCHECK(IsLoopHeader() || first_ == NULL);
518 HEnvironment* incoming_env = pred->last_environment();
519 if (IsLoopHeader()) {
520 DCHECK_EQ(phis()->length(), incoming_env->length());
521 for (int i = 0; i < phis_.length(); ++i) {
522 phis_[i]->AddInput(incoming_env->values()->at(i));
523 }
524 } else {
525 last_environment()->AddIncomingEdge(this, pred->last_environment());
526 }
527 } else if (!HasEnvironment() && !IsFinished()) {
528 DCHECK(!IsLoopHeader());
529 SetInitialEnvironment(pred->last_environment()->Copy());
530 }
531
532 predecessors_.Add(pred, zone());
533 }
534
535
AddDominatedBlock(HBasicBlock * block)536 void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
537 DCHECK(!dominated_blocks_.Contains(block));
538 // Keep the list of dominated blocks sorted such that if there is two
539 // succeeding block in this list, the predecessor is before the successor.
540 int index = 0;
541 while (index < dominated_blocks_.length() &&
542 dominated_blocks_[index]->block_id() < block->block_id()) {
543 ++index;
544 }
545 dominated_blocks_.InsertAt(index, block, zone());
546 }
547
548
AssignCommonDominator(HBasicBlock * other)549 void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
550 if (dominator_ == NULL) {
551 dominator_ = other;
552 other->AddDominatedBlock(this);
553 } else if (other->dominator() != NULL) {
554 HBasicBlock* first = dominator_;
555 HBasicBlock* second = other;
556
557 while (first != second) {
558 if (first->block_id() > second->block_id()) {
559 first = first->dominator();
560 } else {
561 second = second->dominator();
562 }
563 DCHECK(first != NULL && second != NULL);
564 }
565
566 if (dominator_ != first) {
567 DCHECK(dominator_->dominated_blocks_.Contains(this));
568 dominator_->dominated_blocks_.RemoveElement(this);
569 dominator_ = first;
570 first->AddDominatedBlock(this);
571 }
572 }
573 }
574
575
AssignLoopSuccessorDominators()576 void HBasicBlock::AssignLoopSuccessorDominators() {
577 // Mark blocks that dominate all subsequent reachable blocks inside their
578 // loop. Exploit the fact that blocks are sorted in reverse post order. When
579 // the loop is visited in increasing block id order, if the number of
580 // non-loop-exiting successor edges at the dominator_candidate block doesn't
581 // exceed the number of previously encountered predecessor edges, there is no
582 // path from the loop header to any block with higher id that doesn't go
583 // through the dominator_candidate block. In this case, the
584 // dominator_candidate block is guaranteed to dominate all blocks reachable
585 // from it with higher ids.
586 HBasicBlock* last = loop_information()->GetLastBackEdge();
587 int outstanding_successors = 1; // one edge from the pre-header
588 // Header always dominates everything.
589 MarkAsLoopSuccessorDominator();
590 for (int j = block_id(); j <= last->block_id(); ++j) {
591 HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
592 for (HPredecessorIterator it(dominator_candidate); !it.Done();
593 it.Advance()) {
594 HBasicBlock* predecessor = it.Current();
595 // Don't count back edges.
596 if (predecessor->block_id() < dominator_candidate->block_id()) {
597 outstanding_successors--;
598 }
599 }
600
601 // If more successors than predecessors have been seen in the loop up to
602 // now, it's not possible to guarantee that the current block dominates
603 // all of the blocks with higher IDs. In this case, assume conservatively
604 // that those paths through loop that don't go through the current block
605 // contain all of the loop's dependencies. Also be careful to record
606 // dominator information about the current loop that's being processed,
607 // and not nested loops, which will be processed when
608 // AssignLoopSuccessorDominators gets called on their header.
609 DCHECK(outstanding_successors >= 0);
610 HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
611 if (outstanding_successors == 0 &&
612 (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
613 dominator_candidate->MarkAsLoopSuccessorDominator();
614 }
615 HControlInstruction* end = dominator_candidate->end();
616 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
617 HBasicBlock* successor = it.Current();
618 // Only count successors that remain inside the loop and don't loop back
619 // to a loop header.
620 if (successor->block_id() > dominator_candidate->block_id() &&
621 successor->block_id() <= last->block_id()) {
622 // Backwards edges must land on loop headers.
623 DCHECK(successor->block_id() > dominator_candidate->block_id() ||
624 successor->IsLoopHeader());
625 outstanding_successors++;
626 }
627 }
628 }
629 }
630
631
PredecessorIndexOf(HBasicBlock * predecessor) const632 int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
633 for (int i = 0; i < predecessors_.length(); ++i) {
634 if (predecessors_[i] == predecessor) return i;
635 }
636 UNREACHABLE();
637 return -1;
638 }
639
640
#ifdef DEBUG
// Debug-only structural checks for a single block.
void HBasicBlock::Verify() {
  // Check that every block is finished.
  DCHECK(IsFinished());
  DCHECK(block_id() >= 0);

  // Check that the incoming edges are in edge split form.
  if (predecessors_.length() > 1) {
    for (int i = 0; i < predecessors_.length(); ++i) {
      DCHECK(predecessors_[i]->end()->SecondSuccessor() == NULL);
    }
  }
}
#endif
655
656
RegisterBackEdge(HBasicBlock * block)657 void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
658 this->back_edges_.Add(block, block->zone());
659 AddBlock(block);
660 }
661
662
GetLastBackEdge() const663 HBasicBlock* HLoopInformation::GetLastBackEdge() const {
664 int max_id = -1;
665 HBasicBlock* result = NULL;
666 for (int i = 0; i < back_edges_.length(); ++i) {
667 HBasicBlock* cur = back_edges_[i];
668 if (cur->block_id() > max_id) {
669 max_id = cur->block_id();
670 result = cur;
671 }
672 }
673 return result;
674 }
675
676
AddBlock(HBasicBlock * block)677 void HLoopInformation::AddBlock(HBasicBlock* block) {
678 if (block == loop_header()) return;
679 if (block->parent_loop_header() == loop_header()) return;
680 if (block->parent_loop_header() != NULL) {
681 AddBlock(block->parent_loop_header());
682 } else {
683 block->set_parent_loop_header(loop_header());
684 blocks_.Add(block, block->zone());
685 for (int i = 0; i < block->predecessors()->length(); ++i) {
686 AddBlock(block->predecessors()->at(i));
687 }
688 }
689 }
690
691
692 #ifdef DEBUG
693
694 // Checks reachability of the blocks in this graph and stores a bit in
695 // the BitVector "reachable()" for every block that can be reached
696 // from the start block of the graph. If "dont_visit" is non-null, the given
697 // block is treated as if it would not be part of the graph. "visited_count()"
698 // returns the number of reachable blocks.
class ReachabilityAnalyzer BASE_EMBEDDED {
 public:
  // Runs the analysis eagerly from the constructor: a DFS from |entry_block|
  // that never visits |dont_visit| (which may be NULL).
  ReachabilityAnalyzer(HBasicBlock* entry_block,
                       int block_count,
                       HBasicBlock* dont_visit)
      : visited_count_(0),
        stack_(16, entry_block->zone()),
        reachable_(block_count, entry_block->zone()),
        dont_visit_(dont_visit) {
    PushBlock(entry_block);
    Analyze();
  }

  int visited_count() const { return visited_count_; }
  const BitVector* reachable() const { return &reachable_; }

 private:
  // Queues a block for visiting unless it is excluded or already seen.
  void PushBlock(HBasicBlock* block) {
    if (block != NULL && block != dont_visit_ &&
        !reachable_.Contains(block->block_id())) {
      reachable_.Add(block->block_id());
      stack_.Add(block, block->zone());
      visited_count_++;
    }
  }

  // Iterative DFS over successor edges.
  void Analyze() {
    while (!stack_.is_empty()) {
      HControlInstruction* end = stack_.RemoveLast()->end();
      for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
        PushBlock(it.Current());
      }
    }
  }

  int visited_count_;             // Number of reachable blocks found so far.
  ZoneList<HBasicBlock*> stack_;  // DFS worklist.
  BitVector reachable_;           // One bit per block id.
  HBasicBlock* dont_visit_;       // Block treated as removed; may be NULL.
};
739
740
Verify(bool do_full_verify) const741 void HGraph::Verify(bool do_full_verify) const {
742 Heap::RelocationLock relocation_lock(isolate()->heap());
743 AllowHandleDereference allow_deref;
744 AllowDeferredHandleDereference allow_deferred_deref;
745 for (int i = 0; i < blocks_.length(); i++) {
746 HBasicBlock* block = blocks_.at(i);
747
748 block->Verify();
749
750 // Check that every block contains at least one node and that only the last
751 // node is a control instruction.
752 HInstruction* current = block->first();
753 DCHECK(current != NULL && current->IsBlockEntry());
754 while (current != NULL) {
755 DCHECK((current->next() == NULL) == current->IsControlInstruction());
756 DCHECK(current->block() == block);
757 current->Verify();
758 current = current->next();
759 }
760
761 // Check that successors are correctly set.
762 HBasicBlock* first = block->end()->FirstSuccessor();
763 HBasicBlock* second = block->end()->SecondSuccessor();
764 DCHECK(second == NULL || first != NULL);
765
766 // Check that the predecessor array is correct.
767 if (first != NULL) {
768 DCHECK(first->predecessors()->Contains(block));
769 if (second != NULL) {
770 DCHECK(second->predecessors()->Contains(block));
771 }
772 }
773
774 // Check that phis have correct arguments.
775 for (int j = 0; j < block->phis()->length(); j++) {
776 HPhi* phi = block->phis()->at(j);
777 phi->Verify();
778 }
779
780 // Check that all join blocks have predecessors that end with an
781 // unconditional goto and agree on their environment node id.
782 if (block->predecessors()->length() >= 2) {
783 BailoutId id =
784 block->predecessors()->first()->last_environment()->ast_id();
785 for (int k = 0; k < block->predecessors()->length(); k++) {
786 HBasicBlock* predecessor = block->predecessors()->at(k);
787 DCHECK(predecessor->end()->IsGoto() ||
788 predecessor->end()->IsDeoptimize());
789 DCHECK(predecessor->last_environment()->ast_id() == id);
790 }
791 }
792 }
793
794 // Check special property of first block to have no predecessors.
795 DCHECK(blocks_.at(0)->predecessors()->is_empty());
796
797 if (do_full_verify) {
798 // Check that the graph is fully connected.
799 ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
800 DCHECK(analyzer.visited_count() == blocks_.length());
801
802 // Check that entry block dominator is NULL.
803 DCHECK(entry_block_->dominator() == NULL);
804
805 // Check dominators.
806 for (int i = 0; i < blocks_.length(); ++i) {
807 HBasicBlock* block = blocks_.at(i);
808 if (block->dominator() == NULL) {
809 // Only start block may have no dominator assigned to.
810 DCHECK(i == 0);
811 } else {
812 // Assert that block is unreachable if dominator must not be visited.
813 ReachabilityAnalyzer dominator_analyzer(entry_block_,
814 blocks_.length(),
815 block->dominator());
816 DCHECK(!dominator_analyzer.reachable()->Contains(block->block_id()));
817 }
818 }
819 }
820 }
821
822 #endif
823
824
GetConstant(SetOncePointer<HConstant> * pointer,int32_t value)825 HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
826 int32_t value) {
827 if (!pointer->is_set()) {
828 // Can't pass GetInvalidContext() to HConstant::New, because that will
829 // recursively call GetConstant
830 HConstant* constant = HConstant::New(isolate(), zone(), NULL, value);
831 constant->InsertAfter(entry_block()->first());
832 pointer->set(constant);
833 return constant;
834 }
835 return ReinsertConstantIfNecessary(pointer->get());
836 }
837
838
ReinsertConstantIfNecessary(HConstant * constant)839 HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) {
840 if (!constant->IsLinked()) {
841 // The constant was removed from the graph. Reinsert.
842 constant->ClearFlag(HValue::kIsDead);
843 constant->InsertAfter(entry_block()->first());
844 }
845 return constant;
846 }
847
848
GetConstant0()849 HConstant* HGraph::GetConstant0() {
850 return GetConstant(&constant_0_, 0);
851 }
852
853
GetConstant1()854 HConstant* HGraph::GetConstant1() {
855 return GetConstant(&constant_1_, 1);
856 }
857
858
GetConstantMinus1()859 HConstant* HGraph::GetConstantMinus1() {
860 return GetConstant(&constant_minus1_, -1);
861 }
862
863
GetConstantBool(bool value)864 HConstant* HGraph::GetConstantBool(bool value) {
865 return value ? GetConstantTrue() : GetConstantFalse();
866 }
867
// Defines HGraph::GetConstant<Name>() accessors for the singleton oddball
// constants (undefined, true, false, the hole, null, optimized-out): created
// lazily, pinned after the graph entry, and re-inserted if a pass removed
// them.
#define DEFINE_GET_CONSTANT(Name, name, constant, type, htype, boolean_value, \
                            undetectable)                                      \
  HConstant* HGraph::GetConstant##Name() {                                     \
    if (!constant_##name##_.is_set()) {                                        \
      HConstant* constant = new (zone()) HConstant(                            \
          Unique<Object>::CreateImmovable(isolate()->factory()->constant()),   \
          Unique<Map>::CreateImmovable(isolate()->factory()->type##_map()),    \
          false, Representation::Tagged(), htype, true, boolean_value,         \
          undetectable, ODDBALL_TYPE);                                         \
      constant->InsertAfter(entry_block()->first());                           \
      constant_##name##_.set(constant);                                        \
    }                                                                          \
    return ReinsertConstantIfNecessary(constant_##name##_.get());              \
  }

DEFINE_GET_CONSTANT(Undefined, undefined, undefined_value, undefined,
                    HType::Undefined(), false, true)
DEFINE_GET_CONSTANT(True, true, true_value, boolean, HType::Boolean(), true,
                    false)
DEFINE_GET_CONSTANT(False, false, false_value, boolean, HType::Boolean(), false,
                    false)
DEFINE_GET_CONSTANT(Hole, the_hole, the_hole_value, the_hole, HType::None(),
                    false, false)
DEFINE_GET_CONSTANT(Null, null, null_value, null, HType::Null(), false, true)
DEFINE_GET_CONSTANT(OptimizedOut, optimized_out, optimized_out, optimized_out,
                    HType::None(), false, false)

#undef DEFINE_GET_CONSTANT
896
// Defines HGraph::IsConstant<Name>() predicates: true iff the cached
// singleton for that value exists and |constant| is exactly it.
#define DEFINE_IS_CONSTANT(Name, name)                                         \
  bool HGraph::IsConstant##Name(HConstant* constant) {                         \
    return constant_##name##_.is_set() && constant == constant_##name##_.get(); \
  }
DEFINE_IS_CONSTANT(Undefined, undefined)
DEFINE_IS_CONSTANT(0, 0)
DEFINE_IS_CONSTANT(1, 1)
DEFINE_IS_CONSTANT(Minus1, minus1)
DEFINE_IS_CONSTANT(True, true)
DEFINE_IS_CONSTANT(False, false)
DEFINE_IS_CONSTANT(Hole, the_hole)
DEFINE_IS_CONSTANT(Null, null)

#undef DEFINE_IS_CONSTANT
911
912
// Returns a cached constant used where a context operand is required but no
// valid context exists. The magic value 0xFFFFC0C7 is presumably a
// recognizable sentinel pattern for debugging — TODO confirm its origin.
HConstant* HGraph::GetInvalidContext() {
  return GetConstant(&constant_invalid_context_, 0xFFFFC0C7);
}
916
917
IsStandardConstant(HConstant * constant)918 bool HGraph::IsStandardConstant(HConstant* constant) {
919 if (IsConstantUndefined(constant)) return true;
920 if (IsConstant0(constant)) return true;
921 if (IsConstant1(constant)) return true;
922 if (IsConstantMinus1(constant)) return true;
923 if (IsConstantTrue(constant)) return true;
924 if (IsConstantFalse(constant)) return true;
925 if (IsConstantHole(constant)) return true;
926 if (IsConstantNull(constant)) return true;
927 return false;
928 }
929
930
IfBuilder()931 HGraphBuilder::IfBuilder::IfBuilder() : builder_(NULL), needs_compare_(true) {}
932
933
// Constructs an IfBuilder that creates its own true/false arm blocks; a
// compare must be added (needs_compare_) before Then()/Else().
HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder)
    : needs_compare_(true) {
  Initialize(builder);
}
938
939
// Constructs an IfBuilder over an already-materialized HIfContinuation: the
// arm blocks are taken from |continuation| and no compare is needed.
HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder,
                                    HIfContinuation* continuation)
    : needs_compare_(false), first_true_block_(NULL), first_false_block_(NULL) {
  InitializeDontCreateBlocks(builder);
  continuation->Continue(&first_true_block_, &first_false_block_);
}
946
947
// Resets all IfBuilder bookkeeping to its initial state without allocating
// the true/false arm blocks (used when the arms come from a continuation).
void HGraphBuilder::IfBuilder::InitializeDontCreateBlocks(
    HGraphBuilder* builder) {
  builder_ = builder;
  finished_ = false;
  did_then_ = false;
  did_else_ = false;
  did_else_if_ = false;
  did_and_ = false;
  did_or_ = false;
  captured_ = false;
  pending_merge_block_ = false;
  split_edge_merge_block_ = NULL;
  merge_at_join_blocks_ = NULL;
  normal_merge_at_join_block_count_ = 0;
  deopt_merge_at_join_block_count_ = 0;
}
964
965
// Full initialization: resets state and creates fresh true/false arm blocks,
// each seeded with a copy of the builder's current environment.
void HGraphBuilder::IfBuilder::Initialize(HGraphBuilder* builder) {
  InitializeDontCreateBlocks(builder);
  HEnvironment* env = builder->environment();
  first_true_block_ = builder->CreateBasicBlock(env->Copy());
  first_false_block_ = builder->CreateBasicBlock(env->Copy());
}
972
973
// Wires |compare| as the branch terminating the current condition block.
// For an else-if (Then() and Else() both already seen) fresh arm blocks are
// created first; when chained with Or()/And(), one successor is routed
// through a split edge into split_edge_merge_block_. Returns |compare|.
HControlInstruction* HGraphBuilder::IfBuilder::AddCompare(
    HControlInstruction* compare) {
  DCHECK(did_then_ == did_else_);
  if (did_else_) {
    // Handle if-then-elseif: reset per-condition state and open new arms.
    did_else_if_ = true;
    did_else_ = false;
    did_then_ = false;
    did_and_ = false;
    did_or_ = false;
    pending_merge_block_ = false;
    split_edge_merge_block_ = NULL;
    HEnvironment* env = builder()->environment();
    first_true_block_ = builder()->CreateBasicBlock(env->Copy());
    first_false_block_ = builder()->CreateBasicBlock(env->Copy());
  }
  if (split_edge_merge_block_ != NULL) {
    HEnvironment* env = first_false_block_->last_environment();
    HBasicBlock* split_edge = builder()->CreateBasicBlock(env->Copy());
    if (did_or_) {
      // || : a true result short-circuits into the shared true block.
      compare->SetSuccessorAt(0, split_edge);
      compare->SetSuccessorAt(1, first_false_block_);
    } else {
      // && : a false result short-circuits into the shared false block.
      compare->SetSuccessorAt(0, first_true_block_);
      compare->SetSuccessorAt(1, split_edge);
    }
    builder()->GotoNoSimulate(split_edge, split_edge_merge_block_);
  } else {
    compare->SetSuccessorAt(0, first_true_block_);
    compare->SetSuccessorAt(1, first_false_block_);
  }
  builder()->FinishCurrentBlock(compare);
  needs_compare_ = false;
  return compare;
}
1009
1010
// Chains the next condition with ||: true edges of all conditions so far are
// funneled into a shared split-edge merge block (created lazily), and
// evaluation of the next condition continues in the previous false block.
// Cannot be mixed with And() in the same condition chain.
void HGraphBuilder::IfBuilder::Or() {
  DCHECK(!needs_compare_);
  DCHECK(!did_and_);
  did_or_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy());
    builder()->GotoNoSimulate(first_true_block_, split_edge_merge_block_);
    first_true_block_ = split_edge_merge_block_;
  }
  builder()->set_current_block(first_false_block_);
  first_false_block_ = builder()->CreateBasicBlock(env->Copy());
}
1024
1025
// Chains the next condition with &&: false edges of all conditions so far
// are funneled into a shared split-edge merge block (created lazily), and
// evaluation of the next condition continues in the previous true block.
// Cannot be mixed with Or() in the same condition chain.
void HGraphBuilder::IfBuilder::And() {
  DCHECK(!needs_compare_);
  DCHECK(!did_or_);
  did_and_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy());
    builder()->GotoNoSimulate(first_false_block_, split_edge_merge_block_);
    first_false_block_ = split_edge_merge_block_;
  }
  builder()->set_current_block(first_true_block_);
  first_true_block_ = builder()->CreateBasicBlock(env->Copy());
}
1039
1040
// Instead of merging the arms, hands their final blocks to |continuation|
// for later use. The builder is finished, marked captured (so End() is a
// no-op), and the current block is cleared.
void HGraphBuilder::IfBuilder::CaptureContinuation(
    HIfContinuation* continuation) {
  DCHECK(!did_else_if_);
  DCHECK(!finished_);
  DCHECK(!captured_);

  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  DCHECK(true_block != NULL);
  DCHECK(false_block != NULL);
  continuation->Capture(true_block, false_block);
  captured_ = true;
  builder()->set_current_block(NULL);
  End();
}
1057
1058
// Routes this if's unfinished true/false arms into the branch blocks of an
// existing |continuation|, instead of creating a merge block of its own.
void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) {
  DCHECK(!did_else_if_);
  DCHECK(!finished_);
  DCHECK(!captured_);
  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  merge_at_join_blocks_ = NULL;
  if (true_block != NULL && !true_block->IsFinished()) {
    DCHECK(continuation->IsTrueReachable());
    builder()->GotoNoSimulate(true_block, continuation->true_branch());
  }
  if (false_block != NULL && !false_block->IsFinished()) {
    DCHECK(continuation->IsFalseReachable());
    builder()->GotoNoSimulate(false_block, continuation->false_branch());
  }
  captured_ = true;
  End();
}
1078
1079
// Begins the "then" arm: emission continues in first_true_block_. If no
// compare was ever added, a branch on the constant false is emitted so the
// then arm still exists in the graph (see comment below).
void HGraphBuilder::IfBuilder::Then() {
  DCHECK(!captured_);
  DCHECK(!finished_);
  did_then_ = true;
  if (needs_compare_) {
    // Handle if's without any expressions, they jump directly to the "else"
    // branch. However, we must pretend that the "then" branch is reachable,
    // so that the graph builder visits it and sees any live range extending
    // constructs within it.
    HConstant* constant_false = builder()->graph()->GetConstantFalse();
    ToBooleanHints boolean_type = ToBooleanHint::kBoolean;
    HBranch* branch = builder()->New<HBranch>(
        constant_false, boolean_type, first_true_block_, first_false_block_);
    builder()->FinishCurrentBlock(branch);
  }
  builder()->set_current_block(first_true_block_);
  pending_merge_block_ = true;
}
1098
1099
// Ends the "then" arm (recording it for the final merge) and switches
// emission to the "else" arm in first_false_block_.
void HGraphBuilder::IfBuilder::Else() {
  DCHECK(did_then_);
  DCHECK(!captured_);
  DCHECK(!finished_);
  AddMergeAtJoinBlock(false);
  builder()->set_current_block(first_false_block_);
  pending_merge_block_ = true;
  did_else_ = true;
}
1109
// Emits an eager deoptimization with |reason| in the current arm and records
// the arm as a deopt merge (deopt=true), so End() terminates it with an
// abnormal exit rather than merging it.
void HGraphBuilder::IfBuilder::Deopt(DeoptimizeReason reason) {
  DCHECK(did_then_);
  builder()->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  AddMergeAtJoinBlock(true);
}
1115
1116
// Ends the current arm with a return of |value| (parameter count -1); the
// arm is recorded but, having no open block, does not reach the merge.
void HGraphBuilder::IfBuilder::Return(HValue* value) {
  HValue* parameter_count = builder()->graph()->GetConstantMinus1();
  builder()->FinishExitCurrentBlock(
      builder()->New<HReturn>(value, parameter_count));
  AddMergeAtJoinBlock(false);
}
1123
1124
AddMergeAtJoinBlock(bool deopt)1125 void HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) {
1126 if (!pending_merge_block_) return;
1127 HBasicBlock* block = builder()->current_block();
1128 DCHECK(block == NULL || !block->IsFinished());
1129 MergeAtJoinBlock* record = new (builder()->zone())
1130 MergeAtJoinBlock(block, deopt, merge_at_join_blocks_);
1131 merge_at_join_blocks_ = record;
1132 if (block != NULL) {
1133 DCHECK(block->end() == NULL);
1134 if (deopt) {
1135 normal_merge_at_join_block_count_++;
1136 } else {
1137 deopt_merge_at_join_block_count_++;
1138 }
1139 }
1140 builder()->set_current_block(NULL);
1141 pending_merge_block_ = false;
1142 }
1143
1144
// Closes both arms, synthesizing an empty Then()/Else() if the caller never
// emitted them, and records each arm for merging. No merge block is created
// here; that happens in End().
void HGraphBuilder::IfBuilder::Finish() {
  DCHECK(!finished_);
  if (!did_then_) {
    Then();
  }
  AddMergeAtJoinBlock(false);
  if (!did_else_) {
    Else();
    AddMergeAtJoinBlock(false);
  }
  finished_ = true;
}
1157
1158
// Finishes the builder and returns the last block of each arm. The records
// are prepended as they are added, so the list head is the else arm and its
// successor the then arm.
void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
                                      HBasicBlock** else_continuation) {
  Finish();

  MergeAtJoinBlock* else_record = merge_at_join_blocks_;
  if (else_continuation != NULL) {
    *else_continuation = else_record->block_;
  }
  MergeAtJoinBlock* then_record = else_record->next_;
  if (then_continuation != NULL) {
    *then_continuation = then_record->block_;
  }
  DCHECK(then_record->next_ == NULL);
}
1173
1174
EndUnreachable()1175 void HGraphBuilder::IfBuilder::EndUnreachable() {
1176 if (captured_) return;
1177 Finish();
1178 builder()->set_current_block(nullptr);
1179 }
1180
1181
// Merges the recorded arms. With exactly one surviving block, emission just
// continues there; otherwise every open non-deopt arm jumps to a fresh merge
// block and every deopt arm is terminated with an abnormal exit. No-op when
// the arms were captured into a continuation.
void HGraphBuilder::IfBuilder::End() {
  if (captured_) return;
  Finish();

  int total_merged_blocks = normal_merge_at_join_block_count_ +
                            deopt_merge_at_join_block_count_;
  DCHECK(total_merged_blocks >= 1);
  HBasicBlock* merge_block =
      total_merged_blocks == 1 ? NULL : builder()->graph()->CreateBasicBlock();

  // Merge non-deopt blocks first to ensure environment has right size for
  // padding.
  MergeAtJoinBlock* current = merge_at_join_blocks_;
  while (current != NULL) {
    if (!current->deopt_ && current->block_ != NULL) {
      // If there is only one block that makes it through to the end of the
      // if, then just set it as the current block and continue rather then
      // creating an unnecessary merge block.
      if (total_merged_blocks == 1) {
        builder()->set_current_block(current->block_);
        return;
      }
      builder()->GotoNoSimulate(current->block_, merge_block);
    }
    current = current->next_;
  }

  // Merge deopt blocks, padding when necessary.
  current = merge_at_join_blocks_;
  while (current != NULL) {
    if (current->deopt_ && current->block_ != NULL) {
      current->block_->FinishExit(
          HAbnormalExit::New(builder()->isolate(), builder()->zone(), NULL),
          SourcePosition::Unknown());
    }
    current = current->next_;
  }
  builder()->set_current_block(merge_block);
}
1221
1222
// Creates a while(true)-style loop with no induction variable.
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder) {
  Initialize(builder, NULL, kWhileTrue, NULL);
}
1226
1227
// Creates a counting loop whose induction variable steps by 1 in the given
// |direction|.
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
                                        LoopBuilder::Direction direction) {
  Initialize(builder, context, direction, builder->graph()->GetConstant1());
}
1232
1233
LoopBuilder(HGraphBuilder * builder,HValue * context,LoopBuilder::Direction direction,HValue * increment_amount)1234 HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
1235 LoopBuilder::Direction direction,
1236 HValue* increment_amount) {
1237 Initialize(builder, context, direction, increment_amount);
1238 increment_amount_ = increment_amount;
1239 }
1240
1241
// Shared constructor body: stores the loop parameters and creates the loop
// header block. Body/exit blocks are created later in BeginBody().
void HGraphBuilder::LoopBuilder::Initialize(HGraphBuilder* builder,
                                            HValue* context,
                                            Direction direction,
                                            HValue* increment_amount) {
  builder_ = builder;
  context_ = context;
  direction_ = direction;
  increment_amount_ = increment_amount;

  finished_ = false;
  header_block_ = builder->CreateLoopHeaderBlock();
  body_block_ = NULL;
  exit_block_ = NULL;
  exit_trampoline_block_ = NULL;
}
1257
1258
// Starts a counting loop: creates the induction-variable phi seeded with
// |initial|, ends the header with a |token| comparison of the phi against
// |terminating| (true -> body, false -> exit), and switches emission to the
// body. Returns the value the body should use as the induction variable —
// the incremented/decremented value for pre-increment/decrement loops,
// otherwise the phi itself.
HValue* HGraphBuilder::LoopBuilder::BeginBody(
    HValue* initial,
    HValue* terminating,
    Token::Value token) {
  DCHECK(direction_ != kWhileTrue);
  HEnvironment* env = builder_->environment();
  phi_ = header_block_->AddNewPhi(env->values()->length());
  phi_->AddInput(initial);
  env->Push(initial);
  builder_->GotoNoSimulate(header_block_);

  HEnvironment* body_env = env->Copy();
  HEnvironment* exit_env = env->Copy();
  // Remove the phi from the expression stack
  body_env->Pop();
  exit_env->Pop();
  body_block_ = builder_->CreateBasicBlock(body_env);
  exit_block_ = builder_->CreateBasicBlock(exit_env);

  builder_->set_current_block(header_block_);
  env->Pop();
  builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
      phi_, terminating, token, body_block_, exit_block_));

  builder_->set_current_block(body_block_);
  if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
    Isolate* isolate = builder_->isolate();
    HValue* one = builder_->graph()->GetConstant1();
    if (direction_ == kPreIncrement) {
      increment_ = HAdd::New(isolate, zone(), context_, phi_, one);
    } else {
      increment_ = HSub::New(isolate, zone(), context_, phi_, one);
    }
    // The header comparison bounds the phi, so the step cannot overflow.
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
    return increment_;
  } else {
    return phi_;
  }
}
1299
1300
// Starts a while(true) loop body: jumps to the header, continues emission
// there, and drops |drop_count| values from the environment.
void HGraphBuilder::LoopBuilder::BeginBody(int drop_count) {
  DCHECK(direction_ == kWhileTrue);
  HEnvironment* env = builder_->environment();
  builder_->GotoNoSimulate(header_block_);
  builder_->set_current_block(header_block_);
  env->Drop(drop_count);
}
1308
1309
// Jumps out of the loop via a trampoline block (created lazily on the first
// break) and kills the current block. For counting loops the normal exit
// block is wired into the same trampoline.
void HGraphBuilder::LoopBuilder::Break() {
  if (exit_trampoline_block_ == NULL) {
    // It's the first time we saw a break.
    if (direction_ == kWhileTrue) {
      HEnvironment* env = builder_->environment()->Copy();
      exit_trampoline_block_ = builder_->CreateBasicBlock(env);
    } else {
      HEnvironment* env = exit_block_->last_environment()->Copy();
      exit_trampoline_block_ = builder_->CreateBasicBlock(env);
      builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
    }
  }

  builder_->GotoNoSimulate(exit_trampoline_block_);
  builder_->set_current_block(NULL);
}
1326
1327
// Closes the loop body: emits the post-increment/decrement step if any,
// pushes the new induction value for the back-edge phi input, registers the
// back edge on the header, and resumes emission at the exit (or the break
// trampoline, if one exists).
void HGraphBuilder::LoopBuilder::EndBody() {
  DCHECK(!finished_);

  if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
    Isolate* isolate = builder_->isolate();
    if (direction_ == kPostIncrement) {
      increment_ =
          HAdd::New(isolate, zone(), context_, phi_, increment_amount_);
    } else {
      increment_ =
          HSub::New(isolate, zone(), context_, phi_, increment_amount_);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
  }

  if (direction_ != kWhileTrue) {
    // Push the new increment value on the expression stack to merge into
    // the phi.
    builder_->environment()->Push(increment_);
  }
  HBasicBlock* last_block = builder_->current_block();
  builder_->GotoNoSimulate(last_block, header_block_);
  header_block_->loop_information()->RegisterBackEdge(last_block);

  if (exit_trampoline_block_ != NULL) {
    builder_->set_current_block(exit_trampoline_block_);
  } else {
    builder_->set_current_block(exit_block_);
  }
  finished_ = true;
}
1360
1361
// Builds and returns the Hydrogen graph for info_, or NULL if BuildGraph()
// bailed out. Also initializes optional statistics and source tracing.
HGraph* HGraphBuilder::CreateGraph() {
  DCHECK(!FLAG_minimal);
  graph_ = new (zone()) HGraph(info_, descriptor_);
  if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
  if (!info_->IsStub() && is_tracking_positions()) {
    TraceInlinedFunction(info_->shared_info(), SourcePosition::Unknown(),
                         SourcePosition::kNotInlined);
  }
  CompilationPhase phase("H_Block building", info_);
  set_current_block(graph()->entry_block());
  if (!BuildGraph()) return NULL;
  graph()->FinalizeUniqueness();
  return graph_;
}
1376
// Under --hydrogen-track-positions, dumps the function's source text to the
// code tracer, and — when |inlining_id| denotes an inlined function — an
// INLINE(...) line relating the id to its call position.
void HGraphBuilder::TraceInlinedFunction(Handle<SharedFunctionInfo> shared,
                                         SourcePosition position,
                                         int inlining_id) {
  DCHECK(is_tracking_positions());

  if (!shared->script()->IsUndefined(isolate())) {
    Handle<Script> script(Script::cast(shared->script()), isolate());

    if (FLAG_hydrogen_track_positions &&
        !script->source()->IsUndefined(isolate())) {
      CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
      Object* source_name = script->name();
      OFStream os(tracing_scope.file());
      os << "--- FUNCTION SOURCE (";
      if (source_name->IsString()) {
        os << String::cast(source_name)->ToCString().get() << ":";
      }
      os << shared->DebugName()->ToCString().get() << ") id{";
      os << info_->optimization_id() << "," << inlining_id << "} ---\n";
      {
        DisallowHeapAllocation no_allocation;
        int start = shared->start_position();
        int len = shared->end_position() - start;
        String::SubStringRange source(String::cast(script->source()), start,
                                      len);
        // Escape the raw source so the dump stays one reversible text block.
        for (const auto& c : source) {
          os << AsReversiblyEscapedUC16(c);
        }
      }

      os << "\n--- END ---\n";
    }
  }

  if (FLAG_hydrogen_track_positions &&
      inlining_id != SourcePosition::kNotInlined) {
    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "INLINE (" << shared->DebugName()->ToCString().get() << ") id{"
       << info_->optimization_id() << "," << inlining_id << "} AS "
       << inlining_id << " AT " << position.ScriptOffset() << std::endl;
  }
}
1420
// Appends |instr| to the current block at the current source position and
// returns it. Inside a NoObservableSideEffectsScope the instruction is
// flagged as having no observable side effects.
HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
  DCHECK(current_block() != NULL);
  DCHECK(!FLAG_hydrogen_track_positions || position_.IsKnown() ||
         !info_->IsOptimizing());
  current_block()->AddInstruction(instr, source_position());
  if (graph()->IsInsideNoSideEffectsScope()) {
    instr->SetFlag(HValue::kHasNoObservableSideEffects);
  }
  return instr;
}
1431
1432
// Ends the current block with control instruction |last|; for returns and
// abnormal exits the current block is cleared since nothing follows.
void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) {
  DCHECK(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
         position_.IsKnown());
  current_block()->Finish(last, source_position());
  if (last->IsReturn() || last->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}
1441
1442
// Like FinishCurrentBlock() but uses HBasicBlock::FinishExit(), which is the
// exit-path variant of Finish() (see hydrogen.h for the distinction).
void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) {
  DCHECK(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
         position_.IsKnown());
  current_block()->FinishExit(instruction, source_position());
  if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}
1451
1452
// Emits a load/add/store sequence that increments a native StatsCounter,
// guarded by --native-code-counters and the counter being enabled.
void HGraphBuilder::AddIncrementCounter(StatsCounter* counter) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    HValue* reference = Add<HConstant>(ExternalReference(counter));
    HValue* old_value =
        Add<HLoadNamedField>(reference, nullptr, HObjectAccess::ForCounter());
    HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
    new_value->ClearFlag(HValue::kCanOverflow);  // Ignore counter overflow
    Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
                          new_value, STORE_TO_INITIALIZED_ENTRY);
  }
}
1464
1465
// Adds a simulate (deopt checkpoint) for bailout |id| to the current block.
// Not allowed inside a NoObservableSideEffectsScope.
void HGraphBuilder::AddSimulate(BailoutId id,
                                RemovableSimulate removable) {
  DCHECK(current_block() != NULL);
  DCHECK(!graph()->IsInsideNoSideEffectsScope());
  current_block()->AddNewSimulate(id, source_position(), removable);
}
1472
1473
CreateBasicBlock(HEnvironment * env)1474 HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
1475 HBasicBlock* b = graph()->CreateBasicBlock();
1476 b->SetInitialEnvironment(env);
1477 return b;
1478 }
1479
1480
// Creates a loop header block: its initial environment is the current
// environment copied as a loop header, and loop information is attached.
HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
  HBasicBlock* header = graph()->CreateBasicBlock();
  HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
  header->SetInitialEnvironment(entry_env);
  header->AttachLoopInformation();
  return header;
}
1488
1489
// Loads |object|'s map and decodes its elements kind from bit field 2.
HValue* HGraphBuilder::BuildGetElementsKind(HValue* object) {
  HValue* map = Add<HLoadNamedField>(object, nullptr, HObjectAccess::ForMap());

  HValue* bit_field2 =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
  return BuildDecodeField<Map::ElementsKindBits>(bit_field2);
}
1497
1498
// Decodes the enum length from |map|'s bit field 3. Emitted inside a
// no-observable-side-effects scope.
HValue* HGraphBuilder::BuildEnumLength(HValue* map) {
  NoObservableSideEffectsScope scope(this);
  HValue* bit_field3 =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField3());
  return BuildDecodeField<Map::EnumLengthBits>(bit_field3);
}
1505
1506
BuildCheckHeapObject(HValue * obj)1507 HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
1508 if (obj->type().IsHeapObject()) return obj;
1509 return Add<HCheckHeapObject>(obj);
1510 }
1511
// Emits an unconditional eager deoptimization with |reason| and terminates
// the current block with an abnormal exit.
void HGraphBuilder::FinishExitWithHardDeoptimization(DeoptimizeReason reason) {
  Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  FinishExitCurrentBlock(New<HAbnormalExit>());
}
1516
1517
// Ensures |string| is a string: returns it unchanged if its static type is
// already String, otherwise emits a heap-object check followed by an
// IS_STRING instance-type check.
HValue* HGraphBuilder::BuildCheckString(HValue* string) {
  if (!string->type().IsString()) {
    DCHECK(!string->IsConstant() ||
           !HConstant::cast(string)->HasStringValue());
    BuildCheckHeapObject(string);
    return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
  }
  return string;
}
1527
// Wraps the receiver |object| for a call through |checked| (the callee):
// returns it unchanged when it is statically a JSObject, or when the callee
// is a constant strict-mode or native function (which take the receiver
// as-is); otherwise emits HWrapReceiver.
HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* checked) {
  if (object->type().IsJSObject()) return object;
  HValue* function = checked->ActualValue();
  if (function->IsConstant() &&
      HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
    Handle<JSFunction> f = Handle<JSFunction>::cast(
        HConstant::cast(function)->handle(isolate()));
    SharedFunctionInfo* shared = f->shared();
    if (is_strict(shared->language_mode()) || shared->native()) return object;
  }
  return Add<HWrapReceiver>(object, checked);
}
1540
1541
// Bounds-checks |key| against capacity + JSObject::kMaxGap (deopting on a
// too-large gap) and grows the backing store to accommodate |key|, returning
// the new elements array.
HValue* HGraphBuilder::BuildCheckAndGrowElementsCapacity(
    HValue* object, HValue* elements, ElementsKind kind, HValue* length,
    HValue* capacity, HValue* key) {
  HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
  HValue* max_capacity = AddUncasted<HAdd>(capacity, max_gap);
  Add<HBoundsCheck>(key, max_capacity);

  HValue* new_capacity = BuildNewElementsCapacity(key);
  HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind, kind,
                                                   length, new_capacity);
  return new_elements;
}
1554
1555
// Handles a keyed store that may write past the current length: when |key|
// is out of bounds (>= length for holey kinds, == length otherwise) the
// backing store is grown — inline for stubs, via HMaybeGrowElements
// otherwise — and a JSArray's length is bumped to key + 1. In-bounds keys
// just get a bounds check. Returns the (possibly new) elements array.
HValue* HGraphBuilder::BuildCheckForCapacityGrow(
    HValue* object,
    HValue* elements,
    ElementsKind kind,
    HValue* length,
    HValue* key,
    bool is_js_array,
    PropertyAccessType access_type) {
  IfBuilder length_checker(this);

  Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
  length_checker.If<HCompareNumericAndBranch>(key, length, token);

  length_checker.Then();

  HValue* current_capacity = AddLoadFixedArrayLength(elements);

  if (top_info()->IsStub()) {
    IfBuilder capacity_checker(this);
    capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
                                                  Token::GTE);
    capacity_checker.Then();
    HValue* new_elements = BuildCheckAndGrowElementsCapacity(
        object, elements, kind, length, current_capacity, key);
    environment()->Push(new_elements);
    capacity_checker.Else();
    environment()->Push(elements);
    capacity_checker.End();
  } else {
    HValue* result = Add<HMaybeGrowElements>(
        object, elements, key, current_capacity, is_js_array, kind);
    environment()->Push(result);
  }

  if (is_js_array) {
    HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
    new_length->ClearFlag(HValue::kCanOverflow);

    Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
                          new_length);
  }

  if (access_type == STORE && kind == FAST_SMI_ELEMENTS) {
    HValue* checked_elements = environment()->Top();

    // Write zero to ensure that the new element is initialized with some smi.
    Add<HStoreKeyed>(checked_elements, key, graph()->GetConstant0(), nullptr,
                     kind);
  }

  length_checker.Else();
  Add<HBoundsCheck>(key, length);

  environment()->Push(elements);
  length_checker.End();

  return environment()->Pop();
}
1614
1615
// If |elements| is a copy-on-write fixed array (identified by its map),
// copies it at its current capacity so it can be written to; returns the
// writable elements array.
HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
                                                HValue* elements,
                                                ElementsKind kind,
                                                HValue* length) {
  Factory* factory = isolate()->factory();

  IfBuilder cow_checker(this);

  cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  cow_checker.Then();

  HValue* capacity = AddLoadFixedArrayLength(elements);

  HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
                                                   kind, length, capacity);

  environment()->Push(new_elements);

  cow_checker.Else();

  environment()->Push(elements);

  cow_checker.End();

  return environment()->Pop();
}
1642
// Emits the 32-bit integer hash of an element |index|, seeded with the
// heap's hash seed (a Thomas Wang-style shift/add/xor mix). Presumably must
// stay in sync with the runtime's integer hash used for number-dictionary
// keys — TODO confirm against ComputeIntegerHash.
HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
  // NOTE(review): the seed is cast to uint32_t but stored in an int32_t;
  // relies on implementation-defined wraparound — confirm this is intended.
  int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
  HValue* seed = Add<HConstant>(seed_value);
  HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);

  // hash = ~hash + (hash << 15);
  HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
  HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
                                           graph()->GetConstantMinus1());
  hash = AddUncasted<HAdd>(shifted_hash, not_hash);

  // hash = hash ^ (hash >> 12);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash + (hash << 2);
  shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
  hash = AddUncasted<HAdd>(hash, shifted_hash);

  // hash = hash ^ (hash >> 4);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash * 2057;
  hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
  hash->ClearFlag(HValue::kCanOverflow);

  // hash = hash ^ (hash >> 16);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
  return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
}
1674
// Emits an inline lookup of |key| (with precomputed |hash|) in a seeded
// number-dictionary backing store |elements|. Probes entries in a loop with
// quadratic-style stepping (entry += count, count += 1); an undefined key
// means "not found" and an entry whose |details| type bits are non-zero
// needs special handling — both fall back to Runtime::kKeyedGetProperty.
// Returns the loaded value.
HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(HValue* receiver,
                                                           HValue* elements,
                                                           HValue* key,
                                                           HValue* hash) {
  HValue* capacity =
      Add<HLoadKeyed>(elements, Add<HConstant>(NameDictionary::kCapacityIndex),
                      nullptr, nullptr, FAST_ELEMENTS);

  // Capacity is a power of two, so capacity - 1 masks a hash into range.
  HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
  mask->ChangeRepresentation(Representation::Integer32());
  mask->ClearFlag(HValue::kCanOverflow);

  HValue* entry = hash;
  HValue* count = graph()->GetConstant1();
  Push(entry);
  Push(count);

  HIfContinuation return_or_loop_continuation(graph()->CreateBasicBlock(),
                                              graph()->CreateBasicBlock());
  HIfContinuation found_key_match_continuation(graph()->CreateBasicBlock(),
                                               graph()->CreateBasicBlock());
  LoopBuilder probe_loop(this);
  probe_loop.BeginBody(2);  // Drop entry, count from last environment to
                            // appease live range building without simulates.

  count = Pop();
  entry = Pop();
  entry = AddUncasted<HBitwise>(Token::BIT_AND, entry, mask);
  int entry_size = SeededNumberDictionary::kEntrySize;
  HValue* base_index = AddUncasted<HMul>(entry, Add<HConstant>(entry_size));
  base_index->ClearFlag(HValue::kCanOverflow);
  int start_offset = SeededNumberDictionary::kElementsStartIndex;
  HValue* key_index =
      AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset));
  key_index->ClearFlag(HValue::kCanOverflow);

  HValue* candidate_key =
      Add<HLoadKeyed>(elements, key_index, nullptr, nullptr, FAST_ELEMENTS);
  IfBuilder if_undefined(this);
  if_undefined.If<HCompareObjectEqAndBranch>(candidate_key,
                                             graph()->GetConstantUndefined());
  if_undefined.Then();
  {
    // element == undefined means "not found". Call the runtime.
    // TODO(jkummerow): walk the prototype chain instead.
    Add<HPushArguments>(receiver, key);
    Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kKeyedGetProperty),
                           2));
  }
  if_undefined.Else();
  {
    IfBuilder if_match(this);
    if_match.If<HCompareObjectEqAndBranch>(candidate_key, key);
    if_match.Then();
    if_match.Else();

    // Update non-internalized string in the dictionary with internalized key?
    IfBuilder if_update_with_internalized(this);
    HValue* smi_check =
        if_update_with_internalized.IfNot<HIsSmiAndBranch>(candidate_key);
    if_update_with_internalized.And();
    HValue* map = AddLoadMap(candidate_key, smi_check);
    HValue* instance_type =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
    HValue* not_internalized_bit = AddUncasted<HBitwise>(
        Token::BIT_AND, instance_type,
        Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));
    if_update_with_internalized.If<HCompareNumericAndBranch>(
        not_internalized_bit, graph()->GetConstant0(), Token::NE);
    if_update_with_internalized.And();
    if_update_with_internalized.IfNot<HCompareObjectEqAndBranch>(
        candidate_key, graph()->GetConstantHole());
    if_update_with_internalized.AndIf<HStringCompareAndBranch>(candidate_key,
                                                               key, Token::EQ);
    if_update_with_internalized.Then();
    // Replace a key that is a non-internalized string by the equivalent
    // internalized string for faster further lookups.
    Add<HStoreKeyed>(elements, key_index, key, nullptr, FAST_ELEMENTS);
    if_update_with_internalized.Else();

    if_update_with_internalized.JoinContinuation(&found_key_match_continuation);
    if_match.JoinContinuation(&found_key_match_continuation);

    IfBuilder found_key_match(this, &found_key_match_continuation);
    found_key_match.Then();
    // Key at current probe matches. Relevant bits in the |details| field must
    // be zero, otherwise the dictionary element requires special handling.
    HValue* details_index =
        AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 2));
    details_index->ClearFlag(HValue::kCanOverflow);
    HValue* details = Add<HLoadKeyed>(elements, details_index, nullptr, nullptr,
                                      FAST_ELEMENTS);
    int details_mask = PropertyDetails::TypeField::kMask;
    details = AddUncasted<HBitwise>(Token::BIT_AND, details,
                                    Add<HConstant>(details_mask));
    IfBuilder details_compare(this);
    details_compare.If<HCompareNumericAndBranch>(
        details, graph()->GetConstant0(), Token::EQ);
    details_compare.Then();
    // Value slot sits at offset +1 within the entry.
    HValue* result_index =
        AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 1));
    result_index->ClearFlag(HValue::kCanOverflow);
    Push(Add<HLoadKeyed>(elements, result_index, nullptr, nullptr,
                         FAST_ELEMENTS));
    details_compare.Else();
    Add<HPushArguments>(receiver, key);
    Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kKeyedGetProperty),
                           2));
    details_compare.End();

    found_key_match.Else();
    found_key_match.JoinContinuation(&return_or_loop_continuation);
  }
  if_undefined.JoinContinuation(&return_or_loop_continuation);

  IfBuilder return_or_loop(this, &return_or_loop_continuation);
  return_or_loop.Then();
  probe_loop.Break();

  return_or_loop.Else();
  // Not found and not undefined: advance to the next probe position.
  entry = AddUncasted<HAdd>(entry, count);
  entry->ClearFlag(HValue::kCanOverflow);
  count = AddUncasted<HAdd>(count, graph()->GetConstant1());
  count->ClearFlag(HValue::kCanOverflow);
  Push(entry);
  Push(count);

  probe_loop.EndBody();

  return_or_loop.End();

  return Pop();
}
1808
// Builds IR that allocates and initializes a fresh JSIteratorResult object
// ({value: <value>, done: <done>}) in new space, without observable side
// effects.
HValue* HGraphBuilder::BuildCreateIterResultObject(HValue* value,
                                                   HValue* done) {
  NoObservableSideEffectsScope scope(this);

  // Allocate the JSIteratorResult object.
  HValue* result =
      Add<HAllocate>(Add<HConstant>(JSIteratorResult::kSize), HType::JSObject(),
                     NOT_TENURED, JS_OBJECT_TYPE, graph()->GetConstant0());

  // Initialize the JSIteratorResult object.
  // The map is the iterator-result map from the native context; properties
  // and elements are both the canonical empty fixed array.
  HValue* native_context = BuildGetNativeContext();
  HValue* map = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::ITERATOR_RESULT_MAP_INDEX));
  Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);
  HValue* empty_fixed_array = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
  Add<HStoreNamedField>(result, HObjectAccess::ForPropertiesPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(result, HObjectAccess::ForObservableJSObjectOffset(
                                    JSIteratorResult::kValueOffset),
                        value);
  Add<HStoreNamedField>(result, HObjectAccess::ForObservableJSObjectOffset(
                                    JSIteratorResult::kDoneOffset),
                        done);
  // Guard against layout changes: exactly the five fields initialized above
  // (map, properties, elements, value, done) make up the object.
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  return result;
}
1838
1839
// Converts the number {object} (with static AST type {type}) to a string.
// Constant numbers are folded at compile time. Otherwise the number string
// cache is probed (smi and heap-number keys are hashed differently); on a
// cache hit the cached string is returned, on a miss we fall back to
// Runtime::kNumberToStringSkipCache.
HValue* HGraphBuilder::BuildNumberToString(HValue* object, AstType* type) {
  NoObservableSideEffectsScope scope(this);

  // Convert constant numbers at compile time.
  if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
    Handle<Object> number = HConstant::cast(object)->handle(isolate());
    Handle<String> result = isolate()->factory()->NumberToString(number);
    return Add<HConstant>(result);
  }

  // Create a joinable continuation.
  // All cache-hit paths below join here with the matching key index pushed
  // on the environment stack; the "else" branch means cache miss.
  HIfContinuation found(graph()->CreateBasicBlock(),
                        graph()->CreateBasicBlock());

  // Load the number string cache.
  HValue* number_string_cache =
      Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  HValue* mask = AddLoadFixedArrayLength(number_string_cache);
  mask->set_type(HType::Smi());
  mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
  mask = AddUncasted<HSub>(mask, graph()->GetConstant1());

  // Check whether object is a smi.
  IfBuilder if_objectissmi(this);
  if_objectissmi.If<HIsSmiAndBranch>(object);
  if_objectissmi.Then();
  {
    // Compute hash for smi similar to smi_get_hash().
    HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);

    // Load the key.
    HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
    HValue* key = Add<HLoadKeyed>(number_string_cache, key_index, nullptr,
                                  nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE);

    // Check if object == key.
    IfBuilder if_objectiskey(this);
    if_objectiskey.If<HCompareObjectEqAndBranch>(object, key);
    if_objectiskey.Then();
    {
      // Make the key_index available.
      Push(key_index);
    }
    if_objectiskey.JoinContinuation(&found);
  }
  if_objectissmi.Else();
  {
    if (type->Is(AstType::SignedSmall())) {
      // The static type promised a smi, so anything else must deopt.
      if_objectissmi.Deopt(DeoptimizeReason::kExpectedSmi);
    } else {
      // Check if the object is a heap number.
      IfBuilder if_objectisnumber(this);
      HValue* objectisnumber = if_objectisnumber.If<HCompareMap>(
          object, isolate()->factory()->heap_number_map());
      if_objectisnumber.Then();
      {
        // Compute hash for heap number similar to double_get_hash().
        HValue* low = Add<HLoadNamedField>(
            object, objectisnumber,
            HObjectAccess::ForHeapNumberValueLowestBits());
        HValue* high = Add<HLoadNamedField>(
            object, objectisnumber,
            HObjectAccess::ForHeapNumberValueHighestBits());
        HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
        hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);

        // Load the key.
        HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
        HValue* key =
            Add<HLoadKeyed>(number_string_cache, key_index, nullptr, nullptr,
                            FAST_ELEMENTS, ALLOW_RETURN_HOLE);

        // Check if the key is a heap number and compare it with the object.
        IfBuilder if_keyisnotsmi(this);
        HValue* keyisnotsmi = if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
        if_keyisnotsmi.Then();
        {
          IfBuilder if_keyisheapnumber(this);
          if_keyisheapnumber.If<HCompareMap>(
              key, isolate()->factory()->heap_number_map());
          if_keyisheapnumber.Then();
          {
            // Check if values of key and object match.
            IfBuilder if_keyeqobject(this);
            if_keyeqobject.If<HCompareNumericAndBranch>(
                Add<HLoadNamedField>(key, keyisnotsmi,
                                     HObjectAccess::ForHeapNumberValue()),
                Add<HLoadNamedField>(object, objectisnumber,
                                     HObjectAccess::ForHeapNumberValue()),
                Token::EQ);
            if_keyeqobject.Then();
            {
              // Make the key_index available.
              Push(key_index);
            }
            if_keyeqobject.JoinContinuation(&found);
          }
          if_keyisheapnumber.JoinContinuation(&found);
        }
        if_keyisnotsmi.JoinContinuation(&found);
      }
      if_objectisnumber.Else();
      {
        if (type->Is(AstType::Number())) {
          // The static type promised a number, so anything else must deopt.
          if_objectisnumber.Deopt(DeoptimizeReason::kExpectedHeapNumber);
        }
      }
      if_objectisnumber.JoinContinuation(&found);
    }
  }
  if_objectissmi.JoinContinuation(&found);

  // Check for cache hit.
  IfBuilder if_found(this, &found);
  if_found.Then();
  {
    // Count number to string operation in native code.
    AddIncrementCounter(isolate()->counters()->number_to_string_native());

    // Load the value in case of cache hit.
    // The string lives right after its number in the cache (key_index + 1).
    HValue* key_index = Pop();
    HValue* value_index = AddUncasted<HAdd>(key_index, graph()->GetConstant1());
    Push(Add<HLoadKeyed>(number_string_cache, value_index, nullptr, nullptr,
                         FAST_ELEMENTS, ALLOW_RETURN_HOLE));
  }
  if_found.Else();
  {
    // Cache miss, fallback to runtime.
    Add<HPushArguments>(object);
    Push(Add<HCallRuntime>(
            Runtime::FunctionForId(Runtime::kNumberToStringSkipCache),
            1));
  }
  if_found.End();

  return Pop();
}
1980
BuildToNumber(HValue * input)1981 HValue* HGraphBuilder::BuildToNumber(HValue* input) {
1982 if (input->type().IsTaggedNumber() ||
1983 input->representation().IsSpecialization()) {
1984 return input;
1985 }
1986 Callable callable = CodeFactory::ToNumber(isolate());
1987 HValue* stub = Add<HConstant>(callable.code());
1988 HValue* values[] = {input};
1989 HCallWithDescriptor* instr = Add<HCallWithDescriptor>(
1990 stub, 0, callable.descriptor(), ArrayVector(values));
1991 instr->set_type(HType::TaggedNumber());
1992 return instr;
1993 }
1994
1995
// Implements ToObject for {receiver}: a JSReceiver is returned unchanged;
// a primitive is wrapped in a new JSValue whose map comes from the matching
// native-context constructor function. Undefined and null have no
// constructor function index, so they deopt and let the runtime throw.
HValue* HGraphBuilder::BuildToObject(HValue* receiver) {
  NoObservableSideEffectsScope scope(this);

  // Create a joinable continuation.
  // The "then" branch of {wrap} is entered with the constructor function
  // index pushed on the environment stack.
  HIfContinuation wrap(graph()->CreateBasicBlock(),
                       graph()->CreateBasicBlock());

  // Determine the proper global constructor function required to wrap
  // {receiver} into a JSValue, unless {receiver} is already a {JSReceiver}, in
  // which case we just return it. Deopts to Runtime::kToObject if {receiver}
  // is undefined or null.
  IfBuilder receiver_is_smi(this);
  receiver_is_smi.If<HIsSmiAndBranch>(receiver);
  receiver_is_smi.Then();
  {
    // Use global Number function.
    Push(Add<HConstant>(Context::NUMBER_FUNCTION_INDEX));
  }
  receiver_is_smi.Else();
  {
    // Determine {receiver} map and instance type.
    HValue* receiver_map =
        Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
    HValue* receiver_instance_type = Add<HLoadNamedField>(
        receiver_map, nullptr, HObjectAccess::ForMapInstanceType());

    // First check whether {receiver} is already a spec object (fast case).
    IfBuilder receiver_is_not_spec_object(this);
    receiver_is_not_spec_object.If<HCompareNumericAndBranch>(
        receiver_instance_type, Add<HConstant>(FIRST_JS_RECEIVER_TYPE),
        Token::LT);
    receiver_is_not_spec_object.Then();
    {
      // Load the constructor function index from the {receiver} map.
      HValue* constructor_function_index = Add<HLoadNamedField>(
          receiver_map, nullptr,
          HObjectAccess::ForMapInObjectPropertiesOrConstructorFunctionIndex());

      // Check if {receiver} has a constructor (null and undefined have no
      // constructors, so we deoptimize to the runtime to throw an exception).
      IfBuilder constructor_function_index_is_invalid(this);
      constructor_function_index_is_invalid.If<HCompareNumericAndBranch>(
          constructor_function_index,
          Add<HConstant>(Map::kNoConstructorFunctionIndex), Token::EQ);
      constructor_function_index_is_invalid.ThenDeopt(
          DeoptimizeReason::kUndefinedOrNullInToObject);
      constructor_function_index_is_invalid.End();

      // Use the global constructor function.
      Push(constructor_function_index);
    }
    receiver_is_not_spec_object.JoinContinuation(&wrap);
  }
  receiver_is_smi.JoinContinuation(&wrap);

  // Wrap the receiver if necessary.
  IfBuilder if_wrap(this, &wrap);
  if_wrap.Then();
  {
    // Grab the constructor function index.
    HValue* constructor_index = Pop();

    // Load native context.
    HValue* native_context = BuildGetNativeContext();

    // Determine the initial map for the global constructor.
    HValue* constructor = Add<HLoadKeyed>(native_context, constructor_index,
                                          nullptr, nullptr, FAST_ELEMENTS);
    HValue* constructor_initial_map = Add<HLoadNamedField>(
        constructor, nullptr, HObjectAccess::ForPrototypeOrInitialMap());
    // Allocate and initialize a JSValue wrapper.
    HValue* value =
        BuildAllocate(Add<HConstant>(JSValue::kSize), HType::JSObject(),
                      JS_VALUE_TYPE, HAllocationMode());
    Add<HStoreNamedField>(value, HObjectAccess::ForMap(),
                          constructor_initial_map);
    HValue* empty_fixed_array = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
    Add<HStoreNamedField>(value, HObjectAccess::ForPropertiesPointer(),
                          empty_fixed_array);
    Add<HStoreNamedField>(value, HObjectAccess::ForElementsPointer(),
                          empty_fixed_array);
    Add<HStoreNamedField>(value, HObjectAccess::ForObservableJSObjectOffset(
                                     JSValue::kValueOffset),
                          receiver);
    Push(value);
  }
  if_wrap.Else();
  { Push(receiver); }
  if_wrap.End();
  return Pop();
}
2087
2088
BuildAllocate(HValue * object_size,HType type,InstanceType instance_type,HAllocationMode allocation_mode)2089 HAllocate* HGraphBuilder::BuildAllocate(
2090 HValue* object_size,
2091 HType type,
2092 InstanceType instance_type,
2093 HAllocationMode allocation_mode) {
2094 // Compute the effective allocation size.
2095 HValue* size = object_size;
2096 if (allocation_mode.CreateAllocationMementos()) {
2097 size = AddUncasted<HAdd>(size, Add<HConstant>(AllocationMemento::kSize));
2098 size->ClearFlag(HValue::kCanOverflow);
2099 }
2100
2101 // Perform the actual allocation.
2102 HAllocate* object = Add<HAllocate>(
2103 size, type, allocation_mode.GetPretenureMode(), instance_type,
2104 graph()->GetConstant0(), allocation_mode.feedback_site());
2105
2106 // Setup the allocation memento.
2107 if (allocation_mode.CreateAllocationMementos()) {
2108 BuildCreateAllocationMemento(
2109 object, object_size, allocation_mode.current_site());
2110 }
2111
2112 return object;
2113 }
2114
2115
BuildAddStringLengths(HValue * left_length,HValue * right_length)2116 HValue* HGraphBuilder::BuildAddStringLengths(HValue* left_length,
2117 HValue* right_length) {
2118 // Compute the combined string length and check against max string length.
2119 HValue* length = AddUncasted<HAdd>(left_length, right_length);
2120 // Check that length <= kMaxLength <=> length < MaxLength + 1.
2121 HValue* max_length = Add<HConstant>(String::kMaxLength + 1);
2122 if (top_info()->IsStub() || !isolate()->IsStringLengthOverflowIntact()) {
2123 // This is a mitigation for crbug.com/627934; the real fix
2124 // will be to migrate the StringAddStub to TurboFan one day.
2125 IfBuilder if_invalid(this);
2126 if_invalid.If<HCompareNumericAndBranch>(length, max_length, Token::GT);
2127 if_invalid.Then();
2128 {
2129 Add<HCallRuntime>(
2130 Runtime::FunctionForId(Runtime::kThrowInvalidStringLength), 0);
2131 }
2132 if_invalid.End();
2133 } else {
2134 graph()->MarkDependsOnStringLengthOverflow();
2135 Add<HBoundsCheck>(length, max_length);
2136 }
2137 return length;
2138 }
2139
2140
// Allocates and initializes a ConsString of the given {length} whose two
// halves are {left} and {right}. Picks the one-byte or two-byte cons string
// map based on the operands' instance types.
HValue* HGraphBuilder::BuildCreateConsString(
    HValue* length,
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Determine the string instance types.
  HInstruction* left_instance_type = AddLoadStringInstanceType(left);
  HInstruction* right_instance_type = AddLoadStringInstanceType(right);

  // Allocate the cons string object. HAllocate does not care whether we
  // pass CONS_STRING_TYPE or CONS_ONE_BYTE_STRING_TYPE here, so we just use
  // CONS_STRING_TYPE here. Below we decide whether the cons string is
  // one-byte or two-byte and set the appropriate map.
  DCHECK(HAllocate::CompatibleInstanceTypes(CONS_STRING_TYPE,
                                            CONS_ONE_BYTE_STRING_TYPE));
  HAllocate* result = BuildAllocate(Add<HConstant>(ConsString::kSize),
                                    HType::String(), CONS_STRING_TYPE,
                                    allocation_mode);

  // Compute intersection and difference of instance types.
  HValue* anded_instance_types = AddUncasted<HBitwise>(
      Token::BIT_AND, left_instance_type, right_instance_type);
  HValue* xored_instance_types = AddUncasted<HBitwise>(
      Token::BIT_XOR, left_instance_type, right_instance_type);

  // We create a one-byte cons string if
  // 1. both strings are one-byte, or
  // 2. at least one of the strings is two-byte, but happens to contain only
  //    one-byte characters.
  // To do this, we check
  // 1. if both strings are one-byte, or if the one-byte data hint is set in
  //    both strings, or
  // 2. if one of the strings has the one-byte data hint set and the other
  //    string is one-byte.
  IfBuilder if_onebyte(this);
  STATIC_ASSERT(kOneByteStringTag != 0);
  STATIC_ASSERT(kOneByteDataHintMask != 0);
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, anded_instance_types,
          Add<HConstant>(static_cast<int32_t>(
                  kStringEncodingMask | kOneByteDataHintMask))),
      graph()->GetConstant0(), Token::NE);
  if_onebyte.Or();
  STATIC_ASSERT(kOneByteStringTag != 0 &&
                kOneByteDataHintTag != 0 &&
                kOneByteDataHintTag != kOneByteStringTag);
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, xored_instance_types,
          Add<HConstant>(static_cast<int32_t>(
                  kOneByteStringTag | kOneByteDataHintTag))),
      Add<HConstant>(static_cast<int32_t>(
              kOneByteStringTag | kOneByteDataHintTag)), Token::EQ);
  if_onebyte.Then();
  {
    // We can safely skip the write barrier for storing the map here.
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HConstant>(isolate()->factory()->cons_one_byte_string_map()));
  }
  if_onebyte.Else();
  {
    // We can safely skip the write barrier for storing the map here.
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HConstant>(isolate()->factory()->cons_string_map()));
  }
  if_onebyte.End();

  // Initialize the cons string fields: empty hash field, the combined
  // length, and the two halves.
  Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
                        Add<HConstant>(String::kEmptyHashField));
  Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringFirst(), left);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringSecond(), right);

  // Count the native string addition.
  AddIncrementCounter(isolate()->counters()->string_add_native());

  return result;
}
2223
2224
BuildCopySeqStringChars(HValue * src,HValue * src_offset,String::Encoding src_encoding,HValue * dst,HValue * dst_offset,String::Encoding dst_encoding,HValue * length)2225 void HGraphBuilder::BuildCopySeqStringChars(HValue* src,
2226 HValue* src_offset,
2227 String::Encoding src_encoding,
2228 HValue* dst,
2229 HValue* dst_offset,
2230 String::Encoding dst_encoding,
2231 HValue* length) {
2232 DCHECK(dst_encoding != String::ONE_BYTE_ENCODING ||
2233 src_encoding == String::ONE_BYTE_ENCODING);
2234 LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
2235 HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
2236 {
2237 HValue* src_index = AddUncasted<HAdd>(src_offset, index);
2238 HValue* value =
2239 AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
2240 HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
2241 Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
2242 }
2243 loop.EndBody();
2244 }
2245
2246
BuildObjectSizeAlignment(HValue * unaligned_size,int header_size)2247 HValue* HGraphBuilder::BuildObjectSizeAlignment(
2248 HValue* unaligned_size, int header_size) {
2249 DCHECK((header_size & kObjectAlignmentMask) == 0);
2250 HValue* size = AddUncasted<HAdd>(
2251 unaligned_size, Add<HConstant>(static_cast<int32_t>(
2252 header_size + kObjectAlignmentMask)));
2253 size->ClearFlag(HValue::kCanOverflow);
2254 return AddUncasted<HBitwise>(
2255 Token::BIT_AND, size, Add<HConstant>(static_cast<int32_t>(
2256 ~kObjectAlignmentMask)));
2257 }
2258
2259
// Concatenates two strings that are both known to be non-empty. Produces a
// ConsString once the combined length reaches ConsString::kMinLength;
// otherwise allocates a flat sequential string and copies the characters.
// Falls back to Runtime::kStringAdd when the operands have differing
// encodings / non-sequential representations, or when the flat result would
// not fit in a regular heap object.
HValue* HGraphBuilder::BuildUncheckedStringAdd(
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Determine the string lengths.
  HValue* left_length = AddLoadStringLength(left);
  HValue* right_length = AddLoadStringLength(right);

  // Compute the combined string length.
  HValue* length = BuildAddStringLengths(left_length, right_length);

  // Do some manual constant folding here.
  // If one side's length is a known nonzero constant and is by itself large
  // enough, the sum must reach ConsString::kMinLength, so we can emit the
  // cons-string path unconditionally.
  if (left_length->IsConstant()) {
    HConstant* c_left_length = HConstant::cast(left_length);
    DCHECK_NE(0, c_left_length->Integer32Value());
    if (c_left_length->Integer32Value() + 1 >= ConsString::kMinLength) {
      // The right string contains at least one character.
      return BuildCreateConsString(length, left, right, allocation_mode);
    }
  } else if (right_length->IsConstant()) {
    HConstant* c_right_length = HConstant::cast(right_length);
    DCHECK_NE(0, c_right_length->Integer32Value());
    if (c_right_length->Integer32Value() + 1 >= ConsString::kMinLength) {
      // The left string contains at least one character.
      return BuildCreateConsString(length, left, right, allocation_mode);
    }
  }

  // Check if we should create a cons string.
  IfBuilder if_createcons(this);
  if_createcons.If<HCompareNumericAndBranch>(
      length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
  if_createcons.Then();
  {
    // Create a cons string.
    Push(BuildCreateConsString(length, left, right, allocation_mode));
  }
  if_createcons.Else();
  {
    // Determine the string instance types.
    HValue* left_instance_type = AddLoadStringInstanceType(left);
    HValue* right_instance_type = AddLoadStringInstanceType(right);

    // Compute union and difference of instance types.
    HValue* ored_instance_types = AddUncasted<HBitwise>(
        Token::BIT_OR, left_instance_type, right_instance_type);
    HValue* xored_instance_types = AddUncasted<HBitwise>(
        Token::BIT_XOR, left_instance_type, right_instance_type);

    // Check if both strings have the same encoding and both are
    // sequential.
    IfBuilder if_sameencodingandsequential(this);
    if_sameencodingandsequential.If<HCompareNumericAndBranch>(
        AddUncasted<HBitwise>(
            Token::BIT_AND, xored_instance_types,
            Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
        graph()->GetConstant0(), Token::EQ);
    if_sameencodingandsequential.And();
    STATIC_ASSERT(kSeqStringTag == 0);
    if_sameencodingandsequential.If<HCompareNumericAndBranch>(
        AddUncasted<HBitwise>(
            Token::BIT_AND, ored_instance_types,
            Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))),
        graph()->GetConstant0(), Token::EQ);
    if_sameencodingandsequential.Then();
    {
      HConstant* string_map =
          Add<HConstant>(isolate()->factory()->string_map());
      HConstant* one_byte_string_map =
          Add<HConstant>(isolate()->factory()->one_byte_string_map());

      // Determine map and size depending on whether result is one-byte string.
      IfBuilder if_onebyte(this);
      STATIC_ASSERT(kOneByteStringTag != 0);
      if_onebyte.If<HCompareNumericAndBranch>(
          AddUncasted<HBitwise>(
              Token::BIT_AND, ored_instance_types,
              Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
          graph()->GetConstant0(), Token::NE);
      if_onebyte.Then();
      {
        // Allocate sequential one-byte string object.
        Push(length);
        Push(one_byte_string_map);
      }
      if_onebyte.Else();
      {
        // Allocate sequential two-byte string object.
        HValue* size = AddUncasted<HShl>(length, graph()->GetConstant1());
        size->ClearFlag(HValue::kCanOverflow);
        size->SetFlag(HValue::kUint32);
        Push(size);
        Push(string_map);
      }
      if_onebyte.End();
      HValue* map = Pop();

      // Calculate the number of bytes needed for the characters in the
      // string while observing object alignment.
      STATIC_ASSERT((SeqString::kHeaderSize & kObjectAlignmentMask) == 0);
      HValue* size = BuildObjectSizeAlignment(Pop(), SeqString::kHeaderSize);

      IfBuilder if_size(this);
      if_size.If<HCompareNumericAndBranch>(
          size, Add<HConstant>(kMaxRegularHeapObjectSize), Token::LT);
      if_size.Then();
      {
        // Allocate the string object. HAllocate does not care whether we pass
        // STRING_TYPE or ONE_BYTE_STRING_TYPE here, so we just use STRING_TYPE.
        HAllocate* result =
            BuildAllocate(size, HType::String(), STRING_TYPE, allocation_mode);
        Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);

        // Initialize the string fields.
        Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
                              Add<HConstant>(String::kEmptyHashField));
        Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);

        // Copy characters to the result string.
        IfBuilder if_twobyte(this);
        if_twobyte.If<HCompareObjectEqAndBranch>(map, string_map);
        if_twobyte.Then();
        {
          // Copy characters from the left string.
          BuildCopySeqStringChars(
              left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING, result,
              graph()->GetConstant0(), String::TWO_BYTE_ENCODING, left_length);

          // Copy characters from the right string.
          BuildCopySeqStringChars(
              right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING, result,
              left_length, String::TWO_BYTE_ENCODING, right_length);
        }
        if_twobyte.Else();
        {
          // Copy characters from the left string.
          BuildCopySeqStringChars(
              left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING, result,
              graph()->GetConstant0(), String::ONE_BYTE_ENCODING, left_length);

          // Copy characters from the right string.
          BuildCopySeqStringChars(
              right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING, result,
              left_length, String::ONE_BYTE_ENCODING, right_length);
        }
        if_twobyte.End();

        // Count the native string addition.
        AddIncrementCounter(isolate()->counters()->string_add_native());

        // Return the sequential string.
        Push(result);
      }
      if_size.Else();
      {
        // Fallback to the runtime to add the two strings. The string has to be
        // allocated in LO space.
        Add<HPushArguments>(left, right);
        Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kStringAdd), 2));
      }
      if_size.End();
    }
    if_sameencodingandsequential.Else();
    {
      // Fallback to the runtime to add the two strings.
      Add<HPushArguments>(left, right);
      Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kStringAdd), 2));
    }
    if_sameencodingandsequential.End();
  }
  if_createcons.End();

  return Pop();
}
2434
2435
BuildStringAdd(HValue * left,HValue * right,HAllocationMode allocation_mode)2436 HValue* HGraphBuilder::BuildStringAdd(
2437 HValue* left,
2438 HValue* right,
2439 HAllocationMode allocation_mode) {
2440 NoObservableSideEffectsScope no_effects(this);
2441
2442 // Determine string lengths.
2443 HValue* left_length = AddLoadStringLength(left);
2444 HValue* right_length = AddLoadStringLength(right);
2445
2446 // Check if left string is empty.
2447 IfBuilder if_leftempty(this);
2448 if_leftempty.If<HCompareNumericAndBranch>(
2449 left_length, graph()->GetConstant0(), Token::EQ);
2450 if_leftempty.Then();
2451 {
2452 // Count the native string addition.
2453 AddIncrementCounter(isolate()->counters()->string_add_native());
2454
2455 // Just return the right string.
2456 Push(right);
2457 }
2458 if_leftempty.Else();
2459 {
2460 // Check if right string is empty.
2461 IfBuilder if_rightempty(this);
2462 if_rightempty.If<HCompareNumericAndBranch>(
2463 right_length, graph()->GetConstant0(), Token::EQ);
2464 if_rightempty.Then();
2465 {
2466 // Count the native string addition.
2467 AddIncrementCounter(isolate()->counters()->string_add_native());
2468
2469 // Just return the left string.
2470 Push(left);
2471 }
2472 if_rightempty.Else();
2473 {
2474 // Add the two non-empty strings.
2475 Push(BuildUncheckedStringAdd(left, right, allocation_mode));
2476 }
2477 if_rightempty.End();
2478 }
2479 if_leftempty.End();
2480
2481 return Pop();
2482 }
2483
2484
BuildUncheckedMonomorphicElementAccess(HValue * checked_object,HValue * key,HValue * val,bool is_js_array,ElementsKind elements_kind,PropertyAccessType access_type,LoadKeyedHoleMode load_mode,KeyedAccessStoreMode store_mode)2485 HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
2486 HValue* checked_object,
2487 HValue* key,
2488 HValue* val,
2489 bool is_js_array,
2490 ElementsKind elements_kind,
2491 PropertyAccessType access_type,
2492 LoadKeyedHoleMode load_mode,
2493 KeyedAccessStoreMode store_mode) {
2494 DCHECK(top_info()->IsStub() || checked_object->IsCompareMap() ||
2495 checked_object->IsCheckMaps());
2496 DCHECK(!IsFixedTypedArrayElementsKind(elements_kind) || !is_js_array);
2497 // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
2498 // on a HElementsTransition instruction. The flag can also be removed if the
2499 // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
2500 // ElementsKind transitions. Finally, the dependency can be removed for stores
2501 // for FAST_ELEMENTS, since a transition to HOLEY elements won't change the
2502 // generated store code.
2503 if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
2504 (elements_kind == FAST_ELEMENTS && access_type == STORE)) {
2505 checked_object->ClearDependsOnFlag(kElementsKind);
2506 }
2507
2508 bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
2509 bool fast_elements = IsFastObjectElementsKind(elements_kind);
2510 HValue* elements = AddLoadElements(checked_object);
2511 if (access_type == STORE && (fast_elements || fast_smi_only_elements) &&
2512 store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
2513 HCheckMaps* check_cow_map = Add<HCheckMaps>(
2514 elements, isolate()->factory()->fixed_array_map());
2515 check_cow_map->ClearDependsOnFlag(kElementsKind);
2516 }
2517 HInstruction* length = NULL;
2518 if (is_js_array) {
2519 length = Add<HLoadNamedField>(
2520 checked_object->ActualValue(), checked_object,
2521 HObjectAccess::ForArrayLength(elements_kind));
2522 } else {
2523 length = AddLoadFixedArrayLength(elements);
2524 }
2525 length->set_type(HType::Smi());
2526 HValue* checked_key = NULL;
2527 if (IsFixedTypedArrayElementsKind(elements_kind)) {
2528 checked_object = Add<HCheckArrayBufferNotNeutered>(checked_object);
2529
2530 HValue* external_pointer = Add<HLoadNamedField>(
2531 elements, nullptr,
2532 HObjectAccess::ForFixedTypedArrayBaseExternalPointer());
2533 HValue* base_pointer = Add<HLoadNamedField>(
2534 elements, nullptr, HObjectAccess::ForFixedTypedArrayBaseBasePointer());
2535 HValue* backing_store = AddUncasted<HAdd>(external_pointer, base_pointer,
2536 AddOfExternalAndTagged);
2537
2538 if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
2539 NoObservableSideEffectsScope no_effects(this);
2540 IfBuilder length_checker(this);
2541 length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
2542 length_checker.Then();
2543 IfBuilder negative_checker(this);
2544 HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
2545 key, graph()->GetConstant0(), Token::GTE);
2546 negative_checker.Then();
2547 HInstruction* result = AddElementAccess(
2548 backing_store, key, val, bounds_check, checked_object->ActualValue(),
2549 elements_kind, access_type);
2550 negative_checker.ElseDeopt(DeoptimizeReason::kNegativeKeyEncountered);
2551 negative_checker.End();
2552 length_checker.End();
2553 return result;
2554 } else {
2555 DCHECK(store_mode == STANDARD_STORE);
2556 checked_key = Add<HBoundsCheck>(key, length);
2557 return AddElementAccess(backing_store, checked_key, val, checked_object,
2558 checked_object->ActualValue(), elements_kind,
2559 access_type);
2560 }
2561 }
2562 DCHECK(fast_smi_only_elements ||
2563 fast_elements ||
2564 IsFastDoubleElementsKind(elements_kind));
2565
2566 // In case val is stored into a fast smi array, assure that the value is a smi
2567 // before manipulating the backing store. Otherwise the actual store may
2568 // deopt, leaving the backing store in an invalid state.
2569 if (access_type == STORE && IsFastSmiElementsKind(elements_kind) &&
2570 !val->type().IsSmi()) {
2571 val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
2572 }
2573
2574 if (IsGrowStoreMode(store_mode)) {
2575 NoObservableSideEffectsScope no_effects(this);
2576 Representation representation = HStoreKeyed::RequiredValueRepresentation(
2577 elements_kind, STORE_TO_INITIALIZED_ENTRY);
2578 val = AddUncasted<HForceRepresentation>(val, representation);
2579 elements = BuildCheckForCapacityGrow(checked_object, elements,
2580 elements_kind, length, key,
2581 is_js_array, access_type);
2582 checked_key = key;
2583 } else {
2584 checked_key = Add<HBoundsCheck>(key, length);
2585
2586 if (access_type == STORE && (fast_elements || fast_smi_only_elements)) {
2587 if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
2588 NoObservableSideEffectsScope no_effects(this);
2589 elements = BuildCopyElementsOnWrite(checked_object, elements,
2590 elements_kind, length);
2591 } else {
2592 HCheckMaps* check_cow_map = Add<HCheckMaps>(
2593 elements, isolate()->factory()->fixed_array_map());
2594 check_cow_map->ClearDependsOnFlag(kElementsKind);
2595 }
2596 }
2597 }
2598 return AddElementAccess(elements, checked_key, val, checked_object, nullptr,
2599 elements_kind, access_type, load_mode);
2600 }
2601
2602
BuildCalculateElementsSize(ElementsKind kind,HValue * capacity)2603 HValue* HGraphBuilder::BuildCalculateElementsSize(ElementsKind kind,
2604 HValue* capacity) {
2605 int elements_size = IsFastDoubleElementsKind(kind)
2606 ? kDoubleSize
2607 : kPointerSize;
2608
2609 HConstant* elements_size_value = Add<HConstant>(elements_size);
2610 HInstruction* mul =
2611 HMul::NewImul(isolate(), zone(), context(), capacity->ActualValue(),
2612 elements_size_value);
2613 AddInstruction(mul);
2614 mul->ClearFlag(HValue::kCanOverflow);
2615
2616 STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);
2617
2618 HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
2619 HValue* total_size = AddUncasted<HAdd>(mul, header_size);
2620 total_size->ClearFlag(HValue::kCanOverflow);
2621 return total_size;
2622 }
2623
2624
AllocateJSArrayObject(AllocationSiteMode mode)2625 HAllocate* HGraphBuilder::AllocateJSArrayObject(AllocationSiteMode mode) {
2626 int base_size = JSArray::kSize;
2627 if (mode == TRACK_ALLOCATION_SITE) {
2628 base_size += AllocationMemento::kSize;
2629 }
2630 HConstant* size_in_bytes = Add<HConstant>(base_size);
2631 return Add<HAllocate>(size_in_bytes, HType::JSArray(), NOT_TENURED,
2632 JS_OBJECT_TYPE, graph()->GetConstant0());
2633 }
2634
2635
EstablishElementsAllocationSize(ElementsKind kind,int capacity)2636 HConstant* HGraphBuilder::EstablishElementsAllocationSize(
2637 ElementsKind kind,
2638 int capacity) {
2639 int base_size = IsFastDoubleElementsKind(kind)
2640 ? FixedDoubleArray::SizeFor(capacity)
2641 : FixedArray::SizeFor(capacity);
2642
2643 return Add<HConstant>(base_size);
2644 }
2645
2646
BuildAllocateElements(ElementsKind kind,HValue * size_in_bytes)2647 HAllocate* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
2648 HValue* size_in_bytes) {
2649 InstanceType instance_type = IsFastDoubleElementsKind(kind)
2650 ? FIXED_DOUBLE_ARRAY_TYPE
2651 : FIXED_ARRAY_TYPE;
2652
2653 return Add<HAllocate>(size_in_bytes, HType::HeapObject(), NOT_TENURED,
2654 instance_type, graph()->GetConstant0());
2655 }
2656
2657
BuildInitializeElementsHeader(HValue * elements,ElementsKind kind,HValue * capacity)2658 void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
2659 ElementsKind kind,
2660 HValue* capacity) {
2661 Factory* factory = isolate()->factory();
2662 Handle<Map> map = IsFastDoubleElementsKind(kind)
2663 ? factory->fixed_double_array_map()
2664 : factory->fixed_array_map();
2665
2666 Add<HStoreNamedField>(elements, HObjectAccess::ForMap(), Add<HConstant>(map));
2667 Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
2668 capacity);
2669 }
2670
2671
BuildAllocateAndInitializeArray(ElementsKind kind,HValue * capacity)2672 HValue* HGraphBuilder::BuildAllocateAndInitializeArray(ElementsKind kind,
2673 HValue* capacity) {
2674 // The HForceRepresentation is to prevent possible deopt on int-smi
2675 // conversion after allocation but before the new object fields are set.
2676 capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());
2677 HValue* size_in_bytes = BuildCalculateElementsSize(kind, capacity);
2678 HValue* new_array = BuildAllocateElements(kind, size_in_bytes);
2679 BuildInitializeElementsHeader(new_array, kind, capacity);
2680 return new_array;
2681 }
2682
2683
BuildJSArrayHeader(HValue * array,HValue * array_map,HValue * elements,AllocationSiteMode mode,ElementsKind elements_kind,HValue * allocation_site_payload,HValue * length_field)2684 void HGraphBuilder::BuildJSArrayHeader(HValue* array,
2685 HValue* array_map,
2686 HValue* elements,
2687 AllocationSiteMode mode,
2688 ElementsKind elements_kind,
2689 HValue* allocation_site_payload,
2690 HValue* length_field) {
2691 Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);
2692
2693 HValue* empty_fixed_array = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
2694
2695 Add<HStoreNamedField>(
2696 array, HObjectAccess::ForPropertiesPointer(), empty_fixed_array);
2697
2698 Add<HStoreNamedField>(array, HObjectAccess::ForElementsPointer(),
2699 elements != nullptr ? elements : empty_fixed_array);
2700
2701 Add<HStoreNamedField>(
2702 array, HObjectAccess::ForArrayLength(elements_kind), length_field);
2703
2704 if (mode == TRACK_ALLOCATION_SITE) {
2705 BuildCreateAllocationMemento(
2706 array, Add<HConstant>(JSArray::kSize), allocation_site_payload);
2707 }
2708 }
2709
2710
// Emits a keyed load or store on |elements|.  For stores, |val| is required
// and clamped for UINT8_CLAMPED_ELEMENTS; for loads, |dependency| carries the
// check the load relies on and |load_mode| controls hole handling.
HInstruction* HGraphBuilder::AddElementAccess(
    HValue* elements, HValue* checked_key, HValue* val, HValue* dependency,
    HValue* backing_store_owner, ElementsKind elements_kind,
    PropertyAccessType access_type, LoadKeyedHoleMode load_mode) {
  if (access_type == STORE) {
    DCHECK(val != NULL);
    // Clamped typed arrays saturate stored values into the uint8 range.
    if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
      val = Add<HClampToUint8>(val);
    }
    return Add<HStoreKeyed>(elements, checked_key, val, backing_store_owner,
                            elements_kind, STORE_TO_INITIALIZED_ENTRY);
  }

  DCHECK(access_type == LOAD);
  DCHECK(val == NULL);
  HLoadKeyed* load =
      Add<HLoadKeyed>(elements, checked_key, dependency, backing_store_owner,
                      elements_kind, load_mode);
  // Uint32 results get registered for the dedicated uint32 analysis pass.
  if (elements_kind == UINT32_ELEMENTS) {
    graph()->RecordUint32Instruction(load);
  }
  return load;
}
2734
2735
AddLoadMap(HValue * object,HValue * dependency)2736 HLoadNamedField* HGraphBuilder::AddLoadMap(HValue* object,
2737 HValue* dependency) {
2738 return Add<HLoadNamedField>(object, dependency, HObjectAccess::ForMap());
2739 }
2740
2741
AddLoadElements(HValue * object,HValue * dependency)2742 HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
2743 HValue* dependency) {
2744 return Add<HLoadNamedField>(
2745 object, dependency, HObjectAccess::ForElementsPointer());
2746 }
2747
2748
AddLoadFixedArrayLength(HValue * array,HValue * dependency)2749 HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
2750 HValue* array,
2751 HValue* dependency) {
2752 return Add<HLoadNamedField>(
2753 array, dependency, HObjectAccess::ForFixedArrayLength());
2754 }
2755
2756
AddLoadArrayLength(HValue * array,ElementsKind kind,HValue * dependency)2757 HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array,
2758 ElementsKind kind,
2759 HValue* dependency) {
2760 return Add<HLoadNamedField>(
2761 array, dependency, HObjectAccess::ForArrayLength(kind));
2762 }
2763
2764
BuildNewElementsCapacity(HValue * old_capacity)2765 HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
2766 HValue* half_old_capacity = AddUncasted<HShr>(old_capacity,
2767 graph_->GetConstant1());
2768
2769 HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity);
2770 new_capacity->ClearFlag(HValue::kCanOverflow);
2771
2772 HValue* min_growth = Add<HConstant>(16);
2773
2774 new_capacity = AddUncasted<HAdd>(new_capacity, min_growth);
2775 new_capacity->ClearFlag(HValue::kCanOverflow);
2776
2777 return new_capacity;
2778 }
2779
2780
BuildGrowElementsCapacity(HValue * object,HValue * elements,ElementsKind kind,ElementsKind new_kind,HValue * length,HValue * new_capacity)2781 HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
2782 HValue* elements,
2783 ElementsKind kind,
2784 ElementsKind new_kind,
2785 HValue* length,
2786 HValue* new_capacity) {
2787 Add<HBoundsCheck>(
2788 new_capacity,
2789 Add<HConstant>((kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >>
2790 ElementsKindToShiftSize(new_kind)));
2791
2792 HValue* new_elements =
2793 BuildAllocateAndInitializeArray(new_kind, new_capacity);
2794
2795 BuildCopyElements(elements, kind, new_elements,
2796 new_kind, length, new_capacity);
2797
2798 Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
2799 new_elements);
2800
2801 return new_elements;
2802 }
2803
2804
// Fills elements[from..to) with |value|.  A NULL |to| means "up to the
// backing store's length".  Small constant ranges are unrolled.
void HGraphBuilder::BuildFillElementsWithValue(HValue* elements,
                                               ElementsKind elements_kind,
                                               HValue* from,
                                               HValue* to,
                                               HValue* value) {
  if (to == NULL) {
    to = AddLoadFixedArrayLength(elements);
  }

  // Special loop unfolding case
  STATIC_ASSERT(JSArray::kPreallocatedArrayElements <=
                kElementLoopUnrollThreshold);
  int initial_capacity = -1;
  if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
    int constant_from = from->GetInteger32Constant();
    int constant_to = to->GetInteger32Constant();

    if (constant_from == 0 && constant_to <= kElementLoopUnrollThreshold) {
      initial_capacity = constant_to;
    }
  }

  if (initial_capacity >= 0) {
    // Constant-bounded small range: emit straight-line stores.
    for (int i = 0; i < initial_capacity; i++) {
      HInstruction* key = Add<HConstant>(i);
      Add<HStoreKeyed>(elements, key, value, nullptr, elements_kind);
    }
  } else {
    // Carefully loop backwards so that the "from" remains live through the
    // loop rather than the "to".  This often corresponds to keeping length
    // live rather than capacity, which helps register allocation, since
    // length is used more than capacity after filling with holes.
    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

    HValue* key = builder.BeginBody(to, from, Token::GT);

    HValue* adjusted_key = AddUncasted<HSub>(key, graph()->GetConstant1());
    adjusted_key->ClearFlag(HValue::kCanOverflow);

    Add<HStoreKeyed>(elements, adjusted_key, value, nullptr, elements_kind);

    builder.EndBody();
  }
}
2849
2850
BuildFillElementsWithHole(HValue * elements,ElementsKind elements_kind,HValue * from,HValue * to)2851 void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
2852 ElementsKind elements_kind,
2853 HValue* from,
2854 HValue* to) {
2855 // Fast elements kinds need to be initialized in case statements below cause a
2856 // garbage collection.
2857
2858 HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
2859 ? graph()->GetConstantHole()
2860 : Add<HConstant>(HConstant::kHoleNaN);
2861
2862 // Since we're about to store a hole value, the store instruction below must
2863 // assume an elements kind that supports heap object values.
2864 if (IsFastSmiOrObjectElementsKind(elements_kind)) {
2865 elements_kind = FAST_HOLEY_ELEMENTS;
2866 }
2867
2868 BuildFillElementsWithValue(elements, elements_kind, from, to, hole);
2869 }
2870
2871
// Copies |length| property slots from |from_properties| to |to_properties|
// and fills the remaining tail [length, capacity) with undefined.
void HGraphBuilder::BuildCopyProperties(HValue* from_properties,
                                        HValue* to_properties, HValue* length,
                                        HValue* capacity) {
  ElementsKind kind = FAST_ELEMENTS;

  // Initialize the non-copied tail first.
  BuildFillElementsWithValue(to_properties, kind, length, capacity,
                             graph()->GetConstantUndefined());

  // Copy backwards from index length-1 down to 0.
  LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

  HValue* key = builder.BeginBody(length, graph()->GetConstant0(), Token::GT);

  key = AddUncasted<HSub>(key, graph()->GetConstant1());
  key->ClearFlag(HValue::kCanOverflow);

  HValue* element =
      Add<HLoadKeyed>(from_properties, key, nullptr, nullptr, kind);

  Add<HStoreKeyed>(to_properties, key, element, nullptr, kind);

  builder.EndBody();
}
2894
2895
// Copies |length| elements from |from_elements| to |to_elements|, translating
// hole representations when the two elements kinds differ.  A small
// compile-time-constant |capacity| selects an unrolled copy.
void HGraphBuilder::BuildCopyElements(HValue* from_elements,
                                      ElementsKind from_elements_kind,
                                      HValue* to_elements,
                                      ElementsKind to_elements_kind,
                                      HValue* length,
                                      HValue* capacity) {
  // Detect a small constant capacity to unroll the copy loop below.
  int constant_capacity = -1;
  if (capacity != NULL &&
      capacity->IsConstant() &&
      HConstant::cast(capacity)->HasInteger32Value()) {
    int constant_candidate = HConstant::cast(capacity)->Integer32Value();
    if (constant_candidate <= kElementLoopUnrollThreshold) {
      constant_capacity = constant_candidate;
    }
  }

  bool pre_fill_with_holes =
      IsFastDoubleElementsKind(from_elements_kind) &&
      IsFastObjectElementsKind(to_elements_kind);
  if (pre_fill_with_holes) {
    // If the copy might trigger a GC, make sure that the FixedArray is
    // pre-initialized with holes to make sure that it's always in a
    // consistent state.
    BuildFillElementsWithHole(to_elements, to_elements_kind,
                              graph()->GetConstant0(), NULL);
  }

  if (constant_capacity != -1) {
    // Unroll the loop for small elements kinds.
    for (int i = 0; i < constant_capacity; i++) {
      HValue* key_constant = Add<HConstant>(i);
      HInstruction* value = Add<HLoadKeyed>(
          from_elements, key_constant, nullptr, nullptr, from_elements_kind);
      Add<HStoreKeyed>(to_elements, key_constant, value, nullptr,
                       to_elements_kind);
    }
  } else {
    if (!pre_fill_with_holes &&
        (capacity == NULL || !length->Equals(capacity))) {
      // Initialize the tail [length, capacity) the copy loop won't touch.
      BuildFillElementsWithHole(to_elements, to_elements_kind,
                                length, NULL);
    }

    // Copy backwards from index length-1 down to 0.
    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

    HValue* key = builder.BeginBody(length, graph()->GetConstant0(),
                                    Token::GT);

    key = AddUncasted<HSub>(key, graph()->GetConstant1());
    key->ClearFlag(HValue::kCanOverflow);

    HValue* element = Add<HLoadKeyed>(from_elements, key, nullptr, nullptr,
                                      from_elements_kind, ALLOW_RETURN_HOLE);

    // Storing a hole into a smi array needs a kind that permits heap values.
    ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
                         IsFastSmiElementsKind(to_elements_kind))
        ? FAST_HOLEY_ELEMENTS : to_elements_kind;

    if (IsHoleyElementsKind(from_elements_kind) &&
        from_elements_kind != to_elements_kind) {
      // Translate holes between representations: NaN-hole for double
      // destinations, the hole sentinel otherwise.
      IfBuilder if_hole(this);
      if_hole.If<HCompareHoleAndBranch>(element);
      if_hole.Then();
      HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
          ? Add<HConstant>(HConstant::kHoleNaN)
          : graph()->GetConstantHole();
      Add<HStoreKeyed>(to_elements, key, hole_constant, nullptr, kind);
      if_hole.Else();
      HStoreKeyed* store =
          Add<HStoreKeyed>(to_elements, key, element, nullptr, kind);
      store->SetFlag(HValue::kTruncatingToNumber);
      if_hole.End();
    } else {
      HStoreKeyed* store =
          Add<HStoreKeyed>(to_elements, key, element, nullptr, kind);
      store->SetFlag(HValue::kTruncatingToNumber);
    }

    builder.EndBody();
  }

  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->inlined_copied_elements());
}
2980
// Appends an AllocationMemento directly after |previous_object| (which spans
// |previous_object_size| bytes) and points it at |allocation_site|.
void HGraphBuilder::BuildCreateAllocationMemento(
    HValue* previous_object,
    HValue* previous_object_size,
    HValue* allocation_site) {
  DCHECK(allocation_site != NULL);
  HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>(
      previous_object, previous_object_size, HType::HeapObject());
  AddStoreMapConstant(
      allocation_memento, isolate()->factory()->allocation_memento_map());
  Add<HStoreNamedField>(
      allocation_memento,
      HObjectAccess::ForAllocationMementoSite(),
      allocation_site);
  if (FLAG_allocation_site_pretenuring) {
    // Bump the site's memento-creation counter.
    HValue* memento_create_count =
        Add<HLoadNamedField>(allocation_site, nullptr,
                             HObjectAccess::ForAllocationSiteOffset(
                                 AllocationSite::kPretenureCreateCountOffset));
    memento_create_count = AddUncasted<HAdd>(
        memento_create_count, graph()->GetConstant1());
    // This smi value is reset to zero after every gc, overflow isn't a problem
    // since the counter is bounded by the new space size.
    memento_create_count->ClearFlag(HValue::kCanOverflow);
    Add<HStoreNamedField>(
        allocation_site, HObjectAccess::ForAllocationSiteOffset(
            AllocationSite::kPretenureCreateCountOffset), memento_create_count);
  }
}
3009
3010
BuildGetNativeContext()3011 HInstruction* HGraphBuilder::BuildGetNativeContext() {
3012 return Add<HLoadNamedField>(
3013 context(), nullptr,
3014 HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
3015 }
3016
3017
BuildGetNativeContext(HValue * closure)3018 HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* closure) {
3019 // Get the global object, then the native context
3020 HInstruction* context = Add<HLoadNamedField>(
3021 closure, nullptr, HObjectAccess::ForFunctionContextPointer());
3022 return Add<HLoadNamedField>(
3023 context, nullptr,
3024 HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
3025 }
3026
3027
// Walks the context chain upwards, either a dynamic |depth| number of times
// (when |depth| is non-NULL) or a static |depth_value| number of times, and
// returns the resulting context.
HValue* HGraphBuilder::BuildGetParentContext(HValue* depth, int depth_value) {
  HValue* script_context = context();
  if (depth != NULL) {
    HValue* zero = graph()->GetConstant0();

    // Loop state (current context, remaining depth) lives on the
    // environment stack across iterations.
    Push(script_context);
    Push(depth);

    LoopBuilder loop(this);
    loop.BeginBody(2);  // Drop script_context and depth from last environment
                        // to appease live range building without simulates.
    depth = Pop();
    script_context = Pop();

    // Step to the enclosing context and decrement the remaining depth.
    script_context = Add<HLoadNamedField>(
        script_context, nullptr,
        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
    depth = AddUncasted<HSub>(depth, graph()->GetConstant1());
    depth->ClearFlag(HValue::kCanOverflow);

    IfBuilder if_break(this);
    if_break.If<HCompareNumericAndBranch, HValue*>(depth, zero, Token::EQ);
    if_break.Then();
    {
      Push(script_context);  // The result.
      loop.Break();
    }
    if_break.Else();
    {
      // Not done yet: push the state back for the next iteration.
      Push(script_context);
      Push(depth);
    }
    loop.EndBody();
    if_break.End();

    script_context = Pop();
  } else if (depth_value > 0) {
    // Unroll the above loop.
    for (int i = 0; i < depth_value; i++) {
      script_context = Add<HLoadNamedField>(
          script_context, nullptr,
          HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
    }
  }
  return script_context;
}
3074
3075
BuildGetArrayFunction()3076 HInstruction* HGraphBuilder::BuildGetArrayFunction() {
3077 HInstruction* native_context = BuildGetNativeContext();
3078 HInstruction* index =
3079 Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
3080 return Add<HLoadKeyed>(native_context, index, nullptr, nullptr,
3081 FAST_ELEMENTS);
3082 }
3083
3084
// Loads the field at |index| from a JSArrayBufferView, yielding 0 instead of
// the stored value when the view's backing buffer has been neutered.
HValue* HGraphBuilder::BuildArrayBufferViewFieldAccessor(HValue* object,
                                                         HValue* checked_object,
                                                         FieldIndex index) {
  NoObservableSideEffectsScope scope(this);
  HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
      index.offset(), Representation::Tagged());
  HInstruction* buffer = Add<HLoadNamedField>(
      object, checked_object, HObjectAccess::ForJSArrayBufferViewBuffer());
  HInstruction* field = Add<HLoadNamedField>(object, checked_object, access);

  // Test the WasNeutered bit in the buffer's bit field.
  HInstruction* flags = Add<HLoadNamedField>(
      buffer, nullptr, HObjectAccess::ForJSArrayBufferBitField());
  HValue* was_neutered_mask =
      Add<HConstant>(1 << JSArrayBuffer::WasNeutered::kShift);
  HValue* was_neutered_test =
      AddUncasted<HBitwise>(Token::BIT_AND, flags, was_neutered_mask);

  // Select 0 for neutered buffers, the loaded field otherwise; the result is
  // passed through the environment stack.
  IfBuilder if_was_neutered(this);
  if_was_neutered.If<HCompareNumericAndBranch>(
      was_neutered_test, graph()->GetConstant0(), Token::NE);
  if_was_neutered.Then();
  Push(graph()->GetConstant0());
  if_was_neutered.Else();
  Push(field);
  if_was_neutered.End();

  return Pop();
}
3113
AddLoadJSBuiltin(int context_index)3114 HValue* HGraphBuilder::AddLoadJSBuiltin(int context_index) {
3115 HValue* native_context = BuildGetNativeContext();
3116 HObjectAccess function_access = HObjectAccess::ForContextSlot(context_index);
3117 return Add<HLoadNamedField>(native_context, nullptr, function_access);
3118 }
3119
// Sets up the builder for a full optimized compile of |info|'s function.
// NOTE(review): the -1 passed to the initial function state appears to mark
// the top-level (non-inlined) frame — confirm against FunctionState's ctor.
HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info,
                                               bool track_positions)
    : HGraphBuilder(info, CallInterfaceDescriptor(), track_positions),
      function_state_(NULL),
      initial_function_state_(this, info, NORMAL_RETURN, -1,
                              TailCallMode::kAllow),
      ast_context_(NULL),
      break_scope_(NULL),
      inlined_count_(0),
      globals_(10, info->zone()),
      osr_(new (info->zone()) HOsrBuilder(this)),
      bounds_(info->zone()) {
  // This is not initialized in the initializer list because the
  // constructor for the initial state relies on function_state_ == NULL
  // to know it's the initial state.
  function_state_ = &initial_function_state_;
  InitializeAstVisitor(info->isolate());
}
3138
3139
CreateJoin(HBasicBlock * first,HBasicBlock * second,BailoutId join_id)3140 HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first,
3141 HBasicBlock* second,
3142 BailoutId join_id) {
3143 if (first == NULL) {
3144 return second;
3145 } else if (second == NULL) {
3146 return first;
3147 } else {
3148 HBasicBlock* join_block = graph()->CreateBasicBlock();
3149 Goto(first, join_block);
3150 Goto(second, join_block);
3151 join_block->SetJoinId(join_id);
3152 return join_block;
3153 }
3154 }
3155
JoinContinue(IterationStatement * statement,BailoutId continue_id,HBasicBlock * exit_block,HBasicBlock * continue_block)3156 HBasicBlock* HOptimizedGraphBuilder::JoinContinue(IterationStatement* statement,
3157 BailoutId continue_id,
3158 HBasicBlock* exit_block,
3159 HBasicBlock* continue_block) {
3160 if (continue_block != NULL) {
3161 if (exit_block != NULL) Goto(exit_block, continue_block);
3162 continue_block->SetJoinId(continue_id);
3163 return continue_block;
3164 }
3165 return exit_block;
3166 }
3167
3168
CreateLoop(IterationStatement * statement,HBasicBlock * loop_entry,HBasicBlock * body_exit,HBasicBlock * loop_successor,HBasicBlock * break_block)3169 HBasicBlock* HOptimizedGraphBuilder::CreateLoop(IterationStatement* statement,
3170 HBasicBlock* loop_entry,
3171 HBasicBlock* body_exit,
3172 HBasicBlock* loop_successor,
3173 HBasicBlock* break_block) {
3174 if (body_exit != NULL) Goto(body_exit, loop_entry);
3175 loop_entry->PostProcessLoopHeader(statement);
3176 if (break_block != NULL) {
3177 if (loop_successor != NULL) Goto(loop_successor, break_block);
3178 break_block->SetJoinId(statement->ExitId());
3179 return break_block;
3180 }
3181 return loop_successor;
3182 }
3183
3184
3185 // Build a new loop header block and set it as the current block.
BuildLoopEntry()3186 HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry() {
3187 HBasicBlock* loop_entry = CreateLoopHeaderBlock();
3188 Goto(loop_entry);
3189 set_current_block(loop_entry);
3190 return loop_entry;
3191 }
3192
3193
BuildLoopEntry(IterationStatement * statement)3194 HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry(
3195 IterationStatement* statement) {
3196 HBasicBlock* loop_entry;
3197
3198 if (osr()->HasOsrEntryAt(statement)) {
3199 loop_entry = osr()->BuildOsrLoopEntry(statement);
3200 if (function_state()->IsInsideDoExpressionScope()) {
3201 Bailout(kDoExpressionUnmodelable);
3202 }
3203 } else {
3204 loop_entry = BuildLoopEntry();
3205 }
3206 return loop_entry;
3207 }
3208
3209
// Terminates the block with |instruction| and clears its environment.
void HBasicBlock::FinishExit(HControlInstruction* instruction,
                             SourcePosition position) {
  Finish(instruction, position);
  ClearEnvironment();
}
3215
3216
operator <<(std::ostream & os,const HBasicBlock & b)3217 std::ostream& operator<<(std::ostream& os, const HBasicBlock& b) {
3218 return os << "B" << b.block_id();
3219 }
3220
// Constructs an empty graph for |info|, creating the start environment and
// the entry block.
HGraph::HGraph(CompilationInfo* info, CallInterfaceDescriptor descriptor)
    : isolate_(info->isolate()),
      next_block_id_(0),
      entry_block_(NULL),
      blocks_(8, info->zone()),
      values_(16, info->zone()),
      phi_list_(NULL),
      uint32_instructions_(NULL),
      osr_(NULL),
      info_(info),
      descriptor_(descriptor),
      zone_(info->zone()),
      allow_code_motion_(false),
      use_optimistic_licm_(false),
      depends_on_empty_array_proto_elements_(false),
      depends_on_string_length_overflow_(false),
      type_change_checksum_(0),
      maximum_environment_size_(0),
      no_side_effects_scope_count_(0),
      disallow_adding_new_values_(false) {
  if (info->IsStub()) {
    // For stubs, explicitly add the context to the environment.
    start_environment_ =
        new (zone_) HEnvironment(zone_, descriptor.GetParameterCount() + 1);
  } else {
    // For functions, derive the environment from the scope and closure.
    start_environment_ =
        new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
  }
  start_environment_->set_ast_id(BailoutId::FunctionContext());
  entry_block_ = CreateBasicBlock();
  entry_block_->SetInitialEnvironment(start_environment_);
}
3253
3254
CreateBasicBlock()3255 HBasicBlock* HGraph::CreateBasicBlock() {
3256 HBasicBlock* result = new(zone()) HBasicBlock(this);
3257 blocks_.Add(result, zone());
3258 return result;
3259 }
3260
3261
FinalizeUniqueness()3262 void HGraph::FinalizeUniqueness() {
3263 DisallowHeapAllocation no_gc;
3264 for (int i = 0; i < blocks()->length(); ++i) {
3265 for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
3266 it.Current()->FinalizeUniqueness();
3267 }
3268 }
3269 }
3270
3271
3272 // Block ordering was implemented with two mutually recursive methods,
3273 // HGraph::Postorder and HGraph::PostorderLoopBlocks.
3274 // The recursion could lead to stack overflow so the algorithm has been
3275 // implemented iteratively.
3276 // At a high level the algorithm looks like this:
3277 //
3278 // Postorder(block, loop_header) : {
3279 // if (block has already been visited or is of another loop) return;
3280 // mark block as visited;
3281 // if (block is a loop header) {
3282 // VisitLoopMembers(block, loop_header);
3283 // VisitSuccessorsOfLoopHeader(block);
3284 // } else {
3285 // VisitSuccessors(block)
3286 // }
3287 // put block in result list;
3288 // }
3289 //
3290 // VisitLoopMembers(block, outer_loop_header) {
3291 // foreach (block b in block loop members) {
3292 // VisitSuccessorsOfLoopMember(b, outer_loop_header);
3293 // if (b is loop header) VisitLoopMembers(b);
3294 // }
3295 // }
3296 //
3297 // VisitSuccessorsOfLoopMember(block, outer_loop_header) {
3298 // foreach (block b in block successors) Postorder(b, outer_loop_header)
3299 // }
3300 //
3301 // VisitSuccessorsOfLoopHeader(block) {
3302 // foreach (block b in block successors) Postorder(b, block)
3303 // }
3304 //
3305 // VisitSuccessors(block, loop_header) {
3306 // foreach (block b in block successors) Postorder(b, loop_header)
3307 // }
3308 //
// The ordering is started by calling Postorder(entry, NULL).
3310 //
3311 // Each instance of PostorderProcessor represents the "stack frame" of the
3312 // recursion, and particularly keeps the state of the loop (iteration) of the
3313 // "Visit..." function it represents.
3314 // To recycle memory we keep all the frames in a double linked list but
3315 // this means that we cannot use constructors to initialize the frames.
3316 //
3317 class PostorderProcessor : public ZoneObject {
3318 public:
3319 // Back link (towards the stack bottom).
parent()3320 PostorderProcessor* parent() {return father_; }
3321 // Forward link (towards the stack top).
child()3322 PostorderProcessor* child() {return child_; }
block()3323 HBasicBlock* block() { return block_; }
loop()3324 HLoopInformation* loop() { return loop_; }
loop_header()3325 HBasicBlock* loop_header() { return loop_header_; }
3326
CreateEntryProcessor(Zone * zone,HBasicBlock * block)3327 static PostorderProcessor* CreateEntryProcessor(Zone* zone,
3328 HBasicBlock* block) {
3329 PostorderProcessor* result = new(zone) PostorderProcessor(NULL);
3330 return result->SetupSuccessors(zone, block, NULL);
3331 }
3332
PerformStep(Zone * zone,ZoneList<HBasicBlock * > * order)3333 PostorderProcessor* PerformStep(Zone* zone,
3334 ZoneList<HBasicBlock*>* order) {
3335 PostorderProcessor* next =
3336 PerformNonBacktrackingStep(zone, order);
3337 if (next != NULL) {
3338 return next;
3339 } else {
3340 return Backtrack(zone, order);
3341 }
3342 }
3343
3344 private:
  // Frames are recycled (see Push()), so construction only links the new
  // frame to its parent; the Setup* methods act as the real initializers.
  explicit PostorderProcessor(PostorderProcessor* father)
      : father_(father), child_(NULL), successor_iterator(NULL) { }

  // Each enum value states the cycle whose state is kept by this instance.
  enum LoopKind {
    NONE,
    SUCCESSORS,
    SUCCESSORS_OF_LOOP_HEADER,
    LOOP_MEMBERS,
    SUCCESSORS_OF_LOOP_MEMBER
  };
3356
  // Each "Setup..." method is like a constructor for a cycle state.

  // Starts visiting the successors of |block|.  Becomes a NONE no-op frame
  // when |block| is NULL, already ordered, or belongs to a loop other than
  // |loop_header|'s.  Loop headers additionally start a LOOP_MEMBERS cycle
  // in a freshly pushed child frame.
  PostorderProcessor* SetupSuccessors(Zone* zone,
                                      HBasicBlock* block,
                                      HBasicBlock* loop_header) {
    if (block == NULL || block->IsOrdered() ||
        block->parent_loop_header() != loop_header) {
      kind_ = NONE;
      block_ = NULL;
      loop_ = NULL;
      loop_header_ = NULL;
      return this;
    } else {
      block_ = block;
      loop_ = NULL;
      block->MarkAsOrdered();

      if (block->IsLoopHeader()) {
        kind_ = SUCCESSORS_OF_LOOP_HEADER;
        loop_header_ = block;
        InitializeSuccessors();
        // The pushed child frame iterates the loop members while this frame
        // keeps iterating the header's successors.
        PostorderProcessor* result = Push(zone);
        return result->SetupLoopMembers(zone, block, block->loop_information(),
                                        loop_header);
      } else {
        DCHECK(block->IsFinished());
        kind_ = SUCCESSORS;
        loop_header_ = loop_header;
        InitializeSuccessors();
        return this;
      }
    }
  }
3389
  // Starts iterating the members of |loop| (the VisitLoopMembers cycle of
  // the recursive formulation above).
  PostorderProcessor* SetupLoopMembers(Zone* zone,
                                       HBasicBlock* block,
                                       HLoopInformation* loop,
                                       HBasicBlock* loop_header) {
    kind_ = LOOP_MEMBERS;
    block_ = block;
    loop_ = loop;
    loop_header_ = loop_header;
    InitializeLoopMembers();
    return this;
  }
3401
  // Starts iterating the successors of |block|, which is a member of |loop|;
  // unlike SetupSuccessors() the block is not re-checked or re-marked here.
  PostorderProcessor* SetupSuccessorsOfLoopMember(
      HBasicBlock* block,
      HLoopInformation* loop,
      HBasicBlock* loop_header) {
    kind_ = SUCCESSORS_OF_LOOP_MEMBER;
    block_ = block;
    loop_ = loop;
    loop_header_ = loop_header;
    InitializeSuccessors();
    return this;
  }
3413
3414 // This method "allocates" a new stack frame.
Push(Zone * zone)3415 PostorderProcessor* Push(Zone* zone) {
3416 if (child_ == NULL) {
3417 child_ = new(zone) PostorderProcessor(this);
3418 }
3419 return child_;
3420 }
3421
  // Appends block_ to the postorder.  By now every successor must either be
  // in the order already or be a loop header (i.e. reached via a back edge).
  void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) {
    DCHECK(block_->end()->FirstSuccessor() == NULL ||
           order->Contains(block_->end()->FirstSuccessor()) ||
           block_->end()->FirstSuccessor()->IsLoopHeader());
    DCHECK(block_->end()->SecondSuccessor() == NULL ||
           order->Contains(block_->end()->SecondSuccessor()) ||
           block_->end()->SecondSuccessor()->IsLoopHeader());
    order->Add(block_, zone);
  }
3431
3432 // This method is the basic block to walk up the stack.
Pop(Zone * zone,ZoneList<HBasicBlock * > * order)3433 PostorderProcessor* Pop(Zone* zone,
3434 ZoneList<HBasicBlock*>* order) {
3435 switch (kind_) {
3436 case SUCCESSORS:
3437 case SUCCESSORS_OF_LOOP_HEADER:
3438 ClosePostorder(order, zone);
3439 return father_;
3440 case LOOP_MEMBERS:
3441 return father_;
3442 case SUCCESSORS_OF_LOOP_MEMBER:
3443 if (block()->IsLoopHeader() && block() != loop_->loop_header()) {
3444 // In this case we need to perform a LOOP_MEMBERS cycle so we
3445 // initialize it and return this instead of father.
3446 return SetupLoopMembers(zone, block(),
3447 block()->loop_information(), loop_header_);
3448 } else {
3449 return father_;
3450 }
3451 case NONE:
3452 return father_;
3453 }
3454 UNREACHABLE();
3455 return NULL;
3456 }
3457
3458 // Walks up the stack.
Backtrack(Zone * zone,ZoneList<HBasicBlock * > * order)3459 PostorderProcessor* Backtrack(Zone* zone,
3460 ZoneList<HBasicBlock*>* order) {
3461 PostorderProcessor* parent = Pop(zone, order);
3462 while (parent != NULL) {
3463 PostorderProcessor* next =
3464 parent->PerformNonBacktrackingStep(zone, order);
3465 if (next != NULL) {
3466 return next;
3467 } else {
3468 parent = parent->Pop(zone, order);
3469 }
3470 }
3471 return NULL;
3472 }
3473
  // Advances this frame's current cycle by one element and, if there is one,
  // pushes a child frame to process it.  Returns NULL when the cycle is
  // exhausted (the caller then backtracks).
  PostorderProcessor* PerformNonBacktrackingStep(
      Zone* zone,
      ZoneList<HBasicBlock*>* order) {
    HBasicBlock* next_block;
    switch (kind_) {
      case SUCCESSORS:
        // Plain block: descend into the next successor within the same loop.
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, loop_header_);
        }
        break;
      case SUCCESSORS_OF_LOOP_HEADER:
        // Loop header: its successors belong to the loop it starts, so the
        // header itself becomes the child's loop header.
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, block());
        }
        break;
      case LOOP_MEMBERS:
        // Enumerate every member block of the loop.
        next_block = AdvanceLoopMembers();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessorsOfLoopMember(next_block,
                                                     loop_, loop_header_);
        }
        break;
      case SUCCESSORS_OF_LOOP_MEMBER:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, loop_header_);
        }
        break;
      case NONE:
        return NULL;
    }
    return NULL;
  }
3513
3514 // The following two methods implement a "foreach b in successors" cycle.
InitializeSuccessors()3515 void InitializeSuccessors() {
3516 loop_index = 0;
3517 loop_length = 0;
3518 successor_iterator = HSuccessorIterator(block_->end());
3519 }
3520
AdvanceSuccessors()3521 HBasicBlock* AdvanceSuccessors() {
3522 if (!successor_iterator.Done()) {
3523 HBasicBlock* result = successor_iterator.Current();
3524 successor_iterator.Advance();
3525 return result;
3526 }
3527 return NULL;
3528 }
3529
3530 // The following two methods implement a "foreach b in loop members" cycle.
InitializeLoopMembers()3531 void InitializeLoopMembers() {
3532 loop_index = 0;
3533 loop_length = loop_->blocks()->length();
3534 }
3535
AdvanceLoopMembers()3536 HBasicBlock* AdvanceLoopMembers() {
3537 if (loop_index < loop_length) {
3538 HBasicBlock* result = loop_->blocks()->at(loop_index);
3539 loop_index++;
3540 return result;
3541 } else {
3542 return NULL;
3543 }
3544 }
3545
  LoopKind kind_;                        // Which cycle this frame is running.
  PostorderProcessor* father_;           // Frame below this one on the stack.
  PostorderProcessor* child_;            // Cached frame above (lazily built).
  HLoopInformation* loop_;               // Loop enumerated by LOOP_* cycles.
  HBasicBlock* block_;                   // Block this frame is processing.
  HBasicBlock* loop_header_;             // Enclosing loop header, if any.
  int loop_index;                        // Cursor into loop_->blocks().
  int loop_length;                       // Cached loop_->blocks()->length().
  HSuccessorIterator successor_iterator; // Cursor over block_'s successors.
3555 };
3556
3557
// Rebuilds blocks_ in reverse postorder (loop members kept contiguous) and
// reassigns block ids to match the new order.
void HGraph::OrderBlocks() {
  CompilationPhase phase("H_Block ordering", info());

#ifdef DEBUG
  // Initially the blocks must not be ordered.
  for (int i = 0; i < blocks_.length(); ++i) {
    DCHECK(!blocks_[i]->IsOrdered());
  }
#endif

  // Compute a postorder iteratively (PostorderProcessor simulates the
  // recursion stack in the zone), appending blocks into the emptied blocks_.
  PostorderProcessor* postorder =
      PostorderProcessor::CreateEntryProcessor(zone(), blocks_[0]);
  blocks_.Rewind(0);
  while (postorder) {
    postorder = postorder->PerformStep(zone(), &blocks_);
  }

#ifdef DEBUG
  // Now all blocks must be marked as ordered.
  for (int i = 0; i < blocks_.length(); ++i) {
    DCHECK(blocks_[i]->IsOrdered());
  }
#endif

  // Reverse block list and assign block IDs.  When the length is odd the
  // middle element is handled by the final iteration where i == j.
  for (int i = 0, j = blocks_.length(); --j >= i; ++i) {
    HBasicBlock* bi = blocks_[i];
    HBasicBlock* bj = blocks_[j];
    bi->set_block_id(j);
    bj->set_block_id(i);
    blocks_[i] = bj;
    blocks_[j] = bi;
  }
}
3592
3593
AssignDominators()3594 void HGraph::AssignDominators() {
3595 HPhase phase("H_Assign dominators", this);
3596 for (int i = 0; i < blocks_.length(); ++i) {
3597 HBasicBlock* block = blocks_[i];
3598 if (block->IsLoopHeader()) {
3599 // Only the first predecessor of a loop header is from outside the loop.
3600 // All others are back edges, and thus cannot dominate the loop header.
3601 block->AssignCommonDominator(block->predecessors()->first());
3602 block->AssignLoopSuccessorDominators();
3603 } else {
3604 for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
3605 blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
3606 }
3607 }
3608 }
3609 }
3610
3611
CheckArgumentsPhiUses()3612 bool HGraph::CheckArgumentsPhiUses() {
3613 int block_count = blocks_.length();
3614 for (int i = 0; i < block_count; ++i) {
3615 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3616 HPhi* phi = blocks_[i]->phis()->at(j);
3617 // We don't support phi uses of arguments for now.
3618 if (phi->CheckFlag(HValue::kIsArguments)) return false;
3619 }
3620 }
3621 return true;
3622 }
3623
3624
CheckConstPhiUses()3625 bool HGraph::CheckConstPhiUses() {
3626 int block_count = blocks_.length();
3627 for (int i = 0; i < block_count; ++i) {
3628 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3629 HPhi* phi = blocks_[i]->phis()->at(j);
3630 // Check for the hole value (from an uninitialized const).
3631 for (int k = 0; k < phi->OperandCount(); k++) {
3632 if (phi->OperandAt(k) == GetConstantHole()) return false;
3633 }
3634 }
3635 }
3636 return true;
3637 }
3638
3639
CollectPhis()3640 void HGraph::CollectPhis() {
3641 int block_count = blocks_.length();
3642 phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
3643 for (int i = 0; i < block_count; ++i) {
3644 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3645 HPhi* phi = blocks_[i]->phis()->at(j);
3646 phi_list_->Add(phi, zone());
3647 }
3648 }
3649 }
3650
3651
// Implementation of utility class to encapsulate the translation state for
// a (possibly inlined) function.
//
// Construction pushes this state on the builder's function-state stack; the
// destructor pops it.  For inlined functions (outer_ != NULL) it also sets up
// the blocks to which inlined returns are wired.
FunctionState::FunctionState(HOptimizedGraphBuilder* owner,
                             CompilationInfo* info, InliningKind inlining_kind,
                             int inlining_id, TailCallMode tail_call_mode)
    : owner_(owner),
      compilation_info_(info),
      call_context_(NULL),
      inlining_kind_(inlining_kind),
      tail_call_mode_(tail_call_mode),
      function_return_(NULL),
      test_context_(NULL),
      entry_(NULL),
      arguments_object_(NULL),
      arguments_elements_(NULL),
      inlining_id_(inlining_id),
      outer_source_position_(SourcePosition::Unknown()),
      do_expression_scope_count_(0),
      outer_(owner->function_state()) {
  if (outer_ != NULL) {
    // State for an inline function.
    if (owner->ast_context()->IsTest()) {
      // When inlined into a test context, returns branch directly to the
      // caller's true/false targets via dedicated inline-return blocks.
      HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
      HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
      if_true->MarkAsInlineReturnTarget(owner->current_block());
      if_false->MarkAsInlineReturnTarget(owner->current_block());
      TestContext* outer_test_context = TestContext::cast(owner->ast_context());
      Expression* cond = outer_test_context->condition();
      // The AstContext constructor pushed on the context stack. This newed
      // instance is the reason that AstContext can't be BASE_EMBEDDED.
      test_context_ = new TestContext(owner, cond, if_true, if_false);
    } else {
      function_return_ = owner->graph()->CreateBasicBlock();
      function_return()->MarkAsInlineReturnTarget(owner->current_block());
    }
    // Set this after possibly allocating a new TestContext above.
    call_context_ = owner->ast_context();
  }

  // Push on the state stack.
  owner->set_function_state(this);

  if (owner->is_tracking_positions()) {
    // Remember the caller's position so the destructor can restore it.
    outer_source_position_ = owner->source_position();
    owner->EnterInlinedSource(inlining_id);
    owner->SetSourcePosition(info->shared_info()->start_position());
  }
}
3700
3701
// Pops this state off the builder's stack and restores the caller's source
// position.  test_context_ was allocated with plain new (see constructor),
// so it is deleted here.
FunctionState::~FunctionState() {
  delete test_context_;
  owner_->set_function_state(outer_);

  if (owner_->is_tracking_positions()) {
    owner_->set_source_position(outer_source_position_);
    owner_->EnterInlinedSource(outer_->inlining_id());
  }
}
3711
3712
// Implementation of utility classes to represent an expression's context in
// the AST.
//
// Construction pushes this context on the builder's context stack; the
// destructor pops it.  In debug mode the environment length is recorded so
// the subclass destructors can check the expected stack effect.
AstContext::AstContext(HOptimizedGraphBuilder* owner, Expression::Context kind)
    : owner_(owner),
      kind_(kind),
      outer_(owner->ast_context()),
      typeof_mode_(NOT_INSIDE_TYPEOF) {
  owner->set_ast_context(this);  // Push.
#ifdef DEBUG
  DCHECK_EQ(JS_FUNCTION, owner->environment()->frame_type());
  original_length_ = owner->environment()->length();
#endif
}
3726
3727
// Restores the previous context on the builder's context stack.
AstContext::~AstContext() {
  owner_->set_ast_context(outer_);  // Pop.
}
3731
3732
// An effect context must leave the environment length unchanged (its value
// is dropped); only checked when compilation is still healthy.
EffectContext::~EffectContext() {
  DCHECK(owner()->HasStackOverflow() || owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ &&
          (owner()->environment()->frame_type() == JS_FUNCTION ||
           owner()->environment()->frame_type() == TAIL_CALLER_FUNCTION)));
}
3739
3740
// A value context must push exactly one value onto the environment; only
// checked when compilation is still healthy.
ValueContext::~ValueContext() {
  DCHECK(owner()->HasStackOverflow() || owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ + 1 &&
          (owner()->environment()->frame_type() == JS_FUNCTION ||
           owner()->environment()->frame_type() == TAIL_CALLER_FUNCTION)));
}
3747
3748
// In an effect context the expression's result is not needed.
void EffectContext::ReturnValue(HValue* value) {
  // The value is simply ignored.
}
3752
3753
ReturnValue(HValue * value)3754 void ValueContext::ReturnValue(HValue* value) {
3755 // The value is tracked in the bailout environment, and communicated
3756 // through the environment as the result of the expression.
3757 if (value->CheckFlag(HValue::kIsArguments)) {
3758 if (flag_ == ARGUMENTS_FAKED) {
3759 value = owner()->graph()->GetConstantUndefined();
3760 } else if (!arguments_allowed()) {
3761 owner()->Bailout(kBadValueContextForArgumentsValue);
3762 }
3763 }
3764 owner()->Push(value);
3765 }
3766
3767
// In a test context the value is immediately turned into a branch on its
// boolean value.
void TestContext::ReturnValue(HValue* value) {
  BuildBranch(value);
}
3771
3772
// Emits |instr| for its side effects only; a simulate is added afterwards if
// the instruction can deopt/observe state.
void EffectContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  DCHECK(!instr->IsControlInstruction());
  owner()->AddInstruction(instr);
  if (instr->HasObservableSideEffects()) {
    owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}
3780
3781
ReturnControl(HControlInstruction * instr,BailoutId ast_id)3782 void EffectContext::ReturnControl(HControlInstruction* instr,
3783 BailoutId ast_id) {
3784 DCHECK(!instr->HasObservableSideEffects());
3785 HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
3786 HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
3787 instr->SetSuccessorAt(0, empty_true);
3788 instr->SetSuccessorAt(1, empty_false);
3789 owner()->FinishCurrentBlock(instr);
3790 HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
3791 owner()->set_current_block(join);
3792 }
3793
3794
// Continues after a continuation whose value is unused: resume in whichever
// branch is reachable, joining the two when both are.
void EffectContext::ReturnContinuation(HIfContinuation* continuation,
                                       BailoutId ast_id) {
  HBasicBlock* true_branch = NULL;
  HBasicBlock* false_branch = NULL;
  continuation->Continue(&true_branch, &false_branch);
  if (!continuation->IsTrueReachable()) {
    owner()->set_current_block(false_branch);
  } else if (!continuation->IsFalseReachable()) {
    owner()->set_current_block(true_branch);
  } else {
    HBasicBlock* join = owner()->CreateJoin(true_branch, false_branch, ast_id);
    owner()->set_current_block(join);
  }
}
3809
3810
// Emits |instr| and pushes it as the expression's value; bails out if an
// arguments object would escape into a context that disallows it.
void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  DCHECK(!instr->IsControlInstruction());
  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
    return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
  }
  owner()->AddInstruction(instr);
  // Push before the simulate so the value is part of the recorded state.
  owner()->Push(instr);
  if (instr->HasObservableSideEffects()) {
    owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}
3822
3823
// Materializes a control instruction's outcome as a boolean value: true/false
// constants are pushed in the respective branches, then joined.
void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
  DCHECK(!instr->HasObservableSideEffects());
  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
    return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
  }
  HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
  HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, materialize_true);
  instr->SetSuccessorAt(1, materialize_false);
  owner()->FinishCurrentBlock(instr);
  // Push the constant in each branch's environment; the join's phi merges
  // them into the single result value.
  owner()->set_current_block(materialize_true);
  owner()->Push(owner()->graph()->GetConstantTrue());
  owner()->set_current_block(materialize_false);
  owner()->Push(owner()->graph()->GetConstantFalse());
  HBasicBlock* join =
      owner()->CreateJoin(materialize_true, materialize_false, ast_id);
  owner()->set_current_block(join);
}
3842
3843
ReturnContinuation(HIfContinuation * continuation,BailoutId ast_id)3844 void ValueContext::ReturnContinuation(HIfContinuation* continuation,
3845 BailoutId ast_id) {
3846 HBasicBlock* materialize_true = NULL;
3847 HBasicBlock* materialize_false = NULL;
3848 continuation->Continue(&materialize_true, &materialize_false);
3849 if (continuation->IsTrueReachable()) {
3850 owner()->set_current_block(materialize_true);
3851 owner()->Push(owner()->graph()->GetConstantTrue());
3852 owner()->set_current_block(materialize_true);
3853 }
3854 if (continuation->IsFalseReachable()) {
3855 owner()->set_current_block(materialize_false);
3856 owner()->Push(owner()->graph()->GetConstantFalse());
3857 owner()->set_current_block(materialize_false);
3858 }
3859 if (continuation->TrueAndFalseReachable()) {
3860 HBasicBlock* join =
3861 owner()->CreateJoin(materialize_true, materialize_false, ast_id);
3862 owner()->set_current_block(join);
3863 }
3864 }
3865
3866
// Emits |instr| and branches on its value.  If a simulate is required, the
// value is temporarily pushed so the recorded state includes it.
void TestContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  DCHECK(!instr->IsControlInstruction());
  HOptimizedGraphBuilder* builder = owner();
  builder->AddInstruction(instr);
  // We expect a simulate after every expression with side effects, though
  // this one isn't actually needed (and wouldn't work if it were targeted).
  if (instr->HasObservableSideEffects()) {
    builder->Push(instr);
    builder->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
    builder->Pop();
  }
  BuildBranch(instr);
}
3880
3881
ReturnControl(HControlInstruction * instr,BailoutId ast_id)3882 void TestContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
3883 DCHECK(!instr->HasObservableSideEffects());
3884 HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
3885 HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
3886 instr->SetSuccessorAt(0, empty_true);
3887 instr->SetSuccessorAt(1, empty_false);
3888 owner()->FinishCurrentBlock(instr);
3889 owner()->Goto(empty_true, if_true(), owner()->function_state());
3890 owner()->Goto(empty_false, if_false(), owner()->function_state());
3891 owner()->set_current_block(NULL);
3892 }
3893
3894
// Wires a continuation's reachable branches directly to the test's true and
// false targets; control does not fall through.
void TestContext::ReturnContinuation(HIfContinuation* continuation,
                                     BailoutId ast_id) {
  HBasicBlock* true_branch = NULL;
  HBasicBlock* false_branch = NULL;
  continuation->Continue(&true_branch, &false_branch);
  if (continuation->IsTrueReachable()) {
    owner()->Goto(true_branch, if_true(), owner()->function_state());
  }
  if (continuation->IsFalseReachable()) {
    owner()->Goto(false_branch, if_false(), owner()->function_state());
  }
  owner()->set_current_block(NULL);
}
3908
3909
// Branches on |value|'s boolean value, using type feedback about the kinds of
// values seen by the condition to specialize the ToBoolean conversion.
void TestContext::BuildBranch(HValue* value) {
  // We expect the graph to be in edge-split form: there is no edge that
  // connects a branch node to a join node. We conservatively ensure that
  // property by always adding an empty block on the outgoing edges of this
  // branch.
  HOptimizedGraphBuilder* builder = owner();
  if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
    builder->Bailout(kArgumentsObjectValueInATestContext);
  }
  ToBooleanHints expected(condition()->to_boolean_types());
  ReturnControl(owner()->New<HBranch>(value, expected), BailoutId::None());
}
3922
3923
// HOptimizedGraphBuilder infrastructure for bailing out and checking bailouts.
// Runs |call| and returns early if it triggered a bailout (signalled through
// the stack-overflow flag).
#define CHECK_BAILOUT(call)                     \
  do {                                          \
    call;                                       \
    if (HasStackOverflow()) return;             \
  } while (false)
3930
3931
// Like CHECK_BAILOUT, but also returns early when control flow has ended
// (current_block() == NULL, e.g. after an unconditional jump or return).
#define CHECK_ALIVE(call)                                       \
  do {                                                          \
    call;                                                       \
    if (HasStackOverflow() || current_block() == NULL) return;  \
  } while (false)
3937
3938
// Variant of CHECK_ALIVE for use in functions that return |value| on bailout
// or dead control flow.
#define CHECK_ALIVE_OR_RETURN(call, value)                            \
  do {                                                                \
    call;                                                             \
    if (HasStackOverflow() || current_block() == NULL) return value;  \
  } while (false)
3944
3945
// Aborts optimization with |reason| and sets the stack-overflow flag, which
// the CHECK_* macros use to unwind the AST visitation.
void HOptimizedGraphBuilder::Bailout(BailoutReason reason) {
  current_info()->AbortOptimization(reason);
  SetStackOverflow();
}
3950
3951
// Visits |expr| for its side effects only; the result value is discarded.
void HOptimizedGraphBuilder::VisitForEffect(Expression* expr) {
  EffectContext for_effect(this);
  Visit(expr);
}
3956
3957
// Visits |expr| and pushes its value on the environment; |flag| controls
// whether an arguments object may appear as the value.
void HOptimizedGraphBuilder::VisitForValue(Expression* expr,
                                           ArgumentsAllowedFlag flag) {
  ValueContext for_value(this, flag);
  Visit(expr);
}
3963
3964
// Visits |expr| as the operand of typeof, which must not throw a reference
// error for unresolved variables (hence INSIDE_TYPEOF mode).
void HOptimizedGraphBuilder::VisitForTypeOf(Expression* expr) {
  ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
  for_value.set_typeof_mode(INSIDE_TYPEOF);
  Visit(expr);
}
3970
3971
// Visits |expr| as a branch condition; control continues in |true_block| or
// |false_block| rather than falling through.
void HOptimizedGraphBuilder::VisitForControl(Expression* expr,
                                             HBasicBlock* true_block,
                                             HBasicBlock* false_block) {
  TestContext for_control(this, expr, true_block, false_block);
  Visit(expr);
}
3978
3979
VisitExpressions(ZoneList<Expression * > * exprs)3980 void HOptimizedGraphBuilder::VisitExpressions(
3981 ZoneList<Expression*>* exprs) {
3982 for (int i = 0; i < exprs->length(); ++i) {
3983 CHECK_ALIVE(VisitForValue(exprs->at(i)));
3984 }
3985 }
3986
3987
// As above, but propagates |flag| (whether arguments objects are allowed as
// values) to every visit.
void HOptimizedGraphBuilder::VisitExpressions(ZoneList<Expression*>* exprs,
                                              ArgumentsAllowedFlag flag) {
  for (int i = 0; i < exprs->length(); ++i) {
    CHECK_ALIVE(VisitForValue(exprs->at(i), flag));
  }
}
3994
3995
// Translates the function's AST into the Hydrogen graph.  Returns false if a
// bailout occurred; on success the graph is ready for HGraph::Optimize().
bool HOptimizedGraphBuilder::BuildGraph() {
  if (IsSubclassConstructor(current_info()->literal()->kind())) {
    Bailout(kSuperReference);
    return false;
  }

  DeclarationScope* scope = current_info()->scope();
  SetUpScope(scope);

  // Add an edge to the body entry. This is warty: the graph's start
  // environment will be used by the Lithium translation as the initial
  // environment on graph entry, but it has now been mutated by the
  // Hydrogen translation of the instructions in the start block. This
  // environment uses values which have not been defined yet. These
  // Hydrogen instructions will then be replayed by the Lithium
  // translation, so they cannot have an environment effect. The edge to
  // the body's entry block (along with some special logic for the start
  // block in HInstruction::InsertAfter) seals the start block from
  // getting unwanted instructions inserted.
  //
  // TODO(kmillikin): Fix this. Stop mutating the initial environment.
  // Make the Hydrogen instructions in the initial block into Hydrogen
  // values (but not instructions), present in the initial environment and
  // not replayed by the Lithium translation.
  HEnvironment* initial_env = environment()->CopyWithoutHistory();
  HBasicBlock* body_entry = CreateBasicBlock(initial_env);
  Goto(body_entry);
  body_entry->SetJoinId(BailoutId::FunctionEntry());
  set_current_block(body_entry);

  VisitDeclarations(scope->declarations());
  Add<HSimulate>(BailoutId::Declarations());

  Add<HStackCheck>(HStackCheck::kFunctionEntry);

  VisitStatements(current_info()->literal()->body());
  if (HasStackOverflow()) return false;

  // Functions that fall off the end implicitly return undefined.
  if (current_block() != NULL) {
    Add<HReturn>(graph()->GetConstantUndefined());
    set_current_block(NULL);
  }

  // If the checksum of the number of type info changes is the same as the
  // last time this function was compiled, then this recompile is likely not
  // due to missing/inadequate type feedback, but rather too aggressive
  // optimization. Disable optimistic LICM in that case.
  Handle<Code> unoptimized_code(current_info()->shared_info()->code());
  DCHECK(unoptimized_code->kind() == Code::FUNCTION);
  Handle<TypeFeedbackInfo> type_info(
      TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
  int checksum = type_info->own_type_change_checksum();
  int composite_checksum = graph()->update_type_change_checksum(checksum);
  graph()->set_use_optimistic_licm(
      !type_info->matches_inlined_type_change_checksum(composite_checksum));
  type_info->set_inlined_type_change_checksum(composite_checksum);

  // Set this predicate early to avoid handle deref during graph optimization.
  graph()->set_allow_code_motion(
      current_info()->IsStub() ||
      current_info()->shared_info()->opt_count() + 1 < FLAG_max_opt_count);

  // Perform any necessary OSR-specific cleanups or changes to the graph.
  osr()->FinishGraph();

  return true;
}
4063
4064
// Runs the Hydrogen optimization pipeline.  Phase order is significant; see
// the inline comments.  Returns false (with |bailout_reason| set) when an
// unsupported construct is found.
bool HGraph::Optimize(BailoutReason* bailout_reason) {
  OrderBlocks();
  AssignDominators();

  // We need to create a HConstant "zero" now so that GVN will fold every
  // zero-valued constant in the graph together.
  // The constant is needed to make idef-based bounds check work: the pass
  // evaluates relations with "zero" and that zero cannot be created after GVN.
  GetConstant0();

#ifdef DEBUG
  // Do a full verify after building the graph and computing dominators.
  Verify(true);
#endif

  if (FLAG_analyze_environment_liveness && maximum_environment_size() != 0) {
    Run<HEnvironmentLivenessAnalysisPhase>();
  }

  if (!CheckConstPhiUses()) {
    *bailout_reason = kUnsupportedPhiUseOfConstVariable;
    return false;
  }
  Run<HRedundantPhiEliminationPhase>();
  if (!CheckArgumentsPhiUses()) {
    *bailout_reason = kUnsupportedPhiUseOfArguments;
    return false;
  }

  // Find and mark unreachable code to simplify optimizations, especially gvn,
  // where unreachable code could unnecessarily defeat LICM.
  Run<HMarkUnreachableBlocksPhase>();

  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
  if (FLAG_use_escape_analysis) Run<HEscapeAnalysisPhase>();

  if (FLAG_load_elimination) Run<HLoadEliminationPhase>();

  CollectPhis();

  if (has_osr()) osr()->FinishOsrValues();

  Run<HInferRepresentationPhase>();

  // Remove HSimulate instructions that have turned out not to be needed
  // after all by folding them into the following HSimulate.
  // This must happen after inferring representations.
  Run<HMergeRemovableSimulatesPhase>();

  Run<HRepresentationChangesPhase>();

  Run<HInferTypesPhase>();

  // Must be performed before canonicalization to ensure that Canonicalize
  // will not remove semantically meaningful ToInt32 operations e.g. BIT_OR with
  // zero.
  Run<HUint32AnalysisPhase>();

  if (FLAG_use_canonicalizing) Run<HCanonicalizePhase>();

  if (FLAG_use_gvn) Run<HGlobalValueNumberingPhase>();

  if (FLAG_check_elimination) Run<HCheckEliminationPhase>();

  if (FLAG_store_elimination) Run<HStoreEliminationPhase>();

  Run<HRangeAnalysisPhase>();

  // Eliminate redundant stack checks on backwards branches.
  Run<HStackCheckEliminationPhase>();

  if (FLAG_array_bounds_checks_elimination) Run<HBoundsCheckEliminationPhase>();
  if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>();
  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();

  RestoreActualValues();

  // Find unreachable code a second time, GVN and other optimizations may have
  // made blocks unreachable that were previously reachable.
  Run<HMarkUnreachableBlocksPhase>();

  return true;
}
4148
4149
// Cleans up informative definitions left over from optimization: uses of a
// redefinition are redirected to the underlying actual value, and purely
// informative instructions are deleted.
void HGraph::RestoreActualValues() {
  HPhase phase("H_Restore actual values", this);

  for (int block_index = 0; block_index < blocks()->length(); block_index++) {
    HBasicBlock* block = blocks()->at(block_index);

#ifdef DEBUG
    // Phis are never redefinitions; they must already be their own actual
    // value.
    for (int i = 0; i < block->phis()->length(); i++) {
      HPhi* phi = block->phis()->at(i);
      DCHECK(phi->ActualValue() == phi);
    }
#endif

    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HInstruction* instruction = it.Current();
      if (instruction->ActualValue() == instruction) continue;
      if (instruction->CheckFlag(HValue::kIsDead)) {
        // The instruction was marked as deleted but left in the graph
        // as a control flow dependency point for subsequent
        // instructions.
        instruction->DeleteAndReplaceWith(instruction->ActualValue());
      } else {
        DCHECK(instruction->IsInformativeDefinition());
        if (instruction->IsPurelyInformativeDefinition()) {
          instruction->DeleteAndReplaceWith(instruction->RedefinedOperand());
        } else {
          instruction->ReplaceAllUsesWith(instruction->ActualValue());
        }
      }
    }
  }
}
4182
4183
PushArgumentsFromEnvironment(int count)4184 void HOptimizedGraphBuilder::PushArgumentsFromEnvironment(int count) {
4185 ZoneList<HValue*> arguments(count, zone());
4186 for (int i = 0; i < count; ++i) {
4187 arguments.Add(Pop(), zone());
4188 }
4189
4190 HPushArguments* push_args = New<HPushArguments>();
4191 while (!arguments.is_empty()) {
4192 push_args->AddInput(arguments.RemoveLast());
4193 }
4194 AddInstruction(push_args);
4195 }
4196
4197
// Transfers |call|'s arguments from the simulated environment to explicit
// push-argument instructions, then returns |call| for emission.
template <class Instruction>
HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) {
  PushArgumentsFromEnvironment(call->argument_count());
  return call;
}
4203
// Builds the function prologue: binds parameters, initializes locals to
// undefined, sets up the context and the (lazy) arguments object.  Bails out
// on features Crankshaft does not support (rest parameters, new.target, ...).
void HOptimizedGraphBuilder::SetUpScope(DeclarationScope* scope) {
  HEnvironment* prolog_env = environment();
  int parameter_count = environment()->parameter_count();
  ZoneList<HValue*> parameters(parameter_count, zone());
  for (int i = 0; i < parameter_count; ++i) {
    HInstruction* parameter = Add<HParameter>(static_cast<unsigned>(i));
    parameters.Add(parameter, zone());
    environment()->Bind(i, parameter);
  }

  HConstant* undefined_constant = graph()->GetConstantUndefined();
  // Initialize specials and locals to undefined.
  for (int i = parameter_count + 1; i < environment()->length(); ++i) {
    environment()->Bind(i, undefined_constant);
  }
  Add<HPrologue>();

  // Seal the prologue into its own block (see the comment in BuildGraph()
  // about protecting the start block).
  HEnvironment* initial_env = environment()->CopyWithoutHistory();
  HBasicBlock* body_entry = CreateBasicBlock(initial_env);
  GotoNoSimulate(body_entry);
  set_current_block(body_entry);

  // Initialize context of prolog environment to undefined.
  prolog_env->BindContext(undefined_constant);

  // First special is HContext.
  HInstruction* context = Add<HContext>();
  environment()->BindContext(context);

  // Create an arguments object containing the initial parameters.  Set the
  // initial values of parameters including "this" having parameter index 0.
  DCHECK_EQ(scope->num_parameters() + 1, parameter_count);
  HArgumentsObject* arguments_object = New<HArgumentsObject>(parameter_count);
  for (int i = 0; i < parameter_count; ++i) {
    HValue* parameter = parameters.at(i);
    arguments_object->AddArgument(parameter, zone());
  }

  AddInstruction(arguments_object);

  // Handle the arguments and arguments shadow variables specially (they do
  // not have declarations).
  if (scope->arguments() != NULL) {
    environment()->Bind(scope->arguments(), arguments_object);
  }

  if (scope->rest_parameter() != nullptr) {
    return Bailout(kRestParameter);
  }

  if (scope->this_function_var() != nullptr ||
      scope->new_target_var() != nullptr) {
    return Bailout(kSuperReference);
  }

  // Trace the call.
  if (FLAG_trace && top_info()->IsOptimizing()) {
    Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kTraceEnter), 0);
  }
}
4264
4265
// Visits each statement for effect; stops after an unconditional jump (the
// remaining statements are unreachable) or when CHECK_ALIVE unwinds.
void HOptimizedGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
  for (int i = 0; i < statements->length(); i++) {
    Statement* stmt = statements->at(i);
    CHECK_ALIVE(Visit(stmt));
    if (stmt->IsJump()) break;
  }
}
4273
4274
// Translate a block statement.  If the block has its own scope that needs a
// context, a block context is allocated at runtime and bound for the
// duration of the block, and the previous context is re-bound on exit.
void HOptimizedGraphBuilder::VisitBlock(Block* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  // Note: the local |scope| intentionally shadows the scope() accessor for
  // the remainder of this function.
  Scope* outer_scope = scope();
  Scope* scope = stmt->scope();
  BreakAndContinueInfo break_info(stmt, outer_scope);

  { BreakAndContinueScope push(&break_info, this);
    if (scope != NULL) {
      if (scope->NeedsContext()) {
        // Load the function object.
        DeclarationScope* declaration_scope = scope->GetDeclarationScope();
        HInstruction* function;
        HValue* outer_context = environment()->context();
        if (declaration_scope->is_script_scope() ||
            declaration_scope->is_eval_scope()) {
          // For script/eval scopes the closure is read out of the context
          // rather than taken from the current frame.
          function = new (zone())
              HLoadContextSlot(outer_context, Context::CLOSURE_INDEX,
                               HLoadContextSlot::kNoCheck);
        } else {
          function = New<HThisFunction>();
        }
        AddInstruction(function);
        // Allocate a block context and store it to the stack frame.
        HValue* scope_info = Add<HConstant>(scope->scope_info());
        Add<HPushArguments>(scope_info, function);
        HInstruction* inner_context = Add<HCallRuntime>(
            Runtime::FunctionForId(Runtime::kPushBlockContext), 2);
        inner_context->SetFlag(HValue::kHasNoObservableSideEffects);
        set_scope(scope);
        environment()->BindContext(inner_context);
      }
      VisitDeclarations(scope->declarations());
      AddSimulate(stmt->DeclsId(), REMOVABLE_SIMULATE);
    }
    CHECK_BAILOUT(VisitStatements(stmt->statements()));
  }
  set_scope(outer_scope);
  // Re-bind the enclosing context if this block bound its own context and
  // control flow can still fall out of the block.
  if (scope != NULL && current_block() != NULL &&
      scope->ContextLocalCount() > 0) {
    HValue* inner_context = environment()->context();
    HValue* outer_context = Add<HLoadNamedField>(
        inner_context, nullptr,
        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));

    environment()->BindContext(outer_context);
  }
  // If the block was the target of a break, route fall-through and all
  // break edges through the (lazily created) break block.
  HBasicBlock* break_block = break_info.break_block();
  if (break_block != NULL) {
    if (current_block() != NULL) Goto(break_block);
    break_block->SetJoinId(stmt->ExitId());
    set_current_block(break_block);
  }
}
4331
4332
VisitExpressionStatement(ExpressionStatement * stmt)4333 void HOptimizedGraphBuilder::VisitExpressionStatement(
4334 ExpressionStatement* stmt) {
4335 DCHECK(!HasStackOverflow());
4336 DCHECK(current_block() != NULL);
4337 DCHECK(current_block()->HasPredecessor());
4338 VisitForEffect(stmt->expression());
4339 }
4340
4341
VisitEmptyStatement(EmptyStatement * stmt)4342 void HOptimizedGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
4343 DCHECK(!HasStackOverflow());
4344 DCHECK(current_block() != NULL);
4345 DCHECK(current_block()->HasPredecessor());
4346 }
4347
4348
VisitSloppyBlockFunctionStatement(SloppyBlockFunctionStatement * stmt)4349 void HOptimizedGraphBuilder::VisitSloppyBlockFunctionStatement(
4350 SloppyBlockFunctionStatement* stmt) {
4351 Visit(stmt->statement());
4352 }
4353
4354
// Translate an if statement.  Statically-known conditions emit only the
// taken arm; otherwise both arms are built as separate blocks and joined.
void HOptimizedGraphBuilder::VisitIfStatement(IfStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (stmt->condition()->ToBooleanIsTrue()) {
    Add<HSimulate>(stmt->ThenId());
    Visit(stmt->then_statement());
  } else if (stmt->condition()->ToBooleanIsFalse()) {
    Add<HSimulate>(stmt->ElseId());
    Visit(stmt->else_statement());
  } else {
    HBasicBlock* cond_true = graph()->CreateBasicBlock();
    HBasicBlock* cond_false = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));

    // Technically, we should be able to handle the case when one side of
    // the test is not connected, but this can trip up liveness analysis
    // if we did not fully connect the test context based on some optimistic
    // assumption. If such an assumption was violated, we would end up with
    // an environment with optimized-out values. So we should always
    // conservatively connect the test context.
    CHECK(cond_true->HasPredecessor());
    CHECK(cond_false->HasPredecessor());

    cond_true->SetJoinId(stmt->ThenId());
    set_current_block(cond_true);
    CHECK_BAILOUT(Visit(stmt->then_statement()));
    cond_true = current_block();  // May be NULL if the arm ended in a jump.

    cond_false->SetJoinId(stmt->ElseId());
    set_current_block(cond_false);
    CHECK_BAILOUT(Visit(stmt->else_statement()));
    cond_false = current_block();  // May be NULL if the arm ended in a jump.

    HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
    set_current_block(join);
  }
}
4393
4394
Get(BreakableStatement * stmt,BreakType type,Scope ** scope,int * drop_extra)4395 HBasicBlock* HOptimizedGraphBuilder::BreakAndContinueScope::Get(
4396 BreakableStatement* stmt,
4397 BreakType type,
4398 Scope** scope,
4399 int* drop_extra) {
4400 *drop_extra = 0;
4401 BreakAndContinueScope* current = this;
4402 while (current != NULL && current->info()->target() != stmt) {
4403 *drop_extra += current->info()->drop_extra();
4404 current = current->next();
4405 }
4406 DCHECK(current != NULL); // Always found (unless stack is malformed).
4407 *scope = current->info()->scope();
4408
4409 if (type == BREAK) {
4410 *drop_extra += current->info()->drop_extra();
4411 }
4412
4413 HBasicBlock* block = NULL;
4414 switch (type) {
4415 case BREAK:
4416 block = current->info()->break_block();
4417 if (block == NULL) {
4418 block = current->owner()->graph()->CreateBasicBlock();
4419 current->info()->set_break_block(block);
4420 }
4421 break;
4422
4423 case CONTINUE:
4424 block = current->info()->continue_block();
4425 if (block == NULL) {
4426 block = current->owner()->graph()->CreateBasicBlock();
4427 current->info()->set_continue_block(block);
4428 }
4429 break;
4430 }
4431
4432 return block;
4433 }
4434
4435
VisitContinueStatement(ContinueStatement * stmt)4436 void HOptimizedGraphBuilder::VisitContinueStatement(
4437 ContinueStatement* stmt) {
4438 DCHECK(!HasStackOverflow());
4439 DCHECK(current_block() != NULL);
4440 DCHECK(current_block()->HasPredecessor());
4441
4442 if (function_state()->IsInsideDoExpressionScope()) {
4443 return Bailout(kDoExpressionUnmodelable);
4444 }
4445
4446 Scope* outer_scope = NULL;
4447 Scope* inner_scope = scope();
4448 int drop_extra = 0;
4449 HBasicBlock* continue_block = break_scope()->Get(
4450 stmt->target(), BreakAndContinueScope::CONTINUE,
4451 &outer_scope, &drop_extra);
4452 HValue* context = environment()->context();
4453 Drop(drop_extra);
4454 int context_pop_count = inner_scope->ContextChainLength(outer_scope);
4455 if (context_pop_count > 0) {
4456 while (context_pop_count-- > 0) {
4457 HInstruction* context_instruction = Add<HLoadNamedField>(
4458 context, nullptr,
4459 HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
4460 context = context_instruction;
4461 }
4462 environment()->BindContext(context);
4463 }
4464
4465 Goto(continue_block);
4466 set_current_block(NULL);
4467 }
4468
4469
VisitBreakStatement(BreakStatement * stmt)4470 void HOptimizedGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
4471 DCHECK(!HasStackOverflow());
4472 DCHECK(current_block() != NULL);
4473 DCHECK(current_block()->HasPredecessor());
4474
4475 if (function_state()->IsInsideDoExpressionScope()) {
4476 return Bailout(kDoExpressionUnmodelable);
4477 }
4478
4479 Scope* outer_scope = NULL;
4480 Scope* inner_scope = scope();
4481 int drop_extra = 0;
4482 HBasicBlock* break_block = break_scope()->Get(
4483 stmt->target(), BreakAndContinueScope::BREAK,
4484 &outer_scope, &drop_extra);
4485 HValue* context = environment()->context();
4486 Drop(drop_extra);
4487 int context_pop_count = inner_scope->ContextChainLength(outer_scope);
4488 if (context_pop_count > 0) {
4489 while (context_pop_count-- > 0) {
4490 HInstruction* context_instruction = Add<HLoadNamedField>(
4491 context, nullptr,
4492 HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
4493 context = context_instruction;
4494 }
4495 environment()->BindContext(context);
4496 }
4497 Goto(break_block);
4498 set_current_block(NULL);
4499 }
4500
4501
// Translate a return statement.  A non-inlined return emits HReturn.
// Returns inside inlined calls instead route the value into the caller's
// AST context, with special handling for inlined constructors (the result
// must be a JS receiver, otherwise the receiver is returned) and inlined
// setters (the result is always the RHS of the assignment).
void HOptimizedGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  FunctionState* state = function_state();
  AstContext* context = call_context();
  if (context == NULL) {
    // Not an inlined return, so an actual one.
    CHECK_ALIVE(VisitForValue(stmt->expression()));
    HValue* result = environment()->Pop();
    Add<HReturn>(result);
  } else if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
    // Return from an inlined construct call. In a test context the return value
    // will always evaluate to true, in a value context the return value needs
    // to be a JSObject.
    if (context->IsTest()) {
      CHECK_ALIVE(VisitForEffect(stmt->expression()));
      context->ReturnValue(graph()->GetConstantTrue());
    } else if (context->IsEffect()) {
      CHECK_ALIVE(VisitForEffect(stmt->expression()));
      Goto(function_return(), state);
    } else {
      DCHECK(context->IsValue());
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      HValue* return_value = Pop();
      // The receiver ("this") is argument 0 of the inlined frame.
      HValue* receiver = environment()->arguments_environment()->Lookup(0);
      // Branch on whether the returned value is a JS receiver; if not, the
      // construct call yields the receiver instead.
      HHasInstanceTypeAndBranch* typecheck =
          New<HHasInstanceTypeAndBranch>(return_value,
                                         FIRST_JS_RECEIVER_TYPE,
                                         LAST_JS_RECEIVER_TYPE);
      HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
      HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
      typecheck->SetSuccessorAt(0, if_spec_object);
      typecheck->SetSuccessorAt(1, not_spec_object);
      FinishCurrentBlock(typecheck);
      AddLeaveInlined(if_spec_object, return_value, state);
      AddLeaveInlined(not_spec_object, receiver, state);
    }
  } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
    // Return from an inlined setter call. The returned value is never used, the
    // value of an assignment is always the value of the RHS of the assignment.
    CHECK_ALIVE(VisitForEffect(stmt->expression()));
    if (context->IsTest()) {
      // The RHS is argument 1 of the inlined setter frame.
      HValue* rhs = environment()->arguments_environment()->Lookup(1);
      context->ReturnValue(rhs);
    } else if (context->IsEffect()) {
      Goto(function_return(), state);
    } else {
      DCHECK(context->IsValue());
      HValue* rhs = environment()->arguments_environment()->Lookup(1);
      AddLeaveInlined(rhs, state);
    }
  } else {
    // Return from a normal inlined function. Visit the subexpression in the
    // expression context of the call.
    if (context->IsTest()) {
      TestContext* test = TestContext::cast(context);
      VisitForControl(stmt->expression(), test->if_true(), test->if_false());
    } else if (context->IsEffect()) {
      // Visit in value context and ignore the result. This is needed to keep
      // environment in sync with full-codegen since some visitors (e.g.
      // VisitCountOperation) use the operand stack differently depending on
      // context.
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      Pop();
      Goto(function_return(), state);
    } else {
      DCHECK(context->IsValue());
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      AddLeaveInlined(Pop(), state);
    }
  }
  // Code after a return is unreachable.
  set_current_block(NULL);
}
4576
4577
VisitWithStatement(WithStatement * stmt)4578 void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) {
4579 DCHECK(!HasStackOverflow());
4580 DCHECK(current_block() != NULL);
4581 DCHECK(current_block()->HasPredecessor());
4582 return Bailout(kWithStatement);
4583 }
4584
4585
VisitSwitchStatement(SwitchStatement * stmt)4586 void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
4587 DCHECK(!HasStackOverflow());
4588 DCHECK(current_block() != NULL);
4589 DCHECK(current_block()->HasPredecessor());
4590
4591 ZoneList<CaseClause*>* clauses = stmt->cases();
4592 int clause_count = clauses->length();
4593 ZoneList<HBasicBlock*> body_blocks(clause_count, zone());
4594
4595 CHECK_ALIVE(VisitForValue(stmt->tag()));
4596 Add<HSimulate>(stmt->EntryId());
4597 HValue* tag_value = Top();
4598 AstType* tag_type = bounds_.get(stmt->tag()).lower;
4599
4600 // 1. Build all the tests, with dangling true branches
4601 BailoutId default_id = BailoutId::None();
4602 for (int i = 0; i < clause_count; ++i) {
4603 CaseClause* clause = clauses->at(i);
4604 if (clause->is_default()) {
4605 body_blocks.Add(NULL, zone());
4606 if (default_id.IsNone()) default_id = clause->EntryId();
4607 continue;
4608 }
4609
4610 // Generate a compare and branch.
4611 CHECK_BAILOUT(VisitForValue(clause->label()));
4612 if (current_block() == NULL) return Bailout(kUnsupportedSwitchStatement);
4613 HValue* label_value = Pop();
4614
4615 AstType* label_type = bounds_.get(clause->label()).lower;
4616 AstType* combined_type = clause->compare_type();
4617 HControlInstruction* compare = BuildCompareInstruction(
4618 Token::EQ_STRICT, tag_value, label_value, tag_type, label_type,
4619 combined_type,
4620 ScriptPositionToSourcePosition(stmt->tag()->position()),
4621 ScriptPositionToSourcePosition(clause->label()->position()),
4622 PUSH_BEFORE_SIMULATE, clause->id());
4623
4624 HBasicBlock* next_test_block = graph()->CreateBasicBlock();
4625 HBasicBlock* body_block = graph()->CreateBasicBlock();
4626 body_blocks.Add(body_block, zone());
4627 compare->SetSuccessorAt(0, body_block);
4628 compare->SetSuccessorAt(1, next_test_block);
4629 FinishCurrentBlock(compare);
4630
4631 set_current_block(body_block);
4632 Drop(1); // tag_value
4633
4634 set_current_block(next_test_block);
4635 }
4636
4637 // Save the current block to use for the default or to join with the
4638 // exit.
4639 HBasicBlock* last_block = current_block();
4640 Drop(1); // tag_value
4641
4642 // 2. Loop over the clauses and the linked list of tests in lockstep,
4643 // translating the clause bodies.
4644 HBasicBlock* fall_through_block = NULL;
4645
4646 BreakAndContinueInfo break_info(stmt, scope());
4647 { BreakAndContinueScope push(&break_info, this);
4648 for (int i = 0; i < clause_count; ++i) {
4649 CaseClause* clause = clauses->at(i);
4650
4651 // Identify the block where normal (non-fall-through) control flow
4652 // goes to.
4653 HBasicBlock* normal_block = NULL;
4654 if (clause->is_default()) {
4655 if (last_block == NULL) continue;
4656 normal_block = last_block;
4657 last_block = NULL; // Cleared to indicate we've handled it.
4658 } else {
4659 normal_block = body_blocks[i];
4660 }
4661
4662 if (fall_through_block == NULL) {
4663 set_current_block(normal_block);
4664 } else {
4665 HBasicBlock* join = CreateJoin(fall_through_block,
4666 normal_block,
4667 clause->EntryId());
4668 set_current_block(join);
4669 }
4670
4671 CHECK_BAILOUT(VisitStatements(clause->statements()));
4672 fall_through_block = current_block();
4673 }
4674 }
4675
4676 // Create an up-to-3-way join. Use the break block if it exists since
4677 // it's already a join block.
4678 HBasicBlock* break_block = break_info.break_block();
4679 if (break_block == NULL) {
4680 set_current_block(CreateJoin(fall_through_block,
4681 last_block,
4682 stmt->ExitId()));
4683 } else {
4684 if (fall_through_block != NULL) Goto(fall_through_block, break_block);
4685 if (last_block != NULL) Goto(last_block, break_block);
4686 break_block->SetJoinId(stmt->ExitId());
4687 set_current_block(break_block);
4688 }
4689 }
4690
// Emit the per-iteration stack check and translate the loop body.  The
// stack check is registered with the loop header's loop information so it
// can later be eliminated if the loop is proven not to iterate (see
// VisitDoWhileStatement).
void HOptimizedGraphBuilder::VisitLoopBody(IterationStatement* stmt,
                                           BailoutId stack_check_id,
                                           HBasicBlock* loop_entry) {
  Add<HSimulate>(stack_check_id);
  HStackCheck* stack_check =
      HStackCheck::cast(Add<HStackCheck>(HStackCheck::kBackwardsBranch));
  DCHECK(loop_entry->IsLoopHeader());
  loop_entry->loop_information()->set_stack_check(stack_check);
  CHECK_BAILOUT(Visit(stmt->body()));
}
4701
4702
VisitDoWhileStatement(DoWhileStatement * stmt)4703 void HOptimizedGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
4704 DCHECK(!HasStackOverflow());
4705 DCHECK(current_block() != NULL);
4706 DCHECK(current_block()->HasPredecessor());
4707 DCHECK(current_block() != NULL);
4708 HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4709
4710 BreakAndContinueInfo break_info(stmt, scope());
4711 {
4712 BreakAndContinueScope push(&break_info, this);
4713 CHECK_BAILOUT(VisitLoopBody(stmt, stmt->StackCheckId(), loop_entry));
4714 }
4715 HBasicBlock* body_exit = JoinContinue(
4716 stmt, stmt->ContinueId(), current_block(), break_info.continue_block());
4717 HBasicBlock* loop_successor = NULL;
4718 if (body_exit != NULL) {
4719 set_current_block(body_exit);
4720 loop_successor = graph()->CreateBasicBlock();
4721 if (stmt->cond()->ToBooleanIsFalse()) {
4722 loop_entry->loop_information()->stack_check()->Eliminate();
4723 Goto(loop_successor);
4724 body_exit = NULL;
4725 } else {
4726 // The block for a true condition, the actual predecessor block of the
4727 // back edge.
4728 body_exit = graph()->CreateBasicBlock();
4729 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
4730 }
4731 if (body_exit != NULL && body_exit->HasPredecessor()) {
4732 body_exit->SetJoinId(stmt->BackEdgeId());
4733 } else {
4734 body_exit = NULL;
4735 }
4736 if (loop_successor->HasPredecessor()) {
4737 loop_successor->SetJoinId(stmt->ExitId());
4738 } else {
4739 loop_successor = NULL;
4740 }
4741 }
4742 HBasicBlock* loop_exit = CreateLoop(stmt,
4743 loop_entry,
4744 body_exit,
4745 loop_successor,
4746 break_info.break_block());
4747 set_current_block(loop_exit);
4748 }
4749
4750
VisitWhileStatement(WhileStatement * stmt)4751 void HOptimizedGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
4752 DCHECK(!HasStackOverflow());
4753 DCHECK(current_block() != NULL);
4754 DCHECK(current_block()->HasPredecessor());
4755 DCHECK(current_block() != NULL);
4756 HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4757
4758 // If the condition is constant true, do not generate a branch.
4759 HBasicBlock* loop_successor = NULL;
4760 HBasicBlock* body_entry = graph()->CreateBasicBlock();
4761 loop_successor = graph()->CreateBasicBlock();
4762 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
4763 if (body_entry->HasPredecessor()) {
4764 body_entry->SetJoinId(stmt->BodyId());
4765 set_current_block(body_entry);
4766 }
4767 if (loop_successor->HasPredecessor()) {
4768 loop_successor->SetJoinId(stmt->ExitId());
4769 } else {
4770 loop_successor = NULL;
4771 }
4772
4773 BreakAndContinueInfo break_info(stmt, scope());
4774 if (current_block() != NULL) {
4775 BreakAndContinueScope push(&break_info, this);
4776 CHECK_BAILOUT(VisitLoopBody(stmt, stmt->StackCheckId(), loop_entry));
4777 }
4778 HBasicBlock* body_exit = JoinContinue(
4779 stmt, stmt->ContinueId(), current_block(), break_info.continue_block());
4780 HBasicBlock* loop_exit = CreateLoop(stmt,
4781 loop_entry,
4782 body_exit,
4783 loop_successor,
4784 break_info.break_block());
4785 set_current_block(loop_exit);
4786 }
4787
4788
// Translate a for statement: the init clause runs once before the loop,
// then a standard loop is built with an optional condition at the header
// and an optional next-expression on the back edge.
void HOptimizedGraphBuilder::VisitForStatement(ForStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (stmt->init() != NULL) {
    CHECK_ALIVE(Visit(stmt->init()));
  }
  DCHECK(current_block() != NULL);
  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  HBasicBlock* loop_successor = graph()->CreateBasicBlock();
  HBasicBlock* body_entry = graph()->CreateBasicBlock();
  if (stmt->cond() != NULL) {
    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
    if (body_entry->HasPredecessor()) {
      body_entry->SetJoinId(stmt->BodyId());
      set_current_block(body_entry);
    }
    if (loop_successor->HasPredecessor()) {
      loop_successor->SetJoinId(stmt->ExitId());
    } else {
      // The condition never evaluates to false; there is no loop exit here.
      loop_successor = NULL;
    }
  } else {
    // Create dummy control flow so that variable liveness analysis
    // produces the correct result.
    HControlInstruction* branch = New<HBranch>(graph()->GetConstantTrue());
    branch->SetSuccessorAt(0, body_entry);
    branch->SetSuccessorAt(1, loop_successor);
    FinishCurrentBlock(branch);
    set_current_block(body_entry);
  }

  BreakAndContinueInfo break_info(stmt, scope());
  if (current_block() != NULL) {
    BreakAndContinueScope push(&break_info, this);
    CHECK_BAILOUT(VisitLoopBody(stmt, stmt->StackCheckId(), loop_entry));
  }
  // Join normal fall-through out of the body with any continue edges.
  HBasicBlock* body_exit = JoinContinue(
      stmt, stmt->ContinueId(), current_block(), break_info.continue_block());

  // The next-expression executes on the back edge, after body and continue.
  if (stmt->next() != NULL && body_exit != NULL) {
    set_current_block(body_exit);
    CHECK_BAILOUT(Visit(stmt->next()));
    body_exit = current_block();
  }

  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());
  set_current_block(loop_exit);
}
4843
4844
// Translate a for-in statement.  Bails out unless the loop variable is a
// stack local.  Undefined or null enumerables trigger a deoptimization
// before the loop is built; otherwise BuildForInBody emits the loop.
void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  if (!stmt->each()->IsVariableProxy() ||
      !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
    return Bailout(kForInStatementWithNonLocalEachVariable);
  }

  Variable* each_var = stmt->each()->AsVariableProxy()->var();

  CHECK_ALIVE(VisitForValue(stmt->enumerable()));
  HValue* enumerable = Top();  // Leave enumerable at the top.

  // Deoptimize on undefined or null enumerables.
  IfBuilder if_undefined_or_null(this);
  if_undefined_or_null.If<HCompareObjectEqAndBranch>(
      enumerable, graph()->GetConstantUndefined());
  if_undefined_or_null.Or();
  if_undefined_or_null.If<HCompareObjectEqAndBranch>(
      enumerable, graph()->GetConstantNull());
  if_undefined_or_null.ThenDeopt(DeoptimizeReason::kUndefinedOrNullInForIn);
  if_undefined_or_null.End();
  BuildForInBody(stmt, each_var, enumerable);
}
4870
4871
// Generate the body of a for-in loop over |enumerable|, binding each
// enumerated key to |each_var|.  The loop state lives in five expression
// stack slots (bottom to top): enumerable, type (the map or the Smi 1),
// array (the enum cache or key array), limit (number of keys), and index.
// Keeping the state on the stack lets an OSR entry reconstruct it.
void HOptimizedGraphBuilder::BuildForInBody(ForInStatement* stmt,
                                            Variable* each_var,
                                            HValue* enumerable) {
  Handle<Map> meta_map = isolate()->factory()->meta_map();
  bool fast = stmt->for_in_type() == ForInStatement::FAST_FOR_IN;
  BuildCheckHeapObject(enumerable);
  Add<HCheckInstanceType>(enumerable, HCheckInstanceType::IS_JS_RECEIVER);
  Add<HSimulate>(stmt->ToObjectId());
  if (fast) {
    // Fast path: enumerate via the map's enum cache.
    HForInPrepareMap* map = Add<HForInPrepareMap>(enumerable);
    Push(map);
    Add<HSimulate>(stmt->EnumId());
    Drop(1);
    // Check that the prepare result is really a map (maps have meta_map as
    // their map).
    Add<HCheckMaps>(map, meta_map);

    HForInCacheArray* array = Add<HForInCacheArray>(
        enumerable, map, DescriptorArray::kEnumCacheBridgeCacheIndex);
    HValue* enum_length = BuildEnumLength(map);

    HForInCacheArray* index_cache = Add<HForInCacheArray>(
        enumerable, map, DescriptorArray::kEnumCacheBridgeIndicesCacheIndex);
    array->set_index_cache(index_cache);

    // Push the loop state: type, array, limit.
    Push(map);
    Push(array);
    Push(enum_length);
    Add<HSimulate>(stmt->PrepareId());
  } else {
    // Slow path: ask the runtime for the enumeration data.
    Runtime::FunctionId function_id = Runtime::kForInEnumerate;
    Add<HPushArguments>(enumerable);
    HCallRuntime* array =
        Add<HCallRuntime>(Runtime::FunctionForId(function_id), 1);
    Push(array);
    Add<HSimulate>(stmt->EnumId());
    Drop(1);

    // The runtime result is either a map (enum cache usable) or a fixed
    // array of keys.
    IfBuilder if_fast(this);
    if_fast.If<HCompareMap>(array, meta_map);
    if_fast.Then();
    {
      HValue* cache_map = array;
      HForInCacheArray* cache = Add<HForInCacheArray>(
          enumerable, cache_map, DescriptorArray::kEnumCacheBridgeCacheIndex);
      HValue* enum_length = BuildEnumLength(cache_map);
      Push(cache_map);
      Push(cache);
      Push(enum_length);
      Add<HSimulate>(stmt->PrepareId(), FIXED_SIMULATE);
    }
    if_fast.Else();
    {
      // The Smi 1 in the type slot can never equal a map, which forces the
      // key-filtering path on every iteration below.
      Push(graph()->GetConstant1());
      Push(array);
      Push(AddLoadFixedArrayLength(array));
      Add<HSimulate>(stmt->PrepareId(), FIXED_SIMULATE);
    }
  }

  // The index slot, initialized to 0.
  Push(graph()->GetConstant0());

  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  // Reload the values to ensure we have up-to-date values inside of the loop.
  // This is relevant especially for OSR where the values don't come from the
  // computation above, but from the OSR entry block.
  HValue* index = environment()->ExpressionStackAt(0);
  HValue* limit = environment()->ExpressionStackAt(1);
  HValue* array = environment()->ExpressionStackAt(2);
  HValue* type = environment()->ExpressionStackAt(3);
  enumerable = environment()->ExpressionStackAt(4);

  // Check that we still have more keys.
  HCompareNumericAndBranch* compare_index =
      New<HCompareNumericAndBranch>(index, limit, Token::LT);
  compare_index->set_observed_input_representation(
      Representation::Smi(), Representation::Smi());

  HBasicBlock* loop_body = graph()->CreateBasicBlock();
  HBasicBlock* loop_successor = graph()->CreateBasicBlock();

  compare_index->SetSuccessorAt(0, loop_body);
  compare_index->SetSuccessorAt(1, loop_successor);
  FinishCurrentBlock(compare_index);

  // On loop exit, discard the five loop state slots.
  set_current_block(loop_successor);
  Drop(5);

  set_current_block(loop_body);

  // Compute the next enumerated value.
  HValue* key = Add<HLoadKeyed>(array, index, index, nullptr, FAST_ELEMENTS);

  HBasicBlock* continue_block = nullptr;
  if (fast) {
    // Check if expected map still matches that of the enumerable.
    Add<HCheckMapValue>(enumerable, type);
    Add<HSimulate>(stmt->FilterId());
  } else {
    // We need the continue block here to be able to skip over invalidated keys.
    continue_block = graph()->CreateBasicBlock();

    // We cannot use the IfBuilder here, since we need to be able to jump
    // over the loop body in case of undefined result from %ForInFilter,
    // and the poor soul that is the IfBuilder get's really confused about
    // such "advanced control flow requirements".
    HBasicBlock* if_fast = graph()->CreateBasicBlock();
    HBasicBlock* if_slow = graph()->CreateBasicBlock();
    HBasicBlock* if_slow_pass = graph()->CreateBasicBlock();
    HBasicBlock* if_slow_skip = graph()->CreateBasicBlock();
    HBasicBlock* if_join = graph()->CreateBasicBlock();

    // Check if expected map still matches that of the enumerable.
    HValue* enumerable_map =
        Add<HLoadNamedField>(enumerable, nullptr, HObjectAccess::ForMap());
    FinishCurrentBlock(
        New<HCompareObjectEqAndBranch>(enumerable_map, type, if_fast, if_slow));
    set_current_block(if_fast);
    {
      // The enum cache for enumerable is still valid, no need to check key.
      Push(key);
      Goto(if_join);
    }
    set_current_block(if_slow);
    {
      // Filter the key through the ForInFilter stub; it yields undefined
      // for keys that are no longer valid.
      Callable callable = CodeFactory::ForInFilter(isolate());
      HValue* values[] = {key, enumerable};
      HConstant* stub_value = Add<HConstant>(callable.code());
      Push(Add<HCallWithDescriptor>(stub_value, 0, callable.descriptor(),
                                    ArrayVector(values)));
      Add<HSimulate>(stmt->FilterId());
      FinishCurrentBlock(New<HCompareObjectEqAndBranch>(
          Top(), graph()->GetConstantUndefined(), if_slow_skip, if_slow_pass));
    }
    set_current_block(if_slow_pass);
    { Goto(if_join); }
    set_current_block(if_slow_skip);
    {
      // The key is no longer valid for enumerable, skip it.
      Drop(1);
      Goto(continue_block);
    }
    if_join->SetJoinId(stmt->FilterId());
    set_current_block(if_join);
    key = Pop();
  }

  Bind(each_var, key);
  Add<HSimulate>(stmt->AssignmentId());

  // A break out of this loop must drop the five loop state slots.
  BreakAndContinueInfo break_info(stmt, scope(), 5);
  break_info.set_continue_block(continue_block);
  {
    BreakAndContinueScope push(&break_info, this);
    CHECK_BAILOUT(VisitLoopBody(stmt, stmt->StackCheckId(), loop_entry));
  }

  HBasicBlock* body_exit = JoinContinue(
      stmt, stmt->IncrementId(), current_block(), break_info.continue_block());

  if (body_exit != NULL) {
    set_current_block(body_exit);

    // Increment the index on the back edge.
    HValue* current_index = Pop();
    HValue* increment =
        AddUncasted<HAdd>(current_index, graph()->GetConstant1());
    increment->ClearFlag(HValue::kCanOverflow);
    Push(increment);
    body_exit = current_block();
  }

  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());

  set_current_block(loop_exit);
}
5050
5051
VisitForOfStatement(ForOfStatement * stmt)5052 void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) {
5053 DCHECK(!HasStackOverflow());
5054 DCHECK(current_block() != NULL);
5055 DCHECK(current_block()->HasPredecessor());
5056 return Bailout(kForOfStatement);
5057 }
5058
5059
VisitTryCatchStatement(TryCatchStatement * stmt)5060 void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
5061 DCHECK(!HasStackOverflow());
5062 DCHECK(current_block() != NULL);
5063 DCHECK(current_block()->HasPredecessor());
5064 return Bailout(kTryCatchStatement);
5065 }
5066
5067
VisitTryFinallyStatement(TryFinallyStatement * stmt)5068 void HOptimizedGraphBuilder::VisitTryFinallyStatement(
5069 TryFinallyStatement* stmt) {
5070 DCHECK(!HasStackOverflow());
5071 DCHECK(current_block() != NULL);
5072 DCHECK(current_block()->HasPredecessor());
5073 return Bailout(kTryFinallyStatement);
5074 }
5075
5076
VisitDebuggerStatement(DebuggerStatement * stmt)5077 void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
5078 DCHECK(!HasStackOverflow());
5079 DCHECK(current_block() != NULL);
5080 DCHECK(current_block()->HasPredecessor());
5081 return Bailout(kDebuggerStatement);
5082 }
5083
5084
void HOptimizedGraphBuilder::VisitCaseClause(CaseClause* clause) {
  // Case clauses are translated inline by VisitSwitchStatement, which
  // visits their labels and statements itself; this entry point is never
  // invoked.
  UNREACHABLE();
}
5088
5089
VisitFunctionLiteral(FunctionLiteral * expr)5090 void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
5091 DCHECK(!HasStackOverflow());
5092 DCHECK(current_block() != NULL);
5093 DCHECK(current_block()->HasPredecessor());
5094 Handle<SharedFunctionInfo> shared_info = Compiler::GetSharedFunctionInfo(
5095 expr, current_info()->script(), top_info());
5096 // We also have a stack overflow if the recursive compilation did.
5097 if (HasStackOverflow()) return;
5098 // Use the fast case closure allocation code that allocates in new
5099 // space for nested functions that don't need pretenuring.
5100 HConstant* shared_info_value = Add<HConstant>(shared_info);
5101 HInstruction* instr;
5102 if (!expr->pretenure()) {
5103 FastNewClosureStub stub(isolate());
5104 FastNewClosureDescriptor descriptor(isolate());
5105 HValue* values[] = {shared_info_value};
5106 HConstant* stub_value = Add<HConstant>(stub.GetCode());
5107 instr = New<HCallWithDescriptor>(stub_value, 0, descriptor,
5108 ArrayVector(values));
5109 } else {
5110 Add<HPushArguments>(shared_info_value);
5111 Runtime::FunctionId function_id =
5112 expr->pretenure() ? Runtime::kNewClosure_Tenured : Runtime::kNewClosure;
5113 instr = New<HCallRuntime>(Runtime::FunctionForId(function_id), 1);
5114 }
5115 return ast_context()->ReturnInstruction(instr, expr->id());
5116 }
5117
5118
VisitClassLiteral(ClassLiteral * lit)5119 void HOptimizedGraphBuilder::VisitClassLiteral(ClassLiteral* lit) {
5120 DCHECK(!HasStackOverflow());
5121 DCHECK(current_block() != NULL);
5122 DCHECK(current_block()->HasPredecessor());
5123 return Bailout(kClassLiteral);
5124 }
5125
5126
VisitNativeFunctionLiteral(NativeFunctionLiteral * expr)5127 void HOptimizedGraphBuilder::VisitNativeFunctionLiteral(
5128 NativeFunctionLiteral* expr) {
5129 DCHECK(!HasStackOverflow());
5130 DCHECK(current_block() != NULL);
5131 DCHECK(current_block()->HasPredecessor());
5132 return Bailout(kNativeFunctionLiteral);
5133 }
5134
5135
// Builds the graph for a do-expression: the block is evaluated for effect,
// then the result expression produces the value of the whole expression.
void HOptimizedGraphBuilder::VisitDoExpression(DoExpression* expr) {
  DoExpressionScope scope(this);
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  // CHECK_ALIVE returns early if visiting the block bailed out or killed the
  // current block.
  CHECK_ALIVE(VisitBlock(expr->block()));
  Visit(expr->result());
}
5144
5145
// Builds the graph for a ternary (cond ? then : else) expression: branch on
// the condition, visit each arm in the whole expression's AST context, then
// join.
void HOptimizedGraphBuilder::VisitConditional(Conditional* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  HBasicBlock* cond_true = graph()->CreateBasicBlock();
  HBasicBlock* cond_false = graph()->CreateBasicBlock();
  CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));

  // Visit the true and false subexpressions in the same AST context as the
  // whole expression.
  if (cond_true->HasPredecessor()) {
    cond_true->SetJoinId(expr->ThenId());
    set_current_block(cond_true);
    CHECK_BAILOUT(Visit(expr->then_expression()));
    // Visiting may have moved us to a different (or no) current block.
    cond_true = current_block();
  } else {
    // The then-branch turned out to be unreachable.
    cond_true = NULL;
  }

  if (cond_false->HasPredecessor()) {
    cond_false->SetJoinId(expr->ElseId());
    set_current_block(cond_false);
    CHECK_BAILOUT(Visit(expr->else_expression()));
    cond_false = current_block();
  } else {
    // The else-branch turned out to be unreachable.
    cond_false = NULL;
  }

  // In a test context the arms have already transferred control; otherwise
  // join the arms and, unless the value is discarded, return the value the
  // surviving arm pushed on the environment.
  if (!ast_context()->IsTest()) {
    HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
    set_current_block(join);
    if (join != NULL && !ast_context()->IsEffect()) {
      return ast_context()->ReturnValue(Pop());
    }
  }
}
5182
CanInlineGlobalPropertyAccess(Variable * var,LookupIterator * it,PropertyAccessType access_type)5183 bool HOptimizedGraphBuilder::CanInlineGlobalPropertyAccess(
5184 Variable* var, LookupIterator* it, PropertyAccessType access_type) {
5185 if (var->is_this()) return false;
5186 return CanInlineGlobalPropertyAccess(it, access_type);
5187 }
5188
// Returns whether a global property access can be inlined as a direct
// PropertyCell load/store instead of going through an IC.
bool HOptimizedGraphBuilder::CanInlineGlobalPropertyAccess(
    LookupIterator* it, PropertyAccessType access_type) {
  // Inlining requires compiling against a concrete global object.
  if (!current_info()->has_global_object()) {
    return false;
  }

  switch (it->state()) {
    case LookupIterator::ACCESSOR:
    case LookupIterator::ACCESS_CHECK:
    case LookupIterator::INTERCEPTOR:
    case LookupIterator::INTEGER_INDEXED_EXOTIC:
    case LookupIterator::NOT_FOUND:
      return false;
    case LookupIterator::DATA:
      // Stores to read-only properties can't be inlined, and only own data
      // properties of the global object itself are backed by property cells.
      if (access_type == STORE && it->IsReadOnly()) return false;
      if (!it->GetHolder<JSObject>()->IsJSGlobalObject()) return false;
      return true;
    case LookupIterator::JSPROXY:
    case LookupIterator::TRANSITION:
      UNREACHABLE();
  }
  UNREACHABLE();
  return false;
}
5213
5214
BuildContextChainWalk(Variable * var)5215 HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) {
5216 DCHECK(var->IsContextSlot());
5217 HValue* context = environment()->context();
5218 int length = scope()->ContextChainLength(var->scope());
5219 while (length-- > 0) {
5220 context = Add<HLoadNamedField>(
5221 context, nullptr,
5222 HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
5223 }
5224 return context;
5225 }
5226
// Emits an inlined load of a global property from its PropertyCell and
// registers a code dependency so that we deoptimize if the cell changes.
void HOptimizedGraphBuilder::InlineGlobalPropertyLoad(LookupIterator* it,
                                                      BailoutId ast_id) {
  Handle<PropertyCell> cell = it->GetPropertyCell();
  top_info()->dependencies()->AssumePropertyCell(cell);
  auto cell_type = it->property_details().cell_type();
  if (cell_type == PropertyCellType::kConstant ||
      cell_type == PropertyCellType::kUndefined) {
    // The cell's value is known constant; embed it directly in the graph.
    Handle<Object> constant_object(cell->value(), isolate());
    if (constant_object->IsConsString()) {
      constant_object = String::Flatten(Handle<String>::cast(constant_object));
    }
    HConstant* constant = New<HConstant>(constant_object);
    return ast_context()->ReturnInstruction(constant, ast_id);
  } else {
    // Otherwise emit a load of the cell's value slot, narrowed by whatever
    // the cell's constant type tells us.
    auto access = HObjectAccess::ForPropertyCellValue();
    UniqueSet<Map>* field_maps = nullptr;
    if (cell_type == PropertyCellType::kConstantType) {
      switch (cell->GetConstantType()) {
        case PropertyCellConstantType::kSmi:
          access = access.WithRepresentation(Representation::Smi());
          break;
        case PropertyCellConstantType::kStableMap: {
          // Check that the map really is stable. The heap object could
          // have mutated without the cell updating state. In that case,
          // make no promises about the loaded value except that it's a
          // heap object.
          access = access.WithRepresentation(Representation::HeapObject());
          Handle<Map> map(HeapObject::cast(cell->value())->map());
          if (map->is_stable()) {
            field_maps = new (zone())
                UniqueSet<Map>(Unique<Map>::CreateImmovable(map), zone());
          }
          break;
        }
      }
    }
    HConstant* cell_constant = Add<HConstant>(cell);
    HLoadNamedField* instr;
    if (field_maps == nullptr) {
      instr = New<HLoadNamedField>(cell_constant, nullptr, access);
    } else {
      instr = New<HLoadNamedField>(cell_constant, nullptr, access, field_maps,
                                   HType::HeapObject());
    }
    // This load depends on global variables, not on in-object fields.
    instr->ClearDependsOnFlag(kInobjectFields);
    instr->SetDependsOnFlag(kGlobalVars);
    return ast_context()->ReturnInstruction(instr, ast_id);
  }
}
5276
// Builds the graph for a variable reference, dispatching on where the
// variable lives: global/unallocated, parameter/local, context slot, or
// dynamic lookup.
void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  Variable* variable = expr->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      if (IsLexicalVariableMode(variable->mode())) {
        // TODO(rossberg): should this be an DCHECK?
        return Bailout(kReferenceToGlobalLexicalVariable);
      }
      // Handle known global constants like 'undefined' specially to avoid a
      // load from a global cell for them.
      Handle<Object> constant_value =
          isolate()->factory()->GlobalConstantFor(variable->name());
      if (!constant_value.is_null()) {
        HConstant* instr = New<HConstant>(constant_value);
        return ast_context()->ReturnInstruction(instr, expr->id());
      }

      Handle<JSGlobalObject> global(current_info()->global_object());

      // Lookup in script contexts.
      {
        Handle<ScriptContextTable> script_contexts(
            global->native_context()->script_context_table());
        ScriptContextTable::LookupResult lookup;
        if (ScriptContextTable::Lookup(script_contexts, variable->name(),
                                       &lookup)) {
          Handle<Context> script_context = ScriptContextTable::GetContext(
              script_contexts, lookup.context_index);
          Handle<Object> current_value =
              FixedArray::get(*script_context, lookup.slot_index, isolate());

          // If the values is not the hole, it will stay initialized,
          // so no need to generate a check.
          if (current_value->IsTheHole(isolate())) {
            return Bailout(kReferenceToUninitializedVariable);
          }
          // Load directly from the script context's slot.
          HInstruction* result = New<HLoadNamedField>(
              Add<HConstant>(script_context), nullptr,
              HObjectAccess::ForContextSlot(lookup.slot_index));
          return ast_context()->ReturnInstruction(result, expr->id());
        }
      }

      // Try to inline the load through the global's property cell;
      // otherwise fall back to a LoadGlobalIC call.
      LookupIterator it(global, variable->name(), LookupIterator::OWN);
      it.TryLookupCachedProperty();
      if (CanInlineGlobalPropertyAccess(variable, &it, LOAD)) {
        InlineGlobalPropertyLoad(&it, expr->id());
        return;
      } else {
        Handle<TypeFeedbackVector> vector(current_feedback_vector(), isolate());

        HValue* vector_value = Add<HConstant>(vector);
        HValue* slot_value =
            Add<HConstant>(vector->GetIndex(expr->VariableFeedbackSlot()));
        Callable callable = CodeFactory::LoadGlobalICInOptimizedCode(
            isolate(), ast_context()->typeof_mode());
        HValue* stub = Add<HConstant>(callable.code());
        HValue* values[] = {slot_value, vector_value};
        HCallWithDescriptor* instr = New<HCallWithDescriptor>(
            Code::LOAD_GLOBAL_IC, stub, 0, callable.descriptor(),
            ArrayVector(values));
        return ast_context()->ReturnInstruction(instr, expr->id());
      }
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      // The value lives in the Hydrogen environment; the hole marks an
      // uninitialized let/const binding.
      HValue* value = LookupAndMakeLive(variable);
      if (value == graph()->GetConstantHole()) {
        DCHECK(IsDeclaredVariableMode(variable->mode()) &&
               variable->mode() != VAR);
        return Bailout(kReferenceToUninitializedVariable);
      }
      return ast_context()->ReturnValue(value);
    }

    case VariableLocation::CONTEXT: {
      // Walk up to the owning context, then load the slot; let/const slots
      // need a hole check that deoptimizes on the hole.
      HValue* context = BuildContextChainWalk(variable);
      HLoadContextSlot::Mode mode;
      switch (variable->mode()) {
        case LET:
        case CONST:
          mode = HLoadContextSlot::kCheckDeoptimize;
          break;
        default:
          mode = HLoadContextSlot::kNoCheck;
          break;
      }
      HLoadContextSlot* instr =
          new(zone()) HLoadContextSlot(context, variable->index(), mode);
      return ast_context()->ReturnInstruction(instr, expr->id());
    }

    case VariableLocation::LOOKUP:
      return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);

    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}
5380
5381
VisitLiteral(Literal * expr)5382 void HOptimizedGraphBuilder::VisitLiteral(Literal* expr) {
5383 DCHECK(!HasStackOverflow());
5384 DCHECK(current_block() != NULL);
5385 DCHECK(current_block()->HasPredecessor());
5386 HConstant* instr = New<HConstant>(expr->value());
5387 return ast_context()->ReturnInstruction(instr, expr->id());
5388 }
5389
5390
// Builds the graph for a regexp literal by calling the FastCloneRegExp stub
// with (closure, literal index, pattern, flags).
void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  Callable callable = CodeFactory::FastCloneRegExp(isolate());
  HValue* values[] = {AddThisFunction(), Add<HConstant>(expr->literal_index()),
                      Add<HConstant>(expr->pattern()),
                      Add<HConstant>(expr->flags())};
  HConstant* stub_value = Add<HConstant>(callable.code());
  HInstruction* instr = New<HCallWithDescriptor>(
      stub_value, 0, callable.descriptor(), ArrayVector(values));
  return ast_context()->ReturnInstruction(instr, expr->id());
}
5404
5405
CanInlinePropertyAccess(Handle<Map> map)5406 static bool CanInlinePropertyAccess(Handle<Map> map) {
5407 if (map->instance_type() == HEAP_NUMBER_TYPE) return true;
5408 if (map->instance_type() < FIRST_NONSTRING_TYPE) return true;
5409 return map->IsJSObjectMap() && !map->is_dictionary_map() &&
5410 !map->has_named_interceptor() &&
5411 // TODO(verwaest): Whitelist contexts to which we have access.
5412 !map->is_access_check_needed();
5413 }
5414
5415
5416 // Determines whether the given array or object literal boilerplate satisfies
5417 // all limits to be considered for fast deep-copying and computes the total
5418 // size of all objects that are part of the graph.
// Determines whether the given array or object literal boilerplate satisfies
// all limits to be considered for fast deep-copying and computes the total
// size of all objects that are part of the graph.
static bool IsFastLiteral(Handle<JSObject> boilerplate,
                          int max_depth,
                          int* max_properties) {
  // A deprecated map must be migrated first; if migration fails, bail out.
  if (boilerplate->map()->is_deprecated() &&
      !JSObject::TryMigrateInstance(boilerplate)) {
    return false;
  }

  DCHECK(max_depth >= 0 && *max_properties >= 0);
  if (max_depth == 0) return false;

  Isolate* isolate = boilerplate->GetIsolate();
  // COW elements need no per-element inspection; anything else does.
  Handle<FixedArrayBase> elements(boilerplate->elements());
  if (elements->length() > 0 &&
      elements->map() != isolate->heap()->fixed_cow_array_map()) {
    if (boilerplate->HasFastSmiOrObjectElements()) {
      Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
      int length = elements->length();
      for (int i = 0; i < length; i++) {
        // Each element consumes one unit of the shared property budget.
        if ((*max_properties)-- == 0) return false;
        Handle<Object> value(fast_elements->get(i), isolate);
        if (value->IsJSObject()) {
          // Recurse into nested object values with a reduced depth budget.
          Handle<JSObject> value_object = Handle<JSObject>::cast(value);
          if (!IsFastLiteral(value_object,
                             max_depth - 1,
                             max_properties)) {
            return false;
          }
        }
      }
    } else if (!boilerplate->HasFastDoubleElements()) {
      return false;
    }
  }

  // Only objects with purely in-object (descriptor-backed) properties are
  // eligible; a non-empty out-of-object properties array disqualifies.
  Handle<FixedArray> properties(boilerplate->properties());
  if (properties->length() > 0) {
    return false;
  } else {
    Handle<DescriptorArray> descriptors(
        boilerplate->map()->instance_descriptors());
    int limit = boilerplate->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < limit; i++) {
      PropertyDetails details = descriptors->GetDetails(i);
      if (details.type() != DATA) continue;
      if ((*max_properties)-- == 0) return false;
      FieldIndex field_index = FieldIndex::ForDescriptor(boilerplate->map(), i);
      // Unboxed doubles are copied as raw bits; no recursion needed.
      if (boilerplate->IsUnboxedDoubleField(field_index)) continue;
      Handle<Object> value(boilerplate->RawFastPropertyAt(field_index),
                           isolate);
      if (value->IsJSObject()) {
        Handle<JSObject> value_object = Handle<JSObject>::cast(value);
        if (!IsFastLiteral(value_object,
                           max_depth - 1,
                           max_properties)) {
          return false;
        }
      }
    }
  }
  return true;
}
5481
5482
// Builds the graph for an object literal: the boilerplate is either
// deep-copied inline (fast path) or created by a runtime call, then the
// non-compile-time property values are stored into it one by one.
void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
  HInstruction* literal;

  // Check whether to use fast or slow deep-copying for boilerplate.
  int max_properties = kMaxFastLiteralProperties;
  Handle<Object> literals_cell(
      closure->literals()->literal(expr->literal_index()), isolate());
  Handle<AllocationSite> site;
  Handle<JSObject> boilerplate;
  if (!literals_cell->IsUndefined(isolate())) {
    // Retrieve the boilerplate
    site = Handle<AllocationSite>::cast(literals_cell);
    boilerplate = Handle<JSObject>(JSObject::cast(site->transition_info()),
                                   isolate());
  }

  if (!boilerplate.is_null() &&
      IsFastLiteral(boilerplate, kMaxFastLiteralDepth, &max_properties)) {
    // Fast path: inline a deep copy of the boilerplate, tracking allocation
    // sites for pretenuring decisions.
    AllocationSiteUsageContext site_context(isolate(), site, false);
    site_context.EnterNewScope();
    literal = BuildFastLiteral(boilerplate, &site_context);
    site_context.ExitScope(site, boilerplate);
  } else {
    // Slow path: call the runtime to create the literal.
    NoObservableSideEffectsScope no_effects(this);
    Handle<FixedArray> constant_properties = expr->constant_properties();
    int literal_index = expr->literal_index();
    int flags = expr->ComputeFlags(true);

    Add<HPushArguments>(AddThisFunction(), Add<HConstant>(literal_index),
                        Add<HConstant>(constant_properties),
                        Add<HConstant>(flags));

    Runtime::FunctionId function_id = Runtime::kCreateObjectLiteral;
    literal = Add<HCallRuntime>(Runtime::FunctionForId(function_id), 4);
  }

  // The object is expected in the bailout environment during computation
  // of the property values and is the value of the entire expression.
  Push(literal);
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->is_computed_name()) return Bailout(kComputedPropertyName);
    // Compile-time values are already part of the boilerplate.
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();

    switch (property->kind()) {
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->IsStringLiteral()) {
          DCHECK(key->IsPropertyName());
          if (property->emit_store()) {
            CHECK_ALIVE(VisitForValue(value));
            HValue* value = Pop();

            // Prefer a monomorphic inline store when the receiver type is
            // known; otherwise emit a generic named store.
            Handle<Map> map = property->GetReceiverType();
            Handle<String> name = key->AsPropertyName();
            HValue* store;
            FeedbackVectorSlot slot = property->GetSlot();
            if (map.is_null()) {
              // If we don't know the monomorphic type, do a generic store.
              CHECK_ALIVE(store = BuildNamedGeneric(STORE, NULL, slot, literal,
                                                    name, value));
            } else {
              PropertyAccessInfo info(this, STORE, map, name);
              if (info.CanAccessMonomorphic()) {
                HValue* checked_literal = Add<HCheckMaps>(literal, map);
                DCHECK(!info.IsAccessorConstant());
                store = BuildMonomorphicAccess(
                    &info, literal, checked_literal, value,
                    BailoutId::None(), BailoutId::None());
              } else {
                CHECK_ALIVE(store = BuildNamedGeneric(STORE, NULL, slot,
                                                      literal, name, value));
              }
            }
            if (store->IsInstruction()) {
              AddInstruction(HInstruction::cast(store));
            }
            DCHECK(store->HasObservableSideEffects());
            Add<HSimulate>(key->id(), REMOVABLE_SIMULATE);

            // Add [[HomeObject]] to function literals.
            if (FunctionLiteral::NeedsHomeObject(property->value())) {
              Handle<Symbol> sym = isolate()->factory()->home_object_symbol();
              HInstruction* store_home = BuildNamedGeneric(
                  STORE, NULL, property->GetSlot(1), value, sym, literal);
              AddInstruction(store_home);
              DCHECK(store_home->HasObservableSideEffects());
              Add<HSimulate>(property->value()->id(), REMOVABLE_SIMULATE);
            }
          } else {
            // Store is elided; still evaluate the value for its effects.
            CHECK_ALIVE(VisitForEffect(value));
          }
          break;
        }
        // Fall through.
      case ObjectLiteral::Property::PROTOTYPE:
      case ObjectLiteral::Property::SETTER:
      case ObjectLiteral::Property::GETTER:
        return Bailout(kObjectLiteralWithComplexProperty);
      default: UNREACHABLE();
    }
  }

  return ast_context()->ReturnValue(Pop());
}
5600
5601
// Builds the graph for an array literal: clone (or runtime-create) the
// boilerplate, then store each non-compile-time element.
void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  HInstruction* literal;

  // Fetch the allocation site / boilerplate recorded for this literal slot,
  // if any.
  Handle<AllocationSite> site;
  Handle<LiteralsArray> literals(environment()->closure()->literals(),
                                 isolate());
  Handle<Object> literals_cell(literals->literal(expr->literal_index()),
                               isolate());
  Handle<JSObject> boilerplate_object;
  if (!literals_cell->IsUndefined(isolate())) {
    DCHECK(literals_cell->IsAllocationSite());
    site = Handle<AllocationSite>::cast(literals_cell);
    boilerplate_object = Handle<JSObject>(
        JSObject::cast(site->transition_info()), isolate());
  }

  // Check whether to use fast or slow deep-copying for boilerplate.
  int max_properties = kMaxFastLiteralProperties;
  if (!boilerplate_object.is_null() &&
      IsFastLiteral(boilerplate_object, kMaxFastLiteralDepth,
                    &max_properties)) {
    DCHECK(site->SitePointsToLiteral());
    AllocationSiteUsageContext site_context(isolate(), site, false);
    site_context.EnterNewScope();
    literal = BuildFastLiteral(boilerplate_object, &site_context);
    site_context.ExitScope(site, boilerplate_object);
  } else {
    // Slow path: call the runtime to create the literal.
    NoObservableSideEffectsScope no_effects(this);
    Handle<FixedArray> constants = expr->constant_elements();
    int literal_index = expr->literal_index();
    int flags = expr->ComputeFlags(true);

    Add<HPushArguments>(AddThisFunction(), Add<HConstant>(literal_index),
                        Add<HConstant>(constants), Add<HConstant>(flags));

    Runtime::FunctionId function_id = Runtime::kCreateArrayLiteral;
    literal = Add<HCallRuntime>(Runtime::FunctionForId(function_id), 4);

    // Register to deopt if the boilerplate ElementsKind changes.
    if (!site.is_null()) {
      top_info()->dependencies()->AssumeTransitionStable(site);
    }
  }

  // The array is expected in the bailout environment during computation
  // of the property values and is the value of the entire expression.
  Push(literal);

  HInstruction* elements = NULL;

  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    CHECK_ALIVE(VisitForValue(subexpr));
    HValue* value = Pop();
    if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);

    // Reload the elements backing store: a previous store may have caused
    // the array to re-allocate it.
    elements = AddLoadElements(literal);

    HValue* key = Add<HConstant>(i);

    if (!boilerplate_object.is_null()) {
      // With a known boilerplate we can store directly with the
      // boilerplate's elements kind.
      ElementsKind boilerplate_elements_kind =
          boilerplate_object->GetElementsKind();
      switch (boilerplate_elements_kind) {
        case FAST_SMI_ELEMENTS:
        case FAST_HOLEY_SMI_ELEMENTS:
        case FAST_ELEMENTS:
        case FAST_HOLEY_ELEMENTS:
        case FAST_DOUBLE_ELEMENTS:
        case FAST_HOLEY_DOUBLE_ELEMENTS: {
          Add<HStoreKeyed>(elements, key, value, nullptr,
                           boilerplate_elements_kind);
          break;
        }
        default:
          UNREACHABLE();
          break;
      }
    } else {
      // Unknown boilerplate: fall back to a generic keyed store.
      HInstruction* instr = BuildKeyedGeneric(
          STORE, expr, expr->LiteralFeedbackSlot(), literal, key, value);
      AddInstruction(instr);
    }

    Add<HSimulate>(expr->GetIdForElement(i));
  }

  return ast_context()->ReturnValue(Pop());
}
5702
5703
AddCheckMap(HValue * object,Handle<Map> map)5704 HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object,
5705 Handle<Map> map) {
5706 BuildCheckHeapObject(object);
5707 return Add<HCheckMaps>(object, map);
5708 }
5709
5710
// Builds an (un-added) instruction that loads the named field described by
// |info| from |checked_object|, folding constant immutable properties and
// unwrapping boxed doubles where necessary.
HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField(
    PropertyAccessInfo* info,
    HValue* checked_object) {
  // See if this is a load for an immutable property
  if (checked_object->ActualValue()->IsConstant()) {
    Handle<Object> object(
        HConstant::cast(checked_object->ActualValue())->handle(isolate()));

    if (object->IsJSObject()) {
      LookupIterator it(object, info->name(),
                        LookupIterator::OWN_SKIP_INTERCEPTOR);
      Handle<Object> value = JSReceiver::GetDataProperty(&it);
      // Read-only and non-configurable means the value can never change, so
      // it can be embedded as a constant.
      if (it.IsFound() && it.IsReadOnly() && !it.IsConfigurable()) {
        return New<HConstant>(value);
      }
    }
  }

  HObjectAccess access = info->access();
  if (access.representation().IsDouble() &&
      (!FLAG_unbox_double_fields || !access.IsInobject())) {
    // Load the heap number.
    checked_object = Add<HLoadNamedField>(
        checked_object, nullptr,
        access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }

  // Attach the known field maps (if any) so later passes can use them.
  SmallMapList* map_list = info->field_maps();
  if (map_list->length() == 0) {
    return New<HLoadNamedField>(checked_object, checked_object, access);
  }

  UniqueSet<Map>* maps = new(zone()) UniqueSet<Map>(map_list->length(), zone());
  for (int i = 0; i < map_list->length(); ++i) {
    maps->Add(Unique<Map>::CreateImmovable(map_list->at(i)), zone());
  }
  return New<HLoadNamedField>(
      checked_object, checked_object, access, maps, info->field_type());
}
5752
5753
// Builds an (un-added) instruction that stores |value| into the named field
// described by |info| on |checked_object|, handling boxed-double fields and
// map transitions.
HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
    PropertyAccessInfo* info,
    HValue* checked_object,
    HValue* value) {
  bool transition_to_field = info->IsTransition();
  // TODO(verwaest): Move this logic into PropertyAccessInfo.
  HObjectAccess field_access = info->access();

  HStoreNamedField *instr;
  if (field_access.representation().IsDouble() &&
      (!FLAG_unbox_double_fields || !field_access.IsInobject())) {
    // Double fields that are not unboxed live in a separate HeapNumber box.
    HObjectAccess heap_number_access =
        field_access.WithRepresentation(Representation::Tagged());
    if (transition_to_field) {
      // The store requires a mutable HeapNumber to be allocated.
      NoObservableSideEffectsScope no_side_effects(this);
      HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);

      // TODO(hpayer): Allocation site pretenuring support.
      HInstruction* heap_number =
          Add<HAllocate>(heap_number_size, HType::HeapObject(), NOT_TENURED,
                         MUTABLE_HEAP_NUMBER_TYPE, graph()->GetConstant0());
      AddStoreMapConstant(
          heap_number, isolate()->factory()->mutable_heap_number_map());
      Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                            value);
      // Then store the freshly allocated box into the field itself.
      instr = New<HStoreNamedField>(checked_object->ActualValue(),
                                    heap_number_access,
                                    heap_number);
    } else {
      // The field already holds a box; load it and overwrite its value slot.
      HInstruction* heap_number =
          Add<HLoadNamedField>(checked_object, nullptr, heap_number_access);
      instr = New<HStoreNamedField>(heap_number,
                                    HObjectAccess::ForHeapNumberValue(),
                                    value, STORE_TO_INITIALIZED_ENTRY);
    }
  } else {
    if (field_access.representation().IsHeapObject()) {
      // The representation guarantees a heap object; enforce it on the value.
      BuildCheckHeapObject(value);
    }

    if (!info->field_maps()->is_empty()) {
      DCHECK(field_access.representation().IsHeapObject());
      value = Add<HCheckMaps>(value, info->field_maps());
    }

    // This is a normal store.
    instr = New<HStoreNamedField>(
        checked_object->ActualValue(), field_access, value,
        transition_to_field ? INITIALIZING_STORE : STORE_TO_INITIALIZED_ENTRY);
  }

  if (transition_to_field) {
    // Attach the target map so the store also performs the map transition.
    Handle<Map> transition(info->transition());
    DCHECK(!transition->is_deprecated());
    instr->SetTransition(Add<HConstant>(transition));
  }
  return instr;
}
5814
5815 Handle<FieldType>
GetFieldTypeFromMap(Handle<Map> map) const5816 HOptimizedGraphBuilder::PropertyAccessInfo::GetFieldTypeFromMap(
5817 Handle<Map> map) const {
5818 DCHECK(IsFound());
5819 DCHECK(number_ < map->NumberOfOwnDescriptors());
5820 return handle(map->instance_descriptors()->GetFieldType(number_), isolate());
5821 }
5822
// Returns whether |info| describes an access compatible with this one so the
// two can be handled by a single polymorphic case. On success this may merge
// field maps / representation / field type into |info|.
bool HOptimizedGraphBuilder::PropertyAccessInfo::IsCompatible(
    PropertyAccessInfo* info) {
  if (!CanInlinePropertyAccess(map_)) return false;

  // Currently only handle AstType::Number as a polymorphic case.
  // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
  // instruction.
  if (IsNumberType()) return false;

  // Values are only compatible for monomorphic load if they all behave the same
  // regarding value wrappers.
  if (IsValueWrapped() != info->IsValueWrapped()) return false;

  if (!LookupDescriptor()) return false;

  if (!IsFound()) {
    // Both misses are compatible only if they would continue into the same
    // prototype chain.
    return (!info->IsFound() || info->has_holder()) &&
           map()->prototype() == info->map()->prototype();
  }

  // Mismatch if the other access info found the property in the prototype
  // chain.
  if (info->has_holder()) return false;

  if (IsAccessorConstant()) {
    return accessor_.is_identical_to(info->accessor_) &&
           api_holder_.is_identical_to(info->api_holder_);
  }

  if (IsDataConstant()) {
    return constant_.is_identical_to(info->constant_);
  }

  DCHECK(IsData());
  if (!info->IsData()) return false;

  // Field representations must be compatible in the relevant direction.
  Representation r = access_.representation();
  if (IsLoad()) {
    if (!info->access_.representation().IsCompatibleForLoad(r)) return false;
  } else {
    if (!info->access_.representation().IsCompatibleForStore(r)) return false;
  }
  if (info->access_.offset() != access_.offset()) return false;
  if (info->access_.IsInobject() != access_.IsInobject()) return false;
  if (IsLoad()) {
    // For loads, merge the field-map sets: no maps on either side means no
    // guarantee at all.
    if (field_maps_.is_empty()) {
      info->field_maps_.Clear();
    } else if (!info->field_maps_.is_empty()) {
      for (int i = 0; i < field_maps_.length(); ++i) {
        info->field_maps_.AddMapIfMissing(field_maps_.at(i), info->zone());
      }
      info->field_maps_.Sort();
    }
  } else {
    // We can only merge stores that agree on their field maps. The comparison
    // below is safe, since we keep the field maps sorted.
    if (field_maps_.length() != info->field_maps_.length()) return false;
    for (int i = 0; i < field_maps_.length(); ++i) {
      if (!field_maps_.at(i).is_identical_to(info->field_maps_.at(i))) {
        return false;
      }
    }
  }
  info->GeneralizeRepresentation(r);
  info->field_type_ = info->field_type_.Combine(field_type_);
  return true;
}
5890
5891
// Looks up name_ in map_'s own descriptors and caches the result. Returns
// false if the lookup result rules out an inlined access.
bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupDescriptor() {
  // Non-JSObject maps (primitives) have no descriptors to search.
  if (!map_->IsJSObjectMap()) return true;
  LookupDescriptor(*map_, *name_);
  return LoadResult(map_);
}
5897
5898
// Records the details of a successful descriptor lookup on |map| (field
// access, accessor pair, or data constant). Returns false if the found
// property cannot be handled.
bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) {
  // Stores to read-only properties are never inlinable.
  if (!IsLoad() && IsProperty() && IsReadOnly()) {
    return false;
  }

  if (IsData()) {
    // Construct the object field access.
    int index = GetLocalFieldIndexFromMap(map);
    access_ = HObjectAccess::ForField(map, index, representation(), name_);

    // Load field map for heap objects.
    return LoadFieldMaps(map);
  } else if (IsAccessorConstant()) {
    // Pick the getter or setter half of the accessor pair as appropriate.
    Handle<Object> accessors = GetAccessorsFromMap(map);
    if (!accessors->IsAccessorPair()) return false;
    Object* raw_accessor =
        IsLoad() ? Handle<AccessorPair>::cast(accessors)->getter()
                 : Handle<AccessorPair>::cast(accessors)->setter();
    if (!raw_accessor->IsJSFunction() &&
        !raw_accessor->IsFunctionTemplateInfo())
      return false;
    Handle<Object> accessor = handle(HeapObject::cast(raw_accessor));
    // For simple API calls, remember the expected holder as well.
    CallOptimization call_optimization(accessor);
    if (call_optimization.is_simple_api_call()) {
      CallOptimization::HolderLookup holder_lookup;
      api_holder_ =
          call_optimization.LookupHolderOfExpectedType(map_, &holder_lookup);
    }
    accessor_ = accessor;
  } else if (IsDataConstant()) {
    constant_ = GetConstantFromMap(map);
  }

  return true;
}
5934
5935
// Extracts the field type for the looked-up data field on |map| and, when it
// names a stable map, records it as the field's map set (adding a stability
// dependency). Returns false only for an unsafe store to a cleared field
// type.
bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadFieldMaps(
    Handle<Map> map) {
  // Clear any previously collected field maps/type.
  field_maps_.Clear();
  field_type_ = HType::Tagged();

  // Figure out the field type from the accessor map.
  Handle<FieldType> field_type = GetFieldTypeFromMap(map);

  // Collect the (stable) maps from the field type.
  if (field_type->IsClass()) {
    DCHECK(access_.representation().IsHeapObject());
    Handle<Map> field_map = field_type->AsClass();
    if (field_map->is_stable()) {
      field_maps_.Add(field_map, zone());
    }
  }

  if (field_maps_.is_empty()) {
    // Store is not safe if the field map was cleared.
    return IsLoad() || !field_type->IsNone();
  }

  // Determine field HType from field type.
  field_type_ = HType::FromFieldType(field_type, zone());
  DCHECK(field_type_.IsHeapObject());

  // Add dependency on the map that introduced the field.
  top_info()->dependencies()->AssumeFieldOwner(GetFieldOwnerFromMap(map));
  return true;
}
5967
5968
// Walks the prototype chain of the receiver map looking for |name_|,
// stopping at the first non-JSObject prototype. Returns true if the lookup
// resolved cleanly (found, or provably absent); false if the access cannot
// be compiled monomorphically.
bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupInPrototypes() {
  Handle<Map> map = this->map();
  if (name_->IsPrivate()) {
    // Private symbols are not looked up on the chain; only a hidden
    // prototype could still be relevant.
    NotFound();
    return !map->has_hidden_prototype();
  }

  while (map->prototype()->IsJSObject()) {
    holder_ = handle(JSObject::cast(map->prototype()));
    if (holder_->map()->is_deprecated()) {
      // Bring the holder up to date with its map tree before inspecting it.
      JSObject::TryMigrateInstance(holder_);
    }
    map = Handle<Map>(holder_->map());
    if (!CanInlinePropertyAccess(map)) {
      NotFound();
      return false;
    }
    LookupDescriptor(*map, *name_);
    if (IsFound()) return LoadResult(map);
  }

  NotFound();
  // A non-JSReceiver prototype (null) means the property is simply absent;
  // a remaining JSReceiver (e.g. a proxy) means we cannot tell.
  return !map->prototype()->IsJSReceiver();
}
5993
5994
IsIntegerIndexedExotic()5995 bool HOptimizedGraphBuilder::PropertyAccessInfo::IsIntegerIndexedExotic() {
5996 InstanceType instance_type = map_->instance_type();
5997 return instance_type == JS_TYPED_ARRAY_TYPE && name_->IsString() &&
5998 IsSpecialIndex(isolate()->unicode_cache(), String::cast(*name_));
5999 }
6000
6001
// Determines whether this property access can be compiled monomorphically
// against |map_|, filling in the access details (special field, own
// property, prototype property, or transitioning store) as a side effect.
bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessMonomorphic() {
  if (!CanInlinePropertyAccess(map_)) return false;
  // Special-cased JSObject fields (e.g. array length) only support loads.
  if (IsJSObjectFieldAccessor()) return IsLoad();
  // Loading "prototype" from a constructor function is handled specially.
  if (map_->IsJSFunctionMap() && map_->is_constructor() &&
      !map_->has_non_instance_prototype() &&
      name_.is_identical_to(isolate()->factory()->prototype_string())) {
    return IsLoad();
  }
  if (!LookupDescriptor()) return false;
  // Own property: stores additionally require it to be writable.
  if (IsFound()) return IsLoad() || !IsReadOnly();
  // Typed-array "special index" names must take the generic path.
  if (IsIntegerIndexedExotic()) return false;
  if (!LookupInPrototypes()) return false;
  if (IsLoad()) return true;

  // From here on: store of a property not present on the receiver.
  if (IsAccessorConstant()) return true;
  // Look for a map transition that adds the property as a data field.
  LookupTransition(*map_, *name_, NONE);
  if (IsTransitionToData() && map_->unused_property_fields() > 0) {
    // Construct the object field access.
    int descriptor = transition()->LastAdded();
    int index =
        transition()->instance_descriptors()->GetFieldIndex(descriptor) -
        map_->GetInObjectProperties();
    PropertyDetails details =
        transition()->instance_descriptors()->GetDetails(descriptor);
    Representation representation = details.representation();
    access_ = HObjectAccess::ForField(map_, index, representation, name_);

    // Load field map for heap objects.
    return LoadFieldMaps(transition());
  }
  return false;
}
6034
6035
// Checks whether the polymorphic access described by |maps| (whose first
// map is |map_|) can nevertheless be compiled as a single monomorphic
// access, i.e. every map yields a compatible access.
bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessAsMonomorphic(
    SmallMapList* maps) {
  DCHECK(map_.is_identical_to(maps->first()));
  if (!CanAccessMonomorphic()) return false;
  STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
  if (maps->length() > kMaxLoadPolymorphism) return false;
  HObjectAccess access = HObjectAccess::ForMap();  // bogus default
  if (GetJSObjectFieldAccess(&access)) {
    // Special JSObject field accessors: every map must produce the exact
    // same field access.
    for (int i = 1; i < maps->length(); ++i) {
      PropertyAccessInfo test_info(builder_, access_type_, maps->at(i), name_);
      HObjectAccess test_access = HObjectAccess::ForMap();  // bogus default
      if (!test_info.GetJSObjectFieldAccess(&test_access)) return false;
      if (!access.Equals(test_access)) return false;
    }
    return true;
  }

  // Currently only handle numbers as a polymorphic case.
  // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
  // instruction.
  if (IsNumberType()) return false;

  // Multiple maps cannot transition to the same target map.
  DCHECK(!IsLoad() || !IsTransition());
  if (IsTransition() && maps->length() > 1) return false;

  // All remaining maps must be compatible with the access computed for the
  // first map.
  for (int i = 1; i < maps->length(); ++i) {
    PropertyAccessInfo test_info(builder_, access_type_, maps->at(i), name_);
    if (!test_info.IsCompatible(this)) return false;
  }

  return true;
}
6069
6070
map()6071 Handle<Map> HOptimizedGraphBuilder::PropertyAccessInfo::map() {
6072 Handle<JSFunction> ctor;
6073 if (Map::GetConstructorFunction(
6074 map_, handle(current_info()->closure()->context()->native_context()))
6075 .ToHandle(&ctor)) {
6076 return handle(ctor->initial_map());
6077 }
6078 return map_;
6079 }
6080
6081
NeedsWrapping(Handle<Map> map,Handle<JSFunction> target)6082 static bool NeedsWrapping(Handle<Map> map, Handle<JSFunction> target) {
6083 return !map->IsJSObjectMap() &&
6084 is_sloppy(target->shared()->language_mode()) &&
6085 !target->shared()->native();
6086 }
6087
6088
NeedsWrappingFor(Handle<JSFunction> target) const6089 bool HOptimizedGraphBuilder::PropertyAccessInfo::NeedsWrappingFor(
6090 Handle<JSFunction> target) const {
6091 return NeedsWrapping(map_, target);
6092 }
6093
6094
// Emits the IR for a monomorphic named property access described by |info|.
// |object| is the receiver, |checked_object| the receiver after its map
// check, |value| the value to store (unused for loads). Returns the
// resulting value, or NULL when an accessor was inlined (or the stack
// overflowed).
HValue* HOptimizedGraphBuilder::BuildMonomorphicAccess(
    PropertyAccessInfo* info, HValue* object, HValue* checked_object,
    HValue* value, BailoutId ast_id, BailoutId return_id,
    bool can_inline_accessor) {
  HObjectAccess access = HObjectAccess::ForMap();  // bogus default
  if (info->GetJSObjectFieldAccess(&access)) {
    // Special JSObject fields (e.g. array length) are simple field loads.
    DCHECK(info->IsLoad());
    return New<HLoadNamedField>(object, checked_object, access);
  }

  // "prototype" on a constructor function has a dedicated instruction.
  if (info->name().is_identical_to(isolate()->factory()->prototype_string()) &&
      info->map()->IsJSFunctionMap() && info->map()->is_constructor()) {
    DCHECK(!info->map()->has_non_instance_prototype());
    return New<HLoadFunctionPrototype>(checked_object);
  }

  HValue* checked_holder = checked_object;
  if (info->has_holder()) {
    // Property lives on the prototype chain: guard the chain up to the
    // holder.
    Handle<JSObject> prototype(JSObject::cast(info->map()->prototype()));
    checked_holder = BuildCheckPrototypeMaps(prototype, info->holder());
  }

  if (!info->IsFound()) {
    // Absent property: loads yield undefined.
    DCHECK(info->IsLoad());
    return graph()->GetConstantUndefined();
  }

  if (info->IsData()) {
    // Plain data property stored in a field.
    if (info->IsLoad()) {
      return BuildLoadNamedField(info, checked_holder);
    } else {
      return BuildStoreNamedField(info, checked_object, value);
    }
  }

  if (info->IsTransition()) {
    // Store that adds the property via a map transition.
    DCHECK(!info->IsLoad());
    return BuildStoreNamedField(info, checked_object, value);
  }

  if (info->IsAccessorConstant()) {
    // If the accessor is an API function with a cached-property name, the
    // getter's result can be read from that cached property instead.
    MaybeHandle<Name> maybe_name =
        FunctionTemplateInfo::TryGetCachedPropertyName(isolate(),
                                                       info->accessor());
    if (!maybe_name.is_null()) {
      Handle<Name> name = maybe_name.ToHandleChecked();
      PropertyAccessInfo cache_info(this, LOAD, info->map(), name);
      // Load new target.
      if (cache_info.CanAccessMonomorphic()) {
        return BuildLoadNamedField(&cache_info, checked_object);
      }
    }

    // Receiver (and value, for setters) become the call arguments.
    Push(checked_object);
    int argument_count = 1;
    if (!info->IsLoad()) {
      argument_count = 2;
      Push(value);
    }

    if (info->accessor()->IsJSFunction() &&
        info->NeedsWrappingFor(Handle<JSFunction>::cast(info->accessor()))) {
      // Primitive receiver with a sloppy accessor: go through the generic
      // call path so the receiver gets wrapped.
      HValue* function = Add<HConstant>(info->accessor());
      PushArgumentsFromEnvironment(argument_count);
      return NewCallFunction(function, argument_count, TailCallMode::kDisallow,
                             ConvertReceiverMode::kNotNullOrUndefined,
                             TailCallMode::kDisallow);
    } else if (FLAG_inline_accessors && can_inline_accessor) {
      // Try to inline the getter/setter body directly.
      bool success = info->IsLoad()
          ? TryInlineGetter(info->accessor(), info->map(), ast_id, return_id)
          : TryInlineSetter(
              info->accessor(), info->map(), ast_id, return_id, value);
      if (success || HasStackOverflow()) return NULL;
    }

    // Fall back to a direct call of the known accessor function.
    PushArgumentsFromEnvironment(argument_count);
    if (!info->accessor()->IsJSFunction()) {
      Bailout(kInliningBailedOut);
      return nullptr;
    }
    return NewCallConstantFunction(Handle<JSFunction>::cast(info->accessor()),
                                   argument_count, TailCallMode::kDisallow,
                                   TailCallMode::kDisallow);
  }

  DCHECK(info->IsDataConstant());
  if (info->IsLoad()) {
    return New<HConstant>(info->constant());
  } else {
    // Storing to a constant property only succeeds when the stored value is
    // the same constant; emit a value check against it.
    return New<HCheckValue>(value, Handle<JSFunction>::cast(info->constant()));
  }
}
6187
6188
// Emits a polymorphic named load/store: a chain of map compares (with an
// optional smi/heap-number split) dispatching to per-map monomorphic
// accesses, all joined at a common block. Unhandled maps fall back to a
// generic IC, or a hard deopt when all feedback maps were handled.
void HOptimizedGraphBuilder::HandlePolymorphicNamedFieldAccess(
    PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot,
    BailoutId ast_id, BailoutId return_id, HValue* object, HValue* value,
    SmallMapList* maps, Handle<Name> name) {
  // Something did not match; must use a polymorphic load.
  int count = 0;
  HBasicBlock* join = NULL;
  HBasicBlock* number_block = NULL;
  bool handled_string = false;

  // First pass: determine whether any handled map is the heap-number map,
  // in which case smis must be routed into the number case as well.
  bool handle_smi = false;
  STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
  int i;
  for (i = 0; i < maps->length() && count < kMaxLoadPolymorphism; ++i) {
    PropertyAccessInfo info(this, access_type, maps->at(i), name);
    if (info.IsStringType()) {
      // All string maps share one HIsStringAndBranch case; count it once.
      if (handled_string) continue;
      handled_string = true;
    }
    if (info.CanAccessMonomorphic()) {
      count++;
      if (info.IsNumberType()) {
        handle_smi = true;
        break;
      }
    }
  }

  if (i < maps->length()) {
    // The first pass exited early (limit reached or a number map found):
    // clear the maps so the second pass takes the generic fallback, and use
    // count = -1 so the "handled all maps" deopt path below cannot trigger.
    count = -1;
    maps->Clear();
  } else {
    count = 0;
  }
  HControlInstruction* smi_check = NULL;
  handled_string = false;

  // Second pass: emit the dispatch chain and the per-map accesses.
  for (i = 0; i < maps->length() && count < kMaxLoadPolymorphism; ++i) {
    PropertyAccessInfo info(this, access_type, maps->at(i), name);
    if (info.IsStringType()) {
      if (handled_string) continue;
      handled_string = true;
    }
    if (!info.CanAccessMonomorphic()) continue;

    if (count == 0) {
      join = graph()->CreateBasicBlock();
      if (handle_smi) {
        // Split off smis and route them directly to the number block.
        HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
        HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
        number_block = graph()->CreateBasicBlock();
        smi_check = New<HIsSmiAndBranch>(
            object, empty_smi_block, not_smi_block);
        FinishCurrentBlock(smi_check);
        GotoNoSimulate(empty_smi_block, number_block);
        set_current_block(not_smi_block);
      } else {
        BuildCheckHeapObject(object);
      }
    }
    ++count;
    HBasicBlock* if_true = graph()->CreateBasicBlock();
    HBasicBlock* if_false = graph()->CreateBasicBlock();
    HUnaryControlInstruction* compare;

    HValue* dependency;
    if (info.IsNumberType()) {
      Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
      compare = New<HCompareMap>(object, heap_number_map, if_true, if_false);
      dependency = smi_check;
    } else if (info.IsStringType()) {
      compare = New<HIsStringAndBranch>(object, if_true, if_false);
      dependency = compare;
    } else {
      compare = New<HCompareMap>(object, info.map(), if_true, if_false);
      dependency = compare;
    }
    FinishCurrentBlock(compare);

    if (info.IsNumberType()) {
      // Merge the heap-number path with the earlier smi path.
      GotoNoSimulate(if_true, number_block);
      if_true = number_block;
    }

    set_current_block(if_true);

    HValue* access =
        BuildMonomorphicAccess(&info, object, dependency, value, ast_id,
                               return_id, FLAG_polymorphic_inlining);

    HValue* result = NULL;
    switch (access_type) {
      case LOAD:
        result = access;
        break;
      case STORE:
        // A store expression evaluates to the stored value.
        result = value;
        break;
    }

    if (access == NULL) {
      // NULL means the access was inlined, or the stack overflowed.
      if (HasStackOverflow()) return;
    } else {
      if (access->IsInstruction()) {
        HInstruction* instr = HInstruction::cast(access);
        if (!instr->IsLinked()) AddInstruction(instr);
      }
      if (!ast_context()->IsEffect()) Push(result);
    }

    if (current_block() != NULL) Goto(join);
    set_current_block(if_false);
  }

  // Finish up. Unconditionally deoptimize if we've handled all the maps we
  // know about and do not want to handle ones we've never seen. Otherwise
  // use a generic IC.
  if (count == maps->length() && FLAG_deoptimize_uncommon_cases) {
    FinishExitWithHardDeoptimization(
        DeoptimizeReason::kUnknownMapInPolymorphicAccess);
  } else {
    HInstruction* instr =
        BuildNamedGeneric(access_type, expr, slot, object, name, value);
    AddInstruction(instr);
    if (!ast_context()->IsEffect()) Push(access_type == LOAD ? instr : value);

    if (join != NULL) {
      Goto(join);
    } else {
      // The generic path was the only one: simulate and return directly.
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
      return;
    }
  }

  DCHECK(join != NULL);
  if (join->HasPredecessor()) {
    join->SetJoinId(ast_id);
    set_current_block(join);
    if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
  } else {
    // No path reached the join block: the graph ends here.
    set_current_block(NULL);
  }
}
6333
// Computes the set of receiver maps for |expr| into |*t| and returns
// whether the access is monomorphic and inlinable. With feedback present,
// filters out maps that cannot belong to the receiver's map tree; without
// feedback, tries to infer the map by walking the current block backwards
// to prove no map-changing store intervened since the receiver was defined.
static bool ComputeReceiverTypes(Expression* expr, HValue* receiver,
                                 SmallMapList** t,
                                 HOptimizedGraphBuilder* builder) {
  Zone* zone = builder->zone();
  SmallMapList* maps = expr->GetReceiverTypes();
  *t = maps;
  bool monomorphic = expr->IsMonomorphic();
  if (maps != nullptr && receiver->HasMonomorphicJSObjectType()) {
    if (maps->length() > 0) {
      // Drop feedback maps unreachable from this receiver's root map.
      Map* root_map = receiver->GetMonomorphicJSObjectMap()->FindRootMap();
      maps->FilterForPossibleTransitions(root_map);
      monomorphic = maps->length() == 1;
    } else {
      // No type feedback, see if we can infer the type. This is safely
      // possible if the receiver had a known map at some point, and no
      // map-changing stores have happened to it since.
      Handle<Map> candidate_map = receiver->GetMonomorphicJSObjectMap();
      for (HInstruction* current = builder->current_block()->last();
           current != nullptr; current = current->previous()) {
        if (current->IsBlockEntry()) break;
        if (current->CheckChangesFlag(kMaps)) {
          // Only allow map changes that store the candidate map. We don't
          // need to care which object the map is being written into.
          if (!current->IsStoreNamedField()) break;
          HStoreNamedField* map_change = HStoreNamedField::cast(current);
          if (!map_change->value()->IsConstant()) break;
          HConstant* map_constant = HConstant::cast(map_change->value());
          if (!map_constant->representation().IsTagged()) break;
          Handle<Object> map = map_constant->handle(builder->isolate());
          if (!map.is_identical_to(candidate_map)) break;
        }
        if (current == receiver) {
          // We made it all the way back to the receiver without encountering
          // a map change! So we can assume that the receiver still has the
          // candidate_map we know about.
          maps->Add(candidate_map, zone);
          monomorphic = true;
          break;
        }
      }
    }
  }
  return monomorphic && CanInlinePropertyAccess(maps->first());
}
6378
6379
AreStringTypes(SmallMapList * maps)6380 static bool AreStringTypes(SmallMapList* maps) {
6381 for (int i = 0; i < maps->length(); i++) {
6382 if (maps->at(i)->instance_type() >= FIRST_NONSTRING_TYPE) return false;
6383 }
6384 return true;
6385 }
6386
// Emits the store for a property assignment whose receiver/(key)/value have
// already been pushed on the environment stack: keyed stores go through
// HandleKeyedElementAccess, named stores through BuildNamedAccess. The
// stored value becomes the expression's result.
void HOptimizedGraphBuilder::BuildStore(Expression* expr, Property* prop,
                                        FeedbackVectorSlot slot,
                                        BailoutId ast_id, BailoutId return_id,
                                        bool is_uninitialized) {
  if (!prop->key()->IsPropertyName()) {
    // Keyed store.
    HValue* value = Pop();
    HValue* key = Pop();
    HValue* object = Pop();
    bool has_side_effects = false;
    HValue* result =
        HandleKeyedElementAccess(object, key, value, expr, slot, ast_id,
                                 return_id, STORE, &has_side_effects);
    if (has_side_effects) {
      // Keep |value| on the stack across the simulate so the deopt
      // environment is correct, then drop it again.
      if (!ast_context()->IsEffect()) Push(value);
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) Drop(1);
    }
    if (result == NULL) return;
    return ast_context()->ReturnValue(value);
  }

  // Named store.
  HValue* value = Pop();
  HValue* object = Pop();

  Literal* key = prop->key()->AsLiteral();
  Handle<String> name = Handle<String>::cast(key->value());
  DCHECK(!name.is_null());

  HValue* access = BuildNamedAccess(STORE, ast_id, return_id, expr, slot,
                                    object, name, value, is_uninitialized);
  if (access == NULL) return;

  if (!ast_context()->IsEffect()) Push(value);
  if (access->IsInstruction()) AddInstruction(HInstruction::cast(access));
  if (access->HasObservableSideEffects()) {
    Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
  if (!ast_context()->IsEffect()) Drop(1);
  return ast_context()->ReturnValue(value);
}
6429
6430
HandlePropertyAssignment(Assignment * expr)6431 void HOptimizedGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
6432 Property* prop = expr->target()->AsProperty();
6433 DCHECK(prop != NULL);
6434 CHECK_ALIVE(VisitForValue(prop->obj()));
6435 if (!prop->key()->IsPropertyName()) {
6436 CHECK_ALIVE(VisitForValue(prop->key()));
6437 }
6438 CHECK_ALIVE(VisitForValue(expr->value()));
6439 BuildStore(expr, prop, expr->AssignmentSlot(), expr->id(),
6440 expr->AssignmentId(), expr->IsUninitialized());
6441 }
6442
// Builds an inlined store to a global property backed by a property cell,
// registering a compilation dependency on the cell. Constant cells get an
// equality check (deopt on mismatch); constant-type cells get a
// representation or stable-map check on the new value. Returns the (not yet
// added) store instruction, or nullptr after a bailout.
HInstruction* HOptimizedGraphBuilder::InlineGlobalPropertyStore(
    LookupIterator* it, HValue* value, BailoutId ast_id) {
  Handle<PropertyCell> cell = it->GetPropertyCell();
  top_info()->dependencies()->AssumePropertyCell(cell);
  auto cell_type = it->property_details().cell_type();
  if (cell_type == PropertyCellType::kConstant ||
      cell_type == PropertyCellType::kUndefined) {
    // Constant cell: storing anything but the current value must deopt.
    Handle<Object> constant(cell->value(), isolate());
    if (value->IsConstant()) {
      // Statically known mismatch: unconditional eager deopt.
      HConstant* c_value = HConstant::cast(value);
      if (!constant.is_identical_to(c_value->handle(isolate()))) {
        Add<HDeoptimize>(DeoptimizeReason::kConstantGlobalVariableAssignment,
                         Deoptimizer::EAGER);
      }
    } else {
      // Runtime check that the stored value equals the cell's constant.
      HValue* c_constant = Add<HConstant>(constant);
      IfBuilder builder(this);
      if (constant->IsNumber()) {
        builder.If<HCompareNumericAndBranch>(value, c_constant, Token::EQ);
      } else {
        builder.If<HCompareObjectEqAndBranch>(value, c_constant);
      }
      builder.Then();
      builder.Else();
      Add<HDeoptimize>(DeoptimizeReason::kConstantGlobalVariableAssignment,
                       Deoptimizer::EAGER);
      builder.End();
    }
  }
  HConstant* cell_constant = Add<HConstant>(cell);
  auto access = HObjectAccess::ForPropertyCellValue();
  if (cell_type == PropertyCellType::kConstantType) {
    switch (cell->GetConstantType()) {
      case PropertyCellConstantType::kSmi:
        access = access.WithRepresentation(Representation::Smi());
        break;
      case PropertyCellConstantType::kStableMap: {
        // First check that the previous value of the {cell} still has the
        // map that we are about to check the new {value} for. If not, then
        // the stable map assumption was invalidated and we cannot continue
        // with the optimized code.
        Handle<HeapObject> cell_value(HeapObject::cast(cell->value()));
        Handle<Map> cell_value_map(cell_value->map());
        if (!cell_value_map->is_stable()) {
          Bailout(kUnstableConstantTypeHeapObject);
          return nullptr;
        }
        top_info()->dependencies()->AssumeMapStable(cell_value_map);
        // Now check that the new {value} is a HeapObject with the same map
        Add<HCheckHeapObject>(value);
        value = Add<HCheckMaps>(value, cell_value_map);
        access = access.WithRepresentation(Representation::HeapObject());
        break;
      }
    }
  }
  // The store writes the cell's value slot, not an in-object field.
  HInstruction* instr = New<HStoreNamedField>(cell_constant, access, value);
  instr->ClearChangesFlag(kInobjectFields);
  instr->SetChangesFlag(kGlobalVars);
  return instr;
}
6504
// Because not every expression has a position and there is no common
// superclass of Assignment and CountOperation, we cannot just pass the
// owning expression instead of position and ast_id separately.
// Stores |value| into the unallocated (global) variable |var|: a script
// context slot if the variable is declared there, otherwise an inlined
// property-cell store when possible, falling back to a StoreIC call.
void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(
    Variable* var, HValue* value, FeedbackVectorSlot slot, BailoutId ast_id) {
  Handle<JSGlobalObject> global(current_info()->global_object());

  // Lookup in script contexts.
  {
    Handle<ScriptContextTable> script_contexts(
        global->native_context()->script_context_table());
    ScriptContextTable::LookupResult lookup;
    if (ScriptContextTable::Lookup(script_contexts, var->name(), &lookup)) {
      if (lookup.mode == CONST) {
        return Bailout(kNonInitializerAssignmentToConst);
      }
      Handle<Context> script_context =
          ScriptContextTable::GetContext(script_contexts, lookup.context_index);

      Handle<Object> current_value =
          FixedArray::get(*script_context, lookup.slot_index, isolate());

      // If the values is not the hole, it will stay initialized,
      // so no need to generate a check.
      if (current_value->IsTheHole(isolate())) {
        return Bailout(kReferenceToUninitializedVariable);
      }

      HStoreNamedField* instr = Add<HStoreNamedField>(
          Add<HConstant>(script_context),
          HObjectAccess::ForContextSlot(lookup.slot_index), value);
      USE(instr);
      DCHECK(instr->HasObservableSideEffects());
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      return;
    }
  }

  LookupIterator it(global, var->name(), LookupIterator::OWN);
  if (CanInlineGlobalPropertyAccess(var, &it, STORE)) {
    // Fast path: store directly into the global's property cell.
    HInstruction* instr = InlineGlobalPropertyStore(&it, value, ast_id);
    if (!instr) return;
    AddInstruction(instr);
    if (instr->HasObservableSideEffects()) {
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
    }
  } else {
    // Generic path: call the StoreIC stub with the feedback vector/slot.
    HValue* global_object = Add<HLoadNamedField>(
        BuildGetNativeContext(), nullptr,
        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX));
    Handle<TypeFeedbackVector> vector =
        handle(current_feedback_vector(), isolate());
    HValue* name = Add<HConstant>(var->name());
    HValue* vector_value = Add<HConstant>(vector);
    HValue* slot_value = Add<HConstant>(vector->GetIndex(slot));
    Callable callable = CodeFactory::StoreICInOptimizedCode(
        isolate(), function_language_mode());
    HValue* stub = Add<HConstant>(callable.code());
    HValue* values[] = {global_object, name, value, slot_value, vector_value};
    HCallWithDescriptor* instr = Add<HCallWithDescriptor>(
        Code::STORE_IC, stub, 0, callable.descriptor(), ArrayVector(values));
    USE(instr);
    DCHECK(instr->HasObservableSideEffects());
    Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}
6571
6572
// Builds a compound assignment (e.g. x += y): evaluates the binary
// operation (which includes loading the target), then stores the result
// back into the variable or property target.
void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
  Expression* target = expr->target();
  VariableProxy* proxy = target->AsVariableProxy();
  Property* prop = target->AsProperty();
  DCHECK(proxy == NULL || prop == NULL);

  // We have a second position recorded in the FullCodeGenerator to have
  // type feedback for the binary operation.
  BinaryOperation* operation = expr->binary_operation();

  if (proxy != NULL) {
    // Variable target.
    Variable* var = proxy->var();
    if (var->mode() == LET) {
      return Bailout(kUnsupportedLetCompoundAssignment);
    }

    CHECK_ALIVE(VisitForValue(operation));

    switch (var->location()) {
      case VariableLocation::UNALLOCATED:
        HandleGlobalVariableAssignment(var, Top(), expr->AssignmentSlot(),
                                       expr->AssignmentId());
        break;

      case VariableLocation::PARAMETER:
      case VariableLocation::LOCAL:
        if (var->mode() == CONST) {
          return Bailout(kNonInitializerAssignmentToConst);
        }
        BindIfLive(var, Top());
        break;

      case VariableLocation::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function
        // using the arguments object. We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will be allocated to context slots. We have no
          // direct way to detect that the variable is a parameter so we do
          // a linear search of the parameter variables.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
            }
          }
        }

        HStoreContextSlot::Mode mode;

        switch (var->mode()) {
          case LET:
            // Let bindings require a hole check on store.
            mode = HStoreContextSlot::kCheckDeoptimize;
            break;
          case CONST:
            // Both branches return, so there is no fallthrough to default.
            if (var->throw_on_const_assignment(function_language_mode())) {
              return Bailout(kNonInitializerAssignmentToConst);
            } else {
              // Sloppy const assignment is a no-op yielding the value.
              return ast_context()->ReturnValue(Pop());
            }
          default:
            mode = HStoreContextSlot::kNoCheck;
        }

        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot* instr = Add<HStoreContextSlot>(
            context, var->index(), mode, Top());
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        break;
      }

      case VariableLocation::LOOKUP:
        return Bailout(kCompoundAssignmentToLookupSlot);

      case VariableLocation::MODULE:
        UNREACHABLE();
    }
    return ast_context()->ReturnValue(Pop());

  } else if (prop != NULL) {
    // Property target: load the old value, apply the operation, store back.
    CHECK_ALIVE(VisitForValue(prop->obj()));
    HValue* object = Top();
    HValue* key = NULL;
    if (!prop->key()->IsPropertyName() || prop->IsStringAccess()) {
      CHECK_ALIVE(VisitForValue(prop->key()));
      key = Top();
    }

    CHECK_ALIVE(PushLoad(prop, object, key));

    CHECK_ALIVE(VisitForValue(expr->value()));
    HValue* right = Pop();
    HValue* left = Pop();

    Push(BuildBinaryOperation(operation, left, right, PUSH_BEFORE_SIMULATE));

    BuildStore(expr, prop, expr->AssignmentSlot(), expr->id(),
               expr->AssignmentId(), expr->IsUninitialized());
  } else {
    return Bailout(kInvalidLhsInCompoundAssignment);
  }
}
6677
6678
// Builds an assignment expression: dispatches compound assignments to
// HandleCompoundAssignment, property targets to HandlePropertyAssignment,
// and handles variable targets inline per variable location.
void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  VariableProxy* proxy = expr->target()->AsVariableProxy();
  Property* prop = expr->target()->AsProperty();
  DCHECK(proxy == NULL || prop == NULL);

  if (expr->is_compound()) {
    HandleCompoundAssignment(expr);
    return;
  }

  if (prop != NULL) {
    HandlePropertyAssignment(expr);
  } else if (proxy != NULL) {
    Variable* var = proxy->var();

    if (var->mode() == CONST) {
      if (expr->op() != Token::INIT) {
        // Non-initializing const assignment: bail out when it would throw;
        // otherwise it is a no-op that yields the right-hand side.
        if (var->throw_on_const_assignment(function_language_mode())) {
          return Bailout(kNonInitializerAssignmentToConst);
        } else {
          CHECK_ALIVE(VisitForValue(expr->value()));
          return ast_context()->ReturnValue(Pop());
        }
      }
    }

    // Handle the assignment.
    switch (var->location()) {
      case VariableLocation::UNALLOCATED:
        CHECK_ALIVE(VisitForValue(expr->value()));
        HandleGlobalVariableAssignment(var, Top(), expr->AssignmentSlot(),
                                       expr->AssignmentId());
        return ast_context()->ReturnValue(Pop());

      case VariableLocation::PARAMETER:
      case VariableLocation::LOCAL: {
        // Perform an initialization check for let declared variables
        // or parameters.
        if (var->mode() == LET && expr->op() == Token::ASSIGN) {
          HValue* env_value = environment()->Lookup(var);
          if (env_value == graph()->GetConstantHole()) {
            return Bailout(kAssignmentToLetVariableBeforeInitialization);
          }
        }
        // We do not allow the arguments object to occur in a context where it
        // may escape, but assignments to stack-allocated locals are
        // permitted.
        CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
        HValue* value = Pop();
        BindIfLive(var, value);
        return ast_context()->ReturnValue(value);
      }

      case VariableLocation::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function using
        // the arguments object. We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will rewrite to context slots. We have no direct way
          // to detect that the variable is a parameter.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              return Bailout(kAssignmentToParameterInArgumentsObject);
            }
          }
        }

        CHECK_ALIVE(VisitForValue(expr->value()));
        HStoreContextSlot::Mode mode;
        if (expr->op() == Token::ASSIGN) {
          switch (var->mode()) {
            case LET:
              // Let bindings require a hole check on store.
              mode = HStoreContextSlot::kCheckDeoptimize;
              break;
            case CONST:
              // If we reached this point, the only possibility
              // is a sloppy assignment to a function name.
              DCHECK(function_language_mode() == SLOPPY &&
                     !var->throw_on_const_assignment(SLOPPY));
              return ast_context()->ReturnValue(Pop());
            default:
              mode = HStoreContextSlot::kNoCheck;
          }
        } else {
          DCHECK_EQ(Token::INIT, expr->op());
          mode = HStoreContextSlot::kNoCheck;
        }

        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot* instr = Add<HStoreContextSlot>(
            context, var->index(), mode, Top());
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        return ast_context()->ReturnValue(Pop());
      }

      case VariableLocation::LOOKUP:
        return Bailout(kAssignmentToLOOKUPVariable);

      case VariableLocation::MODULE:
        UNREACHABLE();
    }
  } else {
    return Bailout(kInvalidLeftHandSideInAssignment);
  }
}
6791
6792
void HOptimizedGraphBuilder::VisitYield(Yield* expr) {
  // Generators are not optimized, so we should never get here: a Yield node
  // reaching Crankshaft indicates a compiler-pipeline bug.
  UNREACHABLE();
}
6797
6798
// Builds a throw expression: evaluates the exception value, calls
// Runtime::kThrow, and (outside of inlined frames) terminates the current
// block with an abnormal exit.
void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (!ast_context()->IsEffect()) {
    // The parser turns invalid left-hand sides in assignments into throw
    // statements, which may not be in effect contexts. We might still try
    // to optimize such functions; bail out now if we do.
    return Bailout(kInvalidLeftHandSideInAssignment);
  }
  CHECK_ALIVE(VisitForValue(expr->exception()));

  HValue* value = environment()->Pop();
  if (!is_tracking_positions()) SetSourcePosition(expr->position());
  Add<HPushArguments>(value);
  Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kThrow), 1);
  Add<HSimulate>(expr->id());

  // If the throw definitely exits the function, we can finish with a dummy
  // control flow at this point. This is not the case if the throw is inside
  // an inlined function which may be replaced.
  if (call_context() == NULL) {
    FinishExitCurrentBlock(New<HAbnormalExit>());
  }
}
6824
6825
AddLoadStringInstanceType(HValue * string)6826 HInstruction* HGraphBuilder::AddLoadStringInstanceType(HValue* string) {
6827 if (string->IsConstant()) {
6828 HConstant* c_string = HConstant::cast(string);
6829 if (c_string->HasStringValue()) {
6830 return Add<HConstant>(c_string->StringValue()->map()->instance_type());
6831 }
6832 }
6833 return Add<HLoadNamedField>(
6834 Add<HLoadNamedField>(string, nullptr, HObjectAccess::ForMap()), nullptr,
6835 HObjectAccess::ForMapInstanceType());
6836 }
6837
6838
AddLoadStringLength(HValue * string)6839 HInstruction* HGraphBuilder::AddLoadStringLength(HValue* string) {
6840 return AddInstruction(BuildLoadStringLength(string));
6841 }
6842
6843
BuildLoadStringLength(HValue * string)6844 HInstruction* HGraphBuilder::BuildLoadStringLength(HValue* string) {
6845 if (string->IsConstant()) {
6846 HConstant* c_string = HConstant::cast(string);
6847 if (c_string->HasStringValue()) {
6848 return New<HConstant>(c_string->StringValue()->length());
6849 }
6850 }
6851 return New<HLoadNamedField>(string, nullptr,
6852 HObjectAccess::ForStringLength());
6853 }
6854
6855
// Emits a generic (IC-based) named property load or store. The access is
// routed through the LoadIC/StoreIC (or their keyed variants) code stubs and
// shares type feedback with full code via the feedback vector slot.
HInstruction* HOptimizedGraphBuilder::BuildNamedGeneric(
    PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot,
    HValue* object, Handle<Name> name, HValue* value, bool is_uninitialized) {
  if (is_uninitialized) {
    // No type feedback was collected for this site; emit a soft deopt so the
    // function can be re-optimized once feedback exists.
    Add<HDeoptimize>(
        DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess,
        Deoptimizer::SOFT);
  }
  Handle<TypeFeedbackVector> vector(current_feedback_vector(), isolate());

  // Constants passed to the IC stub: property name, feedback vector, and the
  // slot index within the vector.
  HValue* key = Add<HConstant>(name);
  HValue* vector_value = Add<HConstant>(vector);
  HValue* slot_value = Add<HConstant>(vector->GetIndex(slot));

  if (access_type == LOAD) {
    HValue* values[] = {object, key, slot_value, vector_value};
    if (!expr->AsProperty()->key()->IsPropertyName()) {
      // It's possible that a keyed load of a constant string was converted
      // to a named load. Here, at the last minute, we need to make sure to
      // use a generic Keyed Load if we are using the type vector, because
      // it has to share information with full code.
      Callable callable = CodeFactory::KeyedLoadICInOptimizedCode(isolate());
      HValue* stub = Add<HConstant>(callable.code());
      HCallWithDescriptor* result =
          New<HCallWithDescriptor>(Code::KEYED_LOAD_IC, stub, 0,
                                   callable.descriptor(), ArrayVector(values));
      return result;
    }
    Callable callable = CodeFactory::LoadICInOptimizedCode(isolate());
    HValue* stub = Add<HConstant>(callable.code());
    HCallWithDescriptor* result = New<HCallWithDescriptor>(
        Code::LOAD_IC, stub, 0, callable.descriptor(), ArrayVector(values));
    return result;

  } else {
    // Stores additionally pass the value being written.
    HValue* values[] = {object, key, value, slot_value, vector_value};
    if (vector->GetKind(slot) == FeedbackVectorSlotKind::KEYED_STORE_IC) {
      // It's possible that a keyed store of a constant string was converted
      // to a named store. Here, at the last minute, we need to make sure to
      // use a generic Keyed Store if we are using the type vector, because
      // it has to share information with full code.
      Callable callable = CodeFactory::KeyedStoreICInOptimizedCode(
          isolate(), function_language_mode());
      HValue* stub = Add<HConstant>(callable.code());
      HCallWithDescriptor* result =
          New<HCallWithDescriptor>(Code::KEYED_STORE_IC, stub, 0,
                                   callable.descriptor(), ArrayVector(values));
      return result;
    }
    Callable callable = CodeFactory::StoreICInOptimizedCode(
        isolate(), function_language_mode());
    HValue* stub = Add<HConstant>(callable.code());
    HCallWithDescriptor* result = New<HCallWithDescriptor>(
        Code::STORE_IC, stub, 0, callable.descriptor(), ArrayVector(values));
    return result;
  }
}
6913
6914
// Emits a generic (IC-based) keyed element load or store through the
// KeyedLoadIC/KeyedStoreIC stubs, passing the feedback vector and slot so
// feedback is shared with full code.
HInstruction* HOptimizedGraphBuilder::BuildKeyedGeneric(
    PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot,
    HValue* object, HValue* key, HValue* value) {
  Handle<TypeFeedbackVector> vector(current_feedback_vector(), isolate());
  HValue* vector_value = Add<HConstant>(vector);
  HValue* slot_value = Add<HConstant>(vector->GetIndex(slot));

  if (access_type == LOAD) {
    HValue* values[] = {object, key, slot_value, vector_value};

    Callable callable = CodeFactory::KeyedLoadICInOptimizedCode(isolate());
    HValue* stub = Add<HConstant>(callable.code());
    HCallWithDescriptor* result =
        New<HCallWithDescriptor>(Code::KEYED_LOAD_IC, stub, 0,
                                 callable.descriptor(), ArrayVector(values));
    return result;
  } else {
    // Stores additionally pass the value being written.
    HValue* values[] = {object, key, value, slot_value, vector_value};

    Callable callable = CodeFactory::KeyedStoreICInOptimizedCode(
        isolate(), function_language_mode());
    HValue* stub = Add<HConstant>(callable.code());
    HCallWithDescriptor* result =
        New<HCallWithDescriptor>(Code::KEYED_STORE_IC, stub, 0,
                                 callable.descriptor(), ArrayVector(values));
    return result;
  }
}
6943
6944
// Decides how holes may be handled when loading elements for the given map,
// and, when hole elision is possible, installs the prototype-chain checks
// and graph dependency that keep that decision valid.
LoadKeyedHoleMode HOptimizedGraphBuilder::BuildKeyedHoleMode(Handle<Map> map) {
  // Loads from a "stock" fast holey double arrays can elide the hole check.
  // Loads from a "stock" fast holey array can convert the hole to undefined
  // with impunity.
  LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
  bool holey_double_elements =
      *map == isolate()->get_initial_js_array_map(FAST_HOLEY_DOUBLE_ELEMENTS);
  bool holey_elements =
      *map == isolate()->get_initial_js_array_map(FAST_HOLEY_ELEMENTS);
  if ((holey_double_elements || holey_elements) &&
      isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
    load_mode =
        holey_double_elements ? ALLOW_RETURN_HOLE : CONVERT_HOLE_TO_UNDEFINED;

    // Guard the whole chain up to the initial Object.prototype so that no
    // prototype can later supply an element at a holey index.
    Handle<JSObject> prototype(JSObject::cast(map->prototype()), isolate());
    Handle<JSObject> object_prototype = isolate()->initial_object_prototype();
    BuildCheckPrototypeMaps(prototype, object_prototype);
    graph()->MarkDependsOnEmptyArrayProtoElements();
  }
  return load_mode;
}
6966
6967
// Builds an element load/store for a single known receiver map: checks the
// map, guards the prototype chain for stores, and then emits the unchecked
// monomorphic element access.
HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess(
    HValue* object,
    HValue* key,
    HValue* val,
    HValue* dependency,
    Handle<Map> map,
    PropertyAccessType access_type,
    KeyedAccessStoreMode store_mode) {
  HCheckMaps* checked_object = Add<HCheckMaps>(object, map, dependency);

  if (access_type == STORE && map->prototype()->IsJSObject()) {
    // monomorphic stores need a prototype chain check because shape
    // changes could allow callbacks on elements in the chain that
    // aren't compatible with monomorphic keyed stores.
    PrototypeIterator iter(map);
    JSObject* holder = NULL;
    // Walk to the last JSObject on the chain; that's the holder to guard to.
    while (!iter.IsAtEnd()) {
      // JSProxies can't occur here because we wouldn't have installed a
      // non-generic IC if there were any.
      holder = *PrototypeIterator::GetCurrent<JSObject>(iter);
      iter.Advance();
    }
    DCHECK(holder && holder->IsJSObject());

    BuildCheckPrototypeMaps(handle(JSObject::cast(map->prototype())),
                            Handle<JSObject>(holder));
  }

  LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
  return BuildUncheckedMonomorphicElementAccess(
      checked_object, key, val,
      map->instance_type() == JS_ARRAY_TYPE,
      map->elements_kind(), access_type,
      load_mode, store_mode);
}
7003
7004
CanInlineElementAccess(Handle<Map> map)7005 static bool CanInlineElementAccess(Handle<Map> map) {
7006 return map->IsJSObjectMap() &&
7007 (map->has_fast_elements() || map->has_fixed_typed_array_elements()) &&
7008 !map->has_indexed_interceptor() && !map->is_access_check_needed();
7009 }
7010
7011
// Attempts to replace a polymorphic element load over compatible maps with a
// single load keyed on the most general elements kind seen. Returns NULL when
// the maps cannot be consolidated (mixed JSArray/JSObject receivers, mixed
// double vs. smi/object elements, or a non-inlinable map).
HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
    HValue* object,
    HValue* key,
    HValue* val,
    SmallMapList* maps) {
  // For polymorphic loads of similar elements kinds (i.e. all tagged or all
  // double), always use the "worst case" code without a transition. This is
  // much faster than transitioning the elements to the worst case, trading a
  // HTransitionElements for a HCheckMaps, and avoiding mutation of the array.
  bool has_double_maps = false;
  bool has_smi_or_object_maps = false;
  bool has_js_array_access = false;
  bool has_non_js_array_access = false;
  bool has_seen_holey_elements = false;
  Handle<Map> most_general_consolidated_map;
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    if (!CanInlineElementAccess(map)) return NULL;
    // Don't allow mixing of JSArrays with JSObjects.
    if (map->instance_type() == JS_ARRAY_TYPE) {
      if (has_non_js_array_access) return NULL;
      has_js_array_access = true;
    } else if (has_js_array_access) {
      return NULL;
    } else {
      has_non_js_array_access = true;
    }
    // Don't allow mixed, incompatible elements kinds.
    if (map->has_fast_double_elements()) {
      if (has_smi_or_object_maps) return NULL;
      has_double_maps = true;
    } else if (map->has_fast_smi_or_object_elements()) {
      if (has_double_maps) return NULL;
      has_smi_or_object_maps = true;
    } else {
      return NULL;
    }
    // Remember if we've ever seen holey elements.
    if (IsHoleyElementsKind(map->elements_kind())) {
      has_seen_holey_elements = true;
    }
    // Remember the most general elements kind, the code for its load will
    // properly handle all of the more specific cases.
    if ((i == 0) || IsMoreGeneralElementsKindTransition(
            most_general_consolidated_map->elements_kind(),
            map->elements_kind())) {
      most_general_consolidated_map = map;
    }
  }
  if (!has_double_maps && !has_smi_or_object_maps) return NULL;

  HCheckMaps* checked_object = Add<HCheckMaps>(object, maps);
  // FAST_ELEMENTS is considered more general than FAST_HOLEY_SMI_ELEMENTS.
  // If we've seen both, the consolidated load must use FAST_HOLEY_ELEMENTS.
  ElementsKind consolidated_elements_kind = has_seen_holey_elements
      ? GetHoleyElementsKind(most_general_consolidated_map->elements_kind())
      : most_general_consolidated_map->elements_kind();
  LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
  if (has_seen_holey_elements) {
    // Make sure that all of the maps we are handling have the initial array
    // prototype.
    bool saw_non_array_prototype = false;
    for (int i = 0; i < maps->length(); ++i) {
      Handle<Map> map = maps->at(i);
      if (map->prototype() != *isolate()->initial_array_prototype()) {
        // We can't guarantee that loading the hole is safe. The prototype may
        // have an element at this position.
        saw_non_array_prototype = true;
        break;
      }
    }

    if (!saw_non_array_prototype) {
      // Compute the hole mode from the initial map of the consolidated kind;
      // this also installs the prototype check for that map.
      Handle<Map> holey_map = handle(
          isolate()->get_initial_js_array_map(consolidated_elements_kind));
      load_mode = BuildKeyedHoleMode(holey_map);
      if (load_mode != NEVER_RETURN_HOLE) {
        for (int i = 0; i < maps->length(); ++i) {
          Handle<Map> map = maps->at(i);
          // The prototype check was already done for the holey map in
          // BuildKeyedHoleMode.
          if (!map.is_identical_to(holey_map)) {
            Handle<JSObject> prototype(JSObject::cast(map->prototype()),
                                       isolate());
            Handle<JSObject> object_prototype =
                isolate()->initial_object_prototype();
            BuildCheckPrototypeMaps(prototype, object_prototype);
          }
        }
      }
    }
  }
  HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
      checked_object, key, val,
      most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE,
      consolidated_elements_kind, LOAD, load_mode, STANDARD_STORE);
  return instr;
}
7110
7111
// Builds an element access for multiple receiver maps. Tries a consolidated
// load first, then transitions maps where possible, and finally either emits
// one monomorphic access (single remaining map) or a map-dispatch chain of
// per-map accesses joined in a common block, deopting on unknown maps.
HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
    Expression* expr, FeedbackVectorSlot slot, HValue* object, HValue* key,
    HValue* val, SmallMapList* maps, PropertyAccessType access_type,
    KeyedAccessStoreMode store_mode, bool* has_side_effects) {
  *has_side_effects = false;
  BuildCheckHeapObject(object);

  if (access_type == LOAD) {
    HInstruction* consolidated_load =
        TryBuildConsolidatedElementLoad(object, key, val, maps);
    if (consolidated_load != NULL) {
      *has_side_effects |= consolidated_load->HasObservableSideEffects();
      return consolidated_load;
    }
  }

  // Elements_kind transition support.
  MapHandleList transition_target(maps->length());
  // Collect possible transition targets.
  MapHandleList possible_transitioned_maps(maps->length());
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    // Loads from strings or loads with a mix of string and non-string maps
    // shouldn't be handled polymorphically.
    DCHECK(access_type != LOAD || !map->IsStringMap());
    ElementsKind elements_kind = map->elements_kind();
    if (CanInlineElementAccess(map) && IsFastElementsKind(elements_kind) &&
        elements_kind != GetInitialFastElementsKind()) {
      possible_transitioned_maps.Add(map);
    }
    if (IsSloppyArgumentsElements(elements_kind)) {
      // Sloppy-arguments elements are not supported inline; fall back to the
      // generic keyed IC for the whole access.
      HInstruction* result =
          BuildKeyedGeneric(access_type, expr, slot, object, key, val);
      *has_side_effects = result->HasObservableSideEffects();
      return AddInstruction(result);
    }
  }
  // Get transition target for each map (NULL == no transition).
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    Map* transitioned_map =
        map->FindElementsKindTransitionedMap(&possible_transitioned_maps);
    if (transitioned_map != nullptr) {
      transition_target.Add(handle(transitioned_map));
    } else {
      transition_target.Add(Handle<Map>());
    }
  }

  MapHandleList untransitionable_maps(maps->length());
  HTransitionElementsKind* transition = NULL;
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    DCHECK(map->IsMap());
    if (!transition_target.at(i).is_null()) {
      DCHECK(Map::IsValidElementsTransition(
          map->elements_kind(),
          transition_target.at(i)->elements_kind()));
      transition = Add<HTransitionElementsKind>(object, map,
                                                transition_target.at(i));
    } else {
      untransitionable_maps.Add(map);
    }
  }

  // If only one map is left after transitioning, handle this case
  // monomorphically.
  DCHECK(untransitionable_maps.length() >= 1);
  if (untransitionable_maps.length() == 1) {
    Handle<Map> untransitionable_map = untransitionable_maps[0];
    HInstruction* instr = NULL;
    if (!CanInlineElementAccess(untransitionable_map)) {
      instr = AddInstruction(
          BuildKeyedGeneric(access_type, expr, slot, object, key, val));
    } else {
      instr = BuildMonomorphicElementAccess(
          object, key, val, transition, untransitionable_map, access_type,
          store_mode);
    }
    *has_side_effects |= instr->HasObservableSideEffects();
    return access_type == STORE ? val : instr;
  }

  HBasicBlock* join = graph()->CreateBasicBlock();

  // Emit one HCompareMap-dispatched block per remaining map; all of them
  // flow into the common join block.
  for (int i = 0; i < untransitionable_maps.length(); ++i) {
    Handle<Map> map = untransitionable_maps[i];
    ElementsKind elements_kind = map->elements_kind();
    HBasicBlock* this_map = graph()->CreateBasicBlock();
    HBasicBlock* other_map = graph()->CreateBasicBlock();
    HCompareMap* mapcompare =
        New<HCompareMap>(object, map, this_map, other_map);
    FinishCurrentBlock(mapcompare);

    set_current_block(this_map);
    HInstruction* access = NULL;
    if (!CanInlineElementAccess(map)) {
      access = AddInstruction(
          BuildKeyedGeneric(access_type, expr, slot, object, key, val));
    } else {
      DCHECK(IsFastElementsKind(elements_kind) ||
             IsFixedTypedArrayElementsKind(elements_kind));
      LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
      // Happily, mapcompare is a checked object.
      access = BuildUncheckedMonomorphicElementAccess(
          mapcompare, key, val,
          map->instance_type() == JS_ARRAY_TYPE,
          elements_kind, access_type,
          load_mode,
          store_mode);
    }
    *has_side_effects |= access->HasObservableSideEffects();
    // The caller will use has_side_effects and add a correct Simulate.
    access->SetFlag(HValue::kHasNoObservableSideEffects);
    if (access_type == LOAD) {
      Push(access);
    }
    NoObservableSideEffectsScope scope(this);
    GotoNoSimulate(join);
    set_current_block(other_map);
  }

  // Ensure that we visited at least one map above that goes to join. This is
  // necessary because FinishExitWithHardDeoptimization does an AbnormalExit
  // rather than joining the join block. If this becomes an issue, insert a
  // generic access in the case length() == 0.
  DCHECK(join->predecessors()->length() > 0);
  // Deopt if none of the cases matched.
  NoObservableSideEffectsScope scope(this);
  FinishExitWithHardDeoptimization(
      DeoptimizeReason::kUnknownMapInPolymorphicElementAccess);
  set_current_block(join);
  return access_type == STORE ? val : Pop();
}
7246
// Top-level keyed access builder: first tries to treat the key as a property
// name (constant string/symbol, or a name recovered from keyed IC feedback),
// otherwise dispatches to monomorphic, polymorphic, or generic element
// access based on the receiver maps collected from feedback.
HValue* HOptimizedGraphBuilder::HandleKeyedElementAccess(
    HValue* obj, HValue* key, HValue* val, Expression* expr,
    FeedbackVectorSlot slot, BailoutId ast_id, BailoutId return_id,
    PropertyAccessType access_type, bool* has_side_effects) {
  // A keyed name access with type feedback may contain the name.
  Handle<TypeFeedbackVector> vector =
      handle(current_feedback_vector(), isolate());
  HValue* expected_key = key;
  if (!key->ActualValue()->IsConstant()) {
    // Ask the keyed IC nexus whether the site always saw one specific name.
    Name* name = nullptr;
    if (access_type == LOAD) {
      KeyedLoadICNexus nexus(vector, slot);
      name = nexus.FindFirstName();
    } else {
      KeyedStoreICNexus nexus(vector, slot);
      name = nexus.FindFirstName();
    }
    if (name != nullptr) {
      Handle<Name> handle_name(name);
      expected_key = Add<HConstant>(handle_name);
      // We need a check against the key.
      bool in_new_space = isolate()->heap()->InNewSpace(*handle_name);
      Unique<Name> unique_name = Unique<Name>::CreateUninitialized(handle_name);
      Add<HCheckValue>(key, unique_name, in_new_space);
    }
  }
  if (expected_key->ActualValue()->IsConstant()) {
    Handle<Object> constant =
        HConstant::cast(expected_key->ActualValue())->handle(isolate());
    uint32_t array_index;
    // Only treat the key as a name if it is a symbol or a string that is not
    // an array index; array-index strings stay on the element path.
    if ((constant->IsString() &&
         !Handle<String>::cast(constant)->AsArrayIndex(&array_index)) ||
        constant->IsSymbol()) {
      if (!constant->IsUniqueName()) {
        constant = isolate()->factory()->InternalizeString(
            Handle<String>::cast(constant));
      }
      HValue* access =
          BuildNamedAccess(access_type, ast_id, return_id, expr, slot, obj,
                           Handle<Name>::cast(constant), val, false);
      if (access == NULL || access->IsPhi() ||
          HInstruction::cast(access)->IsLinked()) {
        *has_side_effects = false;
      } else {
        HInstruction* instr = HInstruction::cast(access);
        AddInstruction(instr);
        *has_side_effects = instr->HasObservableSideEffects();
      }
      return access;
    }
  }

  DCHECK(!expr->IsPropertyName());
  HInstruction* instr = NULL;

  SmallMapList* maps;
  bool monomorphic = ComputeReceiverTypes(expr, obj, &maps, this);

  bool force_generic = false;
  if (expr->GetKeyType() == PROPERTY) {
    // Non-Generic accesses assume that elements are being accessed, and will
    // deopt for non-index keys, which the IC knows will occur.
    // TODO(jkummerow): Consider adding proper support for property accesses.
    force_generic = true;
    monomorphic = false;
  } else if (access_type == STORE &&
             (monomorphic || (maps != NULL && !maps->is_empty()))) {
    // Stores can't be mono/polymorphic if their prototype chain has dictionary
    // elements. However a receiver map that has dictionary elements itself
    // should be left to normal mono/poly behavior (the other maps may benefit
    // from highly optimized stores).
    for (int i = 0; i < maps->length(); i++) {
      Handle<Map> current_map = maps->at(i);
      if (current_map->DictionaryElementsInPrototypeChainOnly()) {
        force_generic = true;
        monomorphic = false;
        break;
      }
    }
  } else if (access_type == LOAD && !monomorphic &&
             (maps != NULL && !maps->is_empty())) {
    // Polymorphic loads have to go generic if any of the maps are strings.
    // If some, but not all of the maps are strings, we should go generic
    // because polymorphic access wants to key on ElementsKind and isn't
    // compatible with strings.
    for (int i = 0; i < maps->length(); i++) {
      Handle<Map> current_map = maps->at(i);
      if (current_map->IsStringMap()) {
        force_generic = true;
        break;
      }
    }
  }

  if (monomorphic) {
    Handle<Map> map = maps->first();
    if (!CanInlineElementAccess(map)) {
      instr = AddInstruction(
          BuildKeyedGeneric(access_type, expr, slot, obj, key, val));
    } else {
      BuildCheckHeapObject(obj);
      instr = BuildMonomorphicElementAccess(
          obj, key, val, NULL, map, access_type, expr->GetStoreMode());
    }
  } else if (!force_generic && (maps != NULL && !maps->is_empty())) {
    return HandlePolymorphicElementAccess(expr, slot, obj, key, val, maps,
                                          access_type, expr->GetStoreMode(),
                                          has_side_effects);
  } else {
    if (access_type == STORE) {
      if (expr->IsAssignment() &&
          expr->AsAssignment()->HasNoTypeInformation()) {
        // Soft-deopt so the function is re-optimized once feedback exists.
        Add<HDeoptimize>(
            DeoptimizeReason::kInsufficientTypeFeedbackForGenericKeyedAccess,
            Deoptimizer::SOFT);
      }
    } else {
      if (expr->AsProperty()->HasNoTypeInformation()) {
        Add<HDeoptimize>(
            DeoptimizeReason::kInsufficientTypeFeedbackForGenericKeyedAccess,
            Deoptimizer::SOFT);
      }
    }
    instr = AddInstruction(
        BuildKeyedGeneric(access_type, expr, slot, obj, key, val));
  }
  *has_side_effects = instr->HasObservableSideEffects();
  return instr;
}
7376
7377
// Materializes the argument values of an inlined function on the stack so
// that arguments-object element accesses can be emitted. Instructions are
// inserted directly after the HEnterInlined entry, not at the current
// insertion point. Idempotent per function state.
void HOptimizedGraphBuilder::EnsureArgumentsArePushedForAccess() {
  // Outermost function already has arguments on the stack.
  if (function_state()->outer() == NULL) return;

  if (function_state()->arguments_pushed()) return;

  // Push arguments when entering inlined function.
  HEnterInlined* entry = function_state()->entry();
  entry->set_arguments_pushed();

  HArgumentsObject* arguments = entry->arguments_object();
  const ZoneList<HValue*>* arguments_values = arguments->arguments_values();

  // Insert one HPushArguments per argument value, chained after the entry so
  // they appear in order at the start of the inlined function.
  HInstruction* insert_after = entry;
  for (int i = 0; i < arguments_values->length(); i++) {
    HValue* argument = arguments_values->at(i);
    HInstruction* push_argument = New<HPushArguments>(argument);
    push_argument->InsertAfter(insert_after);
    insert_after = push_argument;
  }

  // GVN must not merge this with other HArgumentsElements instructions,
  // hence the cleared flag.
  HArgumentsElements* arguments_elements = New<HArgumentsElements>(true);
  arguments_elements->ClearFlag(HValue::kUseGVN);
  arguments_elements->InsertAfter(insert_after);
  function_state()->set_arguments_elements(arguments_elements);
}
7404
IsAnyParameterContextAllocated()7405 bool HOptimizedGraphBuilder::IsAnyParameterContextAllocated() {
7406 int count = current_info()->scope()->num_parameters();
7407 for (int i = 0; i < count; ++i) {
7408 if (current_info()->scope()->parameter(i)->location() ==
7409 VariableLocation::CONTEXT) {
7410 return true;
7411 }
7412 }
7413 return false;
7414 }
7415
// Attempts to compile `arguments.length` or `arguments[i]` directly instead
// of materializing an arguments object. Returns false (emitting nothing) if
// the receiver is not a stack-allocated arguments binding or the access is
// not safe to specialize; returns true after emitting the result otherwise.
bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) {
  VariableProxy* proxy = expr->obj()->AsVariableProxy();
  if (proxy == NULL) return false;
  if (!proxy->var()->IsStackAllocated()) return false;
  if (!environment()->Lookup(proxy->var())->CheckFlag(HValue::kIsArguments)) {
    return false;
  }

  HInstruction* result = NULL;
  if (expr->key()->IsPropertyName()) {
    // Only `arguments.length` is supported among named accesses.
    Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
    if (!String::Equals(name, isolate()->factory()->length_string())) {
      return false;
    }

    // Make sure we visit the arguments object so that the liveness analysis
    // still records the access.
    CHECK_ALIVE_OR_RETURN(VisitForValue(expr->obj(), ARGUMENTS_ALLOWED), true);
    Drop(1);

    if (function_state()->outer() == NULL) {
      HInstruction* elements = Add<HArgumentsElements>(false);
      result = New<HArgumentsLength>(elements);
    } else {
      // Number of arguments without receiver.
      int argument_count = environment()->
          arguments_environment()->parameter_count() - 1;
      result = New<HConstant>(argument_count);
    }
  } else {
    // We need to take into account the KEYED_LOAD_IC feedback to guard the
    // HBoundsCheck instructions below.
    if (!expr->IsMonomorphic() && !expr->IsUninitialized()) return false;
    if (IsAnyParameterContextAllocated()) return false;
    CHECK_ALIVE_OR_RETURN(VisitForValue(expr->obj(), ARGUMENTS_ALLOWED), true);
    CHECK_ALIVE_OR_RETURN(VisitForValue(expr->key()), true);
    HValue* key = Pop();
    Drop(1);  // Arguments object.
    if (function_state()->outer() == NULL) {
      HInstruction* elements = Add<HArgumentsElements>(false);
      HInstruction* length = Add<HArgumentsLength>(elements);
      HInstruction* checked_key = Add<HBoundsCheck>(key, length);
      result = New<HAccessArgumentsAt>(elements, length, checked_key);
    } else {
      // Inlined frame: arguments values are known, so push them explicitly
      // and use a constant length for the bounds check.
      EnsureArgumentsArePushedForAccess();

      // Number of arguments without receiver.
      HInstruction* elements = function_state()->arguments_elements();
      int argument_count = environment()->
          arguments_environment()->parameter_count() - 1;
      HInstruction* length = Add<HConstant>(argument_count);
      HInstruction* checked_key = Add<HBoundsCheck>(key, length);
      result = New<HAccessArgumentsAt>(elements, length, checked_key);
    }
  }
  ast_context()->ReturnInstruction(result, expr->id());
  return true;
}
7474
// Builds a named property load or store, choosing among: an inlined global
// property access through the global proxy, a monomorphic access, a
// polymorphic access (returns NULL; result handling is done internally), or
// the generic IC path.
HValue* HOptimizedGraphBuilder::BuildNamedAccess(
    PropertyAccessType access, BailoutId ast_id, BailoutId return_id,
    Expression* expr, FeedbackVectorSlot slot, HValue* object,
    Handle<Name> name, HValue* value, bool is_uninitialized) {
  SmallMapList* maps;
  ComputeReceiverTypes(expr, object, &maps, this);
  DCHECK(maps != NULL);

  // Check for special case: Access via a single map to the global proxy
  // can also be handled monomorphically.
  if (maps->length() > 0) {
    Handle<Object> map_constructor =
        handle(maps->first()->GetConstructor(), isolate());
    if (map_constructor->IsJSFunction()) {
      Handle<Context> map_context =
          handle(Handle<JSFunction>::cast(map_constructor)->context());
      Handle<Context> current_context(current_info()->context());
      bool is_same_context_global_proxy_access =
          maps->length() == 1 &&  // >1 map => fallback to polymorphic
          maps->first()->IsJSGlobalProxyMap() &&
          (*map_context == *current_context);
      if (is_same_context_global_proxy_access) {
        Handle<JSGlobalObject> global_object(current_info()->global_object());
        LookupIterator it(global_object, name, LookupIterator::OWN);
        if (CanInlineGlobalPropertyAccess(&it, access)) {
          BuildCheckHeapObject(object);
          Add<HCheckMaps>(object, maps);
          if (access == LOAD) {
            // The load pushes its result via the ast context itself.
            InlineGlobalPropertyLoad(&it, expr->id());
            return nullptr;
          } else {
            return InlineGlobalPropertyStore(&it, value, expr->id());
          }
        }
      }
    }

    PropertyAccessInfo info(this, access, maps->first(), name);
    if (!info.CanAccessAsMonomorphic(maps)) {
      // Polymorphic path handles join/return itself; NULL tells the caller
      // that no further result handling is needed.
      HandlePolymorphicNamedFieldAccess(access, expr, slot, ast_id, return_id,
                                        object, value, maps, name);
      return NULL;
    }

    HValue* checked_object;
    // AstType::Number() is only supported by polymorphic load/call handling.
    DCHECK(!info.IsNumberType());
    BuildCheckHeapObject(object);
    if (AreStringTypes(maps)) {
      checked_object =
          Add<HCheckInstanceType>(object, HCheckInstanceType::IS_STRING);
    } else {
      checked_object = Add<HCheckMaps>(object, maps);
    }
    return BuildMonomorphicAccess(
        &info, object, checked_object, value, ast_id, return_id);
  }

  return BuildNamedGeneric(access, expr, slot, object, name, value,
                           is_uninitialized);
}
7536
7537
PushLoad(Property * expr,HValue * object,HValue * key)7538 void HOptimizedGraphBuilder::PushLoad(Property* expr,
7539 HValue* object,
7540 HValue* key) {
7541 ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
7542 Push(object);
7543 if (key != NULL) Push(key);
7544 BuildLoad(expr, expr->LoadId());
7545 }
7546
7547
// Builds a property load. Expects the receiver (and the key, for keyed
// accesses) on the environment stack and returns the result through the
// current ast context.
void HOptimizedGraphBuilder::BuildLoad(Property* expr,
                                       BailoutId ast_id) {
  HInstruction* instr = NULL;
  if (expr->IsStringAccess() && expr->GetKeyType() == ELEMENT) {
    // String element access: load the char code and convert it to a
    // one-character string.
    HValue* index = Pop();
    HValue* string = Pop();
    HInstruction* char_code = BuildStringCharCodeAt(string, index);
    AddInstruction(char_code);
    instr = NewUncasted<HStringCharFromCode>(char_code);

  } else if (expr->key()->IsPropertyName()) {
    Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
    HValue* object = Pop();

    HValue* value = BuildNamedAccess(LOAD, ast_id, expr->LoadId(), expr,
                                     expr->PropertyFeedbackSlot(), object, name,
                                     NULL, expr->IsUninitialized());
    // NULL means BuildNamedAccess already delivered the result via the ast
    // context (e.g. polymorphic or inlined global load).
    if (value == NULL) return;
    if (value->IsPhi()) return ast_context()->ReturnValue(value);
    instr = HInstruction::cast(value);
    if (instr->IsLinked()) return ast_context()->ReturnValue(instr);

  } else {
    HValue* key = Pop();
    HValue* obj = Pop();

    bool has_side_effects = false;
    HValue* load = HandleKeyedElementAccess(
        obj, key, NULL, expr, expr->PropertyFeedbackSlot(), ast_id,
        expr->LoadId(), LOAD, &has_side_effects);
    if (has_side_effects) {
      // Keep the loaded value live across the simulate unless only the
      // effect is needed.
      if (ast_context()->IsEffect()) {
        Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      } else {
        Push(load);
        Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
        Drop(1);
      }
    }
    if (load == NULL) return;
    return ast_context()->ReturnValue(load);
  }
  return ast_context()->ReturnInstruction(instr, ast_id);
}
7592
7593
// Visits a property access expression: tries the specialized arguments-object
// path first, otherwise evaluates receiver (and key, when present) and
// builds the load.
void HOptimizedGraphBuilder::VisitProperty(Property* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  if (TryArgumentsAccess(expr)) return;

  CHECK_ALIVE(VisitForValue(expr->obj()));
  // Keyed accesses and string element accesses also need the key on the
  // stack; named accesses embed the name as a constant instead.
  if (!expr->key()->IsPropertyName() || expr->IsStringAccess()) {
    CHECK_ALIVE(VisitForValue(expr->key()));
  }

  BuildLoad(expr, expr->id());
}
7608
7609
BuildConstantMapCheck(Handle<JSObject> constant)7610 HInstruction* HGraphBuilder::BuildConstantMapCheck(Handle<JSObject> constant) {
7611 HCheckMaps* check = Add<HCheckMaps>(
7612 Add<HConstant>(constant), handle(constant->map()));
7613 check->ClearDependsOnFlag(kElementsKind);
7614 return check;
7615 }
7616
7617
// Walks the prototype chain starting at |prototype| and adds a map check
// for each object until |holder| is reached (inclusive). If |holder| is
// null, every object up to the end of the chain is checked and NULL is
// returned; otherwise the holder's check instruction is returned.
HInstruction* HGraphBuilder::BuildCheckPrototypeMaps(Handle<JSObject> prototype,
                                                     Handle<JSObject> holder) {
  PrototypeIterator iter(isolate(), prototype, kStartAtReceiver);
  while (holder.is_null() ||
         !PrototypeIterator::GetCurrent(iter).is_identical_to(holder)) {
    BuildConstantMapCheck(PrototypeIterator::GetCurrent<JSObject>(iter));
    iter.Advance();
    if (iter.IsAtEnd()) {
      // Null holder: the whole chain has been checked.
      return NULL;
    }
  }
  // Check the holder itself last.
  return BuildConstantMapCheck(PrototypeIterator::GetCurrent<JSObject>(iter));
}
7631
7632
AddCheckPrototypeMaps(Handle<JSObject> holder,Handle<Map> receiver_map)7633 void HOptimizedGraphBuilder::AddCheckPrototypeMaps(Handle<JSObject> holder,
7634 Handle<Map> receiver_map) {
7635 if (!holder.is_null()) {
7636 Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
7637 BuildCheckPrototypeMaps(prototype, holder);
7638 }
7639 }
7640
// Emits a check that |object| is callable and throws (via
// Runtime::kThrowCalledNonCallable) when it is not. Used before tail calls,
// where the callee must be validated before the caller frame is replaced.
void HOptimizedGraphBuilder::BuildEnsureCallable(HValue* object) {
  NoObservableSideEffectsScope scope(this);
  const Runtime::Function* throw_called_non_callable =
      Runtime::FunctionForId(Runtime::kThrowCalledNonCallable);

  // Not callable if it is a smi, or if the map's kIsCallable bit is clear.
  IfBuilder is_not_function(this);
  HValue* smi_check = is_not_function.If<HIsSmiAndBranch>(object);
  is_not_function.Or();
  HValue* map = AddLoadMap(object, smi_check);
  HValue* bit_field =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField());
  // Mask out everything but the kIsCallable bit, then test it.
  HValue* bit_field_masked = AddUncasted<HBitwise>(
      Token::BIT_AND, bit_field, Add<HConstant>(1 << Map::kIsCallable));
  is_not_function.IfNot<HCompareNumericAndBranch>(
      bit_field_masked, Add<HConstant>(1 << Map::kIsCallable), Token::EQ);
  is_not_function.Then();
  {
    Add<HPushArguments>(object);
    Add<HCallRuntime>(throw_called_non_callable, 1);
  }
  is_not_function.End();
}
7663
// Creates (but does not add) a call through the generic Call stub to an
// arbitrary function value. |argument_count| includes the receiver. For
// syntactic tail calls the callee is first verified to be callable.
HInstruction* HOptimizedGraphBuilder::NewCallFunction(
    HValue* function, int argument_count, TailCallMode syntactic_tail_call_mode,
    ConvertReceiverMode convert_mode, TailCallMode tail_call_mode) {
  if (syntactic_tail_call_mode == TailCallMode::kAllow) {
    BuildEnsureCallable(function);
  } else {
    DCHECK_EQ(TailCallMode::kDisallow, tail_call_mode);
  }
  // The stub's arity excludes the receiver.
  HValue* arity = Add<HConstant>(argument_count - 1);

  HValue* op_vals[] = {function, arity};

  Callable callable =
      CodeFactory::Call(isolate(), convert_mode, tail_call_mode);
  HConstant* stub = Add<HConstant>(callable.code());

  return New<HCallWithDescriptor>(stub, argument_count, callable.descriptor(),
                                  ArrayVector(op_vals),
                                  syntactic_tail_call_mode);
}
7684
// Creates (but does not add) a call through the CallIC stub so that type
// feedback is collected in |slot| of the current feedback vector.
// |argument_count| includes the receiver; tail-called callees are first
// verified to be callable.
HInstruction* HOptimizedGraphBuilder::NewCallFunctionViaIC(
    HValue* function, int argument_count, TailCallMode syntactic_tail_call_mode,
    ConvertReceiverMode convert_mode, TailCallMode tail_call_mode,
    FeedbackVectorSlot slot) {
  if (syntactic_tail_call_mode == TailCallMode::kAllow) {
    BuildEnsureCallable(function);
  } else {
    DCHECK_EQ(TailCallMode::kDisallow, tail_call_mode);
  }
  // The IC's arity excludes the receiver.
  int arity = argument_count - 1;
  Handle<TypeFeedbackVector> vector(current_feedback_vector(), isolate());
  HValue* arity_val = Add<HConstant>(arity);
  HValue* index_val = Add<HConstant>(vector->GetIndex(slot));
  HValue* vector_val = Add<HConstant>(vector);

  HValue* op_vals[] = {function, arity_val, index_val, vector_val};
  Callable callable = CodeFactory::CallICInOptimizedCode(
      isolate(), convert_mode, tail_call_mode);
  HConstant* stub = Add<HConstant>(callable.code());

  return New<HCallWithDescriptor>(stub, argument_count, callable.descriptor(),
                                  ArrayVector(op_vals),
                                  syntactic_tail_call_mode);
}
7709
NewCallConstantFunction(Handle<JSFunction> function,int argument_count,TailCallMode syntactic_tail_call_mode,TailCallMode tail_call_mode)7710 HInstruction* HOptimizedGraphBuilder::NewCallConstantFunction(
7711 Handle<JSFunction> function, int argument_count,
7712 TailCallMode syntactic_tail_call_mode, TailCallMode tail_call_mode) {
7713 HValue* target = Add<HConstant>(function);
7714 return New<HInvokeFunction>(target, function, argument_count,
7715 syntactic_tail_call_mode, tail_call_mode);
7716 }
7717
7718
// Sort key used to rank polymorphic call targets: hotter functions (more
// profiler ticks) come first, ties are broken by smaller inlining size.
// |index_| remembers the position of the corresponding receiver map in the
// original map list so the maps can be revisited in sorted order.
class FunctionSorter {
 public:
  explicit FunctionSorter(int index = 0, int ticks = 0, int size = 0)
      : index_(index), ticks_(ticks), size_(size) {}

  int index() const { return index_; }
  int ticks() const { return ticks_; }
  int size() const { return size_; }

 private:
  int index_;
  int ticks_;
  int size_;
};


// Orders by descending tick count, then ascending size. Uses direct
// comparisons rather than the previous subtraction-based trick
// (lhs.ticks() - rhs.ticks()), which invoked undefined behavior on signed
// overflow for extreme tick counts.
inline bool operator<(const FunctionSorter& lhs, const FunctionSorter& rhs) {
  if (lhs.ticks() != rhs.ticks()) return lhs.ticks() > rhs.ticks();
  return lhs.size() < rhs.size();
}
7740
7741
// Emits a polymorphic named method call: for each receiver map whose target
// is a known constant JSFunction, builds a map-dispatched arm that calls
// (and, when possible, inlines) the target directly; maps that could not be
// handled fall back to a generic named load followed by a generic call.
void HOptimizedGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
                                                        HValue* receiver,
                                                        SmallMapList* maps,
                                                        Handle<String> name) {
  int argument_count = expr->arguments()->length() + 1;  // Includes receiver.
  FunctionSorter order[kMaxCallPolymorphism];

  bool handle_smi = false;
  bool handled_string = false;
  int ordered_functions = 0;

  TailCallMode syntactic_tail_call_mode = expr->tail_call_mode();
  TailCallMode tail_call_mode =
      function_state()->ComputeTailCallMode(syntactic_tail_call_mode);

  // Pass 1: collect the maps whose target is a known constant JSFunction;
  // these are the candidates for direct dispatch below.
  int i;
  for (i = 0; i < maps->length() && ordered_functions < kMaxCallPolymorphism;
       ++i) {
    PropertyAccessInfo info(this, LOAD, maps->at(i), name);
    if (info.CanAccessMonomorphic() && info.IsDataConstant() &&
        info.constant()->IsJSFunction()) {
      if (info.IsStringType()) {
        // All string maps share one dispatch arm; record it only once.
        if (handled_string) continue;
        handled_string = true;
      }
      Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
      if (info.IsNumberType()) {
        // A number map requires a smi pre-check before map dispatch.
        handle_smi = true;
      }
      expr->set_target(target);
      order[ordered_functions++] = FunctionSorter(
          i, target->shared()->profiler_ticks(), InliningAstSize(target));
    }
  }

  // Hotter (and smaller) targets are dispatched first.
  std::sort(order, order + ordered_functions);

  if (i < maps->length()) {
    // Some map was not a candidate: disable the "all maps known" deopt path
    // and force the generic fallback below.
    maps->Clear();
    ordered_functions = -1;
  }

  HBasicBlock* number_block = NULL;
  HBasicBlock* join = NULL;
  handled_string = false;
  int count = 0;

  // Pass 2: emit one dispatch arm per ordered candidate.
  for (int fn = 0; fn < ordered_functions; ++fn) {
    int i = order[fn].index();
    PropertyAccessInfo info(this, LOAD, maps->at(i), name);
    if (info.IsStringType()) {
      if (handled_string) continue;
      handled_string = true;
    }
    // Reloads the target.
    info.CanAccessMonomorphic();
    Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());

    expr->set_target(target);
    if (count == 0) {
      // Only needed once.
      join = graph()->CreateBasicBlock();
      if (handle_smi) {
        // Route smi receivers into the heap-number arm via a shared block.
        HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
        HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
        number_block = graph()->CreateBasicBlock();
        FinishCurrentBlock(New<HIsSmiAndBranch>(
            receiver, empty_smi_block, not_smi_block));
        GotoNoSimulate(empty_smi_block, number_block);
        set_current_block(not_smi_block);
      } else {
        BuildCheckHeapObject(receiver);
      }
    }
    ++count;
    HBasicBlock* if_true = graph()->CreateBasicBlock();
    HBasicBlock* if_false = graph()->CreateBasicBlock();
    HUnaryControlInstruction* compare;

    // Choose the dispatch test for this arm: heap-number map, string type,
    // or an exact map comparison.
    Handle<Map> map = info.map();
    if (info.IsNumberType()) {
      Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
      compare = New<HCompareMap>(receiver, heap_number_map, if_true, if_false);
    } else if (info.IsStringType()) {
      compare = New<HIsStringAndBranch>(receiver, if_true, if_false);
    } else {
      compare = New<HCompareMap>(receiver, map, if_true, if_false);
    }
    FinishCurrentBlock(compare);

    if (info.IsNumberType()) {
      // Merge the smi path into the heap-number arm.
      GotoNoSimulate(if_true, number_block);
      if_true = number_block;
    }

    set_current_block(if_true);

    AddCheckPrototypeMaps(info.holder(), map);

    HValue* function = Add<HConstant>(expr->target());
    environment()->SetExpressionStackAt(0, function);
    Push(receiver);
    CHECK_ALIVE(VisitExpressions(expr->arguments()));
    bool needs_wrapping = info.NeedsWrappingFor(target);
    bool try_inline = FLAG_polymorphic_inlining && !needs_wrapping;
    if (FLAG_trace_inlining && try_inline) {
      Handle<JSFunction> caller = current_info()->closure();
      std::unique_ptr<char[]> caller_name =
          caller->shared()->DebugName()->ToCString();
      PrintF("Trying to inline the polymorphic call to %s from %s\n",
             name->ToCString().get(),
             caller_name.get());
    }
    if (try_inline && TryInlineCall(expr)) {
      // Trying to inline will signal that we should bailout from the
      // entire compilation by setting stack overflow on the visitor.
      if (HasStackOverflow()) return;
    } else {
      // Since HWrapReceiver currently cannot actually wrap numbers and strings,
      // use the regular call builtin for method calls to wrap the receiver.
      // TODO(verwaest): Support creation of value wrappers directly in
      // HWrapReceiver.
      HInstruction* call =
          needs_wrapping
              ? NewCallFunction(
                    function, argument_count, syntactic_tail_call_mode,
                    ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode)
              : NewCallConstantFunction(target, argument_count,
                                        syntactic_tail_call_mode,
                                        tail_call_mode);
      PushArgumentsFromEnvironment(argument_count);
      AddInstruction(call);
      Drop(1);  // Drop the function.
      if (!ast_context()->IsEffect()) Push(call);
    }

    if (current_block() != NULL) Goto(join);
    set_current_block(if_false);
  }

  // Finish up. Unconditionally deoptimize if we've handled all the maps we
  // know about and do not want to handle ones we've never seen. Otherwise
  // use a generic IC.
  if (ordered_functions == maps->length() && FLAG_deoptimize_uncommon_cases) {
    FinishExitWithHardDeoptimization(
        DeoptimizeReason::kUnknownMapInPolymorphicCall);
  } else {
    Property* prop = expr->expression()->AsProperty();
    HInstruction* function =
        BuildNamedGeneric(LOAD, prop, prop->PropertyFeedbackSlot(), receiver,
                          name, NULL, prop->IsUninitialized());
    AddInstruction(function);
    Push(function);
    AddSimulate(prop->LoadId(), REMOVABLE_SIMULATE);

    environment()->SetExpressionStackAt(1, function);
    environment()->SetExpressionStackAt(0, receiver);
    CHECK_ALIVE(VisitExpressions(expr->arguments()));

    HInstruction* call = NewCallFunction(
        function, argument_count, syntactic_tail_call_mode,
        ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode);

    PushArgumentsFromEnvironment(argument_count);

    Drop(1);  // Function.

    if (join != NULL) {
      AddInstruction(call);
      if (!ast_context()->IsEffect()) Push(call);
      Goto(join);
    } else {
      return ast_context()->ReturnInstruction(call, expr->id());
    }
  }

  // We assume that control flow is always live after an expression. So
  // even without predecessors to the join block, we set it as the exit
  // block and continue by adding instructions there.
  DCHECK(join != NULL);
  if (join->HasPredecessor()) {
    set_current_block(join);
    join->SetJoinId(expr->id());
    if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
  } else {
    set_current_block(NULL);
  }
}
7930
TraceInline(Handle<JSFunction> target,Handle<JSFunction> caller,const char * reason,TailCallMode tail_call_mode)7931 void HOptimizedGraphBuilder::TraceInline(Handle<JSFunction> target,
7932 Handle<JSFunction> caller,
7933 const char* reason,
7934 TailCallMode tail_call_mode) {
7935 if (FLAG_trace_inlining) {
7936 std::unique_ptr<char[]> target_name =
7937 target->shared()->DebugName()->ToCString();
7938 std::unique_ptr<char[]> caller_name =
7939 caller->shared()->DebugName()->ToCString();
7940 if (reason == NULL) {
7941 const char* call_mode =
7942 tail_call_mode == TailCallMode::kAllow ? "tail called" : "called";
7943 PrintF("Inlined %s %s from %s.\n", target_name.get(), call_mode,
7944 caller_name.get());
7945 } else {
7946 PrintF("Did not inline %s called from %s (%s).\n",
7947 target_name.get(), caller_name.get(), reason);
7948 }
7949 }
7950 }
7951
7952
// Sentinel returned by InliningAstSize for targets that must never be
// inlined; chosen larger than any real AST node count or size limit.
static const int kNotInlinable = 1000000000;
7954
7955
InliningAstSize(Handle<JSFunction> target)7956 int HOptimizedGraphBuilder::InliningAstSize(Handle<JSFunction> target) {
7957 if (!FLAG_use_inlining) return kNotInlinable;
7958
7959 // Precondition: call is monomorphic and we have found a target with the
7960 // appropriate arity.
7961 Handle<JSFunction> caller = current_info()->closure();
7962 Handle<SharedFunctionInfo> target_shared(target->shared());
7963
7964 // Always inline functions that force inlining.
7965 if (target_shared->force_inline()) {
7966 return 0;
7967 }
7968 if (target->shared()->IsBuiltin()) {
7969 return kNotInlinable;
7970 }
7971
7972 if (target_shared->IsApiFunction()) {
7973 TraceInline(target, caller, "target is api function");
7974 return kNotInlinable;
7975 }
7976
7977 // Do a quick check on source code length to avoid parsing large
7978 // inlining candidates.
7979 if (target_shared->SourceSize() >
7980 Min(FLAG_max_inlined_source_size, kUnlimitedMaxInlinedSourceSize)) {
7981 TraceInline(target, caller, "target text too big");
7982 return kNotInlinable;
7983 }
7984
7985 // Target must be inlineable.
7986 BailoutReason noopt_reason = target_shared->disable_optimization_reason();
7987 if (!target_shared->IsInlineable() && noopt_reason != kHydrogenFilter) {
7988 TraceInline(target, caller, "target not inlineable");
7989 return kNotInlinable;
7990 }
7991 if (noopt_reason != kNoReason && noopt_reason != kHydrogenFilter) {
7992 TraceInline(target, caller, "target contains unsupported syntax [early]");
7993 return kNotInlinable;
7994 }
7995
7996 int nodes_added = target_shared->ast_node_count();
7997 return nodes_added;
7998 }
7999
// Tries to inline a call to |target|. Returns false when inlining is not
// attempted and the caller should emit a regular call. Returns true once a
// decision to inline has been made — even if graph construction for the
// inlinee subsequently fails, in which case stack overflow has been set on
// the visitor. |implicit_return_value| is the value produced when falling
// off the end of a constructor or setter; |inlining_kind| distinguishes
// normal calls, construct calls and accessor calls.
bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target,
                                       int arguments_count,
                                       HValue* implicit_return_value,
                                       BailoutId ast_id, BailoutId return_id,
                                       InliningKind inlining_kind,
                                       TailCallMode syntactic_tail_call_mode) {
  // Never inline across native contexts; it would mix up context-dependent
  // objects between the two contexts.
  if (target->context()->native_context() !=
      top_info()->closure()->context()->native_context()) {
    return false;
  }
  int nodes_added = InliningAstSize(target);
  if (nodes_added == kNotInlinable) return false;

  Handle<JSFunction> caller = current_info()->closure();
  if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
    TraceInline(target, caller, "target AST is too large [early]");
    return false;
  }

  // Don't inline deeper than the maximum number of inlining levels.
  HEnvironment* env = environment();
  int current_level = 1;
  while (env->outer() != NULL) {
    if (current_level == FLAG_max_inlining_levels) {
      TraceInline(target, caller, "inline depth limit reached");
      return false;
    }
    if (env->outer()->frame_type() == JS_FUNCTION) {
      current_level++;
    }
    env = env->outer();
  }

  // Don't inline recursive functions.
  for (FunctionState* state = function_state();
       state != NULL;
       state = state->outer()) {
    if (*state->compilation_info()->closure() == *target) {
      TraceInline(target, caller, "target is recursive");
      return false;
    }
  }

  // We don't want to add more than a certain number of nodes from inlining.
  // Always inline small methods (<= 10 nodes).
  if (inlined_count_ > Min(FLAG_max_inlined_nodes_cumulative,
                           kUnlimitedMaxInlinedNodesCumulative)) {
    TraceInline(target, caller, "cumulative AST node limit reached");
    return false;
  }

  // Parse and allocate variables.
  // Use the same AstValueFactory for creating strings in the sub-compilation
  // step, but don't transfer ownership to target_info.
  Handle<SharedFunctionInfo> target_shared(target->shared());
  ParseInfo parse_info(zone(), target_shared);
  parse_info.set_ast_value_factory(
      top_info()->parse_info()->ast_value_factory());
  parse_info.set_ast_value_factory_owned(false);

  CompilationInfo target_info(&parse_info, target);

  // Class constructors may only be inlined as part of a construct call,
  // since calling them any other way must throw.
  if (inlining_kind != CONSTRUCT_CALL_RETURN &&
      IsClassConstructor(target_shared->kind())) {
    TraceInline(target, caller, "target is classConstructor");
    return false;
  }

  if (target_shared->HasDebugInfo()) {
    TraceInline(target, caller, "target is being debugged");
    return false;
  }
  if (!Compiler::ParseAndAnalyze(target_info.parse_info())) {
    if (target_info.isolate()->has_pending_exception()) {
      // Parse or scope error, never optimize this function.
      SetStackOverflow();
      target_shared->DisableOptimization(kParseScopeError);
    }
    TraceInline(target, caller, "parse failure");
    return false;
  }
  if (target_shared->dont_crankshaft()) {
    TraceInline(target, caller, "ParseAndAnalyze found incompatibility");
    return false;
  }

  if (target_info.scope()->NeedsContext()) {
    TraceInline(target, caller, "target has context-allocated variables");
    return false;
  }

  if (target_info.scope()->rest_parameter() != nullptr) {
    TraceInline(target, caller, "target uses rest parameters");
    return false;
  }

  FunctionLiteral* function = target_info.literal();

  // The following conditions must be checked again after re-parsing, because
  // earlier the information might not have been complete due to lazy parsing.
  nodes_added = function->ast_node_count();
  if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
    TraceInline(target, caller, "target AST is too large [late]");
    return false;
  }
  if (function->dont_optimize()) {
    TraceInline(target, caller, "target contains unsupported syntax [late]");
    return false;
  }

  // If the function uses the arguments object check that inlining of functions
  // with arguments object is enabled and the arguments-variable is
  // stack allocated.
  if (function->scope()->arguments() != NULL) {
    if (!FLAG_inline_arguments) {
      TraceInline(target, caller, "target uses arguments object");
      return false;
    }
  }

  // Unsupported variable references present.
  if (function->scope()->this_function_var() != nullptr ||
      function->scope()->new_target_var() != nullptr) {
    TraceInline(target, caller, "target uses new target or this function");
    return false;
  }

  // All declarations must be inlineable.
  Declaration::List* decls = target_info.scope()->declarations();
  for (Declaration* decl : *decls) {
    if (decl->IsFunctionDeclaration() ||
        !decl->proxy()->var()->IsStackAllocated()) {
      TraceInline(target, caller, "target has non-trivial declaration");
      return false;
    }
  }

  // Generate the deoptimization data for the unoptimized version of
  // the target function if we don't already have it.
  if (!Compiler::EnsureDeoptimizationSupport(&target_info)) {
    TraceInline(target, caller, "could not generate deoptimization info");
    return false;
  }

  // Remember that we inlined this function. This needs to be called right
  // after the EnsureDeoptimizationSupport call so that the code flusher
  // does not remove the code with the deoptimization support.
  int inlining_id = top_info()->AddInlinedFunction(target_info.shared_info(),
                                                   source_position());

  // ----------------------------------------------------------------
  // After this point, we've made a decision to inline this function (so
  // TryInline should always return true).

  // If target was lazily compiled, its literals array may not yet be set up.
  JSFunction::EnsureLiterals(target);

  // Type-check the inlined function.
  DCHECK(target_shared->has_deoptimization_support());
  AstTyper(target_info.isolate(), target_info.zone(), target_info.closure(),
           target_info.scope(), target_info.osr_ast_id(), target_info.literal(),
           &bounds_)
      .Run();

  if (is_tracking_positions()) {
    TraceInlinedFunction(target_shared, source_position(), inlining_id);
  }

  // Save the pending call context. Set up new one for the inlined function.
  // The function state is new-allocated because we need to delete it
  // in two different places.
  FunctionState* target_state = new FunctionState(
      this, &target_info, inlining_kind, inlining_id,
      function_state()->ComputeTailCallMode(syntactic_tail_call_mode));

  HConstant* undefined = graph()->GetConstantUndefined();

  HEnvironment* inner_env = environment()->CopyForInlining(
      target, arguments_count, function, undefined,
      function_state()->inlining_kind(), syntactic_tail_call_mode);

  HConstant* context = Add<HConstant>(Handle<Context>(target->context()));
  inner_env->BindContext(context);

  // Create a dematerialized arguments object for the function, also copy the
  // current arguments values to use them for materialization.
  HEnvironment* arguments_env = inner_env->arguments_environment();
  int parameter_count = arguments_env->parameter_count();
  HArgumentsObject* arguments_object = Add<HArgumentsObject>(parameter_count);
  for (int i = 0; i < parameter_count; i++) {
    arguments_object->AddArgument(arguments_env->Lookup(i), zone());
  }

  // If the function uses the arguments object, bind it now.
  if (function->scope()->arguments() != NULL) {
    DCHECK(function->scope()->arguments()->IsStackAllocated());
    inner_env->Bind(function->scope()->arguments(), arguments_object);
  }

  // Capture the state before invoking the inlined function for deopt in the
  // inlined function. This simulate has no bailout-id since it's not directly
  // reachable for deopt, and is only used to capture the state. If the simulate
  // becomes reachable by merging, the ast id of the simulate merged into it is
  // adopted.
  Add<HSimulate>(BailoutId::None());

  current_block()->UpdateEnvironment(inner_env);
  Scope* saved_scope = scope();
  set_scope(target_info.scope());
  HEnterInlined* enter_inlined = Add<HEnterInlined>(
      return_id, target, context, arguments_count, function,
      function_state()->inlining_kind(), function->scope()->arguments(),
      arguments_object, syntactic_tail_call_mode);
  if (is_tracking_positions()) {
    enter_inlined->set_inlining_id(inlining_id);
  }

  function_state()->set_entry(enter_inlined);

  // Build the graph for the inlined function body.
  VisitDeclarations(target_info.scope()->declarations());
  VisitStatements(function->body());
  set_scope(saved_scope);
  if (HasStackOverflow()) {
    // Bail out if the inline function did, as we cannot residualize a call
    // instead, but do not disable optimization for the outer function.
    TraceInline(target, caller, "inline graph construction failed");
    target_shared->DisableOptimization(kInliningBailedOut);
    current_info()->RetryOptimization(kInliningBailedOut);
    delete target_state;
    return true;
  }

  // Update inlined nodes count.
  inlined_count_ += nodes_added;

  Handle<Code> unoptimized_code(target_shared->code());
  DCHECK(unoptimized_code->kind() == Code::FUNCTION);
  Handle<TypeFeedbackInfo> type_info(
      TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
  graph()->update_type_change_checksum(type_info->own_type_change_checksum());

  TraceInline(target, caller, NULL, syntactic_tail_call_mode);

  if (current_block() != NULL) {
    FunctionState* state = function_state();
    if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
      // Falling off the end of an inlined construct call. In a test context the
      // return value will always evaluate to true, in a value context the
      // return value is the newly allocated receiver.
      if (call_context()->IsTest()) {
        inlined_test_context()->ReturnValue(graph()->GetConstantTrue());
      } else if (call_context()->IsEffect()) {
        Goto(function_return(), state);
      } else {
        DCHECK(call_context()->IsValue());
        AddLeaveInlined(implicit_return_value, state);
      }
    } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
      // Falling off the end of an inlined setter call. The returned value is
      // never used, the value of an assignment is always the value of the RHS
      // of the assignment.
      if (call_context()->IsTest()) {
        inlined_test_context()->ReturnValue(implicit_return_value);
      } else if (call_context()->IsEffect()) {
        Goto(function_return(), state);
      } else {
        DCHECK(call_context()->IsValue());
        AddLeaveInlined(implicit_return_value, state);
      }
    } else {
      // Falling off the end of a normal inlined function. This basically means
      // returning undefined.
      if (call_context()->IsTest()) {
        inlined_test_context()->ReturnValue(graph()->GetConstantFalse());
      } else if (call_context()->IsEffect()) {
        Goto(function_return(), state);
      } else {
        DCHECK(call_context()->IsValue());
        AddLeaveInlined(undefined, state);
      }
    }
  }

  // Fix up the function exits.
  if (inlined_test_context() != NULL) {
    HBasicBlock* if_true = inlined_test_context()->if_true();
    HBasicBlock* if_false = inlined_test_context()->if_false();

    HEnterInlined* entry = function_state()->entry();

    // Pop the return test context from the expression context stack.
    DCHECK(ast_context() == inlined_test_context());
    ClearInlinedTestContext();
    delete target_state;

    // Forward to the real test context.
    if (if_true->HasPredecessor()) {
      entry->RegisterReturnTarget(if_true, zone());
      if_true->SetJoinId(ast_id);
      HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
      Goto(if_true, true_target, function_state());
    }
    if (if_false->HasPredecessor()) {
      entry->RegisterReturnTarget(if_false, zone());
      if_false->SetJoinId(ast_id);
      HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
      Goto(if_false, false_target, function_state());
    }
    set_current_block(NULL);
    return true;

  } else if (function_return()->HasPredecessor()) {
    function_state()->entry()->RegisterReturnTarget(function_return(), zone());
    function_return()->SetJoinId(ast_id);
    set_current_block(function_return());
  } else {
    set_current_block(NULL);
  }
  delete target_state;
  return true;
}
8321
8322
TryInlineCall(Call * expr)8323 bool HOptimizedGraphBuilder::TryInlineCall(Call* expr) {
8324 return TryInline(expr->target(), expr->arguments()->length(), NULL,
8325 expr->id(), expr->ReturnId(), NORMAL_RETURN,
8326 expr->tail_call_mode());
8327 }
8328
8329
TryInlineConstruct(CallNew * expr,HValue * implicit_return_value)8330 bool HOptimizedGraphBuilder::TryInlineConstruct(CallNew* expr,
8331 HValue* implicit_return_value) {
8332 return TryInline(expr->target(), expr->arguments()->length(),
8333 implicit_return_value, expr->id(), expr->ReturnId(),
8334 CONSTRUCT_CALL_RETURN, TailCallMode::kDisallow);
8335 }
8336
TryInlineGetter(Handle<Object> getter,Handle<Map> receiver_map,BailoutId ast_id,BailoutId return_id)8337 bool HOptimizedGraphBuilder::TryInlineGetter(Handle<Object> getter,
8338 Handle<Map> receiver_map,
8339 BailoutId ast_id,
8340 BailoutId return_id) {
8341 if (TryInlineApiGetter(getter, receiver_map, ast_id)) return true;
8342 if (getter->IsJSFunction()) {
8343 Handle<JSFunction> getter_function = Handle<JSFunction>::cast(getter);
8344 return TryInlineBuiltinGetterCall(getter_function, receiver_map, ast_id) ||
8345 TryInline(getter_function, 0, NULL, ast_id, return_id,
8346 GETTER_CALL_RETURN, TailCallMode::kDisallow);
8347 }
8348 return false;
8349 }
8350
TryInlineSetter(Handle<Object> setter,Handle<Map> receiver_map,BailoutId id,BailoutId assignment_id,HValue * implicit_return_value)8351 bool HOptimizedGraphBuilder::TryInlineSetter(Handle<Object> setter,
8352 Handle<Map> receiver_map,
8353 BailoutId id,
8354 BailoutId assignment_id,
8355 HValue* implicit_return_value) {
8356 if (TryInlineApiSetter(setter, receiver_map, id)) return true;
8357 return setter->IsJSFunction() &&
8358 TryInline(Handle<JSFunction>::cast(setter), 1, implicit_return_value,
8359 id, assignment_id, SETTER_CALL_RETURN,
8360 TailCallMode::kDisallow);
8361 }
8362
8363
TryInlineIndirectCall(Handle<JSFunction> function,Call * expr,int arguments_count)8364 bool HOptimizedGraphBuilder::TryInlineIndirectCall(Handle<JSFunction> function,
8365 Call* expr,
8366 int arguments_count) {
8367 return TryInline(function, arguments_count, NULL, expr->id(),
8368 expr->ReturnId(), NORMAL_RETURN, expr->tail_call_mode());
8369 }
8370
8371
// Tries to replace a call to a recognized builtin (a subset of Math
// functions) with a dedicated Hydrogen instruction. On success the
// arguments, receiver and function have been consumed from the expression
// stack and the result has been handed to the ast context.
bool HOptimizedGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr) {
  if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
  BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
  // We intentionally ignore expr->tail_call_mode() here because builtins
  // we inline here do not observe if they were tail called or not.
  switch (id) {
    // Unary Math operations map directly onto HUnaryMathOperation.
    case kMathCos:
    case kMathExp:
    case kMathRound:
    case kMathFround:
    case kMathFloor:
    case kMathAbs:
    case kMathSin:
    case kMathSqrt:
    case kMathLog:
    case kMathClz32:
      if (expr->arguments()->length() == 1) {
        HValue* argument = Pop();
        Drop(2);  // Receiver and function.
        HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
        ast_context()->ReturnInstruction(op, expr->id());
        return true;
      }
      break;
    case kMathImul:
      if (expr->arguments()->length() == 2) {
        HValue* right = Pop();
        HValue* left = Pop();
        Drop(2);  // Receiver and function.
        HInstruction* op =
            HMul::NewImul(isolate(), zone(), context(), left, right);
        ast_context()->ReturnInstruction(op, expr->id());
        return true;
      }
      break;
    default:
      // Not supported for inlining yet.
      break;
  }
  return false;
}
8413
8414
8415 // static
IsReadOnlyLengthDescriptor(Handle<Map> jsarray_map)8416 bool HOptimizedGraphBuilder::IsReadOnlyLengthDescriptor(
8417 Handle<Map> jsarray_map) {
8418 DCHECK(!jsarray_map->is_dictionary_map());
8419 Isolate* isolate = jsarray_map->GetIsolate();
8420 Handle<Name> length_string = isolate->factory()->length_string();
8421 DescriptorArray* descriptors = jsarray_map->instance_descriptors();
8422 int number =
8423 descriptors->SearchWithCache(isolate, *length_string, *jsarray_map);
8424 DCHECK_NE(DescriptorArray::kNotFound, number);
8425 return descriptors->GetDetails(number).IsReadOnly();
8426 }
8427
8428
8429 // static
CanInlineArrayResizeOperation(Handle<Map> receiver_map)8430 bool HOptimizedGraphBuilder::CanInlineArrayResizeOperation(
8431 Handle<Map> receiver_map) {
8432 return !receiver_map.is_null() && receiver_map->prototype()->IsJSObject() &&
8433 receiver_map->instance_type() == JS_ARRAY_TYPE &&
8434 IsFastElementsKind(receiver_map->elements_kind()) &&
8435 !receiver_map->is_dictionary_map() && receiver_map->is_extensible() &&
8436 (!receiver_map->is_prototype_map() || receiver_map->is_stable()) &&
8437 !IsReadOnlyLengthDescriptor(receiver_map);
8438 }
8439
TryInlineBuiltinGetterCall(Handle<JSFunction> function,Handle<Map> receiver_map,BailoutId ast_id)8440 bool HOptimizedGraphBuilder::TryInlineBuiltinGetterCall(
8441 Handle<JSFunction> function, Handle<Map> receiver_map, BailoutId ast_id) {
8442 if (!function->shared()->HasBuiltinFunctionId()) return false;
8443 BuiltinFunctionId id = function->shared()->builtin_function_id();
8444
8445 // Try to inline getter calls like DataView.prototype.byteLength/byteOffset
8446 // as operations in the calling function.
8447 switch (id) {
8448 case kDataViewBuffer: {
8449 if (!receiver_map->IsJSDataViewMap()) return false;
8450 HObjectAccess access = HObjectAccess::ForMapAndOffset(
8451 receiver_map, JSDataView::kBufferOffset);
8452 HValue* object = Pop(); // receiver
8453 HInstruction* result = New<HLoadNamedField>(object, object, access);
8454 ast_context()->ReturnInstruction(result, ast_id);
8455 return true;
8456 }
8457 case kDataViewByteLength:
8458 case kDataViewByteOffset: {
8459 if (!receiver_map->IsJSDataViewMap()) return false;
8460 int offset = (id == kDataViewByteLength) ? JSDataView::kByteLengthOffset
8461 : JSDataView::kByteOffsetOffset;
8462 HObjectAccess access =
8463 HObjectAccess::ForMapAndOffset(receiver_map, offset);
8464 HValue* object = Pop(); // receiver
8465 HValue* checked_object = Add<HCheckArrayBufferNotNeutered>(object);
8466 HInstruction* result =
8467 New<HLoadNamedField>(object, checked_object, access);
8468 ast_context()->ReturnInstruction(result, ast_id);
8469 return true;
8470 }
8471 case kTypedArrayByteLength:
8472 case kTypedArrayByteOffset:
8473 case kTypedArrayLength: {
8474 if (!receiver_map->IsJSTypedArrayMap()) return false;
8475 int offset = (id == kTypedArrayLength)
8476 ? JSTypedArray::kLengthOffset
8477 : (id == kTypedArrayByteLength)
8478 ? JSTypedArray::kByteLengthOffset
8479 : JSTypedArray::kByteOffsetOffset;
8480 HObjectAccess access =
8481 HObjectAccess::ForMapAndOffset(receiver_map, offset);
8482 HValue* object = Pop(); // receiver
8483 HValue* checked_object = Add<HCheckArrayBufferNotNeutered>(object);
8484 HInstruction* result =
8485 New<HLoadNamedField>(object, checked_object, access);
8486 ast_context()->ReturnInstruction(result, ast_id);
8487 return true;
8488 }
8489 default:
8490 return false;
8491 }
8492 }
8493
// Tries to replace a call to a known builtin method (Object.hasOwnProperty in
// a for-in pattern, String char accessors, Math.*, and
// Array.prototype.{pop,push,shift,indexOf,lastIndexOf}) with an equivalent
// Hydrogen subgraph in the calling function.  On success the call operands
// (arguments, receiver and function) have been consumed from the expression
// stack and a result delivered to the current ast context; on failure the
// stack is left untouched and the caller emits a regular call.
//
// function               - the builtin target (must have a builtin id).
// receiver_map           - map from type feedback; may be null, in which case
//                          a map is recovered from a constant receiver below.
// ast_id                 - bailout id used for the deopt simulate.
// args_count_no_receiver - argument count excluding the receiver.
bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
    Handle<JSFunction> function, Handle<Map> receiver_map, BailoutId ast_id,
    int args_count_no_receiver) {
  if (!function->shared()->HasBuiltinFunctionId()) return false;
  BuiltinFunctionId id = function->shared()->builtin_function_id();
  int argument_count = args_count_no_receiver + 1;  // Plus receiver.

  // Without feedback, recover a receiver map from a constant HeapObject
  // receiver sitting on the expression stack, if there is one.
  if (receiver_map.is_null()) {
    HValue* receiver = environment()->ExpressionStackAt(args_count_no_receiver);
    if (receiver->IsConstant() &&
        HConstant::cast(receiver)->handle(isolate())->IsHeapObject()) {
      receiver_map =
          handle(Handle<HeapObject>::cast(
                     HConstant::cast(receiver)->handle(isolate()))->map());
    }
  }
  // Try to inline calls like Math.* as operations in the calling function.
  switch (id) {
    case kObjectHasOwnProperty: {
      // Only the pattern "obj.hasOwnProperty(key)" where key comes straight
      // out of a for-in enum cache over the same object is handled; in that
      // case the answer is statically known to be true.
      // It's not safe to look through the phi for elements if we're compiling
      // for osr.
      if (top_info()->is_osr()) return false;
      if (argument_count != 2) return false;
      HValue* key = Top();
      if (!key->IsLoadKeyed()) return false;
      HValue* elements = HLoadKeyed::cast(key)->elements();
      if (!elements->IsPhi() || elements->OperandCount() != 1) return false;
      if (!elements->OperandAt(0)->IsForInCacheArray()) return false;
      HForInCacheArray* cache = HForInCacheArray::cast(elements->OperandAt(0));
      HValue* receiver = environment()->ExpressionStackAt(1);
      if (!receiver->IsPhi() || receiver->OperandCount() != 1) return false;
      if (cache->enumerable() != receiver->OperandAt(0)) return false;
      Drop(3);  // key, receiver, function
      // Deopt if the object's map changed since the enum cache was built.
      Add<HCheckMapValue>(receiver, cache->map());
      ast_context()->ReturnValue(graph()->GetConstantTrue());
      return true;
    }
    case kStringCharCodeAt:
    case kStringCharAt:
      if (argument_count == 2) {
        HValue* index = Pop();
        HValue* string = Pop();
        Drop(1);  // Function.
        HInstruction* char_code =
            BuildStringCharCodeAt(string, index);
        if (id == kStringCharCodeAt) {
          ast_context()->ReturnInstruction(char_code, ast_id);
          return true;
        }
        // charAt additionally converts the char code to a string.
        AddInstruction(char_code);
        HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
        ast_context()->ReturnInstruction(result, ast_id);
        return true;
      }
      break;
    case kStringFromCharCode:
      if (argument_count == 2) {
        HValue* argument = Pop();
        Drop(2);  // Receiver and function.
        // Force the argument into a truncating int32 representation before
        // converting it to a string.
        argument = AddUncasted<HForceRepresentation>(
            argument, Representation::Integer32());
        argument->SetFlag(HValue::kTruncatingToInt32);
        HInstruction* result = NewUncasted<HStringCharFromCode>(argument);
        ast_context()->ReturnInstruction(result, ast_id);
        return true;
      }
      break;
    case kMathCos:
    case kMathExp:
    case kMathRound:
    case kMathFround:
    case kMathFloor:
    case kMathAbs:
    case kMathSin:
    case kMathSqrt:
    case kMathLog:
    case kMathClz32:
      if (argument_count == 2) {
        HValue* argument = Pop();
        Drop(2);  // Receiver and function.
        HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
        ast_context()->ReturnInstruction(op, ast_id);
        return true;
      }
      break;
    case kMathPow:
      if (argument_count == 3) {
        HValue* right = Pop();
        HValue* left = Pop();
        Drop(2);  // Receiver and function.
        HInstruction* result = NULL;
        // Use sqrt() if exponent is 0.5 or -0.5.
        if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
          double exponent = HConstant::cast(right)->DoubleValue();
          if (exponent == 0.5) {
            result = NewUncasted<HUnaryMathOperation>(left, kMathPowHalf);
          } else if (exponent == -0.5) {
            HValue* one = graph()->GetConstant1();
            HInstruction* sqrt = AddUncasted<HUnaryMathOperation>(
                left, kMathPowHalf);
            // MathPowHalf doesn't have side effects so there's no need for
            // an environment simulation here.
            DCHECK(!sqrt->HasObservableSideEffects());
            result = NewUncasted<HDiv>(one, sqrt);
          } else if (exponent == 2.0) {
            result = NewUncasted<HMul>(left, left);
          }
        }

        // Fall back to a generic power instruction for other exponents.
        if (result == NULL) {
          result = NewUncasted<HPower>(left, right);
        }
        ast_context()->ReturnInstruction(result, ast_id);
        return true;
      }
      break;
    case kMathMax:
    case kMathMin:
      if (argument_count == 3) {
        HValue* right = Pop();
        HValue* left = Pop();
        Drop(2);  // Receiver and function.
        HMathMinMax::Operation op = (id == kMathMin) ? HMathMinMax::kMathMin
                                                     : HMathMinMax::kMathMax;
        HInstruction* result = NewUncasted<HMathMinMax>(left, right, op);
        ast_context()->ReturnInstruction(result, ast_id);
        return true;
      }
      break;
    case kMathImul:
      if (argument_count == 3) {
        HValue* right = Pop();
        HValue* left = Pop();
        Drop(2);  // Receiver and function.
        HInstruction* result =
            HMul::NewImul(isolate(), zone(), context(), left, right);
        ast_context()->ReturnInstruction(result, ast_id);
        return true;
      }
      break;
    case kArrayPop: {
      if (!CanInlineArrayResizeOperation(receiver_map)) return false;
      ElementsKind elements_kind = receiver_map->elements_kind();

      Drop(args_count_no_receiver);
      HValue* result;
      HValue* reduced_length;
      HValue* receiver = Pop();

      HValue* checked_object = AddCheckMap(receiver, receiver_map);
      HValue* length =
          Add<HLoadNamedField>(checked_object, nullptr,
                               HObjectAccess::ForArrayLength(elements_kind));

      Drop(1);  // Function.

      { NoObservableSideEffectsScope scope(this);
        IfBuilder length_checker(this);

        // Empty array: result is undefined and nothing is stored.
        HValue* bounds_check = length_checker.If<HCompareNumericAndBranch>(
            length, graph()->GetConstant0(), Token::EQ);
        length_checker.Then();

        if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());

        length_checker.Else();
        HValue* elements = AddLoadElements(checked_object);
        // Ensure that we aren't popping from a copy-on-write array.
        if (IsFastSmiOrObjectElementsKind(elements_kind)) {
          elements = BuildCopyElementsOnWrite(checked_object, elements,
                                              elements_kind, length);
        }
        // Load the last element, overwrite its slot with the hole, and
        // shrink the length by one.
        reduced_length = AddUncasted<HSub>(length, graph()->GetConstant1());
        result = AddElementAccess(elements, reduced_length, nullptr,
                                  bounds_check, nullptr, elements_kind, LOAD);
        HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
                           ? graph()->GetConstantHole()
                           : Add<HConstant>(HConstant::kHoleNaN);
        // Storing the hole may introduce holes, so store with a holey kind.
        if (IsFastSmiOrObjectElementsKind(elements_kind)) {
          elements_kind = FAST_HOLEY_ELEMENTS;
        }
        AddElementAccess(elements, reduced_length, hole, bounds_check, nullptr,
                         elements_kind, STORE);
        Add<HStoreNamedField>(
            checked_object, HObjectAccess::ForArrayLength(elements_kind),
            reduced_length, STORE_TO_INITIALIZED_ENTRY);

        if (!ast_context()->IsEffect()) Push(result);

        length_checker.End();
      }
      result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) Drop(1);

      ast_context()->ReturnValue(result);
      return true;
    }
    case kArrayPush: {
      if (!CanInlineArrayResizeOperation(receiver_map)) return false;
      ElementsKind elements_kind = receiver_map->elements_kind();

      // If there may be elements accessors in the prototype chain, the fast
      // inlined version can't be used.
      if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
      // If there currently can be no elements accessors on the prototype chain,
      // it doesn't mean that there won't be any later. Install a full prototype
      // chain check to trap element accessors being installed on the prototype
      // chain, which would cause elements to go to dictionary mode and result
      // in a map change.
      Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
      BuildCheckPrototypeMaps(prototype, Handle<JSObject>());

      // Protect against adding elements to the Array prototype, which needs to
      // route through appropriate bottlenecks.
      if (isolate()->IsFastArrayConstructorPrototypeChainIntact() &&
          !prototype->IsJSArray()) {
        return false;
      }

      // Only the single-argument form of push is inlined.
      const int argc = args_count_no_receiver;
      if (argc != 1) return false;

      HValue* value_to_push = Pop();
      HValue* array = Pop();
      Drop(1);  // Drop function.

      HInstruction* new_size = NULL;
      HValue* length = NULL;

      {
        NoObservableSideEffectsScope scope(this);

        length = Add<HLoadNamedField>(
            array, nullptr, HObjectAccess::ForArrayLength(elements_kind));

        new_size = AddUncasted<HAdd>(length, graph()->GetConstant1());

        // A grow-mode store at index == length appends the element and
        // updates the array length in one step.
        bool is_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
        HValue* checked_array = Add<HCheckMaps>(array, receiver_map);
        BuildUncheckedMonomorphicElementAccess(
            checked_array, length, value_to_push, is_array, elements_kind,
            STORE, NEVER_RETURN_HOLE, STORE_AND_GROW_NO_TRANSITION);

        if (!ast_context()->IsEffect()) Push(new_size);
        Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
        if (!ast_context()->IsEffect()) Drop(1);
      }

      ast_context()->ReturnValue(new_size);
      return true;
    }
    case kArrayShift: {
      if (!CanInlineArrayResizeOperation(receiver_map)) return false;
      ElementsKind kind = receiver_map->elements_kind();

      // If there may be elements accessors in the prototype chain, the fast
      // inlined version can't be used.
      if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;

      // If there currently can be no elements accessors on the prototype chain,
      // it doesn't mean that there won't be any later. Install a full prototype
      // chain check to trap element accessors being installed on the prototype
      // chain, which would cause elements to go to dictionary mode and result
      // in a map change.
      BuildCheckPrototypeMaps(
          handle(JSObject::cast(receiver_map->prototype()), isolate()),
          Handle<JSObject>::null());

      // Threshold for fast inlined Array.shift().
      HConstant* inline_threshold = Add<HConstant>(static_cast<int32_t>(16));

      Drop(args_count_no_receiver);
      HValue* result;
      HValue* receiver = Pop();
      HValue* checked_object = AddCheckMap(receiver, receiver_map);
      HValue* length = Add<HLoadNamedField>(
          receiver, checked_object, HObjectAccess::ForArrayLength(kind));

      Drop(1);  // Function.
      {
        NoObservableSideEffectsScope scope(this);

        IfBuilder if_lengthiszero(this);
        HValue* lengthiszero = if_lengthiszero.If<HCompareNumericAndBranch>(
            length, graph()->GetConstant0(), Token::EQ);
        if_lengthiszero.Then();
        {
          // Empty array: shift() yields undefined.
          if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());
        }
        if_lengthiszero.Else();
        {
          HValue* elements = AddLoadElements(receiver);

          // Check if we can use the fast inlined Array.shift().
          IfBuilder if_inline(this);
          if_inline.If<HCompareNumericAndBranch>(
              length, inline_threshold, Token::LTE);
          if (IsFastSmiOrObjectElementsKind(kind)) {
            // We cannot handle copy-on-write backing stores here.
            if_inline.AndIf<HCompareMap>(
                elements, isolate()->factory()->fixed_array_map());
          }
          if_inline.Then();
          {
            // Remember the result.
            if (!ast_context()->IsEffect()) {
              Push(AddElementAccess(elements, graph()->GetConstant0(), nullptr,
                                    lengthiszero, nullptr, kind, LOAD));
            }

            // Compute the new length.
            HValue* new_length = AddUncasted<HSub>(
                length, graph()->GetConstant1());
            new_length->ClearFlag(HValue::kCanOverflow);

            // Copy the remaining elements.
            LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
            {
              HValue* new_key = loop.BeginBody(
                  graph()->GetConstant0(), new_length, Token::LT);
              HValue* key = AddUncasted<HAdd>(new_key, graph()->GetConstant1());
              key->ClearFlag(HValue::kCanOverflow);
              // Use a holey kind for the copy so hole reads are allowed.
              ElementsKind copy_kind =
                  kind == FAST_HOLEY_SMI_ELEMENTS ? FAST_HOLEY_ELEMENTS : kind;
              HValue* element =
                  AddUncasted<HLoadKeyed>(elements, key, lengthiszero, nullptr,
                                          copy_kind, ALLOW_RETURN_HOLE);
              HStoreKeyed* store = Add<HStoreKeyed>(elements, new_key, element,
                                                    nullptr, copy_kind);
              store->SetFlag(HValue::kTruncatingToNumber);
            }
            loop.EndBody();

            // Put a hole at the end.
            HValue* hole = IsFastSmiOrObjectElementsKind(kind)
                               ? graph()->GetConstantHole()
                               : Add<HConstant>(HConstant::kHoleNaN);
            if (IsFastSmiOrObjectElementsKind(kind)) kind = FAST_HOLEY_ELEMENTS;
            Add<HStoreKeyed>(elements, new_length, hole, nullptr, kind,
                             INITIALIZING_STORE);

            // Remember new length.
            Add<HStoreNamedField>(
                receiver, HObjectAccess::ForArrayLength(kind),
                new_length, STORE_TO_INITIALIZED_ENTRY);
          }
          if_inline.Else();
          {
            // Too many elements (or COW store): call the builtin instead.
            Add<HPushArguments>(receiver);
            result = AddInstruction(NewCallConstantFunction(
                function, 1, TailCallMode::kDisallow, TailCallMode::kDisallow));
            if (!ast_context()->IsEffect()) Push(result);
          }
          if_inline.End();
        }
        if_lengthiszero.End();
      }
      result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) Drop(1);
      ast_context()->ReturnValue(result);
      return true;
    }
    case kArrayIndexOf:
    case kArrayLastIndexOf: {
      if (receiver_map.is_null()) return false;
      if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
      if (!receiver_map->prototype()->IsJSObject()) return false;
      ElementsKind kind = receiver_map->elements_kind();
      if (!IsFastElementsKind(kind)) return false;
      if (argument_count != 2) return false;
      if (!receiver_map->is_extensible()) return false;

      // If there may be elements accessors in the prototype chain, the fast
      // inlined version can't be used.
      if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;

      // If there currently can be no elements accessors on the prototype chain,
      // it doesn't mean that there won't be any later. Install a full prototype
      // chain check to trap element accessors being installed on the prototype
      // chain, which would cause elements to go to dictionary mode and result
      // in a map change.
      BuildCheckPrototypeMaps(
          handle(JSObject::cast(receiver_map->prototype()), isolate()),
          Handle<JSObject>::null());

      HValue* search_element = Pop();
      HValue* receiver = Pop();
      Drop(1);  // Drop function.

      ArrayIndexOfMode mode = (id == kArrayIndexOf)
          ? kFirstIndexOf : kLastIndexOf;
      HValue* index = BuildArrayIndexOf(receiver, search_element, kind, mode);

      if (!ast_context()->IsEffect()) Push(index);
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) Drop(1);
      ast_context()->ReturnValue(index);
      return true;
    }
    default:
      // Not yet supported for inlining.
      break;
  }
  return false;
}
8901
8902
TryInlineApiFunctionCall(Call * expr,HValue * receiver)8903 bool HOptimizedGraphBuilder::TryInlineApiFunctionCall(Call* expr,
8904 HValue* receiver) {
8905 if (V8_UNLIKELY(FLAG_runtime_stats)) return false;
8906 Handle<JSFunction> function = expr->target();
8907 int argc = expr->arguments()->length();
8908 SmallMapList receiver_maps;
8909 return TryInlineApiCall(function, receiver, &receiver_maps, argc, expr->id(),
8910 kCallApiFunction, expr->tail_call_mode());
8911 }
8912
8913
TryInlineApiMethodCall(Call * expr,HValue * receiver,SmallMapList * receiver_maps)8914 bool HOptimizedGraphBuilder::TryInlineApiMethodCall(
8915 Call* expr,
8916 HValue* receiver,
8917 SmallMapList* receiver_maps) {
8918 if (V8_UNLIKELY(FLAG_runtime_stats)) return false;
8919 Handle<JSFunction> function = expr->target();
8920 int argc = expr->arguments()->length();
8921 return TryInlineApiCall(function, receiver, receiver_maps, argc, expr->id(),
8922 kCallApiMethod, expr->tail_call_mode());
8923 }
8924
TryInlineApiGetter(Handle<Object> function,Handle<Map> receiver_map,BailoutId ast_id)8925 bool HOptimizedGraphBuilder::TryInlineApiGetter(Handle<Object> function,
8926 Handle<Map> receiver_map,
8927 BailoutId ast_id) {
8928 if (V8_UNLIKELY(FLAG_runtime_stats)) return false;
8929 SmallMapList receiver_maps(1, zone());
8930 receiver_maps.Add(receiver_map, zone());
8931 return TryInlineApiCall(function,
8932 NULL, // Receiver is on expression stack.
8933 &receiver_maps, 0, ast_id, kCallApiGetter,
8934 TailCallMode::kDisallow);
8935 }
8936
TryInlineApiSetter(Handle<Object> function,Handle<Map> receiver_map,BailoutId ast_id)8937 bool HOptimizedGraphBuilder::TryInlineApiSetter(Handle<Object> function,
8938 Handle<Map> receiver_map,
8939 BailoutId ast_id) {
8940 SmallMapList receiver_maps(1, zone());
8941 receiver_maps.Add(receiver_map, zone());
8942 return TryInlineApiCall(function,
8943 NULL, // Receiver is on expression stack.
8944 &receiver_maps, 1, ast_id, kCallApiSetter,
8945 TailCallMode::kDisallow);
8946 }
8947
// Common implementation for inlining calls to simple API callbacks (plain
// function, method, getter, setter) as a direct CallApiCallbackStub call,
// bypassing the generic call machinery.  Returns true iff the call was
// emitted; in that case the relevant stack operands have been consumed and
// the call instruction delivered to the ast context.
//
// function                  - the callback target (JSFunction or template).
// receiver                  - explicit receiver for function/method calls;
//                             NULL for getter/setter (on expression stack).
// receiver_maps             - maps to guard the receiver against.
// argc                      - argument count excluding the receiver.
// syntactic_tail_call_mode  - tail call mode from the call site.
bool HOptimizedGraphBuilder::TryInlineApiCall(
    Handle<Object> function, HValue* receiver, SmallMapList* receiver_maps,
    int argc, BailoutId ast_id, ApiCallType call_type,
    TailCallMode syntactic_tail_call_mode) {
  if (V8_UNLIKELY(FLAG_runtime_stats)) return false;
  // Only inline same-native-context calls.
  if (function->IsJSFunction() &&
      Handle<JSFunction>::cast(function)->context()->native_context() !=
          top_info()->closure()->context()->native_context()) {
    return false;
  }
  if (argc > CallApiCallbackStub::kArgMax) {
    return false;
  }

  CallOptimization optimization(function);
  if (!optimization.is_simple_api_call()) return false;
  Handle<Map> holder_map;
  for (int i = 0; i < receiver_maps->length(); ++i) {
    auto map = receiver_maps->at(i);
    // Don't inline calls to receivers requiring accesschecks.
    if (map->is_access_check_needed()) return false;
  }
  if (call_type == kCallApiFunction) {
    // Cannot embed a direct reference to the global proxy map
    // as it maybe dropped on deserialization.
    CHECK(!isolate()->serializer_enabled());
    DCHECK(function->IsJSFunction());
    DCHECK_EQ(0, receiver_maps->length());
    receiver_maps->Add(
        handle(Handle<JSFunction>::cast(function)->global_proxy()->map()),
        zone());
  }
  // Locate the holder object that carries the API callback, if any.
  CallOptimization::HolderLookup holder_lookup =
      CallOptimization::kHolderNotFound;
  Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
      receiver_maps->first(), &holder_lookup);
  if (holder_lookup == CallOptimization::kHolderNotFound) return false;

  if (FLAG_trace_inlining) {
    PrintF("Inlining api function ");
    function->ShortPrint();
    PrintF("\n");
  }

  bool is_function = false;
  bool is_store = false;
  // Set up the arguments for the stub call according to the call type.
  switch (call_type) {
    case kCallApiFunction:
    case kCallApiMethod:
      // Need to check that none of the receiver maps could have changed.
      Add<HCheckMaps>(receiver, receiver_maps);
      // Need to ensure the chain between receiver and api_holder is intact.
      if (holder_lookup == CallOptimization::kHolderFound) {
        AddCheckPrototypeMaps(api_holder, receiver_maps->first());
      } else {
        DCHECK_EQ(holder_lookup, CallOptimization::kHolderIsReceiver);
      }
      // Includes receiver.
      PushArgumentsFromEnvironment(argc + 1);
      is_function = true;
      break;
    case kCallApiGetter:
      // Receiver and prototype chain cannot have changed.
      DCHECK_EQ(0, argc);
      DCHECK_NULL(receiver);
      // Receiver is on expression stack.
      receiver = Pop();
      Add<HPushArguments>(receiver);
      break;
    case kCallApiSetter:
      {
        is_store = true;
        // Receiver and prototype chain cannot have changed.
        DCHECK_EQ(1, argc);
        DCHECK_NULL(receiver);
        // Receiver and value are on expression stack.
        HValue* value = Pop();
        receiver = Pop();
        Add<HPushArguments>(receiver, value);
        break;
     }
  }

  HValue* holder = NULL;
  switch (holder_lookup) {
    case CallOptimization::kHolderFound:
      holder = Add<HConstant>(api_holder);
      break;
    case CallOptimization::kHolderIsReceiver:
      holder = receiver;
      break;
    case CallOptimization::kHolderNotFound:
      // Rejected above.
      UNREACHABLE();
      break;
  }
  // Materialize the callback's data object and raw function address as
  // constants for the stub's calling convention.
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data_obj(api_call_info->data(), isolate());
  bool call_data_undefined = call_data_obj->IsUndefined(isolate());
  HValue* call_data = Add<HConstant>(call_data_obj);
  ApiFunction fun(v8::ToCData<Address>(api_call_info->callback()));
  ExternalReference ref = ExternalReference(&fun,
                                            ExternalReference::DIRECT_API_CALL,
                                            isolate());
  HValue* api_function_address = Add<HConstant>(ExternalReference(ref));

  // Operands in the order expected by CallApiCallbackStub's descriptor.
  HValue* op_vals[] = {Add<HConstant>(function), call_data, holder,
                       api_function_address};

  HInstruction* call = nullptr;
  CHECK(argc <= CallApiCallbackStub::kArgMax);
  if (!is_function) {
    // Accessor variant: stub distinguishes load vs. store.
    CallApiCallbackStub stub(isolate(), is_store, call_data_undefined,
                             !optimization.is_constant_call());
    Handle<Code> code = stub.GetCode();
    HConstant* code_value = Add<HConstant>(code);
    call = New<HCallWithDescriptor>(
        code_value, argc + 1, stub.GetCallInterfaceDescriptor(),
        Vector<HValue*>(op_vals, arraysize(op_vals)), syntactic_tail_call_mode);
  } else {
    // Function/method variant: stub is parameterized by argument count.
    CallApiCallbackStub stub(isolate(), argc, call_data_undefined, false);
    Handle<Code> code = stub.GetCode();
    HConstant* code_value = Add<HConstant>(code);
    call = New<HCallWithDescriptor>(
        code_value, argc + 1, stub.GetCallInterfaceDescriptor(),
        Vector<HValue*>(op_vals, arraysize(op_vals)), syntactic_tail_call_mode);
    Drop(1);  // Drop function.
  }

  ast_context()->ReturnInstruction(call, ast_id);
  return true;
}
9079
9080
HandleIndirectCall(Call * expr,HValue * function,int arguments_count)9081 void HOptimizedGraphBuilder::HandleIndirectCall(Call* expr, HValue* function,
9082 int arguments_count) {
9083 Handle<JSFunction> known_function;
9084 int args_count_no_receiver = arguments_count - 1;
9085 if (function->IsConstant() &&
9086 HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
9087 known_function =
9088 Handle<JSFunction>::cast(HConstant::cast(function)->handle(isolate()));
9089 if (TryInlineBuiltinMethodCall(known_function, Handle<Map>(), expr->id(),
9090 args_count_no_receiver)) {
9091 if (FLAG_trace_inlining) {
9092 PrintF("Inlining builtin ");
9093 known_function->ShortPrint();
9094 PrintF("\n");
9095 }
9096 return;
9097 }
9098
9099 if (TryInlineIndirectCall(known_function, expr, args_count_no_receiver)) {
9100 return;
9101 }
9102 }
9103
9104 TailCallMode syntactic_tail_call_mode = expr->tail_call_mode();
9105 TailCallMode tail_call_mode =
9106 function_state()->ComputeTailCallMode(syntactic_tail_call_mode);
9107
9108 PushArgumentsFromEnvironment(arguments_count);
9109 HInvokeFunction* call =
9110 New<HInvokeFunction>(function, known_function, arguments_count,
9111 syntactic_tail_call_mode, tail_call_mode);
9112 Drop(1); // Function
9113 ast_context()->ReturnInstruction(call, expr->id());
9114 }
9115
9116
TryIndirectCall(Call * expr)9117 bool HOptimizedGraphBuilder::TryIndirectCall(Call* expr) {
9118 DCHECK(expr->expression()->IsProperty());
9119
9120 if (!expr->IsMonomorphic()) {
9121 return false;
9122 }
9123 Handle<Map> function_map = expr->GetReceiverTypes()->first();
9124 if (function_map->instance_type() != JS_FUNCTION_TYPE ||
9125 !expr->target()->shared()->HasBuiltinFunctionId()) {
9126 return false;
9127 }
9128
9129 switch (expr->target()->shared()->builtin_function_id()) {
9130 case kFunctionCall: {
9131 if (expr->arguments()->length() == 0) return false;
9132 BuildFunctionCall(expr);
9133 return true;
9134 }
9135 case kFunctionApply: {
9136 // For .apply, only the pattern f.apply(receiver, arguments)
9137 // is supported.
9138 if (!CanBeFunctionApplyArguments(expr)) return false;
9139
9140 BuildFunctionApply(expr);
9141 return true;
9142 }
9143 default: { return false; }
9144 }
9145 UNREACHABLE();
9146 }
9147
9148
9149 // f.apply(...)
// Lowers the pattern f.apply(receiver, arguments).  At the top level the
// actual arguments are only known at run time, so an HApplyArguments
// instruction is emitted; inside an inlined function the arguments object
// contents are statically known and are pushed individually before taking
// the generic indirect-call path.
void HOptimizedGraphBuilder::BuildFunctionApply(Call* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  CHECK_ALIVE(VisitForValue(args->at(0)));
  HValue* receiver = Pop();  // receiver
  HValue* function = Pop();  // f
  Drop(1);  // apply

  // Make sure the arguments object is live.
  VariableProxy* arg_two = args->at(1)->AsVariableProxy();
  LookupAndMakeLive(arg_two->var());

  // Guard against f having been replaced with a different function.
  Handle<Map> function_map = expr->GetReceiverTypes()->first();
  HValue* checked_function = AddCheckMap(function, function_map);

  if (function_state()->outer() == NULL) {
    TailCallMode syntactic_tail_call_mode = expr->tail_call_mode();
    TailCallMode tail_call_mode =
        function_state()->ComputeTailCallMode(syntactic_tail_call_mode);

    HInstruction* elements = Add<HArgumentsElements>(false);
    HInstruction* length = Add<HArgumentsLength>(elements);
    HValue* wrapped_receiver = BuildWrapReceiver(receiver, checked_function);
    HInstruction* result = New<HApplyArguments>(
        function, wrapped_receiver, length, elements, tail_call_mode);
    ast_context()->ReturnInstruction(result, expr->id());
  } else {
    // We are inside inlined function and we know exactly what is inside
    // arguments object. But we need to be able to materialize at deopt.
    DCHECK_EQ(environment()->arguments_environment()->parameter_count(),
              function_state()->entry()->arguments_object()->arguments_count());
    HArgumentsObject* args = function_state()->entry()->arguments_object();
    const ZoneList<HValue*>* arguments_values = args->arguments_values();
    int arguments_count = arguments_values->length();
    Push(function);
    Push(BuildWrapReceiver(receiver, checked_function));
    // Index 0 is the receiver inside the materialized arguments object;
    // push only the real arguments.
    for (int i = 1; i < arguments_count; i++) {
      Push(arguments_values->at(i));
    }
    HandleIndirectCall(expr, function, arguments_count);
  }
}
9191
9192
9193 // f.call(...)
// Lowers the pattern f.call(receiver, ...args) by wrapping the receiver and
// rewriting the expression stack so it looks like a direct call to f,
// then delegating to HandleIndirectCall.
void HOptimizedGraphBuilder::BuildFunctionCall(Call* expr) {
  HValue* function = Top();  // f
  // Guard against f having been replaced with a different function.
  Handle<Map> function_map = expr->GetReceiverTypes()->first();
  HValue* checked_function = AddCheckMap(function, function_map);

  // f and call are on the stack in the unoptimized code
  // during evaluation of the arguments.
  CHECK_ALIVE(VisitExpressions(expr->arguments()));

  int args_length = expr->arguments()->length();
  // The first evaluated argument becomes the call's receiver; it sits
  // deepest among the just-pushed arguments.
  int receiver_index = args_length - 1;
  // Patch the receiver.
  HValue* receiver = BuildWrapReceiver(
      environment()->ExpressionStackAt(receiver_index), checked_function);
  environment()->SetExpressionStackAt(receiver_index, receiver);

  // Call must not be on the stack from now on.
  int call_index = args_length + 1;
  environment()->RemoveExpressionStackAt(call_index);

  HandleIndirectCall(expr, function, args_length);
}
9216
9217
ImplicitReceiverFor(HValue * function,Handle<JSFunction> target)9218 HValue* HOptimizedGraphBuilder::ImplicitReceiverFor(HValue* function,
9219 Handle<JSFunction> target) {
9220 SharedFunctionInfo* shared = target->shared();
9221 if (is_sloppy(shared->language_mode()) && !shared->native()) {
9222 // Cannot embed a direct reference to the global proxy
9223 // as is it dropped on deserialization.
9224 CHECK(!isolate()->serializer_enabled());
9225 Handle<JSObject> global_proxy(target->context()->global_proxy());
9226 return Add<HConstant>(global_proxy);
9227 }
9228 return graph()->GetConstantUndefined();
9229 }
9230
9231
// Emits graph code implementing Array.prototype.indexOf (mode kFirstIndexOf)
// or Array.prototype.lastIndexOf (mode kLastIndexOf) over fast-elements
// arrays. Returns the matching index as an HValue, or -1 when the search
// element is not found.
HValue* HOptimizedGraphBuilder::BuildArrayIndexOf(HValue* receiver,
                                                  HValue* search_element,
                                                  ElementsKind kind,
                                                  ArrayIndexOfMode mode) {
  DCHECK(IsFastElementsKind(kind));

  NoObservableSideEffectsScope no_effects(this);

  HValue* elements = AddLoadElements(receiver);
  HValue* length = AddLoadArrayLength(receiver, kind);

  // Select loop bounds, comparison token, and direction for the search mode.
  HValue* initial;
  HValue* terminating;
  Token::Value token;
  LoopBuilder::Direction direction;
  if (mode == kFirstIndexOf) {
    initial = graph()->GetConstant0();
    terminating = length;
    token = Token::LT;
    direction = LoopBuilder::kPostIncrement;
  } else {
    DCHECK_EQ(kLastIndexOf, mode);
    initial = length;
    terminating = graph()->GetConstant0();
    token = Token::GT;
    direction = LoopBuilder::kPreDecrement;
  }

  // Result slot on the expression stack; remains -1 unless a match is found.
  Push(graph()->GetConstantMinus1());
  if (IsFastDoubleElementsKind(kind) || IsFastSmiElementsKind(kind)) {
    // Make sure that we can actually compare numbers correctly below, see
    // https://code.google.com/p/chromium/issues/detail?id=407946 for details.
    search_element = AddUncasted<HForceRepresentation>(
        search_element, IsFastSmiElementsKind(kind) ? Representation::Smi()
                                                    : Representation::Double());

    LoopBuilder loop(this, context(), direction);
    {
      HValue* index = loop.BeginBody(initial, terminating, token);
      HValue* element = AddUncasted<HLoadKeyed>(
          elements, index, nullptr, nullptr, kind, ALLOW_RETURN_HOLE);
      IfBuilder if_issame(this);
      if_issame.If<HCompareNumericAndBranch>(element, search_element,
                                             Token::EQ_STRICT);
      if_issame.Then();
      {
        // Replace the -1 placeholder with the matching index and exit.
        Drop(1);
        Push(index);
        loop.Break();
      }
      if_issame.End();
    }
    loop.EndBody();
  } else {
    // Generic object elements: strict-equality semantics depend on the
    // dynamic type of the search element, so dispatch on
    // string / number / other before entering the search loop.
    IfBuilder if_isstring(this);
    if_isstring.If<HIsStringAndBranch>(search_element);
    if_isstring.Then();
    {
      // Search element is a string: compare with string equality.
      LoopBuilder loop(this, context(), direction);
      {
        HValue* index = loop.BeginBody(initial, terminating, token);
        HValue* element = AddUncasted<HLoadKeyed>(
            elements, index, nullptr, nullptr, kind, ALLOW_RETURN_HOLE);
        IfBuilder if_issame(this);
        if_issame.If<HIsStringAndBranch>(element);
        if_issame.AndIf<HStringCompareAndBranch>(
            element, search_element, Token::EQ_STRICT);
        if_issame.Then();
        {
          Drop(1);
          Push(index);
          loop.Break();
        }
        if_issame.End();
      }
      loop.EndBody();
    }
    if_isstring.Else();
    {
      IfBuilder if_isnumber(this);
      if_isnumber.If<HIsSmiAndBranch>(search_element);
      if_isnumber.OrIf<HCompareMap>(
          search_element, isolate()->factory()->heap_number_map());
      if_isnumber.Then();
      {
        // Search element is a number (smi or heap number): compare
        // numerically with both sides forced to double representation.
        HValue* search_number =
            AddUncasted<HForceRepresentation>(search_element,
                                              Representation::Double());
        LoopBuilder loop(this, context(), direction);
        {
          HValue* index = loop.BeginBody(initial, terminating, token);
          HValue* element = AddUncasted<HLoadKeyed>(
              elements, index, nullptr, nullptr, kind, ALLOW_RETURN_HOLE);

          // Only number-typed elements can strictly equal a number.
          IfBuilder if_element_isnumber(this);
          if_element_isnumber.If<HIsSmiAndBranch>(element);
          if_element_isnumber.OrIf<HCompareMap>(
              element, isolate()->factory()->heap_number_map());
          if_element_isnumber.Then();
          {
            HValue* number =
                AddUncasted<HForceRepresentation>(element,
                                                  Representation::Double());
            IfBuilder if_issame(this);
            if_issame.If<HCompareNumericAndBranch>(
                number, search_number, Token::EQ_STRICT);
            if_issame.Then();
            {
              Drop(1);
              Push(index);
              loop.Break();
            }
            if_issame.End();
          }
          if_element_isnumber.End();
        }
        loop.EndBody();
      }
      if_isnumber.Else();
      {
        // Neither string nor number: strict equality is reference equality.
        LoopBuilder loop(this, context(), direction);
        {
          HValue* index = loop.BeginBody(initial, terminating, token);
          HValue* element = AddUncasted<HLoadKeyed>(
              elements, index, nullptr, nullptr, kind, ALLOW_RETURN_HOLE);
          IfBuilder if_issame(this);
          if_issame.If<HCompareObjectEqAndBranch>(
              element, search_element);
          if_issame.Then();
          {
            Drop(1);
            Push(index);
            loop.Break();
          }
          if_issame.End();
        }
        loop.EndBody();
      }
      if_isnumber.End();
    }
    if_isstring.End();
  }

  // Pop the result: the found index, or the -1 placeholder.
  return Pop();
}
9377
// Attempts to handle a call (or 'new') whose target is the Array function.
// Returns false when the target is not the Array function or no allocation
// site feedback is available; otherwise emits either a fully inlined array
// allocation or an HCallNewArray instruction and returns true.
template <class T>
bool HOptimizedGraphBuilder::TryHandleArrayCall(T* expr, HValue* function) {
  if (!array_function().is_identical_to(expr->target())) {
    return false;
  }

  Handle<AllocationSite> site = expr->allocation_site();
  if (site.is_null()) return false;

  // Deopt if the function value observed at runtime is not Array.
  Add<HCheckValue>(function, array_function());

  int arguments_count = expr->arguments()->length();
  if (TryInlineArrayCall(expr, arguments_count, site)) return true;

  HInstruction* call = PreProcessCall(New<HCallNewArray>(
      function, arguments_count + 1, site->GetElementsKind(), site));
  // NOTE(review): plain calls drop one extra expression-stack slot here that
  // 'new' expressions do not have — presumably the function value; confirm
  // against the stack layout set up by VisitCall.
  if (expr->IsCall()) Drop(1);
  ast_context()->ReturnInstruction(call, expr->id());

  return true;
}
9399
9400
CanBeFunctionApplyArguments(Call * expr)9401 bool HOptimizedGraphBuilder::CanBeFunctionApplyArguments(Call* expr) {
9402 ZoneList<Expression*>* args = expr->arguments();
9403 if (args->length() != 2) return false;
9404 VariableProxy* arg_two = args->at(1)->AsVariableProxy();
9405 if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false;
9406 HValue* arg_two_value = environment()->Lookup(arg_two->var());
9407 if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false;
9408 DCHECK_NOT_NULL(current_info()->scope()->arguments());
9409 return true;
9410 }
9411
9412
// Compiles a JavaScript call expression. Property calls (o.f(...) / o[k](...))
// and plain function calls take separate paths; each path tries a series of
// increasingly specific optimizations (monomorphic dispatch, builtin/API
// inlining, full inlining) before falling back to a generic call instruction.
void HOptimizedGraphBuilder::VisitCall(Call* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (!is_tracking_positions()) SetSourcePosition(expr->position());
  Expression* callee = expr->expression();
  int argument_count = expr->arguments()->length() + 1;  // Plus receiver.
  HInstruction* call = NULL;

  TailCallMode syntactic_tail_call_mode = expr->tail_call_mode();
  TailCallMode tail_call_mode =
      function_state()->ComputeTailCallMode(syntactic_tail_call_mode);

  Property* prop = callee->AsProperty();
  if (prop != NULL) {
    // Method call: evaluate the receiver first, then load the callee from it.
    CHECK_ALIVE(VisitForValue(prop->obj()));
    HValue* receiver = Top();

    SmallMapList* maps;
    ComputeReceiverTypes(expr, receiver, &maps, this);

    if (prop->key()->IsPropertyName() && maps->length() > 0) {
      Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
      PropertyAccessInfo info(this, LOAD, maps->first(), name);
      if (!info.CanAccessAsMonomorphic(maps)) {
        // Receiver maps disagree: emit a polymorphic dispatch instead.
        HandlePolymorphicCallNamed(expr, receiver, maps, name);
        return;
      }
    }
    HValue* key = NULL;
    if (!prop->key()->IsPropertyName()) {
      CHECK_ALIVE(VisitForValue(prop->key()));
      key = Pop();
    }

    CHECK_ALIVE(PushLoad(prop, receiver, key));
    HValue* function = Pop();

    if (function->IsConstant() &&
        HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
      // Push the function under the receiver.
      environment()->SetExpressionStackAt(0, function);
      Push(receiver);

      Handle<JSFunction> known_function = Handle<JSFunction>::cast(
          HConstant::cast(function)->handle(isolate()));
      expr->set_target(known_function);

      if (TryIndirectCall(expr)) return;
      CHECK_ALIVE(VisitExpressions(expr->arguments()));

      Handle<Map> map = maps->length() == 1 ? maps->first() : Handle<Map>();
      if (TryInlineBuiltinMethodCall(known_function, map, expr->id(),
                                     expr->arguments()->length())) {
        if (FLAG_trace_inlining) {
          PrintF("Inlining builtin ");
          known_function->ShortPrint();
          PrintF("\n");
        }
        return;
      }
      if (TryInlineApiMethodCall(expr, receiver, maps)) return;

      // Wrap the receiver if necessary.
      if (NeedsWrapping(maps->first(), known_function)) {
        // Since HWrapReceiver currently cannot actually wrap numbers and
        // strings, use the regular call builtin for method calls to wrap
        // the receiver.
        // TODO(verwaest): Support creation of value wrappers directly in
        // HWrapReceiver.
        call = NewCallFunction(
            function, argument_count, syntactic_tail_call_mode,
            ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode);
      } else if (TryInlineCall(expr)) {
        return;
      } else {
        call =
            NewCallConstantFunction(known_function, argument_count,
                                    syntactic_tail_call_mode, tail_call_mode);
      }

    } else {
      // Callee is not a known constant function: generic property call.
      ArgumentsAllowedFlag arguments_flag = ARGUMENTS_NOT_ALLOWED;
      if (CanBeFunctionApplyArguments(expr) && expr->is_uninitialized()) {
        // We have to use EAGER deoptimization here because Deoptimizer::SOFT
        // gets ignored by the always-opt flag, which leads to incorrect code.
        Add<HDeoptimize>(
            DeoptimizeReason::kInsufficientTypeFeedbackForCallWithArguments,
            Deoptimizer::EAGER);
        arguments_flag = ARGUMENTS_FAKED;
      }

      // Push the function under the receiver.
      environment()->SetExpressionStackAt(0, function);
      Push(receiver);

      CHECK_ALIVE(VisitExpressions(expr->arguments(), arguments_flag));
      call = NewCallFunction(function, argument_count, syntactic_tail_call_mode,
                             ConvertReceiverMode::kNotNullOrUndefined,
                             tail_call_mode);
    }
    PushArgumentsFromEnvironment(argument_count);

  } else {
    // Plain (non-property) function call.
    if (expr->is_possibly_eval()) {
      return Bailout(kPossibleDirectCallToEval);
    }

    // The function is on the stack in the unoptimized code during
    // evaluation of the arguments.
    CHECK_ALIVE(VisitForValue(expr->expression()));
    HValue* function = Top();
    if (function->IsConstant() &&
        HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
      Handle<Object> constant = HConstant::cast(function)->handle(isolate());
      Handle<JSFunction> target = Handle<JSFunction>::cast(constant);
      expr->SetKnownGlobalTarget(target);
    }

    // Placeholder for the receiver.
    Push(graph()->GetConstantUndefined());
    CHECK_ALIVE(VisitExpressions(expr->arguments()));

    if (expr->IsMonomorphic() &&
        !IsClassConstructor(expr->target()->shared()->kind())) {
      // Deopt if the observed function differs from the feedback target.
      Add<HCheckValue>(function, expr->target());

      // Patch the global object on the stack by the expected receiver.
      HValue* receiver = ImplicitReceiverFor(function, expr->target());
      const int receiver_index = argument_count - 1;
      environment()->SetExpressionStackAt(receiver_index, receiver);

      if (TryInlineBuiltinFunctionCall(expr)) {
        if (FLAG_trace_inlining) {
          PrintF("Inlining builtin ");
          expr->target()->ShortPrint();
          PrintF("\n");
        }
        return;
      }
      if (TryInlineApiFunctionCall(expr, receiver)) return;
      if (TryHandleArrayCall(expr, function)) return;
      if (TryInlineCall(expr)) return;

      PushArgumentsFromEnvironment(argument_count);
      call = NewCallConstantFunction(expr->target(), argument_count,
                                     syntactic_tail_call_mode, tail_call_mode);
    } else {
      PushArgumentsFromEnvironment(argument_count);
      if (expr->is_uninitialized()) {
        // We've never seen this call before, so let's have Crankshaft learn
        // through the type vector.
        call = NewCallFunctionViaIC(function, argument_count,
                                    syntactic_tail_call_mode,
                                    ConvertReceiverMode::kNullOrUndefined,
                                    tail_call_mode, expr->CallFeedbackICSlot());
      } else {
        call = NewCallFunction(
            function, argument_count, syntactic_tail_call_mode,
            ConvertReceiverMode::kNullOrUndefined, tail_call_mode);
      }
    }
  }

  Drop(1);  // Drop the function.
  return ast_context()->ReturnInstruction(call, expr->id());
}
9580
// Tries to replace a call to the Array function (with at most one argument)
// by an inline array allocation. Returns false — emitting nothing — when the
// allocation site disallows inlining, when there is more than one argument,
// or when the single length argument is not a small non-negative constant.
bool HOptimizedGraphBuilder::TryInlineArrayCall(Expression* expression,
                                                int argument_count,
                                                Handle<AllocationSite> site) {
  Handle<JSFunction> caller = current_info()->closure();
  Handle<JSFunction> target = array_function();

  if (!site->CanInlineCall()) {
    TraceInline(target, caller, "AllocationSite requested no inlining.");
    return false;
  }

  if (argument_count > 1) {
    TraceInline(target, caller, "Too many arguments to inline.");
    return false;
  }

  int array_length = 0;
  // Do not inline if the constant length argument is not a smi or outside the
  // valid range for unrolled loop initialization.
  if (argument_count == 1) {
    HValue* argument = Top();
    if (!argument->IsConstant()) {
      TraceInline(target, caller,
                  "Dont inline [new] Array(n) where n isn't constant.");
      return false;
    }

    HConstant* constant_argument = HConstant::cast(argument);
    if (!constant_argument->HasSmiValue()) {
      TraceInline(target, caller,
                  "Constant length outside of valid inlining range.");
      return false;
    }
    array_length = constant_argument->Integer32Value();
    if (array_length < 0 || array_length > kElementLoopUnrollThreshold) {
      TraceInline(target, caller,
                  "Constant length outside of valid inlining range.");
      return false;
    }
  }

  TraceInline(target, caller, NULL);

  NoObservableSideEffectsScope no_effects(this);

  // Register on the site for deoptimization if the transition feedback changes.
  top_info()->dependencies()->AssumeTransitionStable(site);

  // Build the array.
  ElementsKind kind = site->GetElementsKind();
  HValue* capacity;
  HValue* length;
  if (array_length == 0) {
    // No length argument: empty array with the default preallocated capacity.
    STATIC_ASSERT(0 < JSArray::kPreallocatedArrayElements);
    const int initial_capacity = JSArray::kPreallocatedArrayElements;
    capacity = Add<HConstant>(initial_capacity);
    length = graph()->GetConstant0();
  } else {
    // Array(n): the length argument is still on the expression stack.
    length = Top();
    capacity = length;
    kind = GetHoleyElementsKind(kind);
  }

  // These HForceRepresentations are because we store these as fields in the
  // objects we construct, and an int32-to-smi HChange could deopt. Accept
  // the deopt possibility now, before allocation occurs.
  length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
  capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());

  // Generate size calculation code here in order to make it dominate
  // the JSArray allocation.
  HValue* elements_size = BuildCalculateElementsSize(kind, capacity);

  // Bail out for large objects.
  HValue* max_size = Add<HConstant>(kMaxRegularHeapObjectSize);
  Add<HBoundsCheck>(elements_size, max_size);

  // Allocate (dealing with failure appropriately).
  AllocationSiteMode mode = DONT_TRACK_ALLOCATION_SITE;
  HAllocate* new_object = AllocateJSArrayObject(mode);

  // Fill in the fields: map, properties, length.
  Handle<Map> map_constant(isolate()->get_initial_js_array_map(kind));
  HValue* map = Add<HConstant>(map_constant);

  BuildJSArrayHeader(new_object, map,
                     nullptr,  // set elements to empty fixed array
                     mode, kind, nullptr, length);

  // Allocate and initialize the elements.
  HAllocate* elements = BuildAllocateElements(kind, elements_size);
  BuildInitializeElementsHeader(elements, kind, capacity);
  BuildFillElementsWithHole(elements, kind, graph()->GetConstant0(), capacity);

  // Set the elements.
  Add<HStoreNamedField>(new_object, HObjectAccess::ForElementsPointer(),
                        elements);

  // Drop the consumed stack slots: the arguments plus two slots for a call
  // expression, or one slot for a 'new' expression.
  int args_to_drop = argument_count + (expression->IsCall() ? 2 : 1);
  Drop(args_to_drop);
  ast_context()->ReturnValue(new_object);
  return true;
}
9684
9685
9686 // Checks whether allocation using the given constructor can be inlined.
IsAllocationInlineable(Handle<JSFunction> constructor)9687 static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
9688 return constructor->has_initial_map() &&
9689 !IsSubclassConstructor(constructor->shared()->kind()) &&
9690 constructor->initial_map()->instance_type() == JS_OBJECT_TYPE &&
9691 constructor->initial_map()->instance_size() <
9692 HAllocate::kMaxInlineSize;
9693 }
9694
// Compiles a 'new' expression. For a known, monomorphic constructor whose
// allocation is inlineable, the receiver object is allocated and initialized
// inline and construction is attempted via TryInlineConstruct; on failure the
// inline allocation is torn back out. Otherwise the special-cased Array
// constructor path is tried, then the generic Construct stub is used.
void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (!is_tracking_positions()) SetSourcePosition(expr->position());
  int argument_count = expr->arguments()->length() + 1;  // Plus constructor.
  Factory* factory = isolate()->factory();

  // The constructor function is on the stack in the unoptimized code
  // during evaluation of the arguments.
  CHECK_ALIVE(VisitForValue(expr->expression()));
  HValue* function = Top();
  CHECK_ALIVE(VisitExpressions(expr->arguments()));

  if (function->IsConstant() &&
      HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
    Handle<Object> constant = HConstant::cast(function)->handle(isolate());
    expr->SetKnownGlobalTarget(Handle<JSFunction>::cast(constant));
  }

  if (FLAG_inline_construct &&
      expr->IsMonomorphic() &&
      IsAllocationInlineable(expr->target())) {
    Handle<JSFunction> constructor = expr->target();
    DCHECK(
        constructor->shared()->construct_stub() ==
            isolate()->builtins()->builtin(Builtins::kJSConstructStubGeneric) ||
        constructor->shared()->construct_stub() ==
            isolate()->builtins()->builtin(Builtins::kJSConstructStubApi));
    // Deopt if the function observed at runtime is not this constructor.
    HValue* check = Add<HCheckValue>(function, constructor);

    // Force completion of inobject slack tracking before generating
    // allocation code to finalize instance size.
    constructor->CompleteInobjectSlackTrackingIfActive();

    // Calculate instance size from initial map of constructor.
    DCHECK(constructor->has_initial_map());
    Handle<Map> initial_map(constructor->initial_map());
    int instance_size = initial_map->instance_size();

    // Allocate an instance of the implicit receiver object.
    HValue* size_in_bytes = Add<HConstant>(instance_size);
    HAllocationMode allocation_mode;
    HAllocate* receiver = BuildAllocate(
        size_in_bytes, HType::JSObject(), JS_OBJECT_TYPE, allocation_mode);
    receiver->set_known_initial_map(initial_map);

    // Initialize map and fields of the newly allocated object.
    { NoObservableSideEffectsScope no_effects(this);
      DCHECK(initial_map->instance_type() == JS_OBJECT_TYPE);
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForMapAndOffset(initial_map, JSObject::kMapOffset),
          Add<HConstant>(initial_map));
      HValue* empty_fixed_array = Add<HConstant>(factory->empty_fixed_array());
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForMapAndOffset(initial_map,
                                         JSObject::kPropertiesOffset),
          empty_fixed_array);
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForMapAndOffset(initial_map,
                                         JSObject::kElementsOffset),
          empty_fixed_array);
      BuildInitializeInobjectProperties(receiver, initial_map);
    }

    // Replace the constructor function with a newly allocated receiver using
    // the index of the receiver from the top of the expression stack.
    const int receiver_index = argument_count - 1;
    DCHECK(environment()->ExpressionStackAt(receiver_index) == function);
    environment()->SetExpressionStackAt(receiver_index, receiver);

    if (TryInlineConstruct(expr, receiver)) {
      // Inlining worked, add a dependency on the initial map to make sure that
      // this code is deoptimized whenever the initial map of the constructor
      // changes.
      top_info()->dependencies()->AssumeInitialMapCantChange(initial_map);
      return;
    }

    // TODO(mstarzinger): For now we remove the previous HAllocate and all
    // corresponding instructions and instead add HPushArguments for the
    // arguments in case inlining failed. What we actually should do is for
    // inlining to try to build a subgraph without mutating the parent graph.
    HInstruction* instr = current_block()->last();
    do {
      HInstruction* prev_instr = instr->previous();
      instr->DeleteAndReplaceWith(NULL);
      instr = prev_instr;
    } while (instr != check);
    environment()->SetExpressionStackAt(receiver_index, function);
  } else {
    // The constructor function is both an operand to the instruction and an
    // argument to the construct call.
    if (TryHandleArrayCall(expr, function)) return;
  }

  // Generic fallback: invoke the Construct stub.
  HValue* arity = Add<HConstant>(argument_count - 1);
  HValue* op_vals[] = {function, function, arity};
  Callable callable = CodeFactory::Construct(isolate());
  HConstant* stub = Add<HConstant>(callable.code());
  PushArgumentsFromEnvironment(argument_count);
  HInstruction* construct = New<HCallWithDescriptor>(
      stub, argument_count, callable.descriptor(), ArrayVector(op_vals));
  return ast_context()->ReturnInstruction(construct, expr->id());
}
9800
9801
BuildInitializeInobjectProperties(HValue * receiver,Handle<Map> initial_map)9802 void HOptimizedGraphBuilder::BuildInitializeInobjectProperties(
9803 HValue* receiver, Handle<Map> initial_map) {
9804 if (initial_map->GetInObjectProperties() != 0) {
9805 HConstant* undefined = graph()->GetConstantUndefined();
9806 for (int i = 0; i < initial_map->GetInObjectProperties(); i++) {
9807 int property_offset = initial_map->GetInObjectPropertyOffset(i);
9808 Add<HStoreNamedField>(receiver, HObjectAccess::ForMapAndOffset(
9809 initial_map, property_offset),
9810 undefined);
9811 }
9812 }
9813 }
9814
9815
// Allocates and fully initializes a JSArrayBuffer with no backing store
// (backing-store pointer 0) and the given byte length. The buffer is marked
// external and neuterable, and its embedder internal fields are cleared.
HValue* HGraphBuilder::BuildAllocateEmptyArrayBuffer(HValue* byte_length) {
  // We HForceRepresentation here to avoid allocations during an *-to-tagged
  // HChange that could cause GC while the array buffer object is not fully
  // initialized.
  HObjectAccess byte_length_access(HObjectAccess::ForJSArrayBufferByteLength());
  byte_length = AddUncasted<HForceRepresentation>(
      byte_length, byte_length_access.representation());
  HAllocate* result =
      BuildAllocate(Add<HConstant>(JSArrayBuffer::kSizeWithInternalFields),
                    HType::JSObject(), JS_ARRAY_BUFFER_TYPE, HAllocationMode());

  // Install the ArrayBuffer map taken from the native context.
  HValue* native_context = BuildGetNativeContext();
  Add<HStoreNamedField>(
      result, HObjectAccess::ForMap(),
      Add<HLoadNamedField>(
          native_context, nullptr,
          HObjectAccess::ForContextSlot(Context::ARRAY_BUFFER_MAP_INDEX)));

  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
      empty_fixed_array);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
      empty_fixed_array);
  // No backing store: write a zero pointer (smi representation).
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayBufferBackingStore().WithRepresentation(
                  Representation::Smi()),
      graph()->GetConstant0());
  Add<HStoreNamedField>(result, byte_length_access, byte_length);
  Add<HStoreNamedField>(result, HObjectAccess::ForJSArrayBufferBitFieldSlot(),
                        graph()->GetConstant0());
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayBufferBitField(),
      Add<HConstant>((1 << JSArrayBuffer::IsExternal::kShift) |
                     (1 << JSArrayBuffer::IsNeuterable::kShift)));

  // Clear the embedder internal fields.
  for (int field = 0; field < v8::ArrayBuffer::kInternalFieldCount; ++field) {
    Add<HStoreNamedField>(
        result,
        HObjectAccess::ForObservableJSObjectOffset(
            JSArrayBuffer::kSize + field * kPointerSize, Representation::Smi()),
        graph()->GetConstant0());
  }

  return result;
}
9864
9865
9866 template <class ViewClass>
BuildArrayBufferViewInitialization(HValue * obj,HValue * buffer,HValue * byte_offset,HValue * byte_length)9867 void HGraphBuilder::BuildArrayBufferViewInitialization(
9868 HValue* obj,
9869 HValue* buffer,
9870 HValue* byte_offset,
9871 HValue* byte_length) {
9872
9873 for (int offset = ViewClass::kSize;
9874 offset < ViewClass::kSizeWithInternalFields;
9875 offset += kPointerSize) {
9876 Add<HStoreNamedField>(obj,
9877 HObjectAccess::ForObservableJSObjectOffset(offset),
9878 graph()->GetConstant0());
9879 }
9880
9881 Add<HStoreNamedField>(
9882 obj,
9883 HObjectAccess::ForJSArrayBufferViewByteOffset(),
9884 byte_offset);
9885 Add<HStoreNamedField>(
9886 obj,
9887 HObjectAccess::ForJSArrayBufferViewByteLength(),
9888 byte_length);
9889 Add<HStoreNamedField>(obj, HObjectAccess::ForJSArrayBufferViewBuffer(),
9890 buffer);
9891 }
9892
9893
// Allocates a fixed typed array elements object whose data lives in the
// externally allocated backing store of |buffer| (at |byte_offset|) rather
// than on the V8 heap; only the header is heap-allocated.
HValue* HOptimizedGraphBuilder::BuildAllocateExternalElements(
    ExternalArrayType array_type,
    bool is_zero_byte_offset,
    HValue* buffer, HValue* byte_offset, HValue* length) {
  Handle<Map> external_array_map(
      isolate()->heap()->MapForFixedTypedArray(array_type));

  // The HForceRepresentation is to prevent possible deopt on int-smi
  // conversion after allocation but before the new object fields are set.
  length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
  HValue* elements = Add<HAllocate>(
      Add<HConstant>(FixedTypedArrayBase::kHeaderSize), HType::HeapObject(),
      NOT_TENURED, external_array_map->instance_type(),
      graph()->GetConstant0());

  AddStoreMapConstant(elements, external_array_map);
  Add<HStoreNamedField>(elements,
      HObjectAccess::ForFixedArrayLength(), length);

  HValue* backing_store = Add<HLoadNamedField>(
      buffer, nullptr, HObjectAccess::ForJSArrayBufferBackingStore());

  // Compute the absolute start address of the view's data.
  HValue* typed_array_start;
  if (is_zero_byte_offset) {
    typed_array_start = backing_store;
  } else {
    HInstruction* external_pointer =
        AddUncasted<HAdd>(backing_store, byte_offset);
    // Arguments are checked prior to call to TypedArrayInitialize,
    // including byte_offset.
    external_pointer->ClearFlag(HValue::kCanOverflow);
    typed_array_start = external_pointer;
  }

  // External data: base pointer is 0 and the external pointer holds the
  // absolute data address.
  Add<HStoreNamedField>(elements,
                        HObjectAccess::ForFixedTypedArrayBaseBasePointer(),
                        graph()->GetConstant0());
  Add<HStoreNamedField>(elements,
                        HObjectAccess::ForFixedTypedArrayBaseExternalPointer(),
                        typed_array_start);

  return elements;
}
9937
9938
// Allocates a fixed typed array (header plus on-heap data payload) for the
// given element type. When |initialize| is true, the payload is zero-filled
// with an emitted loop.
HValue* HOptimizedGraphBuilder::BuildAllocateFixedTypedArray(
    ExternalArrayType array_type, size_t element_size,
    ElementsKind fixed_elements_kind, HValue* byte_length, HValue* length,
    bool initialize) {
  STATIC_ASSERT(
      (FixedTypedArrayBase::kHeaderSize & kObjectAlignmentMask) == 0);
  HValue* total_size;

  // if fixed array's elements are not aligned to object's alignment,
  // we need to align the whole array to object alignment.
  if (element_size % kObjectAlignment != 0) {
    total_size = BuildObjectSizeAlignment(
        byte_length, FixedTypedArrayBase::kHeaderSize);
  } else {
    total_size = AddUncasted<HAdd>(byte_length,
        Add<HConstant>(FixedTypedArrayBase::kHeaderSize));
    total_size->ClearFlag(HValue::kCanOverflow);
  }

  // The HForceRepresentation is to prevent possible deopt on int-smi
  // conversion after allocation but before the new object fields are set.
  length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
  Handle<Map> fixed_typed_array_map(
      isolate()->heap()->MapForFixedTypedArray(array_type));
  HAllocate* elements = Add<HAllocate>(
      total_size, HType::HeapObject(), NOT_TENURED,
      fixed_typed_array_map->instance_type(), graph()->GetConstant0());

#ifndef V8_HOST_ARCH_64_BIT
  // On 32-bit hosts, float64 element data must be explicitly double-aligned.
  if (array_type == kExternalFloat64Array) {
    elements->MakeDoubleAligned();
  }
#endif

  AddStoreMapConstant(elements, fixed_typed_array_map);

  Add<HStoreNamedField>(elements,
                        HObjectAccess::ForFixedArrayLength(),
                        length);
  // On-heap data: the base pointer refers to the elements object itself.
  Add<HStoreNamedField>(
      elements, HObjectAccess::ForFixedTypedArrayBaseBasePointer(), elements);

  Add<HStoreNamedField>(
      elements, HObjectAccess::ForFixedTypedArrayBaseExternalPointer(),
      Add<HConstant>(ExternalReference::fixed_typed_array_base_data_offset()));

  HValue* filler = Add<HConstant>(static_cast<int32_t>(0));

  if (initialize) {
    // Zero-fill the payload with a generated loop over all elements.
    LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);

    HValue* backing_store = AddUncasted<HAdd>(
        Add<HConstant>(ExternalReference::fixed_typed_array_base_data_offset()),
        elements, AddOfExternalAndTagged);

    HValue* key = builder.BeginBody(
        Add<HConstant>(static_cast<int32_t>(0)),
        length, Token::LT);
    Add<HStoreKeyed>(backing_store, key, filler, elements, fixed_elements_kind);

    builder.EndBody();
  }
  return elements;
}
10003
10004
GenerateTypedArrayInitialize(CallRuntime * expr)10005 void HOptimizedGraphBuilder::GenerateTypedArrayInitialize(
10006 CallRuntime* expr) {
10007 ZoneList<Expression*>* arguments = expr->arguments();
10008
10009 static const int kObjectArg = 0;
10010 static const int kArrayIdArg = 1;
10011 static const int kBufferArg = 2;
10012 static const int kByteOffsetArg = 3;
10013 static const int kByteLengthArg = 4;
10014 static const int kInitializeArg = 5;
10015 static const int kArgsLength = 6;
10016 DCHECK(arguments->length() == kArgsLength);
10017
10018
10019 CHECK_ALIVE(VisitForValue(arguments->at(kObjectArg)));
10020 HValue* obj = Pop();
10021
10022 if (!arguments->at(kArrayIdArg)->IsLiteral()) {
10023 // This should never happen in real use, but can happen when fuzzing.
10024 // Just bail out.
10025 Bailout(kNeedSmiLiteral);
10026 return;
10027 }
10028 Handle<Object> value =
10029 static_cast<Literal*>(arguments->at(kArrayIdArg))->value();
10030 if (!value->IsSmi()) {
10031 // This should never happen in real use, but can happen when fuzzing.
10032 // Just bail out.
10033 Bailout(kNeedSmiLiteral);
10034 return;
10035 }
10036 int array_id = Smi::cast(*value)->value();
10037
10038 HValue* buffer;
10039 if (!arguments->at(kBufferArg)->IsNullLiteral()) {
10040 CHECK_ALIVE(VisitForValue(arguments->at(kBufferArg)));
10041 buffer = Pop();
10042 } else {
10043 buffer = NULL;
10044 }
10045
10046 HValue* byte_offset;
10047 bool is_zero_byte_offset;
10048
10049 if (arguments->at(kByteOffsetArg)->IsLiteral() &&
10050 Smi::kZero ==
10051 *static_cast<Literal*>(arguments->at(kByteOffsetArg))->value()) {
10052 byte_offset = Add<HConstant>(static_cast<int32_t>(0));
10053 is_zero_byte_offset = true;
10054 } else {
10055 CHECK_ALIVE(VisitForValue(arguments->at(kByteOffsetArg)));
10056 byte_offset = Pop();
10057 is_zero_byte_offset = false;
10058 DCHECK(buffer != NULL);
10059 }
10060
10061 CHECK_ALIVE(VisitForValue(arguments->at(kByteLengthArg)));
10062 HValue* byte_length = Pop();
10063
10064 CHECK(arguments->at(kInitializeArg)->IsLiteral());
10065 bool initialize = static_cast<Literal*>(arguments->at(kInitializeArg))
10066 ->value()
10067 ->BooleanValue();
10068
10069 NoObservableSideEffectsScope scope(this);
10070 IfBuilder byte_offset_smi(this);
10071
10072 if (!is_zero_byte_offset) {
10073 byte_offset_smi.If<HIsSmiAndBranch>(byte_offset);
10074 byte_offset_smi.Then();
10075 }
10076
10077 ExternalArrayType array_type =
10078 kExternalInt8Array; // Bogus initialization.
10079 size_t element_size = 1; // Bogus initialization.
10080 ElementsKind fixed_elements_kind = // Bogus initialization.
10081 INT8_ELEMENTS;
10082 Runtime::ArrayIdToTypeAndSize(array_id,
10083 &array_type,
10084 &fixed_elements_kind,
10085 &element_size);
10086
10087
10088 { // byte_offset is Smi.
10089 HValue* allocated_buffer = buffer;
10090 if (buffer == NULL) {
10091 allocated_buffer = BuildAllocateEmptyArrayBuffer(byte_length);
10092 }
10093 BuildArrayBufferViewInitialization<JSTypedArray>(obj, allocated_buffer,
10094 byte_offset, byte_length);
10095
10096
10097 HInstruction* length = AddUncasted<HDiv>(byte_length,
10098 Add<HConstant>(static_cast<int32_t>(element_size)));
10099 // Callers (in typedarray.js) ensure that length <= %_MaxSmi().
10100 length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
10101
10102 Add<HStoreNamedField>(obj,
10103 HObjectAccess::ForJSTypedArrayLength(),
10104 length);
10105
10106 HValue* elements;
10107 if (buffer != NULL) {
10108 elements = BuildAllocateExternalElements(
10109 array_type, is_zero_byte_offset, buffer, byte_offset, length);
10110 } else {
10111 DCHECK(is_zero_byte_offset);
10112 elements = BuildAllocateFixedTypedArray(array_type, element_size,
10113 fixed_elements_kind, byte_length,
10114 length, initialize);
10115 }
10116 Add<HStoreNamedField>(
10117 obj, HObjectAccess::ForElementsPointer(), elements);
10118 }
10119
10120 if (!is_zero_byte_offset) {
10121 byte_offset_smi.Else();
10122 { // byte_offset is not Smi.
10123 Push(obj);
10124 CHECK_ALIVE(VisitForValue(arguments->at(kArrayIdArg)));
10125 Push(buffer);
10126 Push(byte_offset);
10127 Push(byte_length);
10128 CHECK_ALIVE(VisitForValue(arguments->at(kInitializeArg)));
10129 PushArgumentsFromEnvironment(kArgsLength);
10130 Add<HCallRuntime>(expr->function(), kArgsLength);
10131 }
10132 }
10133 byte_offset_smi.End();
10134 }
10135
10136
GenerateMaxSmi(CallRuntime * expr)10137 void HOptimizedGraphBuilder::GenerateMaxSmi(CallRuntime* expr) {
10138 DCHECK(expr->arguments()->length() == 0);
10139 HConstant* max_smi = New<HConstant>(static_cast<int32_t>(Smi::kMaxValue));
10140 return ast_context()->ReturnInstruction(max_smi, expr->id());
10141 }
10142
10143
GenerateTypedArrayMaxSizeInHeap(CallRuntime * expr)10144 void HOptimizedGraphBuilder::GenerateTypedArrayMaxSizeInHeap(
10145 CallRuntime* expr) {
10146 DCHECK(expr->arguments()->length() == 0);
10147 HConstant* result = New<HConstant>(static_cast<int32_t>(
10148 FLAG_typed_array_max_size_in_heap));
10149 return ast_context()->ReturnInstruction(result, expr->id());
10150 }
10151
10152
GenerateArrayBufferGetByteLength(CallRuntime * expr)10153 void HOptimizedGraphBuilder::GenerateArrayBufferGetByteLength(
10154 CallRuntime* expr) {
10155 DCHECK(expr->arguments()->length() == 1);
10156 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
10157 HValue* buffer = Pop();
10158 HInstruction* result = New<HLoadNamedField>(
10159 buffer, nullptr, HObjectAccess::ForJSArrayBufferByteLength());
10160 return ast_context()->ReturnInstruction(result, expr->id());
10161 }
10162
10163
GenerateArrayBufferViewGetByteLength(CallRuntime * expr)10164 void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteLength(
10165 CallRuntime* expr) {
10166 NoObservableSideEffectsScope scope(this);
10167 DCHECK(expr->arguments()->length() == 1);
10168 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
10169 HValue* view = Pop();
10170
10171 return ast_context()->ReturnValue(BuildArrayBufferViewFieldAccessor(
10172 view, nullptr,
10173 FieldIndex::ForInObjectOffset(JSArrayBufferView::kByteLengthOffset)));
10174 }
10175
10176
GenerateArrayBufferViewGetByteOffset(CallRuntime * expr)10177 void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteOffset(
10178 CallRuntime* expr) {
10179 NoObservableSideEffectsScope scope(this);
10180 DCHECK(expr->arguments()->length() == 1);
10181 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
10182 HValue* view = Pop();
10183
10184 return ast_context()->ReturnValue(BuildArrayBufferViewFieldAccessor(
10185 view, nullptr,
10186 FieldIndex::ForInObjectOffset(JSArrayBufferView::kByteOffsetOffset)));
10187 }
10188
10189
GenerateTypedArrayGetLength(CallRuntime * expr)10190 void HOptimizedGraphBuilder::GenerateTypedArrayGetLength(
10191 CallRuntime* expr) {
10192 NoObservableSideEffectsScope scope(this);
10193 DCHECK(expr->arguments()->length() == 1);
10194 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
10195 HValue* view = Pop();
10196
10197 return ast_context()->ReturnValue(BuildArrayBufferViewFieldAccessor(
10198 view, nullptr,
10199 FieldIndex::ForInObjectOffset(JSTypedArray::kLengthOffset)));
10200 }
10201
10202
VisitCallRuntime(CallRuntime * expr)10203 void HOptimizedGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
10204 DCHECK(!HasStackOverflow());
10205 DCHECK(current_block() != NULL);
10206 DCHECK(current_block()->HasPredecessor());
10207 if (expr->is_jsruntime()) {
10208 // Crankshaft always specializes to the native context, so we can just grab
10209 // the constant function from the current native context and embed that into
10210 // the code object.
10211 Handle<JSFunction> known_function(
10212 JSFunction::cast(
10213 current_info()->native_context()->get(expr->context_index())),
10214 isolate());
10215
10216 // The callee and the receiver both have to be pushed onto the operand stack
10217 // before arguments are being evaluated.
10218 HConstant* function = Add<HConstant>(known_function);
10219 HValue* receiver = ImplicitReceiverFor(function, known_function);
10220 Push(function);
10221 Push(receiver);
10222
10223 int argument_count = expr->arguments()->length() + 1; // Count receiver.
10224 CHECK_ALIVE(VisitExpressions(expr->arguments()));
10225 PushArgumentsFromEnvironment(argument_count);
10226 HInstruction* call = NewCallConstantFunction(known_function, argument_count,
10227 TailCallMode::kDisallow,
10228 TailCallMode::kDisallow);
10229 Drop(1); // Function
10230 return ast_context()->ReturnInstruction(call, expr->id());
10231 }
10232
10233 const Runtime::Function* function = expr->function();
10234 DCHECK(function != NULL);
10235 switch (function->function_id) {
10236 #define CALL_INTRINSIC_GENERATOR(Name) \
10237 case Runtime::kInline##Name: \
10238 return Generate##Name(expr);
10239
10240 FOR_EACH_HYDROGEN_INTRINSIC(CALL_INTRINSIC_GENERATOR)
10241 #undef CALL_INTRINSIC_GENERATOR
10242 default: {
10243 int argument_count = expr->arguments()->length();
10244 CHECK_ALIVE(VisitExpressions(expr->arguments()));
10245 PushArgumentsFromEnvironment(argument_count);
10246 HCallRuntime* call = New<HCallRuntime>(function, argument_count);
10247 return ast_context()->ReturnInstruction(call, expr->id());
10248 }
10249 }
10250 }
10251
10252
VisitUnaryOperation(UnaryOperation * expr)10253 void HOptimizedGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
10254 DCHECK(!HasStackOverflow());
10255 DCHECK(current_block() != NULL);
10256 DCHECK(current_block()->HasPredecessor());
10257 switch (expr->op()) {
10258 case Token::DELETE: return VisitDelete(expr);
10259 case Token::VOID: return VisitVoid(expr);
10260 case Token::TYPEOF: return VisitTypeof(expr);
10261 case Token::NOT: return VisitNot(expr);
10262 default: UNREACHABLE();
10263 }
10264 }
10265
10266
VisitDelete(UnaryOperation * expr)10267 void HOptimizedGraphBuilder::VisitDelete(UnaryOperation* expr) {
10268 Property* prop = expr->expression()->AsProperty();
10269 VariableProxy* proxy = expr->expression()->AsVariableProxy();
10270 if (prop != NULL) {
10271 CHECK_ALIVE(VisitForValue(prop->obj()));
10272 CHECK_ALIVE(VisitForValue(prop->key()));
10273 HValue* key = Pop();
10274 HValue* obj = Pop();
10275 Add<HPushArguments>(obj, key);
10276 HInstruction* instr = New<HCallRuntime>(
10277 Runtime::FunctionForId(is_strict(function_language_mode())
10278 ? Runtime::kDeleteProperty_Strict
10279 : Runtime::kDeleteProperty_Sloppy),
10280 2);
10281 return ast_context()->ReturnInstruction(instr, expr->id());
10282 } else if (proxy != NULL) {
10283 Variable* var = proxy->var();
10284 if (var->IsUnallocated()) {
10285 Bailout(kDeleteWithGlobalVariable);
10286 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
10287 // Result of deleting non-global variables is false. 'this' is not really
10288 // a variable, though we implement it as one. The subexpression does not
10289 // have side effects.
10290 HValue* value = var->is_this() ? graph()->GetConstantTrue()
10291 : graph()->GetConstantFalse();
10292 return ast_context()->ReturnValue(value);
10293 } else {
10294 Bailout(kDeleteWithNonGlobalVariable);
10295 }
10296 } else {
10297 // Result of deleting non-property, non-variable reference is true.
10298 // Evaluate the subexpression for side effects.
10299 CHECK_ALIVE(VisitForEffect(expr->expression()));
10300 return ast_context()->ReturnValue(graph()->GetConstantTrue());
10301 }
10302 }
10303
10304
VisitVoid(UnaryOperation * expr)10305 void HOptimizedGraphBuilder::VisitVoid(UnaryOperation* expr) {
10306 CHECK_ALIVE(VisitForEffect(expr->expression()));
10307 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
10308 }
10309
10310
VisitTypeof(UnaryOperation * expr)10311 void HOptimizedGraphBuilder::VisitTypeof(UnaryOperation* expr) {
10312 CHECK_ALIVE(VisitForTypeOf(expr->expression()));
10313 HValue* value = Pop();
10314 HInstruction* instr = New<HTypeof>(value);
10315 return ast_context()->ReturnInstruction(instr, expr->id());
10316 }
10317
10318
VisitNot(UnaryOperation * expr)10319 void HOptimizedGraphBuilder::VisitNot(UnaryOperation* expr) {
10320 if (ast_context()->IsTest()) {
10321 TestContext* context = TestContext::cast(ast_context());
10322 VisitForControl(expr->expression(),
10323 context->if_false(),
10324 context->if_true());
10325 return;
10326 }
10327
10328 if (ast_context()->IsEffect()) {
10329 VisitForEffect(expr->expression());
10330 return;
10331 }
10332
10333 DCHECK(ast_context()->IsValue());
10334 HBasicBlock* materialize_false = graph()->CreateBasicBlock();
10335 HBasicBlock* materialize_true = graph()->CreateBasicBlock();
10336 CHECK_BAILOUT(VisitForControl(expr->expression(),
10337 materialize_false,
10338 materialize_true));
10339
10340 if (materialize_false->HasPredecessor()) {
10341 materialize_false->SetJoinId(expr->MaterializeFalseId());
10342 set_current_block(materialize_false);
10343 Push(graph()->GetConstantFalse());
10344 } else {
10345 materialize_false = NULL;
10346 }
10347
10348 if (materialize_true->HasPredecessor()) {
10349 materialize_true->SetJoinId(expr->MaterializeTrueId());
10350 set_current_block(materialize_true);
10351 Push(graph()->GetConstantTrue());
10352 } else {
10353 materialize_true = NULL;
10354 }
10355
10356 HBasicBlock* join =
10357 CreateJoin(materialize_false, materialize_true, expr->id());
10358 set_current_block(join);
10359 if (join != NULL) return ast_context()->ReturnValue(Pop());
10360 }
10361
RepresentationFor(AstType * type)10362 static Representation RepresentationFor(AstType* type) {
10363 DisallowHeapAllocation no_allocation;
10364 if (type->Is(AstType::None())) return Representation::None();
10365 if (type->Is(AstType::SignedSmall())) return Representation::Smi();
10366 if (type->Is(AstType::Signed32())) return Representation::Integer32();
10367 if (type->Is(AstType::Number())) return Representation::Double();
10368 return Representation::Tagged();
10369 }
10370
BuildIncrement(CountOperation * expr)10371 HInstruction* HOptimizedGraphBuilder::BuildIncrement(CountOperation* expr) {
10372 // The input to the count operation is on top of the expression stack.
10373 Representation rep = RepresentationFor(expr->type());
10374 if (rep.IsNone() || rep.IsTagged()) {
10375 rep = Representation::Smi();
10376 }
10377
10378 // We need an explicit HValue representing ToNumber(input). The
10379 // actual HChange instruction we need is (sometimes) added in a later
10380 // phase, so it is not available now to be used as an input to HAdd and
10381 // as the return value.
10382 HInstruction* number_input = AddUncasted<HForceRepresentation>(Pop(), rep);
10383 if (!rep.IsDouble()) {
10384 number_input->SetFlag(HInstruction::kFlexibleRepresentation);
10385 number_input->SetFlag(HInstruction::kCannotBeTagged);
10386 }
10387 Push(number_input);
10388
10389 // The addition has no side effects, so we do not need
10390 // to simulate the expression stack after this instruction.
10391 // Any later failures deopt to the load of the input or earlier.
10392 HConstant* delta = (expr->op() == Token::INC)
10393 ? graph()->GetConstant1()
10394 : graph()->GetConstantMinus1();
10395 HInstruction* instr = AddUncasted<HAdd>(Top(), delta);
10396 if (instr->IsAdd()) {
10397 HAdd* add = HAdd::cast(instr);
10398 add->set_observed_input_representation(1, rep);
10399 add->set_observed_input_representation(2, Representation::Smi());
10400 }
10401 instr->ClearAllSideEffects();
10402 instr->SetFlag(HInstruction::kCannotBeTagged);
10403 return instr;
10404 }
10405
BuildStoreForEffect(Expression * expr,Property * prop,FeedbackVectorSlot slot,BailoutId ast_id,BailoutId return_id,HValue * object,HValue * key,HValue * value)10406 void HOptimizedGraphBuilder::BuildStoreForEffect(
10407 Expression* expr, Property* prop, FeedbackVectorSlot slot, BailoutId ast_id,
10408 BailoutId return_id, HValue* object, HValue* key, HValue* value) {
10409 EffectContext for_effect(this);
10410 Push(object);
10411 if (key != NULL) Push(key);
10412 Push(value);
10413 BuildStore(expr, prop, slot, ast_id, return_id);
10414 }
10415
10416
VisitCountOperation(CountOperation * expr)10417 void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
10418 DCHECK(!HasStackOverflow());
10419 DCHECK(current_block() != NULL);
10420 DCHECK(current_block()->HasPredecessor());
10421 if (!is_tracking_positions()) SetSourcePosition(expr->position());
10422 Expression* target = expr->expression();
10423 VariableProxy* proxy = target->AsVariableProxy();
10424 Property* prop = target->AsProperty();
10425 if (proxy == NULL && prop == NULL) {
10426 return Bailout(kInvalidLhsInCountOperation);
10427 }
10428
10429 // Match the full code generator stack by simulating an extra stack
10430 // element for postfix operations in a non-effect context. The return
10431 // value is ToNumber(input).
10432 bool returns_original_input =
10433 expr->is_postfix() && !ast_context()->IsEffect();
10434 HValue* input = NULL; // ToNumber(original_input).
10435 HValue* after = NULL; // The result after incrementing or decrementing.
10436
10437 if (proxy != NULL) {
10438 Variable* var = proxy->var();
10439 if (var->mode() == CONST) {
10440 return Bailout(kNonInitializerAssignmentToConst);
10441 }
10442 // Argument of the count operation is a variable, not a property.
10443 DCHECK(prop == NULL);
10444 CHECK_ALIVE(VisitForValue(target));
10445
10446 after = BuildIncrement(expr);
10447 input = returns_original_input ? Top() : Pop();
10448 Push(after);
10449
10450 switch (var->location()) {
10451 case VariableLocation::UNALLOCATED:
10452 HandleGlobalVariableAssignment(var, after, expr->CountSlot(),
10453 expr->AssignmentId());
10454 break;
10455
10456 case VariableLocation::PARAMETER:
10457 case VariableLocation::LOCAL:
10458 BindIfLive(var, after);
10459 break;
10460
10461 case VariableLocation::CONTEXT: {
10462 HValue* context = BuildContextChainWalk(var);
10463 HStoreContextSlot::Mode mode = IsLexicalVariableMode(var->mode())
10464 ? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck;
10465 HStoreContextSlot* instr = Add<HStoreContextSlot>(context, var->index(),
10466 mode, after);
10467 if (instr->HasObservableSideEffects()) {
10468 Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
10469 }
10470 break;
10471 }
10472
10473 case VariableLocation::LOOKUP:
10474 return Bailout(kLookupVariableInCountOperation);
10475
10476 case VariableLocation::MODULE:
10477 UNREACHABLE();
10478 }
10479
10480 Drop(returns_original_input ? 2 : 1);
10481 return ast_context()->ReturnValue(expr->is_postfix() ? input : after);
10482 }
10483
10484 // Argument of the count operation is a property.
10485 DCHECK(prop != NULL);
10486 if (returns_original_input) Push(graph()->GetConstantUndefined());
10487
10488 CHECK_ALIVE(VisitForValue(prop->obj()));
10489 HValue* object = Top();
10490
10491 HValue* key = NULL;
10492 if (!prop->key()->IsPropertyName() || prop->IsStringAccess()) {
10493 CHECK_ALIVE(VisitForValue(prop->key()));
10494 key = Top();
10495 }
10496
10497 CHECK_ALIVE(PushLoad(prop, object, key));
10498
10499 after = BuildIncrement(expr);
10500
10501 if (returns_original_input) {
10502 input = Pop();
10503 // Drop object and key to push it again in the effect context below.
10504 Drop(key == NULL ? 1 : 2);
10505 environment()->SetExpressionStackAt(0, input);
10506 CHECK_ALIVE(BuildStoreForEffect(expr, prop, expr->CountSlot(), expr->id(),
10507 expr->AssignmentId(), object, key, after));
10508 return ast_context()->ReturnValue(Pop());
10509 }
10510
10511 environment()->SetExpressionStackAt(0, after);
10512 return BuildStore(expr, prop, expr->CountSlot(), expr->id(),
10513 expr->AssignmentId());
10514 }
10515
10516
BuildStringCharCodeAt(HValue * string,HValue * index)10517 HInstruction* HOptimizedGraphBuilder::BuildStringCharCodeAt(
10518 HValue* string,
10519 HValue* index) {
10520 if (string->IsConstant() && index->IsConstant()) {
10521 HConstant* c_string = HConstant::cast(string);
10522 HConstant* c_index = HConstant::cast(index);
10523 if (c_string->HasStringValue() && c_index->HasNumberValue()) {
10524 int32_t i = c_index->NumberValueAsInteger32();
10525 Handle<String> s = c_string->StringValue();
10526 if (i < 0 || i >= s->length()) {
10527 return New<HConstant>(std::numeric_limits<double>::quiet_NaN());
10528 }
10529 return New<HConstant>(s->Get(i));
10530 }
10531 }
10532 string = BuildCheckString(string);
10533 index = Add<HBoundsCheck>(index, AddLoadStringLength(string));
10534 return New<HStringCharCodeAt>(string, index);
10535 }
10536
10537
10538 // Checks if the given shift amounts have following forms:
10539 // (N1) and (N2) with N1 + N2 = 32; (sa) and (32 - sa).
ShiftAmountsAllowReplaceByRotate(HValue * sa,HValue * const32_minus_sa)10540 static bool ShiftAmountsAllowReplaceByRotate(HValue* sa,
10541 HValue* const32_minus_sa) {
10542 if (sa->IsConstant() && const32_minus_sa->IsConstant()) {
10543 const HConstant* c1 = HConstant::cast(sa);
10544 const HConstant* c2 = HConstant::cast(const32_minus_sa);
10545 return c1->HasInteger32Value() && c2->HasInteger32Value() &&
10546 (c1->Integer32Value() + c2->Integer32Value() == 32);
10547 }
10548 if (!const32_minus_sa->IsSub()) return false;
10549 HSub* sub = HSub::cast(const32_minus_sa);
10550 return sub->left()->EqualsInteger32Constant(32) && sub->right() == sa;
10551 }
10552
10553
10554 // Checks if the left and the right are shift instructions with the oposite
10555 // directions that can be replaced by one rotate right instruction or not.
10556 // Returns the operand and the shift amount for the rotate instruction in the
10557 // former case.
MatchRotateRight(HValue * left,HValue * right,HValue ** operand,HValue ** shift_amount)10558 bool HGraphBuilder::MatchRotateRight(HValue* left,
10559 HValue* right,
10560 HValue** operand,
10561 HValue** shift_amount) {
10562 HShl* shl;
10563 HShr* shr;
10564 if (left->IsShl() && right->IsShr()) {
10565 shl = HShl::cast(left);
10566 shr = HShr::cast(right);
10567 } else if (left->IsShr() && right->IsShl()) {
10568 shl = HShl::cast(right);
10569 shr = HShr::cast(left);
10570 } else {
10571 return false;
10572 }
10573 if (shl->left() != shr->left()) return false;
10574
10575 if (!ShiftAmountsAllowReplaceByRotate(shl->right(), shr->right()) &&
10576 !ShiftAmountsAllowReplaceByRotate(shr->right(), shl->right())) {
10577 return false;
10578 }
10579 *operand = shr->left();
10580 *shift_amount = shr->right();
10581 return true;
10582 }
10583
10584
CanBeZero(HValue * right)10585 bool CanBeZero(HValue* right) {
10586 if (right->IsConstant()) {
10587 HConstant* right_const = HConstant::cast(right);
10588 if (right_const->HasInteger32Value() &&
10589 (right_const->Integer32Value() & 0x1f) != 0) {
10590 return false;
10591 }
10592 }
10593 return true;
10594 }
10595
EnforceNumberType(HValue * number,AstType * expected)10596 HValue* HGraphBuilder::EnforceNumberType(HValue* number, AstType* expected) {
10597 if (expected->Is(AstType::SignedSmall())) {
10598 return AddUncasted<HForceRepresentation>(number, Representation::Smi());
10599 }
10600 if (expected->Is(AstType::Signed32())) {
10601 return AddUncasted<HForceRepresentation>(number,
10602 Representation::Integer32());
10603 }
10604 return number;
10605 }
10606
TruncateToNumber(HValue * value,AstType ** expected)10607 HValue* HGraphBuilder::TruncateToNumber(HValue* value, AstType** expected) {
10608 if (value->IsConstant()) {
10609 HConstant* constant = HConstant::cast(value);
10610 Maybe<HConstant*> number =
10611 constant->CopyToTruncatedNumber(isolate(), zone());
10612 if (number.IsJust()) {
10613 *expected = AstType::Number();
10614 return AddInstruction(number.FromJust());
10615 }
10616 }
10617
10618 // We put temporary values on the stack, which don't correspond to anything
10619 // in baseline code. Since nothing is observable we avoid recording those
10620 // pushes with a NoObservableSideEffectsScope.
10621 NoObservableSideEffectsScope no_effects(this);
10622
10623 AstType* expected_type = *expected;
10624
10625 // Separate the number type from the rest.
10626 AstType* expected_obj =
10627 AstType::Intersect(expected_type, AstType::NonNumber(), zone());
10628 AstType* expected_number =
10629 AstType::Intersect(expected_type, AstType::Number(), zone());
10630
10631 // We expect to get a number.
10632 // (We need to check first, since AstType::None->Is(AstType::Any()) == true.
10633 if (expected_obj->Is(AstType::None())) {
10634 DCHECK(!expected_number->Is(AstType::None()));
10635 return value;
10636 }
10637
10638 if (expected_obj->Is(AstType::Undefined())) {
10639 // This is already done by HChange.
10640 *expected = AstType::Union(expected_number, AstType::Number(), zone());
10641 return value;
10642 }
10643
10644 return value;
10645 }
10646
10647
BuildBinaryOperation(BinaryOperation * expr,HValue * left,HValue * right,PushBeforeSimulateBehavior push_sim_result)10648 HValue* HOptimizedGraphBuilder::BuildBinaryOperation(
10649 BinaryOperation* expr,
10650 HValue* left,
10651 HValue* right,
10652 PushBeforeSimulateBehavior push_sim_result) {
10653 AstType* left_type = bounds_.get(expr->left()).lower;
10654 AstType* right_type = bounds_.get(expr->right()).lower;
10655 AstType* result_type = bounds_.get(expr).lower;
10656 Maybe<int> fixed_right_arg = expr->fixed_right_arg();
10657 Handle<AllocationSite> allocation_site = expr->allocation_site();
10658
10659 HAllocationMode allocation_mode;
10660 if (FLAG_allocation_site_pretenuring && !allocation_site.is_null()) {
10661 allocation_mode = HAllocationMode(allocation_site);
10662 }
10663 HValue* result = HGraphBuilder::BuildBinaryOperation(
10664 expr->op(), left, right, left_type, right_type, result_type,
10665 fixed_right_arg, allocation_mode, expr->id());
10666 // Add a simulate after instructions with observable side effects, and
10667 // after phis, which are the result of BuildBinaryOperation when we
10668 // inlined some complex subgraph.
10669 if (result->HasObservableSideEffects() || result->IsPhi()) {
10670 if (push_sim_result == PUSH_BEFORE_SIMULATE) {
10671 Push(result);
10672 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
10673 Drop(1);
10674 } else {
10675 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
10676 }
10677 }
10678 return result;
10679 }
10680
BuildBinaryOperation(Token::Value op,HValue * left,HValue * right,AstType * left_type,AstType * right_type,AstType * result_type,Maybe<int> fixed_right_arg,HAllocationMode allocation_mode,BailoutId opt_id)10681 HValue* HGraphBuilder::BuildBinaryOperation(
10682 Token::Value op, HValue* left, HValue* right, AstType* left_type,
10683 AstType* right_type, AstType* result_type, Maybe<int> fixed_right_arg,
10684 HAllocationMode allocation_mode, BailoutId opt_id) {
10685 bool maybe_string_add = false;
10686 if (op == Token::ADD) {
10687 // If we are adding constant string with something for which we don't have
10688 // a feedback yet, assume that it's also going to be a string and don't
10689 // generate deopt instructions.
10690 if (!left_type->IsInhabited() && right->IsConstant() &&
10691 HConstant::cast(right)->HasStringValue()) {
10692 left_type = AstType::String();
10693 }
10694
10695 if (!right_type->IsInhabited() && left->IsConstant() &&
10696 HConstant::cast(left)->HasStringValue()) {
10697 right_type = AstType::String();
10698 }
10699
10700 maybe_string_add = (left_type->Maybe(AstType::String()) ||
10701 left_type->Maybe(AstType::Receiver()) ||
10702 right_type->Maybe(AstType::String()) ||
10703 right_type->Maybe(AstType::Receiver()));
10704 }
10705
10706 Representation left_rep = RepresentationFor(left_type);
10707 Representation right_rep = RepresentationFor(right_type);
10708
10709 if (!left_type->IsInhabited()) {
10710 Add<HDeoptimize>(
10711 DeoptimizeReason::kInsufficientTypeFeedbackForLHSOfBinaryOperation,
10712 Deoptimizer::SOFT);
10713 left_type = AstType::Any();
10714 left_rep = RepresentationFor(left_type);
10715 maybe_string_add = op == Token::ADD;
10716 }
10717
10718 if (!right_type->IsInhabited()) {
10719 Add<HDeoptimize>(
10720 DeoptimizeReason::kInsufficientTypeFeedbackForRHSOfBinaryOperation,
10721 Deoptimizer::SOFT);
10722 right_type = AstType::Any();
10723 right_rep = RepresentationFor(right_type);
10724 maybe_string_add = op == Token::ADD;
10725 }
10726
10727 if (!maybe_string_add) {
10728 left = TruncateToNumber(left, &left_type);
10729 right = TruncateToNumber(right, &right_type);
10730 }
10731
10732 // Special case for string addition here.
10733 if (op == Token::ADD &&
10734 (left_type->Is(AstType::String()) || right_type->Is(AstType::String()))) {
10735 // Validate type feedback for left argument.
10736 if (left_type->Is(AstType::String())) {
10737 left = BuildCheckString(left);
10738 }
10739
10740 // Validate type feedback for right argument.
10741 if (right_type->Is(AstType::String())) {
10742 right = BuildCheckString(right);
10743 }
10744
10745 // Convert left argument as necessary.
10746 if (left_type->Is(AstType::Number())) {
10747 DCHECK(right_type->Is(AstType::String()));
10748 left = BuildNumberToString(left, left_type);
10749 } else if (!left_type->Is(AstType::String())) {
10750 DCHECK(right_type->Is(AstType::String()));
10751 return AddUncasted<HStringAdd>(
10752 left, right, allocation_mode.GetPretenureMode(),
10753 STRING_ADD_CONVERT_LEFT, allocation_mode.feedback_site());
10754 }
10755
10756 // Convert right argument as necessary.
10757 if (right_type->Is(AstType::Number())) {
10758 DCHECK(left_type->Is(AstType::String()));
10759 right = BuildNumberToString(right, right_type);
10760 } else if (!right_type->Is(AstType::String())) {
10761 DCHECK(left_type->Is(AstType::String()));
10762 return AddUncasted<HStringAdd>(
10763 left, right, allocation_mode.GetPretenureMode(),
10764 STRING_ADD_CONVERT_RIGHT, allocation_mode.feedback_site());
10765 }
10766
10767 // Fast paths for empty constant strings.
10768 Handle<String> left_string =
10769 left->IsConstant() && HConstant::cast(left)->HasStringValue()
10770 ? HConstant::cast(left)->StringValue()
10771 : Handle<String>();
10772 Handle<String> right_string =
10773 right->IsConstant() && HConstant::cast(right)->HasStringValue()
10774 ? HConstant::cast(right)->StringValue()
10775 : Handle<String>();
10776 if (!left_string.is_null() && left_string->length() == 0) return right;
10777 if (!right_string.is_null() && right_string->length() == 0) return left;
10778 if (!left_string.is_null() && !right_string.is_null()) {
10779 return AddUncasted<HStringAdd>(
10780 left, right, allocation_mode.GetPretenureMode(),
10781 STRING_ADD_CHECK_NONE, allocation_mode.feedback_site());
10782 }
10783
10784 // Register the dependent code with the allocation site.
10785 if (!allocation_mode.feedback_site().is_null()) {
10786 DCHECK(!graph()->info()->IsStub());
10787 Handle<AllocationSite> site(allocation_mode.feedback_site());
10788 top_info()->dependencies()->AssumeTenuringDecision(site);
10789 }
10790
10791 // Inline the string addition into the stub when creating allocation
10792 // mementos to gather allocation site feedback, or if we can statically
10793 // infer that we're going to create a cons string.
10794 if ((graph()->info()->IsStub() &&
10795 allocation_mode.CreateAllocationMementos()) ||
10796 (left->IsConstant() &&
10797 HConstant::cast(left)->HasStringValue() &&
10798 HConstant::cast(left)->StringValue()->length() + 1 >=
10799 ConsString::kMinLength) ||
10800 (right->IsConstant() &&
10801 HConstant::cast(right)->HasStringValue() &&
10802 HConstant::cast(right)->StringValue()->length() + 1 >=
10803 ConsString::kMinLength)) {
10804 return BuildStringAdd(left, right, allocation_mode);
10805 }
10806
10807 // Fallback to using the string add stub.
10808 return AddUncasted<HStringAdd>(
10809 left, right, allocation_mode.GetPretenureMode(), STRING_ADD_CHECK_NONE,
10810 allocation_mode.feedback_site());
10811 }
10812
10813 // Special case for +x here.
10814 if (op == Token::MUL) {
10815 if (left->EqualsInteger32Constant(1)) {
10816 return BuildToNumber(right);
10817 }
10818 if (right->EqualsInteger32Constant(1)) {
10819 return BuildToNumber(left);
10820 }
10821 }
10822
10823 if (graph()->info()->IsStub()) {
10824 left = EnforceNumberType(left, left_type);
10825 right = EnforceNumberType(right, right_type);
10826 }
10827
10828 Representation result_rep = RepresentationFor(result_type);
10829
10830 bool is_non_primitive = (left_rep.IsTagged() && !left_rep.IsSmi()) ||
10831 (right_rep.IsTagged() && !right_rep.IsSmi());
10832
10833 HInstruction* instr = NULL;
10834 // Only the stub is allowed to call into the runtime, since otherwise we would
10835 // inline several instructions (including the two pushes) for every tagged
10836 // operation in optimized code, which is more expensive, than a stub call.
10837 if (graph()->info()->IsStub() && is_non_primitive) {
10838 HValue* values[] = {left, right};
10839 #define GET_STUB(Name) \
10840 do { \
10841 Callable callable = CodeFactory::Name(isolate()); \
10842 HValue* stub = Add<HConstant>(callable.code()); \
10843 instr = AddUncasted<HCallWithDescriptor>(stub, 0, callable.descriptor(), \
10844 ArrayVector(values)); \
10845 } while (false)
10846
10847 switch (op) {
10848 default:
10849 UNREACHABLE();
10850 case Token::ADD:
10851 GET_STUB(Add);
10852 break;
10853 case Token::SUB:
10854 GET_STUB(Subtract);
10855 break;
10856 case Token::MUL:
10857 GET_STUB(Multiply);
10858 break;
10859 case Token::DIV:
10860 GET_STUB(Divide);
10861 break;
10862 case Token::MOD:
10863 GET_STUB(Modulus);
10864 break;
10865 case Token::BIT_OR:
10866 GET_STUB(BitwiseOr);
10867 break;
10868 case Token::BIT_AND:
10869 GET_STUB(BitwiseAnd);
10870 break;
10871 case Token::BIT_XOR:
10872 GET_STUB(BitwiseXor);
10873 break;
10874 case Token::SAR:
10875 GET_STUB(ShiftRight);
10876 break;
10877 case Token::SHR:
10878 GET_STUB(ShiftRightLogical);
10879 break;
10880 case Token::SHL:
10881 GET_STUB(ShiftLeft);
10882 break;
10883 }
10884 #undef GET_STUB
10885 } else {
10886 switch (op) {
10887 case Token::ADD:
10888 instr = AddUncasted<HAdd>(left, right);
10889 break;
10890 case Token::SUB:
10891 instr = AddUncasted<HSub>(left, right);
10892 break;
10893 case Token::MUL:
10894 instr = AddUncasted<HMul>(left, right);
10895 break;
10896 case Token::MOD: {
10897 if (fixed_right_arg.IsJust() &&
10898 !right->EqualsInteger32Constant(fixed_right_arg.FromJust())) {
10899 HConstant* fixed_right =
10900 Add<HConstant>(static_cast<int>(fixed_right_arg.FromJust()));
10901 IfBuilder if_same(this);
10902 if_same.If<HCompareNumericAndBranch>(right, fixed_right, Token::EQ);
10903 if_same.Then();
10904 if_same.ElseDeopt(DeoptimizeReason::kUnexpectedRHSOfBinaryOperation);
10905 right = fixed_right;
10906 }
10907 instr = AddUncasted<HMod>(left, right);
10908 break;
10909 }
10910 case Token::DIV:
10911 instr = AddUncasted<HDiv>(left, right);
10912 break;
10913 case Token::BIT_XOR:
10914 case Token::BIT_AND:
10915 instr = AddUncasted<HBitwise>(op, left, right);
10916 break;
10917 case Token::BIT_OR: {
10918 HValue *operand, *shift_amount;
10919 if (left_type->Is(AstType::Signed32()) &&
10920 right_type->Is(AstType::Signed32()) &&
10921 MatchRotateRight(left, right, &operand, &shift_amount)) {
10922 instr = AddUncasted<HRor>(operand, shift_amount);
10923 } else {
10924 instr = AddUncasted<HBitwise>(op, left, right);
10925 }
10926 break;
10927 }
10928 case Token::SAR:
10929 instr = AddUncasted<HSar>(left, right);
10930 break;
10931 case Token::SHR:
10932 instr = AddUncasted<HShr>(left, right);
10933 if (instr->IsShr() && CanBeZero(right)) {
10934 graph()->RecordUint32Instruction(instr);
10935 }
10936 break;
10937 case Token::SHL:
10938 instr = AddUncasted<HShl>(left, right);
10939 break;
10940 default:
10941 UNREACHABLE();
10942 }
10943 }
10944
10945 if (instr->IsBinaryOperation()) {
10946 HBinaryOperation* binop = HBinaryOperation::cast(instr);
10947 binop->set_observed_input_representation(1, left_rep);
10948 binop->set_observed_input_representation(2, right_rep);
10949 binop->initialize_output_representation(result_rep);
10950 if (graph()->info()->IsStub()) {
10951 // Stub should not call into stub.
10952 instr->SetFlag(HValue::kCannotBeTagged);
10953 // And should truncate on HForceRepresentation already.
10954 if (left->IsForceRepresentation()) {
10955 left->CopyFlag(HValue::kTruncatingToSmi, instr);
10956 left->CopyFlag(HValue::kTruncatingToInt32, instr);
10957 }
10958 if (right->IsForceRepresentation()) {
10959 right->CopyFlag(HValue::kTruncatingToSmi, instr);
10960 right->CopyFlag(HValue::kTruncatingToInt32, instr);
10961 }
10962 }
10963 }
10964 return instr;
10965 }
10966
10967
10968 // Check for the form (%_ClassOf(foo) === 'BarClass').
IsClassOfTest(CompareOperation * expr)10969 static bool IsClassOfTest(CompareOperation* expr) {
10970 if (expr->op() != Token::EQ_STRICT) return false;
10971 CallRuntime* call = expr->left()->AsCallRuntime();
10972 if (call == NULL) return false;
10973 Literal* literal = expr->right()->AsLiteral();
10974 if (literal == NULL) return false;
10975 if (!literal->value()->IsString()) return false;
10976 if (!call->is_jsruntime() &&
10977 call->function()->function_id != Runtime::kInlineClassOf) {
10978 return false;
10979 }
10980 DCHECK(call->arguments()->length() == 1);
10981 return true;
10982 }
10983
10984
VisitBinaryOperation(BinaryOperation * expr)10985 void HOptimizedGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
10986 DCHECK(!HasStackOverflow());
10987 DCHECK(current_block() != NULL);
10988 DCHECK(current_block()->HasPredecessor());
10989 switch (expr->op()) {
10990 case Token::COMMA:
10991 return VisitComma(expr);
10992 case Token::OR:
10993 case Token::AND:
10994 return VisitLogicalExpression(expr);
10995 default:
10996 return VisitArithmeticExpression(expr);
10997 }
10998 }
10999
11000
// Comma expression: the left subexpression is evaluated only for its side
// effects; the result of the whole expression is the result of the right
// subexpression.
void HOptimizedGraphBuilder::VisitComma(BinaryOperation* expr) {
  CHECK_ALIVE(VisitForEffect(expr->left()));
  // Visit the right subexpression in the same AST context as the entire
  // expression.
  Visit(expr->right());
}
11007
11008
// Translates a short-circuiting && or || expression. The generated graph
// depends on the surrounding AST context: a test context produces only
// control flow, a value context materializes the expression's value, and
// an effect context keeps just the side effects and control flow.
void HOptimizedGraphBuilder::VisitLogicalExpression(BinaryOperation* expr) {
  bool is_logical_and = expr->op() == Token::AND;
  if (ast_context()->IsTest()) {
    TestContext* context = TestContext::cast(ast_context());
    // Translate left subexpression.
    HBasicBlock* eval_right = graph()->CreateBasicBlock();
    if (is_logical_and) {
      // For &&, a false left operand jumps straight to the false target.
      CHECK_BAILOUT(VisitForControl(expr->left(),
                                    eval_right,
                                    context->if_false()));
    } else {
      // For ||, a true left operand jumps straight to the true target.
      CHECK_BAILOUT(VisitForControl(expr->left(),
                                    context->if_true(),
                                    eval_right));
    }

    // Translate right subexpression by visiting it in the same AST
    // context as the entire expression.
    CHECK(eval_right->HasPredecessor());
    eval_right->SetJoinId(expr->RightId());
    set_current_block(eval_right);
    Visit(expr->right());
  } else if (ast_context()->IsValue()) {
    CHECK_ALIVE(VisitForValue(expr->left()));
    DCHECK(current_block() != NULL);
    HValue* left_value = Top();

    // Short-circuit left values that always evaluate to the same boolean value.
    if (expr->left()->ToBooleanIsTrue() || expr->left()->ToBooleanIsFalse()) {
      // l (evals true) && r -> r
      // l (evals true) || r -> l
      // l (evals false) && r -> l
      // l (evals false) || r -> r
      if (is_logical_and == expr->left()->ToBooleanIsTrue()) {
        // The right operand decides the result; discard the left value.
        Drop(1);
        CHECK_ALIVE(VisitForValue(expr->right()));
      }
      return ast_context()->ReturnValue(Pop());
    }

    // We need an extra block to maintain edge-split form.
    HBasicBlock* empty_block = graph()->CreateBasicBlock();
    HBasicBlock* eval_right = graph()->CreateBasicBlock();
    ToBooleanHints expected(expr->left()->to_boolean_types());
    HBranch* test = is_logical_and
        ? New<HBranch>(left_value, expected, eval_right, empty_block)
        : New<HBranch>(left_value, expected, empty_block, eval_right);
    FinishCurrentBlock(test);

    set_current_block(eval_right);
    Drop(1);  // Value of the left subexpression.
    CHECK_BAILOUT(VisitForValue(expr->right()));

    // Join the short-circuit path (left value) with the evaluated-right path.
    HBasicBlock* join_block =
      CreateJoin(empty_block, current_block(), expr->id());
    set_current_block(join_block);
    return ast_context()->ReturnValue(Pop());

  } else {
    DCHECK(ast_context()->IsEffect());
    // In an effect context, we don't need the value of the left subexpression,
    // only its control flow and side effects. We need an extra block to
    // maintain edge-split form.
    HBasicBlock* empty_block = graph()->CreateBasicBlock();
    HBasicBlock* right_block = graph()->CreateBasicBlock();
    if (is_logical_and) {
      CHECK_BAILOUT(VisitForControl(expr->left(), right_block, empty_block));
    } else {
      CHECK_BAILOUT(VisitForControl(expr->left(), empty_block, right_block));
    }

    // TODO(kmillikin): Find a way to fix this. It's ugly that there are
    // actually two empty blocks (one here and one inserted by
    // TestContext::BuildBranch, and that they both have an HSimulate though the
    // second one is not a merge node, and that we really have no good AST ID to
    // put on that first HSimulate.

    // Technically, we should be able to handle the case when one side of
    // the test is not connected, but this can trip up liveness analysis
    // if we did not fully connect the test context based on some optimistic
    // assumption. If such an assumption was violated, we would end up with
    // an environment with optimized-out values. So we should always
    // conservatively connect the test context.

    CHECK(right_block->HasPredecessor());
    CHECK(empty_block->HasPredecessor());

    empty_block->SetJoinId(expr->id());

    right_block->SetJoinId(expr->RightId());
    set_current_block(right_block);
    CHECK_BAILOUT(VisitForEffect(expr->right()));
    right_block = current_block();

    HBasicBlock* join_block =
      CreateJoin(empty_block, right_block, expr->id());
    set_current_block(join_block);
    // We did not materialize any value in the predecessor environments,
    // so there is no need to handle it here.
  }
}
11110
11111
VisitArithmeticExpression(BinaryOperation * expr)11112 void HOptimizedGraphBuilder::VisitArithmeticExpression(BinaryOperation* expr) {
11113 CHECK_ALIVE(VisitForValue(expr->left()));
11114 CHECK_ALIVE(VisitForValue(expr->right()));
11115 SetSourcePosition(expr->position());
11116 HValue* right = Pop();
11117 HValue* left = Pop();
11118 HValue* result =
11119 BuildBinaryOperation(expr, left, right,
11120 ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
11121 : PUSH_BEFORE_SIMULATE);
11122 return ast_context()->ReturnValue(result);
11123 }
11124
11125
HandleLiteralCompareTypeof(CompareOperation * expr,Expression * sub_expr,Handle<String> check)11126 void HOptimizedGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
11127 Expression* sub_expr,
11128 Handle<String> check) {
11129 CHECK_ALIVE(VisitForTypeOf(sub_expr));
11130 SetSourcePosition(expr->position());
11131 HValue* value = Pop();
11132 HTypeofIsAndBranch* instr = New<HTypeofIsAndBranch>(value, check);
11133 return ast_context()->ReturnControl(instr, expr->id());
11134 }
11135
11136 namespace {
11137
IsLiteralCompareStrict(Isolate * isolate,HValue * left,Token::Value op,HValue * right)11138 bool IsLiteralCompareStrict(Isolate* isolate, HValue* left, Token::Value op,
11139 HValue* right) {
11140 return op == Token::EQ_STRICT &&
11141 ((left->IsConstant() &&
11142 !HConstant::cast(left)->handle(isolate)->IsNumber() &&
11143 !HConstant::cast(left)->handle(isolate)->IsSimd128Value() &&
11144 !HConstant::cast(left)->handle(isolate)->IsString()) ||
11145 (right->IsConstant() &&
11146 !HConstant::cast(right)->handle(isolate)->IsNumber() &&
11147 !HConstant::cast(right)->handle(isolate)->IsSimd128Value() &&
11148 !HConstant::cast(right)->handle(isolate)->IsString()));
11149 }
11150
11151 } // namespace
11152
// Translates a comparison expression. Special literal patterns (typeof
// tests, null/undefined comparisons, %_ClassOf tests) are handled first;
// instanceof and `in` are lowered to stub calls; everything else goes
// through BuildCompareInstruction using collected type feedback.
void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  if (!is_tracking_positions()) SetSourcePosition(expr->position());

  // Check for a few fast cases. The AST visiting behavior must be in sync
  // with the full codegen: We don't push both left and right values onto
  // the expression stack when one side is a special-case literal.
  Expression* sub_expr = NULL;
  Handle<String> check;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
    return HandleLiteralCompareTypeof(expr, sub_expr, check);
  }
  if (expr->IsLiteralCompareUndefined(&sub_expr)) {
    return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
  }
  if (expr->IsLiteralCompareNull(&sub_expr)) {
    return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
  }

  if (IsClassOfTest(expr)) {
    // (%_ClassOf(value) === 'Literal') — only the intrinsic's argument is
    // evaluated; the literal side never touches the expression stack.
    CallRuntime* call = expr->left()->AsCallRuntime();
    DCHECK(call->arguments()->length() == 1);
    CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
    HValue* value = Pop();
    Literal* literal = expr->right()->AsLiteral();
    Handle<String> rhs = Handle<String>::cast(literal->value());
    HClassOfTestAndBranch* instr = New<HClassOfTestAndBranch>(value, rhs);
    return ast_context()->ReturnControl(instr, expr->id());
  }

  AstType* left_type = bounds_.get(expr->left()).lower;
  AstType* right_type = bounds_.get(expr->right()).lower;
  AstType* combined_type = expr->combined_type();

  CHECK_ALIVE(VisitForValue(expr->left()));
  CHECK_ALIVE(VisitForValue(expr->right()));

  HValue* right = Pop();
  HValue* left = Pop();
  Token::Value op = expr->op();

  if (IsLiteralCompareStrict(isolate(), left, op, right)) {
    // Strict equality against a non-number/non-string/non-SIMD constant
    // reduces to reference equality.
    HCompareObjectEqAndBranch* result =
        New<HCompareObjectEqAndBranch>(left, right);
    return ast_context()->ReturnControl(result, expr->id());
  }

  if (op == Token::INSTANCEOF) {
    // Check to see if the rhs of the instanceof is a known function.
    if (right->IsConstant() &&
        HConstant::cast(right)->handle(isolate())->IsJSFunction()) {
      Handle<JSFunction> function =
          Handle<JSFunction>::cast(HConstant::cast(right)->handle(isolate()));
      // Make sure that the {function} already has a meaningful initial map
      // (i.e. we constructed at least one instance using the constructor
      // {function}).
      if (function->has_initial_map()) {
        // Lookup @@hasInstance on the {function}.
        Handle<Map> function_map(function->map(), isolate());
        PropertyAccessInfo has_instance(
            this, LOAD, function_map,
            isolate()->factory()->has_instance_symbol());
        // Check if we are using the Function.prototype[@@hasInstance].
        if (has_instance.CanAccessMonomorphic() &&
            has_instance.IsDataConstant() &&
            has_instance.constant().is_identical_to(
                isolate()->function_has_instance())) {
          // Add appropriate receiver map check and prototype chain
          // checks to guard the @@hasInstance lookup chain.
          AddCheckMap(right, function_map);
          if (has_instance.has_holder()) {
            Handle<JSObject> prototype(
                JSObject::cast(has_instance.map()->prototype()), isolate());
            BuildCheckPrototypeMaps(prototype, has_instance.holder());
          }
          // Perform the prototype chain walk.
          Handle<Map> initial_map(function->initial_map(), isolate());
          top_info()->dependencies()->AssumeInitialMapCantChange(initial_map);
          HInstruction* prototype =
              Add<HConstant>(handle(initial_map->prototype(), isolate()));
          HHasInPrototypeChainAndBranch* result =
              New<HHasInPrototypeChainAndBranch>(left, prototype);
          return ast_context()->ReturnControl(result, expr->id());
        }
      }
    }

    // Generic case: fall back to the InstanceOf stub.
    Callable callable = CodeFactory::InstanceOf(isolate());
    HValue* stub = Add<HConstant>(callable.code());
    HValue* values[] = {left, right};
    HCallWithDescriptor* result = New<HCallWithDescriptor>(
        stub, 0, callable.descriptor(), ArrayVector(values));
    result->set_type(HType::Boolean());
    return ast_context()->ReturnInstruction(result, expr->id());

  } else if (op == Token::IN) {
    // `in` is always lowered to the HasProperty stub.
    Callable callable = CodeFactory::HasProperty(isolate());
    HValue* stub = Add<HConstant>(callable.code());
    HValue* values[] = {left, right};
    HInstruction* result =
        New<HCallWithDescriptor>(stub, 0, callable.descriptor(),
                                 Vector<HValue*>(values, arraysize(values)));
    return ast_context()->ReturnInstruction(result, expr->id());
  }

  PushBeforeSimulateBehavior push_behavior =
      ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
                                : PUSH_BEFORE_SIMULATE;
  HControlInstruction* compare = BuildCompareInstruction(
      op, left, right, left_type, right_type, combined_type,
      ScriptPositionToSourcePosition(expr->left()->position()),
      ScriptPositionToSourcePosition(expr->right()->position()),
      push_behavior, expr->id());
  if (compare == NULL) return;  // Bailed out.
  return ast_context()->ReturnControl(compare, expr->id());
}
11272
// Builds the control instruction for a comparison whose lowering is driven
// by the type feedback in |combined_type|. Returns NULL after a bailout.
// Constant operands that contradict the feedback trigger a soft deopt and
// produce a dummy branch so callers always get an instruction back.
HControlInstruction* HOptimizedGraphBuilder::BuildCompareInstruction(
    Token::Value op, HValue* left, HValue* right, AstType* left_type,
    AstType* right_type, AstType* combined_type, SourcePosition left_position,
    SourcePosition right_position, PushBeforeSimulateBehavior push_sim_result,
    BailoutId bailout_id) {
  // Cases handled below depend on collected type feedback. They should
  // soft deoptimize when there is no type feedback.
  if (!combined_type->IsInhabited()) {
    Add<HDeoptimize>(
        DeoptimizeReason::
            kInsufficientTypeFeedbackForCombinedTypeOfBinaryOperation,
        Deoptimizer::SOFT);
    combined_type = left_type = right_type = AstType::Any();
  }

  Representation left_rep = RepresentationFor(left_type);
  Representation right_rep = RepresentationFor(right_type);
  Representation combined_rep = RepresentationFor(combined_type);

  if (combined_type->Is(AstType::Receiver())) {
    if (Token::IsEqualityOp(op)) {
      // HCompareObjectEqAndBranch can only deal with object, so
      // exclude numbers.
      if ((left->IsConstant() &&
           HConstant::cast(left)->HasNumberValue()) ||
          (right->IsConstant() &&
           HConstant::cast(right)->HasNumberValue())) {
        Add<HDeoptimize>(
            DeoptimizeReason::kTypeMismatchBetweenFeedbackAndConstant,
            Deoptimizer::SOFT);
        // The caller expects a branch instruction, so make it happy.
        return New<HBranch>(graph()->GetConstantTrue());
      }
      if (op == Token::EQ) {
        // For abstract equality we need to check both sides are receivers.
        if (combined_type->IsClass()) {
          Handle<Map> map = combined_type->AsClass()->Map();
          AddCheckMap(left, map);
          AddCheckMap(right, map);
        } else {
          BuildCheckHeapObject(left);
          Add<HCheckInstanceType>(left, HCheckInstanceType::IS_JS_RECEIVER);
          BuildCheckHeapObject(right);
          Add<HCheckInstanceType>(right, HCheckInstanceType::IS_JS_RECEIVER);
        }
      } else {
        // For strict equality we only need to check one side.
        HValue* operand_to_check =
            left->block()->block_id() < right->block()->block_id() ? left
                                                                   : right;
        if (combined_type->IsClass()) {
          Handle<Map> map = combined_type->AsClass()->Map();
          AddCheckMap(operand_to_check, map);
        } else {
          BuildCheckHeapObject(operand_to_check);
          Add<HCheckInstanceType>(operand_to_check,
                                  HCheckInstanceType::IS_JS_RECEIVER);
        }
      }
      HCompareObjectEqAndBranch* result =
          New<HCompareObjectEqAndBranch>(left, right);
      return result;
    } else {
      if (combined_type->IsClass()) {
        // TODO(bmeurer): This is an optimized version of an x < y, x > y,
        // x <= y or x >= y, where both x and y are spec objects with the
        // same map. The CompareIC collects this map for us. So if we know
        // that there's no @@toPrimitive on the map (including the prototype
        // chain), and both valueOf and toString are the default initial
        // implementations (on the %ObjectPrototype%), then we can reduce
        // the comparison to map checks on x and y, because the comparison
        // will turn into a comparison of "[object CLASS]" to itself (the
        // default outcome of toString, since valueOf returns a spec object).
        // This is pretty much adhoc, so in TurboFan we could do a lot better
        // and inline the interesting parts of ToPrimitive (actually we could
        // even do that in Crankshaft but we don't want to waste too much
        // time on this now).
        DCHECK(Token::IsOrderedRelationalCompareOp(op));
        Handle<Map> map = combined_type->AsClass()->Map();
        PropertyAccessInfo value_of(this, LOAD, map,
                                    isolate()->factory()->valueOf_string());
        PropertyAccessInfo to_primitive(
            this, LOAD, map, isolate()->factory()->to_primitive_symbol());
        PropertyAccessInfo to_string(this, LOAD, map,
                                     isolate()->factory()->toString_string());
        PropertyAccessInfo to_string_tag(
            this, LOAD, map, isolate()->factory()->to_string_tag_symbol());
        if (to_primitive.CanAccessMonomorphic() && !to_primitive.IsFound() &&
            to_string_tag.CanAccessMonomorphic() &&
            (!to_string_tag.IsFound() || to_string_tag.IsData() ||
             to_string_tag.IsDataConstant()) &&
            value_of.CanAccessMonomorphic() && value_of.IsDataConstant() &&
            value_of.constant().is_identical_to(isolate()->object_value_of()) &&
            to_string.CanAccessMonomorphic() && to_string.IsDataConstant() &&
            to_string.constant().is_identical_to(
                isolate()->object_to_string())) {
          // We depend on the prototype chain to stay the same, because we
          // also need to deoptimize when someone installs @@toPrimitive
          // or @@toStringTag somewhere in the prototype chain.
          Handle<Object> prototype(map->prototype(), isolate());
          if (prototype->IsJSObject()) {
            BuildCheckPrototypeMaps(Handle<JSObject>::cast(prototype),
                                    Handle<JSObject>::null());
          }
          AddCheckMap(left, map);
          AddCheckMap(right, map);
          // The caller expects a branch instruction, so make it happy.
          return New<HBranch>(
              graph()->GetConstantBool(op == Token::LTE || op == Token::GTE));
        }
      }
      Bailout(kUnsupportedNonPrimitiveCompare);
      return NULL;
    }
  } else if (combined_type->Is(AstType::InternalizedString()) &&
             Token::IsEqualityOp(op)) {
    // If we have a constant argument, it should be consistent with the type
    // feedback (otherwise we fail assertions in HCompareObjectEqAndBranch).
    if ((left->IsConstant() &&
         !HConstant::cast(left)->HasInternalizedStringValue()) ||
        (right->IsConstant() &&
         !HConstant::cast(right)->HasInternalizedStringValue())) {
      Add<HDeoptimize>(
          DeoptimizeReason::kTypeMismatchBetweenFeedbackAndConstant,
          Deoptimizer::SOFT);
      // The caller expects a branch instruction, so make it happy.
      return New<HBranch>(graph()->GetConstantTrue());
    }
    // Internalized strings are unique, so equality reduces to reference
    // equality once both operands are checked to be internalized strings.
    BuildCheckHeapObject(left);
    Add<HCheckInstanceType>(left, HCheckInstanceType::IS_INTERNALIZED_STRING);
    BuildCheckHeapObject(right);
    Add<HCheckInstanceType>(right, HCheckInstanceType::IS_INTERNALIZED_STRING);
    HCompareObjectEqAndBranch* result =
        New<HCompareObjectEqAndBranch>(left, right);
    return result;
  } else if (combined_type->Is(AstType::String())) {
    BuildCheckHeapObject(left);
    Add<HCheckInstanceType>(left, HCheckInstanceType::IS_STRING);
    BuildCheckHeapObject(right);
    Add<HCheckInstanceType>(right, HCheckInstanceType::IS_STRING);
    HStringCompareAndBranch* result =
        New<HStringCompareAndBranch>(left, right, op);
    return result;
  } else if (combined_type->Is(AstType::Boolean())) {
    AddCheckMap(left, isolate()->factory()->boolean_map());
    AddCheckMap(right, isolate()->factory()->boolean_map());
    if (Token::IsEqualityOp(op)) {
      HCompareObjectEqAndBranch* result =
          New<HCompareObjectEqAndBranch>(left, right);
      return result;
    }
    // Relational compare on booleans: load the oddball's numeric value
    // (0 or 1) from each side and compare numerically.
    left = Add<HLoadNamedField>(
        left, nullptr,
        HObjectAccess::ForOddballToNumber(Representation::Smi()));
    right = Add<HLoadNamedField>(
        right, nullptr,
        HObjectAccess::ForOddballToNumber(Representation::Smi()));
    HCompareNumericAndBranch* result =
        New<HCompareNumericAndBranch>(left, right, op);
    return result;
  } else {
    if (op == Token::EQ) {
      // x == undetectable-constant reduces to an undetectability check on
      // the other operand.
      if (left->IsConstant() &&
          HConstant::cast(left)->GetInstanceType() == ODDBALL_TYPE &&
          HConstant::cast(left)->IsUndetectable()) {
        return New<HIsUndetectableAndBranch>(right);
      }

      if (right->IsConstant() &&
          HConstant::cast(right)->GetInstanceType() == ODDBALL_TYPE &&
          HConstant::cast(right)->IsUndetectable()) {
        return New<HIsUndetectableAndBranch>(left);
      }
    }

    if (combined_rep.IsTagged() || combined_rep.IsNone()) {
      // No useful representation feedback: emit a generic compare, which
      // has observable side effects and therefore may need a simulate.
      HCompareGeneric* result = Add<HCompareGeneric>(left, right, op);
      result->set_observed_input_representation(1, left_rep);
      result->set_observed_input_representation(2, right_rep);
      if (result->HasObservableSideEffects()) {
        if (push_sim_result == PUSH_BEFORE_SIMULATE) {
          Push(result);
          AddSimulate(bailout_id, REMOVABLE_SIMULATE);
          Drop(1);
        } else {
          AddSimulate(bailout_id, REMOVABLE_SIMULATE);
        }
      }
      // TODO(jkummerow): Can we make this more efficient?
      HBranch* branch = New<HBranch>(result);
      return branch;
    } else {
      HCompareNumericAndBranch* result =
          New<HCompareNumericAndBranch>(left, right, op);
      result->set_observed_input_representation(left_rep, right_rep);
      return result;
    }
  }
}
11472
11473
HandleLiteralCompareNil(CompareOperation * expr,Expression * sub_expr,NilValue nil)11474 void HOptimizedGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr,
11475 Expression* sub_expr,
11476 NilValue nil) {
11477 DCHECK(!HasStackOverflow());
11478 DCHECK(current_block() != NULL);
11479 DCHECK(current_block()->HasPredecessor());
11480 DCHECK(expr->op() == Token::EQ || expr->op() == Token::EQ_STRICT);
11481 if (!is_tracking_positions()) SetSourcePosition(expr->position());
11482 CHECK_ALIVE(VisitForValue(sub_expr));
11483 HValue* value = Pop();
11484 HControlInstruction* instr;
11485 if (expr->op() == Token::EQ_STRICT) {
11486 HConstant* nil_constant = nil == kNullValue
11487 ? graph()->GetConstantNull()
11488 : graph()->GetConstantUndefined();
11489 instr = New<HCompareObjectEqAndBranch>(value, nil_constant);
11490 } else {
11491 DCHECK_EQ(Token::EQ, expr->op());
11492 instr = New<HIsUndetectableAndBranch>(value);
11493 }
11494 return ast_context()->ReturnControl(instr, expr->id());
11495 }
11496
11497
VisitSpread(Spread * expr)11498 void HOptimizedGraphBuilder::VisitSpread(Spread* expr) { UNREACHABLE(); }
11499
11500
VisitEmptyParentheses(EmptyParentheses * expr)11501 void HOptimizedGraphBuilder::VisitEmptyParentheses(EmptyParentheses* expr) {
11502 UNREACHABLE();
11503 }
11504
11505
AddThisFunction()11506 HValue* HOptimizedGraphBuilder::AddThisFunction() {
11507 return AddInstruction(BuildThisFunction());
11508 }
11509
11510
BuildThisFunction()11511 HInstruction* HOptimizedGraphBuilder::BuildThisFunction() {
11512 // If we share optimized code between different closures, the
11513 // this-function is not a constant, except inside an inlined body.
11514 if (function_state()->outer() != NULL) {
11515 return New<HConstant>(
11516 function_state()->compilation_info()->closure());
11517 } else {
11518 return New<HThisFunction>();
11519 }
11520 }
11521
11522
// Emits the allocation and initialization instructions for a copy of the
// given boilerplate object (an object or array literal), recursing into
// nested literals via BuildEmitInObjectProperties/BuildEmitElements.
// Pretenuring decisions come from the outermost allocation site.
HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
    Handle<JSObject> boilerplate_object,
    AllocationSiteUsageContext* site_context) {
  NoObservableSideEffectsScope no_effects(this);
  Handle<Map> initial_map(boilerplate_object->map());
  InstanceType instance_type = initial_map->instance_type();
  DCHECK(instance_type == JS_ARRAY_TYPE || instance_type == JS_OBJECT_TYPE);

  HType type = instance_type == JS_ARRAY_TYPE
      ? HType::JSArray() : HType::JSObject();
  HValue* object_size_constant = Add<HConstant>(initial_map->instance_size());

  PretenureFlag pretenure_flag = NOT_TENURED;
  Handle<AllocationSite> top_site(*site_context->top(), isolate());
  if (FLAG_allocation_site_pretenuring) {
    pretenure_flag = top_site->GetPretenureMode();
  }

  Handle<AllocationSite> current_site(*site_context->current(), isolate());
  if (*top_site == *current_site) {
    // We install a dependency for pretenuring only on the outermost literal.
    top_info()->dependencies()->AssumeTenuringDecision(top_site);
  }
  top_info()->dependencies()->AssumeTransitionStable(current_site);

  HInstruction* object =
      Add<HAllocate>(object_size_constant, type, pretenure_flag, instance_type,
                     graph()->GetConstant0(), top_site);

  // If allocation folding reaches kMaxRegularHeapObjectSize the
  // elements array may not get folded into the object. Hence, we set the
  // elements pointer to empty fixed array and let store elimination remove
  // this store in the folding case.
  HConstant* empty_fixed_array = Add<HConstant>(
      isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);

  BuildEmitObjectHeader(boilerplate_object, object);

  // Similarly to the elements pointer, there is no guarantee that all
  // property allocations can get folded, so pre-initialize all in-object
  // properties to a safe value.
  BuildInitializeInobjectProperties(object, initial_map);

  // COW arrays are shared with the boilerplate and need no copied storage.
  Handle<FixedArrayBase> elements(boilerplate_object->elements());
  int elements_size = (elements->length() > 0 &&
      elements->map() != isolate()->heap()->fixed_cow_array_map()) ?
          elements->Size() : 0;

  if (pretenure_flag == TENURED &&
      elements->map() == isolate()->heap()->fixed_cow_array_map() &&
      isolate()->heap()->InNewSpace(*elements)) {
    // If we would like to pretenure a fixed cow array, we must ensure that the
    // array is already in old space, otherwise we'll create too many old-to-
    // new-space pointers (overflowing the store buffer).
    elements = Handle<FixedArrayBase>(
        isolate()->factory()->CopyAndTenureFixedCOWArray(
            Handle<FixedArray>::cast(elements)));
    boilerplate_object->set_elements(*elements);
  }

  HInstruction* object_elements = NULL;
  if (elements_size > 0) {
    // Allocate and fill a fresh backing store for the copy.
    HValue* object_elements_size = Add<HConstant>(elements_size);
    InstanceType instance_type = boilerplate_object->HasFastDoubleElements()
        ? FIXED_DOUBLE_ARRAY_TYPE : FIXED_ARRAY_TYPE;
    object_elements = Add<HAllocate>(object_elements_size, HType::HeapObject(),
                                     pretenure_flag, instance_type,
                                     graph()->GetConstant0(), top_site);
    BuildEmitElements(boilerplate_object, elements, object_elements,
                      site_context);
    Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                          object_elements);
  } else {
    // Empty or COW elements: share the boilerplate's backing store.
    Handle<Object> elements_field =
        Handle<Object>(boilerplate_object->elements(), isolate());
    HInstruction* object_elements_cow = Add<HConstant>(elements_field);
    Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                          object_elements_cow);
  }

  // Copy in-object properties.
  if (initial_map->NumberOfFields() != 0 ||
      initial_map->unused_property_fields() > 0) {
    BuildEmitInObjectProperties(boilerplate_object, object, site_context,
                                pretenure_flag);
  }
  return object;
}
11613
11614
BuildEmitObjectHeader(Handle<JSObject> boilerplate_object,HInstruction * object)11615 void HOptimizedGraphBuilder::BuildEmitObjectHeader(
11616 Handle<JSObject> boilerplate_object,
11617 HInstruction* object) {
11618 DCHECK(boilerplate_object->properties()->length() == 0);
11619
11620 Handle<Map> boilerplate_object_map(boilerplate_object->map());
11621 AddStoreMapConstant(object, boilerplate_object_map);
11622
11623 Handle<Object> properties_field =
11624 Handle<Object>(boilerplate_object->properties(), isolate());
11625 DCHECK(*properties_field == isolate()->heap()->empty_fixed_array());
11626 HInstruction* properties = Add<HConstant>(properties_field);
11627 HObjectAccess access = HObjectAccess::ForPropertiesPointer();
11628 Add<HStoreNamedField>(object, access, properties);
11629
11630 if (boilerplate_object->IsJSArray()) {
11631 Handle<JSArray> boilerplate_array =
11632 Handle<JSArray>::cast(boilerplate_object);
11633 Handle<Object> length_field =
11634 Handle<Object>(boilerplate_array->length(), isolate());
11635 HInstruction* length = Add<HConstant>(length_field);
11636
11637 DCHECK(boilerplate_array->length()->IsSmi());
11638 Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(
11639 boilerplate_array->GetElementsKind()), length);
11640 }
11641 }
11642
11643
BuildEmitInObjectProperties(Handle<JSObject> boilerplate_object,HInstruction * object,AllocationSiteUsageContext * site_context,PretenureFlag pretenure_flag)11644 void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
11645 Handle<JSObject> boilerplate_object,
11646 HInstruction* object,
11647 AllocationSiteUsageContext* site_context,
11648 PretenureFlag pretenure_flag) {
11649 Handle<Map> boilerplate_map(boilerplate_object->map());
11650 Handle<DescriptorArray> descriptors(boilerplate_map->instance_descriptors());
11651 int limit = boilerplate_map->NumberOfOwnDescriptors();
11652
11653 int copied_fields = 0;
11654 for (int i = 0; i < limit; i++) {
11655 PropertyDetails details = descriptors->GetDetails(i);
11656 if (details.type() != DATA) continue;
11657 copied_fields++;
11658 FieldIndex field_index = FieldIndex::ForDescriptor(*boilerplate_map, i);
11659
11660
11661 int property_offset = field_index.offset();
11662 Handle<Name> name(descriptors->GetKey(i));
11663
11664 // The access for the store depends on the type of the boilerplate.
11665 HObjectAccess access = boilerplate_object->IsJSArray() ?
11666 HObjectAccess::ForJSArrayOffset(property_offset) :
11667 HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
11668
11669 if (boilerplate_object->IsUnboxedDoubleField(field_index)) {
11670 CHECK(!boilerplate_object->IsJSArray());
11671 double value = boilerplate_object->RawFastDoublePropertyAt(field_index);
11672 access = access.WithRepresentation(Representation::Double());
11673 Add<HStoreNamedField>(object, access, Add<HConstant>(value));
11674 continue;
11675 }
11676 Handle<Object> value(boilerplate_object->RawFastPropertyAt(field_index),
11677 isolate());
11678
11679 if (value->IsJSObject()) {
11680 Handle<JSObject> value_object = Handle<JSObject>::cast(value);
11681 Handle<AllocationSite> current_site = site_context->EnterNewScope();
11682 HInstruction* result =
11683 BuildFastLiteral(value_object, site_context);
11684 site_context->ExitScope(current_site, value_object);
11685 Add<HStoreNamedField>(object, access, result);
11686 } else {
11687 Representation representation = details.representation();
11688 HInstruction* value_instruction;
11689
11690 if (representation.IsDouble()) {
11691 // Allocate a HeapNumber box and store the value into it.
11692 HValue* heap_number_constant = Add<HConstant>(HeapNumber::kSize);
11693 HInstruction* double_box = Add<HAllocate>(
11694 heap_number_constant, HType::HeapObject(), pretenure_flag,
11695 MUTABLE_HEAP_NUMBER_TYPE, graph()->GetConstant0());
11696 AddStoreMapConstant(double_box,
11697 isolate()->factory()->mutable_heap_number_map());
11698 // Unwrap the mutable heap number from the boilerplate.
11699 HValue* double_value =
11700 Add<HConstant>(Handle<HeapNumber>::cast(value)->value());
11701 Add<HStoreNamedField>(
11702 double_box, HObjectAccess::ForHeapNumberValue(), double_value);
11703 value_instruction = double_box;
11704 } else if (representation.IsSmi()) {
11705 value_instruction = value->IsUninitialized(isolate())
11706 ? graph()->GetConstant0()
11707 : Add<HConstant>(value);
11708 // Ensure that value is stored as smi.
11709 access = access.WithRepresentation(representation);
11710 } else {
11711 value_instruction = Add<HConstant>(value);
11712 }
11713
11714 Add<HStoreNamedField>(object, access, value_instruction);
11715 }
11716 }
11717
11718 int inobject_properties = boilerplate_object->map()->GetInObjectProperties();
11719 HInstruction* value_instruction =
11720 Add<HConstant>(isolate()->factory()->one_pointer_filler_map());
11721 for (int i = copied_fields; i < inobject_properties; i++) {
11722 DCHECK(boilerplate_object->IsJSObject());
11723 int property_offset = boilerplate_object->GetInObjectPropertyOffset(i);
11724 HObjectAccess access =
11725 HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
11726 Add<HStoreNamedField>(object, access, value_instruction);
11727 }
11728 }
11729
11730
BuildEmitElements(Handle<JSObject> boilerplate_object,Handle<FixedArrayBase> elements,HValue * object_elements,AllocationSiteUsageContext * site_context)11731 void HOptimizedGraphBuilder::BuildEmitElements(
11732 Handle<JSObject> boilerplate_object,
11733 Handle<FixedArrayBase> elements,
11734 HValue* object_elements,
11735 AllocationSiteUsageContext* site_context) {
11736 ElementsKind kind = boilerplate_object->map()->elements_kind();
11737 int elements_length = elements->length();
11738 HValue* object_elements_length = Add<HConstant>(elements_length);
11739 BuildInitializeElementsHeader(object_elements, kind, object_elements_length);
11740
11741 // Copy elements backing store content.
11742 if (elements->IsFixedDoubleArray()) {
11743 BuildEmitFixedDoubleArray(elements, kind, object_elements);
11744 } else if (elements->IsFixedArray()) {
11745 BuildEmitFixedArray(elements, kind, object_elements,
11746 site_context);
11747 } else {
11748 UNREACHABLE();
11749 }
11750 }
11751
11752
BuildEmitFixedDoubleArray(Handle<FixedArrayBase> elements,ElementsKind kind,HValue * object_elements)11753 void HOptimizedGraphBuilder::BuildEmitFixedDoubleArray(
11754 Handle<FixedArrayBase> elements,
11755 ElementsKind kind,
11756 HValue* object_elements) {
11757 HInstruction* boilerplate_elements = Add<HConstant>(elements);
11758 int elements_length = elements->length();
11759 for (int i = 0; i < elements_length; i++) {
11760 HValue* key_constant = Add<HConstant>(i);
11761 HInstruction* value_instruction =
11762 Add<HLoadKeyed>(boilerplate_elements, key_constant, nullptr, nullptr,
11763 kind, ALLOW_RETURN_HOLE);
11764 HInstruction* store = Add<HStoreKeyed>(object_elements, key_constant,
11765 value_instruction, nullptr, kind);
11766 store->SetFlag(HValue::kTruncatingToNumber);
11767 }
11768 }
11769
11770
// Copies a (non-double) FixedArray boilerplate backing store into the
// literal's elements.  Nested JSObjects are deep-copied via BuildFastLiteral;
// all other values are copied slot-wise.
void HOptimizedGraphBuilder::BuildEmitFixedArray(
    Handle<FixedArrayBase> elements,
    ElementsKind kind,
    HValue* object_elements,
    AllocationSiteUsageContext* site_context) {
  HInstruction* boilerplate_elements = Add<HConstant>(elements);
  int elements_length = elements->length();
  Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
  for (int i = 0; i < elements_length; i++) {
    Handle<Object> value(fast_elements->get(i), isolate());
    HValue* key_constant = Add<HConstant>(i);
    if (value->IsJSObject()) {
      // Nested object literal: recursively build a copy inside a fresh
      // allocation-site scope.
      Handle<JSObject> value_object = Handle<JSObject>::cast(value);
      Handle<AllocationSite> current_site = site_context->EnterNewScope();
      HInstruction* result =
          BuildFastLiteral(value_object, site_context);
      site_context->ExitScope(current_site, value_object);
      Add<HStoreKeyed>(object_elements, key_constant, result, nullptr, kind);
    } else {
      // Holey-smi arrays are copied with a holey-tagged kind so the hole can
      // pass through the load/store pair unchanged.
      ElementsKind copy_kind =
          kind == FAST_HOLEY_SMI_ELEMENTS ? FAST_HOLEY_ELEMENTS : kind;
      HInstruction* value_instruction =
          Add<HLoadKeyed>(boilerplate_elements, key_constant, nullptr, nullptr,
                          copy_kind, ALLOW_RETURN_HOLE);
      Add<HStoreKeyed>(object_elements, key_constant, value_instruction,
                       nullptr, copy_kind);
    }
  }
}
11800
11801
VisitThisFunction(ThisFunction * expr)11802 void HOptimizedGraphBuilder::VisitThisFunction(ThisFunction* expr) {
11803 DCHECK(!HasStackOverflow());
11804 DCHECK(current_block() != NULL);
11805 DCHECK(current_block()->HasPredecessor());
11806 HInstruction* instr = BuildThisFunction();
11807 return ast_context()->ReturnInstruction(instr, expr->id());
11808 }
11809
11810
VisitSuperPropertyReference(SuperPropertyReference * expr)11811 void HOptimizedGraphBuilder::VisitSuperPropertyReference(
11812 SuperPropertyReference* expr) {
11813 DCHECK(!HasStackOverflow());
11814 DCHECK(current_block() != NULL);
11815 DCHECK(current_block()->HasPredecessor());
11816 return Bailout(kSuperReference);
11817 }
11818
11819
VisitSuperCallReference(SuperCallReference * expr)11820 void HOptimizedGraphBuilder::VisitSuperCallReference(SuperCallReference* expr) {
11821 DCHECK(!HasStackOverflow());
11822 DCHECK(current_block() != NULL);
11823 DCHECK(current_block()->HasPredecessor());
11824 return Bailout(kSuperReference);
11825 }
11826
VisitDeclarations(Declaration::List * declarations)11827 void HOptimizedGraphBuilder::VisitDeclarations(
11828 Declaration::List* declarations) {
11829 DCHECK(globals_.is_empty());
11830 AstVisitor<HOptimizedGraphBuilder>::VisitDeclarations(declarations);
11831 if (!globals_.is_empty()) {
11832 Handle<FixedArray> array =
11833 isolate()->factory()->NewFixedArray(globals_.length(), TENURED);
11834 for (int i = 0; i < globals_.length(); ++i) array->set(i, *globals_.at(i));
11835 int flags = current_info()->GetDeclareGlobalsFlags();
11836 Handle<TypeFeedbackVector> vector(current_feedback_vector(), isolate());
11837 Add<HDeclareGlobals>(array, flags, vector);
11838 globals_.Rewind(0);
11839 }
11840 }
11841
11842
// Emits code (or records batch data) for a single variable declaration,
// dispatching on the variable's allocation location.
void HOptimizedGraphBuilder::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      // Script-level variable: queue a (feedback slot, undefined) pair in
      // globals_; VisitDeclarations flushes the batch via HDeclareGlobals.
      DCHECK(!variable->binding_needs_init());
      FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_.Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
      globals_.Add(isolate()->factory()->undefined_value(), zone());
      return;
    }
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      // Stack-allocated: bind to the hole if the binding needs initialization.
      if (variable->binding_needs_init()) {
        HValue* value = graph()->GetConstantHole();
        environment()->Bind(variable, value);
      }
      break;
    case VariableLocation::CONTEXT:
      // Context-allocated: store the hole into the variable's context slot.
      if (variable->binding_needs_init()) {
        HValue* value = graph()->GetConstantHole();
        HValue* context = environment()->context();
        HStoreContextSlot* store = Add<HStoreContextSlot>(
            context, variable->index(), HStoreContextSlot::kNoCheck, value);
        if (store->HasObservableSideEffects()) {
          Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
        }
      }
      break;
    case VariableLocation::LOOKUP:
      return Bailout(kUnsupportedLookupSlotInDeclaration);
    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}
11880
11881
// Emits code (or records batch data) for a function declaration, dispatching
// on the declared variable's allocation location.
void HOptimizedGraphBuilder::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      // Script-level: queue a (feedback slot, SharedFunctionInfo) pair for
      // the batched HDeclareGlobals emitted by VisitDeclarations.
      FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_.Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
      Handle<SharedFunctionInfo> function = Compiler::GetSharedFunctionInfo(
          declaration->fun(), current_info()->script(), top_info());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_.Add(function, zone());
      return;
    }
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      // Stack-allocated: evaluate the function literal and bind it.
      CHECK_ALIVE(VisitForValue(declaration->fun()));
      HValue* value = Pop();
      BindIfLive(variable, value);
      break;
    }
    case VariableLocation::CONTEXT: {
      // Context-allocated: evaluate and store into the context slot.
      CHECK_ALIVE(VisitForValue(declaration->fun()));
      HValue* value = Pop();
      HValue* context = environment()->context();
      HStoreContextSlot* store = Add<HStoreContextSlot>(
          context, variable->index(), HStoreContextSlot::kNoCheck, value);
      if (store->HasObservableSideEffects()) {
        Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
      }
      break;
    }
    case VariableLocation::LOOKUP:
      return Bailout(kUnsupportedLookupSlotInDeclaration);
    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}
11922
11923
// A RewritableExpression is a transparent wrapper; just visit its payload.
void HOptimizedGraphBuilder::VisitRewritableExpression(
    RewritableExpression* node) {
  CHECK_ALIVE(Visit(node->expression()));
}
11928
11929
11930 // Generators for inline runtime functions.
11931 // Support for types.
GenerateIsSmi(CallRuntime * call)11932 void HOptimizedGraphBuilder::GenerateIsSmi(CallRuntime* call) {
11933 DCHECK(call->arguments()->length() == 1);
11934 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11935 HValue* value = Pop();
11936 HIsSmiAndBranch* result = New<HIsSmiAndBranch>(value);
11937 return ast_context()->ReturnControl(result, call->id());
11938 }
11939
11940
GenerateIsJSReceiver(CallRuntime * call)11941 void HOptimizedGraphBuilder::GenerateIsJSReceiver(CallRuntime* call) {
11942 DCHECK(call->arguments()->length() == 1);
11943 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11944 HValue* value = Pop();
11945 HHasInstanceTypeAndBranch* result =
11946 New<HHasInstanceTypeAndBranch>(value,
11947 FIRST_JS_RECEIVER_TYPE,
11948 LAST_JS_RECEIVER_TYPE);
11949 return ast_context()->ReturnControl(result, call->id());
11950 }
11951
GenerateIsArray(CallRuntime * call)11952 void HOptimizedGraphBuilder::GenerateIsArray(CallRuntime* call) {
11953 DCHECK(call->arguments()->length() == 1);
11954 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11955 HValue* value = Pop();
11956 HHasInstanceTypeAndBranch* result =
11957 New<HHasInstanceTypeAndBranch>(value, JS_ARRAY_TYPE);
11958 return ast_context()->ReturnControl(result, call->id());
11959 }
11960
11961
GenerateIsTypedArray(CallRuntime * call)11962 void HOptimizedGraphBuilder::GenerateIsTypedArray(CallRuntime* call) {
11963 DCHECK(call->arguments()->length() == 1);
11964 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11965 HValue* value = Pop();
11966 HHasInstanceTypeAndBranch* result =
11967 New<HHasInstanceTypeAndBranch>(value, JS_TYPED_ARRAY_TYPE);
11968 return ast_context()->ReturnControl(result, call->id());
11969 }
11970
11971
GenerateIsRegExp(CallRuntime * call)11972 void HOptimizedGraphBuilder::GenerateIsRegExp(CallRuntime* call) {
11973 DCHECK(call->arguments()->length() == 1);
11974 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11975 HValue* value = Pop();
11976 HHasInstanceTypeAndBranch* result =
11977 New<HHasInstanceTypeAndBranch>(value, JS_REGEXP_TYPE);
11978 return ast_context()->ReturnControl(result, call->id());
11979 }
11980
11981
GenerateToInteger(CallRuntime * call)11982 void HOptimizedGraphBuilder::GenerateToInteger(CallRuntime* call) {
11983 DCHECK_EQ(1, call->arguments()->length());
11984 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11985 HValue* input = Pop();
11986 if (input->type().IsSmi()) {
11987 return ast_context()->ReturnValue(input);
11988 } else {
11989 Callable callable = CodeFactory::ToInteger(isolate());
11990 HValue* stub = Add<HConstant>(callable.code());
11991 HValue* values[] = {input};
11992 HInstruction* result = New<HCallWithDescriptor>(
11993 stub, 0, callable.descriptor(), ArrayVector(values));
11994 return ast_context()->ReturnInstruction(result, call->id());
11995 }
11996 }
11997
11998
GenerateToObject(CallRuntime * call)11999 void HOptimizedGraphBuilder::GenerateToObject(CallRuntime* call) {
12000 DCHECK_EQ(1, call->arguments()->length());
12001 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12002 HValue* value = Pop();
12003 HValue* result = BuildToObject(value);
12004 return ast_context()->ReturnValue(result);
12005 }
12006
12007
GenerateToString(CallRuntime * call)12008 void HOptimizedGraphBuilder::GenerateToString(CallRuntime* call) {
12009 DCHECK_EQ(1, call->arguments()->length());
12010 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12011 HValue* input = Pop();
12012 if (input->type().IsString()) {
12013 return ast_context()->ReturnValue(input);
12014 } else {
12015 Callable callable = CodeFactory::ToString(isolate());
12016 HValue* stub = Add<HConstant>(callable.code());
12017 HValue* values[] = {input};
12018 HInstruction* result = New<HCallWithDescriptor>(
12019 stub, 0, callable.descriptor(), ArrayVector(values));
12020 return ast_context()->ReturnInstruction(result, call->id());
12021 }
12022 }
12023
12024
GenerateToLength(CallRuntime * call)12025 void HOptimizedGraphBuilder::GenerateToLength(CallRuntime* call) {
12026 DCHECK_EQ(1, call->arguments()->length());
12027 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12028 Callable callable = CodeFactory::ToLength(isolate());
12029 HValue* input = Pop();
12030 HValue* stub = Add<HConstant>(callable.code());
12031 HValue* values[] = {input};
12032 HInstruction* result = New<HCallWithDescriptor>(
12033 stub, 0, callable.descriptor(), ArrayVector(values));
12034 return ast_context()->ReturnInstruction(result, call->id());
12035 }
12036
12037
GenerateToNumber(CallRuntime * call)12038 void HOptimizedGraphBuilder::GenerateToNumber(CallRuntime* call) {
12039 DCHECK_EQ(1, call->arguments()->length());
12040 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12041 Callable callable = CodeFactory::ToNumber(isolate());
12042 HValue* input = Pop();
12043 HValue* result = BuildToNumber(input);
12044 if (result->HasObservableSideEffects()) {
12045 if (!ast_context()->IsEffect()) Push(result);
12046 Add<HSimulate>(call->id(), REMOVABLE_SIMULATE);
12047 if (!ast_context()->IsEffect()) result = Pop();
12048 }
12049 return ast_context()->ReturnValue(result);
12050 }
12051
12052
// %_IsJSProxy(value): true iff the value is a non-Smi heap object whose
// instance type is JS_PROXY_TYPE.
void HOptimizedGraphBuilder::GenerateIsJSProxy(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HIfContinuation continuation;
  IfBuilder if_proxy(this);

  // The Smi branch doubles as the dependency for the map load below, so the
  // load is only valid on the non-Smi path.
  HValue* smicheck = if_proxy.IfNot<HIsSmiAndBranch>(value);
  if_proxy.And();
  HValue* map = Add<HLoadNamedField>(value, smicheck, HObjectAccess::ForMap());
  HValue* instance_type =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
  if_proxy.If<HCompareNumericAndBranch>(
      instance_type, Add<HConstant>(JS_PROXY_TYPE), Token::EQ);

  if_proxy.CaptureContinuation(&continuation);
  return ast_context()->ReturnContinuation(&continuation, call->id());
}
12071
12072
// %_HasFastPackedElements(obj): true iff obj is a non-Smi whose elements
// kind is one of the packed fast kinds (smi, tagged or double).
void HOptimizedGraphBuilder::GenerateHasFastPackedElements(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* object = Pop();
  // Pre-created blocks so both the Smi fall-through and the elements-kind
  // checks can join the same continuation.
  HIfContinuation continuation(graph()->CreateBasicBlock(),
                               graph()->CreateBasicBlock());
  IfBuilder if_not_smi(this);
  if_not_smi.IfNot<HIsSmiAndBranch>(object);
  if_not_smi.Then();
  {
    NoObservableSideEffectsScope no_effects(this);

    // Compare the elements kind against the three packed fast kinds; holey
    // kinds fall through to the false branch.
    IfBuilder if_fast_packed(this);
    HValue* elements_kind = BuildGetElementsKind(object);
    if_fast_packed.If<HCompareNumericAndBranch>(
        elements_kind, Add<HConstant>(FAST_SMI_ELEMENTS), Token::EQ);
    if_fast_packed.Or();
    if_fast_packed.If<HCompareNumericAndBranch>(
        elements_kind, Add<HConstant>(FAST_ELEMENTS), Token::EQ);
    if_fast_packed.Or();
    if_fast_packed.If<HCompareNumericAndBranch>(
        elements_kind, Add<HConstant>(FAST_DOUBLE_ELEMENTS), Token::EQ);
    if_fast_packed.JoinContinuation(&continuation);
  }
  if_not_smi.JoinContinuation(&continuation);
  return ast_context()->ReturnContinuation(&continuation, call->id());
}
12100
12101
12102 // Fast support for charCodeAt(n).
GenerateStringCharCodeAt(CallRuntime * call)12103 void HOptimizedGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) {
12104 DCHECK(call->arguments()->length() == 2);
12105 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12106 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12107 HValue* index = Pop();
12108 HValue* string = Pop();
12109 HInstruction* result = BuildStringCharCodeAt(string, index);
12110 return ast_context()->ReturnInstruction(result, call->id());
12111 }
12112
12113
12114 // Fast support for SubString.
GenerateSubString(CallRuntime * call)12115 void HOptimizedGraphBuilder::GenerateSubString(CallRuntime* call) {
12116 DCHECK_EQ(3, call->arguments()->length());
12117 CHECK_ALIVE(VisitExpressions(call->arguments()));
12118 Callable callable = CodeFactory::SubString(isolate());
12119 HValue* stub = Add<HConstant>(callable.code());
12120 HValue* to = Pop();
12121 HValue* from = Pop();
12122 HValue* string = Pop();
12123 HValue* values[] = {string, from, to};
12124 HInstruction* result = New<HCallWithDescriptor>(
12125 stub, 0, callable.descriptor(), ArrayVector(values));
12126 result->set_type(HType::String());
12127 return ast_context()->ReturnInstruction(result, call->id());
12128 }
12129
12130 // Support for direct creation of new objects.
// %_NewObject(a, b): lowers to the FastNewObject stub.
void HOptimizedGraphBuilder::GenerateNewObject(CallRuntime* call) {
  DCHECK_EQ(2, call->arguments()->length());
  CHECK_ALIVE(VisitExpressions(call->arguments()));
  FastNewObjectStub stub(isolate());
  FastNewObjectDescriptor descriptor(isolate());
  // Braced-init-lists evaluate left to right, so values[0] is the
  // last-pushed (second) argument and values[1] the first.
  // NOTE(review): confirm this ordering matches FastNewObjectDescriptor's
  // expected parameter order at the intrinsic's call sites.
  HValue* values[] = {Pop(), Pop()};
  HConstant* stub_value = Add<HConstant>(stub.GetCode());
  HInstruction* result =
      New<HCallWithDescriptor>(stub_value, 0, descriptor, ArrayVector(values));
  return ast_context()->ReturnInstruction(result, call->id());
}
12142
12143 // Support for direct calls from JavaScript to native RegExp code.
GenerateRegExpExec(CallRuntime * call)12144 void HOptimizedGraphBuilder::GenerateRegExpExec(CallRuntime* call) {
12145 DCHECK_EQ(4, call->arguments()->length());
12146 CHECK_ALIVE(VisitExpressions(call->arguments()));
12147 Callable callable = CodeFactory::RegExpExec(isolate());
12148 HValue* last_match_info = Pop();
12149 HValue* index = Pop();
12150 HValue* subject = Pop();
12151 HValue* regexp_object = Pop();
12152 HValue* stub = Add<HConstant>(callable.code());
12153 HValue* values[] = {regexp_object, subject, index, last_match_info};
12154 HInstruction* result = New<HCallWithDescriptor>(
12155 stub, 0, callable.descriptor(), ArrayVector(values));
12156 return ast_context()->ReturnInstruction(result, call->id());
12157 }
12158
12159
12160 // Fast support for number to string.
GenerateNumberToString(CallRuntime * call)12161 void HOptimizedGraphBuilder::GenerateNumberToString(CallRuntime* call) {
12162 DCHECK_EQ(1, call->arguments()->length());
12163 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12164 HValue* number = Pop();
12165 HValue* result = BuildNumberToString(number, AstType::Any());
12166 return ast_context()->ReturnValue(result);
12167 }
12168
12169
12170 // Fast support for calls.
// %_Call(target, receiver, ...args): lowers to the Call builtin trampoline.
void HOptimizedGraphBuilder::GenerateCall(CallRuntime* call) {
  DCHECK_LE(2, call->arguments()->length());
  CHECK_ALIVE(VisitExpressions(call->arguments()));
  CallTrampolineDescriptor descriptor(isolate());
  // Everything except the first expression (the target) becomes pushed
  // arguments: the receiver plus the actual call arguments.
  PushArgumentsFromEnvironment(call->arguments()->length() - 1);
  HValue* trampoline = Add<HConstant>(isolate()->builtins()->Call());
  // After PushArgumentsFromEnvironment consumed the top N-1 values, the
  // remaining value on the environment stack is the target.
  HValue* target = Pop();
  // The actual argument count excludes both the target and the receiver.
  HValue* values[] = {target, Add<HConstant>(call->arguments()->length() - 2)};
  HInstruction* result =
      New<HCallWithDescriptor>(trampoline, call->arguments()->length() - 1,
                               descriptor, ArrayVector(values));
  return ast_context()->ReturnInstruction(result, call->id());
}
12184
12185
GenerateFixedArrayGet(CallRuntime * call)12186 void HOptimizedGraphBuilder::GenerateFixedArrayGet(CallRuntime* call) {
12187 DCHECK(call->arguments()->length() == 2);
12188 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12189 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12190 HValue* index = Pop();
12191 HValue* object = Pop();
12192 HInstruction* result = New<HLoadKeyed>(
12193 object, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE);
12194 return ast_context()->ReturnInstruction(result, call->id());
12195 }
12196
12197
GenerateFixedArraySet(CallRuntime * call)12198 void HOptimizedGraphBuilder::GenerateFixedArraySet(CallRuntime* call) {
12199 DCHECK(call->arguments()->length() == 3);
12200 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12201 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12202 CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
12203 HValue* value = Pop();
12204 HValue* index = Pop();
12205 HValue* object = Pop();
12206 NoObservableSideEffectsScope no_effects(this);
12207 Add<HStoreKeyed>(object, index, value, nullptr, FAST_HOLEY_ELEMENTS);
12208 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
12209 }
12210
12211
// %_TheHole(): returns the hole sentinel value.
void HOptimizedGraphBuilder::GenerateTheHole(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 0);
  return ast_context()->ReturnValue(graph()->GetConstantHole());
}
12216
12217
GenerateCreateIterResultObject(CallRuntime * call)12218 void HOptimizedGraphBuilder::GenerateCreateIterResultObject(CallRuntime* call) {
12219 DCHECK_EQ(2, call->arguments()->length());
12220 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12221 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12222 HValue* done = Pop();
12223 HValue* value = Pop();
12224 HValue* result = BuildCreateIterResultObject(value, done);
12225 return ast_context()->ReturnValue(result);
12226 }
12227
12228
GenerateJSCollectionGetTable(CallRuntime * call)12229 void HOptimizedGraphBuilder::GenerateJSCollectionGetTable(CallRuntime* call) {
12230 DCHECK(call->arguments()->length() == 1);
12231 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12232 HValue* receiver = Pop();
12233 HInstruction* result = New<HLoadNamedField>(
12234 receiver, nullptr, HObjectAccess::ForJSCollectionTable());
12235 return ast_context()->ReturnInstruction(result, call->id());
12236 }
12237
12238
GenerateStringGetRawHashField(CallRuntime * call)12239 void HOptimizedGraphBuilder::GenerateStringGetRawHashField(CallRuntime* call) {
12240 DCHECK(call->arguments()->length() == 1);
12241 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12242 HValue* object = Pop();
12243 HInstruction* result = New<HLoadNamedField>(
12244 object, nullptr, HObjectAccess::ForStringHashField());
12245 return ast_context()->ReturnInstruction(result, call->id());
12246 }
12247
12248
template <typename CollectionType>
HValue* HOptimizedGraphBuilder::BuildAllocateOrderedHashTable() {
  // Builds a minimal-capacity OrderedHashTable backed by a FixedArray:
  // header fields, a bucket array initialized to kNotFound, and a data table
  // filled with undefined.  Layout constants come from CollectionType.
  static const int kCapacity = CollectionType::kMinCapacity;
  static const int kBucketCount = kCapacity / CollectionType::kLoadFactor;
  static const int kFixedArrayLength = CollectionType::kHashTableStartIndex +
                                       kBucketCount +
                                       (kCapacity * CollectionType::kEntrySize);
  static const int kSizeInBytes =
      FixedArray::kHeaderSize + (kFixedArrayLength * kPointerSize);

  // Allocate the table and add the proper map.
  HValue* table =
      Add<HAllocate>(Add<HConstant>(kSizeInBytes), HType::HeapObject(),
                     NOT_TENURED, FIXED_ARRAY_TYPE, graph()->GetConstant0());
  AddStoreMapConstant(table, isolate()->factory()->ordered_hash_table_map());

  // Initialize the FixedArray...
  HValue* length = Add<HConstant>(kFixedArrayLength);
  Add<HStoreNamedField>(table, HObjectAccess::ForFixedArrayLength(), length);

  // ...and the OrderedHashTable fields.
  Add<HStoreNamedField>(
      table,
      HObjectAccess::ForOrderedHashTableNumberOfBuckets<CollectionType>(),
      Add<HConstant>(kBucketCount));
  Add<HStoreNamedField>(
      table,
      HObjectAccess::ForOrderedHashTableNumberOfElements<CollectionType>(),
      graph()->GetConstant0());
  Add<HStoreNamedField>(
      table, HObjectAccess::ForOrderedHashTableNumberOfDeletedElements<
                 CollectionType>(),
      graph()->GetConstant0());

  // Fill the buckets with kNotFound.
  HValue* not_found = Add<HConstant>(CollectionType::kNotFound);
  for (int i = 0; i < kBucketCount; ++i) {
    Add<HStoreNamedField>(
        table, HObjectAccess::ForOrderedHashTableBucket<CollectionType>(i),
        not_found);
  }

  // Fill the data table with undefined.
  HValue* undefined = graph()->GetConstantUndefined();
  for (int i = 0; i < (kCapacity * CollectionType::kEntrySize); ++i) {
    Add<HStoreNamedField>(table,
                          HObjectAccess::ForOrderedHashTableDataTableIndex<
                              CollectionType, kBucketCount>(i),
                          undefined);
  }

  return table;
}
12302
12303
GenerateSetInitialize(CallRuntime * call)12304 void HOptimizedGraphBuilder::GenerateSetInitialize(CallRuntime* call) {
12305 DCHECK(call->arguments()->length() == 1);
12306 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12307 HValue* receiver = Pop();
12308
12309 NoObservableSideEffectsScope no_effects(this);
12310 HValue* table = BuildAllocateOrderedHashTable<OrderedHashSet>();
12311 Add<HStoreNamedField>(receiver, HObjectAccess::ForJSCollectionTable(), table);
12312 return ast_context()->ReturnValue(receiver);
12313 }
12314
12315
GenerateMapInitialize(CallRuntime * call)12316 void HOptimizedGraphBuilder::GenerateMapInitialize(CallRuntime* call) {
12317 DCHECK(call->arguments()->length() == 1);
12318 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12319 HValue* receiver = Pop();
12320
12321 NoObservableSideEffectsScope no_effects(this);
12322 HValue* table = BuildAllocateOrderedHashTable<OrderedHashMap>();
12323 Add<HStoreNamedField>(receiver, HObjectAccess::ForJSCollectionTable(), table);
12324 return ast_context()->ReturnValue(receiver);
12325 }
12326
12327
template <typename CollectionType>
void HOptimizedGraphBuilder::BuildOrderedHashTableClear(HValue* receiver) {
  // Clears a JS collection by allocating a fresh empty table and installing
  // it on the receiver.  The old table is forwarded to the new one via its
  // next-table field and its deleted-elements field is set to the
  // cleared-table sentinel — presumably so existing table users can detect
  // the transition; confirm against OrderedHashTable's iteration protocol.
  HValue* old_table = Add<HLoadNamedField>(
      receiver, nullptr, HObjectAccess::ForJSCollectionTable());
  HValue* new_table = BuildAllocateOrderedHashTable<CollectionType>();
  Add<HStoreNamedField>(
      old_table, HObjectAccess::ForOrderedHashTableNextTable<CollectionType>(),
      new_table);
  Add<HStoreNamedField>(
      old_table, HObjectAccess::ForOrderedHashTableNumberOfDeletedElements<
                     CollectionType>(),
      Add<HConstant>(CollectionType::kClearedTableSentinel));
  Add<HStoreNamedField>(receiver, HObjectAccess::ForJSCollectionTable(),
                        new_table);
}
12343
12344
GenerateSetClear(CallRuntime * call)12345 void HOptimizedGraphBuilder::GenerateSetClear(CallRuntime* call) {
12346 DCHECK(call->arguments()->length() == 1);
12347 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12348 HValue* receiver = Pop();
12349
12350 NoObservableSideEffectsScope no_effects(this);
12351 BuildOrderedHashTableClear<OrderedHashSet>(receiver);
12352 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
12353 }
12354
12355
GenerateMapClear(CallRuntime * call)12356 void HOptimizedGraphBuilder::GenerateMapClear(CallRuntime* call) {
12357 DCHECK(call->arguments()->length() == 1);
12358 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12359 HValue* receiver = Pop();
12360
12361 NoObservableSideEffectsScope no_effects(this);
12362 BuildOrderedHashTableClear<OrderedHashMap>(receiver);
12363 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
12364 }
12365
// Emits a debug-break instruction and returns 0.  The |call| parameter is
// unused; it keeps the signature uniform with the other intrinsic generators.
void HOptimizedGraphBuilder::GenerateDebugBreakInOptimizedCode(
    CallRuntime* call) {
  Add<HDebugBreak>();
  return ast_context()->ReturnValue(graph()->GetConstant0());
}
12371
12372
GenerateDebugIsActive(CallRuntime * call)12373 void HOptimizedGraphBuilder::GenerateDebugIsActive(CallRuntime* call) {
12374 DCHECK(call->arguments()->length() == 0);
12375 HValue* ref =
12376 Add<HConstant>(ExternalReference::debug_is_active_address(isolate()));
12377 HValue* value =
12378 Add<HLoadNamedField>(ref, nullptr, HObjectAccess::ForExternalUInteger8());
12379 return ast_context()->ReturnValue(value);
12380 }
12381
12382 #undef CHECK_BAILOUT
12383 #undef CHECK_ALIVE
12384
12385
// Builds a JS_FUNCTION environment for compiling |closure|: parameters
// (including the receiver), one special slot, and the function's
// stack-allocated locals, all initially NULL.
HEnvironment::HEnvironment(HEnvironment* outer,
                           Scope* scope,
                           Handle<JSFunction> closure,
                           Zone* zone)
    : closure_(closure),
      values_(0, zone),
      frame_type_(JS_FUNCTION),
      parameter_count_(0),
      specials_count_(1),
      local_count_(0),
      outer_(outer),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(BailoutId::None()),
      zone_(zone) {
  DeclarationScope* declaration_scope = scope->GetDeclarationScope();
  // The +1 accounts for the receiver, which num_parameters() excludes.
  Initialize(declaration_scope->num_parameters() + 1,
             declaration_scope->num_stack_slots(), 0);
}
12406
12407
// Builds a STUB-frame environment with |parameter_count| parameters, one
// special slot, and no locals or expression stack.
HEnvironment::HEnvironment(Zone* zone, int parameter_count)
    : values_(0, zone),
      frame_type_(STUB),
      parameter_count_(parameter_count),
      specials_count_(1),
      local_count_(0),
      outer_(NULL),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(BailoutId::None()),
      zone_(zone) {
  Initialize(parameter_count, 0, 0);
}
12422
12423
// Copy "constructor": most fields are overwritten by Initialize(other),
// which also deep-copies the chain of outer environments.
HEnvironment::HEnvironment(const HEnvironment* other, Zone* zone)
    : values_(0, zone),
      frame_type_(JS_FUNCTION),
      parameter_count_(0),
      specials_count_(0),
      local_count_(0),
      outer_(NULL),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(other->ast_id()),
      zone_(zone) {
  Initialize(other);
}
12438
12439
// Builds an artificial-frame environment (arguments adaptor, constructor
// stub, getter/setter stub) holding |arguments| values. The values
// themselves are pushed later by the caller; only capacity is reserved
// here (see CreateStubEnvironment).
HEnvironment::HEnvironment(HEnvironment* outer,
                           Handle<JSFunction> closure,
                           FrameType frame_type,
                           int arguments,
                           Zone* zone)
    : closure_(closure),
      values_(arguments, zone),
      frame_type_(frame_type),
      parameter_count_(arguments),
      specials_count_(0),
      local_count_(0),
      outer_(outer),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(BailoutId::None()),
      zone_(zone) {
}
12458
12459
Initialize(int parameter_count,int local_count,int stack_height)12460 void HEnvironment::Initialize(int parameter_count,
12461 int local_count,
12462 int stack_height) {
12463 parameter_count_ = parameter_count;
12464 local_count_ = local_count;
12465
12466 // Avoid reallocating the temporaries' backing store on the first Push.
12467 int total = parameter_count + specials_count_ + local_count + stack_height;
12468 values_.Initialize(total + 4, zone());
12469 for (int i = 0; i < total; ++i) values_.Add(NULL, zone());
12470 }
12471
12472
// Makes this environment a full copy of |other|, including a deep copy of
// the outer-environment chain and the pop/push/assignment history.
void HEnvironment::Initialize(const HEnvironment* other) {
  closure_ = other->closure();
  values_.AddAll(other->values_, zone());
  assigned_variables_.Union(other->assigned_variables_, zone());
  frame_type_ = other->frame_type_;
  parameter_count_ = other->parameter_count_;
  local_count_ = other->local_count_;
  if (other->outer_ != NULL) outer_ = other->outer_->Copy();  // Deep copy.
  entry_ = other->entry_;
  pop_count_ = other->pop_count_;
  push_count_ = other->push_count_;
  specials_count_ = other->specials_count_;
  ast_id_ = other->ast_id_;
}
12487
12488
// Merges |other| (the environment flowing in along a new predecessor edge)
// into this environment at a non-loop merge |block|, creating or extending
// phis wherever the two environments disagree on a slot.
void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
  DCHECK(!block->IsLoopHeader());
  DCHECK(values_.length() == other->values_.length());

  int length = values_.length();
  for (int i = 0; i < length; ++i) {
    HValue* value = values_[i];
    if (value != NULL && value->IsPhi() && value->block() == block) {
      // There is already a phi for the i'th value.
      HPhi* phi = HPhi::cast(value);
      // Assert index is correct and that we haven't missed an incoming edge.
      DCHECK(phi->merged_index() == i || !phi->HasMergedIndex());
      DCHECK(phi->OperandCount() == block->predecessors()->length());
      phi->AddInput(other->values_[i]);
    } else if (values_[i] != other->values_[i]) {
      // There is a fresh value on the incoming edge, a phi is needed.
      DCHECK(values_[i] != NULL && other->values_[i] != NULL);
      HPhi* phi = block->AddNewPhi(i);
      HValue* old_value = values_[i];
      // Seed the phi with the current value once per already-merged
      // predecessor, then append the new edge's value.
      for (int j = 0; j < block->predecessors()->length(); j++) {
        phi->AddInput(old_value);
      }
      phi->AddInput(other->values_[i]);
      this->values_[i] = phi;
    }
  }
}
12516
12517
// Records |value| at slot |index| and marks the slot as assigned so that
// merge points know a phi may be required for it.
void HEnvironment::Bind(int index, HValue* value) {
  DCHECK(value != NULL);
  assigned_variables_.Add(index, zone());
  values_[index] = value;
}
12523
12524
HasExpressionAt(int index) const12525 bool HEnvironment::HasExpressionAt(int index) const {
12526 return index >= parameter_count_ + specials_count_ + local_count_;
12527 }
12528
12529
ExpressionStackIsEmpty() const12530 bool HEnvironment::ExpressionStackIsEmpty() const {
12531 DCHECK(length() >= first_expression_index());
12532 return length() == first_expression_index();
12533 }
12534
12535
SetExpressionStackAt(int index_from_top,HValue * value)12536 void HEnvironment::SetExpressionStackAt(int index_from_top, HValue* value) {
12537 int count = index_from_top + 1;
12538 int index = values_.length() - count;
12539 DCHECK(HasExpressionAt(index));
12540 // The push count must include at least the element in question or else
12541 // the new value will not be included in this environment's history.
12542 if (push_count_ < count) {
12543 // This is the same effect as popping then re-pushing 'count' elements.
12544 pop_count_ += (count - push_count_);
12545 push_count_ = count;
12546 }
12547 values_[index] = value;
12548 }
12549
12550
RemoveExpressionStackAt(int index_from_top)12551 HValue* HEnvironment::RemoveExpressionStackAt(int index_from_top) {
12552 int count = index_from_top + 1;
12553 int index = values_.length() - count;
12554 DCHECK(HasExpressionAt(index));
12555 // Simulate popping 'count' elements and then
12556 // pushing 'count - 1' elements back.
12557 pop_count_ += Max(count - push_count_, 0);
12558 push_count_ = Max(push_count_ - count, 0) + (count - 1);
12559 return values_.Remove(index);
12560 }
12561
12562
Drop(int count)12563 void HEnvironment::Drop(int count) {
12564 for (int i = 0; i < count; ++i) {
12565 Pop();
12566 }
12567 }
12568
12569
// Debug helper: dumps this environment to stdout via operator<<.
void HEnvironment::Print() const {
  OFStream os(stdout);
  os << *this << "\n";
}
12574
12575
// Returns a zone-allocated deep copy (including the outer chain).
HEnvironment* HEnvironment::Copy() const {
  return new(zone()) HEnvironment(this, zone());
}
12579
12580
CopyWithoutHistory() const12581 HEnvironment* HEnvironment::CopyWithoutHistory() const {
12582 HEnvironment* result = Copy();
12583 result->ClearHistory();
12584 return result;
12585 }
12586
12587
CopyAsLoopHeader(HBasicBlock * loop_header) const12588 HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
12589 HEnvironment* new_env = Copy();
12590 for (int i = 0; i < values_.length(); ++i) {
12591 HPhi* phi = loop_header->AddNewPhi(i);
12592 phi->AddInput(values_[i]);
12593 new_env->values_[i] = phi;
12594 }
12595 new_env->ClearHistory();
12596 return new_env;
12597 }
12598
12599
CreateStubEnvironment(HEnvironment * outer,Handle<JSFunction> target,FrameType frame_type,int arguments) const12600 HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
12601 Handle<JSFunction> target,
12602 FrameType frame_type,
12603 int arguments) const {
12604 HEnvironment* new_env =
12605 new(zone()) HEnvironment(outer, target, frame_type,
12606 arguments + 1, zone());
12607 for (int i = 0; i <= arguments; ++i) { // Include receiver.
12608 new_env->Push(ExpressionStackAt(arguments - i));
12609 }
12610 new_env->ClearHistory();
12611 return new_env;
12612 }
12613
// Flags this JS_FUNCTION frame as one that performs a tail call, so the
// deoptimizer can drop it when materializing frames.
void HEnvironment::MarkAsTailCaller() {
  DCHECK_EQ(JS_FUNCTION, frame_type());
  frame_type_ = TAIL_CALLER_FUNCTION;
}
12618
// Reverts MarkAsTailCaller(), restoring the plain JS_FUNCTION frame type.
void HEnvironment::ClearTailCallerMark() {
  DCHECK_EQ(TAIL_CALLER_FUNCTION, frame_type());
  frame_type_ = JS_FUNCTION;
}
12623
// Builds the environment for inlining a call to |target| with |arguments|
// actual arguments: the caller's environment minus the call operands
// becomes the outer environment, artificial frames are interposed as
// required by the inlining kind and by arity mismatch, and a fresh inner
// JS_FUNCTION environment is populated with the argument values.
HEnvironment* HEnvironment::CopyForInlining(
    Handle<JSFunction> target, int arguments, FunctionLiteral* function,
    HConstant* undefined, InliningKind inlining_kind,
    TailCallMode syntactic_tail_call_mode) const {
  DCHECK_EQ(JS_FUNCTION, frame_type());

  // Outer environment is a copy of this one without the arguments.
  int arity = function->scope()->num_parameters();

  HEnvironment* outer = Copy();
  outer->Drop(arguments + 1);  // Including receiver.
  outer->ClearHistory();

  if (syntactic_tail_call_mode == TailCallMode::kAllow) {
    DCHECK_EQ(NORMAL_RETURN, inlining_kind);
    outer->MarkAsTailCaller();
  }

  if (inlining_kind == CONSTRUCT_CALL_RETURN) {
    // Create artificial constructor stub environment.  The receiver should
    // actually be the constructor function, but we pass the newly allocated
    // object instead, DoComputeConstructStubFrame() relies on that.
    outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
  } else if (inlining_kind == GETTER_CALL_RETURN) {
    // We need an additional StackFrame::INTERNAL frame for restoring the
    // correct context.
    outer = CreateStubEnvironment(outer, target, JS_GETTER, arguments);
  } else if (inlining_kind == SETTER_CALL_RETURN) {
    // We need an additional StackFrame::INTERNAL frame for temporarily saving
    // the argument of the setter, see StoreStubCompiler::CompileStoreViaSetter.
    outer = CreateStubEnvironment(outer, target, JS_SETTER, arguments);
  }

  if (arity != arguments) {
    // Create artificial arguments adaptation environment.
    outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments);
  }

  HEnvironment* inner =
      new(zone()) HEnvironment(outer, function->scope(), target, zone());
  // Get the argument values from the original environment.  Missing
  // arguments (arity > arguments) are filled with undefined.
  for (int i = 0; i <= arity; ++i) {  // Include receiver.
    HValue* push = (i <= arguments) ?
        ExpressionStackAt(arguments - i) : undefined;
    inner->SetValueAt(i, push);
  }
  // Slot arity + 1 is the context; remaining locals start out undefined.
  inner->SetValueAt(arity + 1, context());
  for (int i = arity + 2; i < inner->length(); ++i) {
    inner->SetValueAt(i, undefined);
  }

  inner->set_ast_id(BailoutId::FunctionEntry());
  return inner;
}
12678
12679
operator <<(std::ostream & os,const HEnvironment & env)12680 std::ostream& operator<<(std::ostream& os, const HEnvironment& env) {
12681 for (int i = 0; i < env.length(); i++) {
12682 if (i == 0) os << "parameters\n";
12683 if (i == env.parameter_count()) os << "specials\n";
12684 if (i == env.parameter_count() + env.specials_count()) os << "locals\n";
12685 if (i == env.parameter_count() + env.specials_count() + env.local_count()) {
12686 os << "expressions\n";
12687 }
12688 HValue* val = env.values()->at(i);
12689 os << i << ": ";
12690 if (val != NULL) {
12691 os << val;
12692 } else {
12693 os << "NULL";
12694 }
12695 os << "\n";
12696 }
12697 return os << "\n";
12698 }
12699
12700
TraceCompilation(CompilationInfo * info)12701 void HTracer::TraceCompilation(CompilationInfo* info) {
12702 Tag tag(this, "compilation");
12703 std::string name;
12704 if (info->parse_info()) {
12705 Object* source_name = info->script()->name();
12706 if (source_name->IsString()) {
12707 String* str = String::cast(source_name);
12708 if (str->length() > 0) {
12709 name.append(str->ToCString().get());
12710 name.append(":");
12711 }
12712 }
12713 }
12714 std::unique_ptr<char[]> method_name = info->GetDebugName();
12715 name.append(method_name.get());
12716 if (info->IsOptimizing()) {
12717 PrintStringProperty("name", name.c_str());
12718 PrintIndent();
12719 trace_.Add("method \"%s:%d\"\n", method_name.get(),
12720 info->optimization_id());
12721 } else {
12722 PrintStringProperty("name", name.c_str());
12723 PrintStringProperty("method", "stub");
12724 }
12725 PrintLongProperty("date",
12726 static_cast<int64_t>(base::OS::TimeCurrentMillis()));
12727 }
12728
12729
// Traces the Lithium (LIR) form of |chunk| alongside its Hydrogen graph.
// Handle dereferences are allowed here because tracing never runs on the
// concurrent recompilation thread (asserted below).
void HTracer::TraceLithium(const char* name, LChunk* chunk) {
  DCHECK(!chunk->isolate()->concurrent_recompilation_enabled());
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  Trace(name, chunk->graph(), chunk);
}
12736
12737
// Traces the Hydrogen (HIR) graph only; the NULL chunk suppresses the
// LIR sections in Trace().
void HTracer::TraceHydrogen(const char* name, HGraph* graph) {
  DCHECK(!graph->isolate()->concurrent_recompilation_enabled());
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  Trace(name, graph, NULL);
}
12744
12745
// Writes one "cfg" section in the C1Visualizer trace format: for each
// basic block its predecessors/successors, flags, dominator, loop depth,
// phis, HIR instructions and (when |chunk| is non-NULL) the corresponding
// LIR instructions.
void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
  Tag tag(this, "cfg");
  PrintStringProperty("name", name);
  const ZoneList<HBasicBlock*>* blocks = graph->blocks();
  for (int i = 0; i < blocks->length(); i++) {
    HBasicBlock* current = blocks->at(i);
    Tag block_tag(this, "block");
    PrintBlockProperty("name", current->block_id());
    // Bytecode indices are not tracked for Crankshaft blocks.
    PrintIntProperty("from_bci", -1);
    PrintIntProperty("to_bci", -1);

    if (!current->predecessors()->is_empty()) {
      PrintIndent();
      trace_.Add("predecessors");
      for (int j = 0; j < current->predecessors()->length(); ++j) {
        trace_.Add(" \"B%d\"", current->predecessors()->at(j)->block_id());
      }
      trace_.Add("\n");
    } else {
      PrintEmptyProperty("predecessors");
    }

    if (current->end()->SuccessorCount() == 0) {
      PrintEmptyProperty("successors");
    } else {
      PrintIndent();
      trace_.Add("successors");
      for (HSuccessorIterator it(current->end()); !it.Done(); it.Advance()) {
        trace_.Add(" \"B%d\"", it.Current()->block_id());
      }
      trace_.Add("\n");
    }

    PrintEmptyProperty("xhandlers");

    {
      PrintIndent();
      trace_.Add("flags");
      if (current->IsLoopSuccessorDominator()) {
        trace_.Add(" \"dom-loop-succ\"");
      }
      if (current->IsUnreachable()) {
        trace_.Add(" \"dead\"");
      }
      if (current->is_osr_entry()) {
        trace_.Add(" \"osr\"");
      }
      trace_.Add("\n");
    }

    if (current->dominator() != NULL) {
      PrintBlockProperty("dominator", current->dominator()->block_id());
    }

    PrintIntProperty("loop_depth", current->LoopNestingDepth());

    if (chunk != NULL) {
      // Map the block to its lifetime-position range in the LIR chunk.
      int first_index = current->first_instruction_index();
      int last_index = current->last_instruction_index();
      PrintIntProperty(
          "first_lir_id",
          LifetimePosition::FromInstructionIndex(first_index).Value());
      PrintIntProperty(
          "last_lir_id",
          LifetimePosition::FromInstructionIndex(last_index).Value());
    }

    {
      // Phis are reported as the block's local "state".
      Tag states_tag(this, "states");
      Tag locals_tag(this, "locals");
      int total = current->phis()->length();
      PrintIntProperty("size", current->phis()->length());
      PrintStringProperty("method", "None");
      for (int j = 0; j < total; ++j) {
        HPhi* phi = current->phis()->at(j);
        PrintIndent();
        std::ostringstream os;
        os << phi->merged_index() << " " << NameOf(phi) << " " << *phi << "\n";
        trace_.Add(os.str().c_str());
      }
    }

    {
      Tag HIR_tag(this, "HIR");
      for (HInstructionIterator it(current); !it.Done(); it.Advance()) {
        HInstruction* instruction = it.Current();
        int uses = instruction->UseCount();
        PrintIndent();
        std::ostringstream os;
        os << "0 " << uses << " " << NameOf(instruction) << " " << *instruction;
        if (instruction->has_position()) {
          const SourcePosition pos = instruction->position();
          os << " pos:";
          if (pos.isInlined()) os << "inlining(" << pos.InliningId() << "),";
          os << pos.ScriptOffset();
        }
        os << " <|@\n";
        trace_.Add(os.str().c_str());
      }
    }


    if (chunk != NULL) {
      Tag LIR_tag(this, "LIR");
      int first_index = current->first_instruction_index();
      int last_index = current->last_instruction_index();
      if (first_index != -1 && last_index != -1) {
        const ZoneList<LInstruction*>* instructions = chunk->instructions();
        for (int i = first_index; i <= last_index; ++i) {
          LInstruction* linstr = instructions->at(i);
          // Gaps can leave NULL entries; skip them.
          if (linstr != NULL) {
            PrintIndent();
            trace_.Add("%d ",
                       LifetimePosition::FromInstructionIndex(i).Value());
            linstr->PrintTo(&trace_);
            std::ostringstream os;
            os << " [hir:" << NameOf(linstr->hydrogen_value()) << "] <|@\n";
            trace_.Add(os.str().c_str());
          }
        }
      }
    }
  }
}
12870
12871
TraceLiveRanges(const char * name,LAllocator * allocator)12872 void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) {
12873 Tag tag(this, "intervals");
12874 PrintStringProperty("name", name);
12875
12876 const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
12877 for (int i = 0; i < fixed_d->length(); ++i) {
12878 TraceLiveRange(fixed_d->at(i), "fixed", allocator->zone());
12879 }
12880
12881 const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges();
12882 for (int i = 0; i < fixed->length(); ++i) {
12883 TraceLiveRange(fixed->at(i), "fixed", allocator->zone());
12884 }
12885
12886 const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
12887 for (int i = 0; i < live_ranges->length(); ++i) {
12888 TraceLiveRange(live_ranges->at(i), "object", allocator->zone());
12889 }
12890 }
12891
12892
// Writes one live-range line: id, type, assigned register or spill slot,
// parent/hint ids, the covered use intervals and the beneficial use
// positions. Empty or NULL ranges are skipped.
void HTracer::TraceLiveRange(LiveRange* range, const char* type,
                             Zone* zone) {
  if (range != NULL && !range->IsEmpty()) {
    PrintIndent();
    trace_.Add("%d %s", range->id(), type);
    if (range->HasRegisterAssigned()) {
      LOperand* op = range->CreateAssignedOperand(zone);
      int assigned_reg = op->index();
      if (op->IsDoubleRegister()) {
        trace_.Add(" \"%s\"",
                   GetRegConfig()->GetDoubleRegisterName(assigned_reg));
      } else {
        DCHECK(op->IsRegister());
        trace_.Add(" \"%s\"",
                   GetRegConfig()->GetGeneralRegisterName(assigned_reg));
      }
    } else if (range->IsSpilled()) {
      // Spill slots live on the top-level range of a split chain.
      LOperand* op = range->TopLevel()->GetSpillOperand();
      if (op->IsDoubleStackSlot()) {
        trace_.Add(" \"double_stack:%d\"", op->index());
      } else {
        DCHECK(op->IsStackSlot());
        trace_.Add(" \"stack:%d\"", op->index());
      }
    }
    // Child ranges report their parent's id; top-level ranges report their
    // own.
    int parent_index = -1;
    if (range->IsChild()) {
      parent_index = range->parent()->id();
    } else {
      parent_index = range->id();
    }
    LOperand* op = range->FirstHint();
    int hint_index = -1;
    if (op != NULL && op->IsUnallocated()) {
      hint_index = LUnallocated::cast(op)->virtual_register();
    }
    trace_.Add(" %d %d", parent_index, hint_index);
    UseInterval* cur_interval = range->first_interval();
    while (cur_interval != NULL && range->Covers(cur_interval->start())) {
      trace_.Add(" [%d, %d[",
                 cur_interval->start().Value(),
                 cur_interval->end().Value());
      cur_interval = cur_interval->next();
    }

    UsePosition* current_pos = range->first_pos();
    while (current_pos != NULL) {
      if (current_pos->RegisterIsBeneficial() || FLAG_trace_all_uses) {
        trace_.Add(" %d M", current_pos->pos().Value());
      }
      current_pos = current_pos->next();
    }

    trace_.Add(" \"\"\n");
  }
}
12949
12950
// Appends the buffered trace output to the trace file and empties the
// in-memory buffer.
void HTracer::FlushToFile() {
  AppendChars(filename_.start(), trace_.ToCString().get(), trace_.length(),
              false);
  trace_.Reset();
}
12956
12957
Initialize(CompilationInfo * info)12958 void HStatistics::Initialize(CompilationInfo* info) {
12959 if (!info->has_shared_info()) return;
12960 source_size_ += info->shared_info()->SourceSize();
12961 }
12962
12963
Print()12964 void HStatistics::Print() {
12965 PrintF(
12966 "\n"
12967 "----------------------------------------"
12968 "----------------------------------------\n"
12969 "--- Hydrogen timing results:\n"
12970 "----------------------------------------"
12971 "----------------------------------------\n");
12972 base::TimeDelta sum;
12973 for (int i = 0; i < times_.length(); ++i) {
12974 sum += times_[i];
12975 }
12976
12977 for (int i = 0; i < names_.length(); ++i) {
12978 PrintF("%33s", names_[i]);
12979 double ms = times_[i].InMillisecondsF();
12980 double percent = times_[i].PercentOf(sum);
12981 PrintF(" %8.3f ms / %4.1f %% ", ms, percent);
12982
12983 size_t size = sizes_[i];
12984 double size_percent = static_cast<double>(size) * 100 / total_size_;
12985 PrintF(" %9zu bytes / %4.1f %%\n", size, size_percent);
12986 }
12987
12988 PrintF(
12989 "----------------------------------------"
12990 "----------------------------------------\n");
12991 base::TimeDelta total = create_graph_ + optimize_graph_ + generate_code_;
12992 PrintF("%33s %8.3f ms / %4.1f %% \n", "Create graph",
12993 create_graph_.InMillisecondsF(), create_graph_.PercentOf(total));
12994 PrintF("%33s %8.3f ms / %4.1f %% \n", "Optimize graph",
12995 optimize_graph_.InMillisecondsF(), optimize_graph_.PercentOf(total));
12996 PrintF("%33s %8.3f ms / %4.1f %% \n", "Generate and install code",
12997 generate_code_.InMillisecondsF(), generate_code_.PercentOf(total));
12998 PrintF(
12999 "----------------------------------------"
13000 "----------------------------------------\n");
13001 PrintF("%33s %8.3f ms %9zu bytes\n", "Total",
13002 total.InMillisecondsF(), total_size_);
13003 PrintF("%33s (%.1f times slower than full code gen)\n", "",
13004 total.TimesOf(full_code_gen_));
13005
13006 double source_size_in_kb = static_cast<double>(source_size_) / 1024;
13007 double normalized_time = source_size_in_kb > 0
13008 ? total.InMillisecondsF() / source_size_in_kb
13009 : 0;
13010 double normalized_size_in_kb =
13011 source_size_in_kb > 0
13012 ? static_cast<double>(total_size_) / 1024 / source_size_in_kb
13013 : 0;
13014 PrintF("%33s %8.3f ms %7.3f kB allocated\n",
13015 "Average per kB source", normalized_time, normalized_size_in_kb);
13016 }
13017
13018
SaveTiming(const char * name,base::TimeDelta time,size_t size)13019 void HStatistics::SaveTiming(const char* name, base::TimeDelta time,
13020 size_t size) {
13021 total_size_ += size;
13022 for (int i = 0; i < names_.length(); ++i) {
13023 if (strcmp(names_[i], name) == 0) {
13024 times_[i] += time;
13025 sizes_[i] += size;
13026 return;
13027 }
13028 }
13029 names_.Add(name);
13030 times_.Add(time);
13031 sizes_.Add(size);
13032 }
13033
13034
// End of a compiler phase: emit the graph trace if tracing is enabled and,
// in debug builds, run a (partial) graph verification.
HPhase::~HPhase() {
  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceHydrogen(name(), graph_);
  }

#ifdef DEBUG
  graph_->Verify(false);  // No full verify.
#endif
}
13044
13045 } // namespace internal
13046 } // namespace v8
13047