1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/crankshaft/hydrogen-instructions.h"
6
7 #include "src/base/bits.h"
8 #include "src/base/safe_math.h"
9 #include "src/crankshaft/hydrogen-infer-representation.h"
10 #include "src/double.h"
11 #include "src/elements.h"
12 #include "src/factory.h"
13
14 #if V8_TARGET_ARCH_IA32
15 #include "src/crankshaft/ia32/lithium-ia32.h" // NOLINT
16 #elif V8_TARGET_ARCH_X64
17 #include "src/crankshaft/x64/lithium-x64.h" // NOLINT
18 #elif V8_TARGET_ARCH_ARM64
19 #include "src/crankshaft/arm64/lithium-arm64.h" // NOLINT
20 #elif V8_TARGET_ARCH_ARM
21 #include "src/crankshaft/arm/lithium-arm.h" // NOLINT
22 #elif V8_TARGET_ARCH_PPC
23 #include "src/crankshaft/ppc/lithium-ppc.h" // NOLINT
24 #elif V8_TARGET_ARCH_MIPS
25 #include "src/crankshaft/mips/lithium-mips.h" // NOLINT
26 #elif V8_TARGET_ARCH_MIPS64
27 #include "src/crankshaft/mips64/lithium-mips64.h" // NOLINT
28 #elif V8_TARGET_ARCH_X87
29 #include "src/crankshaft/x87/lithium-x87.h" // NOLINT
30 #else
31 #error Unsupported target architecture.
32 #endif
33
34 namespace v8 {
35 namespace internal {
36
37 #define DEFINE_COMPILE(type) \
38 LInstruction* H##type::CompileToLithium(LChunkBuilder* builder) { \
39 return builder->Do##type(this); \
40 }
HYDROGEN_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
42 #undef DEFINE_COMPILE
43
44
45 Isolate* HValue::isolate() const {
46 DCHECK(block() != NULL);
47 return block()->isolate();
48 }
49
50
void HValue::AssumeRepresentation(Representation r) {
52 if (CheckFlag(kFlexibleRepresentation)) {
53 ChangeRepresentation(r);
54 // The representation of the value is dictated by type feedback and
55 // will not be changed later.
56 ClearFlag(kFlexibleRepresentation);
57 }
58 }
59
60
void HValue::InferRepresentation(HInferRepresentationPhase* h_infer) {
62 DCHECK(CheckFlag(kFlexibleRepresentation));
63 Representation new_rep = RepresentationFromInputs();
64 UpdateRepresentation(new_rep, h_infer, "inputs");
65 new_rep = RepresentationFromUses();
66 UpdateRepresentation(new_rep, h_infer, "uses");
67 if (representation().IsSmi() && HasNonSmiUse()) {
68 UpdateRepresentation(
69 Representation::Integer32(), h_infer, "use requirements");
70 }
71 }
72
73
Representation HValue::RepresentationFromUses() {
75 if (HasNoUses()) return Representation::None();
76 Representation result = Representation::None();
77
78 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
79 HValue* use = it.value();
80 Representation rep = use->observed_input_representation(it.index());
81 result = result.generalize(rep);
82
83 if (FLAG_trace_representation) {
84 PrintF("#%d %s is used by #%d %s as %s%s\n",
85 id(), Mnemonic(), use->id(), use->Mnemonic(), rep.Mnemonic(),
86 (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
87 }
88 }
89 if (IsPhi()) {
90 result = result.generalize(
91 HPhi::cast(this)->representation_from_indirect_uses());
92 }
93
94 // External representations are dealt with separately.
95 return result.IsExternal() ? Representation::None() : result;
96 }
97
98
void HValue::UpdateRepresentation(Representation new_rep,
100 HInferRepresentationPhase* h_infer,
101 const char* reason) {
102 Representation r = representation();
103 if (new_rep.is_more_general_than(r)) {
104 if (CheckFlag(kCannotBeTagged) && new_rep.IsTagged()) return;
105 if (FLAG_trace_representation) {
106 PrintF("Changing #%d %s representation %s -> %s based on %s\n",
107 id(), Mnemonic(), r.Mnemonic(), new_rep.Mnemonic(), reason);
108 }
109 ChangeRepresentation(new_rep);
110 AddDependantsToWorklist(h_infer);
111 }
112 }
113
114
void HValue::AddDependantsToWorklist(HInferRepresentationPhase* h_infer) {
116 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
117 h_infer->AddToWorklist(it.value());
118 }
119 for (int i = 0; i < OperandCount(); ++i) {
120 h_infer->AddToWorklist(OperandAt(i));
121 }
122 }
123
124
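// Saturates |result| to the bounds of representation |r| (Smi or Integer32)
// and sets *overflow when clamping happens; e.g. for Integer32 a result of
// 1LL << 40 comes back as kMaxInt with *overflow set to true.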
static int32_t ConvertAndSetOverflow(Representation r,
126 int64_t result,
127 bool* overflow) {
128 if (r.IsSmi()) {
129 if (result > Smi::kMaxValue) {
130 *overflow = true;
131 return Smi::kMaxValue;
132 }
133 if (result < Smi::kMinValue) {
134 *overflow = true;
135 return Smi::kMinValue;
136 }
137 } else {
138 if (result > kMaxInt) {
139 *overflow = true;
140 return kMaxInt;
141 }
142 if (result < kMinInt) {
143 *overflow = true;
144 return kMinInt;
145 }
146 }
147 return static_cast<int32_t>(result);
148 }
149
150
static int32_t AddWithoutOverflow(Representation r,
152 int32_t a,
153 int32_t b,
154 bool* overflow) {
155 int64_t result = static_cast<int64_t>(a) + static_cast<int64_t>(b);
156 return ConvertAndSetOverflow(r, result, overflow);
157 }
158
159
static int32_t SubWithoutOverflow(Representation r,
161 int32_t a,
162 int32_t b,
163 bool* overflow) {
164 int64_t result = static_cast<int64_t>(a) - static_cast<int64_t>(b);
165 return ConvertAndSetOverflow(r, result, overflow);
166 }
167
168
static int32_t MulWithoutOverflow(const Representation& r,
170 int32_t a,
171 int32_t b,
172 bool* overflow) {
173 int64_t result = static_cast<int64_t>(a) * static_cast<int64_t>(b);
174 return ConvertAndSetOverflow(r, result, overflow);
175 }
176
177
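// Returns a conservative bitmask covering every value in the range: the exact
// value for a one-element range, the smallest 2^n - 1 >= upper_ for a
// non-negative range (e.g. [0, 5] yields 0x7), and all 32 bits otherwise.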
int32_t Range::Mask() const {
179 if (lower_ == upper_) return lower_;
180 if (lower_ >= 0) {
181 int32_t res = 1;
182 while (res < upper_) {
183 res = (res << 1) | 1;
184 }
185 return res;
186 }
187 return 0xffffffff;
188 }
189
190
void Range::AddConstant(int32_t value) {
192 if (value == 0) return;
193 bool may_overflow = false; // Overflow is ignored here.
194 Representation r = Representation::Integer32();
195 lower_ = AddWithoutOverflow(r, lower_, value, &may_overflow);
196 upper_ = AddWithoutOverflow(r, upper_, value, &may_overflow);
197 #ifdef DEBUG
198 Verify();
199 #endif
200 }
201
202
void Range::Intersect(Range* other) {
204 upper_ = Min(upper_, other->upper_);
205 lower_ = Max(lower_, other->lower_);
206 bool b = CanBeMinusZero() && other->CanBeMinusZero();
207 set_can_be_minus_zero(b);
208 }
209
210
void Range::Union(Range* other) {
212 upper_ = Max(upper_, other->upper_);
213 lower_ = Min(lower_, other->lower_);
214 bool b = CanBeMinusZero() || other->CanBeMinusZero();
215 set_can_be_minus_zero(b);
216 }
217
218
void Range::CombinedMax(Range* other) {
220 upper_ = Max(upper_, other->upper_);
221 lower_ = Max(lower_, other->lower_);
222 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
223 }
224
225
void Range::CombinedMin(Range* other) {
227 upper_ = Min(upper_, other->upper_);
228 lower_ = Min(lower_, other->lower_);
229 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
230 }
231
232
void Range::Sar(int32_t value) {
234 int32_t bits = value & 0x1F;
235 lower_ = lower_ >> bits;
236 upper_ = upper_ >> bits;
237 set_can_be_minus_zero(false);
238 }
239
240
void Range::Shl(int32_t value) {
242 int32_t bits = value & 0x1F;
243 int old_lower = lower_;
244 int old_upper = upper_;
245 lower_ = lower_ << bits;
246 upper_ = upper_ << bits;
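  // If shifting back does not reproduce the original bounds, the left shift
  // overflowed; widen to the full int32 range to stay conservative.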
247 if (old_lower != lower_ >> bits || old_upper != upper_ >> bits) {
248 upper_ = kMaxInt;
249 lower_ = kMinInt;
250 }
251 set_can_be_minus_zero(false);
252 }
253
254
bool Range::AddAndCheckOverflow(const Representation& r, Range* other) {
256 bool may_overflow = false;
257 lower_ = AddWithoutOverflow(r, lower_, other->lower(), &may_overflow);
258 upper_ = AddWithoutOverflow(r, upper_, other->upper(), &may_overflow);
259 KeepOrder();
260 #ifdef DEBUG
261 Verify();
262 #endif
263 return may_overflow;
264 }
265
266
bool Range::SubAndCheckOverflow(const Representation& r, Range* other) {
268 bool may_overflow = false;
269 lower_ = SubWithoutOverflow(r, lower_, other->upper(), &may_overflow);
270 upper_ = SubWithoutOverflow(r, upper_, other->lower(), &may_overflow);
271 KeepOrder();
272 #ifdef DEBUG
273 Verify();
274 #endif
275 return may_overflow;
276 }
277
278
void Range::KeepOrder() {
280 if (lower_ > upper_) {
281 int32_t tmp = lower_;
282 lower_ = upper_;
283 upper_ = tmp;
284 }
285 }
286
287
288 #ifdef DEBUG
void Range::Verify() const {
290 DCHECK(lower_ <= upper_);
291 }
292 #endif
293
294
bool Range::MulAndCheckOverflow(const Representation& r, Range* other) {
296 bool may_overflow = false;
297 int v1 = MulWithoutOverflow(r, lower_, other->lower(), &may_overflow);
298 int v2 = MulWithoutOverflow(r, lower_, other->upper(), &may_overflow);
299 int v3 = MulWithoutOverflow(r, upper_, other->lower(), &may_overflow);
300 int v4 = MulWithoutOverflow(r, upper_, other->upper(), &may_overflow);
301 lower_ = Min(Min(v1, v2), Min(v3, v4));
302 upper_ = Max(Max(v1, v2), Max(v3, v4));
303 #ifdef DEBUG
304 Verify();
305 #endif
306 return may_overflow;
307 }
308
309
bool HValue::IsDefinedAfter(HBasicBlock* other) const {
311 return block()->block_id() > other->block_id();
312 }
313
314
HUseListNode* HUseListNode::tail() {
316 // Skip and remove dead items in the use list.
317 while (tail_ != NULL && tail_->value()->CheckFlag(HValue::kIsDead)) {
318 tail_ = tail_->tail_;
319 }
320 return tail_;
321 }
322
323
bool HValue::CheckUsesForFlag(Flag f) const {
325 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
326 if (it.value()->IsSimulate()) continue;
327 if (!it.value()->CheckFlag(f)) return false;
328 }
329 return true;
330 }
331
332
bool HValue::CheckUsesForFlag(Flag f, HValue** value) const {
334 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
335 if (it.value()->IsSimulate()) continue;
336 if (!it.value()->CheckFlag(f)) {
337 *value = it.value();
338 return false;
339 }
340 }
341 return true;
342 }
343
344
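// Returns true only if there is at least one non-simulate use and every
// non-simulate use carries the flag; the first use without it fails the
// check immediately.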
bool HValue::HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const {
346 bool return_value = false;
347 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
348 if (it.value()->IsSimulate()) continue;
349 if (!it.value()->CheckFlag(f)) return false;
350 return_value = true;
351 }
352 return return_value;
353 }
354
355
HUseIterator::HUseIterator(HUseListNode* head) : next_(head) {
357 Advance();
358 }
359
360
void HUseIterator::Advance() {
362 current_ = next_;
363 if (current_ != NULL) {
364 next_ = current_->tail();
365 value_ = current_->value();
366 index_ = current_->index();
367 }
368 }
369
370
int HValue::UseCount() const {
372 int count = 0;
373 for (HUseIterator it(uses()); !it.Done(); it.Advance()) ++count;
374 return count;
375 }
376
377
HUseListNode* HValue::RemoveUse(HValue* value, int index) {
379 HUseListNode* previous = NULL;
380 HUseListNode* current = use_list_;
381 while (current != NULL) {
382 if (current->value() == value && current->index() == index) {
383 if (previous == NULL) {
384 use_list_ = current->tail();
385 } else {
386 previous->set_tail(current->tail());
387 }
388 break;
389 }
390
391 previous = current;
392 current = current->tail();
393 }
394
395 #ifdef DEBUG
396 // Do not reuse use list nodes in debug mode, zap them.
397 if (current != NULL) {
398 HUseListNode* temp =
399 new(block()->zone())
400 HUseListNode(current->value(), current->index(), NULL);
401 current->Zap();
402 current = temp;
403 }
404 #endif
405 return current;
406 }
407
408
bool HValue::Equals(HValue* other) {
410 if (other->opcode() != opcode()) return false;
411 if (!other->representation().Equals(representation())) return false;
412 if (!other->type_.Equals(type_)) return false;
413 if (other->flags() != flags()) return false;
414 if (OperandCount() != other->OperandCount()) return false;
415 for (int i = 0; i < OperandCount(); ++i) {
416 if (OperandAt(i)->id() != other->OperandAt(i)->id()) return false;
417 }
418 bool result = DataEquals(other);
419 DCHECK(!result || Hashcode() == other->Hashcode());
420 return result;
421 }
422
423
intptr_t HValue::Hashcode() {
425 intptr_t result = opcode();
426 int count = OperandCount();
427 for (int i = 0; i < count; ++i) {
428 result = result * 19 + OperandAt(i)->id() + (result >> 7);
429 }
430 return result;
431 }
432
433
const char* HValue::Mnemonic() const {
435 switch (opcode()) {
436 #define MAKE_CASE(type) case k##type: return #type;
437 HYDROGEN_CONCRETE_INSTRUCTION_LIST(MAKE_CASE)
438 #undef MAKE_CASE
439 case kPhi: return "Phi";
440 default: return "";
441 }
442 }
443
444
bool HValue::CanReplaceWithDummyUses() {
446 return FLAG_unreachable_code_elimination &&
447 !(block()->IsReachable() ||
448 IsBlockEntry() ||
449 IsControlInstruction() ||
450 IsArgumentsObject() ||
451 IsCapturedObject() ||
452 IsSimulate() ||
453 IsEnterInlined() ||
454 IsLeaveInlined());
455 }
456
457
bool HValue::IsInteger32Constant() {
459 return IsConstant() && HConstant::cast(this)->HasInteger32Value();
460 }
461
462
int32_t HValue::GetInteger32Constant() {
464 return HConstant::cast(this)->Integer32Value();
465 }
466
467
bool HValue::EqualsInteger32Constant(int32_t value) {
469 return IsInteger32Constant() && GetInteger32Constant() == value;
470 }
471
472
void HValue::SetOperandAt(int index, HValue* value) {
474 RegisterUse(index, value);
475 InternalSetOperandAt(index, value);
476 }
477
478
void HValue::DeleteAndReplaceWith(HValue* other) {
480 // We replace all uses first, so Delete can assert that there are none.
481 if (other != NULL) ReplaceAllUsesWith(other);
482 Kill();
483 DeleteFromGraph();
484 }
485
486
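// Redirects every use of this value to |other|, splicing each use-list node
// directly onto |other|'s use list instead of allocating new nodes.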
void HValue::ReplaceAllUsesWith(HValue* other) {
488 while (use_list_ != NULL) {
489 HUseListNode* list_node = use_list_;
490 HValue* value = list_node->value();
491 DCHECK(!value->block()->IsStartBlock());
492 value->InternalSetOperandAt(list_node->index(), other);
493 use_list_ = list_node->tail();
494 list_node->set_tail(other->use_list_);
495 other->use_list_ = list_node;
496 }
497 }
498
499
void HValue::Kill() {
501 // Instead of going through the entire use list of each operand, we only
502 // check the first item in each use list and rely on the tail() method to
503 // skip dead items, removing them lazily next time we traverse the list.
504 SetFlag(kIsDead);
505 for (int i = 0; i < OperandCount(); ++i) {
506 HValue* operand = OperandAt(i);
507 if (operand == NULL) continue;
508 HUseListNode* first = operand->use_list_;
509 if (first != NULL && first->value()->CheckFlag(kIsDead)) {
510 operand->use_list_ = first->tail();
511 }
512 }
513 }
514
515
void HValue::SetBlock(HBasicBlock* block) {
517 DCHECK(block_ == NULL || block == NULL);
518 block_ = block;
519 if (id_ == kNoNumber && block != NULL) {
520 id_ = block->graph()->GetNextValueID(this);
521 }
522 }
523
524
std::ostream& operator<<(std::ostream& os, const HValue& v) {
526 return v.PrintTo(os);
527 }
528
529
std::ostream& operator<<(std::ostream& os, const TypeOf& t) {
531 if (t.value->representation().IsTagged() &&
532 !t.value->type().Equals(HType::Tagged()))
533 return os;
534 return os << " type:" << t.value->type();
535 }
536
537
std::ostream& operator<<(std::ostream& os, const ChangesOf& c) {
539 GVNFlagSet changes_flags = c.value->ChangesFlags();
540 if (changes_flags.IsEmpty()) return os;
541 os << " changes[";
542 if (changes_flags == c.value->AllSideEffectsFlagSet()) {
543 os << "*";
544 } else {
545 bool add_comma = false;
546 #define PRINT_DO(Type) \
547 if (changes_flags.Contains(k##Type)) { \
548 if (add_comma) os << ","; \
549 add_comma = true; \
550 os << #Type; \
551 }
552 GVN_TRACKED_FLAG_LIST(PRINT_DO);
553 GVN_UNTRACKED_FLAG_LIST(PRINT_DO);
554 #undef PRINT_DO
555 }
556 return os << "]";
557 }
558
559
bool HValue::HasMonomorphicJSObjectType() {
561 return !GetMonomorphicJSObjectMap().is_null();
562 }
563
564
bool HValue::UpdateInferredType() {
566 HType type = CalculateInferredType();
567 bool result = (!type.Equals(type_));
568 type_ = type;
569 return result;
570 }
571
572
void HValue::RegisterUse(int index, HValue* new_value) {
574 HValue* old_value = OperandAt(index);
575 if (old_value == new_value) return;
576
577 HUseListNode* removed = NULL;
578 if (old_value != NULL) {
579 removed = old_value->RemoveUse(this, index);
580 }
581
582 if (new_value != NULL) {
583 if (removed == NULL) {
584 new_value->use_list_ = new(new_value->block()->zone()) HUseListNode(
585 this, index, new_value->use_list_);
586 } else {
587 removed->set_tail(new_value->use_list_);
588 new_value->use_list_ = removed;
589 }
590 }
591 }
592
593
void HValue::AddNewRange(Range* r, Zone* zone) {
595 if (!HasRange()) ComputeInitialRange(zone);
596 if (!HasRange()) range_ = new(zone) Range();
597 DCHECK(HasRange());
598 r->StackUpon(range_);
599 range_ = r;
600 }
601
602
void HValue::RemoveLastAddedRange() {
604 DCHECK(HasRange());
605 DCHECK(range_->next() != NULL);
606 range_ = range_->next();
607 }
608
609
void HValue::ComputeInitialRange(Zone* zone) {
611 DCHECK(!HasRange());
612 range_ = InferRange(zone);
613 DCHECK(HasRange());
614 }
615
616
std::ostream& HInstruction::PrintTo(std::ostream& os) const { // NOLINT
618 os << Mnemonic() << " ";
619 PrintDataTo(os) << ChangesOf(this) << TypeOf(this);
620 if (CheckFlag(HValue::kHasNoObservableSideEffects)) os << " [noOSE]";
621 if (CheckFlag(HValue::kIsDead)) os << " [dead]";
622 return os;
623 }
624
625
std::ostream& HInstruction::PrintDataTo(std::ostream& os) const { // NOLINT
627 for (int i = 0; i < OperandCount(); ++i) {
628 if (i > 0) os << " ";
629 os << NameOf(OperandAt(i));
630 }
631 return os;
632 }
633
634
void HInstruction::Unlink() {
636 DCHECK(IsLinked());
637 DCHECK(!IsControlInstruction()); // Must never move control instructions.
638 DCHECK(!IsBlockEntry()); // Doesn't make sense to delete these.
639 DCHECK(previous_ != NULL);
640 previous_->next_ = next_;
641 if (next_ == NULL) {
642 DCHECK(block()->last() == this);
643 block()->set_last(previous_);
644 } else {
645 next_->previous_ = previous_;
646 }
647 clear_block();
648 }
649
650
void HInstruction::InsertBefore(HInstruction* next) {
652 DCHECK(!IsLinked());
653 DCHECK(!next->IsBlockEntry());
654 DCHECK(!IsControlInstruction());
655 DCHECK(!next->block()->IsStartBlock());
656 DCHECK(next->previous_ != NULL);
657 HInstruction* prev = next->previous();
658 prev->next_ = this;
659 next->previous_ = this;
660 next_ = next;
661 previous_ = prev;
662 SetBlock(next->block());
663 if (!has_position() && next->has_position()) {
664 set_position(next->position());
665 }
666 }
667
668
void HInstruction::InsertAfter(HInstruction* previous) {
670 DCHECK(!IsLinked());
671 DCHECK(!previous->IsControlInstruction());
672 DCHECK(!IsControlInstruction() || previous->next_ == NULL);
673 HBasicBlock* block = previous->block();
674 // Never insert anything except constants into the start block after finishing
675 // it.
676 if (block->IsStartBlock() && block->IsFinished() && !IsConstant()) {
677 DCHECK(block->end()->SecondSuccessor() == NULL);
678 InsertAfter(block->end()->FirstSuccessor()->first());
679 return;
680 }
681
682 // If we're inserting after an instruction with side-effects that is
683 // followed by a simulate instruction, we need to insert after the
684 // simulate instruction instead.
685 HInstruction* next = previous->next_;
686 if (previous->HasObservableSideEffects() && next != NULL) {
687 DCHECK(next->IsSimulate());
688 previous = next;
689 next = previous->next_;
690 }
691
692 previous_ = previous;
693 next_ = next;
694 SetBlock(block);
695 previous->next_ = this;
696 if (next != NULL) next->previous_ = this;
697 if (block->last() == previous) {
698 block->set_last(this);
699 }
700 if (!has_position() && previous->has_position()) {
701 set_position(previous->position());
702 }
703 }
704
705
bool HInstruction::Dominates(HInstruction* other) {
707 if (block() != other->block()) {
708 return block()->Dominates(other->block());
709 }
710 // Both instructions are in the same basic block. This instruction
711 // should precede the other one in order to dominate it.
712 for (HInstruction* instr = next(); instr != NULL; instr = instr->next()) {
713 if (instr == other) {
714 return true;
715 }
716 }
717 return false;
718 }
719
720
721 #ifdef DEBUG
void HInstruction::Verify() {
723 // Verify that input operands are defined before use.
724 HBasicBlock* cur_block = block();
725 for (int i = 0; i < OperandCount(); ++i) {
726 HValue* other_operand = OperandAt(i);
727 if (other_operand == NULL) continue;
728 HBasicBlock* other_block = other_operand->block();
729 if (cur_block == other_block) {
730 if (!other_operand->IsPhi()) {
731 HInstruction* cur = this->previous();
732 while (cur != NULL) {
733 if (cur == other_operand) break;
734 cur = cur->previous();
735 }
736 // Must reach other operand in the same block!
737 DCHECK(cur == other_operand);
738 }
739 } else {
740 // If the following assert fires, you may have forgotten an
741 // AddInstruction.
742 DCHECK(other_block->Dominates(cur_block));
743 }
744 }
745
746 // Verify that instructions that may have side-effects are followed
747 // by a simulate instruction.
748 if (HasObservableSideEffects() && !IsOsrEntry()) {
749 DCHECK(next()->IsSimulate());
750 }
751
752 // Verify that instructions that can be eliminated by GVN have overridden
753 // HValue::DataEquals. The default implementation is UNREACHABLE. We
754 // don't actually care whether DataEquals returns true or false here.
755 if (CheckFlag(kUseGVN)) DataEquals(this);
756
757 // Verify that all uses are in the graph.
758 for (HUseIterator use = uses(); !use.Done(); use.Advance()) {
759 if (use.value()->IsInstruction()) {
760 DCHECK(HInstruction::cast(use.value())->IsLinked());
761 }
762 }
763 }
764 #endif
765
766
bool HInstruction::CanDeoptimize() {
768 // TODO(titzer): make this a virtual method?
769 switch (opcode()) {
770 case HValue::kAbnormalExit:
771 case HValue::kAccessArgumentsAt:
772 case HValue::kAllocate:
773 case HValue::kArgumentsElements:
774 case HValue::kArgumentsLength:
775 case HValue::kArgumentsObject:
776 case HValue::kBlockEntry:
777 case HValue::kBoundsCheckBaseIndexInformation:
778 case HValue::kCallFunction:
779 case HValue::kCallNewArray:
780 case HValue::kCallStub:
781 case HValue::kCapturedObject:
782 case HValue::kClassOfTestAndBranch:
783 case HValue::kCompareGeneric:
784 case HValue::kCompareHoleAndBranch:
785 case HValue::kCompareMap:
786 case HValue::kCompareMinusZeroAndBranch:
787 case HValue::kCompareNumericAndBranch:
788 case HValue::kCompareObjectEqAndBranch:
789 case HValue::kConstant:
790 case HValue::kConstructDouble:
791 case HValue::kContext:
792 case HValue::kDebugBreak:
793 case HValue::kDeclareGlobals:
794 case HValue::kDoubleBits:
795 case HValue::kDummyUse:
796 case HValue::kEnterInlined:
797 case HValue::kEnvironmentMarker:
798 case HValue::kForceRepresentation:
799 case HValue::kGetCachedArrayIndex:
800 case HValue::kGoto:
801 case HValue::kHasCachedArrayIndexAndBranch:
802 case HValue::kHasInstanceTypeAndBranch:
803 case HValue::kInnerAllocatedObject:
804 case HValue::kInstanceOf:
805 case HValue::kIsSmiAndBranch:
806 case HValue::kIsStringAndBranch:
807 case HValue::kIsUndetectableAndBranch:
808 case HValue::kLeaveInlined:
809 case HValue::kLoadFieldByIndex:
810 case HValue::kLoadGlobalGeneric:
811 case HValue::kLoadNamedField:
812 case HValue::kLoadNamedGeneric:
813 case HValue::kLoadRoot:
814 case HValue::kMapEnumLength:
815 case HValue::kMathMinMax:
816 case HValue::kParameter:
817 case HValue::kPhi:
818 case HValue::kPushArguments:
819 case HValue::kReturn:
820 case HValue::kSeqStringGetChar:
821 case HValue::kStoreCodeEntry:
822 case HValue::kStoreFrameContext:
823 case HValue::kStoreKeyed:
824 case HValue::kStoreNamedField:
825 case HValue::kStoreNamedGeneric:
826 case HValue::kStringCharCodeAt:
827 case HValue::kStringCharFromCode:
828 case HValue::kThisFunction:
829 case HValue::kTypeofIsAndBranch:
830 case HValue::kUnknownOSRValue:
831 case HValue::kUseConst:
832 return false;
833
834 case HValue::kAdd:
835 case HValue::kAllocateBlockContext:
836 case HValue::kApplyArguments:
837 case HValue::kBitwise:
838 case HValue::kBoundsCheck:
839 case HValue::kBranch:
840 case HValue::kCallJSFunction:
841 case HValue::kCallRuntime:
842 case HValue::kCallWithDescriptor:
843 case HValue::kChange:
844 case HValue::kCheckArrayBufferNotNeutered:
845 case HValue::kCheckHeapObject:
846 case HValue::kCheckInstanceType:
847 case HValue::kCheckMapValue:
848 case HValue::kCheckMaps:
849 case HValue::kCheckSmi:
850 case HValue::kCheckValue:
851 case HValue::kClampToUint8:
852 case HValue::kDeoptimize:
853 case HValue::kDiv:
854 case HValue::kForInCacheArray:
855 case HValue::kForInPrepareMap:
856 case HValue::kHasInPrototypeChainAndBranch:
857 case HValue::kInvokeFunction:
858 case HValue::kLoadContextSlot:
859 case HValue::kLoadFunctionPrototype:
860 case HValue::kLoadKeyed:
861 case HValue::kLoadKeyedGeneric:
862 case HValue::kMathFloorOfDiv:
863 case HValue::kMaybeGrowElements:
864 case HValue::kMod:
865 case HValue::kMul:
866 case HValue::kOsrEntry:
867 case HValue::kPower:
868 case HValue::kPrologue:
869 case HValue::kRor:
870 case HValue::kSar:
871 case HValue::kSeqStringSetChar:
872 case HValue::kShl:
873 case HValue::kShr:
874 case HValue::kSimulate:
875 case HValue::kStackCheck:
876 case HValue::kStoreContextSlot:
877 case HValue::kStoreKeyedGeneric:
878 case HValue::kStringAdd:
879 case HValue::kStringCompareAndBranch:
880 case HValue::kSub:
881 case HValue::kToFastProperties:
882 case HValue::kTransitionElementsKind:
883 case HValue::kTrapAllocationMemento:
884 case HValue::kTypeof:
885 case HValue::kUnaryMathOperation:
886 case HValue::kWrapReceiver:
887 return true;
888 }
889 UNREACHABLE();
890 return true;
891 }
892
893
std::ostream& operator<<(std::ostream& os, const NameOf& v) {
895 return os << v.value->representation().Mnemonic() << v.value->id();
896 }
897
std::ostream& HDummyUse::PrintDataTo(std::ostream& os) const { // NOLINT
899 return os << NameOf(value());
900 }
901
902
std::ostream& HEnvironmentMarker::PrintDataTo(
904 std::ostream& os) const { // NOLINT
905 return os << (kind() == BIND ? "bind" : "lookup") << " var[" << index()
906 << "]";
907 }
908
909
std::ostream& HUnaryCall::PrintDataTo(std::ostream& os) const { // NOLINT
911 return os << NameOf(value()) << " #" << argument_count();
912 }
913
914
std::ostream& HCallJSFunction::PrintDataTo(std::ostream& os) const { // NOLINT
916 return os << NameOf(function()) << " #" << argument_count();
917 }
918
919
HCallJSFunction* HCallJSFunction::New(Isolate* isolate, Zone* zone,
921 HValue* context, HValue* function,
922 int argument_count) {
923 bool has_stack_check = false;
924 if (function->IsConstant()) {
925 HConstant* fun_const = HConstant::cast(function);
926 Handle<JSFunction> jsfun =
927 Handle<JSFunction>::cast(fun_const->handle(isolate));
928 has_stack_check = !jsfun.is_null() &&
929 (jsfun->code()->kind() == Code::FUNCTION ||
930 jsfun->code()->kind() == Code::OPTIMIZED_FUNCTION);
931 }
932
933 return new (zone) HCallJSFunction(function, argument_count, has_stack_check);
934 }
935
936
std::ostream& HBinaryCall::PrintDataTo(std::ostream& os) const { // NOLINT
938 return os << NameOf(first()) << " " << NameOf(second()) << " #"
939 << argument_count();
940 }
941
942
std::ostream& HCallFunction::PrintDataTo(std::ostream& os) const { // NOLINT
944 os << NameOf(context()) << " " << NameOf(function());
945 if (HasVectorAndSlot()) {
946 os << " (type-feedback-vector icslot " << slot().ToInt() << ")";
947 }
948 os << " (convert mode" << convert_mode() << ")";
949 return os;
950 }
951
952
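// Materializes the index decomposition ((base + offset) >> scale) as explicit
// HAdd/HSar instructions inserted before this check, then clears the stored
// base/offset/scale so the check uses the rebuilt index directly.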
void HBoundsCheck::ApplyIndexChange() {
954 if (skip_check()) return;
955
956 DecompositionResult decomposition;
957 bool index_is_decomposable = index()->TryDecompose(&decomposition);
958 if (index_is_decomposable) {
959 DCHECK(decomposition.base() == base());
960 if (decomposition.offset() == offset() &&
961 decomposition.scale() == scale()) return;
962 } else {
963 return;
964 }
965
966 ReplaceAllUsesWith(index());
967
968 HValue* current_index = decomposition.base();
969 int actual_offset = decomposition.offset() + offset();
970 int actual_scale = decomposition.scale() + scale();
971
972 HGraph* graph = block()->graph();
973 Isolate* isolate = graph->isolate();
974 Zone* zone = graph->zone();
975 HValue* context = graph->GetInvalidContext();
976 if (actual_offset != 0) {
977 HConstant* add_offset =
978 HConstant::New(isolate, zone, context, actual_offset);
979 add_offset->InsertBefore(this);
980 HInstruction* add =
981 HAdd::New(isolate, zone, context, current_index, add_offset);
982 add->InsertBefore(this);
983 add->AssumeRepresentation(index()->representation());
984 add->ClearFlag(kCanOverflow);
985 current_index = add;
986 }
987
988 if (actual_scale != 0) {
989 HConstant* sar_scale = HConstant::New(isolate, zone, context, actual_scale);
990 sar_scale->InsertBefore(this);
991 HInstruction* sar =
992 HSar::New(isolate, zone, context, current_index, sar_scale);
993 sar->InsertBefore(this);
994 sar->AssumeRepresentation(index()->representation());
995 current_index = sar;
996 }
997
998 SetOperandAt(0, current_index);
999
1000 base_ = NULL;
1001 offset_ = 0;
1002 scale_ = 0;
1003 }
1004
1005
std::ostream& HBoundsCheck::PrintDataTo(std::ostream& os) const { // NOLINT
1007 os << NameOf(index()) << " " << NameOf(length());
1008 if (base() != NULL && (offset() != 0 || scale() != 0)) {
1009 os << " base: ((";
1010 if (base() != index()) {
1011 os << NameOf(index());
1012 } else {
1013 os << "index";
1014 }
1015 os << " + " << offset() << ") >> " << scale() << ")";
1016 }
1017 if (skip_check()) os << " [DISABLED]";
1018 return os;
1019 }
1020
1021
void HBoundsCheck::InferRepresentation(HInferRepresentationPhase* h_infer) {
1023 DCHECK(CheckFlag(kFlexibleRepresentation));
1024 HValue* actual_index = index()->ActualValue();
1025 HValue* actual_length = length()->ActualValue();
1026 Representation index_rep = actual_index->representation();
1027 Representation length_rep = actual_length->representation();
1028 if (index_rep.IsTagged() && actual_index->type().IsSmi()) {
1029 index_rep = Representation::Smi();
1030 }
1031 if (length_rep.IsTagged() && actual_length->type().IsSmi()) {
1032 length_rep = Representation::Smi();
1033 }
1034 Representation r = index_rep.generalize(length_rep);
1035 if (r.is_more_general_than(Representation::Integer32())) {
1036 r = Representation::Integer32();
1037 }
1038 UpdateRepresentation(r, h_infer, "boundscheck");
1039 }
1040
1041
Range* HBoundsCheck::InferRange(Zone* zone) {
1043 Representation r = representation();
1044 if (r.IsSmiOrInteger32() && length()->HasRange()) {
1045 int upper = length()->range()->upper() - (allow_equality() ? 0 : 1);
1046 int lower = 0;
1047
1048 Range* result = new(zone) Range(lower, upper);
1049 if (index()->HasRange()) {
1050 result->Intersect(index()->range());
1051 }
1052
1053 // In case of Smi representation, clamp result to Smi::kMaxValue.
1054 if (r.IsSmi()) result->ClampToSmi();
1055 return result;
1056 }
1057 return HValue::InferRange(zone);
1058 }
1059
1060
std::ostream& HBoundsCheckBaseIndexInformation::PrintDataTo(
1062 std::ostream& os) const { // NOLINT
1063 // TODO(svenpanne) This 2nd base_index() looks wrong...
1064 return os << "base: " << NameOf(base_index())
1065 << ", check: " << NameOf(base_index());
1066 }
1067
1068
std::ostream& HCallWithDescriptor::PrintDataTo(
1070 std::ostream& os) const { // NOLINT
1071 for (int i = 0; i < OperandCount(); i++) {
1072 os << NameOf(OperandAt(i)) << " ";
1073 }
1074 return os << "#" << argument_count();
1075 }
1076
1077
std::ostream& HCallNewArray::PrintDataTo(std::ostream& os) const { // NOLINT
1079 os << ElementsKindToString(elements_kind()) << " ";
1080 return HBinaryCall::PrintDataTo(os);
1081 }
1082
1083
std::ostream& HCallRuntime::PrintDataTo(std::ostream& os) const { // NOLINT
1085 os << function()->name << " ";
1086 if (save_doubles() == kSaveFPRegs) os << "[save doubles] ";
1087 return os << "#" << argument_count();
1088 }
1089
1090
std::ostream& HClassOfTestAndBranch::PrintDataTo(
1092 std::ostream& os) const { // NOLINT
1093 return os << "class_of_test(" << NameOf(value()) << ", \""
1094 << class_name()->ToCString().get() << "\")";
1095 }
1096
1097
std::ostream& HWrapReceiver::PrintDataTo(std::ostream& os) const { // NOLINT
1099 return os << NameOf(receiver()) << " " << NameOf(function());
1100 }
1101
1102
std::ostream& HAccessArgumentsAt::PrintDataTo(
1104 std::ostream& os) const { // NOLINT
1105 return os << NameOf(arguments()) << "[" << NameOf(index()) << "], length "
1106 << NameOf(length());
1107 }
1108
1109
std::ostream& HAllocateBlockContext::PrintDataTo(
1111 std::ostream& os) const { // NOLINT
1112 return os << NameOf(context()) << " " << NameOf(function());
1113 }
1114
1115
std::ostream& HControlInstruction::PrintDataTo(
1117 std::ostream& os) const { // NOLINT
1118 os << " goto (";
1119 bool first_block = true;
1120 for (HSuccessorIterator it(this); !it.Done(); it.Advance()) {
1121 if (!first_block) os << ", ";
1122 os << *it.Current();
1123 first_block = false;
1124 }
1125 return os << ")";
1126 }
1127
1128
std::ostream& HUnaryControlInstruction::PrintDataTo(
1130 std::ostream& os) const { // NOLINT
1131 os << NameOf(value());
1132 return HControlInstruction::PrintDataTo(os);
1133 }
1134
1135
std::ostream& HReturn::PrintDataTo(std::ostream& os) const { // NOLINT
1137 return os << NameOf(value()) << " (pop " << NameOf(parameter_count())
1138 << " values)";
1139 }
1140
1141
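// Picks a representation from ToBoolean type feedback: feedback types that
// can only be checked on tagged values force Tagged, heap-number-only
// feedback permits Double, and smi-only feedback permits Smi.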
Representation HBranch::observed_input_representation(int index) {
1143 if (expected_input_types_.Contains(ToBooleanStub::NULL_TYPE) ||
1144 expected_input_types_.Contains(ToBooleanStub::SPEC_OBJECT) ||
1145 expected_input_types_.Contains(ToBooleanStub::STRING) ||
1146 expected_input_types_.Contains(ToBooleanStub::SYMBOL) ||
1147 expected_input_types_.Contains(ToBooleanStub::SIMD_VALUE)) {
1148 return Representation::Tagged();
1149 }
1150 if (expected_input_types_.Contains(ToBooleanStub::UNDEFINED)) {
1151 if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
1152 return Representation::Double();
1153 }
1154 return Representation::Tagged();
1155 }
1156 if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
1157 return Representation::Double();
1158 }
1159 if (expected_input_types_.Contains(ToBooleanStub::SMI)) {
1160 return Representation::Smi();
1161 }
1162 return Representation::None();
1163 }
1164
1165
bool HBranch::KnownSuccessorBlock(HBasicBlock** block) {
1167 HValue* value = this->value();
1168 if (value->EmitAtUses()) {
1169 DCHECK(value->IsConstant());
1170 DCHECK(!value->representation().IsDouble());
1171 *block = HConstant::cast(value)->BooleanValue()
1172 ? FirstSuccessor()
1173 : SecondSuccessor();
1174 return true;
1175 }
1176 *block = NULL;
1177 return false;
1178 }
1179
1180
std::ostream& HBranch::PrintDataTo(std::ostream& os) const { // NOLINT
1182 return HUnaryControlInstruction::PrintDataTo(os) << " "
1183 << expected_input_types();
1184 }
1185
1186
std::ostream& HCompareMap::PrintDataTo(std::ostream& os) const { // NOLINT
1188 os << NameOf(value()) << " (" << *map().handle() << ")";
1189 HControlInstruction::PrintDataTo(os);
1190 if (known_successor_index() == 0) {
1191 os << " [true]";
1192 } else if (known_successor_index() == 1) {
1193 os << " [false]";
1194 }
1195 return os;
1196 }
1197
1198
const char* HUnaryMathOperation::OpName() const {
1200 switch (op()) {
1201 case kMathFloor:
1202 return "floor";
1203 case kMathFround:
1204 return "fround";
1205 case kMathRound:
1206 return "round";
1207 case kMathAbs:
1208 return "abs";
1209 case kMathLog:
1210 return "log";
1211 case kMathExp:
1212 return "exp";
1213 case kMathSqrt:
1214 return "sqrt";
1215 case kMathPowHalf:
1216 return "pow-half";
1217 case kMathClz32:
1218 return "clz32";
1219 default:
1220 UNREACHABLE();
1221 return NULL;
1222 }
1223 }
1224
1225
Range* HUnaryMathOperation::InferRange(Zone* zone) {
1227 Representation r = representation();
1228 if (op() == kMathClz32) return new(zone) Range(0, 32);
1229 if (r.IsSmiOrInteger32() && value()->HasRange()) {
1230 if (op() == kMathAbs) {
1231 int upper = value()->range()->upper();
1232 int lower = value()->range()->lower();
1233 bool spans_zero = value()->range()->CanBeZero();
      // Math.abs(kMinInt) overflows its representation, in which case the
      // instruction deopts. Hence clamp it to kMaxInt.
1236 int abs_upper = upper == kMinInt ? kMaxInt : abs(upper);
1237 int abs_lower = lower == kMinInt ? kMaxInt : abs(lower);
1238 Range* result =
1239 new(zone) Range(spans_zero ? 0 : Min(abs_lower, abs_upper),
1240 Max(abs_lower, abs_upper));
1241 // In case of Smi representation, clamp Math.abs(Smi::kMinValue) to
1242 // Smi::kMaxValue.
1243 if (r.IsSmi()) result->ClampToSmi();
1244 return result;
1245 }
1246 }
1247 return HValue::InferRange(zone);
1248 }
1249
1250
std::ostream& HUnaryMathOperation::PrintDataTo(
1252 std::ostream& os) const { // NOLINT
1253 return os << OpName() << " " << NameOf(value());
1254 }
1255
1256
std::ostream& HUnaryOperation::PrintDataTo(std::ostream& os) const { // NOLINT
1258 return os << NameOf(value());
1259 }
1260
1261
std::ostream& HHasInstanceTypeAndBranch::PrintDataTo(
1263 std::ostream& os) const { // NOLINT
1264 os << NameOf(value());
1265 switch (from_) {
1266 case FIRST_JS_RECEIVER_TYPE:
1267 if (to_ == LAST_TYPE) os << " spec_object";
1268 break;
1269 case JS_REGEXP_TYPE:
1270 if (to_ == JS_REGEXP_TYPE) os << " reg_exp";
1271 break;
1272 case JS_ARRAY_TYPE:
1273 if (to_ == JS_ARRAY_TYPE) os << " array";
1274 break;
1275 case JS_FUNCTION_TYPE:
1276 if (to_ == JS_FUNCTION_TYPE) os << " function";
1277 break;
1278 default:
1279 break;
1280 }
1281 return os;
1282 }
1283
1284
std::ostream& HTypeofIsAndBranch::PrintDataTo(
1286 std::ostream& os) const { // NOLINT
1287 os << NameOf(value()) << " == " << type_literal()->ToCString().get();
1288 return HControlInstruction::PrintDataTo(os);
1289 }
1290
1291
1292 namespace {
1293
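// Maps a constant to the interned string that the typeof operator evaluates
// to for it, mirroring runtime semantics (note that null reports "object").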
String* TypeOfString(HConstant* constant, Isolate* isolate) {
1295 Heap* heap = isolate->heap();
1296 if (constant->HasNumberValue()) return heap->number_string();
1297 if (constant->IsUndetectable()) return heap->undefined_string();
1298 if (constant->HasStringValue()) return heap->string_string();
1299 switch (constant->GetInstanceType()) {
1300 case ODDBALL_TYPE: {
1301 Unique<Object> unique = constant->GetUnique();
1302 if (unique.IsKnownGlobal(heap->true_value()) ||
1303 unique.IsKnownGlobal(heap->false_value())) {
1304 return heap->boolean_string();
1305 }
1306 if (unique.IsKnownGlobal(heap->null_value())) {
1307 return heap->object_string();
1308 }
1309 DCHECK(unique.IsKnownGlobal(heap->undefined_value()));
1310 return heap->undefined_string();
1311 }
1312 case SYMBOL_TYPE:
1313 return heap->symbol_string();
1314 case SIMD128_VALUE_TYPE: {
1315 Unique<Map> map = constant->ObjectMap();
1316 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
1317 if (map.IsKnownGlobal(heap->type##_map())) { \
1318 return heap->type##_string(); \
1319 }
1320 SIMD128_TYPES(SIMD128_TYPE)
1321 #undef SIMD128_TYPE
1322 UNREACHABLE();
1323 return nullptr;
1324 }
1325 default:
1326 if (constant->IsCallable()) return heap->function_string();
1327 return heap->object_string();
1328 }
1329 }
1330
1331 } // namespace
1332
1333
bool HTypeofIsAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
1335 if (FLAG_fold_constants && value()->IsConstant()) {
1336 HConstant* constant = HConstant::cast(value());
1337 String* type_string = TypeOfString(constant, isolate());
1338 bool same_type = type_literal_.IsKnownGlobal(type_string);
1339 *block = same_type ? FirstSuccessor() : SecondSuccessor();
1340 return true;
1341 } else if (value()->representation().IsSpecialization()) {
1342 bool number_type =
1343 type_literal_.IsKnownGlobal(isolate()->heap()->number_string());
1344 *block = number_type ? FirstSuccessor() : SecondSuccessor();
1345 return true;
1346 }
1347 *block = NULL;
1348 return false;
1349 }
1350
1351
std::ostream& HCheckMapValue::PrintDataTo(std::ostream& os) const { // NOLINT
1353 return os << NameOf(value()) << " " << NameOf(map());
1354 }
1355
1356
HValue* HCheckMapValue::Canonicalize() {
1358 if (map()->IsConstant()) {
1359 HConstant* c_map = HConstant::cast(map());
1360 return HCheckMaps::CreateAndInsertAfter(
1361 block()->graph()->zone(), value(), c_map->MapValue(),
1362 c_map->HasStableMapValue(), this);
1363 }
1364 return this;
1365 }
1366
1367
std::ostream& HForInPrepareMap::PrintDataTo(std::ostream& os) const { // NOLINT
1369 return os << NameOf(enumerable());
1370 }
1371
1372
std::ostream& HForInCacheArray::PrintDataTo(std::ostream& os) const { // NOLINT
1374 return os << NameOf(enumerable()) << " " << NameOf(map()) << "[" << idx_
1375 << "]";
1376 }
1377
1378
std::ostream& HLoadFieldByIndex::PrintDataTo(
1380 std::ostream& os) const { // NOLINT
1381 return os << NameOf(object()) << " " << NameOf(index());
1382 }
1383
1384
static bool MatchLeftIsOnes(HValue* l, HValue* r, HValue** negated) {
1386 if (!l->EqualsInteger32Constant(~0)) return false;
1387 *negated = r;
1388 return true;
1389 }
1390
1391
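// Recognizes a bitwise NOT written as x ^ ~0 (with the operands in either
// order) and returns x through *negated.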
static bool MatchNegationViaXor(HValue* instr, HValue** negated) {
1393 if (!instr->IsBitwise()) return false;
1394 HBitwise* b = HBitwise::cast(instr);
1395 return (b->op() == Token::BIT_XOR) &&
1396 (MatchLeftIsOnes(b->left(), b->right(), negated) ||
1397 MatchLeftIsOnes(b->right(), b->left(), negated));
1398 }
1399
1400
static bool MatchDoubleNegation(HValue* instr, HValue** arg) {
1402 HValue* negated;
1403 return MatchNegationViaXor(instr, &negated) &&
1404 MatchNegationViaXor(negated, arg);
1405 }
1406
1407
HValue* HBitwise::Canonicalize() {
1409 if (!representation().IsSmiOrInteger32()) return this;
1410 // If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x.
1411 int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0;
1412 if (left()->EqualsInteger32Constant(nop_constant) &&
1413 !right()->CheckFlag(kUint32)) {
1414 return right();
1415 }
1416 if (right()->EqualsInteger32Constant(nop_constant) &&
1417 !left()->CheckFlag(kUint32)) {
1418 return left();
1419 }
1420 // Optimize double negation, a common pattern used for ToInt32(x).
1421 HValue* arg;
1422 if (MatchDoubleNegation(this, &arg) && !arg->CheckFlag(kUint32)) {
1423 return arg;
1424 }
1425 return this;
1426 }
1427
1428
1429 // static
HInstruction* HAdd::New(Isolate* isolate, Zone* zone, HValue* context,
1431 HValue* left, HValue* right, Strength strength,
1432 ExternalAddType external_add_type) {
1433 // For everything else, you should use the other factory method without
1434 // ExternalAddType.
1435 DCHECK_EQ(external_add_type, AddOfExternalAndTagged);
1436 return new (zone) HAdd(context, left, right, strength, external_add_type);
1437 }
1438
1439
Representation HAdd::RepresentationFromInputs() {
1441 Representation left_rep = left()->representation();
1442 if (left_rep.IsExternal()) {
1443 return Representation::External();
1444 }
1445 return HArithmeticBinaryOperation::RepresentationFromInputs();
1446 }
1447
1448
Representation HAdd::RequiredInputRepresentation(int index) {
1450 if (index == 2) {
1451 Representation left_rep = left()->representation();
1452 if (left_rep.IsExternal()) {
1453 if (external_add_type_ == AddOfExternalAndTagged) {
1454 return Representation::Tagged();
1455 } else {
1456 return Representation::Integer32();
1457 }
1458 }
1459 }
1460 return HArithmeticBinaryOperation::RequiredInputRepresentation(index);
1461 }
1462
1463
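// True when arg2 is the integer identity element for the operation (e.g.
// x + 0 or x * 1) and arg1 already has a specialized (untagged)
// representation, so the operation can be folded away.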
static bool IsIdentityOperation(HValue* arg1, HValue* arg2, int32_t identity) {
1465 return arg1->representation().IsSpecialization() &&
1466 arg2->EqualsInteger32Constant(identity);
1467 }
1468
1469
HValue* HAdd::Canonicalize() {
1471 // Adding 0 is an identity operation except in case of -0: -0 + 0 = +0
1472 if (IsIdentityOperation(left(), right(), 0) &&
1473 !left()->representation().IsDouble()) { // Left could be -0.
1474 return left();
1475 }
1476 if (IsIdentityOperation(right(), left(), 0) &&
1477 !left()->representation().IsDouble()) { // Right could be -0.
1478 return right();
1479 }
1480 return this;
1481 }
1482
1483
HValue* HSub::Canonicalize() {
1485 if (IsIdentityOperation(left(), right(), 0)) return left();
1486 return this;
1487 }
1488
1489
HValue* HMul::Canonicalize() {
1491 if (IsIdentityOperation(left(), right(), 1)) return left();
1492 if (IsIdentityOperation(right(), left(), 1)) return right();
1493 return this;
1494 }
1495
1496
bool HMul::MulMinusOne() {
1498 if (left()->EqualsInteger32Constant(-1) ||
1499 right()->EqualsInteger32Constant(-1)) {
1500 return true;
1501 }
1502
1503 return false;
1504 }
1505
1506
HValue* HMod::Canonicalize() {
1508 return this;
1509 }
1510
1511
HValue* HDiv::Canonicalize() {
1513 if (IsIdentityOperation(left(), right(), 1)) return left();
1514 return this;
1515 }
1516
1517
HValue* HChange::Canonicalize() {
1519 return (from().Equals(to())) ? value() : this;
1520 }
1521
1522
HValue* HWrapReceiver::Canonicalize() {
1524 if (HasNoUses()) return NULL;
1525 if (receiver()->type().IsJSReceiver()) {
1526 return receiver();
1527 }
1528 return this;
1529 }
1530
1531
std::ostream& HTypeof::PrintDataTo(std::ostream& os) const { // NOLINT
1533 return os << NameOf(value());
1534 }
1535
1536
HInstruction* HForceRepresentation::New(Isolate* isolate, Zone* zone,
1538 HValue* context, HValue* value,
1539 Representation representation) {
1540 if (FLAG_fold_constants && value->IsConstant()) {
1541 HConstant* c = HConstant::cast(value);
1542 c = c->CopyToRepresentation(representation, zone);
1543 if (c != NULL) return c;
1544 }
1545 return new(zone) HForceRepresentation(value, representation);
1546 }
1547
1548
std::ostream& HForceRepresentation::PrintDataTo(
1550 std::ostream& os) const { // NOLINT
1551 return os << representation().Mnemonic() << " " << NameOf(value());
1552 }
1553
1554
std::ostream& HChange::PrintDataTo(std::ostream& os) const { // NOLINT
1556 HUnaryOperation::PrintDataTo(os);
1557 os << " " << from().Mnemonic() << " to " << to().Mnemonic();
1558
1559 if (CanTruncateToSmi()) os << " truncating-smi";
1560 if (CanTruncateToInt32()) os << " truncating-int32";
1561 if (CheckFlag(kBailoutOnMinusZero)) os << " -0?";
1562 if (CheckFlag(kAllowUndefinedAsNaN)) os << " allow-undefined-as-nan";
1563 return os;
1564 }
1565
1566
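// Floor/round of a value that is already an integer is a no-op apart from a
// possible representation change, and Math.floor of an integer division can
// be strength-reduced to a single HMathFloorOfDiv instruction.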
HValue* HUnaryMathOperation::Canonicalize() {
1568 if (op() == kMathRound || op() == kMathFloor) {
1569 HValue* val = value();
1570 if (val->IsChange()) val = HChange::cast(val)->value();
1571 if (val->representation().IsSmiOrInteger32()) {
1572 if (val->representation().Equals(representation())) return val;
1573 return Prepend(new(block()->zone()) HChange(
1574 val, representation(), false, false));
1575 }
1576 }
1577 if (op() == kMathFloor && value()->IsDiv() && value()->HasOneUse()) {
1578 HDiv* hdiv = HDiv::cast(value());
1579
1580 HValue* left = hdiv->left();
1581 if (left->representation().IsInteger32() && !left->CheckFlag(kUint32)) {
1582 // A value with an integer representation does not need to be transformed.
1583 } else if (left->IsChange() && HChange::cast(left)->from().IsInteger32() &&
1584 !HChange::cast(left)->value()->CheckFlag(kUint32)) {
1585 // A change from an integer32 can be replaced by the integer32 value.
1586 left = HChange::cast(left)->value();
1587 } else if (hdiv->observed_input_representation(1).IsSmiOrInteger32()) {
1588 left = Prepend(new(block()->zone()) HChange(
1589 left, Representation::Integer32(), false, false));
1590 } else {
1591 return this;
1592 }
1593
1594 HValue* right = hdiv->right();
1595 if (right->IsInteger32Constant()) {
1596 right = Prepend(HConstant::cast(right)->CopyToRepresentation(
1597 Representation::Integer32(), right->block()->zone()));
1598 } else if (right->representation().IsInteger32() &&
1599 !right->CheckFlag(kUint32)) {
1600 // A value with an integer representation does not need to be transformed.
1601 } else if (right->IsChange() &&
1602 HChange::cast(right)->from().IsInteger32() &&
1603 !HChange::cast(right)->value()->CheckFlag(kUint32)) {
1604 // A change from an integer32 can be replaced by the integer32 value.
1605 right = HChange::cast(right)->value();
1606 } else if (hdiv->observed_input_representation(2).IsSmiOrInteger32()) {
1607 right = Prepend(new(block()->zone()) HChange(
1608 right, Representation::Integer32(), false, false));
1609 } else {
1610 return this;
1611 }
1612
1613 return Prepend(HMathFloorOfDiv::New(
1614 block()->graph()->isolate(), block()->zone(), context(), left, right));
1615 }
1616 return this;
1617 }
1618
1619
HValue* HCheckInstanceType::Canonicalize() {
1621 if ((check_ == IS_JS_RECEIVER && value()->type().IsJSReceiver()) ||
1622 (check_ == IS_JS_ARRAY && value()->type().IsJSArray()) ||
1623 (check_ == IS_STRING && value()->type().IsString())) {
1624 return value();
1625 }
1626
1627 if (check_ == IS_INTERNALIZED_STRING && value()->IsConstant()) {
1628 if (HConstant::cast(value())->HasInternalizedStringValue()) {
1629 return value();
1630 }
1631 }
1632 return this;
1633 }
1634
1635
void HCheckInstanceType::GetCheckInterval(InstanceType* first,
1637 InstanceType* last) {
1638 DCHECK(is_interval_check());
1639 switch (check_) {
1640 case IS_JS_RECEIVER:
1641 *first = FIRST_JS_RECEIVER_TYPE;
1642 *last = LAST_JS_RECEIVER_TYPE;
1643 return;
1644 case IS_JS_ARRAY:
1645 *first = *last = JS_ARRAY_TYPE;
1646 return;
1647 case IS_JS_DATE:
1648 *first = *last = JS_DATE_TYPE;
1649 return;
1650 default:
1651 UNREACHABLE();
1652 }
1653 }
1654
1655
void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
1657 DCHECK(!is_interval_check());
1658 switch (check_) {
1659 case IS_STRING:
1660 *mask = kIsNotStringMask;
1661 *tag = kStringTag;
1662 return;
1663 case IS_INTERNALIZED_STRING:
1664 *mask = kIsNotStringMask | kIsNotInternalizedMask;
1665 *tag = kInternalizedTag;
1666 return;
1667 default:
1668 UNREACHABLE();
1669 }
1670 }
1671
1672
std::ostream& HCheckMaps::PrintDataTo(std::ostream& os) const { // NOLINT
1674 os << NameOf(value()) << " [" << *maps()->at(0).handle();
1675 for (int i = 1; i < maps()->size(); ++i) {
1676 os << "," << *maps()->at(i).handle();
1677 }
1678 os << "]";
1679 if (IsStabilityCheck()) os << "(stability-check)";
1680 return os;
1681 }
1682
1683
HValue* HCheckMaps::Canonicalize() {
1685 if (!IsStabilityCheck() && maps_are_stable() && value()->IsConstant()) {
1686 HConstant* c_value = HConstant::cast(value());
1687 if (c_value->HasObjectMap()) {
1688 for (int i = 0; i < maps()->size(); ++i) {
1689 if (c_value->ObjectMap() == maps()->at(i)) {
1690 if (maps()->size() > 1) {
1691 set_maps(new(block()->graph()->zone()) UniqueSet<Map>(
1692 maps()->at(i), block()->graph()->zone()));
1693 }
1694 MarkAsStabilityCheck();
1695 break;
1696 }
1697 }
1698 }
1699 }
1700 return this;
1701 }
1702
1703
std::ostream& HCheckValue::PrintDataTo(std::ostream& os) const { // NOLINT
1705 return os << NameOf(value()) << " " << Brief(*object().handle());
1706 }
1707
1708
HValue* HCheckValue::Canonicalize() {
1710 return (value()->IsConstant() &&
1711 HConstant::cast(value())->EqualsUnique(object_)) ? NULL : this;
1712 }
1713
1714
const char* HCheckInstanceType::GetCheckName() const {
1716 switch (check_) {
1717 case IS_JS_RECEIVER: return "object";
1718 case IS_JS_ARRAY: return "array";
1719 case IS_JS_DATE:
1720 return "date";
1721 case IS_STRING: return "string";
1722 case IS_INTERNALIZED_STRING: return "internalized_string";
1723 }
1724 UNREACHABLE();
1725 return "";
1726 }
1727
1728
std::ostream& HCheckInstanceType::PrintDataTo(
1730 std::ostream& os) const { // NOLINT
1731 os << GetCheckName() << " ";
1732 return HUnaryOperation::PrintDataTo(os);
1733 }
1734
1735
std::ostream& HCallStub::PrintDataTo(std::ostream& os) const { // NOLINT
1737 os << CodeStub::MajorName(major_key_) << " ";
1738 return HUnaryCall::PrintDataTo(os);
1739 }
1740
1741
std::ostream& HUnknownOSRValue::PrintDataTo(std::ostream& os) const { // NOLINT
1743 const char* type = "expression";
1744 if (environment_->is_local_index(index_)) type = "local";
1745 if (environment_->is_special_index(index_)) type = "special";
1746 if (environment_->is_parameter_index(index_)) type = "parameter";
1747 return os << type << " @ " << index_;
1748 }
1749
1750
PrintDataTo(std::ostream & os) const1751 std::ostream& HInstanceOf::PrintDataTo(std::ostream& os) const { // NOLINT
1752 return os << NameOf(left()) << " " << NameOf(right()) << " "
1753 << NameOf(context());
1754 }
1755
1756
InferRange(Zone * zone)1757 Range* HValue::InferRange(Zone* zone) {
1758 Range* result;
1759 if (representation().IsSmi() || type().IsSmi()) {
1760 result = new(zone) Range(Smi::kMinValue, Smi::kMaxValue);
1761 result->set_can_be_minus_zero(false);
1762 } else {
1763 result = new(zone) Range();
1764 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32));
1765 // TODO(jkummerow): The range cannot be minus zero when the upper type
1766 // bound is Integer32.
1767 }
1768 return result;
1769 }
1770
1771
InferRange(Zone * zone)1772 Range* HChange::InferRange(Zone* zone) {
1773 Range* input_range = value()->range();
1774 if (from().IsInteger32() && !value()->CheckFlag(HInstruction::kUint32) &&
1775 (to().IsSmi() ||
1776 (to().IsTagged() &&
1777 input_range != NULL &&
1778 input_range->IsInSmiRange()))) {
1779 set_type(HType::Smi());
1780 ClearChangesFlag(kNewSpacePromotion);
1781 }
1782 if (to().IsSmiOrTagged() &&
1783 input_range != NULL &&
1784 input_range->IsInSmiRange() &&
1785 (!SmiValuesAre32Bits() ||
1786 !value()->CheckFlag(HValue::kUint32) ||
1787 input_range->upper() != kMaxInt)) {
1788 // The Range class can't express upper bounds in the (kMaxInt, kMaxUint32]
1789 // interval, so we treat kMaxInt as a sentinel for this entire interval.
1790 ClearFlag(kCanOverflow);
1791 }
1792 Range* result = (input_range != NULL)
1793 ? input_range->Copy(zone)
1794 : HValue::InferRange(zone);
1795 result->set_can_be_minus_zero(!to().IsSmiOrInteger32() ||
1796 !(CheckFlag(kAllUsesTruncatingToInt32) ||
1797 CheckFlag(kAllUsesTruncatingToSmi)));
1798 if (to().IsSmi()) result->ClampToSmi();
1799 return result;
1800 }
1801
1802
InferRange(Zone * zone)1803 Range* HConstant::InferRange(Zone* zone) {
1804 if (HasInteger32Value()) {
1805 Range* result = new(zone) Range(int32_value_, int32_value_);
1806 result->set_can_be_minus_zero(false);
1807 return result;
1808 }
1809 return HValue::InferRange(zone);
1810 }
1811
1812
position() const1813 SourcePosition HPhi::position() const { return block()->first()->position(); }
1814
1815
InferRange(Zone * zone)1816 Range* HPhi::InferRange(Zone* zone) {
1817 Representation r = representation();
1818 if (r.IsSmiOrInteger32()) {
1819 if (block()->IsLoopHeader()) {
1820 Range* range = r.IsSmi()
1821 ? new(zone) Range(Smi::kMinValue, Smi::kMaxValue)
1822 : new(zone) Range(kMinInt, kMaxInt);
1823 return range;
1824 } else {
1825 Range* range = OperandAt(0)->range()->Copy(zone);
1826 for (int i = 1; i < OperandCount(); ++i) {
1827 range->Union(OperandAt(i)->range());
1828 }
1829 return range;
1830 }
1831 } else {
1832 return HValue::InferRange(zone);
1833 }
1834 }
1835
1836
InferRange(Zone * zone)1837 Range* HAdd::InferRange(Zone* zone) {
1838 Representation r = representation();
1839 if (r.IsSmiOrInteger32()) {
1840 Range* a = left()->range();
1841 Range* b = right()->range();
1842 Range* res = a->Copy(zone);
1843 if (!res->AddAndCheckOverflow(r, b) ||
1844 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1845 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
1846 ClearFlag(kCanOverflow);
1847 }
1848 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1849 !CheckFlag(kAllUsesTruncatingToInt32) &&
1850 a->CanBeMinusZero() && b->CanBeMinusZero());
1851 return res;
1852 } else {
1853 return HValue::InferRange(zone);
1854 }
1855 }
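
// Illustrative example (not part of the original source): if the left range
// is [0, 10] and the right range is [5, 20], the summed range is [5, 30] and
// no overflow is possible, so kCanOverflow is cleared; the result can only be
// -0 when both inputs can be -0 (e.g. -0 + -0), which is why both
// CanBeMinusZero() checks are required.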
1856
1857
InferRange(Zone * zone)1858 Range* HSub::InferRange(Zone* zone) {
1859 Representation r = representation();
1860 if (r.IsSmiOrInteger32()) {
1861 Range* a = left()->range();
1862 Range* b = right()->range();
1863 Range* res = a->Copy(zone);
1864 if (!res->SubAndCheckOverflow(r, b) ||
1865 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1866 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
1867 ClearFlag(kCanOverflow);
1868 }
1869 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1870 !CheckFlag(kAllUsesTruncatingToInt32) &&
1871 a->CanBeMinusZero() && b->CanBeZero());
1872 return res;
1873 } else {
1874 return HValue::InferRange(zone);
1875 }
1876 }
1877
1878
InferRange(Zone * zone)1879 Range* HMul::InferRange(Zone* zone) {
1880 Representation r = representation();
1881 if (r.IsSmiOrInteger32()) {
1882 Range* a = left()->range();
1883 Range* b = right()->range();
1884 Range* res = a->Copy(zone);
1885 if (!res->MulAndCheckOverflow(r, b) ||
1886 (((r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1887 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) &&
1888 MulMinusOne())) {
1889 // Truncated int multiplication is too precise and therefore not the
1890 // same as converting to Double and back.
1891       // Handle truncated integer multiplication by -1 specially.
1892 ClearFlag(kCanOverflow);
1893 }
1894 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1895 !CheckFlag(kAllUsesTruncatingToInt32) &&
1896 ((a->CanBeZero() && b->CanBeNegative()) ||
1897 (a->CanBeNegative() && b->CanBeZero())));
1898 return res;
1899 } else {
1900 return HValue::InferRange(zone);
1901 }
1902 }
1903
1904
InferRange(Zone * zone)1905 Range* HDiv::InferRange(Zone* zone) {
1906 if (representation().IsInteger32()) {
1907 Range* a = left()->range();
1908 Range* b = right()->range();
1909 Range* result = new(zone) Range();
1910 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1911 (a->CanBeMinusZero() ||
1912 (a->CanBeZero() && b->CanBeNegative())));
1913 if (!a->Includes(kMinInt) || !b->Includes(-1)) {
1914 ClearFlag(kCanOverflow);
1915 }
1916
1917 if (!b->CanBeZero()) {
1918 ClearFlag(kCanBeDivByZero);
1919 }
1920 return result;
1921 } else {
1922 return HValue::InferRange(zone);
1923 }
1924 }
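
// Illustrative example (not part of the original source): with a left range
// of [0, 100] and a right range of [1, 10], the divisor range contains
// neither 0 nor -1, so both kCanBeDivByZero and kCanOverflow are cleared
// (kMinInt / -1 is the only overflowing int32 division).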
1925
1926
InferRange(Zone * zone)1927 Range* HMathFloorOfDiv::InferRange(Zone* zone) {
1928 if (representation().IsInteger32()) {
1929 Range* a = left()->range();
1930 Range* b = right()->range();
1931 Range* result = new(zone) Range();
1932 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1933 (a->CanBeMinusZero() ||
1934 (a->CanBeZero() && b->CanBeNegative())));
1935 if (!a->Includes(kMinInt)) {
1936 ClearFlag(kLeftCanBeMinInt);
1937 }
1938
1939 if (!a->CanBeNegative()) {
1940 ClearFlag(HValue::kLeftCanBeNegative);
1941 }
1942
1943 if (!a->CanBePositive()) {
1944 ClearFlag(HValue::kLeftCanBePositive);
1945 }
1946
1947 if (!a->Includes(kMinInt) || !b->Includes(-1)) {
1948 ClearFlag(kCanOverflow);
1949 }
1950
1951 if (!b->CanBeZero()) {
1952 ClearFlag(kCanBeDivByZero);
1953 }
1954 return result;
1955 } else {
1956 return HValue::InferRange(zone);
1957 }
1958 }
1959
1960
1961 // Returns the absolute value of its argument minus one, avoiding undefined
1962 // behavior at kMinInt.
AbsMinus1(int32_t a)1963 static int32_t AbsMinus1(int32_t a) { return a < 0 ? -(a + 1) : (a - 1); }
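
// Illustrative values (not part of the original source):
//   AbsMinus1(3)       == 2
//   AbsMinus1(-5)      == 4
//   AbsMinus1(kMinInt) == kMaxInt   // computed as -(kMinInt + 1), no overflow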
1964
1965
InferRange(Zone * zone)1966 Range* HMod::InferRange(Zone* zone) {
1967 if (representation().IsInteger32()) {
1968 Range* a = left()->range();
1969 Range* b = right()->range();
1970
1971     // The result's magnitude is bounded by that of the right operand.
1972 int32_t positive_bound = Max(AbsMinus1(b->lower()), AbsMinus1(b->upper()));
1973
1974 // The result of the modulo operation has the sign of its left operand.
1975 bool left_can_be_negative = a->CanBeMinusZero() || a->CanBeNegative();
1976 Range* result = new(zone) Range(left_can_be_negative ? -positive_bound : 0,
1977 a->CanBePositive() ? positive_bound : 0);
1978
1979 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1980 left_can_be_negative);
1981
1982 if (!a->CanBeNegative()) {
1983 ClearFlag(HValue::kLeftCanBeNegative);
1984 }
1985
1986 if (!a->Includes(kMinInt) || !b->Includes(-1)) {
1987 ClearFlag(HValue::kCanOverflow);
1988 }
1989
1990 if (!b->CanBeZero()) {
1991 ClearFlag(HValue::kCanBeDivByZero);
1992 }
1993 return result;
1994 } else {
1995 return HValue::InferRange(zone);
1996 }
1997 }
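
// Worked example (illustrative, not from the original source): with a left
// range of [-10, 7] and a right range of [-5, 3], positive_bound is
// Max(AbsMinus1(-5), AbsMinus1(3)) == Max(4, 2) == 4, giving a result range
// of [-4, 4]; the left range excludes kMinInt, so kCanOverflow is cleared,
// but the right range still allows 0, so kCanBeDivByZero is kept.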
1998
1999
ExaminePhi(HPhi * phi)2000 InductionVariableData* InductionVariableData::ExaminePhi(HPhi* phi) {
2001 if (phi->block()->loop_information() == NULL) return NULL;
2002 if (phi->OperandCount() != 2) return NULL;
2003 int32_t candidate_increment;
2004
2005 candidate_increment = ComputeIncrement(phi, phi->OperandAt(0));
2006 if (candidate_increment != 0) {
2007 return new(phi->block()->graph()->zone())
2008 InductionVariableData(phi, phi->OperandAt(1), candidate_increment);
2009 }
2010
2011 candidate_increment = ComputeIncrement(phi, phi->OperandAt(1));
2012 if (candidate_increment != 0) {
2013 return new(phi->block()->graph()->zone())
2014 InductionVariableData(phi, phi->OperandAt(0), candidate_increment);
2015 }
2016
2017 return NULL;
2018 }
2019
2020
2021 /*
2022 * This function tries to match the following patterns (and all the relevant
2023 * variants related to |, & and + being commutative):
2024 * base | constant_or_mask
2025 * base & constant_and_mask
2026 * (base + constant_offset) & constant_and_mask
2027 * (base - constant_offset) & constant_and_mask
2028 */
DecomposeBitwise(HValue * value,BitwiseDecompositionResult * result)2029 void InductionVariableData::DecomposeBitwise(
2030 HValue* value,
2031 BitwiseDecompositionResult* result) {
2032 HValue* base = IgnoreOsrValue(value);
2033 result->base = value;
2034
2035 if (!base->representation().IsInteger32()) return;
2036
2037 if (base->IsBitwise()) {
2038 bool allow_offset = false;
2039 int32_t mask = 0;
2040
2041 HBitwise* bitwise = HBitwise::cast(base);
2042 if (bitwise->right()->IsInteger32Constant()) {
2043 mask = bitwise->right()->GetInteger32Constant();
2044 base = bitwise->left();
2045 } else if (bitwise->left()->IsInteger32Constant()) {
2046 mask = bitwise->left()->GetInteger32Constant();
2047 base = bitwise->right();
2048 } else {
2049 return;
2050 }
2051 if (bitwise->op() == Token::BIT_AND) {
2052 result->and_mask = mask;
2053 allow_offset = true;
2054 } else if (bitwise->op() == Token::BIT_OR) {
2055 result->or_mask = mask;
2056 } else {
2057 return;
2058 }
2059
2060 result->context = bitwise->context();
2061
2062 if (allow_offset) {
2063 if (base->IsAdd()) {
2064 HAdd* add = HAdd::cast(base);
2065 if (add->right()->IsInteger32Constant()) {
2066 base = add->left();
2067 } else if (add->left()->IsInteger32Constant()) {
2068 base = add->right();
2069 }
2070 } else if (base->IsSub()) {
2071 HSub* sub = HSub::cast(base);
2072 if (sub->right()->IsInteger32Constant()) {
2073 base = sub->left();
2074 }
2075 }
2076 }
2077
2078 result->base = base;
2079 }
2080 }
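
// Illustrative decompositions (not part of the original source): for an index
// expression of the form (i + 8) & 0xff this reports base == i,
// and_mask == 0xff and or_mask == 0; for i | 3 it reports base == i,
// or_mask == 3 and and_mask == 0 (an added offset is only skipped for
// BIT_AND, since allow_offset stays false for BIT_OR).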
2081
2082
AddCheck(HBoundsCheck * check,int32_t upper_limit)2083 void InductionVariableData::AddCheck(HBoundsCheck* check,
2084 int32_t upper_limit) {
2085 DCHECK(limit_validity() != NULL);
2086 if (limit_validity() != check->block() &&
2087 !limit_validity()->Dominates(check->block())) return;
2088 if (!phi()->block()->current_loop()->IsNestedInThisLoop(
2089 check->block()->current_loop())) return;
2090
2091 ChecksRelatedToLength* length_checks = checks();
2092 while (length_checks != NULL) {
2093 if (length_checks->length() == check->length()) break;
2094 length_checks = length_checks->next();
2095 }
2096 if (length_checks == NULL) {
2097 length_checks = new(check->block()->zone())
2098 ChecksRelatedToLength(check->length(), checks());
2099 checks_ = length_checks;
2100 }
2101
2102 length_checks->AddCheck(check, upper_limit);
2103 }
2104
2105
CloseCurrentBlock()2106 void InductionVariableData::ChecksRelatedToLength::CloseCurrentBlock() {
2107 if (checks() != NULL) {
2108 InductionVariableCheck* c = checks();
2109 HBasicBlock* current_block = c->check()->block();
2110 while (c != NULL && c->check()->block() == current_block) {
2111 c->set_upper_limit(current_upper_limit_);
2112 c = c->next();
2113 }
2114 }
2115 }
2116
2117
UseNewIndexInCurrentBlock(Token::Value token,int32_t mask,HValue * index_base,HValue * context)2118 void InductionVariableData::ChecksRelatedToLength::UseNewIndexInCurrentBlock(
2119 Token::Value token,
2120 int32_t mask,
2121 HValue* index_base,
2122 HValue* context) {
2123 DCHECK(first_check_in_block() != NULL);
2124 HValue* previous_index = first_check_in_block()->index();
2125 DCHECK(context != NULL);
2126
2127 Zone* zone = index_base->block()->graph()->zone();
2128 Isolate* isolate = index_base->block()->graph()->isolate();
2129 set_added_constant(HConstant::New(isolate, zone, context, mask));
2130 if (added_index() != NULL) {
2131 added_constant()->InsertBefore(added_index());
2132 } else {
2133 added_constant()->InsertBefore(first_check_in_block());
2134 }
2135
2136 if (added_index() == NULL) {
2137 first_check_in_block()->ReplaceAllUsesWith(first_check_in_block()->index());
2138 HInstruction* new_index = HBitwise::New(isolate, zone, context, token,
2139 index_base, added_constant());
2140 DCHECK(new_index->IsBitwise());
2141 new_index->ClearAllSideEffects();
2142 new_index->AssumeRepresentation(Representation::Integer32());
2143 set_added_index(HBitwise::cast(new_index));
2144 added_index()->InsertBefore(first_check_in_block());
2145 }
2146 DCHECK(added_index()->op() == token);
2147
2148 added_index()->SetOperandAt(1, index_base);
2149 added_index()->SetOperandAt(2, added_constant());
2150 first_check_in_block()->SetOperandAt(0, added_index());
2151 if (previous_index->HasNoUses()) {
2152 previous_index->DeleteAndReplaceWith(NULL);
2153 }
2154 }
2155
AddCheck(HBoundsCheck * check,int32_t upper_limit)2156 void InductionVariableData::ChecksRelatedToLength::AddCheck(
2157 HBoundsCheck* check,
2158 int32_t upper_limit) {
2159 BitwiseDecompositionResult decomposition;
2160 InductionVariableData::DecomposeBitwise(check->index(), &decomposition);
2161
2162 if (first_check_in_block() == NULL ||
2163 first_check_in_block()->block() != check->block()) {
2164 CloseCurrentBlock();
2165
2166 first_check_in_block_ = check;
2167 set_added_index(NULL);
2168 set_added_constant(NULL);
2169 current_and_mask_in_block_ = decomposition.and_mask;
2170 current_or_mask_in_block_ = decomposition.or_mask;
2171 current_upper_limit_ = upper_limit;
2172
2173 InductionVariableCheck* new_check = new(check->block()->graph()->zone())
2174 InductionVariableCheck(check, checks_, upper_limit);
2175 checks_ = new_check;
2176 return;
2177 }
2178
2179 if (upper_limit > current_upper_limit()) {
2180 current_upper_limit_ = upper_limit;
2181 }
2182
2183 if (decomposition.and_mask != 0 &&
2184 current_or_mask_in_block() == 0) {
2185 if (current_and_mask_in_block() == 0 ||
2186 decomposition.and_mask > current_and_mask_in_block()) {
2187 UseNewIndexInCurrentBlock(Token::BIT_AND,
2188 decomposition.and_mask,
2189 decomposition.base,
2190 decomposition.context);
2191 current_and_mask_in_block_ = decomposition.and_mask;
2192 }
2193 check->set_skip_check();
2194 }
2195 if (current_and_mask_in_block() == 0) {
2196 if (decomposition.or_mask > current_or_mask_in_block()) {
2197 UseNewIndexInCurrentBlock(Token::BIT_OR,
2198 decomposition.or_mask,
2199 decomposition.base,
2200 decomposition.context);
2201 current_or_mask_in_block_ = decomposition.or_mask;
2202 }
2203 check->set_skip_check();
2204 }
2205
2206 if (!check->skip_check()) {
2207 InductionVariableCheck* new_check = new(check->block()->graph()->zone())
2208 InductionVariableCheck(check, checks_, upper_limit);
2209 checks_ = new_check;
2210 }
2211 }
2212
2213
2214 /*
2215 * This method detects if phi is an induction variable, with phi_operand as
2216 * its "incremented" value (the other operand would be the "base" value).
2217 *
2218  * It checks whether phi_operand has the form "phi + constant".
2219 * If yes, the constant is the increment that the induction variable gets at
2220 * every loop iteration.
2221 * Otherwise it returns 0.
2222 */
ComputeIncrement(HPhi * phi,HValue * phi_operand)2223 int32_t InductionVariableData::ComputeIncrement(HPhi* phi,
2224 HValue* phi_operand) {
2225 if (!phi_operand->representation().IsSmiOrInteger32()) return 0;
2226
2227 if (phi_operand->IsAdd()) {
2228 HAdd* operation = HAdd::cast(phi_operand);
2229 if (operation->left() == phi &&
2230 operation->right()->IsInteger32Constant()) {
2231 return operation->right()->GetInteger32Constant();
2232 } else if (operation->right() == phi &&
2233 operation->left()->IsInteger32Constant()) {
2234 return operation->left()->GetInteger32Constant();
2235 }
2236 } else if (phi_operand->IsSub()) {
2237 HSub* operation = HSub::cast(phi_operand);
2238 if (operation->left() == phi &&
2239 operation->right()->IsInteger32Constant()) {
2240 int constant = operation->right()->GetInteger32Constant();
2241 if (constant == kMinInt) return 0;
2242 return -constant;
2243 }
2244 }
2245
2246 return 0;
2247 }
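
// Illustrative example (not part of the original source): if the back-edge
// operand of a loop phi is the HAdd "phi + 1" this returns 1, for the HSub
// "phi - 3" it returns -3, and for any operand that is not an add or subtract
// of an integer constant with the phi it returns 0 (as it also does for a
// subtracted constant of kMinInt, whose negation would overflow).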
2248
2249
2250 /*
2251 * Swaps the information in "update" with the one contained in "this".
2252 * The swapping is important because this method is used while doing a
2253 * dominator tree traversal, and "update" will retain the old data that
2254 * will be restored while backtracking.
2255 */
UpdateAdditionalLimit(InductionVariableLimitUpdate * update)2256 void InductionVariableData::UpdateAdditionalLimit(
2257 InductionVariableLimitUpdate* update) {
2258 DCHECK(update->updated_variable == this);
2259 if (update->limit_is_upper) {
2260 swap(&additional_upper_limit_, &update->limit);
2261 swap(&additional_upper_limit_is_included_, &update->limit_is_included);
2262 } else {
2263 swap(&additional_lower_limit_, &update->limit);
2264 swap(&additional_lower_limit_is_included_, &update->limit_is_included);
2265 }
2266 }
2267
2268
ComputeUpperLimit(int32_t and_mask,int32_t or_mask)2269 int32_t InductionVariableData::ComputeUpperLimit(int32_t and_mask,
2270 int32_t or_mask) {
2271   // Should be Smi::kMaxValue, but it must fit in 32 bits; lower is safe anyway.
2272 const int32_t MAX_LIMIT = 1 << 30;
2273
2274 int32_t result = MAX_LIMIT;
2275
2276 if (limit() != NULL &&
2277 limit()->IsInteger32Constant()) {
2278 int32_t limit_value = limit()->GetInteger32Constant();
2279 if (!limit_included()) {
2280 limit_value--;
2281 }
2282 if (limit_value < result) result = limit_value;
2283 }
2284
2285 if (additional_upper_limit() != NULL &&
2286 additional_upper_limit()->IsInteger32Constant()) {
2287 int32_t limit_value = additional_upper_limit()->GetInteger32Constant();
2288 if (!additional_upper_limit_is_included()) {
2289 limit_value--;
2290 }
2291 if (limit_value < result) result = limit_value;
2292 }
2293
2294 if (and_mask > 0 && and_mask < MAX_LIMIT) {
2295 if (and_mask < result) result = and_mask;
2296 return result;
2297 }
2298
2299 // Add the effect of the or_mask.
2300 result |= or_mask;
2301
2302 return result >= MAX_LIMIT ? kNoLimit : result;
2303 }
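
// Worked example (illustrative, not from the original source): with an
// exclusive constant limit of 100 (limit_value becomes 99), no additional
// upper limit and and_mask == 0x0f, the result is Min(99, 0x0f) == 15;
// with and_mask == 0 and or_mask == 0x10 instead, the result is
// 99 | 0x10 == 115.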
2304
2305
IgnoreOsrValue(HValue * v)2306 HValue* InductionVariableData::IgnoreOsrValue(HValue* v) {
2307 if (!v->IsPhi()) return v;
2308 HPhi* phi = HPhi::cast(v);
2309 if (phi->OperandCount() != 2) return v;
2310 if (phi->OperandAt(0)->block()->is_osr_entry()) {
2311 return phi->OperandAt(1);
2312 } else if (phi->OperandAt(1)->block()->is_osr_entry()) {
2313 return phi->OperandAt(0);
2314 } else {
2315 return v;
2316 }
2317 }
2318
2319
GetInductionVariableData(HValue * v)2320 InductionVariableData* InductionVariableData::GetInductionVariableData(
2321 HValue* v) {
2322 v = IgnoreOsrValue(v);
2323 if (v->IsPhi()) {
2324 return HPhi::cast(v)->induction_variable_data();
2325 }
2326 return NULL;
2327 }
2328
2329
2330 /*
2331 * Check if a conditional branch to "current_branch" with token "token" is
2332 * the branch that keeps the induction loop running (and, conversely, will
2333 * terminate it if the "other_branch" is taken).
2334 *
2335 * Three conditions must be met:
2336 * - "current_branch" must be in the induction loop.
2337 * - "other_branch" must be out of the induction loop.
2338 * - "token" and the induction increment must be "compatible": the token should
2339 * be a condition that keeps the execution inside the loop until the limit is
2340 * reached.
2341 */
CheckIfBranchIsLoopGuard(Token::Value token,HBasicBlock * current_branch,HBasicBlock * other_branch)2342 bool InductionVariableData::CheckIfBranchIsLoopGuard(
2343 Token::Value token,
2344 HBasicBlock* current_branch,
2345 HBasicBlock* other_branch) {
2346 if (!phi()->block()->current_loop()->IsNestedInThisLoop(
2347 current_branch->current_loop())) {
2348 return false;
2349 }
2350
2351 if (phi()->block()->current_loop()->IsNestedInThisLoop(
2352 other_branch->current_loop())) {
2353 return false;
2354 }
2355
2356 if (increment() > 0 && (token == Token::LT || token == Token::LTE)) {
2357 return true;
2358 }
2359 if (increment() < 0 && (token == Token::GT || token == Token::GTE)) {
2360 return true;
2361 }
2362 if (Token::IsInequalityOp(token) && (increment() == 1 || increment() == -1)) {
2363 return true;
2364 }
2365
2366 return false;
2367 }
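
// Illustrative example (not part of the original source): in a loop of the
// form "for (i = 0; i < n; i++)" the increment is +1 and the branch taken
// when "i < n" holds (token LT) stays inside the loop, so it is recognized
// as the loop guard; with a decrement of -1 the same LT comparison would not
// be, because a decreasing variable is not driven towards an upper limit.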
2368
2369
ComputeLimitFromPredecessorBlock(HBasicBlock * block,LimitFromPredecessorBlock * result)2370 void InductionVariableData::ComputeLimitFromPredecessorBlock(
2371 HBasicBlock* block,
2372 LimitFromPredecessorBlock* result) {
2373 if (block->predecessors()->length() != 1) return;
2374 HBasicBlock* predecessor = block->predecessors()->at(0);
2375 HInstruction* end = predecessor->last();
2376
2377 if (!end->IsCompareNumericAndBranch()) return;
2378 HCompareNumericAndBranch* branch = HCompareNumericAndBranch::cast(end);
2379
2380 Token::Value token = branch->token();
2381 if (!Token::IsArithmeticCompareOp(token)) return;
2382
2383 HBasicBlock* other_target;
2384 if (block == branch->SuccessorAt(0)) {
2385 other_target = branch->SuccessorAt(1);
2386 } else {
2387 other_target = branch->SuccessorAt(0);
2388 token = Token::NegateCompareOp(token);
2389 DCHECK(block == branch->SuccessorAt(1));
2390 }
2391
2392 InductionVariableData* data;
2393
2394 data = GetInductionVariableData(branch->left());
2395 HValue* limit = branch->right();
2396 if (data == NULL) {
2397 data = GetInductionVariableData(branch->right());
2398 token = Token::ReverseCompareOp(token);
2399 limit = branch->left();
2400 }
2401
2402 if (data != NULL) {
2403 result->variable = data;
2404 result->token = token;
2405 result->limit = limit;
2406 result->other_target = other_target;
2407 }
2408 }
2409
2410
2411 /*
2412 * Compute the limit that is imposed on an induction variable when entering
2413 * "block" (if any).
2414 * If the limit is the "proper" induction limit (the one that makes the loop
2415 * terminate when the induction variable reaches it) it is stored directly in
2416 * the induction variable data.
2417 * Otherwise the limit is written in "additional_limit" and the method
2418 * returns true.
2419 */
ComputeInductionVariableLimit(HBasicBlock * block,InductionVariableLimitUpdate * additional_limit)2420 bool InductionVariableData::ComputeInductionVariableLimit(
2421 HBasicBlock* block,
2422 InductionVariableLimitUpdate* additional_limit) {
2423 LimitFromPredecessorBlock limit;
2424 ComputeLimitFromPredecessorBlock(block, &limit);
2425 if (!limit.LimitIsValid()) return false;
2426
2427 if (limit.variable->CheckIfBranchIsLoopGuard(limit.token,
2428 block,
2429 limit.other_target)) {
2430 limit.variable->limit_ = limit.limit;
2431 limit.variable->limit_included_ = limit.LimitIsIncluded();
2432 limit.variable->limit_validity_ = block;
2433 limit.variable->induction_exit_block_ = block->predecessors()->at(0);
2434 limit.variable->induction_exit_target_ = limit.other_target;
2435 return false;
2436 } else {
2437 additional_limit->updated_variable = limit.variable;
2438 additional_limit->limit = limit.limit;
2439 additional_limit->limit_is_upper = limit.LimitIsUpper();
2440 additional_limit->limit_is_included = limit.LimitIsIncluded();
2441 return true;
2442 }
2443 }
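
// Illustrative example (not part of the original source): in
// "for (i = 0; i < n; i++) { if (i < 10) { ... } }" the block entered when
// "i < n" holds receives the proper loop limit (stored directly in the
// induction variable data, so the method returns false), whereas the block
// entered when "i < 10" holds only produces an additional upper limit,
// reported through "additional_limit" with a return value of true.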
2444
2445
InferRange(Zone * zone)2446 Range* HMathMinMax::InferRange(Zone* zone) {
2447 if (representation().IsSmiOrInteger32()) {
2448 Range* a = left()->range();
2449 Range* b = right()->range();
2450 Range* res = a->Copy(zone);
2451 if (operation_ == kMathMax) {
2452 res->CombinedMax(b);
2453 } else {
2454 DCHECK(operation_ == kMathMin);
2455 res->CombinedMin(b);
2456 }
2457 return res;
2458 } else {
2459 return HValue::InferRange(zone);
2460 }
2461 }
2462
2463
AddInput(HValue * value)2464 void HPushArguments::AddInput(HValue* value) {
2465 inputs_.Add(NULL, value->block()->zone());
2466 SetOperandAt(OperandCount() - 1, value);
2467 }
2468
2469
PrintTo(std::ostream & os) const2470 std::ostream& HPhi::PrintTo(std::ostream& os) const { // NOLINT
2471 os << "[";
2472 for (int i = 0; i < OperandCount(); ++i) {
2473 os << " " << NameOf(OperandAt(i)) << " ";
2474 }
2475 return os << " uses" << UseCount()
2476 << representation_from_indirect_uses().Mnemonic() << " "
2477 << TypeOf(this) << "]";
2478 }
2479
2480
AddInput(HValue * value)2481 void HPhi::AddInput(HValue* value) {
2482 inputs_.Add(NULL, value->block()->zone());
2483 SetOperandAt(OperandCount() - 1, value);
2484   // Mark phis that may have 'arguments' as an operand, directly or indirectly.
2485 if (!CheckFlag(kIsArguments) && value->CheckFlag(kIsArguments)) {
2486 SetFlag(kIsArguments);
2487 }
2488 }
2489
2490
HasRealUses()2491 bool HPhi::HasRealUses() {
2492 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2493 if (!it.value()->IsPhi()) return true;
2494 }
2495 return false;
2496 }
2497
2498
GetRedundantReplacement()2499 HValue* HPhi::GetRedundantReplacement() {
2500 HValue* candidate = NULL;
2501 int count = OperandCount();
2502 int position = 0;
2503 while (position < count && candidate == NULL) {
2504 HValue* current = OperandAt(position++);
2505 if (current != this) candidate = current;
2506 }
2507 while (position < count) {
2508 HValue* current = OperandAt(position++);
2509 if (current != this && current != candidate) return NULL;
2510 }
2511 DCHECK(candidate != this);
2512 return candidate;
2513 }
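
// Illustrative example (not part of the original source): a phi whose
// operands are (x, x, <this phi>) is redundant and x is returned, while a phi
// with two distinct operands (x, y) returns NULL.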
2514
2515
DeleteFromGraph()2516 void HPhi::DeleteFromGraph() {
2517 DCHECK(block() != NULL);
2518 block()->RemovePhi(this);
2519 DCHECK(block() == NULL);
2520 }
2521
2522
InitRealUses(int phi_id)2523 void HPhi::InitRealUses(int phi_id) {
2524 // Initialize real uses.
2525 phi_id_ = phi_id;
2526 // Compute a conservative approximation of truncating uses before inferring
2527 // representations. The proper, exact computation will be done later, when
2528 // inserting representation changes.
2529 SetFlag(kTruncatingToSmi);
2530 SetFlag(kTruncatingToInt32);
2531 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2532 HValue* value = it.value();
2533 if (!value->IsPhi()) {
2534 Representation rep = value->observed_input_representation(it.index());
2535 representation_from_non_phi_uses_ =
2536 representation_from_non_phi_uses().generalize(rep);
2537 if (rep.IsSmi() || rep.IsInteger32() || rep.IsDouble()) {
2538 has_type_feedback_from_uses_ = true;
2539 }
2540
2541 if (FLAG_trace_representation) {
2542 PrintF("#%d Phi is used by real #%d %s as %s\n",
2543 id(), value->id(), value->Mnemonic(), rep.Mnemonic());
2544 }
2545 if (!value->IsSimulate()) {
2546 if (!value->CheckFlag(kTruncatingToSmi)) {
2547 ClearFlag(kTruncatingToSmi);
2548 }
2549 if (!value->CheckFlag(kTruncatingToInt32)) {
2550 ClearFlag(kTruncatingToInt32);
2551 }
2552 }
2553 }
2554 }
2555 }
2556
2557
AddNonPhiUsesFrom(HPhi * other)2558 void HPhi::AddNonPhiUsesFrom(HPhi* other) {
2559 if (FLAG_trace_representation) {
2560 PrintF(
2561 "generalizing use representation '%s' of #%d Phi "
2562 "with uses of #%d Phi '%s'\n",
2563 representation_from_indirect_uses().Mnemonic(), id(), other->id(),
2564 other->representation_from_non_phi_uses().Mnemonic());
2565 }
2566
2567 representation_from_indirect_uses_ =
2568 representation_from_indirect_uses().generalize(
2569 other->representation_from_non_phi_uses());
2570 }
2571
2572
MergeWith(ZoneList<HSimulate * > * list)2573 void HSimulate::MergeWith(ZoneList<HSimulate*>* list) {
2574 while (!list->is_empty()) {
2575 HSimulate* from = list->RemoveLast();
2576 ZoneList<HValue*>* from_values = &from->values_;
2577 for (int i = 0; i < from_values->length(); ++i) {
2578 if (from->HasAssignedIndexAt(i)) {
2579 int index = from->GetAssignedIndexAt(i);
2580 if (HasValueForIndex(index)) continue;
2581 AddAssignedValue(index, from_values->at(i));
2582 } else {
2583 if (pop_count_ > 0) {
2584 pop_count_--;
2585 } else {
2586 AddPushedValue(from_values->at(i));
2587 }
2588 }
2589 }
2590 pop_count_ += from->pop_count_;
2591 from->DeleteAndReplaceWith(NULL);
2592 }
2593 }
2594
2595
PrintDataTo(std::ostream & os) const2596 std::ostream& HSimulate::PrintDataTo(std::ostream& os) const { // NOLINT
2597 os << "id=" << ast_id().ToInt();
2598 if (pop_count_ > 0) os << " pop " << pop_count_;
2599 if (values_.length() > 0) {
2600 if (pop_count_ > 0) os << " /";
2601 for (int i = values_.length() - 1; i >= 0; --i) {
2602 if (HasAssignedIndexAt(i)) {
2603 os << " var[" << GetAssignedIndexAt(i) << "] = ";
2604 } else {
2605 os << " push ";
2606 }
2607 os << NameOf(values_[i]);
2608 if (i > 0) os << ",";
2609 }
2610 }
2611 return os;
2612 }
2613
2614
ReplayEnvironment(HEnvironment * env)2615 void HSimulate::ReplayEnvironment(HEnvironment* env) {
2616 if (is_done_with_replay()) return;
2617 DCHECK(env != NULL);
2618 env->set_ast_id(ast_id());
2619 env->Drop(pop_count());
2620 for (int i = values()->length() - 1; i >= 0; --i) {
2621 HValue* value = values()->at(i);
2622 if (HasAssignedIndexAt(i)) {
2623 env->Bind(GetAssignedIndexAt(i), value);
2624 } else {
2625 env->Push(value);
2626 }
2627 }
2628 set_done_with_replay();
2629 }
2630
2631
ReplayEnvironmentNested(const ZoneList<HValue * > * values,HCapturedObject * other)2632 static void ReplayEnvironmentNested(const ZoneList<HValue*>* values,
2633 HCapturedObject* other) {
2634 for (int i = 0; i < values->length(); ++i) {
2635 HValue* value = values->at(i);
2636 if (value->IsCapturedObject()) {
2637 if (HCapturedObject::cast(value)->capture_id() == other->capture_id()) {
2638 values->at(i) = other;
2639 } else {
2640 ReplayEnvironmentNested(HCapturedObject::cast(value)->values(), other);
2641 }
2642 }
2643 }
2644 }
2645
2646
2647 // Replay captured objects by replacing all captured objects with the
2648 // same capture id in the current and all outer environments.
ReplayEnvironment(HEnvironment * env)2649 void HCapturedObject::ReplayEnvironment(HEnvironment* env) {
2650 DCHECK(env != NULL);
2651 while (env != NULL) {
2652 ReplayEnvironmentNested(env->values(), this);
2653 env = env->outer();
2654 }
2655 }
2656
2657
PrintDataTo(std::ostream & os) const2658 std::ostream& HCapturedObject::PrintDataTo(std::ostream& os) const { // NOLINT
2659 os << "#" << capture_id() << " ";
2660 return HDematerializedObject::PrintDataTo(os);
2661 }
2662
2663
RegisterReturnTarget(HBasicBlock * return_target,Zone * zone)2664 void HEnterInlined::RegisterReturnTarget(HBasicBlock* return_target,
2665 Zone* zone) {
2666 DCHECK(return_target->IsInlineReturnTarget());
2667 return_targets_.Add(return_target, zone);
2668 }
2669
2670
PrintDataTo(std::ostream & os) const2671 std::ostream& HEnterInlined::PrintDataTo(std::ostream& os) const { // NOLINT
2672 return os << function()->debug_name()->ToCString().get();
2673 }
2674
2675
IsInteger32(double value)2676 static bool IsInteger32(double value) {
2677 if (value >= std::numeric_limits<int32_t>::min() &&
2678 value <= std::numeric_limits<int32_t>::max()) {
2679 double roundtrip_value = static_cast<double>(static_cast<int32_t>(value));
2680 return bit_cast<int64_t>(roundtrip_value) == bit_cast<int64_t>(value);
2681 }
2682 return false;
2683 }
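
// Illustrative values (not part of the original source): IsInteger32(1.0) and
// IsInteger32(-2147483648.0) are true, while IsInteger32(1.5),
// IsInteger32(4294967296.0) and IsInteger32(-0.0) are false; -0.0 fails
// because its bit pattern differs from that of the round-tripped +0.0.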
2684
2685
HConstant(Special special)2686 HConstant::HConstant(Special special)
2687 : HTemplateInstruction<0>(HType::TaggedNumber()),
2688 object_(Handle<Object>::null()),
2689 object_map_(Handle<Map>::null()),
2690 bit_field_(HasDoubleValueField::encode(true) |
2691 InstanceTypeField::encode(kUnknownInstanceType)),
2692 int32_value_(0) {
2693 DCHECK_EQ(kHoleNaN, special);
2694 std::memcpy(&double_value_, &kHoleNanInt64, sizeof(double_value_));
2695 Initialize(Representation::Double());
2696 }
2697
2698
HConstant(Handle<Object> object,Representation r)2699 HConstant::HConstant(Handle<Object> object, Representation r)
2700 : HTemplateInstruction<0>(HType::FromValue(object)),
2701 object_(Unique<Object>::CreateUninitialized(object)),
2702 object_map_(Handle<Map>::null()),
2703 bit_field_(
2704 HasStableMapValueField::encode(false) |
2705 HasSmiValueField::encode(false) | HasInt32ValueField::encode(false) |
2706 HasDoubleValueField::encode(false) |
2707 HasExternalReferenceValueField::encode(false) |
2708 IsNotInNewSpaceField::encode(true) |
2709 BooleanValueField::encode(object->BooleanValue()) |
2710 IsUndetectableField::encode(false) | IsCallableField::encode(false) |
2711 InstanceTypeField::encode(kUnknownInstanceType)) {
2712 if (object->IsHeapObject()) {
2713 Handle<HeapObject> heap_object = Handle<HeapObject>::cast(object);
2714 Isolate* isolate = heap_object->GetIsolate();
2715 Handle<Map> map(heap_object->map(), isolate);
2716 bit_field_ = IsNotInNewSpaceField::update(
2717 bit_field_, !isolate->heap()->InNewSpace(*object));
2718 bit_field_ = InstanceTypeField::update(bit_field_, map->instance_type());
2719 bit_field_ =
2720 IsUndetectableField::update(bit_field_, map->is_undetectable());
2721 bit_field_ = IsCallableField::update(bit_field_, map->is_callable());
2722 if (map->is_stable()) object_map_ = Unique<Map>::CreateImmovable(map);
2723 bit_field_ = HasStableMapValueField::update(
2724 bit_field_,
2725 HasMapValue() && Handle<Map>::cast(heap_object)->is_stable());
2726 }
2727 if (object->IsNumber()) {
2728 double n = object->Number();
2729 bool has_int32_value = IsInteger32(n);
2730 bit_field_ = HasInt32ValueField::update(bit_field_, has_int32_value);
2731 int32_value_ = DoubleToInt32(n);
2732 bit_field_ = HasSmiValueField::update(
2733 bit_field_, has_int32_value && Smi::IsValid(int32_value_));
2734 double_value_ = n;
2735 bit_field_ = HasDoubleValueField::update(bit_field_, true);
2736 // TODO(titzer): if this heap number is new space, tenure a new one.
2737 }
2738
2739 Initialize(r);
2740 }
2741
2742
HConstant(Unique<Object> object,Unique<Map> object_map,bool has_stable_map_value,Representation r,HType type,bool is_not_in_new_space,bool boolean_value,bool is_undetectable,InstanceType instance_type)2743 HConstant::HConstant(Unique<Object> object, Unique<Map> object_map,
2744 bool has_stable_map_value, Representation r, HType type,
2745 bool is_not_in_new_space, bool boolean_value,
2746 bool is_undetectable, InstanceType instance_type)
2747 : HTemplateInstruction<0>(type),
2748 object_(object),
2749 object_map_(object_map),
2750 bit_field_(HasStableMapValueField::encode(has_stable_map_value) |
2751 HasSmiValueField::encode(false) |
2752 HasInt32ValueField::encode(false) |
2753 HasDoubleValueField::encode(false) |
2754 HasExternalReferenceValueField::encode(false) |
2755 IsNotInNewSpaceField::encode(is_not_in_new_space) |
2756 BooleanValueField::encode(boolean_value) |
2757 IsUndetectableField::encode(is_undetectable) |
2758 InstanceTypeField::encode(instance_type)) {
2759 DCHECK(!object.handle().is_null());
2760 DCHECK(!type.IsTaggedNumber() || type.IsNone());
2761 Initialize(r);
2762 }
2763
2764
HConstant(int32_t integer_value,Representation r,bool is_not_in_new_space,Unique<Object> object)2765 HConstant::HConstant(int32_t integer_value, Representation r,
2766 bool is_not_in_new_space, Unique<Object> object)
2767 : object_(object),
2768 object_map_(Handle<Map>::null()),
2769 bit_field_(HasStableMapValueField::encode(false) |
2770 HasSmiValueField::encode(Smi::IsValid(integer_value)) |
2771 HasInt32ValueField::encode(true) |
2772 HasDoubleValueField::encode(true) |
2773 HasExternalReferenceValueField::encode(false) |
2774 IsNotInNewSpaceField::encode(is_not_in_new_space) |
2775 BooleanValueField::encode(integer_value != 0) |
2776 IsUndetectableField::encode(false) |
2777 InstanceTypeField::encode(kUnknownInstanceType)),
2778 int32_value_(integer_value),
2779 double_value_(FastI2D(integer_value)) {
2780 // It's possible to create a constant with a value in Smi-range but stored
2781 // in a (pre-existing) HeapNumber. See crbug.com/349878.
2782 bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
2783 bool is_smi = HasSmiValue() && !could_be_heapobject;
2784 set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
2785 Initialize(r);
2786 }
2787
2788
HConstant(double double_value,Representation r,bool is_not_in_new_space,Unique<Object> object)2789 HConstant::HConstant(double double_value, Representation r,
2790 bool is_not_in_new_space, Unique<Object> object)
2791 : object_(object),
2792 object_map_(Handle<Map>::null()),
2793 bit_field_(HasStableMapValueField::encode(false) |
2794 HasInt32ValueField::encode(IsInteger32(double_value)) |
2795 HasDoubleValueField::encode(true) |
2796 HasExternalReferenceValueField::encode(false) |
2797 IsNotInNewSpaceField::encode(is_not_in_new_space) |
2798 BooleanValueField::encode(double_value != 0 &&
2799 !std::isnan(double_value)) |
2800 IsUndetectableField::encode(false) |
2801 InstanceTypeField::encode(kUnknownInstanceType)),
2802 int32_value_(DoubleToInt32(double_value)),
2803 double_value_(double_value) {
2804 bit_field_ = HasSmiValueField::update(
2805 bit_field_, HasInteger32Value() && Smi::IsValid(int32_value_));
2806 // It's possible to create a constant with a value in Smi-range but stored
2807 // in a (pre-existing) HeapNumber. See crbug.com/349878.
2808 bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
2809 bool is_smi = HasSmiValue() && !could_be_heapobject;
2810 set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
2811 Initialize(r);
2812 }
2813
2814
HConstant(ExternalReference reference)2815 HConstant::HConstant(ExternalReference reference)
2816 : HTemplateInstruction<0>(HType::Any()),
2817 object_(Unique<Object>(Handle<Object>::null())),
2818 object_map_(Handle<Map>::null()),
2819 bit_field_(
2820 HasStableMapValueField::encode(false) |
2821 HasSmiValueField::encode(false) | HasInt32ValueField::encode(false) |
2822 HasDoubleValueField::encode(false) |
2823 HasExternalReferenceValueField::encode(true) |
2824 IsNotInNewSpaceField::encode(true) | BooleanValueField::encode(true) |
2825 IsUndetectableField::encode(false) |
2826 InstanceTypeField::encode(kUnknownInstanceType)),
2827 external_reference_value_(reference) {
2828 Initialize(Representation::External());
2829 }
2830
2831
Initialize(Representation r)2832 void HConstant::Initialize(Representation r) {
2833 if (r.IsNone()) {
2834 if (HasSmiValue() && SmiValuesAre31Bits()) {
2835 r = Representation::Smi();
2836 } else if (HasInteger32Value()) {
2837 r = Representation::Integer32();
2838 } else if (HasDoubleValue()) {
2839 r = Representation::Double();
2840 } else if (HasExternalReferenceValue()) {
2841 r = Representation::External();
2842 } else {
2843 Handle<Object> object = object_.handle();
2844 if (object->IsJSObject()) {
2845 // Try to eagerly migrate JSObjects that have deprecated maps.
2846 Handle<JSObject> js_object = Handle<JSObject>::cast(object);
2847 if (js_object->map()->is_deprecated()) {
2848 JSObject::TryMigrateInstance(js_object);
2849 }
2850 }
2851 r = Representation::Tagged();
2852 }
2853 }
2854 if (r.IsSmi()) {
2855 // If we have an existing handle, zap it, because it might be a heap
2856 // number which we must not re-use when copying this HConstant to
2857 // Tagged representation later, because having Smi representation now
2858 // could cause heap object checks not to get emitted.
2859 object_ = Unique<Object>(Handle<Object>::null());
2860 }
2861 if (r.IsSmiOrInteger32() && object_.handle().is_null()) {
2862 // If it's not a heap object, it can't be in new space.
2863 bit_field_ = IsNotInNewSpaceField::update(bit_field_, true);
2864 }
2865 set_representation(r);
2866 SetFlag(kUseGVN);
2867 }
2868
2869
ImmortalImmovable() const2870 bool HConstant::ImmortalImmovable() const {
2871 if (HasInteger32Value()) {
2872 return false;
2873 }
2874 if (HasDoubleValue()) {
2875 if (IsSpecialDouble()) {
2876 return true;
2877 }
2878 return false;
2879 }
2880 if (HasExternalReferenceValue()) {
2881 return false;
2882 }
2883
2884 DCHECK(!object_.handle().is_null());
2885 Heap* heap = isolate()->heap();
2886 DCHECK(!object_.IsKnownGlobal(heap->minus_zero_value()));
2887 DCHECK(!object_.IsKnownGlobal(heap->nan_value()));
2888 return
2889 #define IMMORTAL_IMMOVABLE_ROOT(name) \
2890 object_.IsKnownGlobal(heap->root(Heap::k##name##RootIndex)) ||
2891 IMMORTAL_IMMOVABLE_ROOT_LIST(IMMORTAL_IMMOVABLE_ROOT)
2892 #undef IMMORTAL_IMMOVABLE_ROOT
2893 #define INTERNALIZED_STRING(name, value) \
2894 object_.IsKnownGlobal(heap->name()) ||
2895 INTERNALIZED_STRING_LIST(INTERNALIZED_STRING)
2896 #undef INTERNALIZED_STRING
2897 #define STRING_TYPE(NAME, size, name, Name) \
2898 object_.IsKnownGlobal(heap->name##_map()) ||
2899 STRING_TYPE_LIST(STRING_TYPE)
2900 #undef STRING_TYPE
2901 false;
2902 }
2903
2904
EmitAtUses()2905 bool HConstant::EmitAtUses() {
2906 DCHECK(IsLinked());
2907 if (block()->graph()->has_osr() &&
2908 block()->graph()->IsStandardConstant(this)) {
2909 // TODO(titzer): this seems like a hack that should be fixed by custom OSR.
2910 return true;
2911 }
2912 if (HasNoUses()) return true;
2913 if (IsCell()) return false;
2914 if (representation().IsDouble()) return false;
2915 if (representation().IsExternal()) return false;
2916 return true;
2917 }
2918
2919
CopyToRepresentation(Representation r,Zone * zone) const2920 HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const {
2921 if (r.IsSmi() && !HasSmiValue()) return NULL;
2922 if (r.IsInteger32() && !HasInteger32Value()) return NULL;
2923 if (r.IsDouble() && !HasDoubleValue()) return NULL;
2924 if (r.IsExternal() && !HasExternalReferenceValue()) return NULL;
2925 if (HasInteger32Value()) {
2926 return new (zone) HConstant(int32_value_, r, NotInNewSpace(), object_);
2927 }
2928 if (HasDoubleValue()) {
2929 return new (zone) HConstant(double_value_, r, NotInNewSpace(), object_);
2930 }
2931 if (HasExternalReferenceValue()) {
2932 return new(zone) HConstant(external_reference_value_);
2933 }
2934 DCHECK(!object_.handle().is_null());
2935 return new (zone) HConstant(object_, object_map_, HasStableMapValue(), r,
2936 type_, NotInNewSpace(), BooleanValue(),
2937 IsUndetectable(), GetInstanceType());
2938 }
2939
2940
CopyToTruncatedInt32(Zone * zone)2941 Maybe<HConstant*> HConstant::CopyToTruncatedInt32(Zone* zone) {
2942 HConstant* res = NULL;
2943 if (HasInteger32Value()) {
2944 res = new (zone) HConstant(int32_value_, Representation::Integer32(),
2945 NotInNewSpace(), object_);
2946 } else if (HasDoubleValue()) {
2947 res = new (zone)
2948 HConstant(DoubleToInt32(double_value_), Representation::Integer32(),
2949 NotInNewSpace(), object_);
2950 }
2951 return res != NULL ? Just(res) : Nothing<HConstant*>();
2952 }
2953
2954
CopyToTruncatedNumber(Isolate * isolate,Zone * zone)2955 Maybe<HConstant*> HConstant::CopyToTruncatedNumber(Isolate* isolate,
2956 Zone* zone) {
2957 HConstant* res = NULL;
2958 Handle<Object> handle = this->handle(isolate);
2959 if (handle->IsBoolean()) {
2960 res = handle->BooleanValue() ?
2961 new(zone) HConstant(1) : new(zone) HConstant(0);
2962 } else if (handle->IsUndefined()) {
2963 res = new (zone) HConstant(std::numeric_limits<double>::quiet_NaN());
2964 } else if (handle->IsNull()) {
2965 res = new(zone) HConstant(0);
2966 }
2967 return res != NULL ? Just(res) : Nothing<HConstant*>();
2968 }
2969
2970
PrintDataTo(std::ostream & os) const2971 std::ostream& HConstant::PrintDataTo(std::ostream& os) const { // NOLINT
2972 if (HasInteger32Value()) {
2973 os << int32_value_ << " ";
2974 } else if (HasDoubleValue()) {
2975 os << double_value_ << " ";
2976 } else if (HasExternalReferenceValue()) {
2977 os << reinterpret_cast<void*>(external_reference_value_.address()) << " ";
2978 } else {
2979 // The handle() method is silently and lazily mutating the object.
2980 Handle<Object> h = const_cast<HConstant*>(this)->handle(isolate());
2981 os << Brief(*h) << " ";
2982 if (HasStableMapValue()) os << "[stable-map] ";
2983 if (HasObjectMap()) os << "[map " << *ObjectMap().handle() << "] ";
2984 }
2985 if (!NotInNewSpace()) os << "[new space] ";
2986 return os;
2987 }
2988
2989
PrintDataTo(std::ostream & os) const2990 std::ostream& HBinaryOperation::PrintDataTo(std::ostream& os) const { // NOLINT
2991 os << NameOf(left()) << " " << NameOf(right());
2992 if (CheckFlag(kCanOverflow)) os << " !";
2993 if (CheckFlag(kBailoutOnMinusZero)) os << " -0?";
2994 return os;
2995 }
2996
2997
InferRepresentation(HInferRepresentationPhase * h_infer)2998 void HBinaryOperation::InferRepresentation(HInferRepresentationPhase* h_infer) {
2999 DCHECK(CheckFlag(kFlexibleRepresentation));
3000 Representation new_rep = RepresentationFromInputs();
3001 UpdateRepresentation(new_rep, h_infer, "inputs");
3002
3003 if (representation().IsSmi() && HasNonSmiUse()) {
3004 UpdateRepresentation(
3005 Representation::Integer32(), h_infer, "use requirements");
3006 }
3007
3008 if (observed_output_representation_.IsNone()) {
3009 new_rep = RepresentationFromUses();
3010 UpdateRepresentation(new_rep, h_infer, "uses");
3011 } else {
3012 new_rep = RepresentationFromOutput();
3013 UpdateRepresentation(new_rep, h_infer, "output");
3014 }
3015 }
3016
3017
RepresentationFromInputs()3018 Representation HBinaryOperation::RepresentationFromInputs() {
3019 // Determine the worst case of observed input representations and
3020 // the currently assumed output representation.
3021 Representation rep = representation();
3022 for (int i = 1; i <= 2; ++i) {
3023 rep = rep.generalize(observed_input_representation(i));
3024 }
3025   // If either of the actual input representations is more general than
3026   // what we have so far but is not Tagged, use that representation instead.
3027 Representation left_rep = left()->representation();
3028 Representation right_rep = right()->representation();
3029 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
3030 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
3031
3032 return rep;
3033 }
3034
3035
IgnoreObservedOutputRepresentation(Representation current_rep)3036 bool HBinaryOperation::IgnoreObservedOutputRepresentation(
3037 Representation current_rep) {
3038 return ((current_rep.IsInteger32() && CheckUsesForFlag(kTruncatingToInt32)) ||
3039 (current_rep.IsSmi() && CheckUsesForFlag(kTruncatingToSmi))) &&
3040 // Mul in Integer32 mode would be too precise.
3041 (!this->IsMul() || HMul::cast(this)->MulMinusOne());
3042 }
3043
3044
RepresentationFromOutput()3045 Representation HBinaryOperation::RepresentationFromOutput() {
3046 Representation rep = representation();
3047 // Consider observed output representation, but ignore it if it's Double,
3048 // this instruction is not a division, and all its uses are truncating
3049 // to Integer32.
3050 if (observed_output_representation_.is_more_general_than(rep) &&
3051 !IgnoreObservedOutputRepresentation(rep)) {
3052 return observed_output_representation_;
3053 }
3054 return Representation::None();
3055 }
3056
3057
AssumeRepresentation(Representation r)3058 void HBinaryOperation::AssumeRepresentation(Representation r) {
3059 set_observed_input_representation(1, r);
3060 set_observed_input_representation(2, r);
3061 HValue::AssumeRepresentation(r);
3062 }
3063
3064
InferRepresentation(HInferRepresentationPhase * h_infer)3065 void HMathMinMax::InferRepresentation(HInferRepresentationPhase* h_infer) {
3066 DCHECK(CheckFlag(kFlexibleRepresentation));
3067 Representation new_rep = RepresentationFromInputs();
3068 UpdateRepresentation(new_rep, h_infer, "inputs");
3069 // Do not care about uses.
3070 }
3071
3072
InferRange(Zone * zone)3073 Range* HBitwise::InferRange(Zone* zone) {
3074 if (op() == Token::BIT_XOR) {
3075 if (left()->HasRange() && right()->HasRange()) {
3076 // The maximum value has the high bit, and all bits below, set:
3077 // (1 << high) - 1.
3078 // If the range can be negative, the minimum int is a negative number with
3079 // the high bit, and all bits below, unset:
3080 // -(1 << high).
3081 // If it cannot be negative, conservatively choose 0 as minimum int.
3082 int64_t left_upper = left()->range()->upper();
3083 int64_t left_lower = left()->range()->lower();
3084 int64_t right_upper = right()->range()->upper();
3085 int64_t right_lower = right()->range()->lower();
3086
3087 if (left_upper < 0) left_upper = ~left_upper;
3088 if (left_lower < 0) left_lower = ~left_lower;
3089 if (right_upper < 0) right_upper = ~right_upper;
3090 if (right_lower < 0) right_lower = ~right_lower;
3091
3092 int high = MostSignificantBit(
3093 static_cast<uint32_t>(
3094 left_upper | left_lower | right_upper | right_lower));
3095
3096 int64_t limit = 1;
3097 limit <<= high;
3098 int32_t min = (left()->range()->CanBeNegative() ||
3099 right()->range()->CanBeNegative())
3100 ? static_cast<int32_t>(-limit) : 0;
3101 return new(zone) Range(min, static_cast<int32_t>(limit - 1));
3102 }
3103 Range* result = HValue::InferRange(zone);
3104 result->set_can_be_minus_zero(false);
3105 return result;
3106 }
3107 const int32_t kDefaultMask = static_cast<int32_t>(0xffffffff);
3108 int32_t left_mask = (left()->range() != NULL)
3109 ? left()->range()->Mask()
3110 : kDefaultMask;
3111 int32_t right_mask = (right()->range() != NULL)
3112 ? right()->range()->Mask()
3113 : kDefaultMask;
3114 int32_t result_mask = (op() == Token::BIT_AND)
3115 ? left_mask & right_mask
3116 : left_mask | right_mask;
3117 if (result_mask >= 0) return new(zone) Range(0, result_mask);
3118
3119 Range* result = HValue::InferRange(zone);
3120 result->set_can_be_minus_zero(false);
3121 return result;
3122 }
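
// Illustrative bound (not part of the original source): the XOR reasoning
// above relies on the fact that x ^ y never sets a bit that is clear in both
// inputs, so if both operands are known non-negative and fit in 4 bits (at
// most 15), the result also fits in 4 bits and [0, (1 << 4) - 1] is a safe
// range; a possibly negative operand widens the lower bound to -(1 << 4).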
3123
3124
InferRange(Zone * zone)3125 Range* HSar::InferRange(Zone* zone) {
3126 if (right()->IsConstant()) {
3127 HConstant* c = HConstant::cast(right());
3128 if (c->HasInteger32Value()) {
3129 Range* result = (left()->range() != NULL)
3130 ? left()->range()->Copy(zone)
3131 : new(zone) Range();
3132 result->Sar(c->Integer32Value());
3133 return result;
3134 }
3135 }
3136 return HValue::InferRange(zone);
3137 }
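
// Illustrative example (not part of the original source): an arithmetic shift
// by the constant 2 applied to a left range of [-100, 50] yields [-25, 12],
// since the shift rounds both bounds towards negative infinity.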
3138
3139
InferRange(Zone * zone)3140 Range* HShr::InferRange(Zone* zone) {
3141 if (right()->IsConstant()) {
3142 HConstant* c = HConstant::cast(right());
3143 if (c->HasInteger32Value()) {
3144 int shift_count = c->Integer32Value() & 0x1f;
3145 if (left()->range()->CanBeNegative()) {
3146 // Only compute bounds if the result always fits into an int32.
3147 return (shift_count >= 1)
3148 ? new(zone) Range(0,
3149 static_cast<uint32_t>(0xffffffff) >> shift_count)
3150 : new(zone) Range();
3151 } else {
3152 // For positive inputs we can use the >> operator.
3153 Range* result = (left()->range() != NULL)
3154 ? left()->range()->Copy(zone)
3155 : new(zone) Range();
3156 result->Sar(c->Integer32Value());
3157 return result;
3158 }
3159 }
3160 }
3161 return HValue::InferRange(zone);
3162 }
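
// Illustrative example (not part of the original source): a logical shift by
// the constant 3 of a possibly negative value produces at most the top 29
// bits of an unsigned 32-bit integer, so the range is
// [0, 0xffffffff >> 3] == [0, 0x1fffffff]; a shift count of 0 gives no such
// bound and falls back to a generic Range().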
3163
3164
InferRange(Zone * zone)3165 Range* HShl::InferRange(Zone* zone) {
3166 if (right()->IsConstant()) {
3167 HConstant* c = HConstant::cast(right());
3168 if (c->HasInteger32Value()) {
3169 Range* result = (left()->range() != NULL)
3170 ? left()->range()->Copy(zone)
3171 : new(zone) Range();
3172 result->Shl(c->Integer32Value());
3173 return result;
3174 }
3175 }
3176 return HValue::InferRange(zone);
3177 }
3178
3179
InferRange(Zone * zone)3180 Range* HLoadNamedField::InferRange(Zone* zone) {
3181 if (access().representation().IsInteger8()) {
3182 return new(zone) Range(kMinInt8, kMaxInt8);
3183 }
3184 if (access().representation().IsUInteger8()) {
3185 return new(zone) Range(kMinUInt8, kMaxUInt8);
3186 }
3187 if (access().representation().IsInteger16()) {
3188 return new(zone) Range(kMinInt16, kMaxInt16);
3189 }
3190 if (access().representation().IsUInteger16()) {
3191 return new(zone) Range(kMinUInt16, kMaxUInt16);
3192 }
3193 if (access().IsStringLength()) {
3194 return new(zone) Range(0, String::kMaxLength);
3195 }
3196 return HValue::InferRange(zone);
3197 }
3198
3199
InferRange(Zone * zone)3200 Range* HLoadKeyed::InferRange(Zone* zone) {
3201 switch (elements_kind()) {
3202 case INT8_ELEMENTS:
3203 return new(zone) Range(kMinInt8, kMaxInt8);
3204 case UINT8_ELEMENTS:
3205 case UINT8_CLAMPED_ELEMENTS:
3206 return new(zone) Range(kMinUInt8, kMaxUInt8);
3207 case INT16_ELEMENTS:
3208 return new(zone) Range(kMinInt16, kMaxInt16);
3209 case UINT16_ELEMENTS:
3210 return new(zone) Range(kMinUInt16, kMaxUInt16);
3211 default:
3212 return HValue::InferRange(zone);
3213 }
3214 }
3215
3216
PrintDataTo(std::ostream & os) const3217 std::ostream& HCompareGeneric::PrintDataTo(std::ostream& os) const { // NOLINT
3218 os << Token::Name(token()) << " ";
3219 return HBinaryOperation::PrintDataTo(os);
3220 }
3221
3222
PrintDataTo(std::ostream & os) const3223 std::ostream& HStringCompareAndBranch::PrintDataTo(
3224 std::ostream& os) const { // NOLINT
3225 os << Token::Name(token()) << " ";
3226 return HControlInstruction::PrintDataTo(os);
3227 }
3228
3229
PrintDataTo(std::ostream & os) const3230 std::ostream& HCompareNumericAndBranch::PrintDataTo(
3231 std::ostream& os) const { // NOLINT
3232 os << Token::Name(token()) << " " << NameOf(left()) << " " << NameOf(right());
3233 return HControlInstruction::PrintDataTo(os);
3234 }
3235
3236
PrintDataTo(std::ostream & os) const3237 std::ostream& HCompareObjectEqAndBranch::PrintDataTo(
3238 std::ostream& os) const { // NOLINT
3239 os << NameOf(left()) << " " << NameOf(right());
3240 return HControlInstruction::PrintDataTo(os);
3241 }
3242
3243
KnownSuccessorBlock(HBasicBlock ** block)3244 bool HCompareObjectEqAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3245 if (known_successor_index() != kNoKnownSuccessorIndex) {
3246 *block = SuccessorAt(known_successor_index());
3247 return true;
3248 }
3249 if (FLAG_fold_constants && left()->IsConstant() && right()->IsConstant()) {
3250 *block = HConstant::cast(left())->DataEquals(HConstant::cast(right()))
3251 ? FirstSuccessor() : SecondSuccessor();
3252 return true;
3253 }
3254 *block = NULL;
3255 return false;
3256 }
3257
3258
KnownSuccessorBlock(HBasicBlock ** block)3259 bool HIsStringAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3260 if (known_successor_index() != kNoKnownSuccessorIndex) {
3261 *block = SuccessorAt(known_successor_index());
3262 return true;
3263 }
3264 if (FLAG_fold_constants && value()->IsConstant()) {
3265 *block = HConstant::cast(value())->HasStringValue()
3266 ? FirstSuccessor() : SecondSuccessor();
3267 return true;
3268 }
3269 if (value()->type().IsString()) {
3270 *block = FirstSuccessor();
3271 return true;
3272 }
3273 if (value()->type().IsSmi() ||
3274 value()->type().IsNull() ||
3275 value()->type().IsBoolean() ||
3276 value()->type().IsUndefined() ||
3277 value()->type().IsJSReceiver()) {
3278 *block = SecondSuccessor();
3279 return true;
3280 }
3281 *block = NULL;
3282 return false;
3283 }
3284
3285
KnownSuccessorBlock(HBasicBlock ** block)3286 bool HIsUndetectableAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3287 if (FLAG_fold_constants && value()->IsConstant()) {
3288 *block = HConstant::cast(value())->IsUndetectable()
3289 ? FirstSuccessor() : SecondSuccessor();
3290 return true;
3291 }
3292 *block = NULL;
3293 return false;
3294 }
3295
3296
KnownSuccessorBlock(HBasicBlock ** block)3297 bool HHasInstanceTypeAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3298 if (FLAG_fold_constants && value()->IsConstant()) {
3299 InstanceType type = HConstant::cast(value())->GetInstanceType();
3300 *block = (from_ <= type) && (type <= to_)
3301 ? FirstSuccessor() : SecondSuccessor();
3302 return true;
3303 }
3304 *block = NULL;
3305 return false;
3306 }
3307
3308
InferRepresentation(HInferRepresentationPhase * h_infer)3309 void HCompareHoleAndBranch::InferRepresentation(
3310 HInferRepresentationPhase* h_infer) {
3311 ChangeRepresentation(value()->representation());
3312 }
3313
3314
KnownSuccessorBlock(HBasicBlock ** block)3315 bool HCompareNumericAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3316 if (left() == right() &&
3317 left()->representation().IsSmiOrInteger32()) {
3318 *block = (token() == Token::EQ ||
3319 token() == Token::EQ_STRICT ||
3320 token() == Token::LTE ||
3321 token() == Token::GTE)
3322 ? FirstSuccessor() : SecondSuccessor();
3323 return true;
3324 }
3325 *block = NULL;
3326 return false;
3327 }
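
// Illustrative note (not part of the original source): comparing a value with
// itself using ==, ===, <= or >= always succeeds for Smi/Integer32 values, so
// the first successor is taken; the fold is restricted to Smi/Integer32
// because a double input might be NaN, for which x == x is false.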
3328
3329
KnownSuccessorBlock(HBasicBlock ** block)3330 bool HCompareMinusZeroAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3331 if (FLAG_fold_constants && value()->IsConstant()) {
3332 HConstant* constant = HConstant::cast(value());
3333 if (constant->HasDoubleValue()) {
3334 *block = IsMinusZero(constant->DoubleValue())
3335 ? FirstSuccessor() : SecondSuccessor();
3336 return true;
3337 }
3338 }
3339 if (value()->representation().IsSmiOrInteger32()) {
3340 // A Smi or Integer32 cannot contain minus zero.
3341 *block = SecondSuccessor();
3342 return true;
3343 }
3344 *block = NULL;
3345 return false;
3346 }
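
// Illustrative sketch (comment only, not part of the build): IsMinusZero has
// to look at the sign bit, because -0.0 == 0.0 compares equal both in C++ and
// in JS. A minimal way to express such a check (IsMinusZeroExample is a
// hypothetical name used only for illustration):
//
//   bool IsMinusZeroExample(double d) {
//     return d == 0.0 && std::signbit(d);  // true only for -0.0
//   }
//
// Smi and Integer32 values have no signed zero at all, which is why the
// representation check above can statically pick SecondSuccessor.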
3347
3348
void HCompareMinusZeroAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
3351 ChangeRepresentation(value()->representation());
3352 }
3353
3354
std::ostream& HGoto::PrintDataTo(std::ostream& os) const {  // NOLINT
3356 return os << *SuccessorAt(0);
3357 }
3358
3359
void HCompareNumericAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
3362 Representation left_rep = left()->representation();
3363 Representation right_rep = right()->representation();
3364 Representation observed_left = observed_input_representation(0);
3365 Representation observed_right = observed_input_representation(1);
3366
3367 Representation rep = Representation::None();
3368 rep = rep.generalize(observed_left);
3369 rep = rep.generalize(observed_right);
3370 if (rep.IsNone() || rep.IsSmiOrInteger32()) {
3371 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
3372 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
3373 } else {
3374 rep = Representation::Double();
3375 }
3376
3377 if (rep.IsDouble()) {
    // According to the ES5 spec (11.9.3, 11.8.5), equality comparisons (==,
    // === and !=) have special handling of undefined, e.g. undefined ==
    // undefined is 'true'. Relational comparisons have different semantics,
    // first calling ToPrimitive() on their arguments. The standard Crankshaft
    // tagged-to-double conversion, used to ensure that the inputs of
    // HCompareNumericAndBranch are doubles, converts 'undefined' to NaN.
    // That is compatible out-of-the-box with ordered relational comparisons
    // (<, >, <=, >=). However, for equality comparisons (and for 'in' and
    // 'instanceof') it is not consistent with the spec: it would cause
    // undefined == undefined (which should be true) to be evaluated as
    // NaN == NaN (false). Therefore, any comparison other than an ordered
    // relational comparison must cause a deopt when one of its arguments is
    // undefined. See also v8:1434.
3391 if (Token::IsOrderedRelationalCompareOp(token_) && !is_strong(strength())) {
3392 SetFlag(kAllowUndefinedAsNaN);
3393 }
3394 }
3395 ChangeRepresentation(rep);
3396 }
3397
3398
std::ostream& HParameter::PrintDataTo(std::ostream& os) const {  // NOLINT
3400 return os << index();
3401 }
3402
3403
std::ostream& HLoadNamedField::PrintDataTo(std::ostream& os) const {  // NOLINT
3405 os << NameOf(object()) << access_;
3406
3407 if (maps() != NULL) {
3408 os << " [" << *maps()->at(0).handle();
3409 for (int i = 1; i < maps()->size(); ++i) {
3410 os << "," << *maps()->at(i).handle();
3411 }
3412 os << "]";
3413 }
3414
3415 if (HasDependency()) os << " " << NameOf(dependency());
3416 return os;
3417 }
3418
3419
std::ostream& HLoadNamedGeneric::PrintDataTo(
    std::ostream& os) const {  // NOLINT
3422 Handle<String> n = Handle<String>::cast(name());
3423 return os << NameOf(object()) << "." << n->ToCString().get();
3424 }
3425
3426
std::ostream& HLoadKeyed::PrintDataTo(std::ostream& os) const {  // NOLINT
3428 if (!is_fixed_typed_array()) {
3429 os << NameOf(elements());
3430 } else {
3431 DCHECK(elements_kind() >= FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND &&
3432 elements_kind() <= LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
3433 os << NameOf(elements()) << "." << ElementsKindToString(elements_kind());
3434 }
3435
3436 os << "[" << NameOf(key());
3437 if (IsDehoisted()) os << " + " << base_offset();
3438 os << "]";
3439
3440 if (HasDependency()) os << " " << NameOf(dependency());
3441 if (RequiresHoleCheck()) os << " check_hole";
3442 return os;
3443 }
3444
3445
bool HLoadKeyed::TryIncreaseBaseOffset(uint32_t increase_by_value) {
  // The base offset is usually just the size of the array header, except
  // that dehoisting adds an additional offset due to array index key
  // manipulation, in which case it becomes (array header size +
  // constant-offset-from-key * kPointerSize).
3451 uint32_t base_offset = BaseOffsetField::decode(bit_field_);
3452 v8::base::internal::CheckedNumeric<uint32_t> addition_result = base_offset;
3453 addition_result += increase_by_value;
3454 if (!addition_result.IsValid()) return false;
3455 base_offset = addition_result.ValueOrDie();
3456 if (!BaseOffsetField::is_valid(base_offset)) return false;
3457 bit_field_ = BaseOffsetField::update(bit_field_, base_offset);
3458 return true;
3459 }
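
// Illustrative sketch (comment only, not part of the build) of the
// overflow-checked update above, assuming a dehoisted access like a[i + 2]
// on a FixedArray:
//
//   // base_offset starts as FixedArray::kHeaderSize; dehoisting asks for
//   // base_offset += 2 * kPointerSize.
//   // TryIncreaseBaseOffset(2 * kPointerSize) succeeds only if the checked
//   // uint32_t addition neither wraps around nor leaves the range that
//   // BaseOffsetField can encode; otherwise (an assumption about the caller,
//   // for illustration) the offset stays in the key computation instead.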
3460
3461
bool HLoadKeyed::UsesMustHandleHole() const {
3463 if (IsFastPackedElementsKind(elements_kind())) {
3464 return false;
3465 }
3466
3467 if (IsFixedTypedArrayElementsKind(elements_kind())) {
3468 return false;
3469 }
3470
3471 if (hole_mode() == ALLOW_RETURN_HOLE) {
3472 if (IsFastDoubleElementsKind(elements_kind())) {
3473 return AllUsesCanTreatHoleAsNaN();
3474 }
3475 return true;
3476 }
3477
3478 if (IsFastDoubleElementsKind(elements_kind())) {
3479 return false;
3480 }
3481
3482 // Holes are only returned as tagged values.
3483 if (!representation().IsTagged()) {
3484 return false;
3485 }
3486
3487 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3488 HValue* use = it.value();
3489 if (!use->IsChange()) return false;
3490 }
3491
3492 return true;
3493 }
3494
3495
bool HLoadKeyed::AllUsesCanTreatHoleAsNaN() const {
3497 return IsFastDoubleElementsKind(elements_kind()) &&
3498 CheckUsesForFlag(HValue::kAllowUndefinedAsNaN);
3499 }
3500
3501
bool HLoadKeyed::RequiresHoleCheck() const {
3503 if (IsFastPackedElementsKind(elements_kind())) {
3504 return false;
3505 }
3506
3507 if (IsFixedTypedArrayElementsKind(elements_kind())) {
3508 return false;
3509 }
3510
3511 if (hole_mode() == CONVERT_HOLE_TO_UNDEFINED) {
3512 return false;
3513 }
3514
3515 return !UsesMustHandleHole();
3516 }
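
// Illustrative JS sketch (comment only) of when a hole check is needed:
//
//   var a = [1, , 3];    // holey elements: a[1] is the hole, not undefined
//   a[1];                // must detect the hole and convert it to undefined
//
//   var b = [1, 2, 3];   // packed elements: no holes can be observed,
//   b[1];                // so RequiresHoleCheck() is false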
3517
3518
std::ostream& HLoadKeyedGeneric::PrintDataTo(
    std::ostream& os) const {  // NOLINT
3521 return os << NameOf(object()) << "[" << NameOf(key()) << "]";
3522 }
3523
3524
HValue* HLoadKeyedGeneric::Canonicalize() {
  // Recognize generic keyed loads that use a property name generated by a
  // for-in statement as a key, and rewrite them into fast property loads
  // by index.
3529 if (key()->IsLoadKeyed()) {
3530 HLoadKeyed* key_load = HLoadKeyed::cast(key());
3531 if (key_load->elements()->IsForInCacheArray()) {
3532 HForInCacheArray* names_cache =
3533 HForInCacheArray::cast(key_load->elements());
3534
3535 if (names_cache->enumerable() == object()) {
3536 HForInCacheArray* index_cache =
3537 names_cache->index_cache();
3538 HCheckMapValue* map_check = HCheckMapValue::New(
3539 block()->graph()->isolate(), block()->graph()->zone(),
3540 block()->graph()->GetInvalidContext(), object(),
3541 names_cache->map());
3542 HInstruction* index = HLoadKeyed::New(
3543 block()->graph()->isolate(), block()->graph()->zone(),
3544 block()->graph()->GetInvalidContext(), index_cache, key_load->key(),
3545 key_load->key(), nullptr, key_load->elements_kind());
3546 map_check->InsertBefore(this);
3547 index->InsertBefore(this);
3548 return Prepend(new(block()->zone()) HLoadFieldByIndex(
3549 object(), index));
3550 }
3551 }
3552 }
3553
3554 return this;
3555 }
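
// Illustrative JS sketch (comment only) of the pattern recognized above:
//
//   for (var key in obj) {
//     use(obj[key]);   // key comes straight from the for-in enum cache
//   }
//
// When the enumerated object is the same as the one being indexed, the
// generic keyed load can be replaced by a map check plus a load of the
// property by its index in the enum cache, avoiding the name lookup.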
3556
3557
std::ostream& HStoreNamedGeneric::PrintDataTo(
    std::ostream& os) const {  // NOLINT
3560 Handle<String> n = Handle<String>::cast(name());
3561 return os << NameOf(object()) << "." << n->ToCString().get() << " = "
3562 << NameOf(value());
3563 }
3564
3565
std::ostream& HStoreNamedField::PrintDataTo(std::ostream& os) const {  // NOLINT
3567 os << NameOf(object()) << access_ << " = " << NameOf(value());
3568 if (NeedsWriteBarrier()) os << " (write-barrier)";
3569 if (has_transition()) os << " (transition map " << *transition_map() << ")";
3570 return os;
3571 }
3572
3573
std::ostream& HStoreKeyed::PrintDataTo(std::ostream& os) const {  // NOLINT
3575 if (!is_fixed_typed_array()) {
3576 os << NameOf(elements());
3577 } else {
3578 DCHECK(elements_kind() >= FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND &&
3579 elements_kind() <= LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
3580 os << NameOf(elements()) << "." << ElementsKindToString(elements_kind());
3581 }
3582
3583 os << "[" << NameOf(key());
3584 if (IsDehoisted()) os << " + " << base_offset();
3585 return os << "] = " << NameOf(value());
3586 }
3587
3588
std::ostream& HStoreKeyedGeneric::PrintDataTo(
    std::ostream& os) const {  // NOLINT
3591 return os << NameOf(object()) << "[" << NameOf(key())
3592 << "] = " << NameOf(value());
3593 }
3594
3595
std::ostream& HTransitionElementsKind::PrintDataTo(
    std::ostream& os) const {  // NOLINT
3598 os << NameOf(object());
3599 ElementsKind from_kind = original_map().handle()->elements_kind();
3600 ElementsKind to_kind = transitioned_map().handle()->elements_kind();
3601 os << " " << *original_map().handle() << " ["
3602 << ElementsAccessor::ForKind(from_kind)->name() << "] -> "
3603 << *transitioned_map().handle() << " ["
3604 << ElementsAccessor::ForKind(to_kind)->name() << "]";
3605 if (IsSimpleMapChangeTransition(from_kind, to_kind)) os << " (simple)";
3606 return os;
3607 }
3608
3609
std::ostream& HLoadGlobalGeneric::PrintDataTo(
    std::ostream& os) const {  // NOLINT
3612 return os << name()->ToCString().get() << " ";
3613 }
3614
3615
std::ostream& HInnerAllocatedObject::PrintDataTo(
    std::ostream& os) const {  // NOLINT
3618 os << NameOf(base_object()) << " offset ";
3619 return offset()->PrintTo(os);
3620 }
3621
3622
std::ostream& HLoadContextSlot::PrintDataTo(std::ostream& os) const {  // NOLINT
3624 return os << NameOf(value()) << "[" << slot_index() << "]";
3625 }
3626
3627
std::ostream& HStoreContextSlot::PrintDataTo(
    std::ostream& os) const {  // NOLINT
3630 return os << NameOf(context()) << "[" << slot_index()
3631 << "] = " << NameOf(value());
3632 }
3633
3634
3635 // Implementation of type inference and type conversions. Calculates
3636 // the inferred type of this instruction based on the input operands.
3637
HType HValue::CalculateInferredType() {
3639 return type_;
3640 }
3641
3642
HType HPhi::CalculateInferredType() {
3644 if (OperandCount() == 0) return HType::Tagged();
3645 HType result = OperandAt(0)->type();
3646 for (int i = 1; i < OperandCount(); ++i) {
3647 HType current = OperandAt(i)->type();
3648 result = result.Combine(current);
3649 }
3650 return result;
3651 }
3652
3653
HType HChange::CalculateInferredType() {
3655 if (from().IsDouble() && to().IsTagged()) return HType::HeapNumber();
3656 return type();
3657 }
3658
3659
Representation HUnaryMathOperation::RepresentationFromInputs() {
3661 if (SupportsFlexibleFloorAndRound() &&
3662 (op_ == kMathFloor || op_ == kMathRound)) {
3663 // Floor and Round always take a double input. The integral result can be
3664 // used as an integer or a double. Infer the representation from the uses.
3665 return Representation::None();
3666 }
3667 Representation rep = representation();
3668 // If any of the actual input representation is more general than what we
3669 // have so far but not Tagged, use that representation instead.
3670 Representation input_rep = value()->representation();
3671 if (!input_rep.IsTagged()) {
3672 rep = rep.generalize(input_rep);
3673 }
3674 return rep;
3675 }
3676
3677
bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
                                          HValue* dominator) {
3680 DCHECK(side_effect == kNewSpacePromotion);
3681 Zone* zone = block()->zone();
3682 Isolate* isolate = block()->isolate();
3683 if (!FLAG_use_allocation_folding) return false;
3684
3685 // Try to fold allocations together with their dominating allocations.
3686 if (!dominator->IsAllocate()) {
3687 if (FLAG_trace_allocation_folding) {
3688 PrintF("#%d (%s) cannot fold into #%d (%s)\n",
3689 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3690 }
3691 return false;
3692 }
3693
3694 // Check whether we are folding within the same block for local folding.
3695 if (FLAG_use_local_allocation_folding && dominator->block() != block()) {
3696 if (FLAG_trace_allocation_folding) {
3697 PrintF("#%d (%s) cannot fold into #%d (%s), crosses basic blocks\n",
3698 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3699 }
3700 return false;
3701 }
3702
3703 HAllocate* dominator_allocate = HAllocate::cast(dominator);
3704 HValue* dominator_size = dominator_allocate->size();
3705 HValue* current_size = size();
3706
3707 // TODO(hpayer): Add support for non-constant allocation in dominator.
3708 if (!dominator_size->IsInteger32Constant()) {
3709 if (FLAG_trace_allocation_folding) {
3710 PrintF("#%d (%s) cannot fold into #%d (%s), "
3711 "dynamic allocation size in dominator\n",
3712 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3713 }
3714 return false;
3715 }
3716
3717
3718 if (!IsFoldable(dominator_allocate)) {
3719 if (FLAG_trace_allocation_folding) {
3720 PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n", id(),
3721 Mnemonic(), dominator->id(), dominator->Mnemonic());
3722 }
3723 return false;
3724 }
3725
3726 if (!has_size_upper_bound()) {
3727 if (FLAG_trace_allocation_folding) {
3728 PrintF("#%d (%s) cannot fold into #%d (%s), "
3729 "can't estimate total allocation size\n",
3730 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3731 }
3732 return false;
3733 }
3734
3735 if (!current_size->IsInteger32Constant()) {
3736 // If it's not constant then it is a size_in_bytes calculation graph
3737 // like this: (const_header_size + const_element_size * size).
3738 DCHECK(current_size->IsInstruction());
3739
3740 HInstruction* current_instr = HInstruction::cast(current_size);
3741 if (!current_instr->Dominates(dominator_allocate)) {
3742 if (FLAG_trace_allocation_folding) {
3743 PrintF("#%d (%s) cannot fold into #%d (%s), dynamic size "
3744 "value does not dominate target allocation\n",
3745 id(), Mnemonic(), dominator_allocate->id(),
3746 dominator_allocate->Mnemonic());
3747 }
3748 return false;
3749 }
3750 }
3751
3752 DCHECK(
3753 (IsNewSpaceAllocation() && dominator_allocate->IsNewSpaceAllocation()) ||
3754 (IsOldSpaceAllocation() && dominator_allocate->IsOldSpaceAllocation()));
3755
3756 // First update the size of the dominator allocate instruction.
3757 dominator_size = dominator_allocate->size();
3758 int32_t original_object_size =
3759 HConstant::cast(dominator_size)->GetInteger32Constant();
3760 int32_t dominator_size_constant = original_object_size;
3761
3762 if (MustAllocateDoubleAligned()) {
3763 if ((dominator_size_constant & kDoubleAlignmentMask) != 0) {
3764 dominator_size_constant += kDoubleSize / 2;
3765 }
3766 }
3767
3768 int32_t current_size_max_value = size_upper_bound()->GetInteger32Constant();
3769 int32_t new_dominator_size = dominator_size_constant + current_size_max_value;
3770
3771 // Since we clear the first word after folded memory, we cannot use the
3772 // whole Page::kMaxRegularHeapObjectSize memory.
3773 if (new_dominator_size > Page::kMaxRegularHeapObjectSize - kPointerSize) {
3774 if (FLAG_trace_allocation_folding) {
3775 PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n",
3776 id(), Mnemonic(), dominator_allocate->id(),
3777 dominator_allocate->Mnemonic(), new_dominator_size);
3778 }
3779 return false;
3780 }
3781
3782 HInstruction* new_dominator_size_value;
3783
3784 if (current_size->IsInteger32Constant()) {
3785 new_dominator_size_value = HConstant::CreateAndInsertBefore(
3786 isolate, zone, context(), new_dominator_size, Representation::None(),
3787 dominator_allocate);
3788 } else {
3789 HValue* new_dominator_size_constant = HConstant::CreateAndInsertBefore(
3790 isolate, zone, context(), dominator_size_constant,
3791 Representation::Integer32(), dominator_allocate);
3792
3793 // Add old and new size together and insert.
3794 current_size->ChangeRepresentation(Representation::Integer32());
3795
3796 new_dominator_size_value = HAdd::New(
3797 isolate, zone, context(), new_dominator_size_constant, current_size);
3798 new_dominator_size_value->ClearFlag(HValue::kCanOverflow);
3799 new_dominator_size_value->ChangeRepresentation(Representation::Integer32());
3800
3801 new_dominator_size_value->InsertBefore(dominator_allocate);
3802 }
3803
3804 dominator_allocate->UpdateSize(new_dominator_size_value);
3805
3806 if (MustAllocateDoubleAligned()) {
3807 if (!dominator_allocate->MustAllocateDoubleAligned()) {
3808 dominator_allocate->MakeDoubleAligned();
3809 }
3810 }
3811
3812 bool keep_new_space_iterable = FLAG_log_gc || FLAG_heap_stats;
3813 #ifdef VERIFY_HEAP
3814 keep_new_space_iterable = keep_new_space_iterable || FLAG_verify_heap;
3815 #endif
3816
3817 if (keep_new_space_iterable && dominator_allocate->IsNewSpaceAllocation()) {
3818 dominator_allocate->MakePrefillWithFiller();
3819 } else {
3820 // TODO(hpayer): This is a short-term hack to make allocation mementos
3821 // work again in new space.
3822 dominator_allocate->ClearNextMapWord(original_object_size);
3823 }
3824
3825 dominator_allocate->UpdateClearNextMapWord(MustClearNextMapWord());
3826
3827 // After that replace the dominated allocate instruction.
3828 HInstruction* inner_offset = HConstant::CreateAndInsertBefore(
3829 isolate, zone, context(), dominator_size_constant, Representation::None(),
3830 this);
3831
3832 HInstruction* dominated_allocate_instr = HInnerAllocatedObject::New(
3833 isolate, zone, context(), dominator_allocate, inner_offset, type());
3834 dominated_allocate_instr->InsertBefore(this);
3835 DeleteAndReplaceWith(dominated_allocate_instr);
3836 if (FLAG_trace_allocation_folding) {
3837 PrintF("#%d (%s) folded into #%d (%s)\n",
3838 id(), Mnemonic(), dominator_allocate->id(),
3839 dominator_allocate->Mnemonic());
3840 }
3841 return true;
3842 }
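
// Illustrative sketch (comment only, not part of the build) of the folding
// arithmetic above, assuming a dominating allocation of 32 bytes and a
// dominated allocation whose size upper bound is 24 bytes:
//
//   new_dominator_size = 32 /* + alignment pad if double-aligned */ + 24;
//   // The dominating HAllocate grows to new_dominator_size, and this
//   // instruction is replaced by an HInnerAllocatedObject whose inner offset
//   // equals the dominator's old (possibly padded) size, i.e. 32 here.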
3843
3844
void HAllocate::UpdateFreeSpaceFiller(int32_t free_space_size) {
3846 DCHECK(filler_free_space_size_ != NULL);
3847 Zone* zone = block()->zone();
3848 // We must explicitly force Smi representation here because on x64 we
3849 // would otherwise automatically choose int32, but the actual store
3850 // requires a Smi-tagged value.
3851 HConstant* new_free_space_size = HConstant::CreateAndInsertBefore(
3852 block()->isolate(), zone, context(),
3853 filler_free_space_size_->value()->GetInteger32Constant() +
3854 free_space_size,
3855 Representation::Smi(), filler_free_space_size_);
3856 filler_free_space_size_->UpdateValue(new_free_space_size);
3857 }
3858
3859
void HAllocate::CreateFreeSpaceFiller(int32_t free_space_size) {
3861 DCHECK(filler_free_space_size_ == NULL);
3862 Isolate* isolate = block()->isolate();
3863 Zone* zone = block()->zone();
3864 HInstruction* free_space_instr =
3865 HInnerAllocatedObject::New(isolate, zone, context(), dominating_allocate_,
3866 dominating_allocate_->size(), type());
3867 free_space_instr->InsertBefore(this);
3868 HConstant* filler_map = HConstant::CreateAndInsertAfter(
3869 zone, Unique<Map>::CreateImmovable(isolate->factory()->free_space_map()),
3870 true, free_space_instr);
3871 HInstruction* store_map =
3872 HStoreNamedField::New(isolate, zone, context(), free_space_instr,
3873 HObjectAccess::ForMap(), filler_map);
3874 store_map->SetFlag(HValue::kHasNoObservableSideEffects);
3875 store_map->InsertAfter(filler_map);
3876
3877 // We must explicitly force Smi representation here because on x64 we
3878 // would otherwise automatically choose int32, but the actual store
3879 // requires a Smi-tagged value.
3880 HConstant* filler_size =
3881 HConstant::CreateAndInsertAfter(isolate, zone, context(), free_space_size,
3882 Representation::Smi(), store_map);
3883 // Must force Smi representation for x64 (see comment above).
3884 HObjectAccess access = HObjectAccess::ForMapAndOffset(
3885 isolate->factory()->free_space_map(), FreeSpace::kSizeOffset,
3886 Representation::Smi());
3887 HStoreNamedField* store_size = HStoreNamedField::New(
3888 isolate, zone, context(), free_space_instr, access, filler_size);
3889 store_size->SetFlag(HValue::kHasNoObservableSideEffects);
3890 store_size->InsertAfter(filler_size);
3891 filler_free_space_size_ = store_size;
3892 }
3893
3894
void HAllocate::ClearNextMapWord(int offset) {
3896 if (MustClearNextMapWord()) {
3897 Zone* zone = block()->zone();
3898 HObjectAccess access =
3899 HObjectAccess::ForObservableJSObjectOffset(offset);
3900 HStoreNamedField* clear_next_map =
3901 HStoreNamedField::New(block()->isolate(), zone, context(), this, access,
3902 block()->graph()->GetConstant0());
3903 clear_next_map->ClearAllSideEffects();
3904 clear_next_map->InsertAfter(this);
3905 }
3906 }
3907
3908
std::ostream& HAllocate::PrintDataTo(std::ostream& os) const {  // NOLINT
3910 os << NameOf(size()) << " (";
3911 if (IsNewSpaceAllocation()) os << "N";
3912 if (IsOldSpaceAllocation()) os << "P";
3913 if (MustAllocateDoubleAligned()) os << "A";
3914 if (MustPrefillWithFiller()) os << "F";
3915 return os << ")";
3916 }
3917
3918
bool HStoreKeyed::TryIncreaseBaseOffset(uint32_t increase_by_value) {
  // The base offset is usually just the size of the array header, except
  // that dehoisting adds an additional offset due to array index key
  // manipulation, in which case it becomes (array header size +
  // constant-offset-from-key * kPointerSize).
3924 v8::base::internal::CheckedNumeric<uint32_t> addition_result = base_offset_;
3925 addition_result += increase_by_value;
3926 if (!addition_result.IsValid()) return false;
3927 base_offset_ = addition_result.ValueOrDie();
3928 return true;
3929 }
3930
3931
bool HStoreKeyed::NeedsCanonicalization() {
3933 switch (value()->opcode()) {
3934 case kLoadKeyed: {
3935 ElementsKind load_kind = HLoadKeyed::cast(value())->elements_kind();
3936 return IsFixedFloatElementsKind(load_kind);
3937 }
3938 case kChange: {
3939 Representation from = HChange::cast(value())->from();
3940 return from.IsTagged() || from.IsHeapObject();
3941 }
3942 case kLoadNamedField:
3943 case kPhi: {
3944 // Better safe than sorry...
3945 return true;
3946 }
3947 default:
3948 return false;
3949 }
3950 }
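
// Illustrative JS sketch (comment only) of why canonicalization matters for
// double element stores: a FixedDoubleArray reserves one NaN bit pattern to
// encode the hole, so a non-canonical NaN coming from raw float data or a
// heap number must be rewritten to the canonical NaN before it is stored:
//
//   var f64 = new Float64Array(buffer);  // may contain arbitrary NaN bits
//   var a = [0.5, 1.5];                  // fast double elements
//   a[0] = f64[0];                       // store must canonicalize the NaN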
3951
3952
3953 #define H_CONSTANT_INT(val) \
3954 HConstant::New(isolate, zone, context, static_cast<int32_t>(val))
3955 #define H_CONSTANT_DOUBLE(val) \
3956 HConstant::New(isolate, zone, context, static_cast<double>(val))
3957
3958 #define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op) \
3959 HInstruction* HInstr::New(Isolate* isolate, Zone* zone, HValue* context, \
3960 HValue* left, HValue* right, Strength strength) { \
3961 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
3962 HConstant* c_left = HConstant::cast(left); \
3963 HConstant* c_right = HConstant::cast(right); \
3964 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
3965 double double_res = c_left->DoubleValue() op c_right->DoubleValue(); \
3966 if (IsInt32Double(double_res)) { \
3967 return H_CONSTANT_INT(double_res); \
3968 } \
3969 return H_CONSTANT_DOUBLE(double_res); \
3970 } \
3971 } \
3972 return new (zone) HInstr(context, left, right, strength); \
3973 }
3974
3975
3976 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +)
3977 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HMul, *)
3978 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HSub, -)
3979
3980 #undef DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR
3981
3982
HInstruction* HStringAdd::New(Isolate* isolate, Zone* zone, HValue* context,
                              HValue* left, HValue* right,
                              PretenureFlag pretenure_flag,
                              StringAddFlags flags,
                              Handle<AllocationSite> allocation_site) {
3988 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3989 HConstant* c_right = HConstant::cast(right);
3990 HConstant* c_left = HConstant::cast(left);
3991 if (c_left->HasStringValue() && c_right->HasStringValue()) {
3992 Handle<String> left_string = c_left->StringValue();
3993 Handle<String> right_string = c_right->StringValue();
      // Prevent a possible exception due to an invalid string length.
3995 if (left_string->length() + right_string->length() < String::kMaxLength) {
3996 MaybeHandle<String> concat = isolate->factory()->NewConsString(
3997 c_left->StringValue(), c_right->StringValue());
3998 return HConstant::New(isolate, zone, context, concat.ToHandleChecked());
3999 }
4000 }
4001 }
4002 return new (zone)
4003 HStringAdd(context, left, right, pretenure_flag, flags, allocation_site);
4004 }
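
// Illustrative JS sketch (comment only) of the constant folding above:
//
//   "ab" + "cd"   // both operands are constant strings, so the add folds to
//                 // the constant string "abcd" at compile time, as long as
//                 // the combined length stays below String::kMaxLength.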
4005
4006
std::ostream& HStringAdd::PrintDataTo(std::ostream& os) const {  // NOLINT
4008 if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
4009 os << "_CheckBoth";
4010 } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_LEFT) {
4011 os << "_CheckLeft";
4012 } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_RIGHT) {
4013 os << "_CheckRight";
4014 }
4015 HBinaryOperation::PrintDataTo(os);
4016 os << " (";
4017 if (pretenure_flag() == NOT_TENURED)
4018 os << "N";
4019 else if (pretenure_flag() == TENURED)
4020 os << "D";
4021 return os << ")";
4022 }
4023
4024
HInstruction* HStringCharFromCode::New(Isolate* isolate, Zone* zone,
                                       HValue* context, HValue* char_code) {
4027 if (FLAG_fold_constants && char_code->IsConstant()) {
4028 HConstant* c_code = HConstant::cast(char_code);
4029 if (c_code->HasNumberValue()) {
4030 if (std::isfinite(c_code->DoubleValue())) {
4031 uint32_t code = c_code->NumberValueAsInteger32() & 0xffff;
4032 return HConstant::New(
4033 isolate, zone, context,
4034 isolate->factory()->LookupSingleCharacterStringFromCode(code));
4035 }
4036 return HConstant::New(isolate, zone, context,
4037 isolate->factory()->empty_string());
4038 }
4039 }
4040 return new(zone) HStringCharFromCode(context, char_code);
4041 }
4042
4043
HInstruction* HUnaryMathOperation::New(Isolate* isolate, Zone* zone,
                                       HValue* context, HValue* value,
                                       BuiltinFunctionId op) {
4047 do {
4048 if (!FLAG_fold_constants) break;
4049 if (!value->IsConstant()) break;
4050 HConstant* constant = HConstant::cast(value);
4051 if (!constant->HasNumberValue()) break;
4052 double d = constant->DoubleValue();
4053 if (std::isnan(d)) { // NaN poisons everything.
4054 return H_CONSTANT_DOUBLE(std::numeric_limits<double>::quiet_NaN());
4055 }
4056 if (std::isinf(d)) { // +Infinity and -Infinity.
4057 switch (op) {
4058 case kMathExp:
4059 return H_CONSTANT_DOUBLE((d > 0.0) ? d : 0.0);
4060 case kMathLog:
4061 case kMathSqrt:
4062 return H_CONSTANT_DOUBLE(
4063 (d > 0.0) ? d : std::numeric_limits<double>::quiet_NaN());
4064 case kMathPowHalf:
4065 case kMathAbs:
4066 return H_CONSTANT_DOUBLE((d > 0.0) ? d : -d);
4067 case kMathRound:
4068 case kMathFround:
4069 case kMathFloor:
4070 return H_CONSTANT_DOUBLE(d);
4071 case kMathClz32:
4072 return H_CONSTANT_INT(32);
4073 default:
4074 UNREACHABLE();
4075 break;
4076 }
4077 }
4078 switch (op) {
4079 case kMathExp:
4080 lazily_initialize_fast_exp(isolate);
4081 return H_CONSTANT_DOUBLE(fast_exp(d, isolate));
4082 case kMathLog:
4083 return H_CONSTANT_DOUBLE(std::log(d));
4084 case kMathSqrt:
4085 lazily_initialize_fast_sqrt(isolate);
4086 return H_CONSTANT_DOUBLE(fast_sqrt(d, isolate));
4087 case kMathPowHalf:
4088 return H_CONSTANT_DOUBLE(power_double_double(d, 0.5));
4089 case kMathAbs:
4090 return H_CONSTANT_DOUBLE((d >= 0.0) ? d + 0.0 : -d);
4091 case kMathRound:
4092 // -0.5 .. -0.0 round to -0.0.
4093 if ((d >= -0.5 && Double(d).Sign() < 0)) return H_CONSTANT_DOUBLE(-0.0);
        // Doubles are represented as Significand * 2^Exponent. If the
        // Exponent is not negative, the double value is already an integer.
4096 if (Double(d).Exponent() >= 0) return H_CONSTANT_DOUBLE(d);
4097 return H_CONSTANT_DOUBLE(Floor(d + 0.5));
4098 case kMathFround:
4099 return H_CONSTANT_DOUBLE(static_cast<double>(static_cast<float>(d)));
4100 case kMathFloor:
4101 return H_CONSTANT_DOUBLE(Floor(d));
4102 case kMathClz32: {
4103 uint32_t i = DoubleToUint32(d);
4104 return H_CONSTANT_INT(base::bits::CountLeadingZeros32(i));
4105 }
4106 default:
4107 UNREACHABLE();
4108 break;
4109 }
4110 } while (false);
4111 return new(zone) HUnaryMathOperation(context, value, op);
4112 }
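
// Illustrative JS sketch (comment only) of two folding cases handled above:
//
//   Math.round(-0.3);      // -0, because values in [-0.5, -0) round to -0
//   Math.clz32(Infinity);  // 32, since ToUint32(Infinity) is 0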
4113
4114
Representation HUnaryMathOperation::RepresentationFromUses() {
4116 if (op_ != kMathFloor && op_ != kMathRound) {
4117 return HValue::RepresentationFromUses();
4118 }
4119
4120 // The instruction can have an int32 or double output. Prefer a double
4121 // representation if there are double uses.
4122 bool use_double = false;
4123
4124 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4125 HValue* use = it.value();
4126 int use_index = it.index();
4127 Representation rep_observed = use->observed_input_representation(use_index);
4128 Representation rep_required = use->RequiredInputRepresentation(use_index);
4129 use_double |= (rep_observed.IsDouble() || rep_required.IsDouble());
4130 if (use_double && !FLAG_trace_representation) {
4131 // Having seen one double is enough.
4132 break;
4133 }
4134 if (FLAG_trace_representation) {
4135 if (!rep_required.IsDouble() || rep_observed.IsDouble()) {
4136 PrintF("#%d %s is used by #%d %s as %s%s\n",
4137 id(), Mnemonic(), use->id(),
4138 use->Mnemonic(), rep_observed.Mnemonic(),
4139 (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
4140 } else {
4141 PrintF("#%d %s is required by #%d %s as %s%s\n",
4142 id(), Mnemonic(), use->id(),
4143 use->Mnemonic(), rep_required.Mnemonic(),
4144 (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
4145 }
4146 }
4147 }
4148 return use_double ? Representation::Double() : Representation::Integer32();
4149 }
4150
4151
HInstruction* HPower::New(Isolate* isolate, Zone* zone, HValue* context,
                          HValue* left, HValue* right) {
4154 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4155 HConstant* c_left = HConstant::cast(left);
4156 HConstant* c_right = HConstant::cast(right);
4157 if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
4158 double result =
4159 power_helper(isolate, c_left->DoubleValue(), c_right->DoubleValue());
4160 return H_CONSTANT_DOUBLE(std::isnan(result)
4161 ? std::numeric_limits<double>::quiet_NaN()
4162 : result);
4163 }
4164 }
4165 return new(zone) HPower(left, right);
4166 }
4167
4168
HInstruction* HMathMinMax::New(Isolate* isolate, Zone* zone, HValue* context,
                               HValue* left, HValue* right, Operation op) {
4171 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4172 HConstant* c_left = HConstant::cast(left);
4173 HConstant* c_right = HConstant::cast(right);
4174 if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
4175 double d_left = c_left->DoubleValue();
4176 double d_right = c_right->DoubleValue();
4177 if (op == kMathMin) {
4178 if (d_left > d_right) return H_CONSTANT_DOUBLE(d_right);
4179 if (d_left < d_right) return H_CONSTANT_DOUBLE(d_left);
4180 if (d_left == d_right) {
4181 // Handle +0 and -0.
4182 return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_left
4183 : d_right);
4184 }
4185 } else {
4186 if (d_left < d_right) return H_CONSTANT_DOUBLE(d_right);
4187 if (d_left > d_right) return H_CONSTANT_DOUBLE(d_left);
4188 if (d_left == d_right) {
4189 // Handle +0 and -0.
4190 return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_right
4191 : d_left);
4192 }
4193 }
4194 // All comparisons failed, must be NaN.
4195 return H_CONSTANT_DOUBLE(std::numeric_limits<double>::quiet_NaN());
4196 }
4197 }
4198 return new(zone) HMathMinMax(context, left, right, op);
4199 }
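
// Illustrative JS sketch (comment only) of the signed-zero handling above:
//
//   Math.min(0, -0);   // -0  (the negative zero wins for min)
//   Math.max(0, -0);   // 0   (the positive zero wins for max)
//   Math.min(0, NaN);  // NaN (a NaN operand poisons the result)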
4200
4201
HInstruction* HMod::New(Isolate* isolate, Zone* zone, HValue* context,
                        HValue* left, HValue* right, Strength strength) {
4204 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4205 HConstant* c_left = HConstant::cast(left);
4206 HConstant* c_right = HConstant::cast(right);
4207 if (c_left->HasInteger32Value() && c_right->HasInteger32Value()) {
4208 int32_t dividend = c_left->Integer32Value();
4209 int32_t divisor = c_right->Integer32Value();
4210 if (dividend == kMinInt && divisor == -1) {
4211 return H_CONSTANT_DOUBLE(-0.0);
4212 }
4213 if (divisor != 0) {
4214 int32_t res = dividend % divisor;
4215 if ((res == 0) && (dividend < 0)) {
4216 return H_CONSTANT_DOUBLE(-0.0);
4217 }
4218 return H_CONSTANT_INT(res);
4219 }
4220 }
4221 }
4222 return new (zone) HMod(context, left, right, strength);
4223 }
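
// Illustrative JS sketch (comment only) of the sign rules applied above:
//
//   -4 % 2;            // -0, a zero remainder keeps the sign of the dividend
//   -2147483648 % -1;  // -0, the kMinInt / -1 case handled explicitly above
//   5 % 3;             // 2, folded to an Integer32 constant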
4224
4225
HInstruction* HDiv::New(Isolate* isolate, Zone* zone, HValue* context,
                        HValue* left, HValue* right, Strength strength) {
4228 // If left and right are constant values, try to return a constant value.
4229 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4230 HConstant* c_left = HConstant::cast(left);
4231 HConstant* c_right = HConstant::cast(right);
4232 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
4233 if (c_right->DoubleValue() != 0) {
4234 double double_res = c_left->DoubleValue() / c_right->DoubleValue();
4235 if (IsInt32Double(double_res)) {
4236 return H_CONSTANT_INT(double_res);
4237 }
4238 return H_CONSTANT_DOUBLE(double_res);
4239 } else {
4240 int sign = Double(c_left->DoubleValue()).Sign() *
4241 Double(c_right->DoubleValue()).Sign(); // Right could be -0.
4242 return H_CONSTANT_DOUBLE(sign * V8_INFINITY);
4243 }
4244 }
4245 }
4246 return new (zone) HDiv(context, left, right, strength);
4247 }
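
// Illustrative JS sketch (comment only) of the division-by-zero folding above:
//
//    1 /  0;   // Infinity
//    1 / -0;   // -Infinity, the sign comes from both operands
//   -1 /  0;   // -Infinity
//    6 /  3;   // 2, folded to an Integer32 constant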
4248
4249
HInstruction* HBitwise::New(Isolate* isolate, Zone* zone, HValue* context,
                            Token::Value op, HValue* left, HValue* right,
                            Strength strength) {
4253 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4254 HConstant* c_left = HConstant::cast(left);
4255 HConstant* c_right = HConstant::cast(right);
4256 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
4257 int32_t result;
4258 int32_t v_left = c_left->NumberValueAsInteger32();
4259 int32_t v_right = c_right->NumberValueAsInteger32();
4260 switch (op) {
4261 case Token::BIT_XOR:
4262 result = v_left ^ v_right;
4263 break;
4264 case Token::BIT_AND:
4265 result = v_left & v_right;
4266 break;
4267 case Token::BIT_OR:
4268 result = v_left | v_right;
4269 break;
4270 default:
4271 result = 0; // Please the compiler.
4272 UNREACHABLE();
4273 }
4274 return H_CONSTANT_INT(result);
4275 }
4276 }
4277 return new (zone) HBitwise(context, op, left, right, strength);
4278 }
4279
4280
4281 #define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result) \
4282 HInstruction* HInstr::New(Isolate* isolate, Zone* zone, HValue* context, \
4283 HValue* left, HValue* right, Strength strength) { \
4284 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
4285 HConstant* c_left = HConstant::cast(left); \
4286 HConstant* c_right = HConstant::cast(right); \
4287 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
4288 return H_CONSTANT_INT(result); \
4289 } \
4290 } \
4291 return new (zone) HInstr(context, left, right, strength); \
4292 }
4293
4294
4295 DEFINE_NEW_H_BITWISE_INSTR(HSar,
4296 c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f))
4297 DEFINE_NEW_H_BITWISE_INSTR(HShl,
4298 c_left->NumberValueAsInteger32() << (c_right->NumberValueAsInteger32() & 0x1f))
4299
4300 #undef DEFINE_NEW_H_BITWISE_INSTR
4301
4302
HInstruction* HShr::New(Isolate* isolate, Zone* zone, HValue* context,
                        HValue* left, HValue* right, Strength strength) {
4305 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4306 HConstant* c_left = HConstant::cast(left);
4307 HConstant* c_right = HConstant::cast(right);
4308 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
4309 int32_t left_val = c_left->NumberValueAsInteger32();
4310 int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f;
4311 if ((right_val == 0) && (left_val < 0)) {
4312 return H_CONSTANT_DOUBLE(static_cast<uint32_t>(left_val));
4313 }
4314 return H_CONSTANT_INT(static_cast<uint32_t>(left_val) >> right_val);
4315 }
4316 }
4317 return new (zone) HShr(context, left, right, strength);
4318 }
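
// Illustrative JS sketch (comment only) of the unsigned shift folding above:
//
//   -8 >>> 1;   // 2147483644, fits in int32 after the shift
//   -1 >>> 0;   // 4294967295, does not fit in int32, so the fold produces a
//               // double constant rather than an Integer32 constant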
4319
4320
HInstruction* HSeqStringGetChar::New(Isolate* isolate, Zone* zone,
                                     HValue* context, String::Encoding encoding,
                                     HValue* string, HValue* index) {
4324 if (FLAG_fold_constants && string->IsConstant() && index->IsConstant()) {
4325 HConstant* c_string = HConstant::cast(string);
4326 HConstant* c_index = HConstant::cast(index);
4327 if (c_string->HasStringValue() && c_index->HasInteger32Value()) {
4328 Handle<String> s = c_string->StringValue();
4329 int32_t i = c_index->Integer32Value();
4330 DCHECK_LE(0, i);
4331 DCHECK_LT(i, s->length());
4332 return H_CONSTANT_INT(s->Get(i));
4333 }
4334 }
4335 return new(zone) HSeqStringGetChar(encoding, string, index);
4336 }
4337
4338
4339 #undef H_CONSTANT_INT
4340 #undef H_CONSTANT_DOUBLE
4341
4342
std::ostream& HBitwise::PrintDataTo(std::ostream& os) const {  // NOLINT
4344 os << Token::Name(op_) << " ";
4345 return HBitwiseBinaryOperation::PrintDataTo(os);
4346 }
4347
4348
void HPhi::SimplifyConstantInputs() {
4350 // Convert constant inputs to integers when all uses are truncating.
4351 // This must happen before representation inference takes place.
4352 if (!CheckUsesForFlag(kTruncatingToInt32)) return;
4353 for (int i = 0; i < OperandCount(); ++i) {
4354 if (!OperandAt(i)->IsConstant()) return;
4355 }
4356 HGraph* graph = block()->graph();
4357 for (int i = 0; i < OperandCount(); ++i) {
4358 HConstant* operand = HConstant::cast(OperandAt(i));
4359 if (operand->HasInteger32Value()) {
4360 continue;
4361 } else if (operand->HasDoubleValue()) {
4362 HConstant* integer_input = HConstant::New(
4363 graph->isolate(), graph->zone(), graph->GetInvalidContext(),
4364 DoubleToInt32(operand->DoubleValue()));
4365 integer_input->InsertAfter(operand);
4366 SetOperandAt(i, integer_input);
4367 } else if (operand->HasBooleanValue()) {
4368 SetOperandAt(i, operand->BooleanValue() ? graph->GetConstant1()
4369 : graph->GetConstant0());
4370 } else if (operand->ImmortalImmovable()) {
4371 SetOperandAt(i, graph->GetConstant0());
4372 }
4373 }
4374 // Overwrite observed input representations because they are likely Tagged.
4375 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4376 HValue* use = it.value();
4377 if (use->IsBinaryOperation()) {
4378 HBinaryOperation::cast(use)->set_observed_input_representation(
4379 it.index(), Representation::Smi());
4380 }
4381 }
4382 }
4383
4384
void HPhi::InferRepresentation(HInferRepresentationPhase* h_infer) {
4386 DCHECK(CheckFlag(kFlexibleRepresentation));
4387 Representation new_rep = RepresentationFromUses();
4388 UpdateRepresentation(new_rep, h_infer, "uses");
4389 new_rep = RepresentationFromInputs();
4390 UpdateRepresentation(new_rep, h_infer, "inputs");
4391 new_rep = RepresentationFromUseRequirements();
4392 UpdateRepresentation(new_rep, h_infer, "use requirements");
4393 }
4394
4395
Representation HPhi::RepresentationFromInputs() {
4397 Representation r = representation();
4398 for (int i = 0; i < OperandCount(); ++i) {
4399 // Ignore conservative Tagged assumption of parameters if we have
4400 // reason to believe that it's too conservative.
4401 if (has_type_feedback_from_uses() && OperandAt(i)->IsParameter()) {
4402 continue;
4403 }
4404
4405 r = r.generalize(OperandAt(i)->KnownOptimalRepresentation());
4406 }
4407 return r;
4408 }
4409
4410
4411 // Returns a representation if all uses agree on the same representation.
4412 // Integer32 is also returned when some uses are Smi but others are Integer32.
Representation HValue::RepresentationFromUseRequirements() {
4414 Representation rep = Representation::None();
4415 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    // Ignore use requirements from code that never runs.
4417 if (it.value()->block()->IsUnreachable()) continue;
4418
4419 // We check for observed_input_representation elsewhere.
4420 Representation use_rep =
4421 it.value()->RequiredInputRepresentation(it.index());
4422 if (rep.IsNone()) {
4423 rep = use_rep;
4424 continue;
4425 }
4426 if (use_rep.IsNone() || rep.Equals(use_rep)) continue;
4427 if (rep.generalize(use_rep).IsInteger32()) {
4428 rep = Representation::Integer32();
4429 continue;
4430 }
4431 return Representation::None();
4432 }
4433 return rep;
4434 }
4435
4436
bool HValue::HasNonSmiUse() {
4438 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4439 // We check for observed_input_representation elsewhere.
4440 Representation use_rep =
4441 it.value()->RequiredInputRepresentation(it.index());
4442 if (!use_rep.IsNone() &&
4443 !use_rep.IsSmi() &&
4444 !use_rep.IsTagged()) {
4445 return true;
4446 }
4447 }
4448 return false;
4449 }
4450
4451
4452 // Node-specific verification code is only included in debug mode.
4453 #ifdef DEBUG
4454
void HPhi::Verify() {
4456 DCHECK(OperandCount() == block()->predecessors()->length());
4457 for (int i = 0; i < OperandCount(); ++i) {
4458 HValue* value = OperandAt(i);
4459 HBasicBlock* defining_block = value->block();
4460 HBasicBlock* predecessor_block = block()->predecessors()->at(i);
4461 DCHECK(defining_block == predecessor_block ||
4462 defining_block->Dominates(predecessor_block));
4463 }
4464 }
4465
4466
void HSimulate::Verify() {
4468 HInstruction::Verify();
4469 DCHECK(HasAstId() || next()->IsEnterInlined());
4470 }
4471
4472
void HCheckHeapObject::Verify() {
4474 HInstruction::Verify();
4475 DCHECK(HasNoUses());
4476 }
4477
4478
void HCheckValue::Verify() {
4480 HInstruction::Verify();
4481 DCHECK(HasNoUses());
4482 }
4483
4484 #endif
4485
4486
HObjectAccess HObjectAccess::ForFixedArrayHeader(int offset) {
4488 DCHECK(offset >= 0);
4489 DCHECK(offset < FixedArray::kHeaderSize);
4490 if (offset == FixedArray::kLengthOffset) return ForFixedArrayLength();
4491 return HObjectAccess(kInobject, offset);
4492 }
4493
4494
HObjectAccess HObjectAccess::ForMapAndOffset(Handle<Map> map, int offset,
                                             Representation representation) {
4497 DCHECK(offset >= 0);
4498 Portion portion = kInobject;
4499
4500 if (offset == JSObject::kElementsOffset) {
4501 portion = kElementsPointer;
4502 } else if (offset == JSObject::kMapOffset) {
4503 portion = kMaps;
4504 }
4505 bool existing_inobject_property = true;
4506 if (!map.is_null()) {
4507 existing_inobject_property = (offset <
4508 map->instance_size() - map->unused_property_fields() * kPointerSize);
4509 }
4510 return HObjectAccess(portion, offset, representation, Handle<String>::null(),
4511 false, existing_inobject_property);
4512 }
4513
4514
HObjectAccess HObjectAccess::ForAllocationSiteOffset(int offset) {
4516 switch (offset) {
4517 case AllocationSite::kTransitionInfoOffset:
4518 return HObjectAccess(kInobject, offset, Representation::Tagged());
4519 case AllocationSite::kNestedSiteOffset:
4520 return HObjectAccess(kInobject, offset, Representation::Tagged());
4521 case AllocationSite::kPretenureDataOffset:
4522 return HObjectAccess(kInobject, offset, Representation::Smi());
4523 case AllocationSite::kPretenureCreateCountOffset:
4524 return HObjectAccess(kInobject, offset, Representation::Smi());
4525 case AllocationSite::kDependentCodeOffset:
4526 return HObjectAccess(kInobject, offset, Representation::Tagged());
4527 case AllocationSite::kWeakNextOffset:
4528 return HObjectAccess(kInobject, offset, Representation::Tagged());
4529 default:
4530 UNREACHABLE();
4531 }
4532 return HObjectAccess(kInobject, offset);
4533 }
4534
4535
HObjectAccess HObjectAccess::ForContextSlot(int index) {
4537 DCHECK(index >= 0);
4538 Portion portion = kInobject;
4539 int offset = Context::kHeaderSize + index * kPointerSize;
4540 DCHECK_EQ(offset, Context::SlotOffset(index) + kHeapObjectTag);
4541 return HObjectAccess(portion, offset, Representation::Tagged());
4542 }
4543
4544
HObjectAccess HObjectAccess::ForScriptContext(int index) {
4546 DCHECK(index >= 0);
4547 Portion portion = kInobject;
4548 int offset = ScriptContextTable::GetContextOffset(index);
4549 return HObjectAccess(portion, offset, Representation::Tagged());
4550 }
4551
4552
HObjectAccess HObjectAccess::ForJSArrayOffset(int offset) {
4554 DCHECK(offset >= 0);
4555 Portion portion = kInobject;
4556
4557 if (offset == JSObject::kElementsOffset) {
4558 portion = kElementsPointer;
4559 } else if (offset == JSArray::kLengthOffset) {
4560 portion = kArrayLengths;
4561 } else if (offset == JSObject::kMapOffset) {
4562 portion = kMaps;
4563 }
4564 return HObjectAccess(portion, offset);
4565 }
4566
4567
HObjectAccess HObjectAccess::ForBackingStoreOffset(int offset,
    Representation representation) {
4570 DCHECK(offset >= 0);
4571 return HObjectAccess(kBackingStore, offset, representation,
4572 Handle<String>::null(), false, false);
4573 }
4574
4575
HObjectAccess HObjectAccess::ForField(Handle<Map> map, int index,
                                      Representation representation,
                                      Handle<Name> name) {
4579 if (index < 0) {
4580 // Negative property indices are in-object properties, indexed
4581 // from the end of the fixed part of the object.
4582 int offset = (index * kPointerSize) + map->instance_size();
4583 return HObjectAccess(kInobject, offset, representation, name, false, true);
4584 } else {
4585 // Non-negative property indices are in the properties array.
4586 int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
4587 return HObjectAccess(kBackingStore, offset, representation, name,
4588 false, false);
4589 }
4590 }
4591
4592
void HObjectAccess::SetGVNFlags(HValue *instr, PropertyAccessType access_type) {
  // Set the appropriate GVN flags for a given load or store instruction.
4595 if (access_type == STORE) {
4596 // track dominating allocations in order to eliminate write barriers
4597 instr->SetDependsOnFlag(::v8::internal::kNewSpacePromotion);
4598 instr->SetFlag(HValue::kTrackSideEffectDominators);
4599 } else {
4600 // try to GVN loads, but don't hoist above map changes
4601 instr->SetFlag(HValue::kUseGVN);
4602 instr->SetDependsOnFlag(::v8::internal::kMaps);
4603 }
4604
4605 switch (portion()) {
4606 case kArrayLengths:
4607 if (access_type == STORE) {
4608 instr->SetChangesFlag(::v8::internal::kArrayLengths);
4609 } else {
4610 instr->SetDependsOnFlag(::v8::internal::kArrayLengths);
4611 }
4612 break;
4613 case kStringLengths:
4614 if (access_type == STORE) {
4615 instr->SetChangesFlag(::v8::internal::kStringLengths);
4616 } else {
4617 instr->SetDependsOnFlag(::v8::internal::kStringLengths);
4618 }
4619 break;
4620 case kInobject:
4621 if (access_type == STORE) {
4622 instr->SetChangesFlag(::v8::internal::kInobjectFields);
4623 } else {
4624 instr->SetDependsOnFlag(::v8::internal::kInobjectFields);
4625 }
4626 break;
4627 case kDouble:
4628 if (access_type == STORE) {
4629 instr->SetChangesFlag(::v8::internal::kDoubleFields);
4630 } else {
4631 instr->SetDependsOnFlag(::v8::internal::kDoubleFields);
4632 }
4633 break;
4634 case kBackingStore:
4635 if (access_type == STORE) {
4636 instr->SetChangesFlag(::v8::internal::kBackingStoreFields);
4637 } else {
4638 instr->SetDependsOnFlag(::v8::internal::kBackingStoreFields);
4639 }
4640 break;
4641 case kElementsPointer:
4642 if (access_type == STORE) {
4643 instr->SetChangesFlag(::v8::internal::kElementsPointer);
4644 } else {
4645 instr->SetDependsOnFlag(::v8::internal::kElementsPointer);
4646 }
4647 break;
4648 case kMaps:
4649 if (access_type == STORE) {
4650 instr->SetChangesFlag(::v8::internal::kMaps);
4651 } else {
4652 instr->SetDependsOnFlag(::v8::internal::kMaps);
4653 }
4654 break;
4655 case kExternalMemory:
4656 if (access_type == STORE) {
4657 instr->SetChangesFlag(::v8::internal::kExternalMemory);
4658 } else {
4659 instr->SetDependsOnFlag(::v8::internal::kExternalMemory);
4660 }
4661 break;
4662 }
4663 }
4664
4665
std::ostream& operator<<(std::ostream& os, const HObjectAccess& access) {
4667 os << ".";
4668
4669 switch (access.portion()) {
4670 case HObjectAccess::kArrayLengths:
4671 case HObjectAccess::kStringLengths:
4672 os << "%length";
4673 break;
4674 case HObjectAccess::kElementsPointer:
4675 os << "%elements";
4676 break;
4677 case HObjectAccess::kMaps:
4678 os << "%map";
4679 break;
4680 case HObjectAccess::kDouble: // fall through
4681 case HObjectAccess::kInobject:
4682 if (!access.name().is_null() && access.name()->IsString()) {
4683 os << Handle<String>::cast(access.name())->ToCString().get();
4684 }
4685 os << "[in-object]";
4686 break;
4687 case HObjectAccess::kBackingStore:
4688 if (!access.name().is_null() && access.name()->IsString()) {
4689 os << Handle<String>::cast(access.name())->ToCString().get();
4690 }
4691 os << "[backing-store]";
4692 break;
4693 case HObjectAccess::kExternalMemory:
4694 os << "[external-memory]";
4695 break;
4696 }
4697
4698 return os << "@" << access.offset();
4699 }
4700
4701 } // namespace internal
4702 } // namespace v8
4703