1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/v8.h"
6
7 #include "src/base/bits.h"
8 #include "src/double.h"
9 #include "src/factory.h"
10 #include "src/hydrogen-infer-representation.h"
11 #include "src/property-details-inl.h"
12
13 #if V8_TARGET_ARCH_IA32
14 #include "src/ia32/lithium-ia32.h" // NOLINT
15 #elif V8_TARGET_ARCH_X64
16 #include "src/x64/lithium-x64.h" // NOLINT
17 #elif V8_TARGET_ARCH_ARM64
18 #include "src/arm64/lithium-arm64.h" // NOLINT
19 #elif V8_TARGET_ARCH_ARM
20 #include "src/arm/lithium-arm.h" // NOLINT
21 #elif V8_TARGET_ARCH_MIPS
22 #include "src/mips/lithium-mips.h" // NOLINT
23 #elif V8_TARGET_ARCH_MIPS64
24 #include "src/mips64/lithium-mips64.h" // NOLINT
25 #elif V8_TARGET_ARCH_X87
26 #include "src/x87/lithium-x87.h" // NOLINT
27 #else
28 #error Unsupported target architecture.
29 #endif
30
31 #include "src/base/safe_math.h"
32
33 namespace v8 {
34 namespace internal {
35
// Generate a trivial CompileToLithium() for every concrete hydrogen
// instruction: each simply dispatches to the matching Do<Type>() method
// on the architecture-specific lithium chunk builder included above.
#define DEFINE_COMPILE(type)                                        \
  LInstruction* H##type::CompileToLithium(LChunkBuilder* builder) { \
    return builder->Do##type(this);                                 \
  }
HYDROGEN_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
#undef DEFINE_COMPILE
42
43
// Returns the isolate this value belongs to, via its basic block.
// Requires the value to already be inserted into a block.
Isolate* HValue::isolate() const {
  DCHECK(block() != NULL);
  return block()->isolate();
}
48
49
AssumeRepresentation(Representation r)50 void HValue::AssumeRepresentation(Representation r) {
51 if (CheckFlag(kFlexibleRepresentation)) {
52 ChangeRepresentation(r);
53 // The representation of the value is dictated by type feedback and
54 // will not be changed later.
55 ClearFlag(kFlexibleRepresentation);
56 }
57 }
58
59
InferRepresentation(HInferRepresentationPhase * h_infer)60 void HValue::InferRepresentation(HInferRepresentationPhase* h_infer) {
61 DCHECK(CheckFlag(kFlexibleRepresentation));
62 Representation new_rep = RepresentationFromInputs();
63 UpdateRepresentation(new_rep, h_infer, "inputs");
64 new_rep = RepresentationFromUses();
65 UpdateRepresentation(new_rep, h_infer, "uses");
66 if (representation().IsSmi() && HasNonSmiUse()) {
67 UpdateRepresentation(
68 Representation::Integer32(), h_infer, "use requirements");
69 }
70 }
71
72
RepresentationFromUses()73 Representation HValue::RepresentationFromUses() {
74 if (HasNoUses()) return Representation::None();
75
76 // Array of use counts for each representation.
77 int use_count[Representation::kNumRepresentations] = { 0 };
78
79 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
80 HValue* use = it.value();
81 Representation rep = use->observed_input_representation(it.index());
82 if (rep.IsNone()) continue;
83 if (FLAG_trace_representation) {
84 PrintF("#%d %s is used by #%d %s as %s%s\n",
85 id(), Mnemonic(), use->id(), use->Mnemonic(), rep.Mnemonic(),
86 (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
87 }
88 use_count[rep.kind()] += 1;
89 }
90 if (IsPhi()) HPhi::cast(this)->AddIndirectUsesTo(&use_count[0]);
91 int tagged_count = use_count[Representation::kTagged];
92 int double_count = use_count[Representation::kDouble];
93 int int32_count = use_count[Representation::kInteger32];
94 int smi_count = use_count[Representation::kSmi];
95
96 if (tagged_count > 0) return Representation::Tagged();
97 if (double_count > 0) return Representation::Double();
98 if (int32_count > 0) return Representation::Integer32();
99 if (smi_count > 0) return Representation::Smi();
100
101 return Representation::None();
102 }
103
104
UpdateRepresentation(Representation new_rep,HInferRepresentationPhase * h_infer,const char * reason)105 void HValue::UpdateRepresentation(Representation new_rep,
106 HInferRepresentationPhase* h_infer,
107 const char* reason) {
108 Representation r = representation();
109 if (new_rep.is_more_general_than(r)) {
110 if (CheckFlag(kCannotBeTagged) && new_rep.IsTagged()) return;
111 if (FLAG_trace_representation) {
112 PrintF("Changing #%d %s representation %s -> %s based on %s\n",
113 id(), Mnemonic(), r.Mnemonic(), new_rep.Mnemonic(), reason);
114 }
115 ChangeRepresentation(new_rep);
116 AddDependantsToWorklist(h_infer);
117 }
118 }
119
120
AddDependantsToWorklist(HInferRepresentationPhase * h_infer)121 void HValue::AddDependantsToWorklist(HInferRepresentationPhase* h_infer) {
122 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
123 h_infer->AddToWorklist(it.value());
124 }
125 for (int i = 0; i < OperandCount(); ++i) {
126 h_infer->AddToWorklist(OperandAt(i));
127 }
128 }
129
130
ConvertAndSetOverflow(Representation r,int64_t result,bool * overflow)131 static int32_t ConvertAndSetOverflow(Representation r,
132 int64_t result,
133 bool* overflow) {
134 if (r.IsSmi()) {
135 if (result > Smi::kMaxValue) {
136 *overflow = true;
137 return Smi::kMaxValue;
138 }
139 if (result < Smi::kMinValue) {
140 *overflow = true;
141 return Smi::kMinValue;
142 }
143 } else {
144 if (result > kMaxInt) {
145 *overflow = true;
146 return kMaxInt;
147 }
148 if (result < kMinInt) {
149 *overflow = true;
150 return kMinInt;
151 }
152 }
153 return static_cast<int32_t>(result);
154 }
155
156
AddWithoutOverflow(Representation r,int32_t a,int32_t b,bool * overflow)157 static int32_t AddWithoutOverflow(Representation r,
158 int32_t a,
159 int32_t b,
160 bool* overflow) {
161 int64_t result = static_cast<int64_t>(a) + static_cast<int64_t>(b);
162 return ConvertAndSetOverflow(r, result, overflow);
163 }
164
165
SubWithoutOverflow(Representation r,int32_t a,int32_t b,bool * overflow)166 static int32_t SubWithoutOverflow(Representation r,
167 int32_t a,
168 int32_t b,
169 bool* overflow) {
170 int64_t result = static_cast<int64_t>(a) - static_cast<int64_t>(b);
171 return ConvertAndSetOverflow(r, result, overflow);
172 }
173
174
MulWithoutOverflow(const Representation & r,int32_t a,int32_t b,bool * overflow)175 static int32_t MulWithoutOverflow(const Representation& r,
176 int32_t a,
177 int32_t b,
178 bool* overflow) {
179 int64_t result = static_cast<int64_t>(a) * static_cast<int64_t>(b);
180 return ConvertAndSetOverflow(r, result, overflow);
181 }
182
183
Mask() const184 int32_t Range::Mask() const {
185 if (lower_ == upper_) return lower_;
186 if (lower_ >= 0) {
187 int32_t res = 1;
188 while (res < upper_) {
189 res = (res << 1) | 1;
190 }
191 return res;
192 }
193 return 0xffffffff;
194 }
195
196
AddConstant(int32_t value)197 void Range::AddConstant(int32_t value) {
198 if (value == 0) return;
199 bool may_overflow = false; // Overflow is ignored here.
200 Representation r = Representation::Integer32();
201 lower_ = AddWithoutOverflow(r, lower_, value, &may_overflow);
202 upper_ = AddWithoutOverflow(r, upper_, value, &may_overflow);
203 #ifdef DEBUG
204 Verify();
205 #endif
206 }
207
208
Intersect(Range * other)209 void Range::Intersect(Range* other) {
210 upper_ = Min(upper_, other->upper_);
211 lower_ = Max(lower_, other->lower_);
212 bool b = CanBeMinusZero() && other->CanBeMinusZero();
213 set_can_be_minus_zero(b);
214 }
215
216
Union(Range * other)217 void Range::Union(Range* other) {
218 upper_ = Max(upper_, other->upper_);
219 lower_ = Min(lower_, other->lower_);
220 bool b = CanBeMinusZero() || other->CanBeMinusZero();
221 set_can_be_minus_zero(b);
222 }
223
224
CombinedMax(Range * other)225 void Range::CombinedMax(Range* other) {
226 upper_ = Max(upper_, other->upper_);
227 lower_ = Max(lower_, other->lower_);
228 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
229 }
230
231
CombinedMin(Range * other)232 void Range::CombinedMin(Range* other) {
233 upper_ = Min(upper_, other->upper_);
234 lower_ = Min(lower_, other->lower_);
235 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
236 }
237
238
Sar(int32_t value)239 void Range::Sar(int32_t value) {
240 int32_t bits = value & 0x1F;
241 lower_ = lower_ >> bits;
242 upper_ = upper_ >> bits;
243 set_can_be_minus_zero(false);
244 }
245
246
Shl(int32_t value)247 void Range::Shl(int32_t value) {
248 int32_t bits = value & 0x1F;
249 int old_lower = lower_;
250 int old_upper = upper_;
251 lower_ = lower_ << bits;
252 upper_ = upper_ << bits;
253 if (old_lower != lower_ >> bits || old_upper != upper_ >> bits) {
254 upper_ = kMaxInt;
255 lower_ = kMinInt;
256 }
257 set_can_be_minus_zero(false);
258 }
259
260
AddAndCheckOverflow(const Representation & r,Range * other)261 bool Range::AddAndCheckOverflow(const Representation& r, Range* other) {
262 bool may_overflow = false;
263 lower_ = AddWithoutOverflow(r, lower_, other->lower(), &may_overflow);
264 upper_ = AddWithoutOverflow(r, upper_, other->upper(), &may_overflow);
265 KeepOrder();
266 #ifdef DEBUG
267 Verify();
268 #endif
269 return may_overflow;
270 }
271
272
SubAndCheckOverflow(const Representation & r,Range * other)273 bool Range::SubAndCheckOverflow(const Representation& r, Range* other) {
274 bool may_overflow = false;
275 lower_ = SubWithoutOverflow(r, lower_, other->upper(), &may_overflow);
276 upper_ = SubWithoutOverflow(r, upper_, other->lower(), &may_overflow);
277 KeepOrder();
278 #ifdef DEBUG
279 Verify();
280 #endif
281 return may_overflow;
282 }
283
284
KeepOrder()285 void Range::KeepOrder() {
286 if (lower_ > upper_) {
287 int32_t tmp = lower_;
288 lower_ = upper_;
289 upper_ = tmp;
290 }
291 }
292
293
#ifdef DEBUG
// Debug-only sanity check of the range invariant.
void Range::Verify() const {
  DCHECK(lower_ <= upper_);
}
#endif
299
300
MulAndCheckOverflow(const Representation & r,Range * other)301 bool Range::MulAndCheckOverflow(const Representation& r, Range* other) {
302 bool may_overflow = false;
303 int v1 = MulWithoutOverflow(r, lower_, other->lower(), &may_overflow);
304 int v2 = MulWithoutOverflow(r, lower_, other->upper(), &may_overflow);
305 int v3 = MulWithoutOverflow(r, upper_, other->lower(), &may_overflow);
306 int v4 = MulWithoutOverflow(r, upper_, other->upper(), &may_overflow);
307 lower_ = Min(Min(v1, v2), Min(v3, v4));
308 upper_ = Max(Max(v1, v2), Max(v3, v4));
309 #ifdef DEBUG
310 Verify();
311 #endif
312 return may_overflow;
313 }
314
315
IsDefinedAfter(HBasicBlock * other) const316 bool HValue::IsDefinedAfter(HBasicBlock* other) const {
317 return block()->block_id() > other->block_id();
318 }
319
320
tail()321 HUseListNode* HUseListNode::tail() {
322 // Skip and remove dead items in the use list.
323 while (tail_ != NULL && tail_->value()->CheckFlag(HValue::kIsDead)) {
324 tail_ = tail_->tail_;
325 }
326 return tail_;
327 }
328
329
CheckUsesForFlag(Flag f) const330 bool HValue::CheckUsesForFlag(Flag f) const {
331 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
332 if (it.value()->IsSimulate()) continue;
333 if (!it.value()->CheckFlag(f)) return false;
334 }
335 return true;
336 }
337
338
CheckUsesForFlag(Flag f,HValue ** value) const339 bool HValue::CheckUsesForFlag(Flag f, HValue** value) const {
340 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
341 if (it.value()->IsSimulate()) continue;
342 if (!it.value()->CheckFlag(f)) {
343 *value = it.value();
344 return false;
345 }
346 }
347 return true;
348 }
349
350
HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const351 bool HValue::HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const {
352 bool return_value = false;
353 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
354 if (it.value()->IsSimulate()) continue;
355 if (!it.value()->CheckFlag(f)) return false;
356 return_value = true;
357 }
358 return return_value;
359 }
360
361
HUseIterator(HUseListNode * head)362 HUseIterator::HUseIterator(HUseListNode* head) : next_(head) {
363 Advance();
364 }
365
366
Advance()367 void HUseIterator::Advance() {
368 current_ = next_;
369 if (current_ != NULL) {
370 next_ = current_->tail();
371 value_ = current_->value();
372 index_ = current_->index();
373 }
374 }
375
376
UseCount() const377 int HValue::UseCount() const {
378 int count = 0;
379 for (HUseIterator it(uses()); !it.Done(); it.Advance()) ++count;
380 return count;
381 }
382
383
RemoveUse(HValue * value,int index)384 HUseListNode* HValue::RemoveUse(HValue* value, int index) {
385 HUseListNode* previous = NULL;
386 HUseListNode* current = use_list_;
387 while (current != NULL) {
388 if (current->value() == value && current->index() == index) {
389 if (previous == NULL) {
390 use_list_ = current->tail();
391 } else {
392 previous->set_tail(current->tail());
393 }
394 break;
395 }
396
397 previous = current;
398 current = current->tail();
399 }
400
401 #ifdef DEBUG
402 // Do not reuse use list nodes in debug mode, zap them.
403 if (current != NULL) {
404 HUseListNode* temp =
405 new(block()->zone())
406 HUseListNode(current->value(), current->index(), NULL);
407 current->Zap();
408 current = temp;
409 }
410 #endif
411 return current;
412 }
413
414
Equals(HValue * other)415 bool HValue::Equals(HValue* other) {
416 if (other->opcode() != opcode()) return false;
417 if (!other->representation().Equals(representation())) return false;
418 if (!other->type_.Equals(type_)) return false;
419 if (other->flags() != flags()) return false;
420 if (OperandCount() != other->OperandCount()) return false;
421 for (int i = 0; i < OperandCount(); ++i) {
422 if (OperandAt(i)->id() != other->OperandAt(i)->id()) return false;
423 }
424 bool result = DataEquals(other);
425 DCHECK(!result || Hashcode() == other->Hashcode());
426 return result;
427 }
428
429
Hashcode()430 intptr_t HValue::Hashcode() {
431 intptr_t result = opcode();
432 int count = OperandCount();
433 for (int i = 0; i < count; ++i) {
434 result = result * 19 + OperandAt(i)->id() + (result >> 7);
435 }
436 return result;
437 }
438
439
Mnemonic() const440 const char* HValue::Mnemonic() const {
441 switch (opcode()) {
442 #define MAKE_CASE(type) case k##type: return #type;
443 HYDROGEN_CONCRETE_INSTRUCTION_LIST(MAKE_CASE)
444 #undef MAKE_CASE
445 case kPhi: return "Phi";
446 default: return "";
447 }
448 }
449
450
CanReplaceWithDummyUses()451 bool HValue::CanReplaceWithDummyUses() {
452 return FLAG_unreachable_code_elimination &&
453 !(block()->IsReachable() ||
454 IsBlockEntry() ||
455 IsControlInstruction() ||
456 IsArgumentsObject() ||
457 IsCapturedObject() ||
458 IsSimulate() ||
459 IsEnterInlined() ||
460 IsLeaveInlined());
461 }
462
463
IsInteger32Constant()464 bool HValue::IsInteger32Constant() {
465 return IsConstant() && HConstant::cast(this)->HasInteger32Value();
466 }
467
468
GetInteger32Constant()469 int32_t HValue::GetInteger32Constant() {
470 return HConstant::cast(this)->Integer32Value();
471 }
472
473
EqualsInteger32Constant(int32_t value)474 bool HValue::EqualsInteger32Constant(int32_t value) {
475 return IsInteger32Constant() && GetInteger32Constant() == value;
476 }
477
478
SetOperandAt(int index,HValue * value)479 void HValue::SetOperandAt(int index, HValue* value) {
480 RegisterUse(index, value);
481 InternalSetOperandAt(index, value);
482 }
483
484
DeleteAndReplaceWith(HValue * other)485 void HValue::DeleteAndReplaceWith(HValue* other) {
486 // We replace all uses first, so Delete can assert that there are none.
487 if (other != NULL) ReplaceAllUsesWith(other);
488 Kill();
489 DeleteFromGraph();
490 }
491
492
ReplaceAllUsesWith(HValue * other)493 void HValue::ReplaceAllUsesWith(HValue* other) {
494 while (use_list_ != NULL) {
495 HUseListNode* list_node = use_list_;
496 HValue* value = list_node->value();
497 DCHECK(!value->block()->IsStartBlock());
498 value->InternalSetOperandAt(list_node->index(), other);
499 use_list_ = list_node->tail();
500 list_node->set_tail(other->use_list_);
501 other->use_list_ = list_node;
502 }
503 }
504
505
Kill()506 void HValue::Kill() {
507 // Instead of going through the entire use list of each operand, we only
508 // check the first item in each use list and rely on the tail() method to
509 // skip dead items, removing them lazily next time we traverse the list.
510 SetFlag(kIsDead);
511 for (int i = 0; i < OperandCount(); ++i) {
512 HValue* operand = OperandAt(i);
513 if (operand == NULL) continue;
514 HUseListNode* first = operand->use_list_;
515 if (first != NULL && first->value()->CheckFlag(kIsDead)) {
516 operand->use_list_ = first->tail();
517 }
518 }
519 }
520
521
SetBlock(HBasicBlock * block)522 void HValue::SetBlock(HBasicBlock* block) {
523 DCHECK(block_ == NULL || block == NULL);
524 block_ = block;
525 if (id_ == kNoNumber && block != NULL) {
526 id_ = block->graph()->GetNextValueID(this);
527 }
528 }
529
530
operator <<(OStream & os,const HValue & v)531 OStream& operator<<(OStream& os, const HValue& v) { return v.PrintTo(os); }
532
533
operator <<(OStream & os,const TypeOf & t)534 OStream& operator<<(OStream& os, const TypeOf& t) {
535 if (t.value->representation().IsTagged() &&
536 !t.value->type().Equals(HType::Tagged()))
537 return os;
538 return os << " type:" << t.value->type();
539 }
540
541
operator <<(OStream & os,const ChangesOf & c)542 OStream& operator<<(OStream& os, const ChangesOf& c) {
543 GVNFlagSet changes_flags = c.value->ChangesFlags();
544 if (changes_flags.IsEmpty()) return os;
545 os << " changes[";
546 if (changes_flags == c.value->AllSideEffectsFlagSet()) {
547 os << "*";
548 } else {
549 bool add_comma = false;
550 #define PRINT_DO(Type) \
551 if (changes_flags.Contains(k##Type)) { \
552 if (add_comma) os << ","; \
553 add_comma = true; \
554 os << #Type; \
555 }
556 GVN_TRACKED_FLAG_LIST(PRINT_DO);
557 GVN_UNTRACKED_FLAG_LIST(PRINT_DO);
558 #undef PRINT_DO
559 }
560 return os << "]";
561 }
562
563
HasMonomorphicJSObjectType()564 bool HValue::HasMonomorphicJSObjectType() {
565 return !GetMonomorphicJSObjectMap().is_null();
566 }
567
568
UpdateInferredType()569 bool HValue::UpdateInferredType() {
570 HType type = CalculateInferredType();
571 bool result = (!type.Equals(type_));
572 type_ = type;
573 return result;
574 }
575
576
RegisterUse(int index,HValue * new_value)577 void HValue::RegisterUse(int index, HValue* new_value) {
578 HValue* old_value = OperandAt(index);
579 if (old_value == new_value) return;
580
581 HUseListNode* removed = NULL;
582 if (old_value != NULL) {
583 removed = old_value->RemoveUse(this, index);
584 }
585
586 if (new_value != NULL) {
587 if (removed == NULL) {
588 new_value->use_list_ = new(new_value->block()->zone()) HUseListNode(
589 this, index, new_value->use_list_);
590 } else {
591 removed->set_tail(new_value->use_list_);
592 new_value->use_list_ = removed;
593 }
594 }
595 }
596
597
AddNewRange(Range * r,Zone * zone)598 void HValue::AddNewRange(Range* r, Zone* zone) {
599 if (!HasRange()) ComputeInitialRange(zone);
600 if (!HasRange()) range_ = new(zone) Range();
601 DCHECK(HasRange());
602 r->StackUpon(range_);
603 range_ = r;
604 }
605
606
RemoveLastAddedRange()607 void HValue::RemoveLastAddedRange() {
608 DCHECK(HasRange());
609 DCHECK(range_->next() != NULL);
610 range_ = range_->next();
611 }
612
613
ComputeInitialRange(Zone * zone)614 void HValue::ComputeInitialRange(Zone* zone) {
615 DCHECK(!HasRange());
616 range_ = InferRange(zone);
617 DCHECK(HasRange());
618 }
619
620
operator <<(OStream & os,const HSourcePosition & p)621 OStream& operator<<(OStream& os, const HSourcePosition& p) {
622 if (p.IsUnknown()) {
623 return os << "<?>";
624 } else if (FLAG_hydrogen_track_positions) {
625 return os << "<" << p.inlining_id() << ":" << p.position() << ">";
626 } else {
627 return os << "<0:" << p.raw() << ">";
628 }
629 }
630
631
PrintTo(OStream & os) const632 OStream& HInstruction::PrintTo(OStream& os) const { // NOLINT
633 os << Mnemonic() << " ";
634 PrintDataTo(os) << ChangesOf(this) << TypeOf(this);
635 if (CheckFlag(HValue::kHasNoObservableSideEffects)) os << " [noOSE]";
636 if (CheckFlag(HValue::kIsDead)) os << " [dead]";
637 return os;
638 }
639
640
PrintDataTo(OStream & os) const641 OStream& HInstruction::PrintDataTo(OStream& os) const { // NOLINT
642 for (int i = 0; i < OperandCount(); ++i) {
643 if (i > 0) os << " ";
644 os << NameOf(OperandAt(i));
645 }
646 return os;
647 }
648
649
Unlink()650 void HInstruction::Unlink() {
651 DCHECK(IsLinked());
652 DCHECK(!IsControlInstruction()); // Must never move control instructions.
653 DCHECK(!IsBlockEntry()); // Doesn't make sense to delete these.
654 DCHECK(previous_ != NULL);
655 previous_->next_ = next_;
656 if (next_ == NULL) {
657 DCHECK(block()->last() == this);
658 block()->set_last(previous_);
659 } else {
660 next_->previous_ = previous_;
661 }
662 clear_block();
663 }
664
665
InsertBefore(HInstruction * next)666 void HInstruction::InsertBefore(HInstruction* next) {
667 DCHECK(!IsLinked());
668 DCHECK(!next->IsBlockEntry());
669 DCHECK(!IsControlInstruction());
670 DCHECK(!next->block()->IsStartBlock());
671 DCHECK(next->previous_ != NULL);
672 HInstruction* prev = next->previous();
673 prev->next_ = this;
674 next->previous_ = this;
675 next_ = next;
676 previous_ = prev;
677 SetBlock(next->block());
678 if (!has_position() && next->has_position()) {
679 set_position(next->position());
680 }
681 }
682
683
InsertAfter(HInstruction * previous)684 void HInstruction::InsertAfter(HInstruction* previous) {
685 DCHECK(!IsLinked());
686 DCHECK(!previous->IsControlInstruction());
687 DCHECK(!IsControlInstruction() || previous->next_ == NULL);
688 HBasicBlock* block = previous->block();
689 // Never insert anything except constants into the start block after finishing
690 // it.
691 if (block->IsStartBlock() && block->IsFinished() && !IsConstant()) {
692 DCHECK(block->end()->SecondSuccessor() == NULL);
693 InsertAfter(block->end()->FirstSuccessor()->first());
694 return;
695 }
696
697 // If we're inserting after an instruction with side-effects that is
698 // followed by a simulate instruction, we need to insert after the
699 // simulate instruction instead.
700 HInstruction* next = previous->next_;
701 if (previous->HasObservableSideEffects() && next != NULL) {
702 DCHECK(next->IsSimulate());
703 previous = next;
704 next = previous->next_;
705 }
706
707 previous_ = previous;
708 next_ = next;
709 SetBlock(block);
710 previous->next_ = this;
711 if (next != NULL) next->previous_ = this;
712 if (block->last() == previous) {
713 block->set_last(this);
714 }
715 if (!has_position() && previous->has_position()) {
716 set_position(previous->position());
717 }
718 }
719
720
Dominates(HInstruction * other)721 bool HInstruction::Dominates(HInstruction* other) {
722 if (block() != other->block()) {
723 return block()->Dominates(other->block());
724 }
725 // Both instructions are in the same basic block. This instruction
726 // should precede the other one in order to dominate it.
727 for (HInstruction* instr = next(); instr != NULL; instr = instr->next()) {
728 if (instr == other) {
729 return true;
730 }
731 }
732 return false;
733 }
734
735
#ifdef DEBUG
// Debug-only consistency checks: operands dominate this instruction,
// observable side effects are followed by a simulate, GVN-eligible
// instructions override DataEquals, and all uses are linked.
void HInstruction::Verify() {
  // Verify that input operands are defined before use.
  HBasicBlock* cur_block = block();
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* other_operand = OperandAt(i);
    if (other_operand == NULL) continue;
    HBasicBlock* other_block = other_operand->block();
    if (cur_block == other_block) {
      if (!other_operand->IsPhi()) {
        HInstruction* cur = this->previous();
        while (cur != NULL) {
          if (cur == other_operand) break;
          cur = cur->previous();
        }
        // Must reach other operand in the same block!
        DCHECK(cur == other_operand);
      }
    } else {
      // If the following assert fires, you may have forgotten an
      // AddInstruction.
      DCHECK(other_block->Dominates(cur_block));
    }
  }

  // Verify that instructions that may have side-effects are followed
  // by a simulate instruction.
  if (HasObservableSideEffects() && !IsOsrEntry()) {
    DCHECK(next()->IsSimulate());
  }

  // Verify that instructions that can be eliminated by GVN have overridden
  // HValue::DataEquals. The default implementation is UNREACHABLE. We
  // don't actually care whether DataEquals returns true or false here.
  if (CheckFlag(kUseGVN)) DataEquals(this);

  // Verify that all uses are in the graph.
  for (HUseIterator use = uses(); !use.Done(); use.Advance()) {
    if (use.value()->IsInstruction()) {
      DCHECK(HInstruction::cast(use.value())->IsLinked());
    }
  }
}
#endif
780
781
CanDeoptimize()782 bool HInstruction::CanDeoptimize() {
783 // TODO(titzer): make this a virtual method?
784 switch (opcode()) {
785 case HValue::kAbnormalExit:
786 case HValue::kAccessArgumentsAt:
787 case HValue::kAllocate:
788 case HValue::kArgumentsElements:
789 case HValue::kArgumentsLength:
790 case HValue::kArgumentsObject:
791 case HValue::kBlockEntry:
792 case HValue::kBoundsCheckBaseIndexInformation:
793 case HValue::kCallFunction:
794 case HValue::kCallNew:
795 case HValue::kCallNewArray:
796 case HValue::kCallStub:
797 case HValue::kCallWithDescriptor:
798 case HValue::kCapturedObject:
799 case HValue::kClassOfTestAndBranch:
800 case HValue::kCompareGeneric:
801 case HValue::kCompareHoleAndBranch:
802 case HValue::kCompareMap:
803 case HValue::kCompareMinusZeroAndBranch:
804 case HValue::kCompareNumericAndBranch:
805 case HValue::kCompareObjectEqAndBranch:
806 case HValue::kConstant:
807 case HValue::kConstructDouble:
808 case HValue::kContext:
809 case HValue::kDebugBreak:
810 case HValue::kDeclareGlobals:
811 case HValue::kDoubleBits:
812 case HValue::kDummyUse:
813 case HValue::kEnterInlined:
814 case HValue::kEnvironmentMarker:
815 case HValue::kForceRepresentation:
816 case HValue::kGetCachedArrayIndex:
817 case HValue::kGoto:
818 case HValue::kHasCachedArrayIndexAndBranch:
819 case HValue::kHasInstanceTypeAndBranch:
820 case HValue::kInnerAllocatedObject:
821 case HValue::kInstanceOf:
822 case HValue::kInstanceOfKnownGlobal:
823 case HValue::kIsConstructCallAndBranch:
824 case HValue::kIsObjectAndBranch:
825 case HValue::kIsSmiAndBranch:
826 case HValue::kIsStringAndBranch:
827 case HValue::kIsUndetectableAndBranch:
828 case HValue::kLeaveInlined:
829 case HValue::kLoadFieldByIndex:
830 case HValue::kLoadGlobalGeneric:
831 case HValue::kLoadNamedField:
832 case HValue::kLoadNamedGeneric:
833 case HValue::kLoadRoot:
834 case HValue::kMapEnumLength:
835 case HValue::kMathMinMax:
836 case HValue::kParameter:
837 case HValue::kPhi:
838 case HValue::kPushArguments:
839 case HValue::kRegExpLiteral:
840 case HValue::kReturn:
841 case HValue::kSeqStringGetChar:
842 case HValue::kStoreCodeEntry:
843 case HValue::kStoreFrameContext:
844 case HValue::kStoreKeyed:
845 case HValue::kStoreNamedField:
846 case HValue::kStoreNamedGeneric:
847 case HValue::kStringCharCodeAt:
848 case HValue::kStringCharFromCode:
849 case HValue::kTailCallThroughMegamorphicCache:
850 case HValue::kThisFunction:
851 case HValue::kTypeofIsAndBranch:
852 case HValue::kUnknownOSRValue:
853 case HValue::kUseConst:
854 return false;
855
856 case HValue::kAdd:
857 case HValue::kAllocateBlockContext:
858 case HValue::kApplyArguments:
859 case HValue::kBitwise:
860 case HValue::kBoundsCheck:
861 case HValue::kBranch:
862 case HValue::kCallJSFunction:
863 case HValue::kCallRuntime:
864 case HValue::kChange:
865 case HValue::kCheckHeapObject:
866 case HValue::kCheckInstanceType:
867 case HValue::kCheckMapValue:
868 case HValue::kCheckMaps:
869 case HValue::kCheckSmi:
870 case HValue::kCheckValue:
871 case HValue::kClampToUint8:
872 case HValue::kDateField:
873 case HValue::kDeoptimize:
874 case HValue::kDiv:
875 case HValue::kForInCacheArray:
876 case HValue::kForInPrepareMap:
877 case HValue::kFunctionLiteral:
878 case HValue::kInvokeFunction:
879 case HValue::kLoadContextSlot:
880 case HValue::kLoadFunctionPrototype:
881 case HValue::kLoadGlobalCell:
882 case HValue::kLoadKeyed:
883 case HValue::kLoadKeyedGeneric:
884 case HValue::kMathFloorOfDiv:
885 case HValue::kMod:
886 case HValue::kMul:
887 case HValue::kOsrEntry:
888 case HValue::kPower:
889 case HValue::kRor:
890 case HValue::kSar:
891 case HValue::kSeqStringSetChar:
892 case HValue::kShl:
893 case HValue::kShr:
894 case HValue::kSimulate:
895 case HValue::kStackCheck:
896 case HValue::kStoreContextSlot:
897 case HValue::kStoreGlobalCell:
898 case HValue::kStoreKeyedGeneric:
899 case HValue::kStringAdd:
900 case HValue::kStringCompareAndBranch:
901 case HValue::kSub:
902 case HValue::kToFastProperties:
903 case HValue::kTransitionElementsKind:
904 case HValue::kTrapAllocationMemento:
905 case HValue::kTypeof:
906 case HValue::kUnaryMathOperation:
907 case HValue::kWrapReceiver:
908 return true;
909 }
910 UNREACHABLE();
911 return true;
912 }
913
914
operator <<(OStream & os,const NameOf & v)915 OStream& operator<<(OStream& os, const NameOf& v) {
916 return os << v.value->representation().Mnemonic() << v.value->id();
917 }
918
PrintDataTo(OStream & os) const919 OStream& HDummyUse::PrintDataTo(OStream& os) const { // NOLINT
920 return os << NameOf(value());
921 }
922
923
PrintDataTo(OStream & os) const924 OStream& HEnvironmentMarker::PrintDataTo(OStream& os) const { // NOLINT
925 return os << (kind() == BIND ? "bind" : "lookup") << " var[" << index()
926 << "]";
927 }
928
929
PrintDataTo(OStream & os) const930 OStream& HUnaryCall::PrintDataTo(OStream& os) const { // NOLINT
931 return os << NameOf(value()) << " #" << argument_count();
932 }
933
934
PrintDataTo(OStream & os) const935 OStream& HCallJSFunction::PrintDataTo(OStream& os) const { // NOLINT
936 return os << NameOf(function()) << " #" << argument_count();
937 }
938
939
New(Zone * zone,HValue * context,HValue * function,int argument_count,bool pass_argument_count)940 HCallJSFunction* HCallJSFunction::New(
941 Zone* zone,
942 HValue* context,
943 HValue* function,
944 int argument_count,
945 bool pass_argument_count) {
946 bool has_stack_check = false;
947 if (function->IsConstant()) {
948 HConstant* fun_const = HConstant::cast(function);
949 Handle<JSFunction> jsfun =
950 Handle<JSFunction>::cast(fun_const->handle(zone->isolate()));
951 has_stack_check = !jsfun.is_null() &&
952 (jsfun->code()->kind() == Code::FUNCTION ||
953 jsfun->code()->kind() == Code::OPTIMIZED_FUNCTION);
954 }
955
956 return new(zone) HCallJSFunction(
957 function, argument_count, pass_argument_count,
958 has_stack_check);
959 }
960
961
PrintDataTo(OStream & os) const962 OStream& HBinaryCall::PrintDataTo(OStream& os) const { // NOLINT
963 return os << NameOf(first()) << " " << NameOf(second()) << " #"
964 << argument_count();
965 }
966
967
ApplyIndexChange()968 void HBoundsCheck::ApplyIndexChange() {
969 if (skip_check()) return;
970
971 DecompositionResult decomposition;
972 bool index_is_decomposable = index()->TryDecompose(&decomposition);
973 if (index_is_decomposable) {
974 DCHECK(decomposition.base() == base());
975 if (decomposition.offset() == offset() &&
976 decomposition.scale() == scale()) return;
977 } else {
978 return;
979 }
980
981 ReplaceAllUsesWith(index());
982
983 HValue* current_index = decomposition.base();
984 int actual_offset = decomposition.offset() + offset();
985 int actual_scale = decomposition.scale() + scale();
986
987 Zone* zone = block()->graph()->zone();
988 HValue* context = block()->graph()->GetInvalidContext();
989 if (actual_offset != 0) {
990 HConstant* add_offset = HConstant::New(zone, context, actual_offset);
991 add_offset->InsertBefore(this);
992 HInstruction* add = HAdd::New(zone, context,
993 current_index, add_offset);
994 add->InsertBefore(this);
995 add->AssumeRepresentation(index()->representation());
996 add->ClearFlag(kCanOverflow);
997 current_index = add;
998 }
999
1000 if (actual_scale != 0) {
1001 HConstant* sar_scale = HConstant::New(zone, context, actual_scale);
1002 sar_scale->InsertBefore(this);
1003 HInstruction* sar = HSar::New(zone, context,
1004 current_index, sar_scale);
1005 sar->InsertBefore(this);
1006 sar->AssumeRepresentation(index()->representation());
1007 current_index = sar;
1008 }
1009
1010 SetOperandAt(0, current_index);
1011
1012 base_ = NULL;
1013 offset_ = 0;
1014 scale_ = 0;
1015 }
1016
1017
PrintDataTo(OStream & os) const1018 OStream& HBoundsCheck::PrintDataTo(OStream& os) const { // NOLINT
1019 os << NameOf(index()) << " " << NameOf(length());
1020 if (base() != NULL && (offset() != 0 || scale() != 0)) {
1021 os << " base: ((";
1022 if (base() != index()) {
1023 os << NameOf(index());
1024 } else {
1025 os << "index";
1026 }
1027 os << " + " << offset() << ") >> " << scale() << ")";
1028 }
1029 if (skip_check()) os << " [DISABLED]";
1030 return os;
1031 }
1032
1033
InferRepresentation(HInferRepresentationPhase * h_infer)1034 void HBoundsCheck::InferRepresentation(HInferRepresentationPhase* h_infer) {
1035 DCHECK(CheckFlag(kFlexibleRepresentation));
1036 HValue* actual_index = index()->ActualValue();
1037 HValue* actual_length = length()->ActualValue();
1038 Representation index_rep = actual_index->representation();
1039 Representation length_rep = actual_length->representation();
1040 if (index_rep.IsTagged() && actual_index->type().IsSmi()) {
1041 index_rep = Representation::Smi();
1042 }
1043 if (length_rep.IsTagged() && actual_length->type().IsSmi()) {
1044 length_rep = Representation::Smi();
1045 }
1046 Representation r = index_rep.generalize(length_rep);
1047 if (r.is_more_general_than(Representation::Integer32())) {
1048 r = Representation::Integer32();
1049 }
1050 UpdateRepresentation(r, h_infer, "boundscheck");
1051 }
1052
1053
InferRange(Zone * zone)1054 Range* HBoundsCheck::InferRange(Zone* zone) {
1055 Representation r = representation();
1056 if (r.IsSmiOrInteger32() && length()->HasRange()) {
1057 int upper = length()->range()->upper() - (allow_equality() ? 0 : 1);
1058 int lower = 0;
1059
1060 Range* result = new(zone) Range(lower, upper);
1061 if (index()->HasRange()) {
1062 result->Intersect(index()->range());
1063 }
1064
1065 // In case of Smi representation, clamp result to Smi::kMaxValue.
1066 if (r.IsSmi()) result->ClampToSmi();
1067 return result;
1068 }
1069 return HValue::InferRange(zone);
1070 }
1071
1072
OStream& HBoundsCheckBaseIndexInformation::PrintDataTo(
    OStream& os) const {  // NOLINT
  // TODO(svenpanne) This 2nd base_index() looks wrong...
  // NOTE(review): the "check:" part probably should print the associated
  // bounds-check operand rather than base_index() again -- confirm against
  // the class definition in hydrogen-instructions.h.
  return os << "base: " << NameOf(base_index())
            << ", check: " << NameOf(base_index());
}
1079
1080
PrintDataTo(OStream & os) const1081 OStream& HCallWithDescriptor::PrintDataTo(OStream& os) const { // NOLINT
1082 for (int i = 0; i < OperandCount(); i++) {
1083 os << NameOf(OperandAt(i)) << " ";
1084 }
1085 return os << "#" << argument_count();
1086 }
1087
1088
PrintDataTo(OStream & os) const1089 OStream& HCallNewArray::PrintDataTo(OStream& os) const { // NOLINT
1090 os << ElementsKindToString(elements_kind()) << " ";
1091 return HBinaryCall::PrintDataTo(os);
1092 }
1093
1094
PrintDataTo(OStream & os) const1095 OStream& HCallRuntime::PrintDataTo(OStream& os) const { // NOLINT
1096 os << name()->ToCString().get() << " ";
1097 if (save_doubles() == kSaveFPRegs) os << "[save doubles] ";
1098 return os << "#" << argument_count();
1099 }
1100
1101
PrintDataTo(OStream & os) const1102 OStream& HClassOfTestAndBranch::PrintDataTo(OStream& os) const { // NOLINT
1103 return os << "class_of_test(" << NameOf(value()) << ", \""
1104 << class_name()->ToCString().get() << "\")";
1105 }
1106
1107
PrintDataTo(OStream & os) const1108 OStream& HWrapReceiver::PrintDataTo(OStream& os) const { // NOLINT
1109 return os << NameOf(receiver()) << " " << NameOf(function());
1110 }
1111
1112
PrintDataTo(OStream & os) const1113 OStream& HAccessArgumentsAt::PrintDataTo(OStream& os) const { // NOLINT
1114 return os << NameOf(arguments()) << "[" << NameOf(index()) << "], length "
1115 << NameOf(length());
1116 }
1117
1118
PrintDataTo(OStream & os) const1119 OStream& HAllocateBlockContext::PrintDataTo(OStream& os) const { // NOLINT
1120 return os << NameOf(context()) << " " << NameOf(function());
1121 }
1122
1123
PrintDataTo(OStream & os) const1124 OStream& HControlInstruction::PrintDataTo(OStream& os) const { // NOLINT
1125 os << " goto (";
1126 bool first_block = true;
1127 for (HSuccessorIterator it(this); !it.Done(); it.Advance()) {
1128 if (!first_block) os << ", ";
1129 os << *it.Current();
1130 first_block = false;
1131 }
1132 return os << ")";
1133 }
1134
1135
PrintDataTo(OStream & os) const1136 OStream& HUnaryControlInstruction::PrintDataTo(OStream& os) const { // NOLINT
1137 os << NameOf(value());
1138 return HControlInstruction::PrintDataTo(os);
1139 }
1140
1141
PrintDataTo(OStream & os) const1142 OStream& HReturn::PrintDataTo(OStream& os) const { // NOLINT
1143 return os << NameOf(value()) << " (pop " << NameOf(parameter_count())
1144 << " values)";
1145 }
1146
1147
// Derives the representation observed for the branch condition from the
// recorded ToBoolean type feedback.
Representation HBranch::observed_input_representation(int index) {
  static const ToBooleanStub::Types tagged_types(
      ToBooleanStub::NULL_TYPE |
      ToBooleanStub::SPEC_OBJECT |
      ToBooleanStub::STRING |
      ToBooleanStub::SYMBOL);
  // Any of these feedback types requires a tagged value.
  if (expected_input_types_.ContainsAnyOf(tagged_types)) {
    return Representation::Tagged();
  }
  if (expected_input_types_.Contains(ToBooleanStub::UNDEFINED)) {
    // Undefined combined with heap numbers is handled as double
    // (presumably undefined maps to NaN -- see the ToBoolean stub);
    // undefined alone still needs a tagged value.
    if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
      return Representation::Double();
    }
    return Representation::Tagged();
  }
  if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
    return Representation::Double();
  }
  if (expected_input_types_.Contains(ToBooleanStub::SMI)) {
    return Representation::Smi();
  }
  // No feedback recorded.
  return Representation::None();
}
1171
1172
KnownSuccessorBlock(HBasicBlock ** block)1173 bool HBranch::KnownSuccessorBlock(HBasicBlock** block) {
1174 HValue* value = this->value();
1175 if (value->EmitAtUses()) {
1176 DCHECK(value->IsConstant());
1177 DCHECK(!value->representation().IsDouble());
1178 *block = HConstant::cast(value)->BooleanValue()
1179 ? FirstSuccessor()
1180 : SecondSuccessor();
1181 return true;
1182 }
1183 *block = NULL;
1184 return false;
1185 }
1186
1187
PrintDataTo(OStream & os) const1188 OStream& HBranch::PrintDataTo(OStream& os) const { // NOLINT
1189 return HUnaryControlInstruction::PrintDataTo(os) << " "
1190 << expected_input_types();
1191 }
1192
1193
PrintDataTo(OStream & os) const1194 OStream& HCompareMap::PrintDataTo(OStream& os) const { // NOLINT
1195 os << NameOf(value()) << " (" << *map().handle() << ")";
1196 HControlInstruction::PrintDataTo(os);
1197 if (known_successor_index() == 0) {
1198 os << " [true]";
1199 } else if (known_successor_index() == 1) {
1200 os << " [false]";
1201 }
1202 return os;
1203 }
1204
1205
OpName() const1206 const char* HUnaryMathOperation::OpName() const {
1207 switch (op()) {
1208 case kMathFloor:
1209 return "floor";
1210 case kMathFround:
1211 return "fround";
1212 case kMathRound:
1213 return "round";
1214 case kMathAbs:
1215 return "abs";
1216 case kMathLog:
1217 return "log";
1218 case kMathExp:
1219 return "exp";
1220 case kMathSqrt:
1221 return "sqrt";
1222 case kMathPowHalf:
1223 return "pow-half";
1224 case kMathClz32:
1225 return "clz32";
1226 default:
1227 UNREACHABLE();
1228 return NULL;
1229 }
1230 }
1231
1232
// Infers a value range for the math operation where one is statically
// known: clz32 always yields [0, 32], and abs yields a non-negative range
// derived from the operand's range.
Range* HUnaryMathOperation::InferRange(Zone* zone) {
  Representation r = representation();
  if (op() == kMathClz32) return new(zone) Range(0, 32);
  if (r.IsSmiOrInteger32() && value()->HasRange()) {
    if (op() == kMathAbs) {
      int upper = value()->range()->upper();
      int lower = value()->range()->lower();
      bool spans_zero = value()->range()->CanBeZero();
      // Math.abs(kMinInt) overflows its representation, on which the
      // instruction deopts. Hence clamp it to kMaxInt.
      int abs_upper = upper == kMinInt ? kMaxInt : abs(upper);
      int abs_lower = lower == kMinInt ? kMaxInt : abs(lower);
      // If the input range spans zero the result's lower bound is 0,
      // otherwise the smaller of the two absolute bounds.
      Range* result =
          new(zone) Range(spans_zero ? 0 : Min(abs_lower, abs_upper),
                          Max(abs_lower, abs_upper));
      // In case of Smi representation, clamp Math.abs(Smi::kMinValue) to
      // Smi::kMaxValue.
      if (r.IsSmi()) result->ClampToSmi();
      return result;
    }
  }
  return HValue::InferRange(zone);
}
1256
1257
PrintDataTo(OStream & os) const1258 OStream& HUnaryMathOperation::PrintDataTo(OStream& os) const { // NOLINT
1259 return os << OpName() << " " << NameOf(value());
1260 }
1261
1262
PrintDataTo(OStream & os) const1263 OStream& HUnaryOperation::PrintDataTo(OStream& os) const { // NOLINT
1264 return os << NameOf(value());
1265 }
1266
1267
// Prints the tested value and, for the well-known [from_, to_] instance
// type intervals, a symbolic name of the tested category.
OStream& HHasInstanceTypeAndBranch::PrintDataTo(OStream& os) const {  // NOLINT
  os << NameOf(value());
  switch (from_) {
    case FIRST_JS_RECEIVER_TYPE:
      if (to_ == LAST_TYPE) os << " spec_object";
      break;
    case JS_REGEXP_TYPE:
      if (to_ == JS_REGEXP_TYPE) os << " reg_exp";
      break;
    case JS_ARRAY_TYPE:
      if (to_ == JS_ARRAY_TYPE) os << " array";
      break;
    case JS_FUNCTION_TYPE:
      if (to_ == JS_FUNCTION_TYPE) os << " function";
      break;
    default:
      // Other intervals get no symbolic annotation.
      break;
  }
  return os;
}
1288
1289
PrintDataTo(OStream & os) const1290 OStream& HTypeofIsAndBranch::PrintDataTo(OStream& os) const { // NOLINT
1291 os << NameOf(value()) << " == " << type_literal()->ToCString().get();
1292 return HControlInstruction::PrintDataTo(os);
1293 }
1294
1295
// Evaluates JavaScript `typeof` for a compile-time constant, returning the
// interned result string from the heap.
static String* TypeOfString(HConstant* constant, Isolate* isolate) {
  Heap* heap = isolate->heap();
  if (constant->HasNumberValue()) return heap->number_string();
  // Undetectable objects (e.g. document.all) report "undefined".
  if (constant->IsUndetectable()) return heap->undefined_string();
  if (constant->HasStringValue()) return heap->string_string();
  switch (constant->GetInstanceType()) {
    case ODDBALL_TYPE: {
      Unique<Object> unique = constant->GetUnique();
      if (unique.IsKnownGlobal(heap->true_value()) ||
          unique.IsKnownGlobal(heap->false_value())) {
        return heap->boolean_string();
      }
      // Per ES spec, typeof null is "object".
      if (unique.IsKnownGlobal(heap->null_value())) {
        return heap->object_string();
      }
      // The only remaining oddball reaching here is undefined.
      DCHECK(unique.IsKnownGlobal(heap->undefined_value()));
      return heap->undefined_string();
    }
    case SYMBOL_TYPE:
      return heap->symbol_string();
    case JS_FUNCTION_TYPE:
    case JS_FUNCTION_PROXY_TYPE:
      return heap->function_string();
    default:
      return heap->object_string();
  }
}
1323
1324
KnownSuccessorBlock(HBasicBlock ** block)1325 bool HTypeofIsAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
1326 if (FLAG_fold_constants && value()->IsConstant()) {
1327 HConstant* constant = HConstant::cast(value());
1328 String* type_string = TypeOfString(constant, isolate());
1329 bool same_type = type_literal_.IsKnownGlobal(type_string);
1330 *block = same_type ? FirstSuccessor() : SecondSuccessor();
1331 return true;
1332 } else if (value()->representation().IsSpecialization()) {
1333 bool number_type =
1334 type_literal_.IsKnownGlobal(isolate()->heap()->number_string());
1335 *block = number_type ? FirstSuccessor() : SecondSuccessor();
1336 return true;
1337 }
1338 *block = NULL;
1339 return false;
1340 }
1341
1342
PrintDataTo(OStream & os) const1343 OStream& HCheckMapValue::PrintDataTo(OStream& os) const { // NOLINT
1344 return os << NameOf(value()) << " " << NameOf(map());
1345 }
1346
1347
Canonicalize()1348 HValue* HCheckMapValue::Canonicalize() {
1349 if (map()->IsConstant()) {
1350 HConstant* c_map = HConstant::cast(map());
1351 return HCheckMaps::CreateAndInsertAfter(
1352 block()->graph()->zone(), value(), c_map->MapValue(),
1353 c_map->HasStableMapValue(), this);
1354 }
1355 return this;
1356 }
1357
1358
PrintDataTo(OStream & os) const1359 OStream& HForInPrepareMap::PrintDataTo(OStream& os) const { // NOLINT
1360 return os << NameOf(enumerable());
1361 }
1362
1363
PrintDataTo(OStream & os) const1364 OStream& HForInCacheArray::PrintDataTo(OStream& os) const { // NOLINT
1365 return os << NameOf(enumerable()) << " " << NameOf(map()) << "[" << idx_
1366 << "]";
1367 }
1368
1369
PrintDataTo(OStream & os) const1370 OStream& HLoadFieldByIndex::PrintDataTo(OStream& os) const { // NOLINT
1371 return os << NameOf(object()) << " " << NameOf(index());
1372 }
1373
1374
MatchLeftIsOnes(HValue * l,HValue * r,HValue ** negated)1375 static bool MatchLeftIsOnes(HValue* l, HValue* r, HValue** negated) {
1376 if (!l->EqualsInteger32Constant(~0)) return false;
1377 *negated = r;
1378 return true;
1379 }
1380
1381
MatchNegationViaXor(HValue * instr,HValue ** negated)1382 static bool MatchNegationViaXor(HValue* instr, HValue** negated) {
1383 if (!instr->IsBitwise()) return false;
1384 HBitwise* b = HBitwise::cast(instr);
1385 return (b->op() == Token::BIT_XOR) &&
1386 (MatchLeftIsOnes(b->left(), b->right(), negated) ||
1387 MatchLeftIsOnes(b->right(), b->left(), negated));
1388 }
1389
1390
MatchDoubleNegation(HValue * instr,HValue ** arg)1391 static bool MatchDoubleNegation(HValue* instr, HValue** arg) {
1392 HValue* negated;
1393 return MatchNegationViaXor(instr, &negated) &&
1394 MatchNegationViaXor(negated, arg);
1395 }
1396
1397
Canonicalize()1398 HValue* HBitwise::Canonicalize() {
1399 if (!representation().IsSmiOrInteger32()) return this;
1400 // If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x.
1401 int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0;
1402 if (left()->EqualsInteger32Constant(nop_constant) &&
1403 !right()->CheckFlag(kUint32)) {
1404 return right();
1405 }
1406 if (right()->EqualsInteger32Constant(nop_constant) &&
1407 !left()->CheckFlag(kUint32)) {
1408 return left();
1409 }
1410 // Optimize double negation, a common pattern used for ToInt32(x).
1411 HValue* arg;
1412 if (MatchDoubleNegation(this, &arg) && !arg->CheckFlag(kUint32)) {
1413 return arg;
1414 }
1415 return this;
1416 }
1417
1418
RepresentationFromInputs()1419 Representation HAdd::RepresentationFromInputs() {
1420 Representation left_rep = left()->representation();
1421 if (left_rep.IsExternal()) {
1422 return Representation::External();
1423 }
1424 return HArithmeticBinaryOperation::RepresentationFromInputs();
1425 }
1426
1427
RequiredInputRepresentation(int index)1428 Representation HAdd::RequiredInputRepresentation(int index) {
1429 if (index == 2) {
1430 Representation left_rep = left()->representation();
1431 if (left_rep.IsExternal()) {
1432 return Representation::Integer32();
1433 }
1434 }
1435 return HArithmeticBinaryOperation::RequiredInputRepresentation(index);
1436 }
1437
1438
IsIdentityOperation(HValue * arg1,HValue * arg2,int32_t identity)1439 static bool IsIdentityOperation(HValue* arg1, HValue* arg2, int32_t identity) {
1440 return arg1->representation().IsSpecialization() &&
1441 arg2->EqualsInteger32Constant(identity);
1442 }
1443
1444
Canonicalize()1445 HValue* HAdd::Canonicalize() {
1446 // Adding 0 is an identity operation except in case of -0: -0 + 0 = +0
1447 if (IsIdentityOperation(left(), right(), 0) &&
1448 !left()->representation().IsDouble()) { // Left could be -0.
1449 return left();
1450 }
1451 if (IsIdentityOperation(right(), left(), 0) &&
1452 !left()->representation().IsDouble()) { // Right could be -0.
1453 return right();
1454 }
1455 return this;
1456 }
1457
1458
Canonicalize()1459 HValue* HSub::Canonicalize() {
1460 if (IsIdentityOperation(left(), right(), 0)) return left();
1461 return this;
1462 }
1463
1464
Canonicalize()1465 HValue* HMul::Canonicalize() {
1466 if (IsIdentityOperation(left(), right(), 1)) return left();
1467 if (IsIdentityOperation(right(), left(), 1)) return right();
1468 return this;
1469 }
1470
1471
MulMinusOne()1472 bool HMul::MulMinusOne() {
1473 if (left()->EqualsInteger32Constant(-1) ||
1474 right()->EqualsInteger32Constant(-1)) {
1475 return true;
1476 }
1477
1478 return false;
1479 }
1480
1481
Canonicalize()1482 HValue* HMod::Canonicalize() {
1483 return this;
1484 }
1485
1486
Canonicalize()1487 HValue* HDiv::Canonicalize() {
1488 if (IsIdentityOperation(left(), right(), 1)) return left();
1489 return this;
1490 }
1491
1492
Canonicalize()1493 HValue* HChange::Canonicalize() {
1494 return (from().Equals(to())) ? value() : this;
1495 }
1496
1497
Canonicalize()1498 HValue* HWrapReceiver::Canonicalize() {
1499 if (HasNoUses()) return NULL;
1500 if (receiver()->type().IsJSObject()) {
1501 return receiver();
1502 }
1503 return this;
1504 }
1505
1506
PrintDataTo(OStream & os) const1507 OStream& HTypeof::PrintDataTo(OStream& os) const { // NOLINT
1508 return os << NameOf(value());
1509 }
1510
1511
New(Zone * zone,HValue * context,HValue * value,Representation representation)1512 HInstruction* HForceRepresentation::New(Zone* zone, HValue* context,
1513 HValue* value, Representation representation) {
1514 if (FLAG_fold_constants && value->IsConstant()) {
1515 HConstant* c = HConstant::cast(value);
1516 c = c->CopyToRepresentation(representation, zone);
1517 if (c != NULL) return c;
1518 }
1519 return new(zone) HForceRepresentation(value, representation);
1520 }
1521
1522
PrintDataTo(OStream & os) const1523 OStream& HForceRepresentation::PrintDataTo(OStream& os) const { // NOLINT
1524 return os << representation().Mnemonic() << " " << NameOf(value());
1525 }
1526
1527
// Prints the operand, the source/target representations, and any
// truncation or deopt-related flags set on this change.
OStream& HChange::PrintDataTo(OStream& os) const {  // NOLINT
  HUnaryOperation::PrintDataTo(os);
  os << " " << from().Mnemonic() << " to " << to().Mnemonic();

  if (CanTruncateToSmi()) os << " truncating-smi";
  if (CanTruncateToInt32()) os << " truncating-int32";
  if (CheckFlag(kBailoutOnMinusZero)) os << " -0?";
  if (CheckFlag(kAllowUndefinedAsNaN)) os << " allow-undefined-as-nan";
  return os;
}
1538
1539
// Strength reduction for round/floor: a round/floor of an integer value
// is the value itself, and floor(a / b) on int32 inputs becomes the
// dedicated HMathFloorOfDiv instruction.
HValue* HUnaryMathOperation::Canonicalize() {
  if (op() == kMathRound || op() == kMathFloor) {
    HValue* val = value();
    // Look through a representation change to the underlying value.
    if (val->IsChange()) val = HChange::cast(val)->value();
    if (val->representation().IsSmiOrInteger32()) {
      // Rounding an integer is a no-op; at most a representation change
      // is needed to match this instruction's representation.
      if (val->representation().Equals(representation())) return val;
      return Prepend(new(block()->zone()) HChange(
          val, representation(), false, false));
    }
  }
  if (op() == kMathFloor && value()->IsDiv() && value()->HasOneUse()) {
    HDiv* hdiv = HDiv::cast(value());

    // Obtain an Integer32 view of the dividend, or bail out.
    HValue* left = hdiv->left();
    if (left->representation().IsInteger32()) {
      // A value with an integer representation does not need to be transformed.
    } else if (left->IsChange() && HChange::cast(left)->from().IsInteger32()) {
      // A change from an integer32 can be replaced by the integer32 value.
      left = HChange::cast(left)->value();
    } else if (hdiv->observed_input_representation(1).IsSmiOrInteger32()) {
      left = Prepend(new(block()->zone()) HChange(
          left, Representation::Integer32(), false, false));
    } else {
      return this;
    }

    // Same for the divisor, with an extra shortcut for int32 constants.
    HValue* right = hdiv->right();
    if (right->IsInteger32Constant()) {
      right = Prepend(HConstant::cast(right)->CopyToRepresentation(
          Representation::Integer32(), right->block()->zone()));
    } else if (right->representation().IsInteger32()) {
      // A value with an integer representation does not need to be transformed.
    } else if (right->IsChange() &&
               HChange::cast(right)->from().IsInteger32()) {
      // A change from an integer32 can be replaced by the integer32 value.
      right = HChange::cast(right)->value();
    } else if (hdiv->observed_input_representation(2).IsSmiOrInteger32()) {
      right = Prepend(new(block()->zone()) HChange(
          right, Representation::Integer32(), false, false));
    } else {
      return this;
    }

    return Prepend(HMathFloorOfDiv::New(
        block()->zone(), context(), left, right));
  }
  return this;
}
1588
1589
Canonicalize()1590 HValue* HCheckInstanceType::Canonicalize() {
1591 if ((check_ == IS_SPEC_OBJECT && value()->type().IsJSObject()) ||
1592 (check_ == IS_JS_ARRAY && value()->type().IsJSArray()) ||
1593 (check_ == IS_STRING && value()->type().IsString())) {
1594 return value();
1595 }
1596
1597 if (check_ == IS_INTERNALIZED_STRING && value()->IsConstant()) {
1598 if (HConstant::cast(value())->HasInternalizedStringValue()) {
1599 return value();
1600 }
1601 }
1602 return this;
1603 }
1604
1605
GetCheckInterval(InstanceType * first,InstanceType * last)1606 void HCheckInstanceType::GetCheckInterval(InstanceType* first,
1607 InstanceType* last) {
1608 DCHECK(is_interval_check());
1609 switch (check_) {
1610 case IS_SPEC_OBJECT:
1611 *first = FIRST_SPEC_OBJECT_TYPE;
1612 *last = LAST_SPEC_OBJECT_TYPE;
1613 return;
1614 case IS_JS_ARRAY:
1615 *first = *last = JS_ARRAY_TYPE;
1616 return;
1617 default:
1618 UNREACHABLE();
1619 }
1620 }
1621
1622
GetCheckMaskAndTag(uint8_t * mask,uint8_t * tag)1623 void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
1624 DCHECK(!is_interval_check());
1625 switch (check_) {
1626 case IS_STRING:
1627 *mask = kIsNotStringMask;
1628 *tag = kStringTag;
1629 return;
1630 case IS_INTERNALIZED_STRING:
1631 *mask = kIsNotStringMask | kIsNotInternalizedMask;
1632 *tag = kInternalizedTag;
1633 return;
1634 default:
1635 UNREACHABLE();
1636 }
1637 }
1638
1639
PrintDataTo(OStream & os) const1640 OStream& HCheckMaps::PrintDataTo(OStream& os) const { // NOLINT
1641 os << NameOf(value()) << " [" << *maps()->at(0).handle();
1642 for (int i = 1; i < maps()->size(); ++i) {
1643 os << "," << *maps()->at(i).handle();
1644 }
1645 os << "]";
1646 if (IsStabilityCheck()) os << "(stability-check)";
1647 return os;
1648 }
1649
1650
// When the checked value is a constant whose map is in the (stable) map
// set, the explicit check can be downgraded to a stability check on that
// single map: it only deopts if the map later transitions.
HValue* HCheckMaps::Canonicalize() {
  if (!IsStabilityCheck() && maps_are_stable() && value()->IsConstant()) {
    HConstant* c_value = HConstant::cast(value());
    if (c_value->HasObjectMap()) {
      for (int i = 0; i < maps()->size(); ++i) {
        if (c_value->ObjectMap() == maps()->at(i)) {
          // Shrink the map set to the single matching map.
          if (maps()->size() > 1) {
            set_maps(new(block()->graph()->zone()) UniqueSet<Map>(
                         maps()->at(i), block()->graph()->zone()));
          }
          MarkAsStabilityCheck();
          break;
        }
      }
    }
  }
  return this;
}
1669
1670
PrintDataTo(OStream & os) const1671 OStream& HCheckValue::PrintDataTo(OStream& os) const { // NOLINT
1672 return os << NameOf(value()) << " " << Brief(*object().handle());
1673 }
1674
1675
Canonicalize()1676 HValue* HCheckValue::Canonicalize() {
1677 return (value()->IsConstant() &&
1678 HConstant::cast(value())->EqualsUnique(object_)) ? NULL : this;
1679 }
1680
1681
GetCheckName() const1682 const char* HCheckInstanceType::GetCheckName() const {
1683 switch (check_) {
1684 case IS_SPEC_OBJECT: return "object";
1685 case IS_JS_ARRAY: return "array";
1686 case IS_STRING: return "string";
1687 case IS_INTERNALIZED_STRING: return "internalized_string";
1688 }
1689 UNREACHABLE();
1690 return "";
1691 }
1692
1693
PrintDataTo(OStream & os) const1694 OStream& HCheckInstanceType::PrintDataTo(OStream& os) const { // NOLINT
1695 os << GetCheckName() << " ";
1696 return HUnaryOperation::PrintDataTo(os);
1697 }
1698
1699
PrintDataTo(OStream & os) const1700 OStream& HCallStub::PrintDataTo(OStream& os) const { // NOLINT
1701 os << CodeStub::MajorName(major_key_, false) << " ";
1702 return HUnaryCall::PrintDataTo(os);
1703 }
1704
1705
PrintDataTo(OStream & os) const1706 OStream& HTailCallThroughMegamorphicCache::PrintDataTo(
1707 OStream& os) const { // NOLINT
1708 for (int i = 0; i < OperandCount(); i++) {
1709 os << NameOf(OperandAt(i)) << " ";
1710 }
1711 return os << "flags: " << flags();
1712 }
1713
1714
PrintDataTo(OStream & os) const1715 OStream& HUnknownOSRValue::PrintDataTo(OStream& os) const { // NOLINT
1716 const char* type = "expression";
1717 if (environment_->is_local_index(index_)) type = "local";
1718 if (environment_->is_special_index(index_)) type = "special";
1719 if (environment_->is_parameter_index(index_)) type = "parameter";
1720 return os << type << " @ " << index_;
1721 }
1722
1723
PrintDataTo(OStream & os) const1724 OStream& HInstanceOf::PrintDataTo(OStream& os) const { // NOLINT
1725 return os << NameOf(left()) << " " << NameOf(right()) << " "
1726 << NameOf(context());
1727 }
1728
1729
// Default range inference: Smi-represented (or Smi-typed) values are
// bounded by the Smi range and cannot be -0; everything else gets an
// unbounded range whose -0 possibility depends on truncation of all uses.
Range* HValue::InferRange(Zone* zone) {
  Range* result;
  if (representation().IsSmi() || type().IsSmi()) {
    result = new(zone) Range(Smi::kMinValue, Smi::kMaxValue);
    result->set_can_be_minus_zero(false);
  } else {
    result = new(zone) Range();
    // -0 is impossible if every use truncates to int32.
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32));
    // TODO(jkummerow): The range cannot be minus zero when the upper type
    // bound is Integer32.
  }
  return result;
}
1743
1744
// Range inference for representation changes. Besides copying the input
// range, this tightens the result type to Smi and clears overflow /
// new-space-promotion flags when the input range proves they cannot occur.
Range* HChange::InferRange(Zone* zone) {
  Range* input_range = value()->range();
  // An int32 input that provably fits in a Smi produces a Smi, so the
  // conversion cannot allocate a heap number.
  if (from().IsInteger32() && !value()->CheckFlag(HInstruction::kUint32) &&
      (to().IsSmi() ||
       (to().IsTagged() &&
        input_range != NULL &&
        input_range->IsInSmiRange()))) {
    set_type(HType::Smi());
    ClearChangesFlag(kNewSpacePromotion);
  }
  if (to().IsSmiOrTagged() &&
      input_range != NULL &&
      input_range->IsInSmiRange() &&
      (!SmiValuesAre32Bits() ||
       !value()->CheckFlag(HValue::kUint32) ||
       input_range->upper() != kMaxInt)) {
    // The Range class can't express upper bounds in the (kMaxInt, kMaxUint32]
    // interval, so we treat kMaxInt as a sentinel for this entire interval.
    ClearFlag(kCanOverflow);
  }
  Range* result = (input_range != NULL)
      ? input_range->Copy(zone)
      : HValue::InferRange(zone);
  // -0 survives only into non-integer representations, and only when not
  // all uses truncate it away.
  result->set_can_be_minus_zero(!to().IsSmiOrInteger32() ||
                                !(CheckFlag(kAllUsesTruncatingToInt32) ||
                                  CheckFlag(kAllUsesTruncatingToSmi)));
  if (to().IsSmi()) result->ClampToSmi();
  return result;
}
1774
1775
InferRange(Zone * zone)1776 Range* HConstant::InferRange(Zone* zone) {
1777 if (has_int32_value_) {
1778 Range* result = new(zone) Range(int32_value_, int32_value_);
1779 result->set_can_be_minus_zero(false);
1780 return result;
1781 }
1782 return HValue::InferRange(zone);
1783 }
1784
1785
// A phi has no source position of its own; it borrows the position of the
// first instruction in its block.
HSourcePosition HPhi::position() const {
  return block()->first()->position();
}
1789
1790
// Range inference for phis: loop-header phis get the full representation
// range (their fixpoint is unknown here); other phis take the union of
// their operands' ranges.
Range* HPhi::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    if (block()->IsLoopHeader()) {
      Range* range = r.IsSmi()
          ? new(zone) Range(Smi::kMinValue, Smi::kMaxValue)
          : new(zone) Range(kMinInt, kMaxInt);
      return range;
    } else {
      Range* range = OperandAt(0)->range()->Copy(zone);
      for (int i = 1; i < OperandCount(); ++i) {
        range->Union(OperandAt(i)->range());
      }
      return range;
    }
  } else {
    return HValue::InferRange(zone);
  }
}
1810
1811
// Range inference for integer addition: adds the operand ranges and drops
// the overflow flag when the result provably cannot overflow (or when all
// uses truncate anyway).
Range* HAdd::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->AddAndCheckOverflow(r, b) ||
        (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
        (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
      ClearFlag(kCanOverflow);
    }
    // -0 + -0 is the only addition producing -0, and only matters if some
    // use does not truncate.
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               a->CanBeMinusZero() && b->CanBeMinusZero());
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}
1831
1832
// Range inference for integer subtraction, mirroring HAdd::InferRange.
Range* HSub::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->SubAndCheckOverflow(r, b) ||
        (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
        (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
      ClearFlag(kCanOverflow);
    }
    // -0 - 0 is the only subtraction producing -0.
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               a->CanBeMinusZero() && b->CanBeZero());
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}
1852
1853
// Range inference for integer multiplication. The overflow flag can only
// be dropped for truncating uses when a factor of -1 is involved (see the
// comment below); otherwise the precise overflow check decides.
Range* HMul::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->MulAndCheckOverflow(r, b) ||
        (((r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
         (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) &&
         MulMinusOne())) {
      // Truncated int multiplication is too precise and therefore not the
      // same as converting to Double and back.
      // Handle truncated integer multiplication by -1 special.
      ClearFlag(kCanOverflow);
    }
    // -0 arises from 0 * negative (in either operand order).
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               ((a->CanBeZero() && b->CanBeNegative()) ||
                                (a->CanBeNegative() && b->CanBeZero())));
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}
1878
1879
// Range inference for integer division. The result range stays unbounded;
// the value of this pass is clearing the overflow (kMinInt / -1) and
// div-by-zero flags when the operand ranges exclude those cases.
Range* HDiv::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* result = new(zone) Range();
    // -0 arises from -0 / x or 0 / negative.
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  (a->CanBeMinusZero() ||
                                   (a->CanBeZero() && b->CanBeNegative())));
    // kMinInt / -1 is the only overflowing int32 division.
    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}
1900
1901
// Range inference for floor(a / b) on int32 operands. Like HDiv, the
// payoff is clearing deopt-check flags (min-int dividend, dividend sign,
// overflow, div-by-zero) that the operand ranges rule out.
Range* HMathFloorOfDiv::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* result = new(zone) Range();
    // -0 arises from -0 / x or 0 / negative.
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  (a->CanBeMinusZero() ||
                                   (a->CanBeZero() && b->CanBeNegative())));
    if (!a->Includes(kMinInt)) {
      ClearFlag(kLeftCanBeMinInt);
    }

    if (!a->CanBeNegative()) {
      ClearFlag(HValue::kLeftCanBeNegative);
    }

    if (!a->CanBePositive()) {
      ClearFlag(HValue::kLeftCanBePositive);
    }

    // kMinInt / -1 is the only overflowing int32 division.
    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}
1934
1935
// Returns |a| - 1 without overflowing at kMinInt (whose plain negation
// would be undefined behavior).
static int32_t AbsMinus1(int32_t a) {
  if (a < 0) return -(a + 1);
  return a - 1;
}
1939
1940
// Range inference for integer modulus. The magnitude of a % b is at most
// |b| - 1, and the result takes the sign of a; deopt-check flags are
// cleared where the operand ranges exclude the corner cases.
Range* HMod::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();

    // The magnitude of the modulus is bounded by the right operand.
    int32_t positive_bound = Max(AbsMinus1(b->lower()), AbsMinus1(b->upper()));

    // The result of the modulo operation has the sign of its left operand.
    bool left_can_be_negative = a->CanBeMinusZero() || a->CanBeNegative();
    Range* result = new(zone) Range(left_can_be_negative ? -positive_bound : 0,
                                    a->CanBePositive() ? positive_bound : 0);

    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  left_can_be_negative);

    if (!a->CanBeNegative()) {
      ClearFlag(HValue::kLeftCanBeNegative);
    }

    // kMinInt % -1 is the only overflowing int32 modulus.
    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(HValue::kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(HValue::kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}
1973
1974
// Recognizes a loop induction variable: a two-operand phi in a loop where
// one operand is phi +/- a constant increment and the other is the
// initial value. Returns NULL when the phi does not match.
InductionVariableData* InductionVariableData::ExaminePhi(HPhi* phi) {
  if (phi->block()->loop_information() == NULL) return NULL;
  if (phi->OperandCount() != 2) return NULL;
  int32_t candidate_increment;

  // Try operand 0 as the increment side (operand 1 is then the base).
  candidate_increment = ComputeIncrement(phi, phi->OperandAt(0));
  if (candidate_increment != 0) {
    return new(phi->block()->graph()->zone())
        InductionVariableData(phi, phi->OperandAt(1), candidate_increment);
  }

  // Otherwise try the operands in the opposite roles.
  candidate_increment = ComputeIncrement(phi, phi->OperandAt(1));
  if (candidate_increment != 0) {
    return new(phi->block()->graph()->zone())
        InductionVariableData(phi, phi->OperandAt(0), candidate_increment);
  }

  return NULL;
}
1994
1995
/*
 * This function tries to match the following patterns (and all the relevant
 * variants related to |, & and + being commutative):
 * base | constant_or_mask
 * base & constant_and_mask
 * (base + constant_offset) & constant_and_mask
 * (base - constant_offset) & constant_and_mask
 */
void InductionVariableData::DecomposeBitwise(
    HValue* value,
    BitwiseDecompositionResult* result) {
  HValue* base = IgnoreOsrValue(value);
  // Default decomposition: the value itself, with no masks stripped.
  result->base = value;

  if (!base->representation().IsInteger32()) return;

  if (base->IsBitwise()) {
    bool allow_offset = false;
    int32_t mask = 0;

    // One side of the bitwise operation must be an integer constant (the
    // mask); the other side becomes the new base.
    HBitwise* bitwise = HBitwise::cast(base);
    if (bitwise->right()->IsInteger32Constant()) {
      mask = bitwise->right()->GetInteger32Constant();
      base = bitwise->left();
    } else if (bitwise->left()->IsInteger32Constant()) {
      mask = bitwise->left()->GetInteger32Constant();
      base = bitwise->right();
    } else {
      return;
    }
    // Only & and | are decomposed; a constant +/- offset under the mask is
    // only tolerated for AND masks.
    if (bitwise->op() == Token::BIT_AND) {
      result->and_mask = mask;
      allow_offset = true;
    } else if (bitwise->op() == Token::BIT_OR) {
      result->or_mask = mask;
    } else {
      return;
    }

    result->context = bitwise->context();

    if (allow_offset) {
      // Strip a constant offset: base + c, c + base (commutative) or
      // base - c (only with the constant on the right).
      if (base->IsAdd()) {
        HAdd* add = HAdd::cast(base);
        if (add->right()->IsInteger32Constant()) {
          base = add->left();
        } else if (add->left()->IsInteger32Constant()) {
          base = add->right();
        }
      } else if (base->IsSub()) {
        HSub* sub = HSub::cast(base);
        if (sub->right()->IsInteger32Constant()) {
          base = sub->left();
        }
      }
    }

    result->base = base;
  }
}
2056
2057
AddCheck(HBoundsCheck * check,int32_t upper_limit)2058 void InductionVariableData::AddCheck(HBoundsCheck* check,
2059 int32_t upper_limit) {
2060 DCHECK(limit_validity() != NULL);
2061 if (limit_validity() != check->block() &&
2062 !limit_validity()->Dominates(check->block())) return;
2063 if (!phi()->block()->current_loop()->IsNestedInThisLoop(
2064 check->block()->current_loop())) return;
2065
2066 ChecksRelatedToLength* length_checks = checks();
2067 while (length_checks != NULL) {
2068 if (length_checks->length() == check->length()) break;
2069 length_checks = length_checks->next();
2070 }
2071 if (length_checks == NULL) {
2072 length_checks = new(check->block()->zone())
2073 ChecksRelatedToLength(check->length(), checks());
2074 checks_ = length_checks;
2075 }
2076
2077 length_checks->AddCheck(check, upper_limit);
2078 }
2079
2080
CloseCurrentBlock()2081 void InductionVariableData::ChecksRelatedToLength::CloseCurrentBlock() {
2082 if (checks() != NULL) {
2083 InductionVariableCheck* c = checks();
2084 HBasicBlock* current_block = c->check()->block();
2085 while (c != NULL && c->check()->block() == current_block) {
2086 c->set_upper_limit(current_upper_limit_);
2087 c = c->next();
2088 }
2089 }
2090 }
2091
2092
// Rewrites the first bounds check of the current block so that it checks
// "index_base <token> mask" instead of its original index, creating the
// shared masked-index instruction on first use and retargeting it afterwards.
void InductionVariableData::ChecksRelatedToLength::UseNewIndexInCurrentBlock(
    Token::Value token,
    int32_t mask,
    HValue* index_base,
    HValue* context) {
  DCHECK(first_check_in_block() != NULL);
  HValue* previous_index = first_check_in_block()->index();
  DCHECK(context != NULL);

  Zone* zone = index_base->block()->graph()->zone();
  // The new mask constant must be emitted before the instruction using it.
  set_added_constant(HConstant::New(zone, context, mask));
  if (added_index() != NULL) {
    added_constant()->InsertBefore(added_index());
  } else {
    added_constant()->InsertBefore(first_check_in_block());
  }

  if (added_index() == NULL) {
    // First rewrite in this block: route the check's users to the raw index
    // and build the shared bitwise index computation.
    first_check_in_block()->ReplaceAllUsesWith(first_check_in_block()->index());
    HInstruction* new_index = HBitwise::New(zone, context, token, index_base,
                                            added_constant());
    DCHECK(new_index->IsBitwise());
    new_index->ClearAllSideEffects();
    new_index->AssumeRepresentation(Representation::Integer32());
    set_added_index(HBitwise::cast(new_index));
    added_index()->InsertBefore(first_check_in_block());
  }
  DCHECK(added_index()->op() == token);

  // Point the bitwise instruction at the new base and mask, and make the
  // check verify the masked index instead of the original one.
  added_index()->SetOperandAt(1, index_base);
  added_index()->SetOperandAt(2, added_constant());
  first_check_in_block()->SetOperandAt(0, added_index());
  if (previous_index->HasNoUses()) {
    previous_index->DeleteAndReplaceWith(NULL);
  }
}
2129
// Registers a bounds check in this length group. Within a basic block,
// later checks are subsumed into the first one by widening its AND/OR mask;
// checks that cannot be subsumed are recorded for later processing.
void InductionVariableData::ChecksRelatedToLength::AddCheck(
    HBoundsCheck* check,
    int32_t upper_limit) {
  BitwiseDecompositionResult decomposition;
  InductionVariableData::DecomposeBitwise(check->index(), &decomposition);

  // A check in a new block resets the per-block state.
  if (first_check_in_block() == NULL ||
      first_check_in_block()->block() != check->block()) {
    CloseCurrentBlock();

    first_check_in_block_ = check;
    set_added_index(NULL);
    set_added_constant(NULL);
    current_and_mask_in_block_ = decomposition.and_mask;
    current_or_mask_in_block_ = decomposition.or_mask;
    current_upper_limit_ = upper_limit;

    InductionVariableCheck* new_check = new(check->block()->graph()->zone())
        InductionVariableCheck(check, checks_, upper_limit);
    checks_ = new_check;
    return;
  }

  // Same block as the previous check: keep the largest limit seen so far.
  if (upper_limit > current_upper_limit()) {
    current_upper_limit_ = upper_limit;
  }

  // An AND-masked index is covered by widening the block's AND mask (only
  // when no OR mask is in play); the check is then skipped even when the
  // existing mask already covers it.
  if (decomposition.and_mask != 0 &&
      current_or_mask_in_block() == 0) {
    if (current_and_mask_in_block() == 0 ||
        decomposition.and_mask > current_and_mask_in_block()) {
      UseNewIndexInCurrentBlock(Token::BIT_AND,
                                decomposition.and_mask,
                                decomposition.base,
                                decomposition.context);
      current_and_mask_in_block_ = decomposition.and_mask;
    }
    check->set_skip_check();
  }
  // Symmetrically, grow the OR mask when no AND mask is active.
  if (current_and_mask_in_block() == 0) {
    if (decomposition.or_mask > current_or_mask_in_block()) {
      UseNewIndexInCurrentBlock(Token::BIT_OR,
                                decomposition.or_mask,
                                decomposition.base,
                                decomposition.context);
      current_or_mask_in_block_ = decomposition.or_mask;
    }
    check->set_skip_check();
  }

  // Checks that were not subsumed stay in the list for later processing.
  if (!check->skip_check()) {
    InductionVariableCheck* new_check = new(check->block()->graph()->zone())
        InductionVariableCheck(check, checks_, upper_limit);
    checks_ = new_check;
  }
}
2186
2187
2188 /*
2189 * This method detects if phi is an induction variable, with phi_operand as
2190 * its "incremented" value (the other operand would be the "base" value).
2191 *
2192 * It cheks is phi_operand has the form "phi + constant".
2193 * If yes, the constant is the increment that the induction variable gets at
2194 * every loop iteration.
2195 * Otherwise it returns 0.
2196 */
ComputeIncrement(HPhi * phi,HValue * phi_operand)2197 int32_t InductionVariableData::ComputeIncrement(HPhi* phi,
2198 HValue* phi_operand) {
2199 if (!phi_operand->representation().IsInteger32()) return 0;
2200
2201 if (phi_operand->IsAdd()) {
2202 HAdd* operation = HAdd::cast(phi_operand);
2203 if (operation->left() == phi &&
2204 operation->right()->IsInteger32Constant()) {
2205 return operation->right()->GetInteger32Constant();
2206 } else if (operation->right() == phi &&
2207 operation->left()->IsInteger32Constant()) {
2208 return operation->left()->GetInteger32Constant();
2209 }
2210 } else if (phi_operand->IsSub()) {
2211 HSub* operation = HSub::cast(phi_operand);
2212 if (operation->left() == phi &&
2213 operation->right()->IsInteger32Constant()) {
2214 return -operation->right()->GetInteger32Constant();
2215 }
2216 }
2217
2218 return 0;
2219 }
2220
2221
/*
 * Swaps the information in "update" with the one contained in "this".
 * The swapping is important because this method is used while doing a
 * dominator tree traversal, and "update" will retain the old data that
 * will be restored while backtracking.
 */
void InductionVariableData::UpdateAdditionalLimit(
    InductionVariableLimitUpdate* update) {
  DCHECK(update->updated_variable == this);
  if (update->limit_is_upper) {
    // Exchange both the limit value and its inclusiveness flag.
    swap(&additional_upper_limit_, &update->limit);
    swap(&additional_upper_limit_is_included_, &update->limit_is_included);
  } else {
    swap(&additional_lower_limit_, &update->limit);
    swap(&additional_lower_limit_is_included_, &update->limit_is_included);
  }
}
2239
2240
ComputeUpperLimit(int32_t and_mask,int32_t or_mask)2241 int32_t InductionVariableData::ComputeUpperLimit(int32_t and_mask,
2242 int32_t or_mask) {
2243 // Should be Smi::kMaxValue but it must fit 32 bits; lower is safe anyway.
2244 const int32_t MAX_LIMIT = 1 << 30;
2245
2246 int32_t result = MAX_LIMIT;
2247
2248 if (limit() != NULL &&
2249 limit()->IsInteger32Constant()) {
2250 int32_t limit_value = limit()->GetInteger32Constant();
2251 if (!limit_included()) {
2252 limit_value--;
2253 }
2254 if (limit_value < result) result = limit_value;
2255 }
2256
2257 if (additional_upper_limit() != NULL &&
2258 additional_upper_limit()->IsInteger32Constant()) {
2259 int32_t limit_value = additional_upper_limit()->GetInteger32Constant();
2260 if (!additional_upper_limit_is_included()) {
2261 limit_value--;
2262 }
2263 if (limit_value < result) result = limit_value;
2264 }
2265
2266 if (and_mask > 0 && and_mask < MAX_LIMIT) {
2267 if (and_mask < result) result = and_mask;
2268 return result;
2269 }
2270
2271 // Add the effect of the or_mask.
2272 result |= or_mask;
2273
2274 return result >= MAX_LIMIT ? kNoLimit : result;
2275 }
2276
2277
IgnoreOsrValue(HValue * v)2278 HValue* InductionVariableData::IgnoreOsrValue(HValue* v) {
2279 if (!v->IsPhi()) return v;
2280 HPhi* phi = HPhi::cast(v);
2281 if (phi->OperandCount() != 2) return v;
2282 if (phi->OperandAt(0)->block()->is_osr_entry()) {
2283 return phi->OperandAt(1);
2284 } else if (phi->OperandAt(1)->block()->is_osr_entry()) {
2285 return phi->OperandAt(0);
2286 } else {
2287 return v;
2288 }
2289 }
2290
2291
GetInductionVariableData(HValue * v)2292 InductionVariableData* InductionVariableData::GetInductionVariableData(
2293 HValue* v) {
2294 v = IgnoreOsrValue(v);
2295 if (v->IsPhi()) {
2296 return HPhi::cast(v)->induction_variable_data();
2297 }
2298 return NULL;
2299 }
2300
2301
2302 /*
2303 * Check if a conditional branch to "current_branch" with token "token" is
2304 * the branch that keeps the induction loop running (and, conversely, will
2305 * terminate it if the "other_branch" is taken).
2306 *
2307 * Three conditions must be met:
2308 * - "current_branch" must be in the induction loop.
2309 * - "other_branch" must be out of the induction loop.
2310 * - "token" and the induction increment must be "compatible": the token should
2311 * be a condition that keeps the execution inside the loop until the limit is
2312 * reached.
2313 */
CheckIfBranchIsLoopGuard(Token::Value token,HBasicBlock * current_branch,HBasicBlock * other_branch)2314 bool InductionVariableData::CheckIfBranchIsLoopGuard(
2315 Token::Value token,
2316 HBasicBlock* current_branch,
2317 HBasicBlock* other_branch) {
2318 if (!phi()->block()->current_loop()->IsNestedInThisLoop(
2319 current_branch->current_loop())) {
2320 return false;
2321 }
2322
2323 if (phi()->block()->current_loop()->IsNestedInThisLoop(
2324 other_branch->current_loop())) {
2325 return false;
2326 }
2327
2328 if (increment() > 0 && (token == Token::LT || token == Token::LTE)) {
2329 return true;
2330 }
2331 if (increment() < 0 && (token == Token::GT || token == Token::GTE)) {
2332 return true;
2333 }
2334 if (Token::IsInequalityOp(token) && (increment() == 1 || increment() == -1)) {
2335 return true;
2336 }
2337
2338 return false;
2339 }
2340
2341
ComputeLimitFromPredecessorBlock(HBasicBlock * block,LimitFromPredecessorBlock * result)2342 void InductionVariableData::ComputeLimitFromPredecessorBlock(
2343 HBasicBlock* block,
2344 LimitFromPredecessorBlock* result) {
2345 if (block->predecessors()->length() != 1) return;
2346 HBasicBlock* predecessor = block->predecessors()->at(0);
2347 HInstruction* end = predecessor->last();
2348
2349 if (!end->IsCompareNumericAndBranch()) return;
2350 HCompareNumericAndBranch* branch = HCompareNumericAndBranch::cast(end);
2351
2352 Token::Value token = branch->token();
2353 if (!Token::IsArithmeticCompareOp(token)) return;
2354
2355 HBasicBlock* other_target;
2356 if (block == branch->SuccessorAt(0)) {
2357 other_target = branch->SuccessorAt(1);
2358 } else {
2359 other_target = branch->SuccessorAt(0);
2360 token = Token::NegateCompareOp(token);
2361 DCHECK(block == branch->SuccessorAt(1));
2362 }
2363
2364 InductionVariableData* data;
2365
2366 data = GetInductionVariableData(branch->left());
2367 HValue* limit = branch->right();
2368 if (data == NULL) {
2369 data = GetInductionVariableData(branch->right());
2370 token = Token::ReverseCompareOp(token);
2371 limit = branch->left();
2372 }
2373
2374 if (data != NULL) {
2375 result->variable = data;
2376 result->token = token;
2377 result->limit = limit;
2378 result->other_target = other_target;
2379 }
2380 }
2381
2382
/*
 * Compute the limit that is imposed on an induction variable when entering
 * "block" (if any).
 * If the limit is the "proper" induction limit (the one that makes the loop
 * terminate when the induction variable reaches it) it is stored directly in
 * the induction variable data.
 * Otherwise the limit is written in "additional_limit" and the method
 * returns true.
 */
bool InductionVariableData::ComputeInductionVariableLimit(
    HBasicBlock* block,
    InductionVariableLimitUpdate* additional_limit) {
  LimitFromPredecessorBlock limit;
  ComputeLimitFromPredecessorBlock(block, &limit);
  if (!limit.LimitIsValid()) return false;

  if (limit.variable->CheckIfBranchIsLoopGuard(limit.token,
                                               block,
                                               limit.other_target)) {
    // The branch is the loop guard: record the termination limit and the
    // loop's exit edge directly on the induction variable.
    limit.variable->limit_ = limit.limit;
    limit.variable->limit_included_ = limit.LimitIsIncluded();
    limit.variable->limit_validity_ = block;
    limit.variable->induction_exit_block_ = block->predecessors()->at(0);
    limit.variable->induction_exit_target_ = limit.other_target;
    return false;
  } else {
    // Any other dominating comparison only adds an extra bound, which the
    // caller must undo when the dominator-tree traversal backtracks.
    additional_limit->updated_variable = limit.variable;
    additional_limit->limit = limit.limit;
    additional_limit->limit_is_upper = limit.LimitIsUpper();
    additional_limit->limit_is_included = limit.LimitIsIncluded();
    return true;
  }
}
2416
2417
InferRange(Zone * zone)2418 Range* HMathMinMax::InferRange(Zone* zone) {
2419 if (representation().IsSmiOrInteger32()) {
2420 Range* a = left()->range();
2421 Range* b = right()->range();
2422 Range* res = a->Copy(zone);
2423 if (operation_ == kMathMax) {
2424 res->CombinedMax(b);
2425 } else {
2426 DCHECK(operation_ == kMathMin);
2427 res->CombinedMin(b);
2428 }
2429 return res;
2430 } else {
2431 return HValue::InferRange(zone);
2432 }
2433 }
2434
2435
AddInput(HValue * value)2436 void HPushArguments::AddInput(HValue* value) {
2437 inputs_.Add(NULL, value->block()->zone());
2438 SetOperandAt(OperandCount() - 1, value);
2439 }
2440
2441
PrintTo(OStream & os) const2442 OStream& HPhi::PrintTo(OStream& os) const { // NOLINT
2443 os << "[";
2444 for (int i = 0; i < OperandCount(); ++i) {
2445 os << " " << NameOf(OperandAt(i)) << " ";
2446 }
2447 return os << " uses:" << UseCount() << "_"
2448 << smi_non_phi_uses() + smi_indirect_uses() << "s_"
2449 << int32_non_phi_uses() + int32_indirect_uses() << "i_"
2450 << double_non_phi_uses() + double_indirect_uses() << "d_"
2451 << tagged_non_phi_uses() + tagged_indirect_uses() << "t"
2452 << TypeOf(this) << "]";
2453 }
2454
2455
AddInput(HValue * value)2456 void HPhi::AddInput(HValue* value) {
2457 inputs_.Add(NULL, value->block()->zone());
2458 SetOperandAt(OperandCount() - 1, value);
2459 // Mark phis that may have 'arguments' directly or indirectly as an operand.
2460 if (!CheckFlag(kIsArguments) && value->CheckFlag(kIsArguments)) {
2461 SetFlag(kIsArguments);
2462 }
2463 }
2464
2465
HasRealUses()2466 bool HPhi::HasRealUses() {
2467 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2468 if (!it.value()->IsPhi()) return true;
2469 }
2470 return false;
2471 }
2472
2473
GetRedundantReplacement()2474 HValue* HPhi::GetRedundantReplacement() {
2475 HValue* candidate = NULL;
2476 int count = OperandCount();
2477 int position = 0;
2478 while (position < count && candidate == NULL) {
2479 HValue* current = OperandAt(position++);
2480 if (current != this) candidate = current;
2481 }
2482 while (position < count) {
2483 HValue* current = OperandAt(position++);
2484 if (current != this && current != candidate) return NULL;
2485 }
2486 DCHECK(candidate != this);
2487 return candidate;
2488 }
2489
2490
// Detaches this phi from its block; RemovePhi() clears the block back-link,
// which the second DCHECK verifies.
void HPhi::DeleteFromGraph() {
  DCHECK(block() != NULL);
  block()->RemovePhi(this);
  DCHECK(block() == NULL);
}
2496
2497
InitRealUses(int phi_id)2498 void HPhi::InitRealUses(int phi_id) {
2499 // Initialize real uses.
2500 phi_id_ = phi_id;
2501 // Compute a conservative approximation of truncating uses before inferring
2502 // representations. The proper, exact computation will be done later, when
2503 // inserting representation changes.
2504 SetFlag(kTruncatingToSmi);
2505 SetFlag(kTruncatingToInt32);
2506 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2507 HValue* value = it.value();
2508 if (!value->IsPhi()) {
2509 Representation rep = value->observed_input_representation(it.index());
2510 non_phi_uses_[rep.kind()] += 1;
2511 if (FLAG_trace_representation) {
2512 PrintF("#%d Phi is used by real #%d %s as %s\n",
2513 id(), value->id(), value->Mnemonic(), rep.Mnemonic());
2514 }
2515 if (!value->IsSimulate()) {
2516 if (!value->CheckFlag(kTruncatingToSmi)) {
2517 ClearFlag(kTruncatingToSmi);
2518 }
2519 if (!value->CheckFlag(kTruncatingToInt32)) {
2520 ClearFlag(kTruncatingToInt32);
2521 }
2522 }
2523 }
2524 }
2525 }
2526
2527
AddNonPhiUsesFrom(HPhi * other)2528 void HPhi::AddNonPhiUsesFrom(HPhi* other) {
2529 if (FLAG_trace_representation) {
2530 PrintF("adding to #%d Phi uses of #%d Phi: s%d i%d d%d t%d\n",
2531 id(), other->id(),
2532 other->non_phi_uses_[Representation::kSmi],
2533 other->non_phi_uses_[Representation::kInteger32],
2534 other->non_phi_uses_[Representation::kDouble],
2535 other->non_phi_uses_[Representation::kTagged]);
2536 }
2537
2538 for (int i = 0; i < Representation::kNumRepresentations; i++) {
2539 indirect_uses_[i] += other->non_phi_uses_[i];
2540 }
2541 }
2542
2543
AddIndirectUsesTo(int * dest)2544 void HPhi::AddIndirectUsesTo(int* dest) {
2545 for (int i = 0; i < Representation::kNumRepresentations; i++) {
2546 dest[i] += indirect_uses_[i];
2547 }
2548 }
2549
2550
// Folds the given list of (later) simulates into this one, consuming the
// list and deleting the merged instructions from the graph.
void HSimulate::MergeWith(ZoneList<HSimulate*>* list) {
  while (!list->is_empty()) {
    HSimulate* from = list->RemoveLast();
    ZoneList<HValue*>* from_values = &from->values_;
    for (int i = 0; i < from_values->length(); ++i) {
      if (from->HasAssignedIndexAt(i)) {
        int index = from->GetAssignedIndexAt(i);
        // Our own binding for an index wins over the merged one.
        if (HasValueForIndex(index)) continue;
        AddAssignedValue(index, from_values->at(i));
      } else {
        if (pop_count_ > 0) {
          // A push from "from" cancels one of our pending pops.
          pop_count_--;
        } else {
          AddPushedValue(from_values->at(i));
        }
      }
    }
    // Inherit the merged simulate's remaining pops, then remove it.
    pop_count_ += from->pop_count_;
    from->DeleteAndReplaceWith(NULL);
  }
}
2572
2573
// Prints "id=<ast id> [pop N] [/] var[i] = v, push v, ..." for debugging;
// recorded values are listed newest-first.
OStream& HSimulate::PrintDataTo(OStream& os) const {  // NOLINT
  os << "id=" << ast_id().ToInt();
  if (pop_count_ > 0) os << " pop " << pop_count_;
  if (values_.length() > 0) {
    if (pop_count_ > 0) os << " /";
    for (int i = values_.length() - 1; i >= 0; --i) {
      if (HasAssignedIndexAt(i)) {
        os << " var[" << GetAssignedIndexAt(i) << "] = ";
      } else {
        os << " push ";
      }
      os << NameOf(values_[i]);
      if (i > 0) os << ",";
    }
  }
  return os;
}
2591
2592
// Applies this simulate's effects (drop pops, rebind assigned slots, push
// values) to the given environment. Idempotent: a second call is a no-op.
void HSimulate::ReplayEnvironment(HEnvironment* env) {
  if (done_with_replay_) return;
  DCHECK(env != NULL);
  env->set_ast_id(ast_id());
  env->Drop(pop_count());
  // Iterate back-to-front so pushes land in recorded order.
  for (int i = values()->length() - 1; i >= 0; --i) {
    HValue* value = values()->at(i);
    if (HasAssignedIndexAt(i)) {
      env->Bind(GetAssignedIndexAt(i), value);
    } else {
      env->Push(value);
    }
  }
  done_with_replay_ = true;
}
2608
2609
ReplayEnvironmentNested(const ZoneList<HValue * > * values,HCapturedObject * other)2610 static void ReplayEnvironmentNested(const ZoneList<HValue*>* values,
2611 HCapturedObject* other) {
2612 for (int i = 0; i < values->length(); ++i) {
2613 HValue* value = values->at(i);
2614 if (value->IsCapturedObject()) {
2615 if (HCapturedObject::cast(value)->capture_id() == other->capture_id()) {
2616 values->at(i) = other;
2617 } else {
2618 ReplayEnvironmentNested(HCapturedObject::cast(value)->values(), other);
2619 }
2620 }
2621 }
2622 }
2623
2624
2625 // Replay captured objects by replacing all captured objects with the
2626 // same capture id in the current and all outer environments.
ReplayEnvironment(HEnvironment * env)2627 void HCapturedObject::ReplayEnvironment(HEnvironment* env) {
2628 DCHECK(env != NULL);
2629 while (env != NULL) {
2630 ReplayEnvironmentNested(env->values(), this);
2631 env = env->outer();
2632 }
2633 }
2634
2635
// Prints "#<capture id> " followed by the dematerialized object's fields.
OStream& HCapturedObject::PrintDataTo(OStream& os) const {  // NOLINT
  os << "#" << capture_id() << " ";
  return HDematerializedObject::PrintDataTo(os);
}
2640
2641
// Records a block that acts as a return target for this inlined function.
void HEnterInlined::RegisterReturnTarget(HBasicBlock* return_target,
                                         Zone* zone) {
  DCHECK(return_target->IsInlineReturnTarget());
  return_targets_.Add(return_target, zone);
}
2647
2648
// Prints "<function debug name>, id=<function ast id>".
OStream& HEnterInlined::PrintDataTo(OStream& os) const {  // NOLINT
  return os << function()->debug_name()->ToCString().get()
            << ", id=" << function()->id().ToInt();
}
2653
2654
IsInteger32(double value)2655 static bool IsInteger32(double value) {
2656 double roundtrip_value = static_cast<double>(static_cast<int32_t>(value));
2657 return bit_cast<int64_t>(roundtrip_value) == bit_cast<int64_t>(value);
2658 }
2659
2660
// Builds a constant from an arbitrary heap value, caching numeric and map
// properties so later phases can query them without touching the heap.
HConstant::HConstant(Handle<Object> object, Representation r)
    : HTemplateInstruction<0>(HType::FromValue(object)),
      object_(Unique<Object>::CreateUninitialized(object)),
      object_map_(Handle<Map>::null()),
      has_stable_map_value_(false),
      has_smi_value_(false),
      has_int32_value_(false),
      has_double_value_(false),
      has_external_reference_value_(false),
      is_not_in_new_space_(true),
      boolean_value_(object->BooleanValue()),
      is_undetectable_(false),
      instance_type_(kUnknownInstanceType) {
  if (object->IsHeapObject()) {
    Handle<HeapObject> heap_object = Handle<HeapObject>::cast(object);
    Isolate* isolate = heap_object->GetIsolate();
    Handle<Map> map(heap_object->map(), isolate);
    is_not_in_new_space_ = !isolate->heap()->InNewSpace(*object);
    instance_type_ = map->instance_type();
    is_undetectable_ = map->is_undetectable();
    // Only a stable map is cached; unstable maps can change underneath us.
    if (map->is_stable()) object_map_ = Unique<Map>::CreateImmovable(map);
    has_stable_map_value_ = (instance_type_ == MAP_TYPE &&
                             Handle<Map>::cast(heap_object)->is_stable());
  }
  if (object->IsNumber()) {
    // Cache the numeric value in every representation it fits into.
    double n = object->Number();
    has_int32_value_ = IsInteger32(n);
    int32_value_ = DoubleToInt32(n);
    has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_);
    double_value_ = n;
    has_double_value_ = true;
    // TODO(titzer): if this heap number is new space, tenure a new one.
  }

  Initialize(r);
}
2697
2698
// Internal constructor used when cloning a constant to another
// representation: all cached object properties are supplied explicitly.
HConstant::HConstant(Unique<Object> object,
                     Unique<Map> object_map,
                     bool has_stable_map_value,
                     Representation r,
                     HType type,
                     bool is_not_in_new_space,
                     bool boolean_value,
                     bool is_undetectable,
                     InstanceType instance_type)
    : HTemplateInstruction<0>(type),
      object_(object),
      object_map_(object_map),
      has_stable_map_value_(has_stable_map_value),
      has_smi_value_(false),
      has_int32_value_(false),
      has_double_value_(false),
      has_external_reference_value_(false),
      is_not_in_new_space_(is_not_in_new_space),
      boolean_value_(boolean_value),
      is_undetectable_(is_undetectable),
      instance_type_(instance_type) {
  DCHECK(!object.handle().is_null());
  // Numbers must use the dedicated numeric constructors, not this one.
  DCHECK(!type.IsTaggedNumber() || type.IsNone());
  Initialize(r);
}
2724
2725
// Builds an Integer32-valued constant; "object" optionally carries a
// pre-existing heap object (e.g. a HeapNumber) holding the same value.
HConstant::HConstant(int32_t integer_value,
                     Representation r,
                     bool is_not_in_new_space,
                     Unique<Object> object)
    : object_(object),
      object_map_(Handle<Map>::null()),
      has_stable_map_value_(false),
      has_smi_value_(Smi::IsValid(integer_value)),
      has_int32_value_(true),
      has_double_value_(true),
      has_external_reference_value_(false),
      is_not_in_new_space_(is_not_in_new_space),
      boolean_value_(integer_value != 0),
      is_undetectable_(false),
      int32_value_(integer_value),
      double_value_(FastI2D(integer_value)),
      instance_type_(kUnknownInstanceType) {
  // It's possible to create a constant with a value in Smi-range but stored
  // in a (pre-existing) HeapNumber. See crbug.com/349878.
  bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
  bool is_smi = has_smi_value_ && !could_be_heapobject;
  set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
  Initialize(r);
}
2750
2751
// Builds a Double-valued constant, also caching int32/smi views when the
// value round-trips exactly through int32.
HConstant::HConstant(double double_value,
                     Representation r,
                     bool is_not_in_new_space,
                     Unique<Object> object)
    : object_(object),
      object_map_(Handle<Map>::null()),
      has_stable_map_value_(false),
      has_int32_value_(IsInteger32(double_value)),
      has_double_value_(true),
      has_external_reference_value_(false),
      is_not_in_new_space_(is_not_in_new_space),
      boolean_value_(double_value != 0 && !std::isnan(double_value)),
      is_undetectable_(false),
      int32_value_(DoubleToInt32(double_value)),
      double_value_(double_value),
      instance_type_(kUnknownInstanceType) {
  has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_);
  // It's possible to create a constant with a value in Smi-range but stored
  // in a (pre-existing) HeapNumber. See crbug.com/349878.
  bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
  bool is_smi = has_smi_value_ && !could_be_heapobject;
  set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
  Initialize(r);
}
2776
2777
// Builds a constant holding an external reference (an address outside the
// JS heap); always uses the External representation.
HConstant::HConstant(ExternalReference reference)
    : HTemplateInstruction<0>(HType::Any()),
      object_(Unique<Object>(Handle<Object>::null())),
      object_map_(Handle<Map>::null()),
      has_stable_map_value_(false),
      has_smi_value_(false),
      has_int32_value_(false),
      has_double_value_(false),
      has_external_reference_value_(true),
      is_not_in_new_space_(true),
      boolean_value_(true),
      is_undetectable_(false),
      external_reference_value_(reference),
      instance_type_(kUnknownInstanceType) {
  Initialize(Representation::External());
}
2794
2795
// Picks a representation when none was requested and finalizes the
// instruction (handle zapping for Smi, GVN flag).
void HConstant::Initialize(Representation r) {
  if (r.IsNone()) {
    // Choose the cheapest representation the cached values allow.
    if (has_smi_value_ && SmiValuesAre31Bits()) {
      r = Representation::Smi();
    } else if (has_int32_value_) {
      r = Representation::Integer32();
    } else if (has_double_value_) {
      r = Representation::Double();
    } else if (has_external_reference_value_) {
      r = Representation::External();
    } else {
      Handle<Object> object = object_.handle();
      if (object->IsJSObject()) {
        // Try to eagerly migrate JSObjects that have deprecated maps.
        Handle<JSObject> js_object = Handle<JSObject>::cast(object);
        if (js_object->map()->is_deprecated()) {
          JSObject::TryMigrateInstance(js_object);
        }
      }
      r = Representation::Tagged();
    }
  }
  if (r.IsSmi()) {
    // If we have an existing handle, zap it, because it might be a heap
    // number which we must not re-use when copying this HConstant to
    // Tagged representation later, because having Smi representation now
    // could cause heap object checks not to get emitted.
    object_ = Unique<Object>(Handle<Object>::null());
  }
  set_representation(r);
  SetFlag(kUseGVN);
}
2828
2829
// True when the constant is one of the well-known roots checked below
// (immortal-immovable roots, internalized strings, string maps), so code may
// embed a direct pointer to it. Int32, ordinary double, and external
// constants never qualify.
bool HConstant::ImmortalImmovable() const {
  if (has_int32_value_) {
    return false;
  }
  if (has_double_value_) {
    // Special doubles are the only immortal-immovable numbers (presumably
    // canonical heap values such as NaN — confirm against IsSpecialDouble).
    if (IsSpecialDouble()) {
      return true;
    }
    return false;
  }
  if (has_external_reference_value_) {
    return false;
  }

  DCHECK(!object_.handle().is_null());
  Heap* heap = isolate()->heap();
  DCHECK(!object_.IsKnownGlobal(heap->minus_zero_value()));
  DCHECK(!object_.IsKnownGlobal(heap->nan_value()));
  // Expand to one IsKnownGlobal check per root in each list, OR-ed together.
  return
#define IMMORTAL_IMMOVABLE_ROOT(name) \
      object_.IsKnownGlobal(heap->name()) ||
      IMMORTAL_IMMOVABLE_ROOT_LIST(IMMORTAL_IMMOVABLE_ROOT)
#undef IMMORTAL_IMMOVABLE_ROOT
#define INTERNALIZED_STRING(name, value) \
      object_.IsKnownGlobal(heap->name()) ||
      INTERNALIZED_STRING_LIST(INTERNALIZED_STRING)
#undef INTERNALIZED_STRING
#define STRING_TYPE(NAME, size, name, Name) \
      object_.IsKnownGlobal(heap->name##_map()) ||
      STRING_TYPE_LIST(STRING_TYPE)
#undef STRING_TYPE
      false;
}
2863
2864
EmitAtUses()2865 bool HConstant::EmitAtUses() {
2866 DCHECK(IsLinked());
2867 if (block()->graph()->has_osr() &&
2868 block()->graph()->IsStandardConstant(this)) {
2869 // TODO(titzer): this seems like a hack that should be fixed by custom OSR.
2870 return true;
2871 }
2872 if (HasNoUses()) return true;
2873 if (IsCell()) return false;
2874 if (representation().IsDouble()) return false;
2875 if (representation().IsExternal()) return false;
2876 return true;
2877 }
2878
2879
// Returns a copy of this constant in representation |r|, or NULL when the
// cached value cannot be expressed in that representation.
HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const {
  // Reject representations for which no matching cached value exists.
  if (r.IsSmi() && !has_smi_value_) return NULL;
  if (r.IsInteger32() && !has_int32_value_) return NULL;
  if (r.IsDouble() && !has_double_value_) return NULL;
  if (r.IsExternal() && !has_external_reference_value_) return NULL;
  // Build the copy from the most specific cached value available.
  if (has_int32_value_) {
    return new(zone) HConstant(int32_value_, r, is_not_in_new_space_, object_);
  }
  if (has_double_value_) {
    return new(zone) HConstant(double_value_, r, is_not_in_new_space_, object_);
  }
  if (has_external_reference_value_) {
    return new(zone) HConstant(external_reference_value_);
  }
  DCHECK(!object_.handle().is_null());
  // Fall back to a full heap-object constant carrying all cached metadata.
  return new(zone) HConstant(object_,
                             object_map_,
                             has_stable_map_value_,
                             r,
                             type_,
                             is_not_in_new_space_,
                             boolean_value_,
                             is_undetectable_,
                             instance_type_);
}
2905
2906
CopyToTruncatedInt32(Zone * zone)2907 Maybe<HConstant*> HConstant::CopyToTruncatedInt32(Zone* zone) {
2908 HConstant* res = NULL;
2909 if (has_int32_value_) {
2910 res = new(zone) HConstant(int32_value_,
2911 Representation::Integer32(),
2912 is_not_in_new_space_,
2913 object_);
2914 } else if (has_double_value_) {
2915 res = new(zone) HConstant(DoubleToInt32(double_value_),
2916 Representation::Integer32(),
2917 is_not_in_new_space_,
2918 object_);
2919 }
2920 return Maybe<HConstant*>(res != NULL, res);
2921 }
2922
2923
CopyToTruncatedNumber(Zone * zone)2924 Maybe<HConstant*> HConstant::CopyToTruncatedNumber(Zone* zone) {
2925 HConstant* res = NULL;
2926 Handle<Object> handle = this->handle(zone->isolate());
2927 if (handle->IsBoolean()) {
2928 res = handle->BooleanValue() ?
2929 new(zone) HConstant(1) : new(zone) HConstant(0);
2930 } else if (handle->IsUndefined()) {
2931 res = new(zone) HConstant(base::OS::nan_value());
2932 } else if (handle->IsNull()) {
2933 res = new(zone) HConstant(0);
2934 }
2935 return Maybe<HConstant*>(res != NULL, res);
2936 }
2937
2938
// Prints the constant's value (plus map/space annotations) for --trace-hydrogen
// style debug output.
OStream& HConstant::PrintDataTo(OStream& os) const {  // NOLINT
  if (has_int32_value_) {
    os << int32_value_ << " ";
  } else if (has_double_value_) {
    os << double_value_ << " ";
  } else if (has_external_reference_value_) {
    os << reinterpret_cast<void*>(external_reference_value_.address()) << " ";
  } else {
    // The handle() method is silently and lazily mutating the object.
    Handle<Object> h = const_cast<HConstant*>(this)->handle(Isolate::Current());
    os << Brief(*h) << " ";
    if (HasStableMapValue()) os << "[stable-map] ";
    if (HasObjectMap()) os << "[map " << *ObjectMap().handle() << "] ";
  }
  if (!is_not_in_new_space_) os << "[new space] ";
  return os;
}
2956
2957
PrintDataTo(OStream & os) const2958 OStream& HBinaryOperation::PrintDataTo(OStream& os) const { // NOLINT
2959 os << NameOf(left()) << " " << NameOf(right());
2960 if (CheckFlag(kCanOverflow)) os << " !";
2961 if (CheckFlag(kBailoutOnMinusZero)) os << " -0?";
2962 return os;
2963 }
2964
2965
// Chooses this operation's representation from inputs, uses and type-feedback
// output representation, in that order of refinement.
void HBinaryOperation::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");

  // A Smi result that flows into a non-Smi use must be widened to Integer32.
  if (representation().IsSmi() && HasNonSmiUse()) {
    UpdateRepresentation(
        Representation::Integer32(), h_infer, "use requirements");
  }

  // Prefer the observed output representation when type feedback recorded
  // one; otherwise fall back to what the uses demand.
  if (observed_output_representation_.IsNone()) {
    new_rep = RepresentationFromUses();
    UpdateRepresentation(new_rep, h_infer, "uses");
  } else {
    new_rep = RepresentationFromOutput();
    UpdateRepresentation(new_rep, h_infer, "output");
  }
}
2984
2985
// Computes the join (most general) of the observed and actual input
// representations, ignoring Tagged actuals.
Representation HBinaryOperation::RepresentationFromInputs() {
  // Determine the worst case of observed input representations and
  // the currently assumed output representation.
  Representation rep = representation();
  // Operands are indexed 1 and 2 (index 0 is the context).
  for (int i = 1; i <= 2; ++i) {
    rep = rep.generalize(observed_input_representation(i));
  }
  // If any of the actual input representation is more general than what we
  // have so far but not Tagged, use that representation instead.
  Representation left_rep = left()->representation();
  Representation right_rep = right()->representation();
  if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
  if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);

  return rep;
}
3002
3003
// Returns true when the type-feedback output representation may be ignored:
// all uses truncate to the current Int32/Smi representation anyway, so a
// wider (e.g. Double) observed output adds no information.
bool HBinaryOperation::IgnoreObservedOutputRepresentation(
    Representation current_rep) {
  return ((current_rep.IsInteger32() && CheckUsesForFlag(kTruncatingToInt32)) ||
          (current_rep.IsSmi() && CheckUsesForFlag(kTruncatingToSmi))) &&
         // Mul in Integer32 mode would be too precise.
         (!this->IsMul() || HMul::cast(this)->MulMinusOne());
}
3011
3012
RepresentationFromOutput()3013 Representation HBinaryOperation::RepresentationFromOutput() {
3014 Representation rep = representation();
3015 // Consider observed output representation, but ignore it if it's Double,
3016 // this instruction is not a division, and all its uses are truncating
3017 // to Integer32.
3018 if (observed_output_representation_.is_more_general_than(rep) &&
3019 !IgnoreObservedOutputRepresentation(rep)) {
3020 return observed_output_representation_;
3021 }
3022 return Representation::None();
3023 }
3024
3025
// Forces representation |r| on this operation and records it as the observed
// representation of both value operands before delegating to HValue.
void HBinaryOperation::AssumeRepresentation(Representation r) {
  set_observed_input_representation(1, r);
  set_observed_input_representation(2, r);
  HValue::AssumeRepresentation(r);
}
3031
3032
// Min/max take their representation purely from the inputs; uses are
// deliberately ignored (unlike HBinaryOperation::InferRepresentation).
void HMathMinMax::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  // Do not care about uses.
}
3039
3040
// Computes a value range for bitwise AND/OR/XOR from the operand ranges.
Range* HBitwise::InferRange(Zone* zone) {
  if (op() == Token::BIT_XOR) {
    if (left()->HasRange() && right()->HasRange()) {
      // The maximum value has the high bit, and all bits below, set:
      // (1 << high) - 1.
      // If the range can be negative, the minimum int is a negative number with
      // the high bit, and all bits below, unset:
      // -(1 << high).
      // If it cannot be negative, conservatively choose 0 as minimum int.
      int64_t left_upper = left()->range()->upper();
      int64_t left_lower = left()->range()->lower();
      int64_t right_upper = right()->range()->upper();
      int64_t right_lower = right()->range()->lower();

      // Map negative bounds to their bitwise complement so that the bit-or
      // below captures every bit position any operand value may occupy.
      if (left_upper < 0) left_upper = ~left_upper;
      if (left_lower < 0) left_lower = ~left_lower;
      if (right_upper < 0) right_upper = ~right_upper;
      if (right_lower < 0) right_lower = ~right_lower;

      int high = MostSignificantBit(
          static_cast<uint32_t>(
              left_upper | left_lower | right_upper | right_lower));

      // 64-bit shift avoids undefined behavior when high == 31.
      int64_t limit = 1;
      limit <<= high;
      int32_t min = (left()->range()->CanBeNegative() ||
                     right()->range()->CanBeNegative())
                    ? static_cast<int32_t>(-limit) : 0;
      return new(zone) Range(min, static_cast<int32_t>(limit - 1));
    }
    // Without ranges on both operands, fall back to the generic range;
    // XOR of int32 values can never produce -0.
    Range* result = HValue::InferRange(zone);
    result->set_can_be_minus_zero(false);
    return result;
  }
  // AND/OR: combine the operand bit masks; a non-negative mask bounds the
  // result to [0, mask].
  const int32_t kDefaultMask = static_cast<int32_t>(0xffffffff);
  int32_t left_mask = (left()->range() != NULL)
      ? left()->range()->Mask()
      : kDefaultMask;
  int32_t right_mask = (right()->range() != NULL)
      ? right()->range()->Mask()
      : kDefaultMask;
  int32_t result_mask = (op() == Token::BIT_AND)
      ? left_mask & right_mask
      : left_mask | right_mask;
  if (result_mask >= 0) return new(zone) Range(0, result_mask);

  Range* result = HValue::InferRange(zone);
  result->set_can_be_minus_zero(false);
  return result;
}
3091
3092
InferRange(Zone * zone)3093 Range* HSar::InferRange(Zone* zone) {
3094 if (right()->IsConstant()) {
3095 HConstant* c = HConstant::cast(right());
3096 if (c->HasInteger32Value()) {
3097 Range* result = (left()->range() != NULL)
3098 ? left()->range()->Copy(zone)
3099 : new(zone) Range();
3100 result->Sar(c->Integer32Value());
3101 return result;
3102 }
3103 }
3104 return HValue::InferRange(zone);
3105 }
3106
3107
InferRange(Zone * zone)3108 Range* HShr::InferRange(Zone* zone) {
3109 if (right()->IsConstant()) {
3110 HConstant* c = HConstant::cast(right());
3111 if (c->HasInteger32Value()) {
3112 int shift_count = c->Integer32Value() & 0x1f;
3113 if (left()->range()->CanBeNegative()) {
3114 // Only compute bounds if the result always fits into an int32.
3115 return (shift_count >= 1)
3116 ? new(zone) Range(0,
3117 static_cast<uint32_t>(0xffffffff) >> shift_count)
3118 : new(zone) Range();
3119 } else {
3120 // For positive inputs we can use the >> operator.
3121 Range* result = (left()->range() != NULL)
3122 ? left()->range()->Copy(zone)
3123 : new(zone) Range();
3124 result->Sar(c->Integer32Value());
3125 return result;
3126 }
3127 }
3128 }
3129 return HValue::InferRange(zone);
3130 }
3131
3132
InferRange(Zone * zone)3133 Range* HShl::InferRange(Zone* zone) {
3134 if (right()->IsConstant()) {
3135 HConstant* c = HConstant::cast(right());
3136 if (c->HasInteger32Value()) {
3137 Range* result = (left()->range() != NULL)
3138 ? left()->range()->Copy(zone)
3139 : new(zone) Range();
3140 result->Shl(c->Integer32Value());
3141 return result;
3142 }
3143 }
3144 return HValue::InferRange(zone);
3145 }
3146
3147
InferRange(Zone * zone)3148 Range* HLoadNamedField::InferRange(Zone* zone) {
3149 if (access().representation().IsInteger8()) {
3150 return new(zone) Range(kMinInt8, kMaxInt8);
3151 }
3152 if (access().representation().IsUInteger8()) {
3153 return new(zone) Range(kMinUInt8, kMaxUInt8);
3154 }
3155 if (access().representation().IsInteger16()) {
3156 return new(zone) Range(kMinInt16, kMaxInt16);
3157 }
3158 if (access().representation().IsUInteger16()) {
3159 return new(zone) Range(kMinUInt16, kMaxUInt16);
3160 }
3161 if (access().IsStringLength()) {
3162 return new(zone) Range(0, String::kMaxLength);
3163 }
3164 return HValue::InferRange(zone);
3165 }
3166
3167
// Derives a value range from the typed-array elements kind; narrow external
// element kinds have statically known bounds.
Range* HLoadKeyed::InferRange(Zone* zone) {
  switch (elements_kind()) {
    case EXTERNAL_INT8_ELEMENTS:
      return new(zone) Range(kMinInt8, kMaxInt8);
    case EXTERNAL_UINT8_ELEMENTS:
    case EXTERNAL_UINT8_CLAMPED_ELEMENTS:
      return new(zone) Range(kMinUInt8, kMaxUInt8);
    case EXTERNAL_INT16_ELEMENTS:
      return new(zone) Range(kMinInt16, kMaxInt16);
    case EXTERNAL_UINT16_ELEMENTS:
      return new(zone) Range(kMinUInt16, kMaxUInt16);
    default:
      // 32-bit and non-external kinds get the generic (full) range.
      return HValue::InferRange(zone);
  }
}
3183
3184
PrintDataTo(OStream & os) const3185 OStream& HCompareGeneric::PrintDataTo(OStream& os) const { // NOLINT
3186 os << Token::Name(token()) << " ";
3187 return HBinaryOperation::PrintDataTo(os);
3188 }
3189
3190
PrintDataTo(OStream & os) const3191 OStream& HStringCompareAndBranch::PrintDataTo(OStream& os) const { // NOLINT
3192 os << Token::Name(token()) << " ";
3193 return HControlInstruction::PrintDataTo(os);
3194 }
3195
3196
PrintDataTo(OStream & os) const3197 OStream& HCompareNumericAndBranch::PrintDataTo(OStream& os) const { // NOLINT
3198 os << Token::Name(token()) << " " << NameOf(left()) << " " << NameOf(right());
3199 return HControlInstruction::PrintDataTo(os);
3200 }
3201
3202
PrintDataTo(OStream & os) const3203 OStream& HCompareObjectEqAndBranch::PrintDataTo(OStream& os) const { // NOLINT
3204 os << NameOf(left()) << " " << NameOf(right());
3205 return HControlInstruction::PrintDataTo(os);
3206 }
3207
3208
KnownSuccessorBlock(HBasicBlock ** block)3209 bool HCompareObjectEqAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3210 if (known_successor_index() != kNoKnownSuccessorIndex) {
3211 *block = SuccessorAt(known_successor_index());
3212 return true;
3213 }
3214 if (FLAG_fold_constants && left()->IsConstant() && right()->IsConstant()) {
3215 *block = HConstant::cast(left())->DataEquals(HConstant::cast(right()))
3216 ? FirstSuccessor() : SecondSuccessor();
3217 return true;
3218 }
3219 *block = NULL;
3220 return false;
3221 }
3222
3223
// Returns whether |constant| is an object in the sense of Object.prototype
// checks: null counts as an object, numbers and undetectables do not, and
// otherwise the instance type must be a non-callable spec object.
bool ConstantIsObject(HConstant* constant, Isolate* isolate) {
  if (constant->HasNumberValue()) return false;
  if (constant->GetUnique().IsKnownGlobal(isolate->heap()->null_value())) {
    return true;
  }
  if (constant->IsUndetectable()) return false;
  InstanceType type = constant->GetInstanceType();
  return (FIRST_NONCALLABLE_SPEC_OBJECT_TYPE <= type) &&
         (type <= LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
}
3234
3235
KnownSuccessorBlock(HBasicBlock ** block)3236 bool HIsObjectAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3237 if (FLAG_fold_constants && value()->IsConstant()) {
3238 *block = ConstantIsObject(HConstant::cast(value()), isolate())
3239 ? FirstSuccessor() : SecondSuccessor();
3240 return true;
3241 }
3242 *block = NULL;
3243 return false;
3244 }
3245
3246
// Statically resolves the branch using, in order: a pre-set successor index,
// constant folding, and the inferred HType of the value. Returns true iff
// resolved.
bool HIsStringAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (known_successor_index() != kNoKnownSuccessorIndex) {
    *block = SuccessorAt(known_successor_index());
    return true;
  }
  if (FLAG_fold_constants && value()->IsConstant()) {
    *block = HConstant::cast(value())->HasStringValue()
        ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  if (value()->type().IsString()) {
    *block = FirstSuccessor();
    return true;
  }
  // Types that are provably not strings take the false branch.
  if (value()->type().IsSmi() ||
      value()->type().IsNull() ||
      value()->type().IsBoolean() ||
      value()->type().IsUndefined() ||
      value()->type().IsJSObject()) {
    *block = SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}
3272
3273
KnownSuccessorBlock(HBasicBlock ** block)3274 bool HIsUndetectableAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3275 if (FLAG_fold_constants && value()->IsConstant()) {
3276 *block = HConstant::cast(value())->IsUndetectable()
3277 ? FirstSuccessor() : SecondSuccessor();
3278 return true;
3279 }
3280 *block = NULL;
3281 return false;
3282 }
3283
3284
KnownSuccessorBlock(HBasicBlock ** block)3285 bool HHasInstanceTypeAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3286 if (FLAG_fold_constants && value()->IsConstant()) {
3287 InstanceType type = HConstant::cast(value())->GetInstanceType();
3288 *block = (from_ <= type) && (type <= to_)
3289 ? FirstSuccessor() : SecondSuccessor();
3290 return true;
3291 }
3292 *block = NULL;
3293 return false;
3294 }
3295
3296
// A hole comparison simply adopts the representation of its input; the
// inference phase argument is unused.
void HCompareHoleAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
  ChangeRepresentation(value()->representation());
}
3301
3302
KnownSuccessorBlock(HBasicBlock ** block)3303 bool HCompareNumericAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3304 if (left() == right() &&
3305 left()->representation().IsSmiOrInteger32()) {
3306 *block = (token() == Token::EQ ||
3307 token() == Token::EQ_STRICT ||
3308 token() == Token::LTE ||
3309 token() == Token::GTE)
3310 ? FirstSuccessor() : SecondSuccessor();
3311 return true;
3312 }
3313 *block = NULL;
3314 return false;
3315 }
3316
3317
// Statically resolves the minus-zero test for double constants and for
// representations that cannot encode -0. Returns true iff resolved.
bool HCompareMinusZeroAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (FLAG_fold_constants && value()->IsConstant()) {
    HConstant* constant = HConstant::cast(value());
    if (constant->HasDoubleValue()) {
      *block = IsMinusZero(constant->DoubleValue())
          ? FirstSuccessor() : SecondSuccessor();
      return true;
    }
  }
  if (value()->representation().IsSmiOrInteger32()) {
    // A Smi or Integer32 cannot contain minus zero.
    *block = SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}
3335
3336
// A minus-zero test simply adopts the representation of its input; the
// inference phase argument is unused.
void HCompareMinusZeroAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
  ChangeRepresentation(value()->representation());
}
3341
3342
PrintDataTo(OStream & os) const3343 OStream& HGoto::PrintDataTo(OStream& os) const { // NOLINT
3344 return os << *SuccessorAt(0);
3345 }
3346
3347
// Picks the comparison representation from observed and actual operand
// representations: Smi/Integer32 when both sides agree, Double otherwise.
void HCompareNumericAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
  Representation left_rep = left()->representation();
  Representation right_rep = right()->representation();
  // NOTE(review): indices 0 and 1 here differ from HBinaryOperation, which
  // uses 1 and 2 — this class stores observed representations per operand
  // slot, not per graph operand.
  Representation observed_left = observed_input_representation(0);
  Representation observed_right = observed_input_representation(1);

  Representation rep = Representation::None();
  rep = rep.generalize(observed_left);
  rep = rep.generalize(observed_right);
  if (rep.IsNone() || rep.IsSmiOrInteger32()) {
    // Integral feedback: also fold in the actual (non-Tagged) operand
    // representations.
    if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
    if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
  } else {
    // Any wider feedback forces a double comparison.
    rep = Representation::Double();
  }

  if (rep.IsDouble()) {
    // According to the ES5 spec (11.9.3, 11.8.5), Equality comparisons (==, ===
    // and !=) have special handling of undefined, e.g. undefined == undefined
    // is 'true'. Relational comparisons have a different semantic, first
    // calling ToPrimitive() on their arguments. The standard Crankshaft
    // tagged-to-double conversion to ensure the HCompareNumericAndBranch's
    // inputs are doubles caused 'undefined' to be converted to NaN. That's
    // compatible out-of-the box with ordered relational comparisons (<, >, <=,
    // >=). However, for equality comparisons (and for 'in' and 'instanceof'),
    // it is not consistent with the spec. For example, it would cause undefined
    // == undefined (should be true) to be evaluated as NaN == NaN
    // (false). Therefore, any comparisons other than ordered relational
    // comparisons must cause a deopt when one of their arguments is undefined.
    // See also v8:1434
    if (Token::IsOrderedRelationalCompareOp(token_)) {
      SetFlag(kAllowUndefinedAsNaN);
    }
  }
  ChangeRepresentation(rep);
}
3385
3386
PrintDataTo(OStream & os) const3387 OStream& HParameter::PrintDataTo(OStream& os) const { // NOLINT
3388 return os << index();
3389 }
3390
3391
PrintDataTo(OStream & os) const3392 OStream& HLoadNamedField::PrintDataTo(OStream& os) const { // NOLINT
3393 os << NameOf(object()) << access_;
3394
3395 if (maps() != NULL) {
3396 os << " [" << *maps()->at(0).handle();
3397 for (int i = 1; i < maps()->size(); ++i) {
3398 os << "," << *maps()->at(i).handle();
3399 }
3400 os << "]";
3401 }
3402
3403 if (HasDependency()) os << " " << NameOf(dependency());
3404 return os;
3405 }
3406
3407
PrintDataTo(OStream & os) const3408 OStream& HLoadNamedGeneric::PrintDataTo(OStream& os) const { // NOLINT
3409 Handle<String> n = Handle<String>::cast(name());
3410 return os << NameOf(object()) << "." << n->ToCString().get();
3411 }
3412
3413
// Prints the elements backing store (with its kind for external arrays), the
// key (plus dehoisted base offset), and dependency/hole-check annotations.
OStream& HLoadKeyed::PrintDataTo(OStream& os) const {  // NOLINT
  if (!is_external()) {
    os << NameOf(elements());
  } else {
    DCHECK(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
           elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
    os << NameOf(elements()) << "." << ElementsKindToString(elements_kind());
  }

  os << "[" << NameOf(key());
  if (IsDehoisted()) os << " + " << base_offset();
  os << "]";

  if (HasDependency()) os << " " << NameOf(dependency());
  if (RequiresHoleCheck()) os << " check_hole";
  return os;
}
3431
3432
// Tries to add |increase_by_value| to the encoded base offset, failing (and
// leaving state untouched) on uint32 overflow or bit-field overflow.
bool HLoadKeyed::TryIncreaseBaseOffset(uint32_t increase_by_value) {
  // The base offset is usually simply the size of the array header, except
  // that dehoisting adds an additional offset due to an array index key
  // manipulation, in which case it becomes (array header size +
  // constant-offset-from-key * kPointerSize).
  uint32_t base_offset = BaseOffsetField::decode(bit_field_);
  // CheckedNumeric detects unsigned wrap-around on the addition.
  v8::base::internal::CheckedNumeric<uint32_t> addition_result = base_offset;
  addition_result += increase_by_value;
  if (!addition_result.IsValid()) return false;
  base_offset = addition_result.ValueOrDie();
  if (!BaseOffsetField::is_valid(base_offset)) return false;
  bit_field_ = BaseOffsetField::update(bit_field_, base_offset);
  return true;
}
3447
3448
// Returns whether every use of this load can cope with receiving the hole
// value, i.e. no explicit hole check is needed before the uses.
bool HLoadKeyed::UsesMustHandleHole() const {
  // Packed arrays contain no holes.
  if (IsFastPackedElementsKind(elements_kind())) {
    return false;
  }

  // External (typed) arrays contain no holes either.
  if (IsExternalArrayElementsKind(elements_kind())) {
    return false;
  }

  if (hole_mode() == ALLOW_RETURN_HOLE) {
    // Double holes are encoded as a NaN pattern, so returning them is fine
    // only if every use treats the hole/undefined as NaN.
    if (IsFastDoubleElementsKind(elements_kind())) {
      return AllUsesCanTreatHoleAsNaN();
    }
    return true;
  }

  if (IsFastDoubleElementsKind(elements_kind())) {
    return false;
  }

  // Holes are only returned as tagged values.
  if (!representation().IsTagged()) {
    return false;
  }

  // Only HChange uses are guaranteed to convert the hole safely.
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (!use->IsChange()) return false;
  }

  return true;
}
3481
3482
AllUsesCanTreatHoleAsNaN() const3483 bool HLoadKeyed::AllUsesCanTreatHoleAsNaN() const {
3484 return IsFastDoubleElementsKind(elements_kind()) &&
3485 CheckUsesForFlag(HValue::kAllowUndefinedAsNaN);
3486 }
3487
3488
RequiresHoleCheck() const3489 bool HLoadKeyed::RequiresHoleCheck() const {
3490 if (IsFastPackedElementsKind(elements_kind())) {
3491 return false;
3492 }
3493
3494 if (IsExternalArrayElementsKind(elements_kind())) {
3495 return false;
3496 }
3497
3498 return !UsesMustHandleHole();
3499 }
3500
3501
PrintDataTo(OStream & os) const3502 OStream& HLoadKeyedGeneric::PrintDataTo(OStream& os) const { // NOLINT
3503 return os << NameOf(object()) << "[" << NameOf(key()) << "]";
3504 }
3505
3506
// Rewrites a for-in keyed load into a fast load-by-index, when the key comes
// from the for-in name cache of the same enumerable object.
HValue* HLoadKeyedGeneric::Canonicalize() {
  // Recognize generic keyed loads that use property name generated
  // by for-in statement as a key and rewrite them into fast property load
  // by index.
  if (key()->IsLoadKeyed()) {
    HLoadKeyed* key_load = HLoadKeyed::cast(key());
    if (key_load->elements()->IsForInCacheArray()) {
      HForInCacheArray* names_cache =
          HForInCacheArray::cast(key_load->elements());

      // The rewrite is only valid when the cache describes this receiver.
      if (names_cache->enumerable() == object()) {
        HForInCacheArray* index_cache =
            names_cache->index_cache();
        // Guard the fast path: the receiver's map must still match the one
        // the for-in cache was built for.
        HCheckMapValue* map_check =
            HCheckMapValue::New(block()->graph()->zone(),
                                block()->graph()->GetInvalidContext(),
                                object(),
                                names_cache->map());
        // Load the field index that parallels the name-cache entry.
        HInstruction* index = HLoadKeyed::New(
            block()->graph()->zone(),
            block()->graph()->GetInvalidContext(),
            index_cache,
            key_load->key(),
            key_load->key(),
            key_load->elements_kind());
        map_check->InsertBefore(this);
        index->InsertBefore(this);
        // Replace this generic load with a direct field-by-index load.
        return Prepend(new(block()->zone()) HLoadFieldByIndex(
            object(), index));
      }
    }
  }

  return this;
}
3542
3543
PrintDataTo(OStream & os) const3544 OStream& HStoreNamedGeneric::PrintDataTo(OStream& os) const { // NOLINT
3545 Handle<String> n = Handle<String>::cast(name());
3546 return os << NameOf(object()) << "." << n->ToCString().get() << " = "
3547 << NameOf(value());
3548 }
3549
3550
PrintDataTo(OStream & os) const3551 OStream& HStoreNamedField::PrintDataTo(OStream& os) const { // NOLINT
3552 os << NameOf(object()) << access_ << " = " << NameOf(value());
3553 if (NeedsWriteBarrier()) os << " (write-barrier)";
3554 if (has_transition()) os << " (transition map " << *transition_map() << ")";
3555 return os;
3556 }
3557
3558
// Prints the elements backing store (with its kind for external arrays), the
// key (plus dehoisted base offset) and the stored value.
OStream& HStoreKeyed::PrintDataTo(OStream& os) const {  // NOLINT
  if (!is_external()) {
    os << NameOf(elements());
  } else {
    DCHECK(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
           elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
    os << NameOf(elements()) << "." << ElementsKindToString(elements_kind());
  }

  os << "[" << NameOf(key());
  if (IsDehoisted()) os << " + " << base_offset();
  return os << "] = " << NameOf(value());
}
3572
3573
PrintDataTo(OStream & os) const3574 OStream& HStoreKeyedGeneric::PrintDataTo(OStream& os) const { // NOLINT
3575 return os << NameOf(object()) << "[" << NameOf(key())
3576 << "] = " << NameOf(value());
3577 }
3578
3579
// Prints the object plus the source and target maps with their elements
// kinds, and whether the transition is a simple map change.
OStream& HTransitionElementsKind::PrintDataTo(OStream& os) const {  // NOLINT
  os << NameOf(object());
  ElementsKind from_kind = original_map().handle()->elements_kind();
  ElementsKind to_kind = transitioned_map().handle()->elements_kind();
  os << " " << *original_map().handle() << " ["
     << ElementsAccessor::ForKind(from_kind)->name() << "] -> "
     << *transitioned_map().handle() << " ["
     << ElementsAccessor::ForKind(to_kind)->name() << "]";
  if (IsSimpleMapChangeTransition(from_kind, to_kind)) os << " (simple)";
  return os;
}
3591
3592
PrintDataTo(OStream & os) const3593 OStream& HLoadGlobalCell::PrintDataTo(OStream& os) const { // NOLINT
3594 os << "[" << *cell().handle() << "]";
3595 if (details_.IsConfigurable()) os << " (configurable)";
3596 if (details_.IsReadOnly()) os << " (read-only)";
3597 return os;
3598 }
3599
3600
RequiresHoleCheck() const3601 bool HLoadGlobalCell::RequiresHoleCheck() const {
3602 if (!details_.IsConfigurable()) return false;
3603 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3604 HValue* use = it.value();
3605 if (!use->IsChange()) return true;
3606 }
3607 return false;
3608 }
3609
3610
PrintDataTo(OStream & os) const3611 OStream& HLoadGlobalGeneric::PrintDataTo(OStream& os) const { // NOLINT
3612 return os << name()->ToCString().get() << " ";
3613 }
3614
3615
// Prints the base allocation and the inner offset (delegating the offset
// formatting to the offset value itself).
OStream& HInnerAllocatedObject::PrintDataTo(OStream& os) const {  // NOLINT
  os << NameOf(base_object()) << " offset ";
  return offset()->PrintTo(os);
}
3620
3621
PrintDataTo(OStream & os) const3622 OStream& HStoreGlobalCell::PrintDataTo(OStream& os) const { // NOLINT
3623 os << "[" << *cell().handle() << "] = " << NameOf(value());
3624 if (details_.IsConfigurable()) os << " (configurable)";
3625 if (details_.IsReadOnly()) os << " (read-only)";
3626 return os;
3627 }
3628
3629
PrintDataTo(OStream & os) const3630 OStream& HLoadContextSlot::PrintDataTo(OStream& os) const { // NOLINT
3631 return os << NameOf(value()) << "[" << slot_index() << "]";
3632 }
3633
3634
PrintDataTo(OStream & os) const3635 OStream& HStoreContextSlot::PrintDataTo(OStream& os) const { // NOLINT
3636 return os << NameOf(context()) << "[" << slot_index()
3637 << "] = " << NameOf(value());
3638 }
3639
3640
3641 // Implementation of type inference and type conversions. Calculates
3642 // the inferred type of this instruction based on the input operands.
3643
// Default type inference: an HValue's type is whatever was assigned at
// construction time.
HType HValue::CalculateInferredType() {
  return type_;
}
3647
3648
CalculateInferredType()3649 HType HPhi::CalculateInferredType() {
3650 if (OperandCount() == 0) return HType::Tagged();
3651 HType result = OperandAt(0)->type();
3652 for (int i = 1; i < OperandCount(); ++i) {
3653 HType current = OperandAt(i)->type();
3654 result = result.Combine(current);
3655 }
3656 return result;
3657 }
3658
3659
CalculateInferredType()3660 HType HChange::CalculateInferredType() {
3661 if (from().IsDouble() && to().IsTagged()) return HType::HeapNumber();
3662 return type();
3663 }
3664
3665
// Chooses the operation's representation from its input; flexible
// floor/round defer entirely to the uses instead.
Representation HUnaryMathOperation::RepresentationFromInputs() {
  if (SupportsFlexibleFloorAndRound() &&
      (op_ == kMathFloor || op_ == kMathRound)) {
    // Floor and Round always take a double input. The integral result can be
    // used as an integer or a double. Infer the representation from the uses.
    return Representation::None();
  }
  Representation rep = representation();
  // If any of the actual input representation is more general than what we
  // have so far but not Tagged, use that representation instead.
  Representation input_rep = value()->representation();
  if (!input_rep.IsTagged()) {
    rep = rep.generalize(input_rep);
  }
  return rep;
}
3682
3683
HandleSideEffectDominator(GVNFlag side_effect,HValue * dominator)3684 bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
3685 HValue* dominator) {
3686 DCHECK(side_effect == kNewSpacePromotion);
3687 Zone* zone = block()->zone();
3688 if (!FLAG_use_allocation_folding) return false;
3689
3690 // Try to fold allocations together with their dominating allocations.
3691 if (!dominator->IsAllocate()) {
3692 if (FLAG_trace_allocation_folding) {
3693 PrintF("#%d (%s) cannot fold into #%d (%s)\n",
3694 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3695 }
3696 return false;
3697 }
3698
3699 // Check whether we are folding within the same block for local folding.
3700 if (FLAG_use_local_allocation_folding && dominator->block() != block()) {
3701 if (FLAG_trace_allocation_folding) {
3702 PrintF("#%d (%s) cannot fold into #%d (%s), crosses basic blocks\n",
3703 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3704 }
3705 return false;
3706 }
3707
3708 HAllocate* dominator_allocate = HAllocate::cast(dominator);
3709 HValue* dominator_size = dominator_allocate->size();
3710 HValue* current_size = size();
3711
3712 // TODO(hpayer): Add support for non-constant allocation in dominator.
3713 if (!dominator_size->IsInteger32Constant()) {
3714 if (FLAG_trace_allocation_folding) {
3715 PrintF("#%d (%s) cannot fold into #%d (%s), "
3716 "dynamic allocation size in dominator\n",
3717 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3718 }
3719 return false;
3720 }
3721
3722 dominator_allocate = GetFoldableDominator(dominator_allocate);
3723 if (dominator_allocate == NULL) {
3724 return false;
3725 }
3726
3727 if (!has_size_upper_bound()) {
3728 if (FLAG_trace_allocation_folding) {
3729 PrintF("#%d (%s) cannot fold into #%d (%s), "
3730 "can't estimate total allocation size\n",
3731 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3732 }
3733 return false;
3734 }
3735
3736 if (!current_size->IsInteger32Constant()) {
3737 // If it's not constant then it is a size_in_bytes calculation graph
3738 // like this: (const_header_size + const_element_size * size).
3739 DCHECK(current_size->IsInstruction());
3740
3741 HInstruction* current_instr = HInstruction::cast(current_size);
3742 if (!current_instr->Dominates(dominator_allocate)) {
3743 if (FLAG_trace_allocation_folding) {
3744 PrintF("#%d (%s) cannot fold into #%d (%s), dynamic size "
3745 "value does not dominate target allocation\n",
3746 id(), Mnemonic(), dominator_allocate->id(),
3747 dominator_allocate->Mnemonic());
3748 }
3749 return false;
3750 }
3751 }
3752
3753 DCHECK((IsNewSpaceAllocation() &&
3754 dominator_allocate->IsNewSpaceAllocation()) ||
3755 (IsOldDataSpaceAllocation() &&
3756 dominator_allocate->IsOldDataSpaceAllocation()) ||
3757 (IsOldPointerSpaceAllocation() &&
3758 dominator_allocate->IsOldPointerSpaceAllocation()));
3759
3760 // First update the size of the dominator allocate instruction.
3761 dominator_size = dominator_allocate->size();
3762 int32_t original_object_size =
3763 HConstant::cast(dominator_size)->GetInteger32Constant();
3764 int32_t dominator_size_constant = original_object_size;
3765
3766 if (MustAllocateDoubleAligned()) {
3767 if ((dominator_size_constant & kDoubleAlignmentMask) != 0) {
3768 dominator_size_constant += kDoubleSize / 2;
3769 }
3770 }
3771
3772 int32_t current_size_max_value = size_upper_bound()->GetInteger32Constant();
3773 int32_t new_dominator_size = dominator_size_constant + current_size_max_value;
3774
3775 // Since we clear the first word after folded memory, we cannot use the
3776 // whole Page::kMaxRegularHeapObjectSize memory.
3777 if (new_dominator_size > Page::kMaxRegularHeapObjectSize - kPointerSize) {
3778 if (FLAG_trace_allocation_folding) {
3779 PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n",
3780 id(), Mnemonic(), dominator_allocate->id(),
3781 dominator_allocate->Mnemonic(), new_dominator_size);
3782 }
3783 return false;
3784 }
3785
3786 HInstruction* new_dominator_size_value;
3787
3788 if (current_size->IsInteger32Constant()) {
3789 new_dominator_size_value =
3790 HConstant::CreateAndInsertBefore(zone,
3791 context(),
3792 new_dominator_size,
3793 Representation::None(),
3794 dominator_allocate);
3795 } else {
3796 HValue* new_dominator_size_constant =
3797 HConstant::CreateAndInsertBefore(zone,
3798 context(),
3799 dominator_size_constant,
3800 Representation::Integer32(),
3801 dominator_allocate);
3802
3803 // Add old and new size together and insert.
3804 current_size->ChangeRepresentation(Representation::Integer32());
3805
3806 new_dominator_size_value = HAdd::New(zone, context(),
3807 new_dominator_size_constant, current_size);
3808 new_dominator_size_value->ClearFlag(HValue::kCanOverflow);
3809 new_dominator_size_value->ChangeRepresentation(Representation::Integer32());
3810
3811 new_dominator_size_value->InsertBefore(dominator_allocate);
3812 }
3813
3814 dominator_allocate->UpdateSize(new_dominator_size_value);
3815
3816 if (MustAllocateDoubleAligned()) {
3817 if (!dominator_allocate->MustAllocateDoubleAligned()) {
3818 dominator_allocate->MakeDoubleAligned();
3819 }
3820 }
3821
3822 bool keep_new_space_iterable = FLAG_log_gc || FLAG_heap_stats;
3823 #ifdef VERIFY_HEAP
3824 keep_new_space_iterable = keep_new_space_iterable || FLAG_verify_heap;
3825 #endif
3826
3827 if (keep_new_space_iterable && dominator_allocate->IsNewSpaceAllocation()) {
3828 dominator_allocate->MakePrefillWithFiller();
3829 } else {
3830 // TODO(hpayer): This is a short-term hack to make allocation mementos
3831 // work again in new space.
3832 dominator_allocate->ClearNextMapWord(original_object_size);
3833 }
3834
3835 dominator_allocate->UpdateClearNextMapWord(MustClearNextMapWord());
3836
3837 // After that replace the dominated allocate instruction.
3838 HInstruction* inner_offset = HConstant::CreateAndInsertBefore(
3839 zone,
3840 context(),
3841 dominator_size_constant,
3842 Representation::None(),
3843 this);
3844
3845 HInstruction* dominated_allocate_instr =
3846 HInnerAllocatedObject::New(zone,
3847 context(),
3848 dominator_allocate,
3849 inner_offset,
3850 type());
3851 dominated_allocate_instr->InsertBefore(this);
3852 DeleteAndReplaceWith(dominated_allocate_instr);
3853 if (FLAG_trace_allocation_folding) {
3854 PrintF("#%d (%s) folded into #%d (%s)\n",
3855 id(), Mnemonic(), dominator_allocate->id(),
3856 dominator_allocate->Mnemonic());
3857 }
3858 return true;
3859 }
3860
3861
// Returns the allocation instruction that this allocation may be folded
// into, or NULL when folding is impossible.  When this and |dominator| live
// in different (old) spaces, folding may instead target the dominator's own
// dominating allocation, installing or growing a free-space filler so the
// hoisted-over region stays iterable for the GC.
HAllocate* HAllocate::GetFoldableDominator(HAllocate* dominator) {
  if (!IsFoldable(dominator)) {
    // We cannot hoist old space allocations over new space allocations.
    if (IsNewSpaceAllocation() || dominator->IsNewSpaceAllocation()) {
      if (FLAG_trace_allocation_folding) {
        PrintF("#%d (%s) cannot fold into #%d (%s), new space hoisting\n",
               id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
      }
      return NULL;
    }

    HAllocate* dominator_dominator = dominator->dominating_allocate_;

    // We can hoist old data space allocations over an old pointer space
    // allocation and vice versa. For that we have to check the dominator
    // of the dominator allocate instruction.
    if (dominator_dominator == NULL) {
      // Remember |dominator| so that a later allocation dominated by it can
      // attempt the transitive hoist.
      dominating_allocate_ = dominator;
      if (FLAG_trace_allocation_folding) {
        PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n",
               id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
      }
      return NULL;
    }

    // We can just fold old space allocations that are in the same basic block,
    // since it is not guaranteed that we fill up the whole allocated old
    // space memory.
    // TODO(hpayer): Remove this limitation and add filler maps for each
    // allocation as soon as we have store elimination.
    if (block()->block_id() != dominator_dominator->block()->block_id()) {
      if (FLAG_trace_allocation_folding) {
        PrintF("#%d (%s) cannot fold into #%d (%s), different basic blocks\n",
               id(), Mnemonic(), dominator_dominator->id(),
               dominator_dominator->Mnemonic());
      }
      return NULL;
    }

    DCHECK((IsOldDataSpaceAllocation() &&
           dominator_dominator->IsOldDataSpaceAllocation()) ||
           (IsOldPointerSpaceAllocation() &&
           dominator_dominator->IsOldPointerSpaceAllocation()));

    int32_t current_size = HConstant::cast(size())->GetInteger32Constant();
    HStoreNamedField* dominator_free_space_size =
        dominator->filler_free_space_size_;
    if (dominator_free_space_size != NULL) {
      // We already hoisted one old space allocation, i.e., we already
      // installed a filler map. Hence, we just have to update the free
      // space size.
      dominator->UpdateFreeSpaceFiller(current_size);
    } else {
      // This is the first old space allocation that gets hoisted. We have to
      // install a filler map since the following allocation may cause a GC.
      dominator->CreateFreeSpaceFiller(current_size);
    }

    // We can hoist the old space allocation over the actual dominator.
    return dominator_dominator;
  }
  return dominator;
}
3924
3925
// Grows the free-space filler previously installed by CreateFreeSpaceFiller
// by |free_space_size| bytes: replaces the constant stored into the filler's
// size field with the accumulated total.
void HAllocate::UpdateFreeSpaceFiller(int32_t free_space_size) {
  DCHECK(filler_free_space_size_ != NULL);
  Zone* zone = block()->zone();
  // We must explicitly force Smi representation here because on x64 we
  // would otherwise automatically choose int32, but the actual store
  // requires a Smi-tagged value.
  HConstant* new_free_space_size = HConstant::CreateAndInsertBefore(
      zone,
      context(),
      filler_free_space_size_->value()->GetInteger32Constant() +
      free_space_size,
      Representation::Smi(),
      filler_free_space_size_);
  filler_free_space_size_->UpdateValue(new_free_space_size);
}
3941
3942
// Installs a FreeSpace filler object of |free_space_size| bytes at the end
// of the dominating allocation, by emitting an inner-allocated object plus
// stores for its map and its size.  The size store is remembered in
// filler_free_space_size_ so later hoists can grow it via
// UpdateFreeSpaceFiller.
void HAllocate::CreateFreeSpaceFiller(int32_t free_space_size) {
  DCHECK(filler_free_space_size_ == NULL);
  Zone* zone = block()->zone();
  // The filler starts right behind the dominating allocation's object.
  HInstruction* free_space_instr =
      HInnerAllocatedObject::New(zone, context(), dominating_allocate_,
      dominating_allocate_->size(), type());
  free_space_instr->InsertBefore(this);
  HConstant* filler_map = HConstant::CreateAndInsertAfter(
      zone, Unique<Map>::CreateImmovable(
          isolate()->factory()->free_space_map()), true, free_space_instr);
  HInstruction* store_map = HStoreNamedField::New(zone, context(),
      free_space_instr, HObjectAccess::ForMap(), filler_map);
  store_map->SetFlag(HValue::kHasNoObservableSideEffects);
  store_map->InsertAfter(filler_map);

  // We must explicitly force Smi representation here because on x64 we
  // would otherwise automatically choose int32, but the actual store
  // requires a Smi-tagged value.
  HConstant* filler_size = HConstant::CreateAndInsertAfter(
      zone, context(), free_space_size, Representation::Smi(), store_map);
  // Must force Smi representation for x64 (see comment above).
  HObjectAccess access =
      HObjectAccess::ForMapAndOffset(isolate()->factory()->free_space_map(),
                                     FreeSpace::kSizeOffset,
                                     Representation::Smi());
  HStoreNamedField* store_size = HStoreNamedField::New(zone, context(),
      free_space_instr, access, filler_size);
  store_size->SetFlag(HValue::kHasNoObservableSideEffects);
  store_size->InsertAfter(filler_size);
  filler_free_space_size_ = store_size;
}
3974
3975
ClearNextMapWord(int offset)3976 void HAllocate::ClearNextMapWord(int offset) {
3977 if (MustClearNextMapWord()) {
3978 Zone* zone = block()->zone();
3979 HObjectAccess access =
3980 HObjectAccess::ForObservableJSObjectOffset(offset);
3981 HStoreNamedField* clear_next_map =
3982 HStoreNamedField::New(zone, context(), this, access,
3983 block()->graph()->GetConstant0());
3984 clear_next_map->ClearAllSideEffects();
3985 clear_next_map->InsertAfter(this);
3986 }
3987 }
3988
3989
PrintDataTo(OStream & os) const3990 OStream& HAllocate::PrintDataTo(OStream& os) const { // NOLINT
3991 os << NameOf(size()) << " (";
3992 if (IsNewSpaceAllocation()) os << "N";
3993 if (IsOldPointerSpaceAllocation()) os << "P";
3994 if (IsOldDataSpaceAllocation()) os << "D";
3995 if (MustAllocateDoubleAligned()) os << "A";
3996 if (MustPrefillWithFiller()) os << "F";
3997 return os << ")";
3998 }
3999
4000
// Attempts to add |increase_by_value| to the store's base offset using a
// checked (overflow-detecting) addition.  Returns false, leaving the offset
// unchanged, when the sum would not fit in uint32.
bool HStoreKeyed::TryIncreaseBaseOffset(uint32_t increase_by_value) {
  // The base offset is usually simply the size of the array header, except
  // that dehoisting adds an additional offset due to an array index key
  // manipulation, in which case it becomes (array header size +
  // constant-offset-from-key * kPointerSize).
  v8::base::internal::CheckedNumeric<uint32_t> addition_result = base_offset_;
  addition_result += increase_by_value;
  if (!addition_result.IsValid()) return false;
  base_offset_ = addition_result.ValueOrDie();
  return true;
}
4012
4013
// Returns true when the stored value may be an arbitrary NaN bit pattern and
// therefore must be canonicalized before being written into a double array.
bool HStoreKeyed::NeedsCanonicalization() {
  // If value is an integer or smi or comes from the result of a keyed load or
  // constant then it is either a non-hole value or in the case of a constant
  // the hole is only being stored explicitly: no need for canonicalization.
  //
  // The exception to that is keyed loads from external float or double arrays:
  // these can load arbitrary representation of NaN.

  if (value()->IsConstant()) {
    return false;
  }

  if (value()->IsLoadKeyed()) {
    // Only external float/double loads can produce non-canonical NaNs.
    return IsExternalFloatOrDoubleElementsKind(
        HLoadKeyed::cast(value())->elements_kind());
  }

  if (value()->IsChange()) {
    // Conversions from smi/int32 inputs cannot produce a signalling NaN.
    if (HChange::cast(value())->from().IsSmiOrInteger32()) {
      return false;
    }
    if (HChange::cast(value())->value()->type().IsSmi()) {
      return false;
    }
  }
  return true;
}
4041
4042
// Helpers that build int32/double HConstants; they rely on |zone| and
// |context| being in scope at the expansion site.
#define H_CONSTANT_INT(val) \
HConstant::New(zone, context, static_cast<int32_t>(val))
#define H_CONSTANT_DOUBLE(val) \
HConstant::New(zone, context, static_cast<double>(val))

// Defines HInstr::New for a simple binary arithmetic instruction.  When
// constant folding is enabled and both operands are numeric constants, the
// operation is folded to a constant, preferring an int32 constant whenever
// the double result is exactly representable as int32.
#define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op) \
HInstruction* HInstr::New( \
    Zone* zone, HValue* context, HValue* left, HValue* right) { \
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
    HConstant* c_left = HConstant::cast(left); \
    HConstant* c_right = HConstant::cast(right); \
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
      double double_res = c_left->DoubleValue() op c_right->DoubleValue(); \
      if (IsInt32Double(double_res)) { \
        return H_CONSTANT_INT(double_res); \
      } \
      return H_CONSTANT_DOUBLE(double_res); \
    } \
  } \
  return new(zone) HInstr(context, left, right); \
}


DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +)
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HMul, *)
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HSub, -)

#undef DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR
4071
4072
New(Zone * zone,HValue * context,HValue * left,HValue * right,PretenureFlag pretenure_flag,StringAddFlags flags,Handle<AllocationSite> allocation_site)4073 HInstruction* HStringAdd::New(Zone* zone,
4074 HValue* context,
4075 HValue* left,
4076 HValue* right,
4077 PretenureFlag pretenure_flag,
4078 StringAddFlags flags,
4079 Handle<AllocationSite> allocation_site) {
4080 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4081 HConstant* c_right = HConstant::cast(right);
4082 HConstant* c_left = HConstant::cast(left);
4083 if (c_left->HasStringValue() && c_right->HasStringValue()) {
4084 Handle<String> left_string = c_left->StringValue();
4085 Handle<String> right_string = c_right->StringValue();
4086 // Prevent possible exception by invalid string length.
4087 if (left_string->length() + right_string->length() < String::kMaxLength) {
4088 MaybeHandle<String> concat = zone->isolate()->factory()->NewConsString(
4089 c_left->StringValue(), c_right->StringValue());
4090 return HConstant::New(zone, context, concat.ToHandleChecked());
4091 }
4092 }
4093 }
4094 return new(zone) HStringAdd(
4095 context, left, right, pretenure_flag, flags, allocation_site);
4096 }
4097
4098
PrintDataTo(OStream & os) const4099 OStream& HStringAdd::PrintDataTo(OStream& os) const { // NOLINT
4100 if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
4101 os << "_CheckBoth";
4102 } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_LEFT) {
4103 os << "_CheckLeft";
4104 } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_RIGHT) {
4105 os << "_CheckRight";
4106 }
4107 HBinaryOperation::PrintDataTo(os);
4108 os << " (";
4109 if (pretenure_flag() == NOT_TENURED)
4110 os << "N";
4111 else if (pretenure_flag() == TENURED)
4112 os << "D";
4113 return os << ")";
4114 }
4115
4116
// Constant-folds a string-from-char-code operation: a finite constant code
// is masked to 16 bits and resolved via the single-character string table,
// while a non-finite code (NaN or +/-Infinity) folds to the empty string.
HInstruction* HStringCharFromCode::New(
    Zone* zone, HValue* context, HValue* char_code) {
  if (FLAG_fold_constants && char_code->IsConstant()) {
    HConstant* c_code = HConstant::cast(char_code);
    Isolate* isolate = zone->isolate();
    if (c_code->HasNumberValue()) {
      if (std::isfinite(c_code->DoubleValue())) {
        // Char codes are taken modulo 2^16.
        uint32_t code = c_code->NumberValueAsInteger32() & 0xffff;
        return HConstant::New(zone, context,
            isolate->factory()->LookupSingleCharacterStringFromCode(code));
      }
      return HConstant::New(zone, context, isolate->factory()->empty_string());
    }
  }
  return new(zone) HStringCharFromCode(context, char_code);
}
4133
4134
// Constant-folds a unary math builtin when its argument is a numeric
// constant.  Special values are handled first: NaN poisons every operation,
// and infinities get per-builtin results before the general folding switch.
HInstruction* HUnaryMathOperation::New(
    Zone* zone, HValue* context, HValue* value, BuiltinFunctionId op) {
  // The do { ... } while (false) wrapper lets each precondition bail out to
  // the non-folded instruction with a plain |break|.
  do {
    if (!FLAG_fold_constants) break;
    if (!value->IsConstant()) break;
    HConstant* constant = HConstant::cast(value);
    if (!constant->HasNumberValue()) break;
    double d = constant->DoubleValue();
    if (std::isnan(d)) {  // NaN poisons everything.
      return H_CONSTANT_DOUBLE(base::OS::nan_value());
    }
    if (std::isinf(d)) {  // +Infinity and -Infinity.
      switch (op) {
        case kMathExp:
          // exp(+inf) = +inf, exp(-inf) = 0.
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : 0.0);
        case kMathLog:
        case kMathSqrt:
          // log(-inf) and sqrt(-inf) are NaN.
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : base::OS::nan_value());
        case kMathPowHalf:
        case kMathAbs:
          // Both map -inf to +inf.
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : -d);
        case kMathRound:
        case kMathFround:
        case kMathFloor:
          // Infinities round/floor to themselves.
          return H_CONSTANT_DOUBLE(d);
        case kMathClz32:
          // ToUint32(inf) is 0, which has 32 leading zeros.
          return H_CONSTANT_INT(32);
        default:
          UNREACHABLE();
          break;
      }
    }
    switch (op) {
      case kMathExp:
        return H_CONSTANT_DOUBLE(fast_exp(d));
      case kMathLog:
        return H_CONSTANT_DOUBLE(std::log(d));
      case kMathSqrt:
        return H_CONSTANT_DOUBLE(fast_sqrt(d));
      case kMathPowHalf:
        return H_CONSTANT_DOUBLE(power_double_double(d, 0.5));
      case kMathAbs:
        // d + 0.0 normalizes -0.0 to +0.0.
        return H_CONSTANT_DOUBLE((d >= 0.0) ? d + 0.0 : -d);
      case kMathRound:
        // -0.5 .. -0.0 round to -0.0.
        if ((d >= -0.5 && Double(d).Sign() < 0)) return H_CONSTANT_DOUBLE(-0.0);
        // Doubles are represented as Significant * 2 ^ Exponent. If the
        // Exponent is not negative, the double value is already an integer.
        if (Double(d).Exponent() >= 0) return H_CONSTANT_DOUBLE(d);
        return H_CONSTANT_DOUBLE(Floor(d + 0.5));
      case kMathFround:
        return H_CONSTANT_DOUBLE(static_cast<double>(static_cast<float>(d)));
      case kMathFloor:
        return H_CONSTANT_DOUBLE(Floor(d));
      case kMathClz32: {
        uint32_t i = DoubleToUint32(d);
        return H_CONSTANT_INT(base::bits::CountLeadingZeros32(i));
      }
      default:
        UNREACHABLE();
        break;
    }
  } while (false);
  return new(zone) HUnaryMathOperation(context, value, op);
}
4200
4201
// For floor/round, chooses between an int32 and a double output
// representation based on the uses: a single double use (observed or
// required) is enough to pick double.  All other ops defer to the generic
// HValue logic.
Representation HUnaryMathOperation::RepresentationFromUses() {
  if (op_ != kMathFloor && op_ != kMathRound) {
    return HValue::RepresentationFromUses();
  }

  // The instruction can have an int32 or double output. Prefer a double
  // representation if there are double uses.
  bool use_double = false;

  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    int use_index = it.index();
    Representation rep_observed = use->observed_input_representation(use_index);
    Representation rep_required = use->RequiredInputRepresentation(use_index);
    use_double |= (rep_observed.IsDouble() || rep_required.IsDouble());
    if (use_double && !FLAG_trace_representation) {
      // Having seen one double is enough.
      break;
    }
    if (FLAG_trace_representation) {
      // When tracing, keep iterating so every use is reported.
      if (!rep_required.IsDouble() || rep_observed.IsDouble()) {
        PrintF("#%d %s is used by #%d %s as %s%s\n",
               id(), Mnemonic(), use->id(),
               use->Mnemonic(), rep_observed.Mnemonic(),
               (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
      } else {
        PrintF("#%d %s is required by #%d %s as %s%s\n",
               id(), Mnemonic(), use->id(),
               use->Mnemonic(), rep_required.Mnemonic(),
               (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
      }
    }
  }
  return use_double ? Representation::Double() : Representation::Integer32();
}
4237
4238
New(Zone * zone,HValue * context,HValue * left,HValue * right)4239 HInstruction* HPower::New(Zone* zone,
4240 HValue* context,
4241 HValue* left,
4242 HValue* right) {
4243 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4244 HConstant* c_left = HConstant::cast(left);
4245 HConstant* c_right = HConstant::cast(right);
4246 if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
4247 double result = power_helper(c_left->DoubleValue(),
4248 c_right->DoubleValue());
4249 return H_CONSTANT_DOUBLE(std::isnan(result) ? base::OS::nan_value()
4250 : result);
4251 }
4252 }
4253 return new(zone) HPower(left, right);
4254 }
4255
4256
// Constant-folds Math.min/Math.max when both operands are numeric constants,
// reproducing the JavaScript ordering: -0 is smaller than +0, and any NaN
// operand makes the result NaN.
HInstruction* HMathMinMax::New(
    Zone* zone, HValue* context, HValue* left, HValue* right, Operation op) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
      double d_left = c_left->DoubleValue();
      double d_right = c_right->DoubleValue();
      if (op == kMathMin) {
        if (d_left > d_right) return H_CONSTANT_DOUBLE(d_right);
        if (d_left < d_right) return H_CONSTANT_DOUBLE(d_left);
        if (d_left == d_right) {
          // Handle +0 and -0: min prefers the negatively-signed zero.
          return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_left
                                                                 : d_right);
        }
      } else {
        if (d_left < d_right) return H_CONSTANT_DOUBLE(d_right);
        if (d_left > d_right) return H_CONSTANT_DOUBLE(d_left);
        if (d_left == d_right) {
          // Handle +0 and -0: max prefers the positively-signed zero.
          return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_right
                                                                 : d_left);
        }
      }
      // All comparisons failed, must be NaN.
      return H_CONSTANT_DOUBLE(base::OS::nan_value());
    }
  }
  return new(zone) HMathMinMax(context, left, right, op);
}
4288
4289
// Constant-folds an integer modulus, following JavaScript semantics: the
// kMinInt % -1 case and any zero result with a negative dividend fold to
// the double -0.0 (which is not representable as int32), and division by
// zero is left to the runtime instruction.
HInstruction* HMod::New(Zone* zone,
                        HValue* context,
                        HValue* left,
                        HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasInteger32Value() && c_right->HasInteger32Value()) {
      int32_t dividend = c_left->Integer32Value();
      int32_t divisor = c_right->Integer32Value();
      // kMinInt % -1 would trap in hardware; the JS result is -0.
      if (dividend == kMinInt && divisor == -1) {
        return H_CONSTANT_DOUBLE(-0.0);
      }
      if (divisor != 0) {
        int32_t res = dividend % divisor;
        // A zero remainder keeps the dividend's sign in JS, i.e. -0.
        if ((res == 0) && (dividend < 0)) {
          return H_CONSTANT_DOUBLE(-0.0);
        }
        return H_CONSTANT_INT(res);
      }
    }
  }
  return new(zone) HMod(context, left, right);
}
4314
4315
// Constant-folds a division of two numeric constants, preferring an int32
// constant when the result is exactly representable.  Division by (signed)
// zero folds to a correctly-signed infinity.
HInstruction* HDiv::New(
    Zone* zone, HValue* context, HValue* left, HValue* right) {
  // If left and right are constant values, try to return a constant value.
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      if (c_right->DoubleValue() != 0) {
        double double_res = c_left->DoubleValue() / c_right->DoubleValue();
        if (IsInt32Double(double_res)) {
          return H_CONSTANT_INT(double_res);
        }
        return H_CONSTANT_DOUBLE(double_res);
      } else {
        int sign = Double(c_left->DoubleValue()).Sign() *
                   Double(c_right->DoubleValue()).Sign();  // Right could be -0.
        return H_CONSTANT_DOUBLE(sign * V8_INFINITY);
      }
    }
  }
  return new(zone) HDiv(context, left, right);
}
4338
4339
// Constant-folds a bitwise AND/OR/XOR when both operands are numeric
// constants, after converting them to int32 as the JS bitwise operators do.
HInstruction* HBitwise::New(
    Zone* zone, HValue* context, Token::Value op, HValue* left, HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      int32_t result;
      int32_t v_left = c_left->NumberValueAsInteger32();
      int32_t v_right = c_right->NumberValueAsInteger32();
      switch (op) {
        case Token::BIT_XOR:
          result = v_left ^ v_right;
          break;
        case Token::BIT_AND:
          result = v_left & v_right;
          break;
        case Token::BIT_OR:
          result = v_left | v_right;
          break;
        default:
          result = 0;  // Please the compiler.
          UNREACHABLE();
      }
      return H_CONSTANT_INT(result);
    }
  }
  return new(zone) HBitwise(context, op, left, right);
}
4368
4369
// Defines HInstr::New for a shift instruction.  When both operands are
// numeric constants the shift is folded using the |result| expression,
// which masks the shift count to 5 bits as JavaScript requires.
#define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result) \
HInstruction* HInstr::New( \
    Zone* zone, HValue* context, HValue* left, HValue* right) { \
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
    HConstant* c_left = HConstant::cast(left); \
    HConstant* c_right = HConstant::cast(right); \
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
      return H_CONSTANT_INT(result); \
    } \
  } \
  return new(zone) HInstr(context, left, right); \
}


DEFINE_NEW_H_BITWISE_INSTR(HSar,
c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f))
DEFINE_NEW_H_BITWISE_INSTR(HShl,
c_left->NumberValueAsInteger32() << (c_right->NumberValueAsInteger32() & 0x1f))

#undef DEFINE_NEW_H_BITWISE_INSTR
4390
4391
// Constant-folds an unsigned right shift.  The special case of a zero shift
// of a negative int32 yields a value above kMaxInt, which must be a double
// constant; all other results fit in int32.
HInstruction* HShr::New(
    Zone* zone, HValue* context, HValue* left, HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      int32_t left_val = c_left->NumberValueAsInteger32();
      // JS masks the shift count to 5 bits.
      int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f;
      if ((right_val == 0) && (left_val < 0)) {
        // The uint32 reinterpretation of a negative value exceeds kMaxInt.
        return H_CONSTANT_DOUBLE(static_cast<uint32_t>(left_val));
      }
      return H_CONSTANT_INT(static_cast<uint32_t>(left_val) >> right_val);
    }
  }
  return new(zone) HShr(context, left, right);
}
4408
4409
// Constant-folds a character read from a sequential string when both the
// string and the index are constants.  The index is asserted to be within
// bounds; out-of-bounds accesses must be handled by the caller.
HInstruction* HSeqStringGetChar::New(Zone* zone,
                                     HValue* context,
                                     String::Encoding encoding,
                                     HValue* string,
                                     HValue* index) {
  if (FLAG_fold_constants && string->IsConstant() && index->IsConstant()) {
    HConstant* c_string = HConstant::cast(string);
    HConstant* c_index = HConstant::cast(index);
    if (c_string->HasStringValue() && c_index->HasInteger32Value()) {
      Handle<String> s = c_string->StringValue();
      int32_t i = c_index->Integer32Value();
      DCHECK_LE(0, i);
      DCHECK_LT(i, s->length());
      return H_CONSTANT_INT(s->Get(i));
    }
  }
  return new(zone) HSeqStringGetChar(encoding, string, index);
}
4428
4429
4430 #undef H_CONSTANT_INT
4431 #undef H_CONSTANT_DOUBLE
4432
4433
// Prints the bitwise token name (e.g. "BIT_AND") before the generic
// bitwise-binary-operation output.
OStream& HBitwise::PrintDataTo(OStream& os) const {  // NOLINT
  os << Token::Name(op_) << " ";
  return HBitwiseBinaryOperation::PrintDataTo(os);
}
4438
4439
// If every operand of this phi is a constant and every use truncates to
// int32, rewrites non-int32 constant operands into equivalent int32
// constants (doubles via DoubleToInt32, booleans to 0/1, immortal immovable
// objects to 0) so representation inference can pick int32.
void HPhi::SimplifyConstantInputs() {
  // Convert constant inputs to integers when all uses are truncating.
  // This must happen before representation inference takes place.
  if (!CheckUsesForFlag(kTruncatingToInt32)) return;
  for (int i = 0; i < OperandCount(); ++i) {
    // Bail out unless all operands are constants.
    if (!OperandAt(i)->IsConstant()) return;
  }
  HGraph* graph = block()->graph();
  for (int i = 0; i < OperandCount(); ++i) {
    HConstant* operand = HConstant::cast(OperandAt(i));
    if (operand->HasInteger32Value()) {
      continue;
    } else if (operand->HasDoubleValue()) {
      HConstant* integer_input =
          HConstant::New(graph->zone(), graph->GetInvalidContext(),
                         DoubleToInt32(operand->DoubleValue()));
      integer_input->InsertAfter(operand);
      SetOperandAt(i, integer_input);
    } else if (operand->HasBooleanValue()) {
      SetOperandAt(i, operand->BooleanValue() ? graph->GetConstant1()
                                              : graph->GetConstant0());
    } else if (operand->ImmortalImmovable()) {
      // Any immortal immovable object truncates to 0.
      SetOperandAt(i, graph->GetConstant0());
    }
  }
  // Overwrite observed input representations because they are likely Tagged.
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (use->IsBinaryOperation()) {
      HBinaryOperation::cast(use)->set_observed_input_representation(
          it.index(), Representation::Smi());
    }
  }
}
4474
4475
// Updates this phi's representation from three sources in turn: its inputs,
// the observed representations of its uses, and the uses' hard requirements.
void HPhi::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  new_rep = RepresentationFromUses();
  UpdateRepresentation(new_rep, h_infer, "uses");
  new_rep = RepresentationFromUseRequirements();
  UpdateRepresentation(new_rep, h_infer, "use requirements");
}
4485
4486
RepresentationFromInputs()4487 Representation HPhi::RepresentationFromInputs() {
4488 Representation r = Representation::None();
4489 for (int i = 0; i < OperandCount(); ++i) {
4490 r = r.generalize(OperandAt(i)->KnownOptimalRepresentation());
4491 }
4492 return r;
4493 }
4494
4495
// Returns a representation if all uses agree on the same representation.
// Integer32 is also returned when some uses are Smi but others are Integer32.
// Uses in unreachable blocks and uses with no requirement are ignored;
// any other disagreement yields Representation::None().
Representation HValue::RepresentationFromUseRequirements() {
  Representation rep = Representation::None();
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    // Ignore the use requirement from never run code
    if (it.value()->block()->IsUnreachable()) continue;

    // We check for observed_input_representation elsewhere.
    Representation use_rep =
        it.value()->RequiredInputRepresentation(it.index());
    if (rep.IsNone()) {
      // First concrete requirement seen becomes the candidate.
      rep = use_rep;
      continue;
    }
    if (use_rep.IsNone() || rep.Equals(use_rep)) continue;
    if (rep.generalize(use_rep).IsInteger32()) {
      // Smi and Integer32 requirements mix to Integer32.
      rep = Representation::Integer32();
      continue;
    }
    // Conflicting requirements: give up.
    return Representation::None();
  }
  return rep;
}
4520
4521
HasNonSmiUse()4522 bool HValue::HasNonSmiUse() {
4523 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4524 // We check for observed_input_representation elsewhere.
4525 Representation use_rep =
4526 it.value()->RequiredInputRepresentation(it.index());
4527 if (!use_rep.IsNone() &&
4528 !use_rep.IsSmi() &&
4529 !use_rep.IsTagged()) {
4530 return true;
4531 }
4532 }
4533 return false;
4534 }
4535
4536
4537 // Node-specific verification code is only included in debug mode.
4538 #ifdef DEBUG
4539
// Checks that this phi has one operand per predecessor and that each
// operand's defining block dominates (or is) the matching predecessor.
void HPhi::Verify() {
  DCHECK(OperandCount() == block()->predecessors()->length());
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* value = OperandAt(i);
    HBasicBlock* defining_block = value->block();
    HBasicBlock* predecessor_block = block()->predecessors()->at(i);
    DCHECK(defining_block == predecessor_block ||
           defining_block->Dominates(predecessor_block));
  }
}
4550
4551
// A simulate must carry an AST id unless it is immediately followed by an
// EnterInlined instruction.
void HSimulate::Verify() {
  HInstruction::Verify();
  DCHECK(HasAstId() || next()->IsEnterInlined());
}
4556
4557
// Check instructions exist for their checking side effect only; they must
// not have value uses.
void HCheckHeapObject::Verify() {
  HInstruction::Verify();
  DCHECK(HasNoUses());
}
4562
4563
// Like HCheckHeapObject, an HCheckValue is a pure check and must not have
// value uses.
void HCheckValue::Verify() {
  HInstruction::Verify();
  DCHECK(HasNoUses());
}
4568
4569 #endif
4570
4571
// Builds an access descriptor for a field inside a FixedArray header.
// The length field gets its dedicated (array-length) portion.
HObjectAccess HObjectAccess::ForFixedArrayHeader(int offset) {
  DCHECK(offset >= 0);
  DCHECK(offset < FixedArray::kHeaderSize);
  if (offset == FixedArray::kLengthOffset) return ForFixedArrayLength();
  return HObjectAccess(kInobject, offset);
}
4578
4579
// Builds an access descriptor for an offset into an object of the given
// map.  The elements pointer and the map word get their dedicated portions;
// when the map is known, the access records whether the offset falls inside
// the map's used in-object property area.
HObjectAccess HObjectAccess::ForMapAndOffset(Handle<Map> map, int offset,
                                             Representation representation) {
  DCHECK(offset >= 0);
  Portion portion = kInobject;

  if (offset == JSObject::kElementsOffset) {
    portion = kElementsPointer;
  } else if (offset == JSObject::kMapOffset) {
    portion = kMaps;
  }
  bool existing_inobject_property = true;
  if (!map.is_null()) {
    // An offset past the used in-object area addresses a yet-unused slot.
    existing_inobject_property = (offset <
        map->instance_size() - map->unused_property_fields() * kPointerSize);
  }
  return HObjectAccess(portion, offset, representation, Handle<String>::null(),
                       false, existing_inobject_property);
}
4598
4599
ForAllocationSiteOffset(int offset)4600 HObjectAccess HObjectAccess::ForAllocationSiteOffset(int offset) {
4601 switch (offset) {
4602 case AllocationSite::kTransitionInfoOffset:
4603 return HObjectAccess(kInobject, offset, Representation::Tagged());
4604 case AllocationSite::kNestedSiteOffset:
4605 return HObjectAccess(kInobject, offset, Representation::Tagged());
4606 case AllocationSite::kPretenureDataOffset:
4607 return HObjectAccess(kInobject, offset, Representation::Smi());
4608 case AllocationSite::kPretenureCreateCountOffset:
4609 return HObjectAccess(kInobject, offset, Representation::Smi());
4610 case AllocationSite::kDependentCodeOffset:
4611 return HObjectAccess(kInobject, offset, Representation::Tagged());
4612 case AllocationSite::kWeakNextOffset:
4613 return HObjectAccess(kInobject, offset, Representation::Tagged());
4614 default:
4615 UNREACHABLE();
4616 }
4617 return HObjectAccess(kInobject, offset);
4618 }
4619
4620
// Builds a tagged access descriptor for the context slot at |index|,
// translating the slot index to a byte offset past the context header.
HObjectAccess HObjectAccess::ForContextSlot(int index) {
  DCHECK(index >= 0);
  Portion portion = kInobject;
  int offset = Context::kHeaderSize + index * kPointerSize;
  DCHECK_EQ(offset, Context::SlotOffset(index) + kHeapObjectTag);
  return HObjectAccess(portion, offset, Representation::Tagged());
}
4628
4629
ForJSArrayOffset(int offset)4630 HObjectAccess HObjectAccess::ForJSArrayOffset(int offset) {
4631 DCHECK(offset >= 0);
4632 Portion portion = kInobject;
4633
4634 if (offset == JSObject::kElementsOffset) {
4635 portion = kElementsPointer;
4636 } else if (offset == JSArray::kLengthOffset) {
4637 portion = kArrayLengths;
4638 } else if (offset == JSObject::kMapOffset) {
4639 portion = kMaps;
4640 }
4641 return HObjectAccess(portion, offset);
4642 }
4643
4644
ForBackingStoreOffset(int offset,Representation representation)4645 HObjectAccess HObjectAccess::ForBackingStoreOffset(int offset,
4646 Representation representation) {
4647 DCHECK(offset >= 0);
4648 return HObjectAccess(kBackingStore, offset, representation,
4649 Handle<String>::null(), false, false);
4650 }
4651
4652
ForField(Handle<Map> map,int index,Representation representation,Handle<String> name)4653 HObjectAccess HObjectAccess::ForField(Handle<Map> map, int index,
4654 Representation representation,
4655 Handle<String> name) {
4656 if (index < 0) {
4657 // Negative property indices are in-object properties, indexed
4658 // from the end of the fixed part of the object.
4659 int offset = (index * kPointerSize) + map->instance_size();
4660 return HObjectAccess(kInobject, offset, representation, name, false, true);
4661 } else {
4662 // Non-negative property indices are in the properties array.
4663 int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
4664 return HObjectAccess(kBackingStore, offset, representation, name,
4665 false, false);
4666 }
4667 }
4668
4669
ForCellPayload(Isolate * isolate)4670 HObjectAccess HObjectAccess::ForCellPayload(Isolate* isolate) {
4671 return HObjectAccess(kInobject, Cell::kValueOffset, Representation::Tagged(),
4672 isolate->factory()->cell_value_string());
4673 }
4674
4675
SetGVNFlags(HValue * instr,PropertyAccessType access_type)4676 void HObjectAccess::SetGVNFlags(HValue *instr, PropertyAccessType access_type) {
4677 // set the appropriate GVN flags for a given load or store instruction
4678 if (access_type == STORE) {
4679 // track dominating allocations in order to eliminate write barriers
4680 instr->SetDependsOnFlag(::v8::internal::kNewSpacePromotion);
4681 instr->SetFlag(HValue::kTrackSideEffectDominators);
4682 } else {
4683 // try to GVN loads, but don't hoist above map changes
4684 instr->SetFlag(HValue::kUseGVN);
4685 instr->SetDependsOnFlag(::v8::internal::kMaps);
4686 }
4687
4688 switch (portion()) {
4689 case kArrayLengths:
4690 if (access_type == STORE) {
4691 instr->SetChangesFlag(::v8::internal::kArrayLengths);
4692 } else {
4693 instr->SetDependsOnFlag(::v8::internal::kArrayLengths);
4694 }
4695 break;
4696 case kStringLengths:
4697 if (access_type == STORE) {
4698 instr->SetChangesFlag(::v8::internal::kStringLengths);
4699 } else {
4700 instr->SetDependsOnFlag(::v8::internal::kStringLengths);
4701 }
4702 break;
4703 case kInobject:
4704 if (access_type == STORE) {
4705 instr->SetChangesFlag(::v8::internal::kInobjectFields);
4706 } else {
4707 instr->SetDependsOnFlag(::v8::internal::kInobjectFields);
4708 }
4709 break;
4710 case kDouble:
4711 if (access_type == STORE) {
4712 instr->SetChangesFlag(::v8::internal::kDoubleFields);
4713 } else {
4714 instr->SetDependsOnFlag(::v8::internal::kDoubleFields);
4715 }
4716 break;
4717 case kBackingStore:
4718 if (access_type == STORE) {
4719 instr->SetChangesFlag(::v8::internal::kBackingStoreFields);
4720 } else {
4721 instr->SetDependsOnFlag(::v8::internal::kBackingStoreFields);
4722 }
4723 break;
4724 case kElementsPointer:
4725 if (access_type == STORE) {
4726 instr->SetChangesFlag(::v8::internal::kElementsPointer);
4727 } else {
4728 instr->SetDependsOnFlag(::v8::internal::kElementsPointer);
4729 }
4730 break;
4731 case kMaps:
4732 if (access_type == STORE) {
4733 instr->SetChangesFlag(::v8::internal::kMaps);
4734 } else {
4735 instr->SetDependsOnFlag(::v8::internal::kMaps);
4736 }
4737 break;
4738 case kExternalMemory:
4739 if (access_type == STORE) {
4740 instr->SetChangesFlag(::v8::internal::kExternalMemory);
4741 } else {
4742 instr->SetDependsOnFlag(::v8::internal::kExternalMemory);
4743 }
4744 break;
4745 }
4746 }
4747
4748
operator <<(OStream & os,const HObjectAccess & access)4749 OStream& operator<<(OStream& os, const HObjectAccess& access) {
4750 os << ".";
4751
4752 switch (access.portion()) {
4753 case HObjectAccess::kArrayLengths:
4754 case HObjectAccess::kStringLengths:
4755 os << "%length";
4756 break;
4757 case HObjectAccess::kElementsPointer:
4758 os << "%elements";
4759 break;
4760 case HObjectAccess::kMaps:
4761 os << "%map";
4762 break;
4763 case HObjectAccess::kDouble: // fall through
4764 case HObjectAccess::kInobject:
4765 if (!access.name().is_null()) {
4766 os << Handle<String>::cast(access.name())->ToCString().get();
4767 }
4768 os << "[in-object]";
4769 break;
4770 case HObjectAccess::kBackingStore:
4771 if (!access.name().is_null()) {
4772 os << Handle<String>::cast(access.name())->ToCString().get();
4773 }
4774 os << "[backing-store]";
4775 break;
4776 case HObjectAccess::kExternalMemory:
4777 os << "[external-memory]";
4778 break;
4779 }
4780
4781 return os << "@" << access.offset();
4782 }
4783
4784 } } // namespace v8::internal
4785