// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/escape-analysis.h"

#include <limits>

#include "src/base/flags.h"
#include "src/bootstrapper.h"
#include "src/compilation-dependencies.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/graph-reducer.h"
#include "src/compiler/js-operator.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/node.h"
#include "src/compiler/operator-properties.h"
#include "src/compiler/simplified-operator.h"
#include "src/compiler/type-cache.h"
#include "src/objects-inl.h"

namespace v8 {
namespace internal {
namespace compiler {

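// An Alias is a compact id assigned to each tracked allocation; the special
// values kNotReachable and kUntrackable (defined below) mark nodes that are
// not tracked by the analysis.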
typedef NodeId Alias;

#ifdef DEBUG
#define TRACE(...)                                    \
  do {                                                \
    if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
  } while (false)
#else
#define TRACE(...)
#endif

// EscapeStatusAnalysis determines for each allocation whether it escapes.
class EscapeStatusAnalysis : public ZoneObject {
 public:
  enum Status {
    kUnknown = 0u,
    kTracked = 1u << 0,
    kEscaped = 1u << 1,
    kOnStack = 1u << 2,
    kVisited = 1u << 3,
    // A node is dangling if it is a load of some kind and does not have
    // an effect successor.
    kDanglingComputed = 1u << 4,
    kDangling = 1u << 5,
    // A node is an effect branch point if it has more than one non-dangling
    // effect successor.
    kBranchPointComputed = 1u << 6,
    kBranchPoint = 1u << 7,
    kInQueue = 1u << 8
  };
  typedef base::Flags<Status, uint16_t> StatusFlags;

  void RunStatusAnalysis();

  bool IsVirtual(Node* node);
  bool IsEscaped(Node* node);
  bool IsAllocation(Node* node);

  bool IsInQueue(NodeId id);
  void SetInQueue(NodeId id, bool on_stack);

  void DebugPrint();

  EscapeStatusAnalysis(EscapeAnalysis* object_analysis, Graph* graph,
                       Zone* zone);
  void EnqueueForStatusAnalysis(Node* node);
  bool SetEscaped(Node* node);
  bool IsEffectBranchPoint(Node* node);
  bool IsDanglingEffectNode(Node* node);
  void ResizeStatusVector();
  size_t GetStatusVectorSize();
  bool IsVirtual(NodeId id);

  Graph* graph() const { return graph_; }
  void AssignAliases();
  Alias GetAlias(NodeId id) const { return aliases_[id]; }
  const ZoneVector<Alias>& GetAliasMap() const { return aliases_; }
  Alias AliasCount() const { return next_free_alias_; }
  static const Alias kNotReachable;
  static const Alias kUntrackable;

  bool IsNotReachable(Node* node);

 private:
  void Process(Node* node);
  void ProcessAllocate(Node* node);
  void ProcessFinishRegion(Node* node);
  void ProcessStoreField(Node* node);
  void ProcessStoreElement(Node* node);
  bool CheckUsesForEscape(Node* node, bool phi_escaping = false) {
    return CheckUsesForEscape(node, node, phi_escaping);
  }
  bool CheckUsesForEscape(Node* node, Node* rep, bool phi_escaping = false);
  void RevisitUses(Node* node);
  void RevisitInputs(Node* node);

  Alias NextAlias() { return next_free_alias_++; }

  bool HasEntry(Node* node);

  bool IsAllocationPhi(Node* node);

  ZoneVector<Node*> stack_;
  EscapeAnalysis* object_analysis_;
  Graph* const graph_;
  ZoneVector<StatusFlags> status_;
  Alias next_free_alias_;
  ZoneVector<Node*> status_stack_;
  ZoneVector<Alias> aliases_;

  DISALLOW_COPY_AND_ASSIGN(EscapeStatusAnalysis);
};

DEFINE_OPERATORS_FOR_FLAGS(EscapeStatusAnalysis::StatusFlags)

const Alias EscapeStatusAnalysis::kNotReachable =
    std::numeric_limits<Alias>::max();
const Alias EscapeStatusAnalysis::kUntrackable =
    std::numeric_limits<Alias>::max() - 1;

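// A VirtualObject models the contents of one tracked allocation: a vector of
// field values plus a parallel bit vector recording which fields currently
// hold a phi that was created by the analysis itself.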
class VirtualObject : public ZoneObject {
 public:
  enum Status {
    kInitial = 0,
    kTracked = 1u << 0,
    kInitialized = 1u << 1,
    kCopyRequired = 1u << 2,
  };
  typedef base::Flags<Status, unsigned char> StatusFlags;

  VirtualObject(NodeId id, VirtualState* owner, Zone* zone)
      : id_(id),
        status_(kInitial),
        fields_(zone),
        phi_(zone),
        object_state_(nullptr),
        owner_(owner) {}

  VirtualObject(VirtualState* owner, const VirtualObject& other)
      : id_(other.id_),
        status_(other.status_ & ~kCopyRequired),
        fields_(other.fields_),
        phi_(other.phi_),
        object_state_(other.object_state_),
        owner_(owner) {}

  VirtualObject(NodeId id, VirtualState* owner, Zone* zone, size_t field_number,
                bool initialized)
      : id_(id),
        status_(kTracked | (initialized ? kInitialized : kInitial)),
        fields_(zone),
        phi_(zone),
        object_state_(nullptr),
        owner_(owner) {
    fields_.resize(field_number);
    phi_.resize(field_number, false);
  }

  Node* GetField(size_t offset) { return fields_[offset]; }

  bool IsCreatedPhi(size_t offset) { return phi_[offset]; }

  void SetField(size_t offset, Node* node, bool created_phi = false) {
    fields_[offset] = node;
    phi_[offset] = created_phi;
  }
  bool IsTracked() const { return status_ & kTracked; }
  bool IsInitialized() const { return status_ & kInitialized; }
  bool SetInitialized() { return status_ |= kInitialized; }
  VirtualState* owner() const { return owner_; }

  Node** fields_array() { return &fields_.front(); }
  size_t field_count() { return fields_.size(); }
  bool ResizeFields(size_t field_count) {
    if (field_count > fields_.size()) {
      fields_.resize(field_count);
      phi_.resize(field_count);
      return true;
    }
    return false;
  }
  void ClearAllFields() {
    for (size_t i = 0; i < fields_.size(); ++i) {
      fields_[i] = nullptr;
      phi_[i] = false;
    }
  }
  bool AllFieldsClear() {
    for (size_t i = 0; i < fields_.size(); ++i) {
      if (fields_[i] != nullptr) {
        return false;
      }
    }
    return true;
  }
  bool UpdateFrom(const VirtualObject& other);
  bool MergeFrom(MergeCache* cache, Node* at, Graph* graph,
                 CommonOperatorBuilder* common);
  void SetObjectState(Node* node) { object_state_ = node; }
  Node* GetObjectState() const { return object_state_; }
  bool IsCopyRequired() const { return status_ & kCopyRequired; }
  void SetCopyRequired() { status_ |= kCopyRequired; }
  bool NeedCopyForModification() {
    if (!IsCopyRequired() || !IsInitialized()) {
      return false;
    }
    return true;
  }

  NodeId id() const { return id_; }
  void id(NodeId id) { id_ = id; }

 private:
  bool MergeFields(size_t i, Node* at, MergeCache* cache, Graph* graph,
                   CommonOperatorBuilder* common);

  NodeId id_;
  StatusFlags status_;
  ZoneVector<Node*> fields_;
  ZoneVector<bool> phi_;
  Node* object_state_;
  VirtualState* owner_;

  DISALLOW_COPY_AND_ASSIGN(VirtualObject);
};

DEFINE_OPERATORS_FOR_FLAGS(VirtualObject::StatusFlags)

bool VirtualObject::UpdateFrom(const VirtualObject& other) {
  bool changed = status_ != other.status_;
  status_ = other.status_;
  phi_ = other.phi_;
  if (fields_.size() != other.fields_.size()) {
    fields_ = other.fields_;
    return true;
  }
  for (size_t i = 0; i < fields_.size(); ++i) {
    if (fields_[i] != other.fields_[i]) {
      changed = true;
      fields_[i] = other.fields_[i];
    }
  }
  return changed;
}

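// A VirtualState is the per-effect-node snapshot of the analysis: it maps
// each alias to the VirtualObject describing that allocation at this program
// point, or nullptr if no tracked object exists here.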
class VirtualState : public ZoneObject {
 public:
  VirtualState(Node* owner, Zone* zone, size_t size)
      : info_(size, nullptr, zone), owner_(owner) {}

  VirtualState(Node* owner, const VirtualState& state)
      : info_(state.info_.size(), nullptr, state.info_.get_allocator().zone()),
        owner_(owner) {
    for (size_t i = 0; i < info_.size(); ++i) {
      if (state.info_[i]) {
        info_[i] = state.info_[i];
      }
    }
  }

  VirtualObject* VirtualObjectFromAlias(size_t alias);
  void SetVirtualObject(Alias alias, VirtualObject* state);
  bool UpdateFrom(VirtualState* state, Zone* zone);
  bool MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
                 CommonOperatorBuilder* common, Node* at);
  size_t size() const { return info_.size(); }
  Node* owner() const { return owner_; }
  VirtualObject* Copy(VirtualObject* obj, Alias alias);
  void SetCopyRequired() {
    for (VirtualObject* obj : info_) {
      if (obj) obj->SetCopyRequired();
    }
  }

 private:
  ZoneVector<VirtualObject*> info_;
  Node* owner_;

  DISALLOW_COPY_AND_ASSIGN(VirtualState);
};

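// MergeCache is scratch storage used while merging states at (effect) phis:
// the incoming states, the virtual objects selected for one alias, and the
// field values collected from those objects.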
class MergeCache : public ZoneObject {
 public:
  explicit MergeCache(Zone* zone)
      : states_(zone), objects_(zone), fields_(zone) {
    states_.reserve(5);
    objects_.reserve(5);
    fields_.reserve(5);
  }
  ZoneVector<VirtualState*>& states() { return states_; }
  ZoneVector<VirtualObject*>& objects() { return objects_; }
  ZoneVector<Node*>& fields() { return fields_; }
  void Clear() {
    states_.clear();
    objects_.clear();
    fields_.clear();
  }
  size_t LoadVirtualObjectsFromStatesFor(Alias alias);
  void LoadVirtualObjectsForFieldsFrom(VirtualState* state,
                                       const ZoneVector<Alias>& aliases);
  Node* GetFields(size_t pos);

 private:
  ZoneVector<VirtualState*> states_;
  ZoneVector<VirtualObject*> objects_;
  ZoneVector<Node*> fields_;

  DISALLOW_COPY_AND_ASSIGN(MergeCache);
};

size_t MergeCache::LoadVirtualObjectsFromStatesFor(Alias alias) {
  objects_.clear();
  DCHECK_GT(states_.size(), 0u);
  size_t min = std::numeric_limits<size_t>::max();
  for (VirtualState* state : states_) {
    if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
      objects_.push_back(obj);
      min = std::min(obj->field_count(), min);
    }
  }
  return min;
}

void MergeCache::LoadVirtualObjectsForFieldsFrom(
    VirtualState* state, const ZoneVector<Alias>& aliases) {
  objects_.clear();
  size_t max_alias = state->size();
  for (Node* field : fields_) {
    Alias alias = aliases[field->id()];
    if (alias >= max_alias) continue;
    if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
      objects_.push_back(obj);
    }
  }
}

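// Collects the field values at index {pos} from all cached objects into
// fields_ and returns the common value if every object agrees on one,
// otherwise nullptr.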
Node* MergeCache::GetFields(size_t pos) {
  fields_.clear();
  Node* rep = pos >= objects_.front()->field_count()
                  ? nullptr
                  : objects_.front()->GetField(pos);
  for (VirtualObject* obj : objects_) {
    if (pos >= obj->field_count()) continue;
    Node* field = obj->GetField(pos);
    if (field) {
      fields_.push_back(field);
    }
    if (field != rep) {
      rep = nullptr;
    }
  }
  return rep;
}

VirtualObject* VirtualState::Copy(VirtualObject* obj, Alias alias) {
  if (obj->owner() == this) return obj;
  VirtualObject* new_obj =
      new (info_.get_allocator().zone()) VirtualObject(this, *obj);
  TRACE("At state %p, alias @%d (#%d), copying virtual object from %p to %p\n",
        static_cast<void*>(this), alias, obj->id(), static_cast<void*>(obj),
        static_cast<void*>(new_obj));
  info_[alias] = new_obj;
  return new_obj;
}

VirtualObject* VirtualState::VirtualObjectFromAlias(size_t alias) {
  return info_[alias];
}

void VirtualState::SetVirtualObject(Alias alias, VirtualObject* obj) {
  info_[alias] = obj;
}

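// Makes this state a superset of {from}: objects present only in {from} are
// shared into this state, objects present in both are updated field-wise.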
bool VirtualState::UpdateFrom(VirtualState* from, Zone* zone) {
  if (from == this) return false;
  bool changed = false;
  for (Alias alias = 0; alias < size(); ++alias) {
    VirtualObject* ls = VirtualObjectFromAlias(alias);
    VirtualObject* rs = from->VirtualObjectFromAlias(alias);

    if (ls == rs || rs == nullptr) continue;

    if (ls == nullptr) {
      ls = new (zone) VirtualObject(this, *rs);
      SetVirtualObject(alias, ls);
      changed = true;
      continue;
    }

    TRACE("  Updating fields of @%d\n", alias);

    changed = ls->UpdateFrom(*rs) || changed;
  }
  return changed;
}

namespace {

bool IsEquivalentPhi(Node* node1, Node* node2) {
  if (node1 == node2) return true;
  if (node1->opcode() != IrOpcode::kPhi || node2->opcode() != IrOpcode::kPhi ||
      node1->op()->ValueInputCount() != node2->op()->ValueInputCount()) {
    return false;
  }
  for (int i = 0; i < node1->op()->ValueInputCount(); ++i) {
    Node* input1 = NodeProperties::GetValueInput(node1, i);
    Node* input2 = NodeProperties::GetValueInput(node2, i);
    if (!IsEquivalentPhi(input1, input2)) {
      return false;
    }
  }
  return true;
}

bool IsEquivalentPhi(Node* phi, ZoneVector<Node*>& inputs) {
  if (phi->opcode() != IrOpcode::kPhi) return false;
  if (static_cast<size_t>(phi->op()->ValueInputCount()) != inputs.size()) {
    return false;
  }
  for (size_t i = 0; i < inputs.size(); ++i) {
    Node* input = NodeProperties::GetValueInput(phi, static_cast<int>(i));
    if (!IsEquivalentPhi(input, inputs[i])) {
      return false;
    }
  }
  return true;
}

}  // namespace

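// Merges the values collected in {cache} for field {i}: if no merge phi
// exists yet, a new Phi node is created with the control input of {at};
// otherwise the existing phi's value inputs are updated in place.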
bool VirtualObject::MergeFields(size_t i, Node* at, MergeCache* cache,
                                Graph* graph, CommonOperatorBuilder* common) {
  bool changed = false;
  int value_input_count = static_cast<int>(cache->fields().size());
  Node* rep = GetField(i);
  if (!rep || !IsCreatedPhi(i)) {
    Node* control = NodeProperties::GetControlInput(at);
    cache->fields().push_back(control);
    Node* phi = graph->NewNode(
        common->Phi(MachineRepresentation::kTagged, value_input_count),
        value_input_count + 1, &cache->fields().front());
    SetField(i, phi, true);
#ifdef DEBUG
    if (FLAG_trace_turbo_escape) {
      PrintF("    Creating Phi #%d as merge of", phi->id());
      for (int i = 0; i < value_input_count; i++) {
        PrintF(" #%d (%s)", cache->fields()[i]->id(),
               cache->fields()[i]->op()->mnemonic());
      }
      PrintF("\n");
    }
#endif
    changed = true;
  } else {
    DCHECK(rep->opcode() == IrOpcode::kPhi);
    for (int n = 0; n < value_input_count; ++n) {
      Node* old = NodeProperties::GetValueInput(rep, n);
      if (old != cache->fields()[n]) {
        changed = true;
        NodeProperties::ReplaceValueInput(rep, cache->fields()[n], n);
      }
    }
  }
  return changed;
}

bool VirtualObject::MergeFrom(MergeCache* cache, Node* at, Graph* graph,
                              CommonOperatorBuilder* common) {
  DCHECK(at->opcode() == IrOpcode::kEffectPhi ||
         at->opcode() == IrOpcode::kPhi);
  bool changed = false;
  for (size_t i = 0; i < field_count(); ++i) {
    if (Node* field = cache->GetFields(i)) {
      changed = changed || GetField(i) != field;
      SetField(i, field);
      TRACE("    Field %zu agrees on rep #%d\n", i, field->id());
    } else {
      size_t arity = at->opcode() == IrOpcode::kEffectPhi
                         ? at->op()->EffectInputCount()
                         : at->op()->ValueInputCount();
      if (cache->fields().size() == arity) {
        changed = MergeFields(i, at, cache, graph, common) || changed;
      } else {
        if (GetField(i) != nullptr) {
          TRACE("    Field %zu cleared\n", i);
          changed = true;
        }
        SetField(i, nullptr);
      }
    }
  }
  return changed;
}

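// Merges the incoming states in {cache} into this state, alias by alias. An
// object survives the merge only if it is present in every incoming state;
// its fields are then unified via MergeCache::GetFields and MergeFields.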
bool VirtualState::MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
                             CommonOperatorBuilder* common, Node* at) {
  DCHECK_GT(cache->states().size(), 0u);
  bool changed = false;
  for (Alias alias = 0; alias < size(); ++alias) {
    cache->objects().clear();
    VirtualObject* mergeObject = VirtualObjectFromAlias(alias);
    bool copy_merge_object = false;
    size_t fields = std::numeric_limits<size_t>::max();
    for (VirtualState* state : cache->states()) {
      if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
        cache->objects().push_back(obj);
        if (mergeObject == obj) {
          copy_merge_object = true;
        }
        fields = std::min(obj->field_count(), fields);
      }
    }
    if (cache->objects().size() == cache->states().size()) {
      if (!mergeObject) {
        VirtualObject* obj = new (zone)
            VirtualObject(cache->objects().front()->id(), this, zone, fields,
                          cache->objects().front()->IsInitialized());
        SetVirtualObject(alias, obj);
        mergeObject = obj;
        changed = true;
      } else if (copy_merge_object) {
        VirtualObject* obj = new (zone) VirtualObject(this, *mergeObject);
        SetVirtualObject(alias, obj);
        mergeObject = obj;
        changed = true;
      } else {
        changed = mergeObject->ResizeFields(fields) || changed;
      }
#ifdef DEBUG
      if (FLAG_trace_turbo_escape) {
        PrintF("  Alias @%d, merging into %p virtual objects", alias,
               static_cast<void*>(mergeObject));
        for (size_t i = 0; i < cache->objects().size(); i++) {
          PrintF(" %p", static_cast<void*>(cache->objects()[i]));
        }
        PrintF("\n");
      }
#endif  // DEBUG
      changed = mergeObject->MergeFrom(cache, at, graph, common) || changed;
    } else {
      if (mergeObject) {
        TRACE("  Alias %d, virtual object removed\n", alias);
        changed = true;
      }
      SetVirtualObject(alias, nullptr);
    }
  }
  return changed;
}

EscapeStatusAnalysis::EscapeStatusAnalysis(EscapeAnalysis* object_analysis,
                                           Graph* graph, Zone* zone)
    : stack_(zone),
      object_analysis_(object_analysis),
      graph_(graph),
      status_(zone),
      next_free_alias_(0),
      status_stack_(zone),
      aliases_(zone) {}

bool EscapeStatusAnalysis::HasEntry(Node* node) {
  return status_[node->id()] & (kTracked | kEscaped);
}

bool EscapeStatusAnalysis::IsVirtual(Node* node) {
  return IsVirtual(node->id());
}

bool EscapeStatusAnalysis::IsVirtual(NodeId id) {
  return (status_[id] & kTracked) && !(status_[id] & kEscaped);
}

bool EscapeStatusAnalysis::IsEscaped(Node* node) {
  return status_[node->id()] & kEscaped;
}

bool EscapeStatusAnalysis::IsAllocation(Node* node) {
  return node->opcode() == IrOpcode::kAllocate ||
         node->opcode() == IrOpcode::kFinishRegion;
}

bool EscapeStatusAnalysis::SetEscaped(Node* node) {
  bool changed = !(status_[node->id()] & kEscaped);
  status_[node->id()] |= kEscaped | kTracked;
  return changed;
}

bool EscapeStatusAnalysis::IsInQueue(NodeId id) {
  return status_[id] & kInQueue;
}

void EscapeStatusAnalysis::SetInQueue(NodeId id, bool on_stack) {
  if (on_stack) {
    status_[id] |= kInQueue;
  } else {
    status_[id] &= ~kInQueue;
  }
}

void EscapeStatusAnalysis::ResizeStatusVector() {
  if (status_.size() <= graph()->NodeCount()) {
    status_.resize(graph()->NodeCount() * 1.1, kUnknown);
  }
}

size_t EscapeStatusAnalysis::GetStatusVectorSize() { return status_.size(); }

void EscapeStatusAnalysis::RunStatusAnalysis() {
  ResizeStatusVector();
  while (!status_stack_.empty()) {
    Node* node = status_stack_.back();
    status_stack_.pop_back();
    status_[node->id()] &= ~kOnStack;
    Process(node);
    status_[node->id()] |= kVisited;
  }
}

void EscapeStatusAnalysis::EnqueueForStatusAnalysis(Node* node) {
  DCHECK_NOT_NULL(node);
  if (!(status_[node->id()] & kOnStack)) {
    status_stack_.push_back(node);
    status_[node->id()] |= kOnStack;
  }
}

void EscapeStatusAnalysis::RevisitInputs(Node* node) {
  for (Edge edge : node->input_edges()) {
    Node* input = edge.to();
    if (!(status_[input->id()] & kOnStack)) {
      status_stack_.push_back(input);
      status_[input->id()] |= kOnStack;
    }
  }
}

void EscapeStatusAnalysis::RevisitUses(Node* node) {
  for (Edge edge : node->use_edges()) {
    Node* use = edge.from();
    if (!(status_[use->id()] & kOnStack) && !IsNotReachable(use)) {
      status_stack_.push_back(use);
      status_[use->id()] |= kOnStack;
    }
  }
}

void EscapeStatusAnalysis::Process(Node* node) {
  switch (node->opcode()) {
    case IrOpcode::kAllocate:
      ProcessAllocate(node);
      break;
    case IrOpcode::kFinishRegion:
      ProcessFinishRegion(node);
      break;
    case IrOpcode::kStoreField:
      ProcessStoreField(node);
      break;
    case IrOpcode::kStoreElement:
      ProcessStoreElement(node);
      break;
    case IrOpcode::kLoadField:
    case IrOpcode::kLoadElement: {
      if (Node* rep = object_analysis_->GetReplacement(node)) {
        if (IsAllocation(rep) && CheckUsesForEscape(node, rep)) {
          RevisitInputs(rep);
          RevisitUses(rep);
        }
      }
      RevisitUses(node);
      break;
    }
    case IrOpcode::kPhi:
      if (!HasEntry(node)) {
        status_[node->id()] |= kTracked;
        RevisitUses(node);
      }
      if (!IsAllocationPhi(node) && SetEscaped(node)) {
        RevisitInputs(node);
        RevisitUses(node);
      }
      CheckUsesForEscape(node);
    default:
      break;
  }
}

bool EscapeStatusAnalysis::IsAllocationPhi(Node* node) {
  for (Edge edge : node->input_edges()) {
    Node* input = edge.to();
    if (input->opcode() == IrOpcode::kPhi && !IsEscaped(input)) continue;
    if (IsAllocation(input)) continue;
    return false;
  }
  return true;
}

void EscapeStatusAnalysis::ProcessStoreField(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStoreField);
  Node* to = NodeProperties::GetValueInput(node, 0);
  Node* val = NodeProperties::GetValueInput(node, 1);
  if ((IsEscaped(to) || !IsAllocation(to)) && SetEscaped(val)) {
    RevisitUses(val);
    RevisitInputs(val);
    TRACE("Setting #%d (%s) to escaped because of store to field of #%d\n",
          val->id(), val->op()->mnemonic(), to->id());
  }
}

void EscapeStatusAnalysis::ProcessStoreElement(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStoreElement);
  Node* to = NodeProperties::GetValueInput(node, 0);
  Node* val = NodeProperties::GetValueInput(node, 2);
  if ((IsEscaped(to) || !IsAllocation(to)) && SetEscaped(val)) {
    RevisitUses(val);
    RevisitInputs(val);
    TRACE("Setting #%d (%s) to escaped because of store to element of #%d\n",
          val->id(), val->op()->mnemonic(), to->id());
  }
}

void EscapeStatusAnalysis::ProcessAllocate(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
  if (!HasEntry(node)) {
    status_[node->id()] |= kTracked;
    TRACE("Created status entry for node #%d (%s)\n", node->id(),
          node->op()->mnemonic());
    NumberMatcher size(node->InputAt(0));
    DCHECK(node->InputAt(0)->opcode() != IrOpcode::kInt32Constant &&
           node->InputAt(0)->opcode() != IrOpcode::kInt64Constant &&
           node->InputAt(0)->opcode() != IrOpcode::kFloat32Constant &&
           node->InputAt(0)->opcode() != IrOpcode::kFloat64Constant);
    RevisitUses(node);
    if (!size.HasValue() && SetEscaped(node)) {
      TRACE("Setting #%d to escaped because of non-const alloc\n", node->id());
      // This node is already known to escape; its uses do not have to be
      // checked for escape.
      return;
    }
  }
  if (CheckUsesForEscape(node, true)) {
    RevisitUses(node);
  }
}

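// Checks all uses of {uses} and marks {rep} as escaped if any use requires
// it. Returns true if the escape status of {rep} changed, in which case the
// caller revisits the affected nodes.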
bool EscapeStatusAnalysis::CheckUsesForEscape(Node* uses, Node* rep,
                                              bool phi_escaping) {
  for (Edge edge : uses->use_edges()) {
    Node* use = edge.from();
    if (IsNotReachable(use)) continue;
    if (edge.index() >= use->op()->ValueInputCount() +
                            OperatorProperties::GetContextInputCount(use->op()))
      continue;
    switch (use->opcode()) {
      case IrOpcode::kPhi:
        if (phi_escaping && SetEscaped(rep)) {
          TRACE(
              "Setting #%d (%s) to escaped because of use by phi node "
              "#%d (%s)\n",
              rep->id(), rep->op()->mnemonic(), use->id(),
              use->op()->mnemonic());
          return true;
        }
      // Fallthrough.
      case IrOpcode::kStoreField:
      case IrOpcode::kLoadField:
      case IrOpcode::kStoreElement:
      case IrOpcode::kLoadElement:
      case IrOpcode::kFrameState:
      case IrOpcode::kStateValues:
      case IrOpcode::kReferenceEqual:
      case IrOpcode::kFinishRegion:
        if (IsEscaped(use) && SetEscaped(rep)) {
          TRACE(
              "Setting #%d (%s) to escaped because of use by escaping node "
              "#%d (%s)\n",
              rep->id(), rep->op()->mnemonic(), use->id(),
              use->op()->mnemonic());
          return true;
        }
        break;
      case IrOpcode::kObjectIsSmi:
        if (!IsAllocation(rep) && SetEscaped(rep)) {
          TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
                rep->id(), rep->op()->mnemonic(), use->id(),
                use->op()->mnemonic());
          return true;
        }
        break;
      case IrOpcode::kSelect:
      // TODO(mstarzinger): The following list of operators will eventually be
      // handled by the EscapeAnalysisReducer (similar to ObjectIsSmi).
      case IrOpcode::kStringEqual:
      case IrOpcode::kStringLessThan:
      case IrOpcode::kStringLessThanOrEqual:
      case IrOpcode::kTypeGuard:
      case IrOpcode::kPlainPrimitiveToNumber:
      case IrOpcode::kPlainPrimitiveToWord32:
      case IrOpcode::kPlainPrimitiveToFloat64:
      case IrOpcode::kStringCharCodeAt:
      case IrOpcode::kObjectIsCallable:
      case IrOpcode::kObjectIsNumber:
      case IrOpcode::kObjectIsReceiver:
      case IrOpcode::kObjectIsString:
      case IrOpcode::kObjectIsUndetectable:
        if (SetEscaped(rep)) {
          TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
                rep->id(), rep->op()->mnemonic(), use->id(),
                use->op()->mnemonic());
          return true;
        }
        break;
      default:
        if (use->op()->EffectInputCount() == 0 &&
            uses->op()->EffectInputCount() > 0 &&
            !IrOpcode::IsJsOpcode(use->opcode())) {
          TRACE("Encountered unaccounted use by #%d (%s)\n", use->id(),
                use->op()->mnemonic());
          UNREACHABLE();
        }
        if (SetEscaped(rep)) {
          TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
                rep->id(), rep->op()->mnemonic(), use->id(),
                use->op()->mnemonic());
          return true;
        }
    }
  }
  return false;
}

void EscapeStatusAnalysis::ProcessFinishRegion(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
  if (!HasEntry(node)) {
    status_[node->id()] |= kTracked;
    RevisitUses(node);
  }
  if (CheckUsesForEscape(node, true)) {
    RevisitInputs(node);
  }
}

void EscapeStatusAnalysis::DebugPrint() {
  for (NodeId id = 0; id < status_.size(); id++) {
    if (status_[id] & kTracked) {
      PrintF("Node #%d is %s\n", id,
             (status_[id] & kEscaped) ? "escaping" : "virtual");
    }
  }
}

EscapeAnalysis::EscapeAnalysis(Graph* graph, CommonOperatorBuilder* common,
                               Zone* zone)
    : zone_(zone),
      slot_not_analyzed_(graph->NewNode(common->NumberConstant(0x1c0debad))),
      common_(common),
      status_analysis_(new (zone) EscapeStatusAnalysis(this, graph, zone)),
      virtual_states_(zone),
      replacements_(zone),
      cycle_detection_(zone),
      cache_(nullptr) {}

EscapeAnalysis::~EscapeAnalysis() {}

void EscapeAnalysis::Run() {
  replacements_.resize(graph()->NodeCount());
  status_analysis_->AssignAliases();
  if (status_analysis_->AliasCount() > 0) {
    cache_ = new (zone()) MergeCache(zone());
    replacements_.resize(graph()->NodeCount());
    status_analysis_->ResizeStatusVector();
    RunObjectAnalysis();
    status_analysis_->RunStatusAnalysis();
  }
}

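// Assigns aliases by walking the graph backwards from the end node: every
// reachable Allocate (and the FinishRegion wrapping it) gets a fresh alias,
// while all other reachable nodes are marked kUntrackable.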
void EscapeStatusAnalysis::AssignAliases() {
  size_t max_size = 1024;
  size_t min_size = 32;
  size_t stack_size =
      std::min(std::max(graph()->NodeCount() / 5, min_size), max_size);
  stack_.reserve(stack_size);
  ResizeStatusVector();
  stack_.push_back(graph()->end());
  CHECK_LT(graph()->NodeCount(), kUntrackable);
  aliases_.resize(graph()->NodeCount(), kNotReachable);
  aliases_[graph()->end()->id()] = kUntrackable;
  status_stack_.reserve(8);
  TRACE("Discovering trackable nodes");
  while (!stack_.empty()) {
    Node* node = stack_.back();
    stack_.pop_back();
    switch (node->opcode()) {
      case IrOpcode::kAllocate:
        if (aliases_[node->id()] >= kUntrackable) {
          aliases_[node->id()] = NextAlias();
          TRACE(" @%d:%s#%u", aliases_[node->id()], node->op()->mnemonic(),
                node->id());
          EnqueueForStatusAnalysis(node);
        }
        break;
      case IrOpcode::kFinishRegion: {
        Node* allocate = NodeProperties::GetValueInput(node, 0);
        DCHECK_NOT_NULL(allocate);
        if (allocate->opcode() == IrOpcode::kAllocate) {
          if (aliases_[allocate->id()] >= kUntrackable) {
            if (aliases_[allocate->id()] == kNotReachable) {
              stack_.push_back(allocate);
            }
            aliases_[allocate->id()] = NextAlias();
            TRACE(" @%d:%s#%u", aliases_[allocate->id()],
                  allocate->op()->mnemonic(), allocate->id());
            EnqueueForStatusAnalysis(allocate);
          }
          aliases_[node->id()] = aliases_[allocate->id()];
          TRACE(" @%d:%s#%u", aliases_[node->id()], node->op()->mnemonic(),
                node->id());
        }
        break;
      }
      default:
        DCHECK_EQ(aliases_[node->id()], kUntrackable);
        break;
    }
    for (Edge edge : node->input_edges()) {
      Node* input = edge.to();
      if (aliases_[input->id()] == kNotReachable) {
        stack_.push_back(input);
        aliases_[input->id()] = kUntrackable;
      }
    }
  }
  TRACE("\n");
}

bool EscapeStatusAnalysis::IsNotReachable(Node* node) {
  if (node->id() >= aliases_.size()) {
    return false;
  }
  return aliases_[node->id()] == kNotReachable;
}

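// Drives the state-tracking pass: walks the effect chain from the start node
// with a depth-first worklist, delaying effect phis (pushed to the far end of
// the deque) and handling dangling loads right after their producer.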
void EscapeAnalysis::RunObjectAnalysis() {
  virtual_states_.resize(graph()->NodeCount());
  ZoneDeque<Node*> queue(zone());
  queue.push_back(graph()->start());
  ZoneVector<Node*> danglers(zone());
  while (!queue.empty()) {
    Node* node = queue.back();
    queue.pop_back();
    status_analysis_->SetInQueue(node->id(), false);
    if (Process(node)) {
      for (Edge edge : node->use_edges()) {
        Node* use = edge.from();
        if (status_analysis_->IsNotReachable(use)) {
          continue;
        }
        if (NodeProperties::IsEffectEdge(edge)) {
          // Iteration order: depth first, but delay phis.
          // We need DFS to avoid some duplication of VirtualStates and
          // VirtualObjects, and we want to delay phis to improve performance.
          if (use->opcode() == IrOpcode::kEffectPhi) {
            if (!status_analysis_->IsInQueue(use->id())) {
              queue.push_front(use);
            }
          } else if ((use->opcode() != IrOpcode::kLoadField &&
                      use->opcode() != IrOpcode::kLoadElement) ||
                     !status_analysis_->IsDanglingEffectNode(use)) {
            if (!status_analysis_->IsInQueue(use->id())) {
              status_analysis_->SetInQueue(use->id(), true);
              queue.push_back(use);
            }
          } else {
            danglers.push_back(use);
          }
        }
      }
      // Danglers need to be processed immediately, even if they are
      // already in the queue. Since they do not have effect outputs,
      // we don't have to track whether they are enqueued.
      queue.insert(queue.end(), danglers.begin(), danglers.end());
      danglers.clear();
    }
  }
#ifdef DEBUG
  if (FLAG_trace_turbo_escape) {
    DebugPrint();
  }
#endif
}

bool EscapeStatusAnalysis::IsDanglingEffectNode(Node* node) {
  if (status_[node->id()] & kDanglingComputed) {
    return status_[node->id()] & kDangling;
  }
  if (node->op()->EffectInputCount() == 0 ||
      node->op()->EffectOutputCount() == 0 ||
      (node->op()->EffectInputCount() == 1 &&
       NodeProperties::GetEffectInput(node)->opcode() == IrOpcode::kStart)) {
    // The start node is used as a sentinel for nodes that are in general
    // effectful, but of which an analysis has determined that they do not
    // produce effects in this instance. We don't consider these nodes
    // dangling.
    status_[node->id()] |= kDanglingComputed;
    return false;
  }
  for (Edge edge : node->use_edges()) {
    Node* use = edge.from();
    if (aliases_[use->id()] == kNotReachable) continue;
    if (NodeProperties::IsEffectEdge(edge)) {
      status_[node->id()] |= kDanglingComputed;
      return false;
    }
  }
  status_[node->id()] |= kDanglingComputed | kDangling;
  return true;
}

bool EscapeStatusAnalysis::IsEffectBranchPoint(Node* node) {
  if (status_[node->id()] & kBranchPointComputed) {
    return status_[node->id()] & kBranchPoint;
  }
  int count = 0;
  for (Edge edge : node->use_edges()) {
    Node* use = edge.from();
    if (aliases_[use->id()] == kNotReachable) continue;
    if (NodeProperties::IsEffectEdge(edge)) {
      if ((use->opcode() == IrOpcode::kLoadField ||
           use->opcode() == IrOpcode::kLoadElement ||
           use->opcode() == IrOpcode::kLoad) &&
          IsDanglingEffectNode(use))
        continue;
      if (++count > 1) {
        status_[node->id()] |= kBranchPointComputed | kBranchPoint;
        return true;
      }
    }
  }
  status_[node->id()] |= kBranchPointComputed;
  return false;
}

bool EscapeAnalysis::Process(Node* node) {
  switch (node->opcode()) {
    case IrOpcode::kAllocate:
      ProcessAllocation(node);
      break;
    case IrOpcode::kBeginRegion:
      ForwardVirtualState(node);
      break;
    case IrOpcode::kFinishRegion:
      ProcessFinishRegion(node);
      break;
    case IrOpcode::kStoreField:
      ProcessStoreField(node);
      break;
    case IrOpcode::kLoadField:
      ProcessLoadField(node);
      break;
    case IrOpcode::kStoreElement:
      ProcessStoreElement(node);
      break;
    case IrOpcode::kLoadElement:
      ProcessLoadElement(node);
      break;
    case IrOpcode::kStart:
      ProcessStart(node);
      break;
    case IrOpcode::kEffectPhi:
      return ProcessEffectPhi(node);
    default:
      if (node->op()->EffectInputCount() > 0) {
        ForwardVirtualState(node);
      }
      ProcessAllocationUsers(node);
      break;
  }
  return true;
}

void EscapeAnalysis::ProcessAllocationUsers(Node* node) {
  for (Edge edge : node->input_edges()) {
    Node* input = edge.to();
    Node* use = edge.from();
    if (edge.index() >= use->op()->ValueInputCount() +
                            OperatorProperties::GetContextInputCount(use->op()))
      continue;
    switch (node->opcode()) {
      case IrOpcode::kStoreField:
      case IrOpcode::kLoadField:
      case IrOpcode::kStoreElement:
      case IrOpcode::kLoadElement:
      case IrOpcode::kFrameState:
      case IrOpcode::kStateValues:
      case IrOpcode::kReferenceEqual:
      case IrOpcode::kFinishRegion:
      case IrOpcode::kObjectIsSmi:
        break;
      default:
        VirtualState* state = virtual_states_[node->id()];
        if (VirtualObject* obj =
                GetVirtualObject(state, ResolveReplacement(input))) {
          if (!obj->AllFieldsClear()) {
            obj = CopyForModificationAt(obj, state, node);
            obj->ClearAllFields();
            TRACE("Cleared all fields of @%d:#%d\n",
                  status_analysis_->GetAlias(obj->id()), obj->id());
          }
        }
        break;
    }
  }
}

VirtualState* EscapeAnalysis::CopyForModificationAt(VirtualState* state,
                                                    Node* node) {
  if (state->owner() != node) {
    VirtualState* new_state = new (zone()) VirtualState(node, *state);
    virtual_states_[node->id()] = new_state;
    TRACE("Copying virtual state %p to new state %p at node %s#%d\n",
          static_cast<void*>(state), static_cast<void*>(new_state),
          node->op()->mnemonic(), node->id());
    return new_state;
  }
  return state;
}

VirtualObject* EscapeAnalysis::CopyForModificationAt(VirtualObject* obj,
                                                     VirtualState* state,
                                                     Node* node) {
  if (obj->NeedCopyForModification()) {
    state = CopyForModificationAt(state, node);
    // TODO(tebbi): this copies the complete virtual state. Replace with a more
    // precise analysis of which objects are actually affected by the change.
    Alias changed_alias = status_analysis_->GetAlias(obj->id());
    for (Alias alias = 0; alias < state->size(); ++alias) {
      if (VirtualObject* next_obj = state->VirtualObjectFromAlias(alias)) {
        if (alias != changed_alias && next_obj->NeedCopyForModification()) {
          state->Copy(next_obj, alias);
        }
      }
    }
    return state->Copy(obj, changed_alias);
  }
  return obj;
}

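// Propagates the virtual state along the effect edge into {node}. The state
// is shared lazily: it is only marked copy-required when the effect input is
// a branch point or the node carries a frame state.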
void EscapeAnalysis::ForwardVirtualState(Node* node) {
  DCHECK_EQ(node->op()->EffectInputCount(), 1);
#ifdef DEBUG
  if (node->opcode() != IrOpcode::kLoadField &&
      node->opcode() != IrOpcode::kLoadElement &&
      node->opcode() != IrOpcode::kLoad &&
      status_analysis_->IsDanglingEffectNode(node)) {
    PrintF("Dangling effect node: #%d (%s)\n", node->id(),
           node->op()->mnemonic());
    UNREACHABLE();
  }
#endif  // DEBUG
  Node* effect = NodeProperties::GetEffectInput(node);
  DCHECK_NOT_NULL(virtual_states_[effect->id()]);
  if (virtual_states_[node->id()]) {
    virtual_states_[node->id()]->UpdateFrom(virtual_states_[effect->id()],
                                            zone());
  } else {
    virtual_states_[node->id()] = virtual_states_[effect->id()];
    TRACE("Forwarding object state %p from %s#%d to %s#%d",
          static_cast<void*>(virtual_states_[effect->id()]),
          effect->op()->mnemonic(), effect->id(), node->op()->mnemonic(),
          node->id());
    if (status_analysis_->IsEffectBranchPoint(effect) ||
        OperatorProperties::HasFrameStateInput(node->op())) {
      virtual_states_[node->id()]->SetCopyRequired();
      TRACE(", effect input %s#%d is branch point", effect->op()->mnemonic(),
            effect->id());
    }
    TRACE("\n");
  }
}

void EscapeAnalysis::ProcessStart(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStart);
  virtual_states_[node->id()] =
      new (zone()) VirtualState(node, zone(), status_analysis_->AliasCount());
}

bool EscapeAnalysis::ProcessEffectPhi(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kEffectPhi);
  bool changed = false;

  VirtualState* mergeState = virtual_states_[node->id()];
  if (!mergeState) {
    mergeState =
        new (zone()) VirtualState(node, zone(), status_analysis_->AliasCount());
    virtual_states_[node->id()] = mergeState;
    changed = true;
    TRACE("Effect Phi #%d got new virtual state %p.\n", node->id(),
          static_cast<void*>(mergeState));
  }

  cache_->Clear();

  TRACE("At Effect Phi #%d, merging states into %p:", node->id(),
        static_cast<void*>(mergeState));

  for (int i = 0; i < node->op()->EffectInputCount(); ++i) {
    Node* input = NodeProperties::GetEffectInput(node, i);
    VirtualState* state = virtual_states_[input->id()];
    if (state) {
      cache_->states().push_back(state);
      if (state == mergeState) {
        mergeState = new (zone())
            VirtualState(node, zone(), status_analysis_->AliasCount());
        virtual_states_[node->id()] = mergeState;
        changed = true;
      }
    }
    TRACE(" %p (from %d %s)", static_cast<void*>(state), input->id(),
          input->op()->mnemonic());
  }
  TRACE("\n");

  if (cache_->states().size() == 0) {
    return changed;
  }

  changed =
      mergeState->MergeFrom(cache_, zone(), graph(), common(), node) || changed;

  TRACE("Merge %s the node.\n", changed ? "changed" : "did not change");

  if (changed) {
    status_analysis_->ResizeStatusVector();
  }
  return changed;
}

void EscapeAnalysis::ProcessAllocation(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
  ForwardVirtualState(node);
  VirtualState* state = virtual_states_[node->id()];
  Alias alias = status_analysis_->GetAlias(node->id());

  // Check if we have already processed this node.
  if (state->VirtualObjectFromAlias(alias)) {
    return;
  }

  if (state->owner()->opcode() == IrOpcode::kEffectPhi) {
    state = CopyForModificationAt(state, node);
  }

  NumberMatcher size(node->InputAt(0));
  DCHECK(node->InputAt(0)->opcode() != IrOpcode::kInt32Constant &&
         node->InputAt(0)->opcode() != IrOpcode::kInt64Constant &&
         node->InputAt(0)->opcode() != IrOpcode::kFloat32Constant &&
         node->InputAt(0)->opcode() != IrOpcode::kFloat64Constant);
  if (size.HasValue()) {
    VirtualObject* obj = new (zone()) VirtualObject(
        node->id(), state, zone(), size.Value() / kPointerSize, false);
    state->SetVirtualObject(alias, obj);
  } else {
    state->SetVirtualObject(
        alias, new (zone()) VirtualObject(node->id(), state, zone()));
  }
}

void EscapeAnalysis::ProcessFinishRegion(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
  ForwardVirtualState(node);
  Node* allocation = NodeProperties::GetValueInput(node, 0);
  if (allocation->opcode() == IrOpcode::kAllocate) {
    VirtualState* state = virtual_states_[node->id()];
    VirtualObject* obj =
        state->VirtualObjectFromAlias(status_analysis_->GetAlias(node->id()));
    DCHECK_NOT_NULL(obj);
    obj->SetInitialized();
  }
}

Node* EscapeAnalysis::replacement(Node* node) {
  if (node->id() >= replacements_.size()) return nullptr;
  return replacements_[node->id()];
}

bool EscapeAnalysis::SetReplacement(Node* node, Node* rep) {
  bool changed = replacements_[node->id()] != rep;
  replacements_[node->id()] = rep;
  return changed;
}

bool EscapeAnalysis::UpdateReplacement(VirtualState* state, Node* node,
                                       Node* rep) {
  if (SetReplacement(node, rep)) {
    if (rep) {
      TRACE("Replacement of #%d is #%d (%s)\n", node->id(), rep->id(),
            rep->op()->mnemonic());
    } else {
      TRACE("Replacement of #%d cleared\n", node->id());
    }
    return true;
  }
  return false;
}

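// Replacements can form chains (a load replaced by a value that is itself
// replaced); ResolveReplacement follows the chain to its end, while
// GetReplacement additionally returns nullptr when there is no replacement.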
Node* EscapeAnalysis::ResolveReplacement(Node* node) {
  while (replacement(node)) {
    node = replacement(node);
  }
  return node;
}

Node* EscapeAnalysis::GetReplacement(Node* node) {
  Node* result = nullptr;
  while (replacement(node)) {
    node = result = replacement(node);
  }
  return result;
}

bool EscapeAnalysis::IsVirtual(Node* node) {
  if (node->id() >= status_analysis_->GetStatusVectorSize()) {
    return false;
  }
  return status_analysis_->IsVirtual(node);
}

bool EscapeAnalysis::IsEscaped(Node* node) {
  if (node->id() >= status_analysis_->GetStatusVectorSize()) {
    return false;
  }
  return status_analysis_->IsEscaped(node);
}

bool EscapeAnalysis::CompareVirtualObjects(Node* left, Node* right) {
  DCHECK(IsVirtual(left) && IsVirtual(right));
  left = ResolveReplacement(left);
  right = ResolveReplacement(right);
  if (IsEquivalentPhi(left, right)) {
    return true;
  }
  return false;
}

namespace {

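// On big-endian targets a sub-word field is addressed from the opposite end
// of its pointer-sized slot, so the end of the field (offset plus field
// size), rather than its start, must be pointer-aligned for the field to map
// to a whole slot.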
bool IsOffsetForFieldAccessCorrect(const FieldAccess& access) {
#if V8_TARGET_LITTLE_ENDIAN
  return (access.offset % kPointerSize) == 0;
#else
  return ((access.offset +
           (1 << ElementSizeLog2Of(access.machine_type.representation()))) %
          kPointerSize) == 0;
#endif
}

int OffsetForFieldAccess(Node* node) {
  FieldAccess access = FieldAccessOf(node->op());
  DCHECK(IsOffsetForFieldAccessCorrect(access));
  return access.offset / kPointerSize;
}

int OffsetForElementAccess(Node* node, int index) {
  ElementAccess access = ElementAccessOf(node->op());
  DCHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
            kPointerSizeLog2);
  DCHECK_EQ(access.header_size % kPointerSize, 0);
  return access.header_size / kPointerSize + index;
}

}  // namespace

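// A load whose object input is a phi of virtual objects is replaced by a phi
// of the objects' field values: the value loaded at {offset} from each
// incoming object becomes one input of the new (or updated) phi.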
void EscapeAnalysis::ProcessLoadFromPhi(int offset, Node* from, Node* load,
                                        VirtualState* state) {
  TRACE("Load #%d from phi #%d", load->id(), from->id());

  cache_->fields().clear();
  for (int i = 0; i < load->op()->ValueInputCount(); ++i) {
    Node* input = NodeProperties::GetValueInput(load, i);
    cache_->fields().push_back(input);
  }

  cache_->LoadVirtualObjectsForFieldsFrom(state,
                                          status_analysis_->GetAliasMap());
  if (cache_->objects().size() == cache_->fields().size()) {
    cache_->GetFields(offset);
    if (cache_->fields().size() == cache_->objects().size()) {
      Node* rep = replacement(load);
      if (!rep || !IsEquivalentPhi(rep, cache_->fields())) {
        int value_input_count = static_cast<int>(cache_->fields().size());
        cache_->fields().push_back(NodeProperties::GetControlInput(from));
        Node* phi = graph()->NewNode(
            common()->Phi(MachineRepresentation::kTagged, value_input_count),
            value_input_count + 1, &cache_->fields().front());
        status_analysis_->ResizeStatusVector();
        SetReplacement(load, phi);
        TRACE(" created new phi.\n");
      } else {
        TRACE(" already has phi #%d.\n", rep->id());
      }
    } else {
      TRACE(" has incomplete field info.\n");
    }
  } else {
    TRACE(" has incomplete virtual object info.\n");
  }
}

void EscapeAnalysis::ProcessLoadField(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kLoadField);
  ForwardVirtualState(node);
  Node* from = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
  VirtualState* state = virtual_states_[node->id()];
  if (VirtualObject* object = GetVirtualObject(state, from)) {
    if (!object->IsTracked()) return;
    int offset = OffsetForFieldAccess(node);
    if (static_cast<size_t>(offset) >= object->field_count()) {
      // We have a load from a field that is not inside the {object}. This
      // can only happen with conflicting type feedback and for dead {node}s.
      // For now, we just mark the {object} as escaping.
      // TODO(turbofan): Consider introducing an Undefined or None operator
      // that we can replace this load with, since we know it's dead code.
      if (status_analysis_->SetEscaped(from)) {
        TRACE(
            "Setting #%d (%s) to escaped because load field #%d from "
            "offset %d outside of object\n",
            from->id(), from->op()->mnemonic(), node->id(), offset);
      }
      return;
    }
    Node* value = object->GetField(offset);
    if (value) {
      value = ResolveReplacement(value);
    }
    // Record that the load has this alias.
    UpdateReplacement(state, node, value);
  } else if (from->opcode() == IrOpcode::kPhi &&
             IsOffsetForFieldAccessCorrect(FieldAccessOf(node->op()))) {
    int offset = OffsetForFieldAccess(node);
    // Only binary phis are supported for now.
    ProcessLoadFromPhi(offset, from, node, state);
  } else {
    UpdateReplacement(state, node, nullptr);
  }
}

ProcessLoadElement(Node * node)1452 void EscapeAnalysis::ProcessLoadElement(Node* node) {
1453   DCHECK_EQ(node->opcode(), IrOpcode::kLoadElement);
1454   ForwardVirtualState(node);
1455   Node* from = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
1456   VirtualState* state = virtual_states_[node->id()];
1457   Node* index_node = node->InputAt(1);
1458   NumberMatcher index(index_node);
1459   DCHECK(index_node->opcode() != IrOpcode::kInt32Constant &&
1460          index_node->opcode() != IrOpcode::kInt64Constant &&
1461          index_node->opcode() != IrOpcode::kFloat32Constant &&
1462          index_node->opcode() != IrOpcode::kFloat64Constant);
1463   if (index.HasValue()) {
1464     if (VirtualObject* object = GetVirtualObject(state, from)) {
1465       if (!object->IsTracked()) return;
1466       int offset = OffsetForElementAccess(node, index.Value());
1467       if (static_cast<size_t>(offset) >= object->field_count()) return;
1468       Node* value = object->GetField(offset);
1469       if (value) {
1470         value = ResolveReplacement(value);
1471       }
1472       // Record the field value (if any) as the replacement for this load.
1473       UpdateReplacement(state, node, value);
1474     } else if (from->opcode() == IrOpcode::kPhi) {
1475       int offset = OffsetForElementAccess(node, index.Value());
1476       ProcessLoadFromPhi(offset, from, node, state);
1477     } else {
1478       UpdateReplacement(state, node, nullptr);
1479     }
1480   } else {
1481     // We have a load from a non-constant index; the object cannot be eliminated.
1482     if (status_analysis_->SetEscaped(from)) {
1483       TRACE(
1484           "Setting #%d (%s) to escaped because load element #%d from non-const "
1485           "index #%d (%s)\n",
1486           from->id(), from->op()->mnemonic(), node->id(), index_node->id(),
1487           index_node->op()->mnemonic());
1488     }
1489   }
1490 }
1491 
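// Records a store into the virtual state rather than into the object
// itself: the affected virtual object is copied on modification, so the
// states recorded at other effect positions remain unchanged. Stores to
// offsets outside the object mark it as escaping.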
1492 void EscapeAnalysis::ProcessStoreField(Node* node) {
1493   DCHECK_EQ(node->opcode(), IrOpcode::kStoreField);
1494   ForwardVirtualState(node);
1495   Node* to = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
1496   VirtualState* state = virtual_states_[node->id()];
1497   if (VirtualObject* object = GetVirtualObject(state, to)) {
1498     if (!object->IsTracked()) return;
1499     int offset = OffsetForFieldAccess(node);
1500     if (static_cast<size_t>(offset) >= object->field_count()) {
1501       // We have a store to a field that is not inside the {object}. This
1502       // can only happen with conflicting type feedback and for dead {node}s.
1503       // For now, we just mark the {object} as escaping.
1504       // TODO(turbofan): Consider just eliminating the store in the reducer
1505       // pass, as it's dead code anyways.
1506       if (status_analysis_->SetEscaped(to)) {
1507         TRACE(
1508             "Setting #%d (%s) to escaped because store field #%d to "
1509             "offset %d outside of object\n",
1510             to->id(), to->op()->mnemonic(), node->id(), offset);
1511       }
1512       return;
1513     }
1514     Node* val = ResolveReplacement(NodeProperties::GetValueInput(node, 1));
1515     // TODO(mstarzinger): The following is a workaround to not track some well
1516     // known raw fields. We only ever store default initial values into these
1517     // fields which are hard-coded in {TranslatedState::MaterializeAt} as well.
1518     if (val->opcode() == IrOpcode::kInt32Constant ||
1519         val->opcode() == IrOpcode::kInt64Constant) {
1520       DCHECK(FieldAccessOf(node->op()).offset == JSFunction::kCodeEntryOffset ||
1521              FieldAccessOf(node->op()).offset == Name::kHashFieldOffset);
1522       val = slot_not_analyzed_;
1523     }
1524     if (object->GetField(offset) != val) {
1525       object = CopyForModificationAt(object, state, node);
1526       object->SetField(offset, val);
1527     }
1528   }
1529 }
1530 
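// Element counterpart of ProcessStoreField. A store to a constant index
// updates the tracked field; a store to a non-constant index marks the
// object as escaping and conservatively clears all of its tracked fields,
// since any one of them may have been overwritten.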
1531 void EscapeAnalysis::ProcessStoreElement(Node* node) {
1532   DCHECK_EQ(node->opcode(), IrOpcode::kStoreElement);
1533   ForwardVirtualState(node);
1534   Node* to = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
1535   Node* index_node = node->InputAt(1);
1536   NumberMatcher index(index_node);
1537   DCHECK(index_node->opcode() != IrOpcode::kInt32Constant &&
1538          index_node->opcode() != IrOpcode::kInt64Constant &&
1539          index_node->opcode() != IrOpcode::kFloat32Constant &&
1540          index_node->opcode() != IrOpcode::kFloat64Constant);
1541   VirtualState* state = virtual_states_[node->id()];
1542   if (index.HasValue()) {
1543     if (VirtualObject* object = GetVirtualObject(state, to)) {
1544       if (!object->IsTracked()) return;
1545       int offset = OffsetForElementAccess(node, index.Value());
1546       if (static_cast<size_t>(offset) >= object->field_count()) return;
1547       Node* val = ResolveReplacement(NodeProperties::GetValueInput(node, 2));
1548       if (object->GetField(offset) != val) {
1549         object = CopyForModificationAt(object, state, node);
1550         object->SetField(offset, val);
1551       }
1552     }
1553   } else {
1554     // We have a store to a non-constant index; the object cannot be eliminated.
1555     if (status_analysis_->SetEscaped(to)) {
1556       TRACE(
1557           "Setting #%d (%s) to escaped because store element #%d to non-const "
1558           "index #%d (%s)\n",
1559           to->id(), to->op()->mnemonic(), node->id(), index_node->id(),
1560           index_node->op()->mnemonic());
1561     }
1562     if (VirtualObject* object = GetVirtualObject(state, to)) {
1563       if (!object->IsTracked()) return;
1564       if (!object->AllFieldsClear()) {
1565         object = CopyForModificationAt(object, state, node);
1566         object->ClearAllFields();
1567         TRACE("Cleared all fields of @%d:#%d\n",
1568               status_analysis_->GetAlias(object->id()), object->id());
1569       }
1570     }
1571   }
1572 }
1573 
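// Returns the ObjectState node describing the contents of the virtual
// object {node} at effect position {effect}, creating (and caching) it on
// first use. Deoptimization uses this node to rematerialize the object.
// Schematically (names are illustrative only):
//
//   o = Allocate(...); o.a = x; o.b = y   =>   ObjectState(x, y)
//
// Fields that are themselves virtual objects are recursively replaced by
// their own ObjectState nodes.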
1574 Node* EscapeAnalysis::GetOrCreateObjectState(Node* effect, Node* node) {
1575   if ((node->opcode() == IrOpcode::kFinishRegion ||
1576        node->opcode() == IrOpcode::kAllocate) &&
1577       IsVirtual(node)) {
1578     if (VirtualObject* vobj = GetVirtualObject(virtual_states_[effect->id()],
1579                                                ResolveReplacement(node))) {
1580       if (Node* object_state = vobj->GetObjectState()) {
1581         return object_state;
1582       } else {
1583         cache_->fields().clear();
1584         for (size_t i = 0; i < vobj->field_count(); ++i) {
1585           if (Node* field = vobj->GetField(i)) {
1586             cache_->fields().push_back(field);
1587           }
1588         }
1589         int input_count = static_cast<int>(cache_->fields().size());
1590         Node* new_object_state =
1591             graph()->NewNode(common()->ObjectState(input_count), input_count,
1592                              &cache_->fields().front());
1593         vobj->SetObjectState(new_object_state);
1594         TRACE(
1595             "Creating object state #%d for vobj %p (from node #%d) at effect "
1596             "#%d\n",
1597             new_object_state->id(), static_cast<void*>(vobj), node->id(),
1598             effect->id());
1599         // Replace fields that are themselves virtual objects by their object states.
1600         for (size_t i = 0; i < vobj->field_count(); ++i) {
1601           if (Node* field = vobj->GetField(i)) {
1602             if (Node* field_object_state =
1603                     GetOrCreateObjectState(effect, field)) {
1604               NodeProperties::ReplaceValueInput(
1605                   new_object_state, field_object_state, static_cast<int>(i));
1606             }
1607           }
1608         }
1609         return new_object_state;
1610       }
1611     }
1612   }
1613   return nullptr;
1614 }
1615 
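// Detects whether the virtual object for {node} transitively references
// itself through its fields, using {cycle_detection_} as the visited set
// of a depth-first search. A cyclic object state cannot be expressed as a
// finite tree of ObjectState nodes, so callers must handle this case
// specially (e.g. by treating the object as escaping).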
1616 bool EscapeAnalysis::IsCyclicObjectState(Node* effect, Node* node) {
1617   if ((node->opcode() == IrOpcode::kFinishRegion ||
1618        node->opcode() == IrOpcode::kAllocate) &&
1619       IsVirtual(node)) {
1620     if (VirtualObject* vobj = GetVirtualObject(virtual_states_[effect->id()],
1621                                                ResolveReplacement(node))) {
1622       if (cycle_detection_.find(vobj) != cycle_detection_.end()) return true;
1623       cycle_detection_.insert(vobj);
1624       bool cycle_detected = false;
1625       for (size_t i = 0; i < vobj->field_count(); ++i) {
1626         if (Node* field = vobj->GetField(i)) {
1627           if (IsCyclicObjectState(effect, field)) cycle_detected = true;
1628         }
1629       }
1630       cycle_detection_.erase(vobj);
1631       return cycle_detected;
1632     }
1633   }
1634   return false;
1635 }
1636 
1637 void EscapeAnalysis::DebugPrintState(VirtualState* state) {
1638   PrintF("Dumping virtual state %p\n", static_cast<void*>(state));
1639   for (Alias alias = 0; alias < status_analysis_->AliasCount(); ++alias) {
1640     if (VirtualObject* object = state->VirtualObjectFromAlias(alias)) {
1641       PrintF("  Alias @%d: Object #%d with %zu fields\n", alias, object->id(),
1642              object->field_count());
1643       for (size_t i = 0; i < object->field_count(); ++i) {
1644         if (Node* f = object->GetField(i)) {
1645           PrintF("    Field %zu = #%d (%s)\n", i, f->id(), f->op()->mnemonic());
1646         }
1647       }
1648     }
1649   }
1650 }
1651 
1652 void EscapeAnalysis::DebugPrint() {
1653   ZoneVector<VirtualState*> object_states(zone());
1654   for (NodeId id = 0; id < virtual_states_.size(); id++) {
1655     if (VirtualState* states = virtual_states_[id]) {
1656       if (std::find(object_states.begin(), object_states.end(), states) ==
1657           object_states.end()) {
1658         object_states.push_back(states);
1659       }
1660     }
1661   }
1662   for (size_t n = 0; n < object_states.size(); n++) {
1663     DebugPrintState(object_states[n]);
1664   }
1665 }
1666 
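// Maps {node} to its virtual object in {state} via the node's alias;
// returns nullptr for nodes that have no alias or no object in this state.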
1667 VirtualObject* EscapeAnalysis::GetVirtualObject(VirtualState* state,
1668                                                 Node* node) {
1669   if (node->id() >= status_analysis_->GetAliasMap().size()) return nullptr;
1670   Alias alias = status_analysis_->GetAlias(node->id());
1671   if (alias >= state->size()) return nullptr;
1672   return state->VirtualObjectFromAlias(alias);
1673 }
1674 
1675 bool EscapeAnalysis::ExistsVirtualAllocate() {
1676   for (size_t id = 0; id < status_analysis_->GetAliasMap().size(); ++id) {
1677     Alias alias = status_analysis_->GetAlias(static_cast<NodeId>(id));
1678     if (alias < EscapeStatusAnalysis::kUntrackable) {
1679       if (status_analysis_->IsVirtual(static_cast<int>(id))) {
1680         return true;
1681       }
1682     }
1683   }
1684   return false;
1685 }
1686 
1687 Graph* EscapeAnalysis::graph() const { return status_analysis_->graph(); }
1688 
1689 }  // namespace compiler
1690 }  // namespace internal
1691 }  // namespace v8
1692