/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "load_store_elimination.h"
#include "side_effects_analysis.h"

#include <iostream>

namespace art {

class ReferenceInfo;

// A cap for the number of heap locations to prevent pathological time/space consumption.
// The number of heap locations for most of the methods stays below this threshold.
constexpr size_t kMaxNumberOfHeapLocations = 32;

// A ReferenceInfo contains additional info about a reference such as
// whether it's a singleton, returned, etc.
class ReferenceInfo : public ArenaObject<kArenaAllocMisc> {
 public:
  ReferenceInfo(HInstruction* reference, size_t pos) : reference_(reference), position_(pos) {
    is_singleton_ = true;
    is_singleton_and_not_returned_ = true;
    if (!reference_->IsNewInstance() && !reference_->IsNewArray()) {
      // For references not allocated in the method, don't assume anything.
      is_singleton_ = false;
      is_singleton_and_not_returned_ = false;
      return;
    }

    // Visit all uses to determine if this reference can spread into the heap,
    // a method call, etc.
    for (const HUseListNode<HInstruction*>& use : reference_->GetUses()) {
      HInstruction* user = use.GetUser();
      DCHECK(!user->IsNullCheck()) << "NullCheck should have been eliminated";
      if (user->IsBoundType()) {
        // BoundType shouldn't normally be necessary for a NewInstance.
        // Just be conservative for the uncommon cases.
        is_singleton_ = false;
        is_singleton_and_not_returned_ = false;
        return;
      }
      if (user->IsPhi() || user->IsSelect() || user->IsInvoke() ||
          (user->IsInstanceFieldSet() && (reference_ == user->InputAt(1))) ||
          (user->IsUnresolvedInstanceFieldSet() && (reference_ == user->InputAt(1))) ||
          (user->IsStaticFieldSet() && (reference_ == user->InputAt(1))) ||
          (user->IsUnresolvedStaticFieldSet() && (reference_ == user->InputAt(0))) ||
          (user->IsArraySet() && (reference_ == user->InputAt(2)))) {
        // reference_ is merged to HPhi/HSelect, passed to a callee, or stored to heap.
        // reference_ isn't the only name that can refer to its value anymore.
        is_singleton_ = false;
        is_singleton_and_not_returned_ = false;
        return;
      }
      if ((user->IsUnresolvedInstanceFieldGet() && (reference_ == user->InputAt(0))) ||
          (user->IsUnresolvedInstanceFieldSet() && (reference_ == user->InputAt(0)))) {
        // The field is accessed in an unresolved way. We conservatively mark the object
        // as a non-singleton to disable load/store optimizations on it.
        // Note that we could still perform some optimizations until we hit the unresolved
        // access, but disabling them entirely is the simplest.
        is_singleton_ = false;
        is_singleton_and_not_returned_ = false;
        return;
      }
      if (user->IsReturn()) {
        is_singleton_and_not_returned_ = false;
      }
    }
  }

  HInstruction* GetReference() const {
    return reference_;
  }

  size_t GetPosition() const {
    return position_;
  }

  // Returns true if reference_ is the only name that can refer to its value during
  // the lifetime of the method. So it's guaranteed to not have any alias in
  // the method (including its callees).
  bool IsSingleton() const {
    return is_singleton_;
  }

  // Returns true if reference_ is a singleton and not returned to the caller.
  // The allocation and stores into reference_ may be eliminated for such cases.
  bool IsSingletonAndNotReturned() const {
    return is_singleton_and_not_returned_;
  }

 private:
  HInstruction* const reference_;
  const size_t position_;     // position in HeapLocationCollector's ref_info_array_.
  bool is_singleton_;         // can only be referred to by a single name in the method.
  bool is_singleton_and_not_returned_;  // reference_ is singleton and not returned to caller.

  DISALLOW_COPY_AND_ASSIGN(ReferenceInfo);
};

// A heap location is a reference-offset/index pair that a value can be loaded from
// or stored to.
class HeapLocation : public ArenaObject<kArenaAllocMisc> {
 public:
  static constexpr size_t kInvalidFieldOffset = -1;

  // TODO: more fine-grained array types.
  static constexpr int16_t kDeclaringClassDefIndexForArrays = -1;

  HeapLocation(ReferenceInfo* ref_info,
               size_t offset,
               HInstruction* index,
               int16_t declaring_class_def_index)
      : ref_info_(ref_info),
        offset_(offset),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        value_killed_by_loop_side_effects_(true) {
    DCHECK(ref_info != nullptr);
    DCHECK((offset == kInvalidFieldOffset && index != nullptr) ||
           (offset != kInvalidFieldOffset && index == nullptr));
    if (ref_info->IsSingleton() && !IsArrayElement()) {
      // Assume this location's value cannot be killed by loop side effects
      // until proven otherwise.
      value_killed_by_loop_side_effects_ = false;
    }
  }

  ReferenceInfo* GetReferenceInfo() const { return ref_info_; }
  size_t GetOffset() const { return offset_; }
  HInstruction* GetIndex() const { return index_; }

  // Returns the dex def index of the declaring class.
  // It's kDeclaringClassDefIndexForArrays for an array element.
  int16_t GetDeclaringClassDefIndex() const {
    return declaring_class_def_index_;
  }

  bool IsArrayElement() const {
    return index_ != nullptr;
  }

  bool IsValueKilledByLoopSideEffects() const {
    return value_killed_by_loop_side_effects_;
  }

  void SetValueKilledByLoopSideEffects(bool val) {
    value_killed_by_loop_side_effects_ = val;
  }

 private:
  ReferenceInfo* const ref_info_;      // reference for instance/static field or array access.
  const size_t offset_;                // offset of static/instance field.
  HInstruction* const index_;          // index of an array element.
  const int16_t declaring_class_def_index_;  // declaring class's def's dex index.
  bool value_killed_by_loop_side_effects_;   // value of this location may be killed by loop
                                             // side effects because this location is stored
                                             // into inside a loop.

  DISALLOW_COPY_AND_ASSIGN(HeapLocation);
};

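// Strips NullCheck/BoundType wrappers to get back to the reference an access is actually
// based on, so that accesses through such wrappers map to the same heap location.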
static HInstruction* HuntForOriginalReference(HInstruction* ref) {
  DCHECK(ref != nullptr);
  while (ref->IsNullCheck() || ref->IsBoundType()) {
    ref = ref->InputAt(0);
  }
  return ref;
}

// A HeapLocationCollector collects all relevant heap locations and keeps
// an aliasing matrix for all locations.
class HeapLocationCollector : public HGraphVisitor {
 public:
  static constexpr size_t kHeapLocationNotFound = -1;
  // Start with a single uint32_t word. That's enough bits for pair-wise
  // aliasing matrix of 8 heap locations.
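  // (8 locations yield 8 * 7 / 2 = 28 distinct pairs, which fits in one 32-bit word.)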
  static constexpr uint32_t kInitialAliasingMatrixBitVectorSize = 32;

  explicit HeapLocationCollector(HGraph* graph)
      : HGraphVisitor(graph),
        ref_info_array_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        heap_locations_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        aliasing_matrix_(graph->GetArena(),
                         kInitialAliasingMatrixBitVectorSize,
                         true,
                         kArenaAllocLSE),
        has_heap_stores_(false),
        has_volatile_(false),
        has_monitor_operations_(false),
        may_deoptimize_(false) {}

  size_t GetNumberOfHeapLocations() const {
    return heap_locations_.size();
  }

  HeapLocation* GetHeapLocation(size_t index) const {
    return heap_locations_[index];
  }

  ReferenceInfo* FindReferenceInfoOf(HInstruction* ref) const {
    for (size_t i = 0; i < ref_info_array_.size(); i++) {
      ReferenceInfo* ref_info = ref_info_array_[i];
      if (ref_info->GetReference() == ref) {
        DCHECK_EQ(i, ref_info->GetPosition());
        return ref_info;
      }
    }
    return nullptr;
  }

  bool HasHeapStores() const {
    return has_heap_stores_;
  }

  bool HasVolatile() const {
    return has_volatile_;
  }

  bool HasMonitorOps() const {
    return has_monitor_operations_;
  }

  // Returns whether this method may be deoptimized.
  // Currently we don't have the metadata support needed to deoptimize
  // a method whose allocations/stores have been eliminated.
  bool MayDeoptimize() const {
    return may_deoptimize_;
  }

  // Find and return the heap location index in heap_locations_.
  size_t FindHeapLocationIndex(ReferenceInfo* ref_info,
                               size_t offset,
                               HInstruction* index,
                               int16_t declaring_class_def_index) const {
    for (size_t i = 0; i < heap_locations_.size(); i++) {
      HeapLocation* loc = heap_locations_[i];
      if (loc->GetReferenceInfo() == ref_info &&
          loc->GetOffset() == offset &&
          loc->GetIndex() == index &&
          loc->GetDeclaringClassDefIndex() == declaring_class_def_index) {
        return i;
      }
    }
    return kHeapLocationNotFound;
  }

  // Returns true if heap_locations_[index1] and heap_locations_[index2] may alias.
  bool MayAlias(size_t index1, size_t index2) const {
    if (index1 < index2) {
      return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index1, index2));
    } else if (index1 > index2) {
      return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index2, index1));
    } else {
      DCHECK(false) << "index1 and index2 are expected to be different";
      return true;
    }
  }

  void BuildAliasingMatrix() {
    const size_t number_of_locations = heap_locations_.size();
    if (number_of_locations == 0) {
      return;
    }
    size_t pos = 0;
    // Compute aliasing info between every pair of different heap locations.
    // Save the result in a matrix represented as a BitVector.
    for (size_t i = 0; i < number_of_locations - 1; i++) {
      for (size_t j = i + 1; j < number_of_locations; j++) {
        if (ComputeMayAlias(i, j)) {
          aliasing_matrix_.SetBit(CheckedAliasingMatrixPosition(i, j, pos));
        }
        pos++;
      }
    }
  }

 private:
  // An allocation cannot alias with a name which already exists at the point
  // of the allocation, such as a parameter or a load happening before the allocation.
  bool MayAliasWithPreexistenceChecking(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
    if (ref_info1->GetReference()->IsNewInstance() || ref_info1->GetReference()->IsNewArray()) {
      // Any reference that can alias with the allocation must appear after it in the block/in
      // the block's successors. In reverse post order, those instructions will be visited after
      // the allocation.
      return ref_info2->GetPosition() >= ref_info1->GetPosition();
    }
    return true;
  }

  bool CanReferencesAlias(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
    if (ref_info1 == ref_info2) {
      return true;
    } else if (ref_info1->IsSingleton()) {
      return false;
    } else if (ref_info2->IsSingleton()) {
      return false;
    } else if (!MayAliasWithPreexistenceChecking(ref_info1, ref_info2) ||
        !MayAliasWithPreexistenceChecking(ref_info2, ref_info1)) {
      return false;
    }
    return true;
  }

  // `index1` and `index2` are indices in the array of collected heap locations.
  // Returns the position in the bit vector that tracks whether the two heap
  // locations may alias.
  size_t AliasingMatrixPosition(size_t index1, size_t index2) const {
    DCHECK(index2 > index1);
    const size_t number_of_locations = heap_locations_.size();
    // It's (num_of_locations - 1) + ... + (num_of_locations - index1) + (index2 - index1 - 1).
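    // For example, with 4 locations the pairs map to bit positions
    // (0,1)->0, (0,2)->1, (0,3)->2, (1,2)->3, (1,3)->4, (2,3)->5.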
    return (number_of_locations * index1 - (1 + index1) * index1 / 2 + (index2 - index1 - 1));
  }

  // An additional position is passed in to make sure the calculated position is correct.
  size_t CheckedAliasingMatrixPosition(size_t index1, size_t index2, size_t position) {
    size_t calculated_position = AliasingMatrixPosition(index1, index2);
    DCHECK_EQ(calculated_position, position);
    return calculated_position;
  }

  // Compute if two locations may alias to each other.
  bool ComputeMayAlias(size_t index1, size_t index2) const {
    HeapLocation* loc1 = heap_locations_[index1];
    HeapLocation* loc2 = heap_locations_[index2];
    if (loc1->GetOffset() != loc2->GetOffset()) {
      // Either two different instance fields, or one is an instance
      // field and the other is an array element.
      return false;
    }
    if (loc1->GetDeclaringClassDefIndex() != loc2->GetDeclaringClassDefIndex()) {
      // Different types.
      return false;
    }
    if (!CanReferencesAlias(loc1->GetReferenceInfo(), loc2->GetReferenceInfo())) {
      return false;
    }
    if (loc1->IsArrayElement() && loc2->IsArrayElement()) {
      HInstruction* array_index1 = loc1->GetIndex();
      HInstruction* array_index2 = loc2->GetIndex();
      DCHECK(array_index1 != nullptr);
      DCHECK(array_index2 != nullptr);
      if (array_index1->IsIntConstant() &&
          array_index2->IsIntConstant() &&
          array_index1->AsIntConstant()->GetValue() != array_index2->AsIntConstant()->GetValue()) {
        // Different constant indices do not alias.
        return false;
      }
    }
    return true;
  }

  ReferenceInfo* GetOrCreateReferenceInfo(HInstruction* instruction) {
    ReferenceInfo* ref_info = FindReferenceInfoOf(instruction);
    if (ref_info == nullptr) {
      size_t pos = ref_info_array_.size();
      ref_info = new (GetGraph()->GetArena()) ReferenceInfo(instruction, pos);
      ref_info_array_.push_back(ref_info);
    }
    return ref_info;
  }

  void CreateReferenceInfoForReferenceType(HInstruction* instruction) {
    if (instruction->GetType() != Primitive::kPrimNot) {
      return;
    }
    DCHECK(FindReferenceInfoOf(instruction) == nullptr);
    GetOrCreateReferenceInfo(instruction);
  }

  HeapLocation* GetOrCreateHeapLocation(HInstruction* ref,
                                        size_t offset,
                                        HInstruction* index,
                                        int16_t declaring_class_def_index) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = GetOrCreateReferenceInfo(original_ref);
    size_t heap_location_idx = FindHeapLocationIndex(
        ref_info, offset, index, declaring_class_def_index);
    if (heap_location_idx == kHeapLocationNotFound) {
      HeapLocation* heap_loc = new (GetGraph()->GetArena())
          HeapLocation(ref_info, offset, index, declaring_class_def_index);
      heap_locations_.push_back(heap_loc);
      return heap_loc;
    }
    return heap_locations_[heap_location_idx];
  }

  HeapLocation* VisitFieldAccess(HInstruction* ref, const FieldInfo& field_info) {
    if (field_info.IsVolatile()) {
      has_volatile_ = true;
    }
    const uint16_t declaring_class_def_index = field_info.GetDeclaringClassDefIndex();
    const size_t offset = field_info.GetFieldOffset().SizeValue();
    return GetOrCreateHeapLocation(ref, offset, nullptr, declaring_class_def_index);
  }

  void VisitArrayAccess(HInstruction* array, HInstruction* index) {
    GetOrCreateHeapLocation(array, HeapLocation::kInvalidFieldOffset,
        index, HeapLocation::kDeclaringClassDefIndexForArrays);
  }

  void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
    HeapLocation* location = VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    has_heap_stores_ = true;
    if (instruction->GetBlock()->GetLoopInformation() != nullptr) {
      location->SetValueKilledByLoopSideEffects(true);
    }
  }

  void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    has_heap_stores_ = true;
  }

  // We intentionally don't collect HUnresolvedInstanceField/HUnresolvedStaticField accesses
  // since we cannot accurately track the fields.

  void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
    VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitArraySet(HArraySet* instruction) OVERRIDE {
    VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
    has_heap_stores_ = true;
  }

  void VisitNewInstance(HNewInstance* new_instance) OVERRIDE {
    // Any references appearing in the ref_info_array_ so far cannot alias with new_instance.
    CreateReferenceInfoForReferenceType(new_instance);
  }

  void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* instruction) OVERRIDE {
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitInvokeVirtual(HInvokeVirtual* instruction) OVERRIDE {
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitInvokeInterface(HInvokeInterface* instruction) OVERRIDE {
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitParameterValue(HParameterValue* instruction) OVERRIDE {
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitSelect(HSelect* instruction) OVERRIDE {
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitDeoptimize(HDeoptimize* instruction ATTRIBUTE_UNUSED) OVERRIDE {
    may_deoptimize_ = true;
  }

  void VisitMonitorOperation(HMonitorOperation* monitor ATTRIBUTE_UNUSED) OVERRIDE {
    has_monitor_operations_ = true;
  }

  ArenaVector<ReferenceInfo*> ref_info_array_;   // All references used for heap accesses.
  ArenaVector<HeapLocation*> heap_locations_;    // All heap locations.
  ArenaBitVector aliasing_matrix_;    // aliasing info between each pair of locations.
  bool has_heap_stores_;    // If there are no heap stores, LSE acts as GVN with better
                            // alias analysis and won't be as effective.
  bool has_volatile_;       // If there are volatile field accesses.
  bool has_monitor_operations_;    // If there are monitor operations.
  bool may_deoptimize_;

  DISALLOW_COPY_AND_ASSIGN(HeapLocationCollector);
};

// An unknown heap value. Loads with such a value in the heap location cannot be eliminated.
// A heap location is set to kUnknownHeapValue when:
// - it is initialized, before any value has been recorded for it, or
// - its value is killed due to aliasing, merging, invocation, or loop side effects.
static HInstruction* const kUnknownHeapValue =
    reinterpret_cast<HInstruction*>(static_cast<uintptr_t>(-1));

// Default heap value after an allocation.
// A heap location can be set to that value right after an allocation.
static HInstruction* const kDefaultHeapValue =
    reinterpret_cast<HInstruction*>(static_cast<uintptr_t>(-2));

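// Performs the actual elimination: visits the blocks in reverse post order, tracking the
// last known value of each heap location per block, and records loads whose values are
// already known and stores that look redundant so they can be removed afterwards.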
class LSEVisitor : public HGraphVisitor {
 public:
  LSEVisitor(HGraph* graph,
             const HeapLocationCollector& heap_locations_collector,
             const SideEffectsAnalysis& side_effects)
      : HGraphVisitor(graph),
        heap_location_collector_(heap_locations_collector),
        side_effects_(side_effects),
        heap_values_for_(graph->GetBlocks().size(),
                         ArenaVector<HInstruction*>(heap_locations_collector.
                                                        GetNumberOfHeapLocations(),
                                                    kUnknownHeapValue,
                                                    graph->GetArena()->Adapter(kArenaAllocLSE)),
                         graph->GetArena()->Adapter(kArenaAllocLSE)),
        removed_loads_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        substitute_instructions_for_loads_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        possibly_removed_stores_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        singleton_new_instances_(graph->GetArena()->Adapter(kArenaAllocLSE)) {
  }

  void VisitBasicBlock(HBasicBlock* block) OVERRIDE {
    // Populate the heap_values array for this block.
    // TODO: try to reuse the heap_values array from one predecessor if possible.
    if (block->IsLoopHeader()) {
      HandleLoopSideEffects(block);
    } else {
      MergePredecessorValues(block);
    }
    HGraphVisitor::VisitBasicBlock(block);
  }

  // Remove recorded instructions that should be eliminated.
  void RemoveInstructions() {
    size_t size = removed_loads_.size();
    DCHECK_EQ(size, substitute_instructions_for_loads_.size());
    for (size_t i = 0; i < size; i++) {
      HInstruction* load = removed_loads_[i];
      DCHECK(load != nullptr);
      DCHECK(load->IsInstanceFieldGet() ||
             load->IsStaticFieldGet() ||
             load->IsArrayGet());
      HInstruction* substitute = substitute_instructions_for_loads_[i];
      DCHECK(substitute != nullptr);
      // Keep tracing the substitute until we find one that's not removed.
      HInstruction* sub_sub = FindSubstitute(substitute);
      while (sub_sub != substitute) {
        substitute = sub_sub;
        sub_sub = FindSubstitute(substitute);
      }
      load->ReplaceWith(substitute);
      load->GetBlock()->RemoveInstruction(load);
    }

    // At this point, stores in possibly_removed_stores_ can be safely removed.
    size = possibly_removed_stores_.size();
    for (size_t i = 0; i < size; i++) {
      HInstruction* store = possibly_removed_stores_[i];
      DCHECK(store->IsInstanceFieldSet() || store->IsStaticFieldSet() || store->IsArraySet());
      store->GetBlock()->RemoveInstruction(store);
    }

    // TODO: remove unnecessary allocations.
    // Eliminate instructions in singleton_new_instances_ that:
    // - don't have uses,
    // - don't have finalizers,
    // - are instantiable and accessible,
    // - have no/separate clinit check.
  }

 private:
  // If heap_values[index] is an instance field store, we need to keep the store.
  // This is necessary when a heap value is killed due to merging or loop side
  // effects (which is essentially also merging), since a later load from the
  // location won't be eliminated.
  void KeepIfIsStore(HInstruction* heap_value) {
    if (heap_value == kDefaultHeapValue ||
        heap_value == kUnknownHeapValue ||
        !heap_value->IsInstanceFieldSet()) {
      return;
    }
    auto idx = std::find(possibly_removed_stores_.begin(),
        possibly_removed_stores_.end(), heap_value);
    if (idx != possibly_removed_stores_.end()) {
      // Make sure the store is kept.
      possibly_removed_stores_.erase(idx);
    }
  }

  void HandleLoopSideEffects(HBasicBlock* block) {
    DCHECK(block->IsLoopHeader());
    int block_id = block->GetBlockId();
    ArenaVector<HInstruction*>& heap_values = heap_values_for_[block_id];

    // Don't eliminate loads in irreducible loops. This is safe for singletons, because
    // they are always used by the non-eliminated loop-phi.
    if (block->GetLoopInformation()->IsIrreducible()) {
      if (kIsDebugBuild) {
        for (size_t i = 0; i < heap_values.size(); i++) {
          DCHECK_EQ(heap_values[i], kUnknownHeapValue);
        }
      }
      return;
    }

    HBasicBlock* pre_header = block->GetLoopInformation()->GetPreHeader();
    ArenaVector<HInstruction*>& pre_header_heap_values =
        heap_values_for_[pre_header->GetBlockId()];

    // Inherit the values from the pre-header.
    for (size_t i = 0; i < heap_values.size(); i++) {
      heap_values[i] = pre_header_heap_values[i];
    }

    // We do a single pass in reverse post order. For loops, use the side effects as a hint
    // to see if the heap values should be killed.
    if (side_effects_.GetLoopEffects(block).DoesAnyWrite()) {
      for (size_t i = 0; i < heap_values.size(); i++) {
        HeapLocation* location = heap_location_collector_.GetHeapLocation(i);
        ReferenceInfo* ref_info = location->GetReferenceInfo();
        if (!ref_info->IsSingleton() || location->IsValueKilledByLoopSideEffects()) {
          // heap value is killed by loop side effects (stored into directly, or due to
          // aliasing).
          KeepIfIsStore(pre_header_heap_values[i]);
          heap_values[i] = kUnknownHeapValue;
        } else {
          // A singleton's field that's not stored into inside a loop is invariant throughout
          // the loop.
        }
      }
    }
  }

  void MergePredecessorValues(HBasicBlock* block) {
    const ArenaVector<HBasicBlock*>& predecessors = block->GetPredecessors();
    if (predecessors.size() == 0) {
      return;
    }
    ArenaVector<HInstruction*>& heap_values = heap_values_for_[block->GetBlockId()];
    for (size_t i = 0; i < heap_values.size(); i++) {
      HInstruction* pred0_value = heap_values_for_[predecessors[0]->GetBlockId()][i];
      heap_values[i] = pred0_value;
      if (pred0_value != kUnknownHeapValue) {
        for (size_t j = 1; j < predecessors.size(); j++) {
          HInstruction* pred_value = heap_values_for_[predecessors[j]->GetBlockId()][i];
          if (pred_value != pred0_value) {
            heap_values[i] = kUnknownHeapValue;
            break;
          }
        }
      }

      if (heap_values[i] == kUnknownHeapValue) {
        // Keep the last store in each predecessor since future loads cannot be eliminated.
        for (size_t j = 0; j < predecessors.size(); j++) {
          ArenaVector<HInstruction*>& pred_values = heap_values_for_[predecessors[j]->GetBlockId()];
          KeepIfIsStore(pred_values[i]);
        }
      }
    }
  }

  // `instruction` is being removed. Try to see if the null check on it
  // can be removed as well. This can happen if the same value is set in two branches
  // but not in dominators. For example:
  //   int[] a = foo();
  //   if () {
  //     a[0] = 2;
  //   } else {
  //     a[0] = 2;
  //   }
  //   // a[0] can now be replaced with constant 2, and the null check on it can be removed.
  void TryRemovingNullCheck(HInstruction* instruction) {
    HInstruction* prev = instruction->GetPrevious();
    if ((prev != nullptr) && prev->IsNullCheck() && (prev == instruction->InputAt(0))) {
      // Previous instruction is a null check for this instruction. Remove the null check.
      prev->ReplaceWith(prev->InputAt(0));
      prev->GetBlock()->RemoveInstruction(prev);
    }
  }

  HInstruction* GetDefaultValue(Primitive::Type type) {
    switch (type) {
      case Primitive::kPrimNot:
        return GetGraph()->GetNullConstant();
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
        return GetGraph()->GetIntConstant(0);
      case Primitive::kPrimLong:
        return GetGraph()->GetLongConstant(0);
      case Primitive::kPrimFloat:
        return GetGraph()->GetFloatConstant(0);
      case Primitive::kPrimDouble:
        return GetGraph()->GetDoubleConstant(0);
      default:
        UNREACHABLE();
    }
  }

  void VisitGetLocation(HInstruction* instruction,
                        HInstruction* ref,
                        size_t offset,
                        HInstruction* index,
                        int16_t declaring_class_def_index) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(original_ref);
    size_t idx = heap_location_collector_.FindHeapLocationIndex(
        ref_info, offset, index, declaring_class_def_index);
    DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[instruction->GetBlock()->GetBlockId()];
    HInstruction* heap_value = heap_values[idx];
    if (heap_value == kDefaultHeapValue) {
      HInstruction* constant = GetDefaultValue(instruction->GetType());
      removed_loads_.push_back(instruction);
      substitute_instructions_for_loads_.push_back(constant);
      heap_values[idx] = constant;
      return;
    }
    if (heap_value != kUnknownHeapValue && heap_value->IsInstanceFieldSet()) {
      HInstruction* store = heap_value;
      // This load must be from a singleton, since it loads from the same field
      // into which a "removed" store put its value, and such a store must be to a
      // singleton's field.
      DCHECK(ref_info->IsSingleton());
      // Get the real heap value of the store.
      heap_value = store->InputAt(1);
    }
    if (heap_value == kUnknownHeapValue) {
      // Load isn't eliminated. Put the load as the value into the HeapLocation.
      // This acts like GVN but with better aliasing analysis.
      heap_values[idx] = instruction;
    } else {
      if (Primitive::PrimitiveKind(heap_value->GetType())
              != Primitive::PrimitiveKind(instruction->GetType())) {
        // The only situation where the same heap location has different type is when
        // we do an array get on an instruction that originates from the null constant
        // (the null could be behind a field access, an array access, a null check or
        // a bound type).
        // In order to stay properly typed on primitive types, we do not eliminate
        // the array gets.
        if (kIsDebugBuild) {
          DCHECK(heap_value->IsArrayGet()) << heap_value->DebugName();
          DCHECK(instruction->IsArrayGet()) << instruction->DebugName();
        }
        return;
      }
      removed_loads_.push_back(instruction);
      substitute_instructions_for_loads_.push_back(heap_value);
      TryRemovingNullCheck(instruction);
    }
  }

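  // Returns whether `value` equals the tracked heap value, treating the default heap
  // value as equal to the zero/null constant of the value's type.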
  bool Equal(HInstruction* heap_value, HInstruction* value) {
    if (heap_value == value) {
      return true;
    }
    if (heap_value == kDefaultHeapValue && GetDefaultValue(value->GetType()) == value) {
      return true;
    }
    return false;
  }

  void VisitSetLocation(HInstruction* instruction,
                        HInstruction* ref,
                        size_t offset,
                        HInstruction* index,
                        int16_t declaring_class_def_index,
                        HInstruction* value) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(original_ref);
    size_t idx = heap_location_collector_.FindHeapLocationIndex(
        ref_info, offset, index, declaring_class_def_index);
    DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[instruction->GetBlock()->GetBlockId()];
    HInstruction* heap_value = heap_values[idx];
    bool same_value = false;
    bool possibly_redundant = false;
    if (Equal(heap_value, value)) {
      // Store into the heap location with the same value.
      same_value = true;
    } else if (index != nullptr) {
      // For array elements, don't eliminate stores since they can easily be aliased
      // with a non-constant index.
    } else if (!heap_location_collector_.MayDeoptimize() &&
               ref_info->IsSingletonAndNotReturned()) {
      // Store into a field of a singleton that's not returned. The value cannot be
      // killed due to aliasing/invocation. It can be redundant since future loads can
      // directly get the value set by this instruction. The value can still be killed due to
      // merging or loop side effects. Stores whose values are killed due to merging/loop side
      // effects later will be removed from possibly_removed_stores_ when that is detected.
      possibly_redundant = true;
      HNewInstance* new_instance = ref_info->GetReference()->AsNewInstance();
      DCHECK(new_instance != nullptr);
      if (new_instance->IsFinalizable()) {
        // Finalizable objects escape globally. Need to keep the store.
        possibly_redundant = false;
      } else {
        HLoopInformation* loop_info = instruction->GetBlock()->GetLoopInformation();
        if (loop_info != nullptr) {
          // instruction is a store in the loop so the loop must do writes.
          DCHECK(side_effects_.GetLoopEffects(loop_info->GetHeader()).DoesAnyWrite());
          // If it's a singleton, IsValueKilledByLoopSideEffects() must be true.
          DCHECK(!ref_info->IsSingleton() ||
                 heap_location_collector_.GetHeapLocation(idx)->IsValueKilledByLoopSideEffects());

          if (loop_info->IsDefinedOutOfTheLoop(original_ref)) {
            DCHECK(original_ref->GetBlock()->Dominates(loop_info->GetPreHeader()));
            // Keep the store since its value may be needed at the loop header.
            possibly_redundant = false;
          } else {
            // The singleton is created inside the loop. The value stored into it isn't needed
            // at the loop header. This is true for outer loops also.
          }
        }
      }
    }
    if (same_value || possibly_redundant) {
      possibly_removed_stores_.push_back(instruction);
    }

    if (!same_value) {
      if (possibly_redundant) {
        DCHECK(instruction->IsInstanceFieldSet());
        // Put the store as the heap value. If the value is loaded from heap
        // by a load later, this store isn't really redundant.
        heap_values[idx] = instruction;
      } else {
        heap_values[idx] = value;
      }
    }
    // This store may kill values in other heap locations due to aliasing.
    for (size_t i = 0; i < heap_values.size(); i++) {
      if (i == idx) {
        continue;
      }
      if (heap_values[i] == value) {
        // Same value should be kept even if aliasing happens.
        continue;
      }
      if (heap_values[i] == kUnknownHeapValue) {
        // Value is already unknown, no need for aliasing check.
        continue;
      }
      if (heap_location_collector_.MayAlias(i, idx)) {
        // Kill heap locations that may alias.
        heap_values[i] = kUnknownHeapValue;
      }
    }
  }

  void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
    HInstruction* obj = instruction->InputAt(0);
    size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
    int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
    VisitGetLocation(instruction, obj, offset, nullptr, declaring_class_def_index);
  }

  void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
    HInstruction* obj = instruction->InputAt(0);
    size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
    int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
    HInstruction* value = instruction->InputAt(1);
    VisitSetLocation(instruction, obj, offset, nullptr, declaring_class_def_index, value);
  }

  void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
    HInstruction* cls = instruction->InputAt(0);
    size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
    int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
    VisitGetLocation(instruction, cls, offset, nullptr, declaring_class_def_index);
  }

  void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
    HInstruction* cls = instruction->InputAt(0);
    size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
    int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
    HInstruction* value = instruction->InputAt(1);
    VisitSetLocation(instruction, cls, offset, nullptr, declaring_class_def_index, value);
  }

  void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    VisitGetLocation(instruction,
                     array,
                     HeapLocation::kInvalidFieldOffset,
                     index,
                     HeapLocation::kDeclaringClassDefIndexForArrays);
  }

  void VisitArraySet(HArraySet* instruction) OVERRIDE {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    HInstruction* value = instruction->InputAt(2);
    VisitSetLocation(instruction,
                     array,
                     HeapLocation::kInvalidFieldOffset,
                     index,
                     HeapLocation::kDeclaringClassDefIndexForArrays,
                     value);
  }

  void HandleInvoke(HInstruction* invoke) {
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[invoke->GetBlock()->GetBlockId()];
    for (size_t i = 0; i < heap_values.size(); i++) {
      ReferenceInfo* ref_info = heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo();
      if (ref_info->IsSingleton()) {
        // Singleton references cannot be seen by the callee.
      } else {
        heap_values[i] = kUnknownHeapValue;
      }
    }
  }

  void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }

  void VisitInvokeVirtual(HInvokeVirtual* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }

  void VisitInvokeInterface(HInvokeInterface* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }

  void VisitInvokeUnresolved(HInvokeUnresolved* invoke) OVERRIDE {
    HandleInvoke(invoke);
  }

  void VisitClinitCheck(HClinitCheck* clinit) OVERRIDE {
    HandleInvoke(clinit);
  }

  void VisitUnresolvedInstanceFieldGet(HUnresolvedInstanceFieldGet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation.
    HandleInvoke(instruction);
  }

  void VisitUnresolvedInstanceFieldSet(HUnresolvedInstanceFieldSet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation.
    HandleInvoke(instruction);
  }

  void VisitUnresolvedStaticFieldGet(HUnresolvedStaticFieldGet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation.
    HandleInvoke(instruction);
  }

  void VisitUnresolvedStaticFieldSet(HUnresolvedStaticFieldSet* instruction) OVERRIDE {
    // Conservatively treat it as an invocation.
    HandleInvoke(instruction);
  }

  void VisitNewInstance(HNewInstance* new_instance) OVERRIDE {
    ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(new_instance);
    if (ref_info == nullptr) {
      // new_instance isn't used for field accesses. No need to process it.
      return;
    }
    if (!heap_location_collector_.MayDeoptimize() &&
        ref_info->IsSingletonAndNotReturned() &&
        !new_instance->IsFinalizable() &&
        !new_instance->CanThrow()) {
      // TODO: add new_instance to singleton_new_instances_ and enable allocation elimination.
    }
    ArenaVector<HInstruction*>& heap_values =
        heap_values_for_[new_instance->GetBlock()->GetBlockId()];
    for (size_t i = 0; i < heap_values.size(); i++) {
      HInstruction* ref =
          heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo()->GetReference();
      size_t offset = heap_location_collector_.GetHeapLocation(i)->GetOffset();
      if (ref == new_instance && offset >= mirror::kObjectHeaderSize) {
        // Instance fields except the header fields are set to default heap values.
        heap_values[i] = kDefaultHeapValue;
      }
    }
  }

  // Find an instruction's substitute if it should be removed.
  // Return the same instruction if it should not be removed.
  HInstruction* FindSubstitute(HInstruction* instruction) {
    size_t size = removed_loads_.size();
    for (size_t i = 0; i < size; i++) {
      if (removed_loads_[i] == instruction) {
        return substitute_instructions_for_loads_[i];
      }
    }
    return instruction;
  }

  const HeapLocationCollector& heap_location_collector_;
  const SideEffectsAnalysis& side_effects_;

  // One array of heap values for each block.
  ArenaVector<ArenaVector<HInstruction*>> heap_values_for_;
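  // Indexed as heap_values_for_[block_id][heap_location_index]; holds the value currently
  // known for each heap location in that block, and is read by successors when merging.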

  // We record the instructions that should be eliminated but may be
  // used by heap locations. They'll be removed in the end.
  ArenaVector<HInstruction*> removed_loads_;
  ArenaVector<HInstruction*> substitute_instructions_for_loads_;

  // Stores in this list may be removed from the list later when it's
  // found that the store cannot be eliminated.
  ArenaVector<HInstruction*> possibly_removed_stores_;

  ArenaVector<HInstruction*> singleton_new_instances_;
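  // Singleton allocations that are candidates for elimination. Not populated yet;
  // see the TODOs in VisitNewInstance() and RemoveInstructions().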

  DISALLOW_COPY_AND_ASSIGN(LSEVisitor);
};

void LoadStoreElimination::Run() {
  if (graph_->IsDebuggable() || graph_->HasTryCatch()) {
    // Debugger may set heap values or trigger deoptimization of callers.
    // Try/catch support not implemented yet.
    // Skip this optimization.
    return;
  }
  HeapLocationCollector heap_location_collector(graph_);
  for (HReversePostOrderIterator it(*graph_); !it.Done(); it.Advance()) {
    heap_location_collector.VisitBasicBlock(it.Current());
  }
  if (heap_location_collector.GetNumberOfHeapLocations() > kMaxNumberOfHeapLocations) {
    // Bail out if there are too many heap locations to deal with.
    return;
  }
  if (!heap_location_collector.HasHeapStores()) {
    // Without heap stores, this pass would act mostly as GVN on heap accesses.
    return;
  }
  if (heap_location_collector.HasVolatile() || heap_location_collector.HasMonitorOps()) {
    // Don't do load/store elimination if the method has volatile field accesses or
    // monitor operations, for now.
    // TODO: do it right.
    return;
  }
  heap_location_collector.BuildAliasingMatrix();
  LSEVisitor lse_visitor(graph_, heap_location_collector, side_effects_);
  for (HReversePostOrderIterator it(*graph_); !it.Done(); it.Advance()) {
    lse_visitor.VisitBasicBlock(it.Current());
  }
  lse_visitor.RemoveInstructions();
}

}  // namespace art