1 /*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "load_store_elimination.h"
18
19 #include "escape.h"
20 #include "side_effects_analysis.h"
21
22 #include <iostream>
23
24 namespace art {
25
26 class ReferenceInfo;
27
28 // A cap for the number of heap locations to prevent pathological time/space consumption.
29 // The number of heap locations for most of the methods stays below this threshold.
30 constexpr size_t kMaxNumberOfHeapLocations = 32;
31
32 // A ReferenceInfo contains additional info about a reference such as
33 // whether it's a singleton, returned, etc.
34 class ReferenceInfo : public ArenaObject<kArenaAllocMisc> {
35 public:
36 ReferenceInfo(HInstruction* reference, size_t pos)
37 : reference_(reference),
38 position_(pos),
39 is_singleton_(true),
40 is_singleton_and_not_returned_(true),
41 is_singleton_and_not_deopt_visible_(true),
42 has_index_aliasing_(false) {
43 CalculateEscape(reference_,
44 nullptr,
45 &is_singleton_,
46 &is_singleton_and_not_returned_,
47 &is_singleton_and_not_deopt_visible_);
48 }
49
50 HInstruction* GetReference() const {
51 return reference_;
52 }
53
54 size_t GetPosition() const {
55 return position_;
56 }
57
58 // Returns true if reference_ is the only name that can refer to its value during
59 // the lifetime of the method. So it's guaranteed to not have any alias in
60 // the method (including its callees).
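// For example (an illustrative sketch; `Foo` and `intField` are made-up names):
//   Foo f = new Foo();    // `f` is a singleton if the allocation never gains
//   f.intField = 1;       // another name: it is not returned, not stored into
//   int x = f.intField;   // another object, and not passed where it may escape.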
61 bool IsSingleton() const {
62 return is_singleton_;
63 }
64
65 // Returns true if reference_ is a singleton and not returned to the caller or
66 // used as an environment local of an HDeoptimize instruction.
67 // The allocation and stores into reference_ may be eliminated for such cases.
68 bool IsSingletonAndRemovable() const {
69 return is_singleton_and_not_returned_ && is_singleton_and_not_deopt_visible_;
70 }
71
72 // Returns true if reference_ is a singleton and returned to the caller or
73 // used as an environment local of an HDeoptimize instruction.
74 bool IsSingletonAndNonRemovable() const {
75 return is_singleton_ &&
76 (!is_singleton_and_not_returned_ || !is_singleton_and_not_deopt_visible_);
77 }
78
79 bool HasIndexAliasing() {
80 return has_index_aliasing_;
81 }
82
83 void SetHasIndexAliasing(bool has_index_aliasing) {
84 // Only allow setting to true.
85 DCHECK(has_index_aliasing);
86 has_index_aliasing_ = has_index_aliasing;
87 }
88
89 private:
90 HInstruction* const reference_;
91 const size_t position_; // position in HeapLocationCollector's ref_info_array_.
92
93 // Can only be referred to by a single name in the method.
94 bool is_singleton_;
95 // Is singleton and not returned to caller.
96 bool is_singleton_and_not_returned_;
97 // Is singleton and not used as an environment local of HDeoptimize.
98 bool is_singleton_and_not_deopt_visible_;
99 // Some heap locations with reference_ have array index aliasing,
100 // e.g. arr[i] and arr[j] may be the same location.
101 bool has_index_aliasing_;
102
103 DISALLOW_COPY_AND_ASSIGN(ReferenceInfo);
104 };
105
106 // A heap location is a reference-offset/index pair that a value can be loaded from
107 // or stored to.
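// For example (illustrative): an access to `obj.field` maps to the pair
// (reference of `obj`, offset of `field`), while `arr[i]` maps to
// (reference of `arr`, the index instruction computing `i`).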
108 class HeapLocation : public ArenaObject<kArenaAllocMisc> {
109 public:
110 static constexpr size_t kInvalidFieldOffset = -1;
111
112 // TODO: more fine-grained array types.
113 static constexpr int16_t kDeclaringClassDefIndexForArrays = -1;
114
115 HeapLocation(ReferenceInfo* ref_info,
116 size_t offset,
117 HInstruction* index,
118 int16_t declaring_class_def_index)
119 : ref_info_(ref_info),
120 offset_(offset),
121 index_(index),
122 declaring_class_def_index_(declaring_class_def_index),
123 value_killed_by_loop_side_effects_(true) {
124 DCHECK(ref_info != nullptr);
125 DCHECK((offset == kInvalidFieldOffset && index != nullptr) ||
126 (offset != kInvalidFieldOffset && index == nullptr));
127 if (ref_info->IsSingleton() && !IsArrayElement()) {
128 // Assume this location's value cannot be killed by loop side effects
129 // until proven otherwise.
130 value_killed_by_loop_side_effects_ = false;
131 }
132 }
133
134 ReferenceInfo* GetReferenceInfo() const { return ref_info_; }
135 size_t GetOffset() const { return offset_; }
136 HInstruction* GetIndex() const { return index_; }
137
138 // Returns the dex def index of the declaring class.
139 // It's kDeclaringClassDefIndexForArrays for an array element.
140 int16_t GetDeclaringClassDefIndex() const {
141 return declaring_class_def_index_;
142 }
143
144 bool IsArrayElement() const {
145 return index_ != nullptr;
146 }
147
148 bool IsValueKilledByLoopSideEffects() const {
149 return value_killed_by_loop_side_effects_;
150 }
151
152 void SetValueKilledByLoopSideEffects(bool val) {
153 value_killed_by_loop_side_effects_ = val;
154 }
155
156 private:
157 ReferenceInfo* const ref_info_; // reference for instance/static field or array access.
158 const size_t offset_; // offset of static/instance field.
159 HInstruction* const index_; // index of an array element.
160 const int16_t declaring_class_def_index_; // declaring class's def's dex index.
161 bool value_killed_by_loop_side_effects_; // value of this location may be killed by loop
162 // side effects because this location is stored
163 // into inside a loop. This gives
164 // better info on whether a singleton's location
165 // value may be killed by loop side effects.
166
167 DISALLOW_COPY_AND_ASSIGN(HeapLocation);
168 };
169
170 static HInstruction* HuntForOriginalReference(HInstruction* ref) {
171 DCHECK(ref != nullptr);
172 while (ref->IsNullCheck() || ref->IsBoundType()) {
173 ref = ref->InputAt(0);
174 }
175 return ref;
176 }
177
178 // A HeapLocationCollector collects all relevant heap locations and keeps
179 // an aliasing matrix for all locations.
180 class HeapLocationCollector : public HGraphVisitor {
181 public:
182 static constexpr size_t kHeapLocationNotFound = -1;
183 // Start with a single uint32_t word. That's enough bits for pair-wise
184 // aliasing matrix of 8 heap locations.
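// (8 locations give 8 * 7 / 2 = 28 unordered pairs, which fits in 32 bits.)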
185 static constexpr uint32_t kInitialAliasingMatrixBitVectorSize = 32;
186
187 explicit HeapLocationCollector(HGraph* graph)
188 : HGraphVisitor(graph),
189 ref_info_array_(graph->GetArena()->Adapter(kArenaAllocLSE)),
190 heap_locations_(graph->GetArena()->Adapter(kArenaAllocLSE)),
191 aliasing_matrix_(graph->GetArena(),
192 kInitialAliasingMatrixBitVectorSize,
193 true,
194 kArenaAllocLSE),
195 has_heap_stores_(false),
196 has_volatile_(false),
197 has_monitor_operations_(false) {}
198
199 size_t GetNumberOfHeapLocations() const {
200 return heap_locations_.size();
201 }
202
203 HeapLocation* GetHeapLocation(size_t index) const {
204 return heap_locations_[index];
205 }
206
207 ReferenceInfo* FindReferenceInfoOf(HInstruction* ref) const {
208 for (size_t i = 0; i < ref_info_array_.size(); i++) {
209 ReferenceInfo* ref_info = ref_info_array_[i];
210 if (ref_info->GetReference() == ref) {
211 DCHECK_EQ(i, ref_info->GetPosition());
212 return ref_info;
213 }
214 }
215 return nullptr;
216 }
217
218 bool HasHeapStores() const {
219 return has_heap_stores_;
220 }
221
222 bool HasVolatile() const {
223 return has_volatile_;
224 }
225
226 bool HasMonitorOps() const {
227 return has_monitor_operations_;
228 }
229
230 // Find and return the heap location index in heap_locations_.
231 size_t FindHeapLocationIndex(ReferenceInfo* ref_info,
232 size_t offset,
233 HInstruction* index,
234 int16_t declaring_class_def_index) const {
235 for (size_t i = 0; i < heap_locations_.size(); i++) {
236 HeapLocation* loc = heap_locations_[i];
237 if (loc->GetReferenceInfo() == ref_info &&
238 loc->GetOffset() == offset &&
239 loc->GetIndex() == index &&
240 loc->GetDeclaringClassDefIndex() == declaring_class_def_index) {
241 return i;
242 }
243 }
244 return kHeapLocationNotFound;
245 }
246
247 // Returns true if heap_locations_[index1] and heap_locations_[index2] may alias.
248 bool MayAlias(size_t index1, size_t index2) const {
249 if (index1 < index2) {
250 return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index1, index2));
251 } else if (index1 > index2) {
252 return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index2, index1));
253 } else {
254 DCHECK(false) << "index1 and index2 are expected to be different";
255 return true;
256 }
257 }
258
259 void BuildAliasingMatrix() {
260 const size_t number_of_locations = heap_locations_.size();
261 if (number_of_locations == 0) {
262 return;
263 }
264 size_t pos = 0;
265 // Compute aliasing info between every pair of different heap locations.
266 // Save the result in a matrix represented as a BitVector.
267 for (size_t i = 0; i < number_of_locations - 1; i++) {
268 for (size_t j = i + 1; j < number_of_locations; j++) {
269 if (ComputeMayAlias(i, j)) {
270 aliasing_matrix_.SetBit(CheckedAliasingMatrixPosition(i, j, pos));
271 }
272 pos++;
273 }
274 }
275 }
276
277 private:
278 // An allocation cannot alias with a name which already exists at the point
279 // of the allocation, such as a parameter or a load happening before the allocation.
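// For example (an illustrative sketch; names are made up):
//   void foo(Foo p) {
//     Foo n = new Foo();  // `n` cannot alias `p`: `p` already existed when the
//     ...                 // allocation ran, so only references created after
//   }                     // the allocation may alias it.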
280 bool MayAliasWithPreexistenceChecking(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
281 if (ref_info1->GetReference()->IsNewInstance() || ref_info1->GetReference()->IsNewArray()) {
282 // Any reference that can alias with the allocation must appear after it in the block/in
283 // the block's successors. In reverse post order, those instructions will be visited after
284 // the allocation.
285 return ref_info2->GetPosition() >= ref_info1->GetPosition();
286 }
287 return true;
288 }
289
290 bool CanReferencesAlias(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
291 if (ref_info1 == ref_info2) {
292 return true;
293 } else if (ref_info1->IsSingleton()) {
294 return false;
295 } else if (ref_info2->IsSingleton()) {
296 return false;
297 } else if (!MayAliasWithPreexistenceChecking(ref_info1, ref_info2) ||
298 !MayAliasWithPreexistenceChecking(ref_info2, ref_info1)) {
299 return false;
300 }
301 return true;
302 }
303
304 // `index1` and `index2` are indices in the array of collected heap locations.
305 // Returns the position in the bit vector that tracks whether the two heap
306 // locations may alias.
307 size_t AliasingMatrixPosition(size_t index1, size_t index2) const {
308 DCHECK(index2 > index1);
309 const size_t number_of_locations = heap_locations_.size();
310 // It's (num_of_locations - 1) + ... + (num_of_locations - index1) + (index2 - index1 - 1).
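// E.g. (worked example) with 4 locations the pairs map to (0,1)->0, (0,2)->1,
// (0,3)->2, (1,2)->3, (1,3)->4, (2,3)->5; for (1,3): 4*1 - 2*1/2 + (3-1-1) = 4.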
311 return (number_of_locations * index1 - (1 + index1) * index1 / 2 + (index2 - index1 - 1));
312 }
313
314 // An additional position is passed in to make sure the calculated position is correct.
315 size_t CheckedAliasingMatrixPosition(size_t index1, size_t index2, size_t position) {
316 size_t calculated_position = AliasingMatrixPosition(index1, index2);
317 DCHECK_EQ(calculated_position, position);
318 return calculated_position;
319 }
320
321 // Compute if two locations may alias to each other.
322 bool ComputeMayAlias(size_t index1, size_t index2) const {
323 HeapLocation* loc1 = heap_locations_[index1];
324 HeapLocation* loc2 = heap_locations_[index2];
325 if (loc1->GetOffset() != loc2->GetOffset()) {
326 // Either two different instance fields, or one is an instance
327 // field and the other is an array element.
328 return false;
329 }
330 if (loc1->GetDeclaringClassDefIndex() != loc2->GetDeclaringClassDefIndex()) {
331 // Different types.
332 return false;
333 }
334 if (!CanReferencesAlias(loc1->GetReferenceInfo(), loc2->GetReferenceInfo())) {
335 return false;
336 }
337 if (loc1->IsArrayElement() && loc2->IsArrayElement()) {
338 HInstruction* array_index1 = loc1->GetIndex();
339 HInstruction* array_index2 = loc2->GetIndex();
340 DCHECK(array_index1 != nullptr);
341 DCHECK(array_index2 != nullptr);
342 if (array_index1->IsIntConstant() &&
343 array_index2->IsIntConstant() &&
344 array_index1->AsIntConstant()->GetValue() != array_index2->AsIntConstant()->GetValue()) {
345 // Different constant indices do not alias.
346 return false;
347 }
348 ReferenceInfo* ref_info = loc1->GetReferenceInfo();
349 ref_info->SetHasIndexAliasing(true);
350 }
351 return true;
352 }
353
354 ReferenceInfo* GetOrCreateReferenceInfo(HInstruction* instruction) {
355 ReferenceInfo* ref_info = FindReferenceInfoOf(instruction);
356 if (ref_info == nullptr) {
357 size_t pos = ref_info_array_.size();
358 ref_info = new (GetGraph()->GetArena()) ReferenceInfo(instruction, pos);
359 ref_info_array_.push_back(ref_info);
360 }
361 return ref_info;
362 }
363
364 void CreateReferenceInfoForReferenceType(HInstruction* instruction) {
365 if (instruction->GetType() != Primitive::kPrimNot) {
366 return;
367 }
368 DCHECK(FindReferenceInfoOf(instruction) == nullptr);
369 GetOrCreateReferenceInfo(instruction);
370 }
371
372 HeapLocation* GetOrCreateHeapLocation(HInstruction* ref,
373 size_t offset,
374 HInstruction* index,
375 int16_t declaring_class_def_index) {
376 HInstruction* original_ref = HuntForOriginalReference(ref);
377 ReferenceInfo* ref_info = GetOrCreateReferenceInfo(original_ref);
378 size_t heap_location_idx = FindHeapLocationIndex(
379 ref_info, offset, index, declaring_class_def_index);
380 if (heap_location_idx == kHeapLocationNotFound) {
381 HeapLocation* heap_loc = new (GetGraph()->GetArena())
382 HeapLocation(ref_info, offset, index, declaring_class_def_index);
383 heap_locations_.push_back(heap_loc);
384 return heap_loc;
385 }
386 return heap_locations_[heap_location_idx];
387 }
388
389 HeapLocation* VisitFieldAccess(HInstruction* ref, const FieldInfo& field_info) {
390 if (field_info.IsVolatile()) {
391 has_volatile_ = true;
392 }
393 const uint16_t declaring_class_def_index = field_info.GetDeclaringClassDefIndex();
394 const size_t offset = field_info.GetFieldOffset().SizeValue();
395 return GetOrCreateHeapLocation(ref, offset, nullptr, declaring_class_def_index);
396 }
397
398 void VisitArrayAccess(HInstruction* array, HInstruction* index) {
399 GetOrCreateHeapLocation(array, HeapLocation::kInvalidFieldOffset,
400 index, HeapLocation::kDeclaringClassDefIndexForArrays);
401 }
402
403 void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
404 VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
405 CreateReferenceInfoForReferenceType(instruction);
406 }
407
408 void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
409 HeapLocation* location = VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
410 has_heap_stores_ = true;
411 if (location->GetReferenceInfo()->IsSingleton()) {
412 // A singleton's location value may be killed by loop side effects if it's
413 // defined before that loop, and it's stored into inside that loop.
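// For example (an illustrative sketch; names are made up):
//   Foo f = new Foo();
//   for (...) { f.x = i; }  // the value of f.x seen at the loop header may be
//   use(f.x);               // overwritten by the store inside the loop.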
414 HLoopInformation* loop_info = instruction->GetBlock()->GetLoopInformation();
415 if (loop_info != nullptr) {
416 HInstruction* ref = location->GetReferenceInfo()->GetReference();
417 DCHECK(ref->IsNewInstance());
418 if (loop_info->IsDefinedOutOfTheLoop(ref)) {
419 // ref's location value may be killed by this loop's side effects.
420 location->SetValueKilledByLoopSideEffects(true);
421 } else {
422 // ref is defined inside this loop so this loop's side effects cannot
423 // kill its location value at the loop header since ref/its location doesn't
424 // exist yet at the loop header.
425 }
426 }
427 } else {
428 // For non-singletons, value_killed_by_loop_side_effects_ is initialized
429 // to true.
430 DCHECK_EQ(location->IsValueKilledByLoopSideEffects(), true);
431 }
432 }
433
434 void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
435 VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
436 CreateReferenceInfoForReferenceType(instruction);
437 }
438
439 void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
440 VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
441 has_heap_stores_ = true;
442 }
443
444 // We intentionally don't collect HUnresolvedInstanceField/HUnresolvedStaticField accesses
445 // since we cannot accurately track the fields.
446
447 void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
448 VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
449 CreateReferenceInfoForReferenceType(instruction);
450 }
451
452 void VisitArraySet(HArraySet* instruction) OVERRIDE {
453 VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
454 has_heap_stores_ = true;
455 }
456
457 void VisitNewInstance(HNewInstance* new_instance) OVERRIDE {
458 // Any references appearing in the ref_info_array_ so far cannot alias with new_instance.
459 CreateReferenceInfoForReferenceType(new_instance);
460 }
461
462 void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* instruction) OVERRIDE {
463 CreateReferenceInfoForReferenceType(instruction);
464 }
465
466 void VisitInvokeVirtual(HInvokeVirtual* instruction) OVERRIDE {
467 CreateReferenceInfoForReferenceType(instruction);
468 }
469
470 void VisitInvokeInterface(HInvokeInterface* instruction) OVERRIDE {
471 CreateReferenceInfoForReferenceType(instruction);
472 }
473
474 void VisitParameterValue(HParameterValue* instruction) OVERRIDE {
475 CreateReferenceInfoForReferenceType(instruction);
476 }
477
478 void VisitSelect(HSelect* instruction) OVERRIDE {
479 CreateReferenceInfoForReferenceType(instruction);
480 }
481
482 void VisitMonitorOperation(HMonitorOperation* monitor ATTRIBUTE_UNUSED) OVERRIDE {
483 has_monitor_operations_ = true;
484 }
485
486 ArenaVector<ReferenceInfo*> ref_info_array_; // All references used for heap accesses.
487 ArenaVector<HeapLocation*> heap_locations_; // All heap locations.
488 ArenaBitVector aliasing_matrix_; // aliasing info between each pair of locations.
489 bool has_heap_stores_; // If there are no heap stores, LSE acts as GVN with better
490 // alias analysis and won't be as effective.
491 bool has_volatile_; // If there are volatile field accesses.
492 bool has_monitor_operations_; // If there are monitor operations.
493
494 DISALLOW_COPY_AND_ASSIGN(HeapLocationCollector);
495 };
496
497 // An unknown heap value. Loads with such a value in the heap location cannot be eliminated.
498 // A heap location can be set to kUnknownHeapValue when:
499 // - initially, when nothing is known about its value yet.
500 // - killed due to aliasing, merging, invocation, or loop side effects.
501 static HInstruction* const kUnknownHeapValue =
502 reinterpret_cast<HInstruction*>(static_cast<uintptr_t>(-1));
503
504 // Default heap value after an allocation.
505 // A heap location can be set to that value right after an allocation.
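// For example (illustrative): a read of `f.x` right after `Foo f = new Foo()`,
// with no store in between, yields the type's default value (0, false or null).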
506 static HInstruction* const kDefaultHeapValue =
507 reinterpret_cast<HInstruction*>(static_cast<uintptr_t>(-2));
508
509 class LSEVisitor : public HGraphVisitor {
510 public:
511 LSEVisitor(HGraph* graph,
512 const HeapLocationCollector& heap_locations_collector,
513 const SideEffectsAnalysis& side_effects)
514 : HGraphVisitor(graph),
515 heap_location_collector_(heap_locations_collector),
516 side_effects_(side_effects),
517 heap_values_for_(graph->GetBlocks().size(),
518 ArenaVector<HInstruction*>(heap_locations_collector.
519 GetNumberOfHeapLocations(),
520 kUnknownHeapValue,
521 graph->GetArena()->Adapter(kArenaAllocLSE)),
522 graph->GetArena()->Adapter(kArenaAllocLSE)),
523 removed_loads_(graph->GetArena()->Adapter(kArenaAllocLSE)),
524 substitute_instructions_for_loads_(graph->GetArena()->Adapter(kArenaAllocLSE)),
525 possibly_removed_stores_(graph->GetArena()->Adapter(kArenaAllocLSE)),
526 singleton_new_instances_(graph->GetArena()->Adapter(kArenaAllocLSE)),
527 singleton_new_arrays_(graph->GetArena()->Adapter(kArenaAllocLSE)) {
528 }
529
530 void VisitBasicBlock(HBasicBlock* block) OVERRIDE {
531 // Populate the heap_values array for this block.
532 // TODO: try to reuse the heap_values array from one predecessor if possible.
533 if (block->IsLoopHeader()) {
534 HandleLoopSideEffects(block);
535 } else {
536 MergePredecessorValues(block);
537 }
538 HGraphVisitor::VisitBasicBlock(block);
539 }
540
541 // Remove recorded instructions that should be eliminated.
542 void RemoveInstructions() {
543 size_t size = removed_loads_.size();
544 DCHECK_EQ(size, substitute_instructions_for_loads_.size());
545 for (size_t i = 0; i < size; i++) {
546 HInstruction* load = removed_loads_[i];
547 DCHECK(load != nullptr);
548 DCHECK(load->IsInstanceFieldGet() ||
549 load->IsStaticFieldGet() ||
550 load->IsArrayGet());
551 HInstruction* substitute = substitute_instructions_for_loads_[i];
552 DCHECK(substitute != nullptr);
553 // Keep tracing the substitute chain until we reach one that's not removed.
554 HInstruction* sub_sub = FindSubstitute(substitute);
555 while (sub_sub != substitute) {
556 substitute = sub_sub;
557 sub_sub = FindSubstitute(substitute);
558 }
559 load->ReplaceWith(substitute);
560 load->GetBlock()->RemoveInstruction(load);
561 }
562
563 // At this point, stores in possibly_removed_stores_ can be safely removed.
564 for (HInstruction* store : possibly_removed_stores_) {
565 DCHECK(store->IsInstanceFieldSet() || store->IsStaticFieldSet() || store->IsArraySet());
566 store->GetBlock()->RemoveInstruction(store);
567 }
568
569 // Eliminate allocations that are not used.
570 for (HInstruction* new_instance : singleton_new_instances_) {
571 if (!new_instance->HasNonEnvironmentUses()) {
572 new_instance->RemoveEnvironmentUsers();
573 new_instance->GetBlock()->RemoveInstruction(new_instance);
574 }
575 }
576 for (HInstruction* new_array : singleton_new_arrays_) {
577 if (!new_array->HasNonEnvironmentUses()) {
578 new_array->RemoveEnvironmentUsers();
579 new_array->GetBlock()->RemoveInstruction(new_array);
580 }
581 }
582 }
583
584 private:
585 // If heap_values[index] is an instance field or array element store, we need
586 // to keep the store. This is necessary if a heap value is killed due to
587 // merging or loop side effects (which are essentially merging as well), since
588 // a later load from the location won't be eliminated.
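// For example (an illustrative sketch):
//   if (c) { f.x = 1; } else { f.x = 2; }
//   int y = f.x;  // the merged value is kUnknownHeapValue, so the load stays
//                 // and the store feeding it in each branch must be kept.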
589 void KeepIfIsStore(HInstruction* heap_value) {
590 if (heap_value == kDefaultHeapValue ||
591 heap_value == kUnknownHeapValue ||
592 !(heap_value->IsInstanceFieldSet() || heap_value->IsArraySet())) {
593 return;
594 }
595 auto idx = std::find(possibly_removed_stores_.begin(),
596 possibly_removed_stores_.end(), heap_value);
597 if (idx != possibly_removed_stores_.end()) {
598 // Make sure the store is kept.
599 possibly_removed_stores_.erase(idx);
600 }
601 }
602
603 void HandleLoopSideEffects(HBasicBlock* block) {
604 DCHECK(block->IsLoopHeader());
605 int block_id = block->GetBlockId();
606 ArenaVector<HInstruction*>& heap_values = heap_values_for_[block_id];
607
608 // Don't eliminate loads in irreducible loops. This is safe for singletons, because
609 // they are always used by the non-eliminated loop-phi.
610 if (block->GetLoopInformation()->IsIrreducible()) {
611 if (kIsDebugBuild) {
612 for (size_t i = 0; i < heap_values.size(); i++) {
613 DCHECK_EQ(heap_values[i], kUnknownHeapValue);
614 }
615 }
616 return;
617 }
618
619 HBasicBlock* pre_header = block->GetLoopInformation()->GetPreHeader();
620 ArenaVector<HInstruction*>& pre_header_heap_values =
621 heap_values_for_[pre_header->GetBlockId()];
622
623 // Inherit the values from pre-header.
624 for (size_t i = 0; i < heap_values.size(); i++) {
625 heap_values[i] = pre_header_heap_values[i];
626 }
627
628 // We do a single pass in reverse post order. For loops, use the side effects as a hint
629 // to see if the heap values should be killed.
630 if (side_effects_.GetLoopEffects(block).DoesAnyWrite()) {
631 for (size_t i = 0; i < heap_values.size(); i++) {
632 HeapLocation* location = heap_location_collector_.GetHeapLocation(i);
633 ReferenceInfo* ref_info = location->GetReferenceInfo();
634 if (ref_info->IsSingletonAndRemovable() &&
635 !location->IsValueKilledByLoopSideEffects()) {
636 // A removable singleton's field that's not stored into inside a loop is
637 // invariant throughout the loop. Nothing to do.
638 DCHECK(ref_info->IsSingletonAndRemovable());
639 } else {
640 // The heap value is killed by loop side effects (stored into directly or
641 // due to aliasing), or the heap value may be needed after method return
642 // or deoptimization.
643 KeepIfIsStore(pre_header_heap_values[i]);
644 heap_values[i] = kUnknownHeapValue;
645 }
646 }
647 }
648 }
649
650 void MergePredecessorValues(HBasicBlock* block) {
651 const ArenaVector<HBasicBlock*>& predecessors = block->GetPredecessors();
652 if (predecessors.size() == 0) {
653 return;
654 }
655
656 ArenaVector<HInstruction*>& heap_values = heap_values_for_[block->GetBlockId()];
657 for (size_t i = 0; i < heap_values.size(); i++) {
658 HInstruction* merged_value = nullptr;
659 // Whether merged_value is a result that's merged from all predecessors.
660 bool from_all_predecessors = true;
661 ReferenceInfo* ref_info = heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo();
662 HInstruction* singleton_ref = nullptr;
663 if (ref_info->IsSingleton()) {
664 // We do more analysis of liveness when merging heap values for such
665 // cases since stores into such references may potentially be eliminated.
666 singleton_ref = ref_info->GetReference();
667 }
668
669 for (HBasicBlock* predecessor : predecessors) {
670 HInstruction* pred_value = heap_values_for_[predecessor->GetBlockId()][i];
671 if ((singleton_ref != nullptr) &&
672 !singleton_ref->GetBlock()->Dominates(predecessor)) {
673 // singleton_ref is not live in this predecessor. Skip this predecessor since
674 // it does not really have the location.
675 DCHECK_EQ(pred_value, kUnknownHeapValue);
676 from_all_predecessors = false;
677 continue;
678 }
679 if (merged_value == nullptr) {
680 // First seen heap value.
681 merged_value = pred_value;
682 } else if (pred_value != merged_value) {
683 // There are conflicting values.
684 merged_value = kUnknownHeapValue;
685 break;
686 }
687 }
688
689 if (merged_value == kUnknownHeapValue || ref_info->IsSingletonAndNonRemovable()) {
690 // There are conflicting heap values from different predecessors,
691 // or the heap value may be needed after method return or deoptimization.
692 // Keep the last store in each predecessor since future loads cannot be eliminated.
693 for (HBasicBlock* predecessor : predecessors) {
694 ArenaVector<HInstruction*>& pred_values = heap_values_for_[predecessor->GetBlockId()];
695 KeepIfIsStore(pred_values[i]);
696 }
697 }
698
699 if ((merged_value == nullptr) || !from_all_predecessors) {
700 DCHECK(singleton_ref != nullptr);
701 DCHECK((singleton_ref->GetBlock() == block) ||
702 !singleton_ref->GetBlock()->Dominates(block));
703 // singleton_ref is not defined before block or defined only in some of its
704 // predecessors, so block doesn't really have the location at its entry.
705 heap_values[i] = kUnknownHeapValue;
706 } else {
707 heap_values[i] = merged_value;
708 }
709 }
710 }
711
712 // `instruction` is being removed. Try to see if the null check on it
713 // can be removed. This can happen if the same value is set in two branches
714 // but not in dominators. Such as:
715 // int[] a = foo();
716 // if () {
717 // a[0] = 2;
718 // } else {
719 // a[0] = 2;
720 // }
721 // // a[0] can now be replaced with constant 2, and the null check on it can be removed.
722 void TryRemovingNullCheck(HInstruction* instruction) {
723 HInstruction* prev = instruction->GetPrevious();
724 if ((prev != nullptr) && prev->IsNullCheck() && (prev == instruction->InputAt(0))) {
725 // Previous instruction is a null check for this instruction. Remove the null check.
726 prev->ReplaceWith(prev->InputAt(0));
727 prev->GetBlock()->RemoveInstruction(prev);
728 }
729 }
730
731 HInstruction* GetDefaultValue(Primitive::Type type) {
732 switch (type) {
733 case Primitive::kPrimNot:
734 return GetGraph()->GetNullConstant();
735 case Primitive::kPrimBoolean:
736 case Primitive::kPrimByte:
737 case Primitive::kPrimChar:
738 case Primitive::kPrimShort:
739 case Primitive::kPrimInt:
740 return GetGraph()->GetIntConstant(0);
741 case Primitive::kPrimLong:
742 return GetGraph()->GetLongConstant(0);
743 case Primitive::kPrimFloat:
744 return GetGraph()->GetFloatConstant(0);
745 case Primitive::kPrimDouble:
746 return GetGraph()->GetDoubleConstant(0);
747 default:
748 UNREACHABLE();
749 }
750 }
751
752 void VisitGetLocation(HInstruction* instruction,
753 HInstruction* ref,
754 size_t offset,
755 HInstruction* index,
756 int16_t declaring_class_def_index) {
757 HInstruction* original_ref = HuntForOriginalReference(ref);
758 ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(original_ref);
759 size_t idx = heap_location_collector_.FindHeapLocationIndex(
760 ref_info, offset, index, declaring_class_def_index);
761 DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
762 ArenaVector<HInstruction*>& heap_values =
763 heap_values_for_[instruction->GetBlock()->GetBlockId()];
764 HInstruction* heap_value = heap_values[idx];
765 if (heap_value == kDefaultHeapValue) {
766 HInstruction* constant = GetDefaultValue(instruction->GetType());
767 removed_loads_.push_back(instruction);
768 substitute_instructions_for_loads_.push_back(constant);
769 heap_values[idx] = constant;
770 return;
771 }
772 if (heap_value != kUnknownHeapValue) {
773 if (heap_value->IsInstanceFieldSet() || heap_value->IsArraySet()) {
774 HInstruction* store = heap_value;
775 // This load must be from a singleton since it's from the same
776 // field/element into which a "removed" store put the value. That store
777 // must be to a singleton's field/element.
778 DCHECK(ref_info->IsSingleton());
779 // Get the real heap value of the store.
780 heap_value = heap_value->IsInstanceFieldSet() ? store->InputAt(1) : store->InputAt(2);
781 }
782 }
783 if (heap_value == kUnknownHeapValue) {
784 // Load isn't eliminated. Put the load as the value into the HeapLocation.
785 // This acts like GVN but with better aliasing analysis.
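// E.g. (illustrative) for two loads of `obj.x` with no intervening store that
// may alias it, the second load is replaced by the value of the first one.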
786 heap_values[idx] = instruction;
787 } else {
788 if (Primitive::PrimitiveKind(heap_value->GetType())
789 != Primitive::PrimitiveKind(instruction->GetType())) {
790 // The only situation where the same heap location has different type is when
791 // we do an array get on an instruction that originates from the null constant
792 // (the null could be behind a field access, an array access, a null check or
793 // a bound type).
794 // In order to stay properly typed on primitive types, we do not eliminate
795 // the array gets.
796 if (kIsDebugBuild) {
797 DCHECK(heap_value->IsArrayGet()) << heap_value->DebugName();
798 DCHECK(instruction->IsArrayGet()) << instruction->DebugName();
799 }
800 return;
801 }
802 removed_loads_.push_back(instruction);
803 substitute_instructions_for_loads_.push_back(heap_value);
804 TryRemovingNullCheck(instruction);
805 }
806 }
807
808 bool Equal(HInstruction* heap_value, HInstruction* value) {
809 if (heap_value == value) {
810 return true;
811 }
812 if (heap_value == kDefaultHeapValue && GetDefaultValue(value->GetType()) == value) {
813 return true;
814 }
815 return false;
816 }
817
818 void VisitSetLocation(HInstruction* instruction,
819 HInstruction* ref,
820 size_t offset,
821 HInstruction* index,
822 int16_t declaring_class_def_index,
823 HInstruction* value) {
824 HInstruction* original_ref = HuntForOriginalReference(ref);
825 ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(original_ref);
826 size_t idx = heap_location_collector_.FindHeapLocationIndex(
827 ref_info, offset, index, declaring_class_def_index);
828 DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
829 ArenaVector<HInstruction*>& heap_values =
830 heap_values_for_[instruction->GetBlock()->GetBlockId()];
831 HInstruction* heap_value = heap_values[idx];
832 bool same_value = false;
833 bool possibly_redundant = false;
834 if (Equal(heap_value, value)) {
835 // Store into the heap location with the same value.
836 same_value = true;
837 } else if (index != nullptr && ref_info->HasIndexAliasing()) {
838 // For array element, don't eliminate stores if the index can be aliased.
839 } else if (ref_info->IsSingleton()) {
840 // Store into a field of a singleton. The value cannot be killed due to
841 // aliasing/invocation. It can be redundant since future loads can
842 // directly get the value set by this instruction. The value can still be killed due to
843 // merging or loop side effects. Stores whose values are killed due to merging/loop side
844 // effects later will be removed from possibly_removed_stores_ when that is detected.
845 // Stores whose values may be needed after method return or deoptimization
846 // are also removed from possibly_removed_stores_ when that is detected.
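// For example (an illustrative sketch; names are made up):
//   Foo f = new Foo();  // singleton
//   f.x = 1;            // possibly redundant store
//   int y = f.x;        // the load is replaced with 1; if nothing else needs
//                       // f.x, the store itself can be removed later.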
847 possibly_redundant = true;
848 HNewInstance* new_instance = ref_info->GetReference()->AsNewInstance();
849 if (new_instance != nullptr && new_instance->IsFinalizable()) {
850 // Finalizable objects escape globally. Need to keep the store.
851 possibly_redundant = false;
852 } else {
853 HLoopInformation* loop_info = instruction->GetBlock()->GetLoopInformation();
854 if (loop_info != nullptr) {
855 // `instruction` is a store inside the loop, so the loop must do writes.
856 DCHECK(side_effects_.GetLoopEffects(loop_info->GetHeader()).DoesAnyWrite());
857
858 if (loop_info->IsDefinedOutOfTheLoop(original_ref)) {
859 DCHECK(original_ref->GetBlock()->Dominates(loop_info->GetPreHeader()));
860 // Keep the store since its value may be needed at the loop header.
861 possibly_redundant = false;
862 } else {
863 // The singleton is created inside the loop. Value stored to it isn't needed at
864 // the loop header. This is true for outer loops also.
865 }
866 }
867 }
868 }
869 if (same_value || possibly_redundant) {
870 possibly_removed_stores_.push_back(instruction);
871 }
872
873 if (!same_value) {
874 if (possibly_redundant) {
875 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsArraySet());
876 // Put the store as the heap value. If the value is loaded from heap
877 // by a load later, this store isn't really redundant.
878 heap_values[idx] = instruction;
879 } else {
880 heap_values[idx] = value;
881 }
882 }
883 // This store may kill values in other heap locations due to aliasing.
884 for (size_t i = 0; i < heap_values.size(); i++) {
885 if (i == idx) {
886 continue;
887 }
888 if (heap_values[i] == value) {
889 // Same value should be kept even if aliasing happens.
890 continue;
891 }
892 if (heap_values[i] == kUnknownHeapValue) {
893 // Value is already unknown, no need for aliasing check.
894 continue;
895 }
896 if (heap_location_collector_.MayAlias(i, idx)) {
897 // Kill heap locations that may alias.
898 heap_values[i] = kUnknownHeapValue;
899 }
900 }
901 }
902
903 void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
904 HInstruction* obj = instruction->InputAt(0);
905 size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
906 int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
907 VisitGetLocation(instruction, obj, offset, nullptr, declaring_class_def_index);
908 }
909
910 void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
911 HInstruction* obj = instruction->InputAt(0);
912 size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
913 int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
914 HInstruction* value = instruction->InputAt(1);
915 VisitSetLocation(instruction, obj, offset, nullptr, declaring_class_def_index, value);
916 }
917
918 void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
919 HInstruction* cls = instruction->InputAt(0);
920 size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
921 int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
922 VisitGetLocation(instruction, cls, offset, nullptr, declaring_class_def_index);
923 }
924
925 void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
926 HInstruction* cls = instruction->InputAt(0);
927 size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
928 int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
929 HInstruction* value = instruction->InputAt(1);
930 VisitSetLocation(instruction, cls, offset, nullptr, declaring_class_def_index, value);
931 }
932
933 void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
934 HInstruction* array = instruction->InputAt(0);
935 HInstruction* index = instruction->InputAt(1);
936 VisitGetLocation(instruction,
937 array,
938 HeapLocation::kInvalidFieldOffset,
939 index,
940 HeapLocation::kDeclaringClassDefIndexForArrays);
941 }
942
943 void VisitArraySet(HArraySet* instruction) OVERRIDE {
944 HInstruction* array = instruction->InputAt(0);
945 HInstruction* index = instruction->InputAt(1);
946 HInstruction* value = instruction->InputAt(2);
947 VisitSetLocation(instruction,
948 array,
949 HeapLocation::kInvalidFieldOffset,
950 index,
951 HeapLocation::kDeclaringClassDefIndexForArrays,
952 value);
953 }
954
955 void VisitDeoptimize(HDeoptimize* instruction) {
956 const ArenaVector<HInstruction*>& heap_values =
957 heap_values_for_[instruction->GetBlock()->GetBlockId()];
958 for (HInstruction* heap_value : heap_values) {
959 // Filter out fake instructions before checking instruction kind below.
960 if (heap_value == kUnknownHeapValue || heap_value == kDefaultHeapValue) {
961 continue;
962 }
963 // A store is kept as the heap value for possibly removed stores.
964 if (heap_value->IsInstanceFieldSet() || heap_value->IsArraySet()) {
965 // Check whether the reference for a store is used by an environment local of
966 // HDeoptimize.
967 HInstruction* reference = heap_value->InputAt(0);
968 DCHECK(heap_location_collector_.FindReferenceInfoOf(reference)->IsSingleton());
969 for (const HUseListNode<HEnvironment*>& use : reference->GetEnvUses()) {
970 HEnvironment* user = use.GetUser();
971 if (user->GetHolder() == instruction) {
972 // The singleton for the store is visible at this deoptimization
973 // point. Need to keep the store so that the heap value is
974 // seen by the interpreter.
975 KeepIfIsStore(heap_value);
976 }
977 }
978 }
979 }
980 }
981
982 void HandleInvoke(HInstruction* invoke) {
983 ArenaVector<HInstruction*>& heap_values =
984 heap_values_for_[invoke->GetBlock()->GetBlockId()];
985 for (size_t i = 0; i < heap_values.size(); i++) {
986 ReferenceInfo* ref_info = heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo();
987 if (ref_info->IsSingleton()) {
988 // Singleton references cannot be seen by the callee.
989 } else {
990 heap_values[i] = kUnknownHeapValue;
991 }
992 }
993 }
994
995 void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE {
996 HandleInvoke(invoke);
997 }
998
999 void VisitInvokeVirtual(HInvokeVirtual* invoke) OVERRIDE {
1000 HandleInvoke(invoke);
1001 }
1002
1003 void VisitInvokeInterface(HInvokeInterface* invoke) OVERRIDE {
1004 HandleInvoke(invoke);
1005 }
1006
1007 void VisitInvokeUnresolved(HInvokeUnresolved* invoke) OVERRIDE {
1008 HandleInvoke(invoke);
1009 }
1010
1011 void VisitInvokePolymorphic(HInvokePolymorphic* invoke) OVERRIDE {
1012 HandleInvoke(invoke);
1013 }
1014
1015 void VisitClinitCheck(HClinitCheck* clinit) OVERRIDE {
1016 HandleInvoke(clinit);
1017 }
1018
1019 void VisitUnresolvedInstanceFieldGet(HUnresolvedInstanceFieldGet* instruction) OVERRIDE {
1020 // Conservatively treat it as an invocation.
1021 HandleInvoke(instruction);
1022 }
1023
1024 void VisitUnresolvedInstanceFieldSet(HUnresolvedInstanceFieldSet* instruction) OVERRIDE {
1025 // Conservatively treat it as an invocation.
1026 HandleInvoke(instruction);
1027 }
1028
1029 void VisitUnresolvedStaticFieldGet(HUnresolvedStaticFieldGet* instruction) OVERRIDE {
1030 // Conservatively treat it as an invocation.
1031 HandleInvoke(instruction);
1032 }
1033
1034 void VisitUnresolvedStaticFieldSet(HUnresolvedStaticFieldSet* instruction) OVERRIDE {
1035 // Conservatively treat it as an invocation.
1036 HandleInvoke(instruction);
1037 }
1038
1039 void VisitNewInstance(HNewInstance* new_instance) OVERRIDE {
1040 ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(new_instance);
1041 if (ref_info == nullptr) {
1042 // new_instance isn't used for field accesses. No need to process it.
1043 return;
1044 }
1045 if (ref_info->IsSingletonAndRemovable() &&
1046 !new_instance->IsFinalizable() &&
1047 !new_instance->NeedsChecks()) {
1048 singleton_new_instances_.push_back(new_instance);
1049 }
1050 ArenaVector<HInstruction*>& heap_values =
1051 heap_values_for_[new_instance->GetBlock()->GetBlockId()];
1052 for (size_t i = 0; i < heap_values.size(); i++) {
1053 HInstruction* ref =
1054 heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo()->GetReference();
1055 size_t offset = heap_location_collector_.GetHeapLocation(i)->GetOffset();
1056 if (ref == new_instance && offset >= mirror::kObjectHeaderSize) {
1057 // Instance fields except the header fields are set to default heap values.
1058 heap_values[i] = kDefaultHeapValue;
1059 }
1060 }
1061 }
1062
1063 void VisitNewArray(HNewArray* new_array) OVERRIDE {
1064 ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(new_array);
1065 if (ref_info == nullptr) {
1066 // new_array isn't used for array accesses. No need to process it.
1067 return;
1068 }
1069 if (ref_info->IsSingletonAndRemovable()) {
1070 singleton_new_arrays_.push_back(new_array);
1071 }
1072 ArenaVector<HInstruction*>& heap_values =
1073 heap_values_for_[new_array->GetBlock()->GetBlockId()];
1074 for (size_t i = 0; i < heap_values.size(); i++) {
1075 HeapLocation* location = heap_location_collector_.GetHeapLocation(i);
1076 HInstruction* ref = location->GetReferenceInfo()->GetReference();
1077 if (ref == new_array && location->GetIndex() != nullptr) {
1078 // Array elements are set to default heap values.
1079 heap_values[i] = kDefaultHeapValue;
1080 }
1081 }
1082 }
1083
1084 // Find an instruction's substitute if it should be removed.
1085 // Return the same instruction if it should not be removed.
1086 HInstruction* FindSubstitute(HInstruction* instruction) {
1087 size_t size = removed_loads_.size();
1088 for (size_t i = 0; i < size; i++) {
1089 if (removed_loads_[i] == instruction) {
1090 return substitute_instructions_for_loads_[i];
1091 }
1092 }
1093 return instruction;
1094 }
1095
1096 const HeapLocationCollector& heap_location_collector_;
1097 const SideEffectsAnalysis& side_effects_;
1098
1099 // One array of heap values for each block.
1100 ArenaVector<ArenaVector<HInstruction*>> heap_values_for_;
1101
1102 // We record the instructions that should be eliminated but may be
1103 // used by heap locations. They'll be removed in the end.
1104 ArenaVector<HInstruction*> removed_loads_;
1105 ArenaVector<HInstruction*> substitute_instructions_for_loads_;
1106
1107 // Stores in this list may be removed from the list later when it's
1108 // found that the store cannot be eliminated.
1109 ArenaVector<HInstruction*> possibly_removed_stores_;
1110
1111 ArenaVector<HInstruction*> singleton_new_instances_;
1112 ArenaVector<HInstruction*> singleton_new_arrays_;
1113
1114 DISALLOW_COPY_AND_ASSIGN(LSEVisitor);
1115 };
1116
1117 void LoadStoreElimination::Run() {
1118 if (graph_->IsDebuggable() || graph_->HasTryCatch()) {
1119 // Debugger may set heap values or trigger deoptimization of callers.
1120 // Try/catch support not implemented yet.
1121 // Skip this optimization.
1122 return;
1123 }
1124 HeapLocationCollector heap_location_collector(graph_);
1125 for (HBasicBlock* block : graph_->GetReversePostOrder()) {
1126 heap_location_collector.VisitBasicBlock(block);
1127 }
1128 if (heap_location_collector.GetNumberOfHeapLocations() > kMaxNumberOfHeapLocations) {
1129 // Bail out if there are too many heap locations to deal with.
1130 return;
1131 }
1132 if (!heap_location_collector.HasHeapStores()) {
1133 // Without heap stores, this pass would act mostly as GVN on heap accesses.
1134 return;
1135 }
1136 if (heap_location_collector.HasVolatile() || heap_location_collector.HasMonitorOps()) {
1137 // Don't do load/store elimination if the method has volatile field accesses or
1138 // monitor operations, for now.
1139 // TODO: do it right.
1140 return;
1141 }
1142 heap_location_collector.BuildAliasingMatrix();
1143 LSEVisitor lse_visitor(graph_, heap_location_collector, side_effects_);
1144 for (HBasicBlock* block : graph_->GetReversePostOrder()) {
1145 lse_visitor.VisitBasicBlock(block);
1146 }
1147 lse_visitor.RemoveInstructions();
1148 }
1149
1150 } // namespace art
1151