/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_
#define ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_

#include "escape.h"
#include "nodes.h"
#include "optimization.h"

namespace art {

// A ReferenceInfo contains additional info about a reference such as
// whether it's a singleton, returned, etc.
class ReferenceInfo : public ArenaObject<kArenaAllocLSA> {
 public:
  ReferenceInfo(HInstruction* reference, size_t pos)
      : reference_(reference),
        position_(pos),
        is_singleton_(true),
        is_singleton_and_not_returned_(true),
        is_singleton_and_not_deopt_visible_(true) {
    CalculateEscape(reference_,
                    nullptr,
                    &is_singleton_,
                    &is_singleton_and_not_returned_,
                    &is_singleton_and_not_deopt_visible_);
  }

  HInstruction* GetReference() const {
    return reference_;
  }

  size_t GetPosition() const {
    return position_;
  }

  // Returns true if reference_ is the only name that can refer to its value during
  // the lifetime of the method, i.e. it is guaranteed not to have any alias in
  // the method (including its callees).
  bool IsSingleton() const {
    return is_singleton_;
  }

  // Returns true if reference_ is a singleton and is neither returned to the caller
  // nor used as an environment local of an HDeoptimize instruction.
  // The allocation and the stores into reference_ may be eliminated in such cases.
  bool IsSingletonAndRemovable() const {
    return is_singleton_and_not_returned_ && is_singleton_and_not_deopt_visible_;
  }

  // Returns true if reference_ is a singleton and is either returned to the caller
  // or used as an environment local of an HDeoptimize instruction.
  bool IsSingletonAndNonRemovable() const {
    return is_singleton_ &&
           (!is_singleton_and_not_returned_ || !is_singleton_and_not_deopt_visible_);
  }

 private:
  HInstruction* const reference_;
  const size_t position_;  // position in HeapLocationCollector's ref_info_array_.

  // Can only be referred to by a single name in the method.
  bool is_singleton_;
  // Is singleton and not returned to caller.
  bool is_singleton_and_not_returned_;
  // Is singleton and not used as an environment local of HDeoptimize.
  bool is_singleton_and_not_deopt_visible_;

  DISALLOW_COPY_AND_ASSIGN(ReferenceInfo);
};

// A heap location is a reference-offset/index pair that a value can be loaded from
// or stored to.
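// For example, `obj.field_a` and `obj.field_b` are two different heap locations
// (same reference, different field offsets), while `a[i]` and `a[j]` are collected
// as two different heap locations (same reference, different index instructions)
// which the aliasing analysis below may later determine to alias.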
class HeapLocation : public ArenaObject<kArenaAllocLSA> {
 public:
  static constexpr size_t kInvalidFieldOffset = -1;
  // Default value for heap locations which are not vector data.
  static constexpr size_t kScalar = 1;
  // TODO: more fine-grained array types.
  static constexpr int16_t kDeclaringClassDefIndexForArrays = -1;
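
  // A heap location is either a field access (valid `offset`, null `index`) or an
  // array/vector access (`offset` == kInvalidFieldOffset, non-null `index`), as
  // asserted by the constructor below.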
  HeapLocation(ReferenceInfo* ref_info,
               DataType::Type type,
               size_t offset,
               HInstruction* index,
               size_t vector_length,
               int16_t declaring_class_def_index)
      : ref_info_(ref_info),
        type_(DataType::ToSigned(type)),
        offset_(offset),
        index_(index),
        vector_length_(vector_length),
        declaring_class_def_index_(declaring_class_def_index),
        value_killed_by_loop_side_effects_(true),
        has_aliased_locations_(false) {
    DCHECK(ref_info != nullptr);
    DCHECK((offset == kInvalidFieldOffset && index != nullptr) ||
           (offset != kInvalidFieldOffset && index == nullptr));
    if (ref_info->IsSingleton() && !IsArray()) {
      // Assume this location's value cannot be killed by loop side effects
      // until proven otherwise.
      value_killed_by_loop_side_effects_ = false;
    }
  }

  ReferenceInfo* GetReferenceInfo() const { return ref_info_; }
  DataType::Type GetType() const { return type_; }
  size_t GetOffset() const { return offset_; }
  HInstruction* GetIndex() const { return index_; }
  size_t GetVectorLength() const { return vector_length_; }

  // Returns the dex def index of the declaring class.
  // It's kDeclaringClassDefIndexForArrays for an array element.
  int16_t GetDeclaringClassDefIndex() const {
    return declaring_class_def_index_;
  }

  bool IsArray() const {
    return index_ != nullptr;
  }

  bool IsValueKilledByLoopSideEffects() const {
    return value_killed_by_loop_side_effects_;
  }

  void SetValueKilledByLoopSideEffects(bool val) {
    value_killed_by_loop_side_effects_ = val;
  }

  bool HasAliasedLocations() const {
    return has_aliased_locations_;
  }

  void SetHasAliasedLocations(bool val) {
    has_aliased_locations_ = val;
  }

 private:
  // Reference for instance/static field, array element or vector data.
  ReferenceInfo* const ref_info_;
  // Type of data residing at HeapLocation (always signed for integral
  // data since e.g. a[i] and a[i] & 0xff are represented by differently
  // signed types; char vs short are disambiguated through the reference).
  const DataType::Type type_;
  // Offset of static/instance field.
  // Invalid when this HeapLocation is not a field access.
  const size_t offset_;
  // Index of an array element or starting index of vector data.
  // Invalid when this HeapLocation is not an array access.
  HInstruction* const index_;
  // Vector length of vector data.
  // When this HeapLocation is not vector data, its value is kScalar.
  const size_t vector_length_;
  // Dex def index of the declaring class.
  // Invalid when this HeapLocation is not a field access.
  const int16_t declaring_class_def_index_;

  // Value of this location may be killed by loop side effects
  // because this location is stored into inside a loop.
  // This gives better info on whether a singleton's location
  // value may be killed by loop side effects.
  bool value_killed_by_loop_side_effects_;

  // Whether this location has aliased heap locations in the method, either
  // because the reference is aliased or because the array element is aliased
  // via different index names.
  bool has_aliased_locations_;

  DISALLOW_COPY_AND_ASSIGN(HeapLocation);
};

// A HeapLocationCollector collects all relevant heap locations and keeps
// an aliasing matrix for all locations.
class HeapLocationCollector : public HGraphVisitor {
 public:
  static constexpr size_t kHeapLocationNotFound = -1;
  // Start with a single uint32_t word. That's enough bits for a pair-wise
  // aliasing matrix of 8 heap locations.
  static constexpr uint32_t kInitialAliasingMatrixBitVectorSize = 32;

  explicit HeapLocationCollector(HGraph* graph)
      : HGraphVisitor(graph),
        ref_info_array_(graph->GetAllocator()->Adapter(kArenaAllocLSA)),
        heap_locations_(graph->GetAllocator()->Adapter(kArenaAllocLSA)),
        aliasing_matrix_(graph->GetAllocator(),
                         kInitialAliasingMatrixBitVectorSize,
                         true,
                         kArenaAllocLSA),
        has_heap_stores_(false),
        has_volatile_(false),
        has_monitor_operations_(false) {}

  void CleanUp() {
    heap_locations_.clear();
    ref_info_array_.clear();
  }

  size_t GetNumberOfHeapLocations() const {
    return heap_locations_.size();
  }

  HeapLocation* GetHeapLocation(size_t index) const {
    return heap_locations_[index];
  }

  HInstruction* HuntForOriginalReference(HInstruction* ref) const {
    // An original reference can be transformed by instructions like:
    //   i0 NewArray
    //   i1 HInstruction(i0)  <-- NullCheck, BoundType, IntermediateAddress.
    //   i2 ArrayGet(i1, index)
    DCHECK(ref != nullptr);
    while (ref->IsNullCheck() || ref->IsBoundType() || ref->IsIntermediateAddress()) {
      ref = ref->InputAt(0);
    }
    return ref;
  }
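
  // Returns the ReferenceInfo for `ref` if one has been created by this collector,
  // or nullptr otherwise.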
  ReferenceInfo* FindReferenceInfoOf(HInstruction* ref) const {
    for (size_t i = 0; i < ref_info_array_.size(); i++) {
      ReferenceInfo* ref_info = ref_info_array_[i];
      if (ref_info->GetReference() == ref) {
        DCHECK_EQ(i, ref_info->GetPosition());
        return ref_info;
      }
    }
    return nullptr;
  }

  size_t GetFieldHeapLocation(HInstruction* object, const FieldInfo* field) const {
    DCHECK(object != nullptr);
    DCHECK(field != nullptr);
    return FindHeapLocationIndex(FindReferenceInfoOf(HuntForOriginalReference(object)),
                                 field->GetFieldType(),
                                 field->GetFieldOffset().SizeValue(),
                                 nullptr,
                                 HeapLocation::kScalar,
                                 field->GetDeclaringClassDefIndex());
  }
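
  // Returns the index of the heap location accessed by an ArrayGet/ArraySet/
  // VecLoad/VecStore instruction, or kHeapLocationNotFound if the location has
  // not been collected.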
  size_t GetArrayHeapLocation(HInstruction* instruction) const {
    DCHECK(instruction != nullptr);
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetType();
    size_t vector_length = HeapLocation::kScalar;
    if (instruction->IsArraySet()) {
      type = instruction->AsArraySet()->GetComponentType();
    } else if (instruction->IsVecStore() ||
               instruction->IsVecLoad()) {
      HVecOperation* vec_op = instruction->AsVecOperation();
      type = vec_op->GetPackedType();
      vector_length = vec_op->GetVectorLength();
    } else {
      DCHECK(instruction->IsArrayGet());
    }
    return FindHeapLocationIndex(FindReferenceInfoOf(HuntForOriginalReference(array)),
                                 type,
                                 HeapLocation::kInvalidFieldOffset,
                                 index,
                                 vector_length,
                                 HeapLocation::kDeclaringClassDefIndexForArrays);
  }

  bool HasHeapStores() const {
    return has_heap_stores_;
  }

  bool HasVolatile() const {
    return has_volatile_;
  }

  bool HasMonitorOps() const {
    return has_monitor_operations_;
  }

  // Finds and returns the heap location index in heap_locations_.
  // NOTE: When heap locations are created, potentially aliasing/overlapping
  // accesses are given different indexes. This find function also
  // doesn't take aliasing/overlapping into account. For example,
  // this function returns three different indexes for:
  //   - ref_info=array, index=i, vector_length=kScalar;
  //   - ref_info=array, index=i, vector_length=2;
  //   - ref_info=array, index=i, vector_length=4;
  // In later analysis, ComputeMayAlias() and MayAlias() determine whether
  // these indexes alias.
  size_t FindHeapLocationIndex(ReferenceInfo* ref_info,
                               DataType::Type type,
                               size_t offset,
                               HInstruction* index,
                               size_t vector_length,
                               int16_t declaring_class_def_index) const {
    DataType::Type lookup_type = DataType::ToSigned(type);
    for (size_t i = 0; i < heap_locations_.size(); i++) {
      HeapLocation* loc = heap_locations_[i];
      if (loc->GetReferenceInfo() == ref_info &&
          loc->GetType() == lookup_type &&
          loc->GetOffset() == offset &&
          loc->GetIndex() == index &&
          loc->GetVectorLength() == vector_length &&
          loc->GetDeclaringClassDefIndex() == declaring_class_def_index) {
        return i;
      }
    }
    return kHeapLocationNotFound;
  }

  // Returns true if heap_locations_[index1] and heap_locations_[index2] may alias.
  bool MayAlias(size_t index1, size_t index2) const {
    if (index1 < index2) {
      return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index1, index2));
    } else if (index1 > index2) {
      return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index2, index1));
    } else {
      DCHECK(false) << "index1 and index2 are expected to be different";
      return true;
    }
  }

  void BuildAliasingMatrix() {
    const size_t number_of_locations = heap_locations_.size();
    if (number_of_locations == 0) {
      return;
    }
    size_t pos = 0;
    // Compute aliasing info between every pair of different heap locations.
    // Save the result in a matrix represented as a BitVector.
    for (size_t i = 0; i < number_of_locations - 1; i++) {
      for (size_t j = i + 1; j < number_of_locations; j++) {
        if (ComputeMayAlias(i, j)) {
          aliasing_matrix_.SetBit(CheckedAliasingMatrixPosition(i, j, pos));
        }
        pos++;
      }
    }
  }

 private:
  // An allocation cannot alias with a name which already exists at the point
  // of the allocation, such as a parameter or a load happening before the allocation.
  bool MayAliasWithPreexistenceChecking(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
    if (ref_info1->GetReference()->IsNewInstance() || ref_info1->GetReference()->IsNewArray()) {
      // Any reference that can alias with the allocation must appear after it in the block/in
      // the block's successors. In reverse post order, those instructions will be visited after
      // the allocation.
      return ref_info2->GetPosition() >= ref_info1->GetPosition();
    }
    return true;
  }

  bool CanReferencesAlias(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
    if (ref_info1 == ref_info2) {
      return true;
    } else if (ref_info1->IsSingleton()) {
      return false;
    } else if (ref_info2->IsSingleton()) {
      return false;
    } else if (!MayAliasWithPreexistenceChecking(ref_info1, ref_info2) ||
               !MayAliasWithPreexistenceChecking(ref_info2, ref_info1)) {
      return false;
    }
    return true;
  }

  bool CanArrayElementsAlias(const HInstruction* idx1,
                             const size_t vector_length1,
                             const HInstruction* idx2,
                             const size_t vector_length2) const;

  // `index1` and `index2` are indices in the array of collected heap locations.
  // Returns the position in the bit vector that tracks whether the two heap
  // locations may alias.
  size_t AliasingMatrixPosition(size_t index1, size_t index2) const {
    DCHECK(index2 > index1);
    const size_t number_of_locations = heap_locations_.size();
    // It's (num_of_locations - 1) + ... + (num_of_locations - index1) + (index2 - index1 - 1).
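    // For example, with 4 heap locations the pairs map to consecutive bit
    // positions: (0,1)->0, (0,2)->1, (0,3)->2, (1,2)->3, (1,3)->4, (2,3)->5,
    // matching the order in which BuildAliasingMatrix() visits the pairs.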
    return (number_of_locations * index1 - (1 + index1) * index1 / 2 + (index2 - index1 - 1));
  }

  // An additional position is passed in to make sure the calculated position is correct.
  size_t CheckedAliasingMatrixPosition(size_t index1, size_t index2, size_t position) {
    size_t calculated_position = AliasingMatrixPosition(index1, index2);
    DCHECK_EQ(calculated_position, position);
    return calculated_position;
  }

  // Computes whether two locations may alias with each other.
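  // For example, o1.f and o2.f (same offset and declaring class) may alias unless
  // o1 or o2 is known to be a singleton, or one of them is a new-instance/new-array
  // that the other reference is known to pre-exist; a[i] and a[j] may alias unless
  // CanArrayElementsAlias() determines that the accessed elements cannot overlap.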
  bool ComputeMayAlias(size_t index1, size_t index2) const {
    DCHECK_NE(index1, index2);
    HeapLocation* loc1 = heap_locations_[index1];
    HeapLocation* loc2 = heap_locations_[index2];
    if (loc1->GetOffset() != loc2->GetOffset()) {
      // Either two different instance fields, or one is an instance
      // field and the other is array data.
      return false;
    }
    if (loc1->GetDeclaringClassDefIndex() != loc2->GetDeclaringClassDefIndex()) {
      // Different types.
      return false;
    }
    if (!CanReferencesAlias(loc1->GetReferenceInfo(), loc2->GetReferenceInfo())) {
      return false;
    }
    if (loc1->IsArray() && loc2->IsArray()) {
      HInstruction* idx1 = loc1->GetIndex();
      HInstruction* idx2 = loc2->GetIndex();
      size_t vector_length1 = loc1->GetVectorLength();
      size_t vector_length2 = loc2->GetVectorLength();
      if (!CanArrayElementsAlias(idx1, vector_length1, idx2, vector_length2)) {
        return false;
      }
    }
    loc1->SetHasAliasedLocations(true);
    loc2->SetHasAliasedLocations(true);
    return true;
  }

  ReferenceInfo* GetOrCreateReferenceInfo(HInstruction* instruction) {
    ReferenceInfo* ref_info = FindReferenceInfoOf(instruction);
    if (ref_info == nullptr) {
      size_t pos = ref_info_array_.size();
      ref_info = new (GetGraph()->GetAllocator()) ReferenceInfo(instruction, pos);
      ref_info_array_.push_back(ref_info);
    }
    return ref_info;
  }

  void CreateReferenceInfoForReferenceType(HInstruction* instruction) {
    if (instruction->GetType() != DataType::Type::kReference) {
      return;
    }
    DCHECK(FindReferenceInfoOf(instruction) == nullptr);
    GetOrCreateReferenceInfo(instruction);
  }

  HeapLocation* GetOrCreateHeapLocation(HInstruction* ref,
                                        DataType::Type type,
                                        size_t offset,
                                        HInstruction* index,
                                        size_t vector_length,
                                        int16_t declaring_class_def_index) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = GetOrCreateReferenceInfo(original_ref);
    size_t heap_location_idx = FindHeapLocationIndex(
        ref_info, type, offset, index, vector_length, declaring_class_def_index);
    if (heap_location_idx == kHeapLocationNotFound) {
      HeapLocation* heap_loc = new (GetGraph()->GetAllocator())
          HeapLocation(ref_info, type, offset, index, vector_length, declaring_class_def_index);
      heap_locations_.push_back(heap_loc);
      return heap_loc;
    }
    return heap_locations_[heap_location_idx];
  }

  HeapLocation* VisitFieldAccess(HInstruction* ref, const FieldInfo& field_info) {
    if (field_info.IsVolatile()) {
      has_volatile_ = true;
    }
    DataType::Type type = field_info.GetFieldType();
    const uint16_t declaring_class_def_index = field_info.GetDeclaringClassDefIndex();
    const size_t offset = field_info.GetFieldOffset().SizeValue();
    return GetOrCreateHeapLocation(ref,
                                   type,
                                   offset,
                                   nullptr,
                                   HeapLocation::kScalar,
                                   declaring_class_def_index);
  }

  void VisitArrayAccess(HInstruction* array,
                        HInstruction* index,
                        DataType::Type type,
                        size_t vector_length) {
    GetOrCreateHeapLocation(array,
                            type,
                            HeapLocation::kInvalidFieldOffset,
                            index,
                            vector_length,
                            HeapLocation::kDeclaringClassDefIndexForArrays);
  }

  void VisitInstanceFieldGet(HInstanceFieldGet* instruction) override {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitInstanceFieldSet(HInstanceFieldSet* instruction) override {
    HeapLocation* location = VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    has_heap_stores_ = true;
    if (location->GetReferenceInfo()->IsSingleton()) {
      // A singleton's location value may be killed by loop side effects if it's
      // defined before that loop and stored into inside that loop.
      HLoopInformation* loop_info = instruction->GetBlock()->GetLoopInformation();
      if (loop_info != nullptr) {
        HInstruction* ref = location->GetReferenceInfo()->GetReference();
        DCHECK(ref->IsNewInstance());
        if (loop_info->IsDefinedOutOfTheLoop(ref)) {
          // ref's location value may be killed by this loop's side effects.
          location->SetValueKilledByLoopSideEffects(true);
        } else {
          // ref is defined inside this loop, so this loop's side effects cannot
          // kill its location value at the loop header, since ref and its location
          // don't exist yet at the loop header.
        }
      }
    } else {
      // For non-singletons, value_killed_by_loop_side_effects_ is initialized to
      // true.
      DCHECK_EQ(location->IsValueKilledByLoopSideEffects(), true);
    }
  }

  void VisitStaticFieldGet(HStaticFieldGet* instruction) override {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitStaticFieldSet(HStaticFieldSet* instruction) override {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    has_heap_stores_ = true;
  }

  // We intentionally don't collect HUnresolvedInstanceField/HUnresolvedStaticField accesses
  // since we cannot accurately track the fields.

  void VisitArrayGet(HArrayGet* instruction) override {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetType();
    VisitArrayAccess(array, index, type, HeapLocation::kScalar);
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitArraySet(HArraySet* instruction) override {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetComponentType();
    VisitArrayAccess(array, index, type, HeapLocation::kScalar);
    has_heap_stores_ = true;
  }

  void VisitVecLoad(HVecLoad* instruction) override {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetPackedType();
    VisitArrayAccess(array, index, type, instruction->GetVectorLength());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitVecStore(HVecStore* instruction) override {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetPackedType();
    VisitArrayAccess(array, index, type, instruction->GetVectorLength());
    has_heap_stores_ = true;
  }

  void VisitInstruction(HInstruction* instruction) override {
    // Any new-instance or new-array cannot alias with references that
    // pre-exist the new-instance/new-array. We append entries into
    // ref_info_array_, which keeps track of the order of creation of
    // reference values, since we visit the blocks in reverse post order.
    //
    // By default, VisitXXX() (including VisitPhi()) calls VisitInstruction(),
    // unless VisitXXX() is overridden. VisitInstanceFieldGet() etc. above
    // also call CreateReferenceInfoForReferenceType() explicitly.
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitMonitorOperation(HMonitorOperation* monitor ATTRIBUTE_UNUSED) override {
    has_monitor_operations_ = true;
  }

  ArenaVector<ReferenceInfo*> ref_info_array_;  // All references used for heap accesses.
  ArenaVector<HeapLocation*> heap_locations_;   // All heap locations.
  ArenaBitVector aliasing_matrix_;              // Aliasing info between each pair of locations.
  bool has_heap_stores_;                        // If there are no heap stores, LSE acts as GVN
                                                // with better alias analysis and won't be as
                                                // effective.
  bool has_volatile_;                           // If there are volatile field accesses.
  bool has_monitor_operations_;                 // If there are monitor operations.

  DISALLOW_COPY_AND_ASSIGN(HeapLocationCollector);
};
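
// Computes the heap locations and aliasing information of a graph so that they
// can be queried by later passes such as load-store elimination. A minimal usage
// sketch (assuming `graph` is an HGraph* in scope; the actual driver lives in the
// LSE pass, and Run() is assumed to return true only when usable results were
// produced -- see load_store_analysis.cc):
//
//   LoadStoreAnalysis lsa(graph);
//   if (lsa.Run()) {
//     const HeapLocationCollector& hlc = lsa.GetHeapLocationCollector();
//     for (size_t i = 0; i < hlc.GetNumberOfHeapLocations(); ++i) {
//       for (size_t j = i + 1; j < hlc.GetNumberOfHeapLocations(); ++j) {
//         if (hlc.MayAlias(i, j)) { /* handle possibly aliasing locations */ }
//       }
//     }
//   }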
class LoadStoreAnalysis : public HOptimization {
 public:
  explicit LoadStoreAnalysis(HGraph* graph, const char* name = kLoadStoreAnalysisPassName)
      : HOptimization(graph, name),
        heap_location_collector_(graph) {}

  const HeapLocationCollector& GetHeapLocationCollector() const {
    return heap_location_collector_;
  }

  bool Run() override;

  static constexpr const char* kLoadStoreAnalysisPassName = "load_store_analysis";

 private:
  HeapLocationCollector heap_location_collector_;

  DISALLOW_COPY_AND_ASSIGN(LoadStoreAnalysis);
};

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_