1 /*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #ifndef ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_
18 #define ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_
19
20 #include <iostream>
21
22 #include "base/intrusive_forward_list.h"
23 #include "base/iteration_range.h"
24 #include "base/macros.h"
25 #include "base/scoped_arena_allocator.h"
26 #include "base/scoped_arena_containers.h"
27 #include "nodes.h"
28
29 namespace art HIDDEN {
30
31 class CodeGenerator;
32 class SsaLivenessAnalysis;
33
34 static constexpr int kNoRegister = -1;
35
36 class BlockInfo : public ArenaObject<kArenaAllocSsaLiveness> {
37 public:
BlockInfo(ScopedArenaAllocator * allocator,const HBasicBlock & block,size_t number_of_ssa_values)38 BlockInfo(ScopedArenaAllocator* allocator, const HBasicBlock& block, size_t number_of_ssa_values)
39 : block_(block),
40 live_in_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness),
41 live_out_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness),
42 kill_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness) {
43 UNUSED(block_);
44 }
45
46 private:
47 const HBasicBlock& block_;
48 ArenaBitVector live_in_;
49 ArenaBitVector live_out_;
50 ArenaBitVector kill_;
51
52 friend class SsaLivenessAnalysis;
53
54 DISALLOW_COPY_AND_ASSIGN(BlockInfo);
55 };
56
57 /**
58 * A live range contains the start and end of a range where an instruction or a temporary
59 * is live.
60 */
61 class LiveRange final : public ArenaObject<kArenaAllocSsaLiveness> {
62 public:
LiveRange(size_t start,size_t end,LiveRange * next)63 LiveRange(size_t start, size_t end, LiveRange* next) : start_(start), end_(end), next_(next) {
64 DCHECK_LT(start, end);
65 DCHECK(next_ == nullptr || next_->GetStart() > GetEnd());
66 }
67
GetStart()68 size_t GetStart() const { return start_; }
GetEnd()69 size_t GetEnd() const { return end_; }
GetNext()70 LiveRange* GetNext() const { return next_; }
71
IntersectsWith(const LiveRange & other)72 bool IntersectsWith(const LiveRange& other) const {
73 return (start_ >= other.start_ && start_ < other.end_)
74 || (other.start_ >= start_ && other.start_ < end_);
75 }
76
IsBefore(const LiveRange & other)77 bool IsBefore(const LiveRange& other) const {
78 return end_ <= other.start_;
79 }
80
Dump(std::ostream & stream)81 void Dump(std::ostream& stream) const {
82 stream << "[" << start_ << "," << end_ << ")";
83 }
84
Dup(ScopedArenaAllocator * allocator)85 LiveRange* Dup(ScopedArenaAllocator* allocator) const {
86 return new (allocator) LiveRange(
87 start_, end_, next_ == nullptr ? nullptr : next_->Dup(allocator));
88 }
89
GetLastRange()90 LiveRange* GetLastRange() {
91 return next_ == nullptr ? this : next_->GetLastRange();
92 }
93
94 private:
95 size_t start_;
96 size_t end_;
97 LiveRange* next_;
98
99 friend class LiveInterval;
100
101 DISALLOW_COPY_AND_ASSIGN(LiveRange);
102 };
103
104 /**
105 * A use position represents a live interval use at a given position.
106 */
107 class UsePosition : public ArenaObject<kArenaAllocSsaLiveness>,
108 public IntrusiveForwardListNode<UsePosition> {
109 public:
UsePosition(HInstruction * user,size_t input_index,size_t position)110 UsePosition(HInstruction* user, size_t input_index, size_t position)
111 : user_(user),
112 input_index_(input_index),
113 position_(position) {
114 }
115
UsePosition(size_t position)116 explicit UsePosition(size_t position)
117 : user_(nullptr),
118 input_index_(kNoInput),
119 position_(dchecked_integral_cast<uint32_t>(position)) {
120 }
121
GetPosition()122 size_t GetPosition() const { return position_; }
123
GetUser()124 HInstruction* GetUser() const { return user_; }
125
IsSynthesized()126 bool IsSynthesized() const { return user_ == nullptr; }
127
GetInputIndex()128 size_t GetInputIndex() const { return input_index_; }
129
Dump(std::ostream & stream)130 void Dump(std::ostream& stream) const {
131 stream << position_;
132 }
133
GetLoopInformation()134 HLoopInformation* GetLoopInformation() const {
135 return user_->GetBlock()->GetLoopInformation();
136 }
137
Clone(ScopedArenaAllocator * allocator)138 UsePosition* Clone(ScopedArenaAllocator* allocator) const {
139 return new (allocator) UsePosition(user_, input_index_, position_);
140 }
141
RequiresRegister()142 bool RequiresRegister() const {
143 if (IsSynthesized()) return false;
144 Location location = GetUser()->GetLocations()->InAt(GetInputIndex());
145 return location.IsUnallocated() && location.RequiresRegisterKind();
146 }
147
148 private:
149 static constexpr uint32_t kNoInput = static_cast<uint32_t>(-1);
150
151 HInstruction* const user_;
152 const size_t input_index_;
153 const size_t position_;
154
155 DISALLOW_COPY_AND_ASSIGN(UsePosition);
156 };
157 using UsePositionList = IntrusiveForwardList<UsePosition>;
158
159 /**
160 * An environment use position represents a live interval for environment use at a given position.
161 */
162 class EnvUsePosition : public ArenaObject<kArenaAllocSsaLiveness>,
163 public IntrusiveForwardListNode<EnvUsePosition> {
164 public:
EnvUsePosition(HEnvironment * environment,size_t input_index,size_t position)165 EnvUsePosition(HEnvironment* environment,
166 size_t input_index,
167 size_t position)
168 : environment_(environment),
169 input_index_(input_index),
170 position_(position) {
171 DCHECK(environment != nullptr);
172 }
173
GetPosition()174 size_t GetPosition() const { return position_; }
175
GetEnvironment()176 HEnvironment* GetEnvironment() const { return environment_; }
GetInputIndex()177 size_t GetInputIndex() const { return input_index_; }
178
Dump(std::ostream & stream)179 void Dump(std::ostream& stream) const {
180 stream << position_;
181 }
182
Clone(ScopedArenaAllocator * allocator)183 EnvUsePosition* Clone(ScopedArenaAllocator* allocator) const {
184 return new (allocator) EnvUsePosition(environment_, input_index_, position_);
185 }
186
187 private:
188 HEnvironment* const environment_;
189 const size_t input_index_;
190 const size_t position_;
191
192 DISALLOW_COPY_AND_ASSIGN(EnvUsePosition);
193 };
194 using EnvUsePositionList = IntrusiveForwardList<EnvUsePosition>;
195
196 template <typename Iterator>
FindUseAtOrAfterPosition(Iterator first,Iterator last,size_t position)197 inline Iterator FindUseAtOrAfterPosition(Iterator first, Iterator last, size_t position) {
198 using value_type = const typename Iterator::value_type;
199 static_assert(std::is_same<value_type, const UsePosition>::value ||
200 std::is_same<value_type, const EnvUsePosition>::value,
201 "Expecting value type UsePosition or EnvUsePosition.");
202 Iterator ret = std::find_if(
203 first, last, [position](const value_type& use) { return use.GetPosition() >= position; });
204 // Check that the processed range is sorted. Do not check the rest of the range to avoid
205 // increasing the complexity of callers from O(n) to O(n^2).
206 DCHECK(std::is_sorted(
207 first,
208 ret,
209 [](const value_type& lhs, const value_type& rhs) {
210 return lhs.GetPosition() < rhs.GetPosition();
211 }));
212 return ret;
213 }
214
215 template <typename Iterator>
FindMatchingUseRange(Iterator first,Iterator last,size_t position_begin,size_t position_end)216 inline IterationRange<Iterator> FindMatchingUseRange(Iterator first,
217 Iterator last,
218 size_t position_begin,
219 size_t position_end) {
220 Iterator begin = FindUseAtOrAfterPosition(first, last, position_begin);
221 Iterator end = FindUseAtOrAfterPosition(begin, last, position_end);
222 return MakeIterationRange(begin, end);
223 }
224
225 class SafepointPosition : public ArenaObject<kArenaAllocSsaLiveness> {
226 public:
SafepointPosition(HInstruction * instruction)227 explicit SafepointPosition(HInstruction* instruction)
228 : instruction_(instruction),
229 next_(nullptr) {}
230
ComputePosition(HInstruction * instruction)231 static size_t ComputePosition(HInstruction* instruction) {
232 // We special case instructions emitted at use site, as their
233 // safepoint position needs to be at their use.
234 if (instruction->IsEmittedAtUseSite()) {
235 // Currently only applies to implicit null checks, which are emitted
236 // at the next instruction.
237 DCHECK(instruction->IsNullCheck()) << instruction->DebugName();
238 return instruction->GetLifetimePosition() + 2;
239 } else {
240 return instruction->GetLifetimePosition();
241 }
242 }
243
SetNext(SafepointPosition * next)244 void SetNext(SafepointPosition* next) {
245 next_ = next;
246 }
247
GetPosition()248 size_t GetPosition() const {
249 return ComputePosition(instruction_);
250 }
251
GetNext()252 SafepointPosition* GetNext() const {
253 return next_;
254 }
255
GetLocations()256 LocationSummary* GetLocations() const {
257 return instruction_->GetLocations();
258 }
259
GetInstruction()260 HInstruction* GetInstruction() const {
261 return instruction_;
262 }
263
264 private:
265 HInstruction* const instruction_;
266 SafepointPosition* next_;
267
268 DISALLOW_COPY_AND_ASSIGN(SafepointPosition);
269 };
270
271 /**
272 * An interval is a list of disjoint live ranges where an instruction is live.
273 * Each instruction that has uses gets an interval.
274 */
275 class LiveInterval : public ArenaObject<kArenaAllocSsaLiveness> {
276 public:
277 static LiveInterval* MakeInterval(ScopedArenaAllocator* allocator,
278 DataType::Type type,
279 HInstruction* instruction = nullptr) {
280 return new (allocator) LiveInterval(allocator, type, instruction);
281 }
282
MakeFixedInterval(ScopedArenaAllocator * allocator,int reg,DataType::Type type)283 static LiveInterval* MakeFixedInterval(ScopedArenaAllocator* allocator,
284 int reg,
285 DataType::Type type) {
286 return new (allocator) LiveInterval(allocator, type, nullptr, true, reg, false);
287 }
288
MakeTempInterval(ScopedArenaAllocator * allocator,DataType::Type type)289 static LiveInterval* MakeTempInterval(ScopedArenaAllocator* allocator, DataType::Type type) {
290 return new (allocator) LiveInterval(allocator, type, nullptr, false, kNoRegister, true);
291 }
292
IsFixed()293 bool IsFixed() const { return is_fixed_; }
IsTemp()294 bool IsTemp() const { return is_temp_; }
295 // This interval is the result of a split.
IsSplit()296 bool IsSplit() const { return parent_ != this; }
297
AddTempUse(HInstruction * instruction,size_t temp_index)298 void AddTempUse(HInstruction* instruction, size_t temp_index) {
299 DCHECK(IsTemp());
300 DCHECK(GetUses().empty()) << "A temporary can only have one user";
301 DCHECK(GetEnvironmentUses().empty()) << "A temporary cannot have environment user";
302 size_t position = instruction->GetLifetimePosition();
303 UsePosition* new_use = new (allocator_) UsePosition(instruction, temp_index, position);
304 uses_.push_front(*new_use);
305 AddRange(position, position + 1);
306 }
307
308 // Record use of an input. The use will be recorded as an environment use if
309 // `environment` is not null and as register use otherwise. If `actual_user`
310 // is specified, the use will be recorded at `actual_user`'s lifetime position.
311 void AddUse(HInstruction* instruction,
312 HEnvironment* environment,
313 size_t input_index,
314 HInstruction* actual_user = nullptr) {
315 bool is_environment = (environment != nullptr);
316 LocationSummary* locations = instruction->GetLocations();
317 if (actual_user == nullptr) {
318 actual_user = instruction;
319 }
320
321 // Set the use within the instruction.
322 size_t position = actual_user->GetLifetimePosition() + 1;
323 if (!is_environment) {
324 if (locations->IsFixedInput(input_index) || locations->OutputUsesSameAs(input_index)) {
325 // For fixed inputs and output same as input, the register allocator
326 // requires to have inputs die at the instruction, so that input moves use the
327 // location of the input just before that instruction (and not potential moves due
328 // to splitting).
329 DCHECK_EQ(instruction, actual_user);
330 position = actual_user->GetLifetimePosition();
331 } else if (!locations->InAt(input_index).IsValid()) {
332 return;
333 }
334 }
335
336 if (!is_environment && instruction->IsInLoop()) {
337 AddBackEdgeUses(*instruction->GetBlock());
338 }
339
340 if ((!uses_.empty()) &&
341 (uses_.front().GetUser() == actual_user) &&
342 (uses_.front().GetPosition() < position)) {
343 // The user uses the instruction multiple times, and one use dies before the other.
344 // We update the use list so that the latter is first.
345 DCHECK(!is_environment);
346 DCHECK(uses_.front().GetPosition() + 1 == position);
347 UsePositionList::iterator next_pos = uses_.begin();
348 UsePositionList::iterator insert_pos;
349 do {
350 insert_pos = next_pos;
351 ++next_pos;
352 } while (next_pos != uses_.end() && next_pos->GetPosition() < position);
353 UsePosition* new_use = new (allocator_) UsePosition(instruction, input_index, position);
354 uses_.insert_after(insert_pos, *new_use);
355 if (first_range_->GetEnd() == uses_.front().GetPosition()) {
356 first_range_->end_ = position;
357 }
358 return;
359 }
360
361 if (is_environment) {
362 DCHECK(env_uses_.empty() || position <= env_uses_.front().GetPosition());
363 EnvUsePosition* new_env_use =
364 new (allocator_) EnvUsePosition(environment, input_index, position);
365 env_uses_.push_front(*new_env_use);
366 } else {
367 DCHECK(uses_.empty() || position <= uses_.front().GetPosition());
368 UsePosition* new_use = new (allocator_) UsePosition(instruction, input_index, position);
369 uses_.push_front(*new_use);
370 }
371
372 size_t start_block_position = instruction->GetBlock()->GetLifetimeStart();
373 if (first_range_ == nullptr) {
374 // First time we see a use of that interval.
375 first_range_ = last_range_ = range_search_start_ =
376 new (allocator_) LiveRange(start_block_position, position, nullptr);
377 } else if (first_range_->GetStart() == start_block_position) {
378 // There is a use later in the same block or in a following block.
379 // Note that in such a case, `AddRange` for the whole blocks has been called
380 // before arriving in this method, and this is the reason the start of
381 // `first_range_` is before the given `position`.
382 DCHECK_LE(position, first_range_->GetEnd());
383 } else {
384 DCHECK(first_range_->GetStart() > position);
385 // There is a hole in the interval. Create a new range.
386 // Note that the start of `first_range_` can be equal to `end`: two blocks
387 // having adjacent lifetime positions are not necessarily
388 // predecessor/successor. When two blocks are predecessor/successor, the
389 // liveness algorithm has called `AddRange` before arriving in this method,
390 // and the check line 205 would succeed.
391 first_range_ = range_search_start_ =
392 new (allocator_) LiveRange(start_block_position, position, first_range_);
393 }
394 }
395
AddPhiUse(HInstruction * instruction,size_t input_index,HBasicBlock * block)396 void AddPhiUse(HInstruction* instruction, size_t input_index, HBasicBlock* block) {
397 DCHECK(instruction->IsPhi());
398 if (block->IsInLoop()) {
399 AddBackEdgeUses(*block);
400 }
401 UsePosition* new_use =
402 new (allocator_) UsePosition(instruction, input_index, block->GetLifetimeEnd());
403 uses_.push_front(*new_use);
404 }
405
AddRange(size_t start,size_t end)406 ALWAYS_INLINE void AddRange(size_t start, size_t end) {
407 if (first_range_ == nullptr) {
408 first_range_ = last_range_ = range_search_start_ =
409 new (allocator_) LiveRange(start, end, first_range_);
410 } else if (first_range_->GetStart() == end) {
411 // There is a use in the following block.
412 first_range_->start_ = start;
413 } else if (first_range_->GetStart() == start && first_range_->GetEnd() == end) {
414 DCHECK(is_fixed_);
415 } else {
416 DCHECK_GT(first_range_->GetStart(), end);
417 // There is a hole in the interval. Create a new range.
418 first_range_ = range_search_start_ = new (allocator_) LiveRange(start, end, first_range_);
419 }
420 }
421
AddLoopRange(size_t start,size_t end)422 void AddLoopRange(size_t start, size_t end) {
423 DCHECK(first_range_ != nullptr);
424 DCHECK_LE(start, first_range_->GetStart());
425 // Find the range that covers the positions after the loop.
426 LiveRange* after_loop = first_range_;
427 LiveRange* last_in_loop = nullptr;
428 while (after_loop != nullptr && after_loop->GetEnd() < end) {
429 DCHECK_LE(start, after_loop->GetStart());
430 last_in_loop = after_loop;
431 after_loop = after_loop->GetNext();
432 }
433 if (after_loop == nullptr) {
434 // Uses are only in the loop.
435 first_range_ = last_range_ = range_search_start_ =
436 new (allocator_) LiveRange(start, end, nullptr);
437 } else if (after_loop->GetStart() <= end) {
438 first_range_ = range_search_start_ = after_loop;
439 // There are uses after the loop.
440 first_range_->start_ = start;
441 } else {
442 // The use after the loop is after a lifetime hole.
443 DCHECK(last_in_loop != nullptr);
444 first_range_ = range_search_start_ = last_in_loop;
445 first_range_->start_ = start;
446 first_range_->end_ = end;
447 }
448 }
449
HasSpillSlot()450 bool HasSpillSlot() const { return spill_slot_ != kNoSpillSlot; }
SetSpillSlot(int slot)451 void SetSpillSlot(int slot) {
452 DCHECK(!is_fixed_);
453 DCHECK(!is_temp_);
454 spill_slot_ = slot;
455 }
GetSpillSlot()456 int GetSpillSlot() const { return spill_slot_; }
457
SetFrom(size_t from)458 void SetFrom(size_t from) {
459 if (first_range_ != nullptr) {
460 first_range_->start_ = from;
461 } else {
462 // Instruction without uses.
463 DCHECK(uses_.empty());
464 DCHECK(from == defined_by_->GetLifetimePosition());
465 first_range_ = last_range_ = range_search_start_ =
466 new (allocator_) LiveRange(from, from + 2, nullptr);
467 }
468 }
469
GetParent()470 LiveInterval* GetParent() const { return parent_; }
471
472 // Returns whether this interval is the parent interval, that is, the interval
473 // that starts where the HInstruction is defined.
IsParent()474 bool IsParent() const { return parent_ == this; }
475
GetFirstRange()476 LiveRange* GetFirstRange() const { return first_range_; }
GetLastRange()477 LiveRange* GetLastRange() const { return last_range_; }
478
GetRegister()479 int GetRegister() const { return register_; }
SetRegister(int reg)480 void SetRegister(int reg) { register_ = reg; }
ClearRegister()481 void ClearRegister() { register_ = kNoRegister; }
HasRegister()482 bool HasRegister() const { return register_ != kNoRegister; }
483
IsDeadAt(size_t position)484 bool IsDeadAt(size_t position) const {
485 return GetEnd() <= position;
486 }
487
IsDefinedAt(size_t position)488 bool IsDefinedAt(size_t position) const {
489 return GetStart() <= position && !IsDeadAt(position);
490 }
491
492 // Returns true if the interval contains a LiveRange covering `position`.
493 // The range at or immediately after the current position of linear scan
494 // is cached for better performance. If `position` can be smaller than
495 // that, CoversSlow should be used instead.
Covers(size_t position)496 bool Covers(size_t position) {
497 LiveRange* candidate = FindRangeAtOrAfter(position, range_search_start_);
498 range_search_start_ = candidate;
499 return (candidate != nullptr && candidate->GetStart() <= position);
500 }
501
502 // Same as Covers but always tests all ranges.
CoversSlow(size_t position)503 bool CoversSlow(size_t position) const {
504 LiveRange* candidate = FindRangeAtOrAfter(position, first_range_);
505 return candidate != nullptr && candidate->GetStart() <= position;
506 }
507
508 // Returns the first intersection of this interval with `current`, which
509 // must be the interval currently being allocated by linear scan.
FirstIntersectionWith(LiveInterval * current)510 size_t FirstIntersectionWith(LiveInterval* current) const {
511 // Find the first range after the start of `current`. We use the search
512 // cache to improve performance.
513 DCHECK(GetStart() <= current->GetStart() || IsFixed());
514 LiveRange* other_range = current->first_range_;
515 LiveRange* my_range = FindRangeAtOrAfter(other_range->GetStart(), range_search_start_);
516 if (my_range == nullptr) {
517 return kNoLifetime;
518 }
519
520 // Advance both intervals and find the first matching range start in
521 // this interval.
522 do {
523 if (my_range->IsBefore(*other_range)) {
524 my_range = my_range->GetNext();
525 if (my_range == nullptr) {
526 return kNoLifetime;
527 }
528 } else if (other_range->IsBefore(*my_range)) {
529 other_range = other_range->GetNext();
530 if (other_range == nullptr) {
531 return kNoLifetime;
532 }
533 } else {
534 DCHECK(my_range->IntersectsWith(*other_range));
535 return std::max(my_range->GetStart(), other_range->GetStart());
536 }
537 } while (true);
538 }
539
GetStart()540 size_t GetStart() const {
541 return first_range_->GetStart();
542 }
543
GetEnd()544 size_t GetEnd() const {
545 return last_range_->GetEnd();
546 }
547
GetLength()548 size_t GetLength() const {
549 return GetEnd() - GetStart();
550 }
551
FirstRegisterUseAfter(size_t position)552 size_t FirstRegisterUseAfter(size_t position) const {
553 if (is_temp_) {
554 return position == GetStart() ? position : kNoLifetime;
555 }
556
557 if (IsDefiningPosition(position) && DefinitionRequiresRegister()) {
558 return position;
559 }
560
561 size_t end = GetEnd();
562 for (const UsePosition& use : GetUses()) {
563 size_t use_position = use.GetPosition();
564 if (use_position > end) {
565 break;
566 }
567 if (use_position > position) {
568 if (use.RequiresRegister()) {
569 return use_position;
570 }
571 }
572 }
573 return kNoLifetime;
574 }
575
576 // Returns the location of the first register use for this live interval,
577 // including a register definition if applicable.
FirstRegisterUse()578 size_t FirstRegisterUse() const {
579 return FirstRegisterUseAfter(GetStart());
580 }
581
582 // Whether the interval requires a register rather than a stack location.
583 // If needed for performance, this could be cached.
RequiresRegister()584 bool RequiresRegister() const {
585 return !HasRegister() && FirstRegisterUse() != kNoLifetime;
586 }
587
FirstUseAfter(size_t position)588 size_t FirstUseAfter(size_t position) const {
589 if (is_temp_) {
590 return position == GetStart() ? position : kNoLifetime;
591 }
592
593 if (IsDefiningPosition(position)) {
594 DCHECK(defined_by_->GetLocations()->Out().IsValid());
595 return position;
596 }
597
598 size_t end = GetEnd();
599 for (const UsePosition& use : GetUses()) {
600 size_t use_position = use.GetPosition();
601 if (use_position > end) {
602 break;
603 }
604 if (use_position > position) {
605 return use_position;
606 }
607 }
608 return kNoLifetime;
609 }
610
GetUses()611 const UsePositionList& GetUses() const {
612 return parent_->uses_;
613 }
614
GetEnvironmentUses()615 const EnvUsePositionList& GetEnvironmentUses() const {
616 return parent_->env_uses_;
617 }
618
GetType()619 DataType::Type GetType() const {
620 return type_;
621 }
622
GetDefinedBy()623 HInstruction* GetDefinedBy() const {
624 return defined_by_;
625 }
626
HasWillCallSafepoint()627 bool HasWillCallSafepoint() const {
628 for (SafepointPosition* safepoint = first_safepoint_;
629 safepoint != nullptr;
630 safepoint = safepoint->GetNext()) {
631 if (safepoint->GetLocations()->WillCall()) return true;
632 }
633 return false;
634 }
635
FindSafepointJustBefore(size_t position)636 SafepointPosition* FindSafepointJustBefore(size_t position) const {
637 for (SafepointPosition* safepoint = first_safepoint_, *previous = nullptr;
638 safepoint != nullptr;
639 previous = safepoint, safepoint = safepoint->GetNext()) {
640 if (safepoint->GetPosition() >= position) return previous;
641 }
642 return last_safepoint_;
643 }
644
645 /**
646 * Split this interval at `position`. This interval is changed to:
647 * [start ... position).
648 *
649 * The new interval covers:
650 * [position ... end)
651 */
SplitAt(size_t position)652 LiveInterval* SplitAt(size_t position) {
653 DCHECK(!is_temp_);
654 DCHECK(!is_fixed_);
655 DCHECK_GT(position, GetStart());
656
657 if (GetEnd() <= position) {
658 // This range dies before `position`, no need to split.
659 return nullptr;
660 }
661
662 LiveInterval* new_interval = new (allocator_) LiveInterval(allocator_, type_);
663 SafepointPosition* new_last_safepoint = FindSafepointJustBefore(position);
664 if (new_last_safepoint == nullptr) {
665 new_interval->first_safepoint_ = first_safepoint_;
666 new_interval->last_safepoint_ = last_safepoint_;
667 first_safepoint_ = last_safepoint_ = nullptr;
668 } else if (last_safepoint_ != new_last_safepoint) {
669 new_interval->last_safepoint_ = last_safepoint_;
670 new_interval->first_safepoint_ = new_last_safepoint->GetNext();
671 DCHECK(new_interval->first_safepoint_ != nullptr);
672 last_safepoint_ = new_last_safepoint;
673 last_safepoint_->SetNext(nullptr);
674 }
675
676 new_interval->next_sibling_ = next_sibling_;
677 next_sibling_ = new_interval;
678 new_interval->parent_ = parent_;
679
680 LiveRange* current = first_range_;
681 LiveRange* previous = nullptr;
682 // Iterate over the ranges, and either find a range that covers this position, or
683 // two ranges in between this position (that is, the position is in a lifetime hole).
684 do {
685 if (position >= current->GetEnd()) {
686 // Move to next range.
687 previous = current;
688 current = current->next_;
689 } else if (position <= current->GetStart()) {
690 // If the previous range did not cover this position, we know position is in
691 // a lifetime hole. We can just break the first_range_ and last_range_ links
692 // and return the new interval.
693 DCHECK(previous != nullptr);
694 DCHECK(current != first_range_);
695 new_interval->last_range_ = last_range_;
696 last_range_ = previous;
697 previous->next_ = nullptr;
698 new_interval->first_range_ = current;
699 if (range_search_start_ != nullptr && range_search_start_->GetEnd() >= current->GetEnd()) {
700 // Search start point is inside `new_interval`. Change it to null
701 // (i.e. the end of the interval) in the original interval.
702 range_search_start_ = nullptr;
703 }
704 new_interval->range_search_start_ = new_interval->first_range_;
705 return new_interval;
706 } else {
707 // This range covers position. We create a new last_range_ for this interval
708 // that covers last_range_->Start() and position. We also shorten the current
709 // range and make it the first range of the new interval.
710 DCHECK(position < current->GetEnd() && position > current->GetStart());
711 new_interval->last_range_ = last_range_;
712 last_range_ = new (allocator_) LiveRange(current->start_, position, nullptr);
713 if (previous != nullptr) {
714 previous->next_ = last_range_;
715 } else {
716 first_range_ = last_range_;
717 }
718 new_interval->first_range_ = current;
719 current->start_ = position;
720 if (range_search_start_ != nullptr && range_search_start_->GetEnd() >= current->GetEnd()) {
721 // Search start point is inside `new_interval`. Change it to `last_range`
722 // in the original interval. This is conservative but always correct.
723 range_search_start_ = last_range_;
724 }
725 new_interval->range_search_start_ = new_interval->first_range_;
726 return new_interval;
727 }
728 } while (current != nullptr);
729
730 LOG(FATAL) << "Unreachable";
731 return nullptr;
732 }
733
StartsBeforeOrAt(LiveInterval * other)734 bool StartsBeforeOrAt(LiveInterval* other) const {
735 return GetStart() <= other->GetStart();
736 }
737
StartsAfter(LiveInterval * other)738 bool StartsAfter(LiveInterval* other) const {
739 return GetStart() > other->GetStart();
740 }
741
Dump(std::ostream & stream)742 void Dump(std::ostream& stream) const {
743 stream << "ranges: { ";
744 LiveRange* current = first_range_;
745 while (current != nullptr) {
746 current->Dump(stream);
747 stream << " ";
748 current = current->GetNext();
749 }
750 stream << "}, uses: { ";
751 for (const UsePosition& use : GetUses()) {
752 use.Dump(stream);
753 stream << " ";
754 }
755 stream << "}, { ";
756 for (const EnvUsePosition& env_use : GetEnvironmentUses()) {
757 env_use.Dump(stream);
758 stream << " ";
759 }
760 stream << "}";
761 stream << " is_fixed: " << is_fixed_ << ", is_split: " << IsSplit();
762 stream << " is_low: " << IsLowInterval();
763 stream << " is_high: " << IsHighInterval();
764 }
765
766 // Same as Dump, but adds context such as the instruction defining this interval, and
767 // the register currently assigned to this interval.
768 void DumpWithContext(std::ostream& stream, const CodeGenerator& codegen) const;
769
GetNextSibling()770 LiveInterval* GetNextSibling() const { return next_sibling_; }
GetLastSibling()771 LiveInterval* GetLastSibling() {
772 LiveInterval* result = this;
773 while (result->next_sibling_ != nullptr) {
774 result = result->next_sibling_;
775 }
776 return result;
777 }
778
779 // Returns the first register hint that is at least free before
780 // the value contained in `free_until`. If none is found, returns
781 // `kNoRegister`.
782 int FindFirstRegisterHint(size_t* free_until, const SsaLivenessAnalysis& liveness) const;
783
784 // If there is enough at the definition site to find a register (for example
785 // it uses the same input as the first input), returns the register as a hint.
786 // Returns kNoRegister otherwise.
787 int FindHintAtDefinition() const;
788
789 // Returns the number of required spilling slots (measured as a multiple of the
790 // Dex virtual register size `kVRegSize`).
791 size_t NumberOfSpillSlotsNeeded() const;
792
IsFloatingPoint()793 bool IsFloatingPoint() const {
794 return type_ == DataType::Type::kFloat32 || type_ == DataType::Type::kFloat64;
795 }
796
797 // Converts the location of the interval to a `Location` object.
798 Location ToLocation() const;
799
800 // Returns the location of the interval following its siblings at `position`.
801 Location GetLocationAt(size_t position);
802
803 // Finds the sibling that is defined at `position`.
804 LiveInterval* GetSiblingAt(size_t position);
805
806 // Returns whether `other` and `this` share the same kind of register.
807 bool SameRegisterKind(Location other) const;
SameRegisterKind(const LiveInterval & other)808 bool SameRegisterKind(const LiveInterval& other) const {
809 return IsFloatingPoint() == other.IsFloatingPoint();
810 }
811
HasHighInterval()812 bool HasHighInterval() const {
813 return IsLowInterval();
814 }
815
HasLowInterval()816 bool HasLowInterval() const {
817 return IsHighInterval();
818 }
819
GetLowInterval()820 LiveInterval* GetLowInterval() const {
821 DCHECK(HasLowInterval());
822 return high_or_low_interval_;
823 }
824
GetHighInterval()825 LiveInterval* GetHighInterval() const {
826 DCHECK(HasHighInterval());
827 return high_or_low_interval_;
828 }
829
IsHighInterval()830 bool IsHighInterval() const {
831 return GetParent()->is_high_interval_;
832 }
833
IsLowInterval()834 bool IsLowInterval() const {
835 return !IsHighInterval() && (GetParent()->high_or_low_interval_ != nullptr);
836 }
837
SetLowInterval(LiveInterval * low)838 void SetLowInterval(LiveInterval* low) {
839 DCHECK(IsHighInterval());
840 high_or_low_interval_ = low;
841 }
842
SetHighInterval(LiveInterval * high)843 void SetHighInterval(LiveInterval* high) {
844 DCHECK(IsLowInterval());
845 high_or_low_interval_ = high;
846 }
847
  // Creates the synthesized high interval for a register pair and links it to
  // this (parent, not-yet-paired) interval. The high interval receives copies
  // of this interval's ranges and of its use / environment-use lists.
  void AddHighInterval(bool is_temp = false) {
    DCHECK(IsParent());
    DCHECK(!HasHighInterval());
    DCHECK(!HasLowInterval());
    // The last `true` argument marks the new interval as the high half.
    high_or_low_interval_ = new (allocator_) LiveInterval(
        allocator_, type_, defined_by_, false, kNoRegister, is_temp, true);
    high_or_low_interval_->high_or_low_interval_ = this;
    if (first_range_ != nullptr) {
      // Deep-copy the whole range list so the two halves can be split/assigned
      // independently by the register allocator.
      high_or_low_interval_->first_range_ = first_range_->Dup(allocator_);
      high_or_low_interval_->last_range_ = high_or_low_interval_->first_range_->GetLastRange();
      high_or_low_interval_->range_search_start_ = high_or_low_interval_->first_range_;
    }
    // Clone the use list, appending after the previously inserted element to
    // preserve the original (sorted) order in the singly-linked list.
    auto pos = high_or_low_interval_->uses_.before_begin();
    for (const UsePosition& use : uses_) {
      UsePosition* new_use = use.Clone(allocator_);
      pos = high_or_low_interval_->uses_.insert_after(pos, *new_use);
    }

    // Same for environment uses.
    auto env_pos = high_or_low_interval_->env_uses_.before_begin();
    for (const EnvUsePosition& env_use : env_uses_) {
      EnvUsePosition* new_env_use = env_use.Clone(allocator_);
      env_pos = high_or_low_interval_->env_uses_.insert_after(env_pos, *new_env_use);
    }
  }
872
  // Returns whether an interval, when it is non-split, is using
  // the same register of one of its input. This function should
  // be used only for DCHECKs.
  bool IsUsingInputRegister() const {
    if (defined_by_ != nullptr && !IsSplit()) {
      for (const HInstruction* input : defined_by_->GetInputs()) {
        LiveInterval* interval = input->GetLiveInterval();

        // Find the sibling of the input's interval that covers `defined_by_`.
        // Calls to this function are made outside the linear scan, hence we
        // need to use CoversSlow.
        while (interval != nullptr && !interval->CoversSlow(defined_by_->GetLifetimePosition())) {
          interval = interval->GetNextSibling();
        }

        // Check if both intervals have the same register of the same kind.
        if (interval != nullptr
            && interval->SameRegisterKind(*this)
            && interval->GetRegister() == GetRegister()) {
          return true;
        }
      }
    }
    return false;
  }
897
  // Returns whether an interval, when it is non-split, can safely use
  // the same register of one of its input. Note that this method requires
  // IsUsingInputRegister() to be true. This function should be used only
  // for DCHECKs.
  bool CanUseInputRegister() const {
    DCHECK(IsUsingInputRegister());
    if (defined_by_ != nullptr && !IsSplit()) {
      LocationSummary* locations = defined_by_->GetLocations();
      if (locations->OutputCanOverlapWithInputs()) {
        // The instruction declared that its output may not share a register
        // with any input, so reuse is never safe.
        return false;
      }
      for (const HInstruction* input : defined_by_->GetInputs()) {
        LiveInterval* interval = input->GetLiveInterval();

        // Find the sibling of the input's interval that covers `defined_by_`.
        // Calls to this function are made outside the linear scan, hence we
        // need to use CoversSlow.
        while (interval != nullptr && !interval->CoversSlow(defined_by_->GetLifetimePosition())) {
          interval = interval->GetNextSibling();
        }

        if (interval != nullptr
            && interval->SameRegisterKind(*this)
            && interval->GetRegister() == GetRegister()) {
          // We found the input that has the same register. Reuse is safe only
          // if that input is not live after `defined_by_`.
          return !interval->CoversSlow(defined_by_->GetLifetimePosition() + 1);
        }
      }
    }
    // IsUsingInputRegister() guarantees the loop above finds a match.
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }
930
AddSafepoint(HInstruction * instruction)931 void AddSafepoint(HInstruction* instruction) {
932 SafepointPosition* safepoint = new (allocator_) SafepointPosition(instruction);
933 if (first_safepoint_ == nullptr) {
934 first_safepoint_ = last_safepoint_ = safepoint;
935 } else {
936 DCHECK_LE(last_safepoint_->GetPosition(), safepoint->GetPosition());
937 last_safepoint_->SetNext(safepoint);
938 last_safepoint_ = safepoint;
939 }
940 }
941
  // Returns the head of the (position-ordered) list of safepoints covered by
  // this interval, or null if there are none.
  SafepointPosition* GetFirstSafepoint() const {
    return first_safepoint_;
  }
945
  // Resets the starting point for range-searching queries to the first range.
  // Intervals must be reset prior to starting a new linear scan over them.
  void ResetSearchCache() {
    range_search_start_ = first_range_;
  }
951
DefinitionRequiresRegister()952 bool DefinitionRequiresRegister() const {
953 DCHECK(IsParent());
954 LocationSummary* locations = defined_by_->GetLocations();
955 Location location = locations->Out();
956 // This interval is the first interval of the instruction. If the output
957 // of the instruction requires a register, we return the position of that instruction
958 // as the first register use.
959 if (location.IsUnallocated()) {
960 if ((location.GetPolicy() == Location::kRequiresRegister)
961 || (location.GetPolicy() == Location::kSameAsFirstInput
962 && (locations->InAt(0).IsRegister()
963 || locations->InAt(0).IsRegisterPair()
964 || locations->InAt(0).GetPolicy() == Location::kRequiresRegister))) {
965 return true;
966 } else if ((location.GetPolicy() == Location::kRequiresFpuRegister)
967 || (location.GetPolicy() == Location::kSameAsFirstInput
968 && (locations->InAt(0).IsFpuRegister()
969 || locations->InAt(0).IsFpuRegisterPair()
970 || locations->InAt(0).GetPolicy() == Location::kRequiresFpuRegister))) {
971 return true;
972 }
973 } else if (location.IsRegister() || location.IsRegisterPair()) {
974 return true;
975 }
976 return false;
977 }
978
 private:
  // Private constructor: intervals are arena-allocated and created only via
  // the class's own helpers (see e.g. `AddHighInterval` above — presumably
  // also factory methods declared earlier in the class; confirm there).
  // A fresh interval is its own parent and has no ranges, uses or siblings.
  LiveInterval(ScopedArenaAllocator* allocator,
               DataType::Type type,
               HInstruction* defined_by = nullptr,
               bool is_fixed = false,
               int reg = kNoRegister,
               bool is_temp = false,
               bool is_high_interval = false)
      : allocator_(allocator),
        first_range_(nullptr),
        last_range_(nullptr),
        range_search_start_(nullptr),
        first_safepoint_(nullptr),
        last_safepoint_(nullptr),
        uses_(),
        env_uses_(),
        type_(type),
        next_sibling_(nullptr),
        parent_(this),
        register_(reg),
        spill_slot_(kNoSpillSlot),
        is_fixed_(is_fixed),
        is_temp_(is_temp),
        is_high_interval_(is_high_interval),
        high_or_low_interval_(nullptr),
        defined_by_(defined_by) {}
1005
1006 // Searches for a LiveRange that either covers the given position or is the
1007 // first next LiveRange. Returns null if no such LiveRange exists. Ranges
1008 // known to end before `position` can be skipped with `search_start`.
FindRangeAtOrAfter(size_t position,LiveRange * search_start)1009 LiveRange* FindRangeAtOrAfter(size_t position, LiveRange* search_start) const {
1010 if (kIsDebugBuild) {
1011 if (search_start != first_range_) {
1012 // If we are not searching the entire list of ranges, make sure we do
1013 // not skip the range we are searching for.
1014 if (search_start == nullptr) {
1015 DCHECK(IsDeadAt(position));
1016 } else if (search_start->GetStart() > position) {
1017 DCHECK_EQ(search_start, FindRangeAtOrAfter(position, first_range_));
1018 }
1019 }
1020 }
1021
1022 LiveRange* range;
1023 for (range = search_start;
1024 range != nullptr && range->GetEnd() <= position;
1025 range = range->GetNext()) {
1026 continue;
1027 }
1028 return range;
1029 }
1030
  // Returns whether `position` is the definition point of this interval's
  // instruction: only the parent interval starts at the definition.
  bool IsDefiningPosition(size_t position) const {
    return IsParent() && (position == GetStart());
  }
1034
HasSynthesizeUseAt(size_t position)1035 bool HasSynthesizeUseAt(size_t position) const {
1036 for (const UsePosition& use : GetUses()) {
1037 size_t use_position = use.GetPosition();
1038 if ((use_position == position) && use.IsSynthesized()) {
1039 return true;
1040 }
1041 if (use_position > position) break;
1042 }
1043 return false;
1044 }
1045
  // Adds synthesized uses at the last back edge of each loop enclosing
  // `block_at_use` (inner-most to outer-most), so the register allocator
  // keeps values live across the whole loop body.
  void AddBackEdgeUses(const HBasicBlock& block_at_use) {
    DCHECK(block_at_use.IsInLoop());
    if (block_at_use.GetGraph()->HasIrreducibleLoops()) {
      // Linear order may not be well formed when irreducible loops are present,
      // i.e. loop blocks may not be adjacent and a back edge may not be last,
      // which violates assumptions made in this method.
      return;
    }

    // Add synthesized uses at the back edge of loops to help the register allocator.
    // Note that this method is called in decreasing liveness order, to facilitate adding
    // uses at the head of the `uses_` list. Because below
    // we iterate from inner-most to outer-most, which is in increasing liveness order,
    // we need to add subsequent entries after the last inserted entry.
    const UsePositionList::iterator old_begin = uses_.begin();
    UsePositionList::iterator insert_pos = uses_.before_begin();
    for (HLoopInformationOutwardIterator it(block_at_use);
         !it.Done();
         it.Advance()) {
      HLoopInformation* current = it.Current();
      if (GetDefinedBy()->GetLifetimePosition() >= current->GetHeader()->GetLifetimeStart()) {
        // This interval is defined in the loop. We can stop going outward.
        break;
      }

      // We're only adding a synthesized use at the last back edge. Adding synthesized uses on
      // all back edges is not necessary: anything used in the loop will have its use at the
      // last back edge. If we want branches in a loop to have better register allocation than
      // another branch, then it is the linear order we should change.
      size_t back_edge_use_position = current->GetLifetimeEnd();
      if ((old_begin != uses_.end()) && (old_begin->GetPosition() <= back_edge_use_position)) {
        // There was a use already seen in this loop. Therefore the previous call to `AddUse`
        // already inserted the backedge use. We can stop going outward.
        DCHECK(HasSynthesizeUseAt(back_edge_use_position));
        break;
      }

      // Inserted uses must remain sorted; the first insertion of this call is
      // only checked for being at the inner-most loop of `block_at_use`.
      DCHECK(insert_pos != uses_.before_begin()
             ? back_edge_use_position > insert_pos->GetPosition()
             : current == block_at_use.GetLoopInformation())
          << std::distance(uses_.before_begin(), insert_pos);

      UsePosition* new_use = new (allocator_) UsePosition(back_edge_use_position);
      insert_pos = uses_.insert_after(insert_pos, *new_use);
    }
  }
1092
  // Arena used for all allocations made on behalf of this interval
  // (siblings, ranges, uses, safepoints).
  ScopedArenaAllocator* const allocator_;

  // Ranges of this interval. We need a quick access to the last range to test
  // for liveness (see `IsDeadAt`).
  LiveRange* first_range_;
  LiveRange* last_range_;

  // The first range at or after the current position of a linear scan. It is
  // used to optimize range-searching queries.
  LiveRange* range_search_start_;

  // Safepoints where this interval is live, as a position-ordered list.
  SafepointPosition* first_safepoint_;
  SafepointPosition* last_safepoint_;

  // Uses of this interval. Only the parent interval keeps these lists.
  UsePositionList uses_;
  EnvUsePositionList env_uses_;

  // The instruction type this interval corresponds to.
  const DataType::Type type_;

  // Live interval that is the result of a split.
  LiveInterval* next_sibling_;

  // The first interval from which split intervals come from.
  LiveInterval* parent_;

  // The register allocated to this interval, or `kNoRegister` if none.
  int register_;

  // The spill slot allocated to this interval, or `kNoSpillSlot` if none.
  int spill_slot_;

  // Whether the interval is for a fixed register.
  const bool is_fixed_;

  // Whether the interval is for a temporary.
  const bool is_temp_;

  // Whether this interval is a synthesized interval for register pair.
  const bool is_high_interval_;

  // If this interval needs a register pair, the high or low equivalent.
  // `is_high_interval_` tells whether this holds the low or the high.
  LiveInterval* high_or_low_interval_;

  // The instruction represented by this interval.
  HInstruction* const defined_by_;

  // Note: shadows the namespace-scope `kNoRegister`; both are -1.
  static constexpr int kNoRegister = -1;
  static constexpr int kNoSpillSlot = -1;

  ART_FRIEND_TEST(RegisterAllocatorTest, SpillInactive);

  DISALLOW_COPY_AND_ASSIGN(LiveInterval);
};
1149 };
1150
1151 /**
1152 * Analysis that computes the liveness of instructions:
1153 *
1154 * (a) Non-environment uses of an instruction always make
1155 * the instruction live.
1156 * (b) Environment uses of an instruction whose type is object (that is, non-primitive), make the
1157 * instruction live, unless the class has an @DeadReferenceSafe annotation.
1158 * This avoids unexpected premature reference enqueuing or finalization, which could
1159 * result in premature deletion of native objects. In the presence of @DeadReferenceSafe,
1160 * object references are treated like primitive types.
1161 * (c) When the graph has the debuggable property, environment uses
1162 * of an instruction that has a primitive type make the instruction live.
1163 * If the graph does not have the debuggable property, the environment
1164 * use has no effect, and may get a 'none' value after register allocation.
1165 * (d) When compiling in OSR mode, all loops in the compiled method may be entered
1166 * from the interpreter via SuspendCheck; such use in SuspendCheck makes the instruction
1167 * live.
1168 *
1169 * (b), (c) and (d) are implemented through SsaLivenessAnalysis::ShouldBeLiveForEnvironment.
1170 */
1171 class SsaLivenessAnalysis : public ValueObject {
1172 public:
SsaLivenessAnalysis(HGraph * graph,CodeGenerator * codegen,ScopedArenaAllocator * allocator)1173 SsaLivenessAnalysis(HGraph* graph, CodeGenerator* codegen, ScopedArenaAllocator* allocator)
1174 : graph_(graph),
1175 codegen_(codegen),
1176 allocator_(allocator),
1177 block_infos_(graph->GetBlocks().size(),
1178 nullptr,
1179 allocator_->Adapter(kArenaAllocSsaLiveness)),
1180 instructions_from_ssa_index_(allocator_->Adapter(kArenaAllocSsaLiveness)),
1181 instructions_from_lifetime_position_(allocator_->Adapter(kArenaAllocSsaLiveness)),
1182 number_of_ssa_values_(0) {
1183 }
1184
1185 void Analyze();
1186
GetLiveInSet(const HBasicBlock & block)1187 BitVector* GetLiveInSet(const HBasicBlock& block) const {
1188 return &block_infos_[block.GetBlockId()]->live_in_;
1189 }
1190
GetLiveOutSet(const HBasicBlock & block)1191 BitVector* GetLiveOutSet(const HBasicBlock& block) const {
1192 return &block_infos_[block.GetBlockId()]->live_out_;
1193 }
1194
GetKillSet(const HBasicBlock & block)1195 BitVector* GetKillSet(const HBasicBlock& block) const {
1196 return &block_infos_[block.GetBlockId()]->kill_;
1197 }
1198
GetInstructionFromSsaIndex(size_t index)1199 HInstruction* GetInstructionFromSsaIndex(size_t index) const {
1200 return instructions_from_ssa_index_[index];
1201 }
1202
GetInstructionFromPosition(size_t index)1203 HInstruction* GetInstructionFromPosition(size_t index) const {
1204 return instructions_from_lifetime_position_[index];
1205 }
1206
GetBlockFromPosition(size_t index)1207 HBasicBlock* GetBlockFromPosition(size_t index) const {
1208 HInstruction* instruction = GetInstructionFromPosition(index);
1209 if (instruction == nullptr) {
1210 // If we are at a block boundary, get the block following.
1211 instruction = GetInstructionFromPosition(index + 1);
1212 }
1213 return instruction->GetBlock();
1214 }
1215
IsAtBlockBoundary(size_t index)1216 bool IsAtBlockBoundary(size_t index) const {
1217 return GetInstructionFromPosition(index) == nullptr;
1218 }
1219
GetTempUser(LiveInterval * temp)1220 HInstruction* GetTempUser(LiveInterval* temp) const {
1221 // A temporary shares the same lifetime start as the instruction that requires it.
1222 DCHECK(temp->IsTemp());
1223 HInstruction* user = GetInstructionFromPosition(temp->GetStart() / 2);
1224 DCHECK_EQ(user, temp->GetUses().front().GetUser());
1225 return user;
1226 }
1227
GetTempIndex(LiveInterval * temp)1228 size_t GetTempIndex(LiveInterval* temp) const {
1229 // We use the input index to store the index of the temporary in the user's temporary list.
1230 DCHECK(temp->IsTemp());
1231 return temp->GetUses().front().GetInputIndex();
1232 }
1233
GetMaxLifetimePosition()1234 size_t GetMaxLifetimePosition() const {
1235 return instructions_from_lifetime_position_.size() * 2 - 1;
1236 }
1237
GetNumberOfSsaValues()1238 size_t GetNumberOfSsaValues() const {
1239 return number_of_ssa_values_;
1240 }
1241
1242 static constexpr const char* kLivenessPassName = "liveness";
1243
1244 private:
1245 // Give an SSA number to each instruction that defines a value used by another instruction,
1246 // and setup the lifetime information of each instruction and block.
1247 void NumberInstructions();
1248
1249 // Compute live ranges of instructions, as well as live_in, live_out and kill sets.
1250 void ComputeLiveness();
1251
1252 // Compute the live ranges of instructions, as well as the initial live_in, live_out and
1253 // kill sets, that do not take into account backward branches.
1254 void ComputeLiveRanges();
1255
1256 // After computing the initial sets, this method does a fixed point
1257 // calculation over the live_in and live_out set to take into account
1258 // backwards branches.
1259 void ComputeLiveInAndLiveOutSets();
1260
1261 // Update the live_in set of the block and returns whether it has changed.
1262 bool UpdateLiveIn(const HBasicBlock& block);
1263
1264 // Update the live_out set of the block and returns whether it has changed.
1265 bool UpdateLiveOut(const HBasicBlock& block);
1266
1267 static void ProcessEnvironment(HInstruction* instruction,
1268 HInstruction* actual_user,
1269 BitVector* live_in);
1270 static void RecursivelyProcessInputs(HInstruction* instruction,
1271 HInstruction* actual_user,
1272 BitVector* live_in);
1273
1274 // Returns whether `instruction` in an HEnvironment held by `env_holder`
1275 // should be kept live by the HEnvironment.
ShouldBeLiveForEnvironment(HInstruction * env_holder,HInstruction * instruction)1276 static bool ShouldBeLiveForEnvironment(HInstruction* env_holder, HInstruction* instruction) {
1277 DCHECK(instruction != nullptr);
1278 // A value that's not live in compiled code may still be needed in interpreter,
1279 // due to code motion, etc.
1280 if (env_holder->IsDeoptimize()) return true;
1281 // A value live at a throwing instruction in a try block may be copied by
1282 // the exception handler to its location at the top of the catch block.
1283 if (env_holder->CanThrowIntoCatchBlock()) return true;
1284 HGraph* graph = instruction->GetBlock()->GetGraph();
1285 if (graph->IsDebuggable()) return true;
1286 // When compiling in OSR mode, all loops in the compiled method may be entered
1287 // from the interpreter via SuspendCheck; thus we need to preserve the environment.
1288 if (env_holder->IsSuspendCheck() && graph->IsCompilingOsr()) return true;
1289 if (graph -> IsDeadReferenceSafe()) return false;
1290 return instruction->GetType() == DataType::Type::kReference;
1291 }
1292
CheckNoLiveInIrreducibleLoop(const HBasicBlock & block)1293 void CheckNoLiveInIrreducibleLoop(const HBasicBlock& block) const {
1294 if (!block.IsLoopHeader() || !block.GetLoopInformation()->IsIrreducible()) {
1295 return;
1296 }
1297 BitVector* live_in = GetLiveInSet(block);
1298 // To satisfy our liveness algorithm, we need to ensure loop headers of
1299 // irreducible loops do not have any live-in instructions, except constants
1300 // and the current method, which can be trivially re-materialized.
1301 for (uint32_t idx : live_in->Indexes()) {
1302 HInstruction* instruction = GetInstructionFromSsaIndex(idx);
1303 DCHECK(instruction->GetBlock()->IsEntryBlock()) << instruction->DebugName();
1304 DCHECK(!instruction->IsParameterValue());
1305 DCHECK(instruction->IsCurrentMethod() || instruction->IsConstant())
1306 << instruction->DebugName();
1307 }
1308 }
1309
1310 HGraph* const graph_;
1311 CodeGenerator* const codegen_;
1312
1313 // Use a local ScopedArenaAllocator for allocating memory.
1314 // This allocator must remain alive while doing register allocation.
1315 ScopedArenaAllocator* const allocator_;
1316
1317 ScopedArenaVector<BlockInfo*> block_infos_;
1318
1319 // Temporary array used when computing live_in, live_out, and kill sets.
1320 ScopedArenaVector<HInstruction*> instructions_from_ssa_index_;
1321
1322 // Temporary array used when inserting moves in the graph.
1323 ScopedArenaVector<HInstruction*> instructions_from_lifetime_position_;
1324 size_t number_of_ssa_values_;
1325
1326 ART_FRIEND_TEST(RegisterAllocatorTest, SpillInactive);
1327 ART_FRIEND_TEST(RegisterAllocatorTest, FreeUntil);
1328
1329 DISALLOW_COPY_AND_ASSIGN(SsaLivenessAnalysis);
1330 };
1331
1332 } // namespace art
1333
1334 #endif // ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_
1335