/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_
#define ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_

#include <iostream>

#include "nodes.h"

namespace art {

class CodeGenerator;
class SsaLivenessAnalysis;

static constexpr int kNoRegister = -1;

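// Per-block liveness information: the live_in, live_out and kill sets are bit
// vectors with one bit per SSA value (see SsaLivenessAnalysis below).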
class BlockInfo : public ArenaObject<kArenaAllocMisc> {
 public:
  BlockInfo(ArenaAllocator* allocator, const HBasicBlock& block, size_t number_of_ssa_values)
      : block_(block),
        live_in_(allocator, number_of_ssa_values, false),
        live_out_(allocator, number_of_ssa_values, false),
        kill_(allocator, number_of_ssa_values, false) {
    UNUSED(block_);
    live_in_.ClearAllBits();
    live_out_.ClearAllBits();
    kill_.ClearAllBits();
  }

 private:
  const HBasicBlock& block_;
  ArenaBitVector live_in_;
  ArenaBitVector live_out_;
  ArenaBitVector kill_;

  friend class SsaLivenessAnalysis;

  DISALLOW_COPY_AND_ASSIGN(BlockInfo);
};

/**
 * A live range contains the start and end of a range where an instruction or a temporary
 * is live.
 */
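// Ranges are half-open intervals of lifetime positions: for example, a value
// live from position 4 up to (but not including) position 10 is represented as
// [4, 10), which is also how `Dump` prints it.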
class LiveRange FINAL : public ArenaObject<kArenaAllocMisc> {
 public:
  LiveRange(size_t start, size_t end, LiveRange* next) : start_(start), end_(end), next_(next) {
    DCHECK_LT(start, end);
    DCHECK(next_ == nullptr || next_->GetStart() > GetEnd());
  }

  size_t GetStart() const { return start_; }
  size_t GetEnd() const { return end_; }
  LiveRange* GetNext() const { return next_; }

  bool IntersectsWith(const LiveRange& other) const {
    return (start_ >= other.start_ && start_ < other.end_)
        || (other.start_ >= start_ && other.start_ < end_);
  }

  bool IsBefore(const LiveRange& other) const {
    return end_ <= other.start_;
  }

  void Dump(std::ostream& stream) const {
    stream << "[" << start_ << ", " << end_ << ")";
  }

  LiveRange* Dup(ArenaAllocator* allocator) const {
    return new (allocator) LiveRange(
        start_, end_, next_ == nullptr ? nullptr : next_->Dup(allocator));
  }

  LiveRange* GetLastRange() {
    return next_ == nullptr ? this : next_->GetLastRange();
  }

 private:
  size_t start_;
  size_t end_;
  LiveRange* next_;

  friend class LiveInterval;

  DISALLOW_COPY_AND_ASSIGN(LiveRange);
};

/**
 * A use position represents a live interval use at a given position.
 */
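// A use records its consuming instruction (or environment), the input index
// within that user, and the lifetime position of the use. Synthesized uses
// have a null user; they are created at loop back edges by
// LiveInterval::AddBackEdgeUses.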
class UsePosition : public ArenaObject<kArenaAllocMisc> {
 public:
  UsePosition(HInstruction* user,
              HEnvironment* environment,
              size_t input_index,
              size_t position,
              UsePosition* next)
      : user_(user),
        environment_(environment),
        input_index_(input_index),
        position_(position),
        next_(next) {
    DCHECK((user == nullptr)
        || user->IsPhi()
        || (GetPosition() == user->GetLifetimePosition() + 1)
        || (GetPosition() == user->GetLifetimePosition()));
    DCHECK(next_ == nullptr || next->GetPosition() >= GetPosition());
  }

  static constexpr size_t kNoInput = -1;

  size_t GetPosition() const { return position_; }

  UsePosition* GetNext() const { return next_; }
  void SetNext(UsePosition* next) { next_ = next; }

  HInstruction* GetUser() const { return user_; }

  bool GetIsEnvironment() const { return environment_ != nullptr; }
  bool IsSynthesized() const { return user_ == nullptr; }

  size_t GetInputIndex() const { return input_index_; }

  void Dump(std::ostream& stream) const {
    stream << position_;
  }

  HLoopInformation* GetLoopInformation() const {
    return user_->GetBlock()->GetLoopInformation();
  }

  UsePosition* Dup(ArenaAllocator* allocator) const {
    return new (allocator) UsePosition(
        user_, environment_, input_index_, position_,
        next_ == nullptr ? nullptr : next_->Dup(allocator));
  }

  bool RequiresRegister() const {
    if (GetIsEnvironment()) return false;
    if (IsSynthesized()) return false;
    Location location = GetUser()->GetLocations()->InAt(GetInputIndex());
    return location.IsUnallocated()
        && (location.GetPolicy() == Location::kRequiresRegister
            || location.GetPolicy() == Location::kRequiresFpuRegister);
  }

 private:
  HInstruction* const user_;
  HEnvironment* const environment_;
  const size_t input_index_;
  const size_t position_;
  UsePosition* next_;

  DISALLOW_COPY_AND_ASSIGN(UsePosition);
};

class SafepointPosition : public ArenaObject<kArenaAllocMisc> {
 public:
  explicit SafepointPosition(HInstruction* instruction)
      : instruction_(instruction),
        next_(nullptr) {}

  void SetNext(SafepointPosition* next) {
    next_ = next;
  }

  size_t GetPosition() const {
    return instruction_->GetLifetimePosition();
  }

  SafepointPosition* GetNext() const {
    return next_;
  }

  LocationSummary* GetLocations() const {
    return instruction_->GetLocations();
  }

  HInstruction* GetInstruction() const {
    return instruction_;
  }

 private:
  HInstruction* const instruction_;
  SafepointPosition* next_;

  DISALLOW_COPY_AND_ASSIGN(SafepointPosition);
};

/**
 * An interval is a list of disjoint live ranges where an instruction is live.
 * Each instruction that has uses gets an interval.
 */
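// Illustrative sketch (positions are made up, not necessarily what the
// analysis would compute): a value live around positions 10-13 and again
// around 22-25 holds two chained LiveRange nodes, e.g. [10, 14) [22, 26),
// separated by a lifetime hole.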
class LiveInterval : public ArenaObject<kArenaAllocMisc> {
 public:
  static LiveInterval* MakeInterval(ArenaAllocator* allocator,
                                    Primitive::Type type,
                                    HInstruction* instruction = nullptr) {
    return new (allocator) LiveInterval(allocator, type, instruction);
  }

  static LiveInterval* MakeSlowPathInterval(ArenaAllocator* allocator, HInstruction* instruction) {
    return new (allocator) LiveInterval(
        allocator, Primitive::kPrimVoid, instruction, false, kNoRegister, false, true);
  }

  static LiveInterval* MakeFixedInterval(ArenaAllocator* allocator, int reg, Primitive::Type type) {
    return new (allocator) LiveInterval(allocator, type, nullptr, true, reg, false);
  }

  static LiveInterval* MakeTempInterval(ArenaAllocator* allocator, Primitive::Type type) {
    return new (allocator) LiveInterval(allocator, type, nullptr, false, kNoRegister, true);
  }

  bool IsFixed() const { return is_fixed_; }
  bool IsTemp() const { return is_temp_; }
  bool IsSlowPathSafepoint() const { return is_slow_path_safepoint_; }
  // This interval is the result of a split.
  bool IsSplit() const { return parent_ != this; }

  void AddTempUse(HInstruction* instruction, size_t temp_index) {
    DCHECK(IsTemp());
    DCHECK(first_use_ == nullptr) << "A temporary can only have one user";
    DCHECK(first_env_use_ == nullptr) << "A temporary cannot have an environment user";
    size_t position = instruction->GetLifetimePosition();
    first_use_ = new (allocator_) UsePosition(
        instruction, /* environment */ nullptr, temp_index, position, first_use_);
    AddRange(position, position + 1);
  }

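  // Inputs are normally considered live until just after their user (hence the
  // `position + 1` below); fixed inputs and output-same-as-input cases instead
  // die at the instruction itself, as explained in the comment inside.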
  void AddUse(HInstruction* instruction,
              HEnvironment* environment,
              size_t input_index,
              bool keep_alive = false) {
    // Set the use within the instruction.
    bool is_environment = (environment != nullptr);
    size_t position = instruction->GetLifetimePosition() + 1;
    LocationSummary* locations = instruction->GetLocations();
    if (!is_environment) {
      if (locations->IsFixedInput(input_index) || locations->OutputUsesSameAs(input_index)) {
        // For fixed inputs and output-same-as-input, the register allocator
        // requires inputs to die at the instruction, so that input moves use the
        // location of the input just before that instruction (and not potential moves due
        // to splitting).
        position = instruction->GetLifetimePosition();
      } else if (!locations->InAt(input_index).IsValid()) {
        return;
      }
    }

    if (!is_environment && instruction->IsInLoop()) {
      AddBackEdgeUses(*instruction->GetBlock());
    }

    DCHECK(position == instruction->GetLifetimePosition()
           || position == instruction->GetLifetimePosition() + 1);

    if ((first_use_ != nullptr)
        && (first_use_->GetUser() == instruction)
        && (first_use_->GetPosition() < position)) {
      // The user uses the instruction multiple times, and one use dies before the other.
      // We update the use list so that the latter is first.
      DCHECK(!is_environment);
      UsePosition* cursor = first_use_;
      while ((cursor->GetNext() != nullptr) && (cursor->GetNext()->GetPosition() < position)) {
        cursor = cursor->GetNext();
      }
      DCHECK(first_use_->GetPosition() + 1 == position);
      UsePosition* new_use = new (allocator_) UsePosition(
          instruction, environment, input_index, position, cursor->GetNext());
      cursor->SetNext(new_use);
      if (first_range_->GetEnd() == first_use_->GetPosition()) {
        first_range_->end_ = position;
      }
      return;
    }

    if (is_environment) {
      first_env_use_ = new (allocator_) UsePosition(
          instruction, environment, input_index, position, first_env_use_);
    } else {
      first_use_ = new (allocator_) UsePosition(
          instruction, environment, input_index, position, first_use_);
    }

    if (is_environment && !keep_alive) {
      // If this environment use does not keep the instruction live, it does not
      // affect the live range of that instruction.
      return;
    }

    size_t start_block_position = instruction->GetBlock()->GetLifetimeStart();
    if (first_range_ == nullptr) {
      // First time we see a use of that interval.
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start_block_position, position, nullptr);
    } else if (first_range_->GetStart() == start_block_position) {
      // There is a use later in the same block or in a following block.
      // Note that in such a case, `AddRange` for the whole block has been called
      // before arriving in this method, and this is the reason the start of
      // `first_range_` is before the given `position`.
      DCHECK_LE(position, first_range_->GetEnd());
    } else {
      DCHECK(first_range_->GetStart() > position);
      // There is a hole in the interval. Create a new range.
      // Note that the start of `first_range_` can be equal to the lifetime end of the
      // block containing `position`: two blocks having adjacent lifetime positions are
      // not necessarily predecessor/successor. When two blocks are predecessor/successor,
      // the liveness algorithm has called `AddRange` before arriving in this method, and
      // the `first_range_->GetStart() == start_block_position` check above would succeed.
      first_range_ = range_search_start_ =
          new (allocator_) LiveRange(start_block_position, position, first_range_);
    }
  }

  void AddPhiUse(HInstruction* instruction, size_t input_index, HBasicBlock* block) {
    DCHECK(instruction->IsPhi());
    if (block->IsInLoop()) {
      AddBackEdgeUses(*block);
    }
    first_use_ = new (allocator_) UsePosition(
        instruction, /* environment */ nullptr, input_index, block->GetLifetimeEnd(), first_use_);
  }

  void AddRange(size_t start, size_t end) {
    if (first_range_ == nullptr) {
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start, end, first_range_);
    } else if (first_range_->GetStart() == end) {
      // There is a use in the following block.
      first_range_->start_ = start;
    } else if (first_range_->GetStart() == start && first_range_->GetEnd() == end) {
      DCHECK(is_fixed_);
    } else {
      DCHECK_GT(first_range_->GetStart(), end);
      // There is a hole in the interval. Create a new range.
      first_range_ = range_search_start_ = new (allocator_) LiveRange(start, end, first_range_);
    }
  }

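  // Covers the whole loop, i.e. positions [start, end), with a single range,
  // merging into it any ranges already created for blocks inside the loop body.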
  void AddLoopRange(size_t start, size_t end) {
    DCHECK(first_range_ != nullptr);
    DCHECK_LE(start, first_range_->GetStart());
    // Find the range that covers the positions after the loop.
    LiveRange* after_loop = first_range_;
    LiveRange* last_in_loop = nullptr;
    while (after_loop != nullptr && after_loop->GetEnd() < end) {
      DCHECK_LE(start, after_loop->GetStart());
      last_in_loop = after_loop;
      after_loop = after_loop->GetNext();
    }
    if (after_loop == nullptr) {
      // Uses are only in the loop.
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start, end, nullptr);
    } else if (after_loop->GetStart() <= end) {
      first_range_ = range_search_start_ = after_loop;
      // There are uses after the loop.
      first_range_->start_ = start;
    } else {
      // The use after the loop is after a lifetime hole.
      DCHECK(last_in_loop != nullptr);
      first_range_ = range_search_start_ = last_in_loop;
      first_range_->start_ = start;
      first_range_->end_ = end;
    }
  }

  bool HasSpillSlot() const { return spill_slot_ != kNoSpillSlot; }
  void SetSpillSlot(int slot) {
    DCHECK(!is_fixed_);
    DCHECK(!is_temp_);
    spill_slot_ = slot;
  }
  int GetSpillSlot() const { return spill_slot_; }

  void SetFrom(size_t from) {
    if (first_range_ != nullptr) {
      first_range_->start_ = from;
    } else {
      // Instruction without uses.
      DCHECK(!defined_by_->HasNonEnvironmentUses());
      DCHECK(from == defined_by_->GetLifetimePosition());
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(from, from + 2, nullptr);
    }
  }

  LiveInterval* GetParent() const { return parent_; }

  // Returns whether this interval is the parent interval, that is, the interval
  // that starts where the HInstruction is defined.
  bool IsParent() const { return parent_ == this; }

  LiveRange* GetFirstRange() const { return first_range_; }
  LiveRange* GetLastRange() const { return last_range_; }

  int GetRegister() const { return register_; }
  void SetRegister(int reg) { register_ = reg; }
  void ClearRegister() { register_ = kNoRegister; }
  bool HasRegister() const { return register_ != kNoRegister; }

  bool IsDeadAt(size_t position) const {
    return GetEnd() <= position;
  }

  bool IsDefinedAt(size_t position) const {
    return GetStart() <= position && !IsDeadAt(position);
  }

  // Returns true if the interval contains a LiveRange covering `position`.
  // The range at or immediately after the current position of linear scan
  // is cached for better performance. If `position` can be smaller than
  // that, CoversSlow should be used instead.
  bool Covers(size_t position) {
    LiveRange* candidate = FindRangeAtOrAfter(position, range_search_start_);
    range_search_start_ = candidate;
    return (candidate != nullptr && candidate->GetStart() <= position);
  }

  // Same as Covers but always tests all ranges.
  bool CoversSlow(size_t position) const {
    LiveRange* candidate = FindRangeAtOrAfter(position, first_range_);
    return candidate != nullptr && candidate->GetStart() <= position;
  }

  // Returns the first intersection of this interval with `current`, which
  // must be the interval currently being allocated by linear scan.
  size_t FirstIntersectionWith(LiveInterval* current) const {
    // Find the first range after the start of `current`. We use the search
    // cache to improve performance.
    DCHECK(GetStart() <= current->GetStart() || IsFixed());
    LiveRange* other_range = current->first_range_;
    LiveRange* my_range = FindRangeAtOrAfter(other_range->GetStart(), range_search_start_);
    if (my_range == nullptr) {
      return kNoLifetime;
    }

    // Advance both intervals and find the first matching range start in
    // this interval.
    do {
      if (my_range->IsBefore(*other_range)) {
        my_range = my_range->GetNext();
        if (my_range == nullptr) {
          return kNoLifetime;
        }
      } else if (other_range->IsBefore(*my_range)) {
        other_range = other_range->GetNext();
        if (other_range == nullptr) {
          return kNoLifetime;
        }
      } else {
        DCHECK(my_range->IntersectsWith(*other_range));
        return std::max(my_range->GetStart(), other_range->GetStart());
      }
    } while (true);
  }

  size_t GetStart() const {
    return first_range_->GetStart();
  }

  size_t GetEnd() const {
    return last_range_->GetEnd();
  }

  size_t FirstRegisterUseAfter(size_t position) const {
    if (is_temp_) {
      return position == GetStart() ? position : kNoLifetime;
    }

    if (IsDefiningPosition(position) && DefinitionRequiresRegister()) {
      return position;
    }

    UsePosition* use = first_use_;
    size_t end = GetEnd();
    while (use != nullptr && use->GetPosition() <= end) {
      size_t use_position = use->GetPosition();
      if (use_position > position) {
        if (use->RequiresRegister()) {
          return use_position;
        }
      }
      use = use->GetNext();
    }
    return kNoLifetime;
  }

  size_t FirstRegisterUse() const {
    return FirstRegisterUseAfter(GetStart());
  }

  size_t FirstUseAfter(size_t position) const {
    if (is_temp_) {
      return position == GetStart() ? position : kNoLifetime;
    }

    if (IsDefiningPosition(position)) {
      DCHECK(defined_by_->GetLocations()->Out().IsValid());
      return position;
    }

    UsePosition* use = first_use_;
    size_t end = GetEnd();
    while (use != nullptr && use->GetPosition() <= end) {
      size_t use_position = use->GetPosition();
      if (use_position > position) {
        return use_position;
      }
      use = use->GetNext();
    }
    return kNoLifetime;
  }

  UsePosition* GetFirstUse() const {
    return first_use_;
  }

  UsePosition* GetFirstEnvironmentUse() const {
    return first_env_use_;
  }

  Primitive::Type GetType() const {
    return type_;
  }

  HInstruction* GetDefinedBy() const {
    return defined_by_;
  }

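  // Returns the last safepoint whose position is strictly before `position`,
  // or null if there is no such safepoint.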
  SafepointPosition* FindSafepointJustBefore(size_t position) const {
    for (SafepointPosition* safepoint = first_safepoint_, *previous = nullptr;
         safepoint != nullptr;
         previous = safepoint, safepoint = safepoint->GetNext()) {
      if (safepoint->GetPosition() >= position) return previous;
    }
    return last_safepoint_;
  }

  /**
   * Split this interval at `position`. This interval is changed to:
   * [start ... position).
   *
   * The new interval covers:
   * [position ... end)
   */
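  // For example (illustrative positions): splitting [4, 10) [14, 20) at
  // position 16 leaves [4, 10) [14, 16) in this interval and returns a new
  // sibling holding [16, 20).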
  LiveInterval* SplitAt(size_t position) {
    DCHECK(!is_temp_);
    DCHECK(!is_fixed_);
    DCHECK_GT(position, GetStart());

    if (GetEnd() <= position) {
      // This range dies before `position`, no need to split.
      return nullptr;
    }

    LiveInterval* new_interval = new (allocator_) LiveInterval(allocator_, type_);
    SafepointPosition* new_last_safepoint = FindSafepointJustBefore(position);
    if (new_last_safepoint == nullptr) {
      new_interval->first_safepoint_ = first_safepoint_;
      new_interval->last_safepoint_ = last_safepoint_;
      first_safepoint_ = last_safepoint_ = nullptr;
    } else if (last_safepoint_ != new_last_safepoint) {
      new_interval->last_safepoint_ = last_safepoint_;
      new_interval->first_safepoint_ = new_last_safepoint->GetNext();
      DCHECK(new_interval->first_safepoint_ != nullptr);
      last_safepoint_ = new_last_safepoint;
      last_safepoint_->SetNext(nullptr);
    }

    new_interval->next_sibling_ = next_sibling_;
    next_sibling_ = new_interval;
    new_interval->parent_ = parent_;

    new_interval->first_use_ = first_use_;
    new_interval->first_env_use_ = first_env_use_;
    LiveRange* current = first_range_;
    LiveRange* previous = nullptr;
    // Iterate over the ranges, and either find a range that covers this position, or
    // two ranges on either side of this position (that is, the position is in a lifetime hole).
    do {
      if (position >= current->GetEnd()) {
        // Move to next range.
        previous = current;
        current = current->next_;
      } else if (position <= current->GetStart()) {
        // If the previous range did not cover this position, we know position is in
        // a lifetime hole. We can just break the first_range_ and last_range_ links
        // and return the new interval.
        DCHECK(previous != nullptr);
        DCHECK(current != first_range_);
        new_interval->last_range_ = last_range_;
        last_range_ = previous;
        previous->next_ = nullptr;
        new_interval->first_range_ = current;
        if (range_search_start_ != nullptr && range_search_start_->GetEnd() >= current->GetEnd()) {
          // Search start point is inside `new_interval`. Change it to null
          // (i.e. the end of the interval) in the original interval.
          range_search_start_ = nullptr;
        }
        new_interval->range_search_start_ = new_interval->first_range_;
        return new_interval;
      } else {
        // This range covers position. We create a new last_range_ for this interval
        // that covers current->GetStart() up to position. We also shorten the current
        // range and make it the first range of the new interval.
        DCHECK(position < current->GetEnd() && position > current->GetStart());
        new_interval->last_range_ = last_range_;
        last_range_ = new (allocator_) LiveRange(current->start_, position, nullptr);
        if (previous != nullptr) {
          previous->next_ = last_range_;
        } else {
          first_range_ = last_range_;
        }
        new_interval->first_range_ = current;
        current->start_ = position;
        if (range_search_start_ != nullptr && range_search_start_->GetEnd() >= current->GetEnd()) {
          // Search start point is inside `new_interval`. Change it to `last_range_`
          // in the original interval. This is conservative but always correct.
          range_search_start_ = last_range_;
        }
        new_interval->range_search_start_ = new_interval->first_range_;
        return new_interval;
      }
    } while (current != nullptr);

    LOG(FATAL) << "Unreachable";
    return nullptr;
  }

  bool StartsBeforeOrAt(LiveInterval* other) const {
    return GetStart() <= other->GetStart();
  }

  bool StartsAfter(LiveInterval* other) const {
    return GetStart() > other->GetStart();
  }

  void Dump(std::ostream& stream) const {
    stream << "ranges: { ";
    LiveRange* current = first_range_;
    while (current != nullptr) {
      current->Dump(stream);
      stream << " ";
      current = current->GetNext();
    }
    stream << "}, uses: { ";
    UsePosition* use = first_use_;
    if (use != nullptr) {
      do {
        use->Dump(stream);
        stream << " ";
      } while ((use = use->GetNext()) != nullptr);
    }
    stream << "}, { ";
    use = first_env_use_;
    if (use != nullptr) {
      do {
        use->Dump(stream);
        stream << " ";
      } while ((use = use->GetNext()) != nullptr);
    }
    stream << "}";
    stream << " is_fixed: " << is_fixed_ << ", is_split: " << IsSplit();
    stream << " is_low: " << IsLowInterval();
    stream << " is_high: " << IsHighInterval();
  }

  LiveInterval* GetNextSibling() const { return next_sibling_; }
  LiveInterval* GetLastSibling() {
    LiveInterval* result = this;
    while (result->next_sibling_ != nullptr) {
      result = result->next_sibling_;
    }
    return result;
  }

  // Returns the first register hint that is free at least up to the position
  // where it would be used, according to the values in `free_until`. If none
  // is found, returns `kNoRegister`.
  int FindFirstRegisterHint(size_t* free_until, const SsaLivenessAnalysis& liveness) const;

  // If there is enough information at the definition site to pick a register
  // (for example, the output must be in the same register as the first input),
  // returns that register as a hint. Returns kNoRegister otherwise.
  int FindHintAtDefinition() const;

  // Returns whether the interval needs two (Dex virtual register size `kVRegSize`)
  // slots for spilling.
  bool NeedsTwoSpillSlots() const;

  bool IsFloatingPoint() const {
    return type_ == Primitive::kPrimFloat || type_ == Primitive::kPrimDouble;
  }

  // Converts the location of the interval to a `Location` object.
  Location ToLocation() const;

  // Returns the location of the interval at `position`, walking the chain of
  // siblings if needed.
  Location GetLocationAt(size_t position);

  // Finds the sibling that is defined at `position`.
  LiveInterval* GetSiblingAt(size_t position);

  // Returns whether `other` and `this` share the same kind of register.
  bool SameRegisterKind(Location other) const;
  bool SameRegisterKind(const LiveInterval& other) const {
    return IsFloatingPoint() == other.IsFloatingPoint();
  }

  bool HasHighInterval() const {
    return IsLowInterval();
  }

  bool HasLowInterval() const {
    return IsHighInterval();
  }

  LiveInterval* GetLowInterval() const {
    DCHECK(HasLowInterval());
    return high_or_low_interval_;
  }

  LiveInterval* GetHighInterval() const {
    DCHECK(HasHighInterval());
    return high_or_low_interval_;
  }

  bool IsHighInterval() const {
    return GetParent()->is_high_interval_;
  }

  bool IsLowInterval() const {
    return !IsHighInterval() && (GetParent()->high_or_low_interval_ != nullptr);
  }

  void SetLowInterval(LiveInterval* low) {
    DCHECK(IsHighInterval());
    high_or_low_interval_ = low;
  }

  void SetHighInterval(LiveInterval* high) {
    DCHECK(IsLowInterval());
    high_or_low_interval_ = high;
  }

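  // A value that needs a register pair (typically a 64-bit value on a 32-bit
  // target) gets a synthesized "high" twin created by AddHighInterval; the low
  // and high intervals point at each other through `high_or_low_interval_`.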
  void AddHighInterval(bool is_temp = false) {
    DCHECK(IsParent());
    DCHECK(!HasHighInterval());
    DCHECK(!HasLowInterval());
    high_or_low_interval_ = new (allocator_) LiveInterval(
        allocator_, type_, defined_by_, false, kNoRegister, is_temp, false, true);
    high_or_low_interval_->high_or_low_interval_ = this;
    if (first_range_ != nullptr) {
      high_or_low_interval_->first_range_ = first_range_->Dup(allocator_);
      high_or_low_interval_->last_range_ = high_or_low_interval_->first_range_->GetLastRange();
      high_or_low_interval_->range_search_start_ = high_or_low_interval_->first_range_;
    }
    if (first_use_ != nullptr) {
      high_or_low_interval_->first_use_ = first_use_->Dup(allocator_);
    }

    if (first_env_use_ != nullptr) {
      high_or_low_interval_->first_env_use_ = first_env_use_->Dup(allocator_);
    }
  }

  // Returns whether an interval, when it is non-split, is using
  // the same register as one of its inputs.
  bool IsUsingInputRegister() const {
    CHECK(kIsDebugBuild) << "Function should be used only for DCHECKs";
    if (defined_by_ != nullptr && !IsSplit()) {
      for (HInputIterator it(defined_by_); !it.Done(); it.Advance()) {
        LiveInterval* interval = it.Current()->GetLiveInterval();

        // Find the interval that covers `defined_by_`. Calls to this function
        // are made outside the linear scan, hence we need to use CoversSlow.
        while (interval != nullptr && !interval->CoversSlow(defined_by_->GetLifetimePosition())) {
          interval = interval->GetNextSibling();
        }

        // Check if both intervals have the same register, of the same kind.
        if (interval != nullptr
            && interval->SameRegisterKind(*this)
            && interval->GetRegister() == GetRegister()) {
          return true;
        }
      }
    }
    return false;
  }

  // Returns whether an interval, when it is non-split, can safely use
  // the same register as one of its inputs. Note that this method requires
  // IsUsingInputRegister() to be true.
  bool CanUseInputRegister() const {
    CHECK(kIsDebugBuild) << "Function should be used only for DCHECKs";
    DCHECK(IsUsingInputRegister());
    if (defined_by_ != nullptr && !IsSplit()) {
      LocationSummary* locations = defined_by_->GetLocations();
      if (locations->OutputCanOverlapWithInputs()) {
        return false;
      }
      for (HInputIterator it(defined_by_); !it.Done(); it.Advance()) {
        LiveInterval* interval = it.Current()->GetLiveInterval();

        // Find the interval that covers `defined_by_`. Calls to this function
        // are made outside the linear scan, hence we need to use CoversSlow.
        while (interval != nullptr && !interval->CoversSlow(defined_by_->GetLifetimePosition())) {
          interval = interval->GetNextSibling();
        }

        if (interval != nullptr
            && interval->SameRegisterKind(*this)
            && interval->GetRegister() == GetRegister()) {
          // We found the input that has the same register. Check if it is live after
          // `defined_by_`.
          return !interval->CoversSlow(defined_by_->GetLifetimePosition() + 1);
        }
      }
    }
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

  void AddSafepoint(HInstruction* instruction) {
    SafepointPosition* safepoint = new (allocator_) SafepointPosition(instruction);
    if (first_safepoint_ == nullptr) {
      first_safepoint_ = last_safepoint_ = safepoint;
    } else {
      DCHECK_LT(last_safepoint_->GetPosition(), safepoint->GetPosition());
      last_safepoint_->SetNext(safepoint);
      last_safepoint_ = safepoint;
    }
  }

  SafepointPosition* GetFirstSafepoint() const {
    return first_safepoint_;
  }

  // Resets the starting point for range-searching queries to the first range.
  // Intervals must be reset prior to starting a new linear scan over them.
  void ResetSearchCache() {
    range_search_start_ = first_range_;
  }

 private:
  LiveInterval(ArenaAllocator* allocator,
               Primitive::Type type,
               HInstruction* defined_by = nullptr,
               bool is_fixed = false,
               int reg = kNoRegister,
               bool is_temp = false,
               bool is_slow_path_safepoint = false,
               bool is_high_interval = false)
      : allocator_(allocator),
        first_range_(nullptr),
        last_range_(nullptr),
        range_search_start_(nullptr),
        first_safepoint_(nullptr),
        last_safepoint_(nullptr),
        first_use_(nullptr),
        first_env_use_(nullptr),
        type_(type),
        next_sibling_(nullptr),
        parent_(this),
        register_(reg),
        spill_slot_(kNoSpillSlot),
        is_fixed_(is_fixed),
        is_temp_(is_temp),
        is_slow_path_safepoint_(is_slow_path_safepoint),
        is_high_interval_(is_high_interval),
        high_or_low_interval_(nullptr),
        defined_by_(defined_by) {}

  // Searches for a LiveRange that either covers the given position or is the
  // first LiveRange starting after it. Returns null if no such LiveRange
  // exists. Ranges known to end before `position` can be skipped with `search_start`.
  LiveRange* FindRangeAtOrAfter(size_t position, LiveRange* search_start) const {
    if (kIsDebugBuild) {
      if (search_start != first_range_) {
        // If we are not searching the entire list of ranges, make sure we do
        // not skip the range we are searching for.
        if (search_start == nullptr) {
          DCHECK(IsDeadAt(position));
        } else if (search_start->GetStart() > position) {
          DCHECK_EQ(search_start, FindRangeAtOrAfter(position, first_range_));
        }
      }
    }

    LiveRange* range;
    for (range = search_start;
         range != nullptr && range->GetEnd() <= position;
         range = range->GetNext()) {
      continue;
    }
    return range;
  }

  bool DefinitionRequiresRegister() const {
    DCHECK(IsParent());
    LocationSummary* locations = defined_by_->GetLocations();
    Location location = locations->Out();
    // This interval is the first interval of the instruction. If the output
    // of the instruction requires a register, we return the position of that instruction
    // as the first register use.
    if (location.IsUnallocated()) {
      if ((location.GetPolicy() == Location::kRequiresRegister)
           || (location.GetPolicy() == Location::kSameAsFirstInput
               && (locations->InAt(0).IsRegister()
                   || locations->InAt(0).IsRegisterPair()
                   || locations->InAt(0).GetPolicy() == Location::kRequiresRegister))) {
        return true;
      } else if ((location.GetPolicy() == Location::kRequiresFpuRegister)
                 || (location.GetPolicy() == Location::kSameAsFirstInput
                     && (locations->InAt(0).IsFpuRegister()
                         || locations->InAt(0).IsFpuRegisterPair()
                         || locations->InAt(0).GetPolicy() == Location::kRequiresFpuRegister))) {
        return true;
      }
    } else if (location.IsRegister() || location.IsRegisterPair()) {
      return true;
    }
    return false;
  }

  bool IsDefiningPosition(size_t position) const {
    return IsParent() && (position == GetStart());
  }

  bool HasSynthesizeUseAt(size_t position) const {
    UsePosition* use = first_use_;
    while (use != nullptr) {
      size_t use_position = use->GetPosition();
      if ((use_position == position) && use->IsSynthesized()) {
        return true;
      }
      if (use_position > position) break;
      use = use->GetNext();
    }
    return false;
  }

  void AddBackEdgeUses(const HBasicBlock& block_at_use) {
    DCHECK(block_at_use.IsInLoop());
    // Add synthesized uses at the back edge of loops to help the register allocator.
    // Note that this method is called in decreasing liveness order, to facilitate adding
    // uses at the head of the `first_use_` linked list. Because below
    // we iterate from inner-most to outer-most, which is in increasing liveness order,
    // we need to take extra care of how the `first_use_` linked list is being updated.
    UsePosition* first_in_new_list = nullptr;
    UsePosition* last_in_new_list = nullptr;
    for (HLoopInformationOutwardIterator it(block_at_use);
         !it.Done();
         it.Advance()) {
      HLoopInformation* current = it.Current();
      if (GetDefinedBy()->GetLifetimePosition() >= current->GetHeader()->GetLifetimeStart()) {
        // This interval is defined in the loop. We can stop going outward.
        break;
      }

      // We're only adding a synthesized use at the last back edge. Adding synthesized uses on
      // all back edges is not necessary: anything used in the loop will have its use at the
      // last back edge. If we want branches in a loop to have better register allocation than
      // another branch, then it is the linear order we should change.
      size_t back_edge_use_position = current->GetLifetimeEnd();
      if ((first_use_ != nullptr) && (first_use_->GetPosition() <= back_edge_use_position)) {
        // There was a use already seen in this loop. Therefore the previous call to `AddUse`
        // already inserted the backedge use. We can stop going outward.
        DCHECK(HasSynthesizeUseAt(back_edge_use_position));
        break;
      }

      DCHECK(last_in_new_list == nullptr
             || back_edge_use_position > last_in_new_list->GetPosition());

      UsePosition* new_use = new (allocator_) UsePosition(
          /* user */ nullptr,
          /* environment */ nullptr,
          UsePosition::kNoInput,
          back_edge_use_position,
          /* next */ nullptr);

      if (last_in_new_list != nullptr) {
        // Going outward. The latest created use needs to point to the new use.
        last_in_new_list->SetNext(new_use);
      } else {
        // This is the inner-most loop.
        DCHECK_EQ(current, block_at_use.GetLoopInformation());
        first_in_new_list = new_use;
      }
      last_in_new_list = new_use;
    }
    // Link the newly created linked list with `first_use_`.
    if (last_in_new_list != nullptr) {
      last_in_new_list->SetNext(first_use_);
      first_use_ = first_in_new_list;
    }
  }

  ArenaAllocator* const allocator_;

  // Ranges of this interval. We need quick access to the last range to test
  // for liveness (see `IsDeadAt`).
  LiveRange* first_range_;
  LiveRange* last_range_;

  // The first range at or after the current position of a linear scan. It is
  // used to optimize range-searching queries.
  LiveRange* range_search_start_;

  // Safepoints where this interval is live.
  SafepointPosition* first_safepoint_;
  SafepointPosition* last_safepoint_;

  // Uses of this interval. Note that this linked list is shared amongst siblings.
  UsePosition* first_use_;
  UsePosition* first_env_use_;

  // The instruction type this interval corresponds to.
  const Primitive::Type type_;

  // Live interval that is the result of a split.
  LiveInterval* next_sibling_;

  // The first interval from which split intervals come.
  LiveInterval* parent_;

  // The register allocated to this interval.
  int register_;

  // The spill slot allocated to this interval.
  int spill_slot_;

  // Whether the interval is for a fixed register.
  const bool is_fixed_;

  // Whether the interval is for a temporary.
  const bool is_temp_;

  // Whether the interval is for a safepoint with a call on the slow path.
  const bool is_slow_path_safepoint_;

  // Whether this interval is a synthesized interval for a register pair.
  const bool is_high_interval_;

  // If this interval needs a register pair, the high or low equivalent.
  // `is_high_interval_` tells whether this holds the low or the high.
  LiveInterval* high_or_low_interval_;

  // The instruction represented by this interval.
  HInstruction* const defined_by_;

  static constexpr int kNoRegister = -1;
  static constexpr int kNoSpillSlot = -1;

  ART_FRIEND_TEST(RegisterAllocatorTest, SpillInactive);

  DISALLOW_COPY_AND_ASSIGN(LiveInterval);
};

/**
 * Analysis that computes the liveness of instructions:
 *
 * (a) Non-environment uses of an instruction always make
 *     the instruction live.
 * (b) Environment uses of an instruction whose type is
 *     object (that is, non-primitive) make the instruction live.
 *     This is due to having to keep alive objects that have
 *     finalizers deleting native objects.
 * (c) When the graph has the debuggable property, environment uses
 *     of an instruction that has a primitive type make the instruction live.
 *     If the graph does not have the debuggable property, the environment
 *     use has no effect, and may get a 'none' value after register allocation.
 *
 * (b) and (c) are implemented through SsaLivenessAnalysis::ShouldBeLiveForEnvironment.
 */
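// For example, a reference (Primitive::kPrimNot) used only by an HEnvironment
// is still kept live (rule (b)), whereas an int used only by an environment is
// kept live only for debuggable graphs or at deoptimization points (see
// ShouldBeLiveForEnvironment below).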
class SsaLivenessAnalysis : public ValueObject {
 public:
  SsaLivenessAnalysis(HGraph* graph, CodeGenerator* codegen)
      : graph_(graph),
        codegen_(codegen),
        block_infos_(graph->GetArena(), graph->GetBlocks().Size()),
        instructions_from_ssa_index_(graph->GetArena(), 0),
        instructions_from_lifetime_position_(graph->GetArena(), 0),
        number_of_ssa_values_(0) {
    block_infos_.SetSize(graph->GetBlocks().Size());
  }

  void Analyze();

  BitVector* GetLiveInSet(const HBasicBlock& block) const {
    return &block_infos_.Get(block.GetBlockId())->live_in_;
  }

  BitVector* GetLiveOutSet(const HBasicBlock& block) const {
    return &block_infos_.Get(block.GetBlockId())->live_out_;
  }

  BitVector* GetKillSet(const HBasicBlock& block) const {
    return &block_infos_.Get(block.GetBlockId())->kill_;
  }

  HInstruction* GetInstructionFromSsaIndex(size_t index) const {
    return instructions_from_ssa_index_.Get(index);
  }

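  // Lifetime positions advance by two per recorded entry (see
  // GetMaxLifetimePosition and GetTempUser, which divides a position by two to
  // obtain an index); entries at block boundaries are null (see
  // IsAtBlockBoundary and GetBlockFromPosition).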
GetInstructionFromPosition(size_t index)1123   HInstruction* GetInstructionFromPosition(size_t index) const {
1124     return instructions_from_lifetime_position_.Get(index);
1125   }
1126 
GetBlockFromPosition(size_t index)1127   HBasicBlock* GetBlockFromPosition(size_t index) const {
1128     HInstruction* instruction = GetInstructionFromPosition(index);
1129     if (instruction == nullptr) {
1130       // If we are at a block boundary, get the block following.
1131       instruction = GetInstructionFromPosition(index + 1);
1132     }
1133     return instruction->GetBlock();
1134   }
1135 
IsAtBlockBoundary(size_t index)1136   bool IsAtBlockBoundary(size_t index) const {
1137     return GetInstructionFromPosition(index) == nullptr;
1138   }
1139 
GetTempUser(LiveInterval * temp)1140   HInstruction* GetTempUser(LiveInterval* temp) const {
1141     // A temporary shares the same lifetime start as the instruction that requires it.
1142     DCHECK(temp->IsTemp());
1143     HInstruction* user = GetInstructionFromPosition(temp->GetStart() / 2);
1144     DCHECK_EQ(user, temp->GetFirstUse()->GetUser());
1145     return user;
1146   }
1147 
GetTempIndex(LiveInterval * temp)1148   size_t GetTempIndex(LiveInterval* temp) const {
1149     // We use the input index to store the index of the temporary in the user's temporary list.
1150     DCHECK(temp->IsTemp());
1151     return temp->GetFirstUse()->GetInputIndex();
1152   }
1153 
GetMaxLifetimePosition()1154   size_t GetMaxLifetimePosition() const {
1155     return instructions_from_lifetime_position_.Size() * 2 - 1;
1156   }
1157 
GetNumberOfSsaValues()1158   size_t GetNumberOfSsaValues() const {
1159     return number_of_ssa_values_;
1160   }
1161 
1162   static constexpr const char* kLivenessPassName = "liveness";
1163 
1164  private:
1165   // Linearize the graph so that:
1166   // (1): a block is always after its dominator,
1167   // (2): blocks of loops are contiguous.
1168   // This creates a natural and efficient ordering when visualizing live ranges.
1169   void LinearizeGraph();
1170 
  // Give an SSA number to each instruction that defines a value used by another instruction,
  // and set up the lifetime information of each instruction and block.
  void NumberInstructions();

  // Compute live ranges of instructions, as well as live_in, live_out and kill sets.
  void ComputeLiveness();

  // Compute the live ranges of instructions, as well as the initial live_in, live_out and
  // kill sets, that do not take into account backward branches.
  void ComputeLiveRanges();

  // After computing the initial sets, this method does a fixed point
  // calculation over the live_in and live_out sets to take into account
  // backward branches.
  void ComputeLiveInAndLiveOutSets();

  // Updates the live_in set of the block and returns whether it has changed.
  bool UpdateLiveIn(const HBasicBlock& block);

  // Updates the live_out set of the block and returns whether it has changed.
  bool UpdateLiveOut(const HBasicBlock& block);

  // Returns whether `instruction` in an HEnvironment held by `env_holder`
  // should be kept live by the HEnvironment.
  static bool ShouldBeLiveForEnvironment(HInstruction* env_holder,
                                         HInstruction* instruction) {
    if (instruction == nullptr) return false;
    // A value that's not live in compiled code may still be needed in interpreter,
    // due to code motion, etc.
    if (env_holder->IsDeoptimize()) return true;
    if (instruction->GetBlock()->GetGraph()->IsDebuggable()) return true;
    return instruction->GetType() == Primitive::kPrimNot;
  }

  HGraph* const graph_;
  CodeGenerator* const codegen_;
  GrowableArray<BlockInfo*> block_infos_;

  // Temporary array used when computing live_in, live_out, and kill sets.
  GrowableArray<HInstruction*> instructions_from_ssa_index_;

  // Temporary array used when inserting moves in the graph.
  GrowableArray<HInstruction*> instructions_from_lifetime_position_;
  size_t number_of_ssa_values_;

  ART_FRIEND_TEST(RegisterAllocatorTest, SpillInactive);

  DISALLOW_COPY_AND_ASSIGN(SsaLivenessAnalysis);
};

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_