/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
#define ART_COMPILER_OPTIMIZING_NODES_H_

#include <algorithm>
#include <array>
#include <type_traits>

#include "base/arena_bit_vector.h"
#include "base/arena_containers.h"
#include "base/arena_object.h"
#include "base/array_ref.h"
#include "base/iteration_range.h"
#include "base/quasi_atomic.h"
#include "base/stl_util.h"
#include "base/transform_array_ref.h"
#include "data_type.h"
#include "deoptimization_kind.h"
#include "dex/dex_file.h"
#include "dex/dex_file_types.h"
#include "dex/invoke_type.h"
#include "dex/method_reference.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "handle.h"
#include "handle_scope.h"
#include "intrinsics_enum.h"
#include "locations.h"
#include "mirror/class.h"
#include "offsets.h"
#include "utils/intrusive_forward_list.h"

namespace art {

class ArenaStack;
class GraphChecker;
class HBasicBlock;
class HConstructorFence;
class HCurrentMethod;
class HDoubleConstant;
class HEnvironment;
class HFloatConstant;
class HGraphBuilder;
class HGraphVisitor;
class HInstruction;
class HIntConstant;
class HInvoke;
class HLongConstant;
class HNullConstant;
class HParameterValue;
class HPhi;
class HSuspendCheck;
class HTryBoundary;
class LiveInterval;
class LocationSummary;
class SlowPathCode;
class SsaBuilder;

namespace mirror {
class DexCache;
}  // namespace mirror

static const int kDefaultNumberOfBlocks = 8;
static const int kDefaultNumberOfSuccessors = 2;
static const int kDefaultNumberOfPredecessors = 2;
static const int kDefaultNumberOfExceptionalPredecessors = 0;
static const int kDefaultNumberOfDominatedBlocks = 1;
static const int kDefaultNumberOfBackEdges = 1;

// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
static constexpr int32_t kMaxIntShiftDistance = 0x1f;
// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
static constexpr int32_t kMaxLongShiftDistance = 0x3f;

static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);

static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);

static constexpr uint32_t kNoDexPc = -1;

inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
  // For the purposes of the compiler, the dex files must actually be the same object
  // if we want to safely treat them as the same. This is especially important for JIT
  // as custom class loaders can open the same underlying file (or memory) multiple
  // times and provide different class resolution but no two class loaders should ever
  // use the same DexFile object - doing so is an unsupported hack that can lead to
  // all sorts of weird failures.
  return &lhs == &rhs;
}
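
// Illustrative sketch (not part of the original header): callers typically use
// IsSameDexFile() to decide whether dex indices from two contexts can be compared
// directly. `MergeMethodInfo` and its parameters below are hypothetical:
//
//   void MergeMethodInfo(const DexFile& caller_dex_file, const DexFile& callee_dex_file) {
//     if (IsSameDexFile(caller_dex_file, callee_dex_file)) {
//       // Indices refer to the same DexFile object and may be compared directly.
//     } else {
//       // Indices belong to different dex files; compare via MethodReference instead.
//     }
//   }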

enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
  // First and last aliases.
  kCondFirst = kCondEQ,
  kCondLast = kCondAE,
};

enum GraphAnalysisResult {
  kAnalysisSkipped,
  kAnalysisInvalidBytecode,
  kAnalysisFailThrowCatchLoop,
  kAnalysisFailAmbiguousArrayOp,
  kAnalysisSuccess,
};

template <typename T>
static inline typename std::make_unsigned<T>::type MakeUnsigned(T x) {
  return static_cast<typename std::make_unsigned<T>::type>(x);
}
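
// Illustrative note (not part of the original header): MakeUnsigned() is a thin wrapper
// around std::make_unsigned, useful when a shift or comparison must be performed on the
// unsigned representation of a value. For example:
//
//   int32_t value = -1;
//   uint32_t bits = MakeUnsigned(value);  // 0xFFFFFFFF; shifts on it are well-defined.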

class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Returns true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise. Aborts if neither of
  // these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HInstructionIteratorHandleChanges;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};

class ReferenceTypeInfo : ValueObject {
 public:
  typedef Handle<mirror::Class> TypeHandle;

  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);

  static ReferenceTypeInfo Create(TypeHandle type_handle) REQUIRES_SHARED(Locks::mutator_lock_) {
    return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
  }

  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
    return ReferenceTypeInfo(type_handle, is_exact);
  }

  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }

  static bool IsValidHandle(TypeHandle handle) {
    return handle.GetReference() != nullptr;
  }

  bool IsValid() const {
    return IsValidHandle(type_handle_);
  }

  bool IsExact() const { return is_exact_; }

  bool IsObjectClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsObjectClass();
  }

  bool IsStringClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsStringClass();
  }

  bool IsObjectArray() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
  }

  bool IsInterface() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsInterface();
  }

  bool IsArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass();
  }

  bool IsPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsPrimitiveArray();
  }

  bool IsNonPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
  }

  bool CanArrayHold(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    if (!rti.IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
        rti.GetTypeHandle()->GetComponentType());
  }

  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }

  bool IsSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true if the type information provides the same amount of detail.
  // Note that it does not mean that the instructions have the same actual type
  // (because the type can be the result of a merge).
  bool IsEqual(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsValid() && !rti.IsValid()) {
      // Invalid types are equal.
      return true;
    }
    if (!IsValid() || !rti.IsValid()) {
      // One is valid, the other not.
      return false;
    }
    return IsExact() == rti.IsExact()
        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
  }

 private:
  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
      : type_handle_(type_handle), is_exact_(is_exact) { }

  // The class of the object.
  TypeHandle type_handle_;
  // Whether or not the type is exact or a superclass of the actual type.
  // Whether or not we have any information about this type.
  bool is_exact_;
};

std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);

// Control-flow graph of a method. Contains a list of basic blocks.
class HGraph : public ArenaObject<kArenaAllocGraph> {
 public:
  HGraph(ArenaAllocator* allocator,
         ArenaStack* arena_stack,
         const DexFile& dex_file,
         uint32_t method_idx,
         InstructionSet instruction_set,
         InvokeType invoke_type = kInvalidInvokeType,
         bool debuggable = false,
         bool osr = false,
         int start_instruction_id = 0)
      : allocator_(allocator),
        arena_stack_(arena_stack),
        blocks_(allocator->Adapter(kArenaAllocBlockList)),
        reverse_post_order_(allocator->Adapter(kArenaAllocReversePostOrder)),
        linear_order_(allocator->Adapter(kArenaAllocLinearOrder)),
        entry_block_(nullptr),
        exit_block_(nullptr),
        maximum_number_of_out_vregs_(0),
        number_of_vregs_(0),
        number_of_in_vregs_(0),
        temporaries_vreg_slots_(0),
        has_bounds_checks_(false),
        has_try_catch_(false),
        has_simd_(false),
        has_loops_(false),
        has_irreducible_loops_(false),
        debuggable_(debuggable),
        current_instruction_id_(start_instruction_id),
        dex_file_(dex_file),
        method_idx_(method_idx),
        invoke_type_(invoke_type),
        in_ssa_form_(false),
        number_of_cha_guards_(0),
        instruction_set_(instruction_set),
        cached_null_constant_(nullptr),
        cached_int_constants_(std::less<int32_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_float_constants_(std::less<int32_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_long_constants_(std::less<int64_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_double_constants_(std::less<int64_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_current_method_(nullptr),
        art_method_(nullptr),
        inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
        osr_(osr),
        cha_single_implementation_list_(allocator->Adapter(kArenaAllocCHA)) {
    blocks_.reserve(kDefaultNumberOfBlocks);
  }

  // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
  void InitializeInexactObjectRTI(VariableSizedHandleScope* handles);

  ArenaAllocator* GetAllocator() const { return allocator_; }
  ArenaStack* GetArenaStack() const { return arena_stack_; }
  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }

  bool IsInSsaForm() const { return in_ssa_form_; }
  void SetInSsaForm() { in_ssa_form_ = true; }

  HBasicBlock* GetEntryBlock() const { return entry_block_; }
  HBasicBlock* GetExitBlock() const { return exit_block_; }
  bool HasExitBlock() const { return exit_block_ != nullptr; }

  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }

  void AddBlock(HBasicBlock* block);

  void ComputeDominanceInformation();
  void ClearDominanceInformation();
  void ClearLoopInformation();
  void FindBackEdges(ArenaBitVector* visited);
  GraphAnalysisResult BuildDominatorTree();
  void SimplifyCFG();
  void SimplifyCatchBlocks();

  // Analyze all natural loops in this graph. Returns a code specifying that it
  // was successful or the reason for failure. The method will fail if a loop
  // is a throw-catch loop, i.e. the header is a catch block.
  GraphAnalysisResult AnalyzeLoops() const;

  // Iterate over blocks to compute try block membership. Needs reverse post
  // order and loop information.
  void ComputeTryBlockInformation();

  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
  // Returns the instruction to replace the invoke expression or null if the
  // invoke is for a void method. Note that the caller is responsible for replacing
  // and removing the invoke instruction.
  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);
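
  // Illustrative sketch (not part of the original header): a caller of InlineInto()
  // is expected to substitute and remove the invoke itself, roughly along these lines
  // (`invoke` and `outer_graph` are hypothetical locals):
  //
  //   HInstruction* return_value = callee_graph->InlineInto(outer_graph, invoke);
  //   if (return_value != nullptr) {
  //     invoke->ReplaceWith(return_value);  // Non-void callee: forward the result.
  //   }
  //   invoke->GetBlock()->RemoveInstruction(invoke);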

  // Update the loop and try membership of `block`, which was spawned from `reference`.
  // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
  // should be the new back edge.
  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
                                             HBasicBlock* reference,
                                             bool replace_if_back_edge);

  // Need to add a couple of blocks to test if the loop body is entered and
  // put deoptimization instructions, etc.
  void TransformLoopHeaderForBCE(HBasicBlock* header);

  // Adds a new loop directly after the loop with the given header and exit.
  // Returns the new preheader.
  HBasicBlock* TransformLoopForVectorization(HBasicBlock* header,
                                             HBasicBlock* body,
                                             HBasicBlock* exit);

  // Removes `block` from the graph. Assumes `block` has been disconnected from
  // other blocks and has no instructions or phis.
  void DeleteDeadEmptyBlock(HBasicBlock* block);

  // Splits the edge between `block` and `successor` while preserving the
  // indices in the predecessor/successor lists. If there are multiple edges
  // between the blocks, the lowest indices are used.
  // Returns the new block which is empty and has the same dex pc as `successor`.
  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);

  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
  void OrderLoopHeaderPredecessors(HBasicBlock* header);

  // Transform a loop into a format with a single preheader.
  //
  // Each phi in the header should be split: original one in the header should only hold
  // inputs reachable from the back edges and a single input from the preheader. The newly created
  // phi in the preheader should collate the inputs from the original multiple incoming blocks.
  //
  // Loops in the graph typically have a single preheader, so this method is used to "repair" loops
  // that no longer have this property.
  void TransformLoopToSinglePreheaderFormat(HBasicBlock* header);
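
  // Illustrative sketch (not part of the original header), assuming a header with two
  // non-back-edge predecessors P0 and P1 and one back edge B:
  //
  //   Before:   P0  P1  B           After:   P0  P1
  //              \  |  /                      \  /
  //              header                    preheader   (new phi collates P0/P1 inputs)
  //                                            |   B
  //                                            |  /
  //                                          header    (phi keeps preheader + back-edge inputs)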

  void SimplifyLoop(HBasicBlock* header);

  int32_t GetNextInstructionId() {
    CHECK_NE(current_instruction_id_, INT32_MAX);
    return current_instruction_id_++;
  }

  int32_t GetCurrentInstructionId() const {
    return current_instruction_id_;
  }

  void SetCurrentInstructionId(int32_t id) {
    CHECK_GE(id, current_instruction_id_);
    current_instruction_id_ = id;
  }

  uint16_t GetMaximumNumberOfOutVRegs() const {
    return maximum_number_of_out_vregs_;
  }

  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
    maximum_number_of_out_vregs_ = new_value;
  }

  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
  }

  void UpdateTemporariesVRegSlots(size_t slots) {
    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
  }

  size_t GetTemporariesVRegSlots() const {
    DCHECK(!in_ssa_form_);
    return temporaries_vreg_slots_;
  }

  void SetNumberOfVRegs(uint16_t number_of_vregs) {
    number_of_vregs_ = number_of_vregs;
  }

  uint16_t GetNumberOfVRegs() const {
    return number_of_vregs_;
  }

  void SetNumberOfInVRegs(uint16_t value) {
    number_of_in_vregs_ = value;
  }

  uint16_t GetNumberOfInVRegs() const {
    return number_of_in_vregs_;
  }

  uint16_t GetNumberOfLocalVRegs() const {
    DCHECK(!in_ssa_form_);
    return number_of_vregs_ - number_of_in_vregs_;
  }

  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
    return reverse_post_order_;
  }

  ArrayRef<HBasicBlock* const> GetReversePostOrderSkipEntryBlock() {
    DCHECK(GetReversePostOrder()[0] == entry_block_);
    return ArrayRef<HBasicBlock* const>(GetReversePostOrder()).SubArray(1);
  }

  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetPostOrder() const {
    return ReverseRange(GetReversePostOrder());
  }
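
  // Illustrative sketch (not part of the original header): optimization passes commonly
  // walk the graph in reverse post order so that a block is visited after its dominators,
  // e.g.:
  //
  //   for (HBasicBlock* block : graph->GetReversePostOrder()) {
  //     // Visit `block`; its dominators have already been visited.
  //   }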

  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
    return linear_order_;
  }

  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetLinearPostOrder() const {
    return ReverseRange(GetLinearOrder());
  }

  bool HasBoundsChecks() const {
    return has_bounds_checks_;
  }

  void SetHasBoundsChecks(bool value) {
    has_bounds_checks_ = value;
  }

  bool IsDebuggable() const { return debuggable_; }

  // Returns a constant of the given type and value. If it does not exist
  // already, it is created and inserted into the graph. This method is only for
  // integral types.
  HConstant* GetConstant(DataType::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);

  // TODO: This is problematic for the consistency of reference type propagation
  // because it can be created anytime after the pass and thus it will be left
  // with an invalid type.
  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);

  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_int_constants_, dex_pc);
  }
  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_long_constants_, dex_pc);
  }
  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
  }
  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
  }
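
  // Illustrative sketch (not part of the original header): because constants are cached
  // per graph, repeated requests for the same value return the same instruction, so a
  // simplifier pass can materialize constants without creating duplicates:
  //
  //   HIntConstant* two = graph->GetIntConstant(2);    // Cached or newly inserted.
  //   DCHECK_EQ(two, graph->GetIntConstant(2));        // Same cached instruction.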

  HCurrentMethod* GetCurrentMethod();

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  // Get the method name (without the signature), e.g. "<init>"
  const char* GetMethodName() const;

  // Get the pretty method name (class + name + optionally signature).
  std::string PrettyMethod(bool with_signature = true) const;

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  InstructionSet GetInstructionSet() const {
    return instruction_set_;
  }

  bool IsCompilingOsr() const { return osr_; }

  ArenaSet<ArtMethod*>& GetCHASingleImplementationList() {
    return cha_single_implementation_list_;
  }

  void AddCHASingleImplementationDependency(ArtMethod* method) {
    cha_single_implementation_list_.insert(method);
  }

  bool HasShouldDeoptimizeFlag() const {
    return number_of_cha_guards_ != 0;
  }

  bool HasTryCatch() const { return has_try_catch_; }
  void SetHasTryCatch(bool value) { has_try_catch_ = value; }

  bool HasSIMD() const { return has_simd_; }
  void SetHasSIMD(bool value) { has_simd_ = value; }

  bool HasLoops() const { return has_loops_; }
  void SetHasLoops(bool value) { has_loops_ = value; }

  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }

  ArtMethod* GetArtMethod() const { return art_method_; }
  void SetArtMethod(ArtMethod* method) { art_method_ = method; }

  // Returns an instruction with the opposite Boolean value from `cond`.
  // The returned instruction has already been inserted into the graph,
  // either as a constant or before `cursor`.
  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);
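
  // Illustrative sketch (not part of the original header): a pass negating a branch
  // condition might use it as follows (`if_instr` is a hypothetical HIf local):
  //
  //   HInstruction* cond = if_instr->InputAt(0);
  //   HInstruction* opposite = graph->InsertOppositeCondition(cond, if_instr);
  //   if_instr->ReplaceInput(opposite, 0);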

  ReferenceTypeInfo GetInexactObjectRti() const { return inexact_object_rti_; }

  uint32_t GetNumberOfCHAGuards() { return number_of_cha_guards_; }
  void SetNumberOfCHAGuards(uint32_t num) { number_of_cha_guards_ = num; }
  void IncrementNumberOfCHAGuards() { number_of_cha_guards_++; }

 private:
  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
  void RemoveDeadBlocks(const ArenaBitVector& visited);

  template <class InstructionType, typename ValueType>
  InstructionType* CreateConstant(ValueType value,
                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
                                  uint32_t dex_pc = kNoDexPc) {
    // Try to find an existing constant of the given value.
    InstructionType* constant = nullptr;
    auto cached_constant = cache->find(value);
    if (cached_constant != cache->end()) {
      constant = cached_constant->second;
    }

    // If not found or previously deleted, create and cache a new instruction.
    // Don't bother reviving a previously deleted instruction, for simplicity.
    if (constant == nullptr || constant->GetBlock() == nullptr) {
      constant = new (allocator_) InstructionType(value, dex_pc);
      cache->Overwrite(value, constant);
      InsertConstant(constant);
    }
    return constant;
  }

  void InsertConstant(HConstant* instruction);

  // Cache a float constant into the graph. This method should only be
  // called by the SsaBuilder when creating "equivalent" instructions.
  void CacheFloatConstant(HFloatConstant* constant);

  // See CacheFloatConstant comment.
  void CacheDoubleConstant(HDoubleConstant* constant);

  ArenaAllocator* const allocator_;
  ArenaStack* const arena_stack_;

  // List of blocks in insertion order.
  ArenaVector<HBasicBlock*> blocks_;

  // List of blocks to perform a reverse post order tree traversal.
  ArenaVector<HBasicBlock*> reverse_post_order_;

  // List of blocks to perform a linear order tree traversal. Unlike the reverse
  // post order, this order is not incrementally kept up-to-date.
  ArenaVector<HBasicBlock*> linear_order_;

  HBasicBlock* entry_block_;
  HBasicBlock* exit_block_;

  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
  uint16_t maximum_number_of_out_vregs_;

  // The number of virtual registers in this method. Contains the parameters.
  uint16_t number_of_vregs_;

  // The number of virtual registers used by parameters of this method.
  uint16_t number_of_in_vregs_;

  // Number of vreg size slots that the temporaries use (used in baseline compiler).
  size_t temporaries_vreg_slots_;

  // Flag whether there are bounds checks in the graph. We can skip
  // BCE if it's false. It's only best effort to keep it up to date in
  // the presence of code elimination so there might be false positives.
  bool has_bounds_checks_;

  // Flag whether there are try/catch blocks in the graph. We will skip
  // try/catch-related passes if it's false. It's only best effort to keep
  // it up to date in the presence of code elimination so there might be
  // false positives.
  bool has_try_catch_;

  // Flag whether SIMD instructions appear in the graph. If true, the
  // code generators may have to be more careful spilling the wider
  // contents of SIMD registers.
  bool has_simd_;

  // Flag whether there are any loops in the graph. We can skip loop
  // optimization if it's false. It's only best effort to keep it up
  // to date in the presence of code elimination so there might be false
  // positives.
  bool has_loops_;

  // Flag whether there are any irreducible loops in the graph. It's only
  // best effort to keep it up to date in the presence of code elimination
  // so there might be false positives.
  bool has_irreducible_loops_;

  // Indicates whether the graph should be compiled in a way that
  // ensures full debuggability. If false, we can apply more
  // aggressive optimizations that may limit the level of debugging.
  const bool debuggable_;

  // The current id to assign to a newly added instruction. See HInstruction.id_.
  int32_t current_instruction_id_;

  // The dex file from which the method is from.
  const DexFile& dex_file_;

  // The method index in the dex file.
  const uint32_t method_idx_;

  // If inlined, this encodes how the callee is being invoked.
  const InvokeType invoke_type_;

  // Whether the graph has been transformed to SSA form. Only used
  // in debug mode to ensure we are not using properties only valid
  // for non-SSA form (like the number of temporaries).
  bool in_ssa_form_;

  // Number of CHA guards in the graph. Used to short-circuit the
  // CHA guard optimization pass when there is no CHA guard left.
  uint32_t number_of_cha_guards_;

  const InstructionSet instruction_set_;

  // Cached constants.
  HNullConstant* cached_null_constant_;
  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;

  HCurrentMethod* cached_current_method_;

  // The ArtMethod this graph is for. Note that for AOT, it may be null,
  // for example for methods whose declaring class could not be resolved
  // (such as when the superclass could not be found).
  ArtMethod* art_method_;

  // Keep the RTI of inexact Object to avoid having to pass stack handle
  // collection pointer to passes which may create NullConstant.
  ReferenceTypeInfo inexact_object_rti_;

  // Whether we are compiling this graph for on stack replacement: this will
  // make all loops seen as irreducible and emit special stack maps to mark
  // compiled code entries which the interpreter can directly jump to.
  const bool osr_;

  // List of methods that are assumed to have single implementation.
  ArenaSet<ArtMethod*> cha_single_implementation_list_;

  friend class SsaBuilder;           // For caching constants.
  friend class SsaLivenessAnalysis;  // For the linear order.
  friend class HInliner;             // For the reverse post order.
  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
  DISALLOW_COPY_AND_ASSIGN(HGraph);
};

class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        irreducible_(false),
        contains_irreducible_loop_(false),
        back_edges_(graph->GetAllocator()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        blocks_(graph->GetAllocator(),
                graph->GetBlocks().size(),
                true,
                kArenaAllocLoopInfoBackEdges) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  bool IsIrreducible() const { return irreducible_; }
  bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }

  void Dump(std::ostream& os);

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop.
  void Populate();

  // Updates the blocks population of this loop and, recursively, of all its outer
  // loops after the population of the inner loop has been updated.
  void PopulateInnerLoopUpwards(HLoopInformation* inner_loop);

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if instruction is not defined within this loop.
  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;

  const ArenaBitVector& GetBlocks() const { return blocks_; }

  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

  void ClearAllBlocks() {
    blocks_.ClearAllBits();
  }

  bool HasBackEdgeNotDominatedByHeader() const;

  bool IsPopulated() const {
    return blocks_.GetHighestBitSet() != -1;
  }

  bool DominatesAllBackEdges(HBasicBlock* block);

  bool HasExitEdge() const;

  // Resets back edge and blocks-in-loop data.
  void ResetBasicBlockData() {
    back_edges_.clear();
    ClearAllBlocks();
  }

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);
  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  bool irreducible_;
  bool contains_irreducible_loop_;
  ArenaVector<HBasicBlock*> back_edges_;
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};

// Stores try/catch information for basic blocks.
// Note that HGraph is constructed so that catch blocks cannot simultaneously
// be try blocks.
class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
 public:
  // Try block information constructor.
  explicit TryCatchInformation(const HTryBoundary& try_entry)
      : try_entry_(&try_entry),
        catch_dex_file_(nullptr),
        catch_type_index_(DexFile::kDexNoIndex16) {
    DCHECK(try_entry_ != nullptr);
  }

  // Catch block information constructor.
  TryCatchInformation(dex::TypeIndex catch_type_index, const DexFile& dex_file)
      : try_entry_(nullptr),
        catch_dex_file_(&dex_file),
        catch_type_index_(catch_type_index) {}

  bool IsTryBlock() const { return try_entry_ != nullptr; }

  const HTryBoundary& GetTryEntry() const {
    DCHECK(IsTryBlock());
    return *try_entry_;
  }

  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }

  bool IsCatchAllTypeIndex() const {
    DCHECK(IsCatchBlock());
    return !catch_type_index_.IsValid();
  }

  dex::TypeIndex GetCatchTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_;
  }

  const DexFile& GetCatchDexFile() const {
    DCHECK(IsCatchBlock());
    return *catch_dex_file_;
  }

 private:
  // One of possibly several TryBoundary instructions entering the block's try.
  // Only set for try blocks.
  const HTryBoundary* try_entry_;

  // Exception type information. Only set for catch blocks.
  const DexFile* catch_dex_file_;
  const dex::TypeIndex catch_type_index_;
};

static constexpr size_t kNoLifetime = -1;
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);

// A block in a method. Contains the list of instructions represented
// as a doubly linked list. Each block knows its predecessors and
// successors.

class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
 public:
  explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
      : graph_(graph),
        predecessors_(graph->GetAllocator()->Adapter(kArenaAllocPredecessors)),
        successors_(graph->GetAllocator()->Adapter(kArenaAllocSuccessors)),
        loop_information_(nullptr),
        dominator_(nullptr),
        dominated_blocks_(graph->GetAllocator()->Adapter(kArenaAllocDominated)),
        block_id_(kInvalidBlockId),
        dex_pc_(dex_pc),
        lifetime_start_(kNoLifetime),
        lifetime_end_(kNoLifetime),
        try_catch_information_(nullptr) {
    predecessors_.reserve(kDefaultNumberOfPredecessors);
    successors_.reserve(kDefaultNumberOfSuccessors);
    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
  }

  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
    return predecessors_;
  }

  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
    return successors_;
  }

  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;

  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
    return ContainsElement(successors_, block, start_from);
  }

  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
    return dominated_blocks_;
  }

  bool IsEntryBlock() const {
    return graph_->GetEntryBlock() == this;
  }

  bool IsExitBlock() const {
    return graph_->GetExitBlock() == this;
  }

  bool IsSingleGoto() const;
  bool IsSingleReturn() const;
  bool IsSingleReturnOrReturnVoidAllowingPhis() const;
  bool IsSingleTryBoundary() const;

  // Returns true if this block emits nothing but a jump.
  bool IsSingleJump() const {
    HLoopInformation* loop_info = GetLoopInformation();
    return (IsSingleGoto() || IsSingleTryBoundary())
           // Back edges generate a suspend check.
           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
  }

  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetAllocator()) HLoopInformation(this, graph_);
    }
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }

  // Registers a back edge; if the block was not a loop header before the call,
  // associates a newly created loop info with it.
  //
  // Used in SuperblockCloner to preserve the LoopInformation object instead of resetting
  // loop info for all blocks during back edge recalculation.
  void AddBackEdgeWhileUpdating(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr || loop_information_->GetHeader() != this) {
      loop_information_ = new (graph_->GetAllocator()) HLoopInformation(this, graph_);
    }
    loop_information_->AddBackEdge(back_edge);
  }

  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  void SetBlockId(int id) { block_id_ = id; }
  uint32_t GetDexPc() const { return dex_pc_; }

  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }

  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  void ClearDominanceInformation();

  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  HInstruction* GetFirstInstructionDisregardMoves() const;

  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }
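
  // Illustrative sketch (not part of the original header): splitting an edge A -> B with a
  // freshly created block keeps A's successor index and B's predecessor index stable
  // (`A`, `B` and `split` are hypothetical locals):
  //
  //   HBasicBlock* split = new (graph->GetAllocator()) HBasicBlock(graph, B->GetDexPc());
  //   graph->AddBlock(split);
  //   split->InsertBetween(A, B);  // Now A -> split -> B, reusing the original indices.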

  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);

  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merges the instructions of `other` at the end of `this`.
  void MergeInstructionsWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` is the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace phi `initial` with `replacement` within this block.
  void ReplaceAndRemovePhiWith(HPhi* initial, HPhi* replacement);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non-loop header having loop information means this loop information
      // has already been populated.
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
1278   bool Dominates(HBasicBlock* block) const;
1279 
GetLifetimeStart()1280   size_t GetLifetimeStart() const { return lifetime_start_; }
GetLifetimeEnd()1281   size_t GetLifetimeEnd() const { return lifetime_end_; }
1282 
SetLifetimeStart(size_t start)1283   void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
SetLifetimeEnd(size_t end)1284   void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }
1285 
1286   bool EndsWithControlFlowInstruction() const;
1287   bool EndsWithIf() const;
1288   bool EndsWithTryBoundary() const;
1289   bool HasSinglePhi() const;
1290 
1291  private:
1292   HGraph* graph_;
1293   ArenaVector<HBasicBlock*> predecessors_;
1294   ArenaVector<HBasicBlock*> successors_;
1295   HInstructionList instructions_;
1296   HInstructionList phis_;
1297   HLoopInformation* loop_information_;
1298   HBasicBlock* dominator_;
1299   ArenaVector<HBasicBlock*> dominated_blocks_;
1300   uint32_t block_id_;
1301   // The dex program counter of the first instruction of this block.
1302   const uint32_t dex_pc_;
1303   size_t lifetime_start_;
1304   size_t lifetime_end_;
1305   TryCatchInformation* try_catch_information_;
1306 
1307   friend class HGraph;
1308   friend class HInstruction;
1309 
1310   DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
1311 };
1312 
1313 // Iterates over the LoopInformation of all loops which contain 'block'
1314 // from the innermost to the outermost.
1315 class HLoopInformationOutwardIterator : public ValueObject {
1316  public:
1317   explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
1318       : current_(block.GetLoopInformation()) {}
1319 
1320   bool Done() const { return current_ == nullptr; }
1321 
1322   void Advance() {
1323     DCHECK(!Done());
1324     current_ = current_->GetPreHeader()->GetLoopInformation();
1325   }
1326 
1327   HLoopInformation* Current() const {
1328     DCHECK(!Done());
1329     return current_;
1330   }
1331 
1332  private:
1333   HLoopInformation* current_;
1334 
1335   DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
1336 };
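// Minimal usage sketch (illustrative only, not part of this header): walking all loops
// that contain `block`, innermost first. `block` and `visitor` are hypothetical.
//
//   for (HLoopInformationOutwardIterator it(*block); !it.Done(); it.Advance()) {
//     HLoopInformation* loop = it.Current();
//     visitor(loop);  // E.g. inspect loop->GetHeader() or loop->Contains(*block).
//   }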
1337 
1338 #define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
1339   M(Above, Condition)                                                   \
1340   M(AboveOrEqual, Condition)                                            \
1341   M(Add, BinaryOperation)                                               \
1342   M(And, BinaryOperation)                                               \
1343   M(ArrayGet, Instruction)                                              \
1344   M(ArrayLength, Instruction)                                           \
1345   M(ArraySet, Instruction)                                              \
1346   M(Below, Condition)                                                   \
1347   M(BelowOrEqual, Condition)                                            \
1348   M(BooleanNot, UnaryOperation)                                         \
1349   M(BoundsCheck, Instruction)                                           \
1350   M(BoundType, Instruction)                                             \
1351   M(CheckCast, Instruction)                                             \
1352   M(ClassTableGet, Instruction)                                         \
1353   M(ClearException, Instruction)                                        \
1354   M(ClinitCheck, Instruction)                                           \
1355   M(Compare, BinaryOperation)                                           \
1356   M(ConstructorFence, Instruction)                                      \
1357   M(CurrentMethod, Instruction)                                         \
1358   M(ShouldDeoptimizeFlag, Instruction)                                  \
1359   M(Deoptimize, Instruction)                                            \
1360   M(Div, BinaryOperation)                                               \
1361   M(DivZeroCheck, Instruction)                                          \
1362   M(DoubleConstant, Constant)                                           \
1363   M(Equal, Condition)                                                   \
1364   M(Exit, Instruction)                                                  \
1365   M(FloatConstant, Constant)                                            \
1366   M(Goto, Instruction)                                                  \
1367   M(GreaterThan, Condition)                                             \
1368   M(GreaterThanOrEqual, Condition)                                      \
1369   M(If, Instruction)                                                    \
1370   M(InstanceFieldGet, Instruction)                                      \
1371   M(InstanceFieldSet, Instruction)                                      \
1372   M(InstanceOf, Instruction)                                            \
1373   M(IntConstant, Constant)                                              \
1374   M(IntermediateAddress, Instruction)                                   \
1375   M(InvokeUnresolved, Invoke)                                           \
1376   M(InvokeInterface, Invoke)                                            \
1377   M(InvokeStaticOrDirect, Invoke)                                       \
1378   M(InvokeVirtual, Invoke)                                              \
1379   M(InvokePolymorphic, Invoke)                                          \
1380   M(LessThan, Condition)                                                \
1381   M(LessThanOrEqual, Condition)                                         \
1382   M(LoadClass, Instruction)                                             \
1383   M(LoadException, Instruction)                                         \
1384   M(LoadString, Instruction)                                            \
1385   M(LongConstant, Constant)                                             \
1386   M(MemoryBarrier, Instruction)                                         \
1387   M(MonitorOperation, Instruction)                                      \
1388   M(Mul, BinaryOperation)                                               \
1389   M(NativeDebugInfo, Instruction)                                       \
1390   M(Neg, UnaryOperation)                                                \
1391   M(NewArray, Instruction)                                              \
1392   M(NewInstance, Instruction)                                           \
1393   M(Not, UnaryOperation)                                                \
1394   M(NotEqual, Condition)                                                \
1395   M(NullConstant, Instruction)                                          \
1396   M(NullCheck, Instruction)                                             \
1397   M(Or, BinaryOperation)                                                \
1398   M(PackedSwitch, Instruction)                                          \
1399   M(ParallelMove, Instruction)                                          \
1400   M(ParameterValue, Instruction)                                        \
1401   M(Phi, Instruction)                                                   \
1402   M(Rem, BinaryOperation)                                               \
1403   M(Return, Instruction)                                                \
1404   M(ReturnVoid, Instruction)                                            \
1405   M(Ror, BinaryOperation)                                               \
1406   M(Shl, BinaryOperation)                                               \
1407   M(Shr, BinaryOperation)                                               \
1408   M(StaticFieldGet, Instruction)                                        \
1409   M(StaticFieldSet, Instruction)                                        \
1410   M(UnresolvedInstanceFieldGet, Instruction)                            \
1411   M(UnresolvedInstanceFieldSet, Instruction)                            \
1412   M(UnresolvedStaticFieldGet, Instruction)                              \
1413   M(UnresolvedStaticFieldSet, Instruction)                              \
1414   M(Select, Instruction)                                                \
1415   M(Sub, BinaryOperation)                                               \
1416   M(SuspendCheck, Instruction)                                          \
1417   M(Throw, Instruction)                                                 \
1418   M(TryBoundary, Instruction)                                           \
1419   M(TypeConversion, Instruction)                                        \
1420   M(UShr, BinaryOperation)                                              \
1421   M(Xor, BinaryOperation)                                               \
1422   M(VecReplicateScalar, VecUnaryOperation)                              \
1423   M(VecExtractScalar, VecUnaryOperation)                                \
1424   M(VecReduce, VecUnaryOperation)                                       \
1425   M(VecCnv, VecUnaryOperation)                                          \
1426   M(VecNeg, VecUnaryOperation)                                          \
1427   M(VecAbs, VecUnaryOperation)                                          \
1428   M(VecNot, VecUnaryOperation)                                          \
1429   M(VecAdd, VecBinaryOperation)                                         \
1430   M(VecHalvingAdd, VecBinaryOperation)                                  \
1431   M(VecSub, VecBinaryOperation)                                         \
1432   M(VecMul, VecBinaryOperation)                                         \
1433   M(VecDiv, VecBinaryOperation)                                         \
1434   M(VecMin, VecBinaryOperation)                                         \
1435   M(VecMax, VecBinaryOperation)                                         \
1436   M(VecAnd, VecBinaryOperation)                                         \
1437   M(VecAndNot, VecBinaryOperation)                                      \
1438   M(VecOr, VecBinaryOperation)                                          \
1439   M(VecXor, VecBinaryOperation)                                         \
1440   M(VecShl, VecBinaryOperation)                                         \
1441   M(VecShr, VecBinaryOperation)                                         \
1442   M(VecUShr, VecBinaryOperation)                                        \
1443   M(VecSetScalars, VecOperation)                                        \
1444   M(VecMultiplyAccumulate, VecOperation)                                \
1445   M(VecSADAccumulate, VecOperation)                                     \
1446   M(VecLoad, VecMemoryOperation)                                        \
1447   M(VecStore, VecMemoryOperation)                                       \
1448 
1449 /*
1450  * Instructions, shared across several (not all) architectures.
1451  */
1452 #if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
1453 #define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
1454 #else
1455 #define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                         \
1456   M(BitwiseNegatedRight, Instruction)                                   \
1457   M(DataProcWithShifterOp, Instruction)                                 \
1458   M(MultiplyAccumulate, Instruction)                                    \
1459   M(IntermediateAddressIndex, Instruction)
1460 #endif
1461 
1462 #define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
1463 
1464 #define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
1465 
1466 #ifndef ART_ENABLE_CODEGEN_mips
1467 #define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
1468 #else
1469 #define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                           \
1470   M(MipsComputeBaseMethodAddress, Instruction)                          \
1471   M(MipsPackedSwitch, Instruction)                                      \
1472   M(IntermediateArrayAddressIndex, Instruction)
1473 #endif
1474 
1475 #define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)
1476 
1477 #ifndef ART_ENABLE_CODEGEN_x86
1478 #define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
1479 #else
1480 #define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
1481   M(X86ComputeBaseMethodAddress, Instruction)                           \
1482   M(X86LoadFromConstantTable, Instruction)                              \
1483   M(X86FPNeg, Instruction)                                              \
1484   M(X86PackedSwitch, Instruction)
1485 #endif
1486 
1487 #define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1488 
1489 #define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
1490   FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
1491   FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                               \
1492   FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
1493   FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
1494   FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)                                 \
1495   FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)                               \
1496   FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
1497   FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1498 
1499 #define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
1500   M(Condition, BinaryOperation)                                         \
1501   M(Constant, Instruction)                                              \
1502   M(UnaryOperation, Instruction)                                        \
1503   M(BinaryOperation, Instruction)                                       \
1504   M(Invoke, Instruction)                                                \
1505   M(VecOperation, Instruction)                                          \
1506   M(VecUnaryOperation, VecOperation)                                    \
1507   M(VecBinaryOperation, VecOperation)                                   \
1508   M(VecMemoryOperation, VecOperation)
1509 
1510 #define FOR_EACH_INSTRUCTION(M)                                         \
1511   FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
1512   FOR_EACH_ABSTRACT_INSTRUCTION(M)
1513 
1514 #define FORWARD_DECLARATION(type, super) class H##type;
1515 FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
1516 #undef FORWARD_DECLARATION
1517 
1518 #define DECLARE_INSTRUCTION(type)                                         \
1519   private:                                                                \
1520   H##type& operator=(const H##type&) = delete;                            \
1521   public:                                                                 \
1522   const char* DebugName() const OVERRIDE { return #type; }                \
1523   bool InstructionTypeEquals(const HInstruction* other) const OVERRIDE {  \
1524     return other->Is##type();                                             \
1525   }                                                                       \
1526   HInstruction* Clone(ArenaAllocator* arena) const OVERRIDE {             \
1527     DCHECK(IsClonable());                                                 \
1528     return new (arena) H##type(*this->As##type());                        \
1529   }                                                                       \
1530   void Accept(HGraphVisitor* visitor) OVERRIDE
1531 
1532 #define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
1533   private:                                                              \
1534   H##type& operator=(const H##type&) = delete;                          \
1535   public:                                                               \
1536   bool Is##type() const { return As##type() != nullptr; }               \
1537   const H##type* As##type() const { return this; }                      \
1538   H##type* As##type() { return this; }
1539 
1540 #define DEFAULT_COPY_CONSTRUCTOR(type)                                  \
1541   explicit H##type(const H##type& other) = default;
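// Illustrative sketch (not a real instruction in this file): a concrete instruction class
// typically combines the macros above as follows. `HFoo` is hypothetical; the constructor
// and accessors are elided.
//
//   class HFoo FINAL : public HUnaryOperation {
//    public:
//     ...
//     bool IsClonable() const OVERRIDE { return true; }
//     DECLARE_INSTRUCTION(Foo);  // Provides DebugName(), InstructionTypeEquals(), Clone(), Accept().
//
//    protected:
//     DEFAULT_COPY_CONSTRUCTOR(Foo);  // Shallow copy used by Clone().
//   };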
1542 
1543 template <typename T>
1544 class HUseListNode : public ArenaObject<kArenaAllocUseListNode>,
1545                      public IntrusiveForwardListNode<HUseListNode<T>> {
1546  public:
1547   // Get the instruction which has this use as one of the inputs.
1548   T GetUser() const { return user_; }
1549   // Get the position of the input record that this use corresponds to.
1550   size_t GetIndex() const { return index_; }
1551   // Set the position of the input record that this use corresponds to.
1552   void SetIndex(size_t index) { index_ = index; }
1553 
1554  private:
1555   HUseListNode(T user, size_t index)
1556       : user_(user), index_(index) {}
1557 
1558   T const user_;
1559   size_t index_;
1560 
1561   friend class HInstruction;
1562 
1563   DISALLOW_COPY_AND_ASSIGN(HUseListNode);
1564 };
1565 
1566 template <typename T>
1567 using HUseList = IntrusiveForwardList<HUseListNode<T>>;
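// Minimal usage sketch (illustrative only): iterating the users of an instruction.
// `instruction` is a hypothetical HInstruction*; GetUses() and InputAt() are declared
// further below on HInstruction.
//
//   for (const HUseListNode<HInstruction*>& use : instruction->GetUses()) {
//     HInstruction* user = use.GetUser();   // The instruction consuming `instruction`.
//     size_t input_index = use.GetIndex();  // Which input slot of `user` it occupies.
//     DCHECK_EQ(user->InputAt(input_index), instruction);
//   }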
1568 
1569 // This class is used by HEnvironment and HInstruction classes to record the
1570 // instructions they use and pointers to the corresponding HUseListNodes kept
1571 // by the used instructions.
1572 template <typename T>
1573 class HUserRecord : public ValueObject {
1574  public:
1575   HUserRecord() : instruction_(nullptr), before_use_node_() {}
1576   explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}
1577 
1578   HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
1579       : HUserRecord(old_record.instruction_, before_use_node) {}
1580   HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
1581       : instruction_(instruction), before_use_node_(before_use_node) {
1582     DCHECK(instruction_ != nullptr);
1583   }
1584 
1585   HInstruction* GetInstruction() const { return instruction_; }
1586   typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
1587   typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }
1588 
1589  private:
1590   // Instruction used by the user.
1591   HInstruction* instruction_;
1592 
1593   // Iterator before the corresponding entry in the use list kept by 'instruction_'.
1594   typename HUseList<T>::iterator before_use_node_;
1595 };
1596 
1597 // Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
1598 // This is used for HInstruction::GetInputs() to return a container wrapper providing
1599 // HInstruction* values even though the underlying container has HUserRecord<>s.
1600 struct HInputExtractor {
1601   HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
1602     return record.GetInstruction();
1603   }
1604   const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
1605     return record.GetInstruction();
1606   }
1607 };
1608 
1609 using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
1610 using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;
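// Minimal usage sketch (illustrative only): HInputsRef lets callers see plain
// HInstruction* inputs even though the underlying storage holds HUserRecord<>s.
// `instruction` is a hypothetical HInstruction*.
//
//   for (HInstruction* input : instruction->GetInputs()) {
//     // `input` is the instruction feeding one input slot, in input order.
//   }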
1611 
1612 /**
1613  * Side-effects representation.
1614  *
1615  * For write/read dependences on fields/arrays, the dependence analysis uses
1616  * type disambiguation (e.g. a float field write cannot modify the value of an
1617  * integer field read) and the access type (e.g.  a reference array write cannot
1618  * modify the value of a reference field read [although it may modify the
1619  * reference fetch prior to reading the field, which is represented by its own
1620  * write/read dependence]). The analysis makes conservative points-to
1621  * assumptions on reference types (e.g. two same typed arrays are assumed to be
1622  * the same, and any reference read depends on any reference read without
1623  * further regard of its type).
1624  *
1625  * The internal representation uses 38 bits and is described in the table below.
1626  * The first line indicates the side effect, and for field/array accesses the
1627  * second line indicates the type of the access (in the order of the
1628  * DataType::Type enum).
1629  * The two numbered lines below indicate the bit position in the bitfield (read
1630  * vertically).
1631  *
1632  *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
1633  *   +-------------+---------+---------+--------------+---------+---------+
1634  *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
1635  *   |      3      |333333322|222222221|       1      |111111110|000000000|
1636  *   |      7      |654321098|765432109|       8      |765432109|876543210|
1637  *
1638  * Note that, to ease the implementation, 'changes' bits are least significant
1639  * bits, while 'dependency' bits are most significant bits.
1640  */
1641 class SideEffects : public ValueObject {
1642  public:
1643   SideEffects() : flags_(0) {}
1644 
1645   static SideEffects None() {
1646     return SideEffects(0);
1647   }
1648 
1649   static SideEffects All() {
1650     return SideEffects(kAllChangeBits | kAllDependOnBits);
1651   }
1652 
1653   static SideEffects AllChanges() {
1654     return SideEffects(kAllChangeBits);
1655   }
1656 
1657   static SideEffects AllDependencies() {
1658     return SideEffects(kAllDependOnBits);
1659   }
1660 
1661   static SideEffects AllExceptGCDependency() {
1662     return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
1663   }
1664 
1665   static SideEffects AllWritesAndReads() {
1666     return SideEffects(kAllWrites | kAllReads);
1667   }
1668 
1669   static SideEffects AllWrites() {
1670     return SideEffects(kAllWrites);
1671   }
1672 
1673   static SideEffects AllReads() {
1674     return SideEffects(kAllReads);
1675   }
1676 
1677   static SideEffects FieldWriteOfType(DataType::Type type, bool is_volatile) {
1678     return is_volatile
1679         ? AllWritesAndReads()
1680         : SideEffects(TypeFlag(type, kFieldWriteOffset));
1681   }
1682 
1683   static SideEffects ArrayWriteOfType(DataType::Type type) {
1684     return SideEffects(TypeFlag(type, kArrayWriteOffset));
1685   }
1686 
1687   static SideEffects FieldReadOfType(DataType::Type type, bool is_volatile) {
1688     return is_volatile
1689         ? AllWritesAndReads()
1690         : SideEffects(TypeFlag(type, kFieldReadOffset));
1691   }
1692 
1693   static SideEffects ArrayReadOfType(DataType::Type type) {
1694     return SideEffects(TypeFlag(type, kArrayReadOffset));
1695   }
1696 
1697   static SideEffects CanTriggerGC() {
1698     return SideEffects(1ULL << kCanTriggerGCBit);
1699   }
1700 
1701   static SideEffects DependsOnGC() {
1702     return SideEffects(1ULL << kDependsOnGCBit);
1703   }
1704 
1705   // Combines the side-effects of this and the other.
1706   SideEffects Union(SideEffects other) const {
1707     return SideEffects(flags_ | other.flags_);
1708   }
1709 
1710   SideEffects Exclusion(SideEffects other) const {
1711     return SideEffects(flags_ & ~other.flags_);
1712   }
1713 
1714   void Add(SideEffects other) {
1715     flags_ |= other.flags_;
1716   }
1717 
1718   bool Includes(SideEffects other) const {
1719     return (other.flags_ & flags_) == other.flags_;
1720   }
1721 
1722   bool HasSideEffects() const {
1723     return (flags_ & kAllChangeBits);
1724   }
1725 
1726   bool HasDependencies() const {
1727     return (flags_ & kAllDependOnBits);
1728   }
1729 
1730   // Returns true if there are no side effects or dependencies.
1731   bool DoesNothing() const {
1732     return flags_ == 0;
1733   }
1734 
1735   // Returns true if something is written.
1736   bool DoesAnyWrite() const {
1737     return (flags_ & kAllWrites);
1738   }
1739 
1740   // Returns true if something is read.
1741   bool DoesAnyRead() const {
1742     return (flags_ & kAllReads);
1743   }
1744 
1745   // Returns true if potentially everything is written and read
1746   // (every type and every kind of access).
1747   bool DoesAllReadWrite() const {
1748     return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
1749   }
1750 
1751   bool DoesAll() const {
1752     return flags_ == (kAllChangeBits | kAllDependOnBits);
1753   }
1754 
1755   // Returns true if `this` may read something written by `other`.
1756   bool MayDependOn(SideEffects other) const {
1757     const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
1758     return (other.flags_ & depends_on_flags);
1759   }
1760 
1761   // Returns string representation of flags (for debugging only).
1762   // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
1763   std::string ToString() const {
1764     std::string flags = "|";
1765     for (int s = kLastBit; s >= 0; s--) {
1766       bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
1767       if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
1768         // This is a bit for the GC side effect.
1769         if (current_bit_is_set) {
1770           flags += "GC";
1771         }
1772         flags += "|";
1773       } else {
1774         // This is a bit for the array/field analysis.
1775         // The underscore character stands for the 'can trigger GC' bit.
1776         static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
1777         if (current_bit_is_set) {
1778           flags += kDebug[s];
1779         }
1780         if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
1781             (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
1782           flags += "|";
1783         }
1784       }
1785     }
1786     return flags;
1787   }
1788 
1789   bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }
1790 
1791  private:
1792   static constexpr int kFieldArrayAnalysisBits = 9;
1793 
1794   static constexpr int kFieldWriteOffset = 0;
1795   static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
1796   static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
1797   static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;
1798 
1799   static constexpr int kChangeBits = kCanTriggerGCBit + 1;
1800 
1801   static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
1802   static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
1803   static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
1804   static constexpr int kDependsOnGCBit = kLastBitForReads + 1;
1805 
1806   static constexpr int kLastBit = kDependsOnGCBit;
1807   static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;
1808 
1809   // Aliases.
1810 
1811   static_assert(kChangeBits == kDependOnBits,
1812                 "the 'change' bits should match the 'depend on' bits.");
1813 
1814   static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
1815   static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
1816   static constexpr uint64_t kAllWrites =
1817       ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
1818   static constexpr uint64_t kAllReads =
1819       ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;
1820 
1821   // Translates type to bit flag. The type must correspond to a Java type.
1822   static uint64_t TypeFlag(DataType::Type type, int offset) {
1823     int shift;
1824     switch (type) {
1825       case DataType::Type::kReference: shift = 0; break;
1826       case DataType::Type::kBool:      shift = 1; break;
1827       case DataType::Type::kInt8:      shift = 2; break;
1828       case DataType::Type::kUint16:    shift = 3; break;
1829       case DataType::Type::kInt16:     shift = 4; break;
1830       case DataType::Type::kInt32:     shift = 5; break;
1831       case DataType::Type::kInt64:     shift = 6; break;
1832       case DataType::Type::kFloat32:   shift = 7; break;
1833       case DataType::Type::kFloat64:   shift = 8; break;
1834       default:
1835         LOG(FATAL) << "Unexpected data type " << type;
1836         UNREACHABLE();
1837     }
1838     DCHECK_LE(kFieldWriteOffset, shift);
1839     DCHECK_LT(shift, kArrayWriteOffset);
1840     return UINT64_C(1) << (shift + offset);
1841   }
1842 
1843   // Private constructor on direct flags value.
1844   explicit SideEffects(uint64_t flags) : flags_(flags) {}
1845 
1846   uint64_t flags_;
1847 };
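// Usage sketch of the SideEffects algebra above (illustrative only; the results shown
// follow from the flag layout but are not asserted anywhere in this header):
//
//   SideEffects write = SideEffects::FieldWriteOfType(DataType::Type::kInt32, /* is_volatile */ false);
//   SideEffects read  = SideEffects::FieldReadOfType(DataType::Type::kInt32, /* is_volatile */ false);
//   SideEffects other = SideEffects::FieldReadOfType(DataType::Type::kFloat32, /* is_volatile */ false);
//   read.MayDependOn(write);           // true: same access kind and same type bit.
//   other.MayDependOn(write);          // false: a float read does not alias an int32 write.
//   write.Union(read).DoesAnyWrite();  // true: the union keeps the write bit.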
1848 
1849 // A HEnvironment object contains the values of virtual registers at a given location.
1850 class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
1851  public:
1852   ALWAYS_INLINE HEnvironment(ArenaAllocator* allocator,
1853                              size_t number_of_vregs,
1854                              ArtMethod* method,
1855                              uint32_t dex_pc,
1856                              HInstruction* holder)
1857      : vregs_(number_of_vregs, allocator->Adapter(kArenaAllocEnvironmentVRegs)),
1858        locations_(allocator->Adapter(kArenaAllocEnvironmentLocations)),
1859        parent_(nullptr),
1860        method_(method),
1861        dex_pc_(dex_pc),
1862        holder_(holder) {
1863   }
1864 
1865   ALWAYS_INLINE HEnvironment(ArenaAllocator* allocator,
1866                              const HEnvironment& to_copy,
1867                              HInstruction* holder)
1868       : HEnvironment(allocator,
1869                      to_copy.Size(),
1870                      to_copy.GetMethod(),
1871                      to_copy.GetDexPc(),
1872                      holder) {}
1873 
1874   void AllocateLocations() {
1875     DCHECK(locations_.empty());
1876     locations_.resize(vregs_.size());
1877   }
1878 
1879   void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
1880     if (parent_ != nullptr) {
1881       parent_->SetAndCopyParentChain(allocator, parent);
1882     } else {
1883       parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
1884       parent_->CopyFrom(parent);
1885       if (parent->GetParent() != nullptr) {
1886         parent_->SetAndCopyParentChain(allocator, parent->GetParent());
1887       }
1888     }
1889   }
1890 
1891   void CopyFrom(ArrayRef<HInstruction* const> locals);
1892   void CopyFrom(HEnvironment* environment);
1893 
1894   // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
1895   // input to the loop phi instead. This is for inserting instructions that
1896   // require an environment (like HDeoptimization) in the loop pre-header.
1897   void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);
1898 
1899   void SetRawEnvAt(size_t index, HInstruction* instruction) {
1900     vregs_[index] = HUserRecord<HEnvironment*>(instruction);
1901   }
1902 
1903   HInstruction* GetInstructionAt(size_t index) const {
1904     return vregs_[index].GetInstruction();
1905   }
1906 
1907   void RemoveAsUserOfInput(size_t index) const;
1908 
1909   size_t Size() const { return vregs_.size(); }
1910 
1911   HEnvironment* GetParent() const { return parent_; }
1912 
1913   void SetLocationAt(size_t index, Location location) {
1914     locations_[index] = location;
1915   }
1916 
1917   Location GetLocationAt(size_t index) const {
1918     return locations_[index];
1919   }
1920 
1921   uint32_t GetDexPc() const {
1922     return dex_pc_;
1923   }
1924 
1925   ArtMethod* GetMethod() const {
1926     return method_;
1927   }
1928 
1929   HInstruction* GetHolder() const {
1930     return holder_;
1931   }
1932 
1933 
1934   bool IsFromInlinedInvoke() const {
1935     return GetParent() != nullptr;
1936   }
1937 
1938  private:
1939   ArenaVector<HUserRecord<HEnvironment*>> vregs_;
1940   ArenaVector<Location> locations_;
1941   HEnvironment* parent_;
1942   ArtMethod* method_;
1943   const uint32_t dex_pc_;
1944 
1945   // The instruction that holds this environment.
1946   HInstruction* const holder_;
1947 
1948   friend class HInstruction;
1949 
1950   DISALLOW_COPY_AND_ASSIGN(HEnvironment);
1951 };
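// Construction sketch (illustrative only): how an environment is typically set up for an
// instruction that needs one. `allocator`, `instruction`, `method` and `vreg_values` are
// hypothetical (real callers live in the graph builder and the inliner), and every slot
// is assumed to hold a non-null instruction.
//
//   HEnvironment* env = new (allocator) HEnvironment(
//       allocator, vreg_values.size(), method, instruction->GetDexPc(), instruction);
//   for (size_t i = 0; i < vreg_values.size(); ++i) {
//     env->SetRawEnvAt(i, vreg_values[i]);  // Record the value of each dex register.
//     vreg_values[i]->AddEnvUseAt(env, i);  // Keep the use lists in sync.
//   }
//   instruction->SetRawEnvironment(env);    // DCHECKs that the holder matches.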
1952 
1953 class HInstruction : public ArenaObject<kArenaAllocInstruction> {
1954  public:
1955 #define DECLARE_KIND(type, super) k##type,
1956   enum InstructionKind {
1957     FOR_EACH_INSTRUCTION(DECLARE_KIND)
1958     kLastInstructionKind
1959   };
1960 #undef DECLARE_KIND
1961 
1962   HInstruction(InstructionKind kind, SideEffects side_effects, uint32_t dex_pc)
1963       : previous_(nullptr),
1964         next_(nullptr),
1965         block_(nullptr),
1966         dex_pc_(dex_pc),
1967         id_(-1),
1968         ssa_index_(-1),
1969         packed_fields_(0u),
1970         environment_(nullptr),
1971         locations_(nullptr),
1972         live_interval_(nullptr),
1973         lifetime_position_(kNoLifetime),
1974         side_effects_(side_effects),
1975         reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
1976     SetPackedField<InstructionKindField>(kind);
1977     SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
1978   }
1979 
1980   virtual ~HInstruction() {}
1981 
1982 
1983   HInstruction* GetNext() const { return next_; }
1984   HInstruction* GetPrevious() const { return previous_; }
1985 
1986   HInstruction* GetNextDisregardingMoves() const;
1987   HInstruction* GetPreviousDisregardingMoves() const;
1988 
1989   HBasicBlock* GetBlock() const { return block_; }
1990   ArenaAllocator* GetAllocator() const { return block_->GetGraph()->GetAllocator(); }
1991   void SetBlock(HBasicBlock* block) { block_ = block; }
1992   bool IsInBlock() const { return block_ != nullptr; }
1993   bool IsInLoop() const { return block_->IsInLoop(); }
1994   bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
1995   bool IsIrreducibleLoopHeaderPhi() const {
1996     return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
1997   }
1998 
1999   virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;
2000 
2001   ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
2002     // One virtual method is enough, just const_cast<> and then re-add the const.
2003     return ArrayRef<const HUserRecord<HInstruction*>>(
2004         const_cast<HInstruction*>(this)->GetInputRecords());
2005   }
2006 
2007   HInputsRef GetInputs() {
2008     return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
2009   }
2010 
2011   HConstInputsRef GetInputs() const {
2012     return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
2013   }
2014 
2015   size_t InputCount() const { return GetInputRecords().size(); }
2016   HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }
2017 
2018   bool HasInput(HInstruction* input) const {
2019     for (const HInstruction* i : GetInputs()) {
2020       if (i == input) {
2021         return true;
2022       }
2023     }
2024     return false;
2025   }
2026 
2027   void SetRawInputAt(size_t index, HInstruction* input) {
2028     SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
2029   }
2030 
2031   virtual void Accept(HGraphVisitor* visitor) = 0;
2032   virtual const char* DebugName() const = 0;
2033 
2034   virtual DataType::Type GetType() const { return DataType::Type::kVoid; }
2035 
2036   virtual bool NeedsEnvironment() const { return false; }
2037 
2038   uint32_t GetDexPc() const { return dex_pc_; }
2039 
2040   virtual bool IsControlFlow() const { return false; }
2041 
2042   // Can the instruction throw?
2043   // TODO: We should rename to CanVisiblyThrow, as some instructions (like HNewInstance)
2044   // could throw OOME, but it is still OK to remove them if they are unused.
2045   virtual bool CanThrow() const { return false; }
2046 
2047   // Does the instruction always throw an exception unconditionally?
2048   virtual bool AlwaysThrows() const { return false; }
2049 
2050   bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }
2051 
2052   bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
2053   bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }
2054 
2055   // Does not apply to all instructions, but having this at top level greatly
2056   // simplifies the null check elimination.
2057   // TODO: Consider merging can_be_null into ReferenceTypeInfo.
2058   virtual bool CanBeNull() const {
2059     DCHECK_EQ(GetType(), DataType::Type::kReference) << "CanBeNull only applies to reference types";
2060     return true;
2061   }
2062 
2063   virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
2064     return false;
2065   }
2066 
2067   virtual bool IsActualObject() const {
2068     return GetType() == DataType::Type::kReference;
2069   }
2070 
2071   void SetReferenceTypeInfo(ReferenceTypeInfo rti);
2072 
2073   ReferenceTypeInfo GetReferenceTypeInfo() const {
2074     DCHECK_EQ(GetType(), DataType::Type::kReference);
2075     return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
2076                                               GetPackedFlag<kFlagReferenceTypeIsExact>());
2077   }
2078 
2079   void AddUseAt(HInstruction* user, size_t index) {
2080     DCHECK(user != nullptr);
2081     // Note: fixup_end remains valid across push_front().
2082     auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
2083     HUseListNode<HInstruction*>* new_node =
2084         new (GetBlock()->GetGraph()->GetAllocator()) HUseListNode<HInstruction*>(user, index);
2085     uses_.push_front(*new_node);
2086     FixUpUserRecordsAfterUseInsertion(fixup_end);
2087   }
2088 
2089   void AddEnvUseAt(HEnvironment* user, size_t index) {
2090     DCHECK(user != nullptr);
2091     // Note: env_fixup_end remains valid across push_front().
2092     auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
2093     HUseListNode<HEnvironment*>* new_node =
2094         new (GetBlock()->GetGraph()->GetAllocator()) HUseListNode<HEnvironment*>(user, index);
2095     env_uses_.push_front(*new_node);
2096     FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
2097   }
2098 
2099   void RemoveAsUserOfInput(size_t input) {
2100     HUserRecord<HInstruction*> input_use = InputRecordAt(input);
2101     HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
2102     input_use.GetInstruction()->uses_.erase_after(before_use_node);
2103     input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
2104   }
2105 
2106   void RemoveAsUserOfAllInputs() {
2107     for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
2108       HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
2109       input_use.GetInstruction()->uses_.erase_after(before_use_node);
2110       input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
2111     }
2112   }
2113 
2114   const HUseList<HInstruction*>& GetUses() const { return uses_; }
2115   const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }
2116 
2117   bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
2118   bool HasEnvironmentUses() const { return !env_uses_.empty(); }
2119   bool HasNonEnvironmentUses() const { return !uses_.empty(); }
2120   bool HasOnlyOneNonEnvironmentUse() const {
2121     return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
2122   }
2123 
2124   bool IsRemovable() const {
2125     return
2126         !DoesAnyWrite() &&
2127         !CanThrow() &&
2128         !IsSuspendCheck() &&
2129         !IsControlFlow() &&
2130         !IsNativeDebugInfo() &&
2131         !IsParameterValue() &&
2132         // If we added an explicit barrier then we should keep it.
2133         !IsMemoryBarrier() &&
2134         !IsConstructorFence();
2135   }
2136 
2137   bool IsDeadAndRemovable() const {
2138     return IsRemovable() && !HasUses();
2139   }
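  // A minimal dead-code-elimination style sketch built on the predicate above
  // (illustrative only; the real pass lives elsewhere in the compiler). `block` is a
  // hypothetical HBasicBlock*, and GetInstructions()/RemoveInstruction() are assumed
  // from HBasicBlock.
  //
  //   for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
  //     HInstruction* inst = it.Current();
  //     if (inst->IsDeadAndRemovable()) {
  //       block->RemoveInstruction(inst);  // Safe: no uses, no writes, cannot throw.
  //     }
  //   }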
2140 
2141   // Does this instruction strictly dominate `other_instruction`?
2142   // Returns false if this instruction and `other_instruction` are the same.
2143   // Aborts if this instruction and `other_instruction` are both phis.
2144   bool StrictlyDominates(HInstruction* other_instruction) const;
2145 
2146   int GetId() const { return id_; }
2147   void SetId(int id) { id_ = id; }
2148 
2149   int GetSsaIndex() const { return ssa_index_; }
2150   void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
2151   bool HasSsaIndex() const { return ssa_index_ != -1; }
2152 
2153   bool HasEnvironment() const { return environment_ != nullptr; }
2154   HEnvironment* GetEnvironment() const { return environment_; }
2155   // Set the `environment_` field. Raw because this method does not
2156   // update the uses lists.
2157   void SetRawEnvironment(HEnvironment* environment) {
2158     DCHECK(environment_ == nullptr);
2159     DCHECK_EQ(environment->GetHolder(), this);
2160     environment_ = environment;
2161   }
2162 
2163   void InsertRawEnvironment(HEnvironment* environment) {
2164     DCHECK(environment_ != nullptr);
2165     DCHECK_EQ(environment->GetHolder(), this);
2166     DCHECK(environment->GetParent() == nullptr);
2167     environment->parent_ = environment_;
2168     environment_ = environment;
2169   }
2170 
2171   void RemoveEnvironment();
2172 
2173   // Set the environment of this instruction, copying it from `environment`. While
2174   // copying, the uses lists are being updated.
2175   void CopyEnvironmentFrom(HEnvironment* environment) {
2176     DCHECK(environment_ == nullptr);
2177     ArenaAllocator* allocator = GetBlock()->GetGraph()->GetAllocator();
2178     environment_ = new (allocator) HEnvironment(allocator, *environment, this);
2179     environment_->CopyFrom(environment);
2180     if (environment->GetParent() != nullptr) {
2181       environment_->SetAndCopyParentChain(allocator, environment->GetParent());
2182     }
2183   }
2184 
2185   void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
2186                                                 HBasicBlock* block) {
2187     DCHECK(environment_ == nullptr);
2188     ArenaAllocator* allocator = GetBlock()->GetGraph()->GetAllocator();
2189     environment_ = new (allocator) HEnvironment(allocator, *environment, this);
2190     environment_->CopyFromWithLoopPhiAdjustment(environment, block);
2191     if (environment->GetParent() != nullptr) {
2192       environment_->SetAndCopyParentChain(allocator, environment->GetParent());
2193     }
2194   }
2195 
2196   // Returns the number of entries in the environment. Typically, that is the
2197   // number of dex registers in a method. It could be more in case of inlining.
2198   size_t EnvironmentSize() const;
2199 
2200   LocationSummary* GetLocations() const { return locations_; }
2201   void SetLocations(LocationSummary* locations) { locations_ = locations; }
2202 
2203   void ReplaceWith(HInstruction* instruction);
2204   void ReplaceUsesDominatedBy(HInstruction* dominator, HInstruction* replacement);
2205   void ReplaceInput(HInstruction* replacement, size_t index);
2206 
2207   // This is almost the same as doing `ReplaceWith()`. But in this helper, the
2208   // uses of this instruction by `other` are *not* updated.
2209   void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
2210     ReplaceWith(other);
2211     other->ReplaceInput(this, use_index);
2212   }
2213 
2214   // Move `this` instruction before `cursor`
2215   void MoveBefore(HInstruction* cursor, bool do_checks = true);
2216 
2217   // Move `this` before its first user and out of any loops. If there is no
2218   // out-of-loop user that dominates all other users, move the instruction
2219   // to the end of the out-of-loop common dominator of the user's blocks.
2220   //
2221   // This can be used only on non-throwing instructions with no side effects that
2222   // have at least one use but no environment uses.
2223   void MoveBeforeFirstUserAndOutOfLoops();
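  // Usage sketch (illustrative only): sinking a side-effect-free value out of a loop.
  // `instruction` is hypothetical and the guard mirrors the preconditions stated above.
  //
  //   if (!instruction->HasSideEffects() &&
  //       !instruction->CanThrow() &&
  //       instruction->HasNonEnvironmentUses() &&
  //       !instruction->HasEnvironmentUses()) {
  //     instruction->MoveBeforeFirstUserAndOutOfLoops();
  //   }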
2224 
2225 #define INSTRUCTION_TYPE_CHECK(type, super)                                    \
2226   bool Is##type() const;                                                       \
2227   const H##type* As##type() const;                                             \
2228   H##type* As##type();
2229 
2230   FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
2231 #undef INSTRUCTION_TYPE_CHECK
2232 
2233 #define INSTRUCTION_TYPE_CHECK(type, super)                                    \
2234   bool Is##type() const { return (As##type() != nullptr); }                    \
2235   virtual const H##type* As##type() const { return nullptr; }                  \
2236   virtual H##type* As##type() { return nullptr; }
2237   FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
2238 #undef INSTRUCTION_TYPE_CHECK
2239 
2240   // Return a clone of the instruction if it is clonable (shallow copy by default, custom copy
2241   // if a custom copy-constructor is provided for a particular type). If IsClonable() is false for
2242   // the instruction then the behaviour of this function is undefined.
2243   //
2244   // Note: It is semantically valid to create a clone of the instruction only until
2245   // prepare_for_register_allocator phase as lifetime, intervals and codegen info are not
2246   // copied.
2247   //
2248   // Note: HEnvironment and some other fields are not copied and are set to default values, see
2249   // 'explicit HInstruction(const HInstruction& other)' for details.
2250   virtual HInstruction* Clone(ArenaAllocator* arena ATTRIBUTE_UNUSED) const {
2251     LOG(FATAL) << "Cloning is not implemented for the instruction " <<
2252                   DebugName() << " " << GetId();
2253     UNREACHABLE();
2254   }
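  // Cloning sketch (illustrative only): a pass that duplicates instructions, e.g. for
  // loop unrolling or block cloning, is expected to guard on IsClonable() first.
  // `arena` and `instruction` are hypothetical.
  //
  //   if (instruction->IsClonable()) {
  //     HInstruction* copy = instruction->Clone(arena);
  //     // `copy` has no block, id, environment or locations yet; the caller must insert
  //     // it into a block and rebuild that state.
  //   }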
2255 
2256   // Return whether instruction can be cloned (copied).
2257   virtual bool IsClonable() const { return false; }
2258 
2259   // Returns whether the instruction can be moved within the graph.
2260   // TODO: this method is used by LICM and GVN with possibly different
2261   //       meanings? split and rename?
2262   virtual bool CanBeMoved() const { return false; }
2263 
2264   // Returns whether the two instructions are of the same kind.
2265   virtual bool InstructionTypeEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
2266     return false;
2267   }
2268 
2269   // Returns whether any data encoded in the two instructions is equal.
2270   // This method does not look at the inputs. Both instructions must be
2271   // of the same type, otherwise the method has undefined behavior.
2272   virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
2273     return false;
2274   }
2275 
2276   // Returns whether two instructions are equal, that is:
2277   // 1) They have the same type and contain the same data (InstructionDataEquals).
2278   // 2) Their inputs are identical.
2279   bool Equals(const HInstruction* other) const;
2280 
2281   // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
2282   // is adopted and implemented by our C++ compiler(s). For now, we need to hide
2283   // the virtual function because the __attribute__((__pure__)) doesn't really
2284   // apply the strong requirement for virtual functions, preventing optimizations.
2285   InstructionKind GetKind() const { return GetPackedField<InstructionKindField>(); }
2286 
2287   virtual size_t ComputeHashCode() const {
2288     size_t result = GetKind();
2289     for (const HInstruction* input : GetInputs()) {
2290       result = (result * 31) + input->GetId();
2291     }
2292     return result;
2293   }
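  // Usage sketch (illustrative only): ComputeHashCode() and Equals() are meant to be used
  // together, e.g. by a value-numbering style hash set. `a` and `b` are hypothetical.
  //
  //   if (a->ComputeHashCode() == b->ComputeHashCode()) {
  //     // Hash equality is only a filter; Equals() checks kind, data and inputs.
  //     bool same_value = a->Equals(b);
  //   }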
2294 
2295   SideEffects GetSideEffects() const { return side_effects_; }
2296   void SetSideEffects(SideEffects other) { side_effects_ = other; }
2297   void AddSideEffects(SideEffects other) { side_effects_.Add(other); }
2298 
2299   size_t GetLifetimePosition() const { return lifetime_position_; }
2300   void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
2301   LiveInterval* GetLiveInterval() const { return live_interval_; }
2302   void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
2303   bool HasLiveInterval() const { return live_interval_ != nullptr; }
2304 
2305   bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }
2306 
2307   // Returns whether the code generation of the instruction requires access to the
2308   // current method. Such instructions are:
2309   // (1): Instructions that require an environment, as calling the runtime requires
2310   //      walking the stack and having the current method stored at a specific stack address.
2311   // (2): HCurrentMethod, potentially used by HInvokeStaticOrDirect, HLoadString, or HLoadClass
2312   //      to access the dex cache.
2313   bool NeedsCurrentMethod() const {
2314     return NeedsEnvironment() || IsCurrentMethod();
2315   }
2316 
2317   // Returns whether the code generation of the instruction requires access to the
2318   // dex cache of the current method's declaring class via the current method.
2319   virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }
2320 
2321   // Does this instruction have any use in an environment before
2322   // control flow hits 'other'?
2323   bool HasAnyEnvironmentUseBefore(HInstruction* other);
2324 
2325   // Remove all references to environment uses of this instruction.
2326   // The caller must ensure that this is safe to do.
2327   void RemoveEnvironmentUsers();
2328 
2329   bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
2330   void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }
2331 
2332  protected:
2333   // If set, the machine code for this instruction is assumed to be generated by
2334   // its users. Used by liveness analysis to compute use positions accordingly.
2335   static constexpr size_t kFlagEmittedAtUseSite = 0u;
2336   static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
2337   static constexpr size_t kFieldInstructionKind = kFlagReferenceTypeIsExact + 1;
2338   static constexpr size_t kFieldInstructionKindSize =
2339       MinimumBitsToStore(static_cast<size_t>(InstructionKind::kLastInstructionKind - 1));
2340   static constexpr size_t kNumberOfGenericPackedBits =
2341       kFieldInstructionKind + kFieldInstructionKindSize;
2342   static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;
2343 
2344   static_assert(kNumberOfGenericPackedBits <= kMaxNumberOfPackedBits,
2345                 "Too many generic packed fields");
2346 
2347   const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
2348     return GetInputRecords()[i];
2349   }
2350 
2351   void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
2352     ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
2353     input_records[index] = input;
2354   }
2355 
2356   uint32_t GetPackedFields() const {
2357     return packed_fields_;
2358   }
2359 
2360   template <size_t flag>
2361   bool GetPackedFlag() const {
2362     return (packed_fields_ & (1u << flag)) != 0u;
2363   }
2364 
2365   template <size_t flag>
2366   void SetPackedFlag(bool value = true) {
2367     packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
2368   }
2369 
2370   template <typename BitFieldType>
2371   typename BitFieldType::value_type GetPackedField() const {
2372     return BitFieldType::Decode(packed_fields_);
2373   }
2374 
2375   template <typename BitFieldType>
2376   void SetPackedField(typename BitFieldType::value_type value) {
2377     DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
2378     packed_fields_ = BitFieldType::Update(value, packed_fields_);
2379   }
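  // Sketch of how a subclass typically declares its own packed field on top of the
  // generic bits above (illustrative only; `FooKind`, `kFieldFoo` and friends are
  // hypothetical names):
  //
  //   static constexpr size_t kFieldFoo = kNumberOfGenericPackedBits;
  //   static constexpr size_t kFieldFooSize =
  //       MinimumBitsToStore(static_cast<size_t>(FooKind::kLast));
  //   static constexpr size_t kNumberOfFooPackedBits = kFieldFoo + kFieldFooSize;
  //   static_assert(kNumberOfFooPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  //   using FooField = BitField<FooKind, kFieldFoo, kFieldFooSize>;
  //
  //   FooKind GetFoo() const { return GetPackedField<FooField>(); }
  //   void SetFoo(FooKind value) { SetPackedField<FooField>(value); }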
2380 
2381   // Copy construction for the instruction (used for Clone function).
2382   //
2383   // Fields (e.g. lifetime, intervals and codegen info) associated with phases starting from
2384   // prepare_for_register_allocator are not copied (set to default values).
2385   //
2386   // Copy constructors must be provided for every HInstruction type; the default copy
2387   // constructor is fine for most of them. However, some instructions need a custom copy
2388   // constructor, namely those with non-trivially copyable fields that require special
2389   // handling when copied.
2390   explicit HInstruction(const HInstruction& other)
2391       : previous_(nullptr),
2392         next_(nullptr),
2393         block_(nullptr),
2394         dex_pc_(other.dex_pc_),
2395         id_(-1),
2396         ssa_index_(-1),
2397         packed_fields_(other.packed_fields_),
2398         environment_(nullptr),
2399         locations_(nullptr),
2400         live_interval_(nullptr),
2401         lifetime_position_(kNoLifetime),
2402         side_effects_(other.side_effects_),
2403         reference_type_handle_(other.reference_type_handle_) {
2404   }
2405 
2406  private:
2407   using InstructionKindField =
2408      BitField<InstructionKind, kFieldInstructionKind, kFieldInstructionKindSize>;
2409 
2410   void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
2411     auto before_use_node = uses_.before_begin();
2412     for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
2413       HInstruction* user = use_node->GetUser();
2414       size_t input_index = use_node->GetIndex();
2415       user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
2416       before_use_node = use_node;
2417     }
2418   }
2419 
2420   void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
2421     auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
2422     if (next != uses_.end()) {
2423       HInstruction* next_user = next->GetUser();
2424       size_t next_index = next->GetIndex();
2425       DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
2426       next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
2427     }
2428   }
2429 
FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment * >::iterator env_fixup_end)2430   void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
2431     auto before_env_use_node = env_uses_.before_begin();
2432     for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
2433       HEnvironment* user = env_use_node->GetUser();
2434       size_t input_index = env_use_node->GetIndex();
2435       user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
2436       before_env_use_node = env_use_node;
2437     }
2438   }
2439 
FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment * >::iterator before_env_use_node)2440   void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
2441     auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
2442     if (next != env_uses_.end()) {
2443       HEnvironment* next_user = next->GetUser();
2444       size_t next_index = next->GetIndex();
2445       DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
2446       next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
2447     }
2448   }
2449 
2450   HInstruction* previous_;
2451   HInstruction* next_;
2452   HBasicBlock* block_;
2453   const uint32_t dex_pc_;
2454 
2455   // An instruction gets an id when it is added to the graph.
2456   // It reflects creation order. A negative id means the instruction
2457   // has not been added to the graph.
2458   int id_;
2459 
2460   // When doing liveness analysis, instructions that have uses get an SSA index.
2461   int ssa_index_;
2462 
2463   // Packed fields.
2464   uint32_t packed_fields_;
2465 
2466   // List of instructions that have this instruction as input.
2467   HUseList<HInstruction*> uses_;
2468 
2469   // List of environments that contain this instruction.
2470   HUseList<HEnvironment*> env_uses_;
2471 
2472   // The environment associated with this instruction. Not null if the instruction
2473   // might jump out of the method.
2474   HEnvironment* environment_;
2475 
2476   // Set by the code generator.
2477   LocationSummary* locations_;
2478 
2479   // Set by the liveness analysis.
2480   LiveInterval* live_interval_;
2481 
2482   // Set by the liveness analysis, this is the position in a linear
2483   // order of blocks where this instruction's live interval starts.
2484   size_t lifetime_position_;
2485 
2486   SideEffects side_effects_;
2487 
2488   // The reference handle part of the reference type info.
2489   // The IsExact() flag is stored in packed fields.
2490   // TODO: for primitive types this should be marked as invalid.
2491   ReferenceTypeInfo::TypeHandle reference_type_handle_;
2492 
2493   friend class GraphChecker;
2494   friend class HBasicBlock;
2495   friend class HEnvironment;
2496   friend class HGraph;
2497   friend class HInstructionList;
2498 };
2499 std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);
2500 
2501 // Iterates over the instructions while preserving the next instruction,
2502 // in case the current instruction gets removed from the list by the user
2503 // of this iterator.
2504 class HInstructionIterator : public ValueObject {
2505  public:
HInstructionIterator(const HInstructionList & instructions)2506   explicit HInstructionIterator(const HInstructionList& instructions)
2507       : instruction_(instructions.first_instruction_) {
2508     next_ = Done() ? nullptr : instruction_->GetNext();
2509   }
2510 
Done()2511   bool Done() const { return instruction_ == nullptr; }
Current()2512   HInstruction* Current() const { return instruction_; }
Advance()2513   void Advance() {
2514     instruction_ = next_;
2515     next_ = Done() ? nullptr : instruction_->GetNext();
2516   }
2517 
2518  private:
2519   HInstruction* instruction_;
2520   HInstruction* next_;
2521 
2522   DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
2523 };
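
// Illustrative usage sketch (assumed caller code, not part of this header): because the iterator
// caches the next instruction up front, the current instruction may be removed from its block
// while iterating, for example:
//
//   for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
//     HInstruction* instruction = it.Current();
//     if (ShouldRemove(instruction)) {            // Hypothetical predicate for the example.
//       block->RemoveInstruction(instruction);    // Safe: `it` already saved GetNext().
//     }
//   }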
2524 
2525 // Iterates over the instructions without saving the next instruction,
2526 // therefore handling changes in the graph potentially made by the user
2527 // of this iterator.
2528 class HInstructionIteratorHandleChanges : public ValueObject {
2529  public:
HInstructionIteratorHandleChanges(const HInstructionList & instructions)2530   explicit HInstructionIteratorHandleChanges(const HInstructionList& instructions)
2531       : instruction_(instructions.first_instruction_) {
2532   }
2533 
Done()2534   bool Done() const { return instruction_ == nullptr; }
Current()2535   HInstruction* Current() const { return instruction_; }
Advance()2536   void Advance() {
2537     instruction_ = instruction_->GetNext();
2538   }
2539 
2540  private:
2541   HInstruction* instruction_;
2542 
2543   DISALLOW_COPY_AND_ASSIGN(HInstructionIteratorHandleChanges);
2544 };
2545 
2546 
2547 class HBackwardInstructionIterator : public ValueObject {
2548  public:
HBackwardInstructionIterator(const HInstructionList & instructions)2549   explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2550       : instruction_(instructions.last_instruction_) {
2551     next_ = Done() ? nullptr : instruction_->GetPrevious();
2552   }
2553 
Done()2554   bool Done() const { return instruction_ == nullptr; }
Current()2555   HInstruction* Current() const { return instruction_; }
Advance()2556   void Advance() {
2557     instruction_ = next_;
2558     next_ = Done() ? nullptr : instruction_->GetPrevious();
2559   }
2560 
2561  private:
2562   HInstruction* instruction_;
2563   HInstruction* next_;
2564 
2565   DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
2566 };
2567 
2568 class HVariableInputSizeInstruction : public HInstruction {
2569  public:
2570   using HInstruction::GetInputRecords;  // Keep the const version visible.
GetInputRecords()2571   ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
2572     return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
2573   }
2574 
2575   void AddInput(HInstruction* input);
2576   void InsertInputAt(size_t index, HInstruction* input);
2577   void RemoveInputAt(size_t index);
2578 
2579   // Removes all the inputs.
2580   // Also removes this instruction from each input's use list
2581   // (for non-environment uses only).
2582   void RemoveAllInputs();
2583 
2584  protected:
HVariableInputSizeInstruction(InstructionKind inst_kind,SideEffects side_effects,uint32_t dex_pc,ArenaAllocator * allocator,size_t number_of_inputs,ArenaAllocKind kind)2585   HVariableInputSizeInstruction(InstructionKind inst_kind,
2586                                 SideEffects side_effects,
2587                                 uint32_t dex_pc,
2588                                 ArenaAllocator* allocator,
2589                                 size_t number_of_inputs,
2590                                 ArenaAllocKind kind)
2591       : HInstruction(inst_kind, side_effects, dex_pc),
2592         inputs_(number_of_inputs, allocator->Adapter(kind)) {}
2593 
2594   DEFAULT_COPY_CONSTRUCTOR(VariableInputSizeInstruction);
2595 
2596   ArenaVector<HUserRecord<HInstruction*>> inputs_;
2597 };
2598 
2599 template<size_t N>
2600 class HTemplateInstruction: public HInstruction {
2601  public:
2602   HTemplateInstruction<N>(InstructionKind kind, SideEffects side_effects, uint32_t dex_pc)
HInstruction(kind,side_effects,dex_pc)2603       : HInstruction(kind, side_effects, dex_pc), inputs_() {}
~HTemplateInstruction()2604   virtual ~HTemplateInstruction() {}
2605 
2606   using HInstruction::GetInputRecords;  // Keep the const version visible.
GetInputRecords()2607   ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
2608     return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
2609   }
2610 
2611  protected:
2612   DEFAULT_COPY_CONSTRUCTOR(TemplateInstruction<N>);
2613 
2614  private:
2615   std::array<HUserRecord<HInstruction*>, N> inputs_;
2616 
2617   friend class SsaBuilder;
2618 };
2619 
2620 // HTemplateInstruction specialization for N=0.
2621 template<>
2622 class HTemplateInstruction<0>: public HInstruction {
2623  public:
2624   explicit HTemplateInstruction<0>(InstructionKind kind, SideEffects side_effects, uint32_t dex_pc)
HInstruction(kind,side_effects,dex_pc)2625       : HInstruction(kind, side_effects, dex_pc) {}
2626 
~HTemplateInstruction()2627   virtual ~HTemplateInstruction() {}
2628 
2629   using HInstruction::GetInputRecords;  // Keep the const version visible.
GetInputRecords()2630   ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
2631     return ArrayRef<HUserRecord<HInstruction*>>();
2632   }
2633 
2634  protected:
2635   DEFAULT_COPY_CONSTRUCTOR(TemplateInstruction<0>);
2636 
2637  private:
2638   friend class SsaBuilder;
2639 };
2640 
2641 template<intptr_t N>
2642 class HExpression : public HTemplateInstruction<N> {
2643  public:
2644   using HInstruction::InstructionKind;
2645   HExpression<N>(InstructionKind kind,
2646                  DataType::Type type,
2647                  SideEffects side_effects,
2648                  uint32_t dex_pc)
2649       : HTemplateInstruction<N>(kind, side_effects, dex_pc) {
2650     this->template SetPackedField<TypeField>(type);
2651   }
~HExpression()2652   virtual ~HExpression() {}
2653 
GetType()2654   DataType::Type GetType() const OVERRIDE {
2655     return TypeField::Decode(this->GetPackedFields());
2656   }
2657 
2658  protected:
2659   static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
2660   static constexpr size_t kFieldTypeSize =
2661       MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
2662   static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
2663   static_assert(kNumberOfExpressionPackedBits <= HInstruction::kMaxNumberOfPackedBits,
2664                 "Too many packed fields.");
2665   using TypeField = BitField<DataType::Type, kFieldType, kFieldTypeSize>;
2666   DEFAULT_COPY_CONSTRUCTOR(Expression<N>);
2667 };
2668 
2669 // Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
2670 // instruction that branches to the exit block.
2671 class HReturnVoid FINAL : public HTemplateInstruction<0> {
2672  public:
2673   explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
HTemplateInstruction(kReturnVoid,SideEffects::None (),dex_pc)2674       : HTemplateInstruction(kReturnVoid, SideEffects::None(), dex_pc) {
2675   }
2676 
IsControlFlow()2677   bool IsControlFlow() const OVERRIDE { return true; }
2678 
2679   DECLARE_INSTRUCTION(ReturnVoid);
2680 
2681  protected:
2682   DEFAULT_COPY_CONSTRUCTOR(ReturnVoid);
2683 };
2684 
2685 // Represents dex's RETURN opcodes. A HReturn is a control flow
2686 // instruction that branches to the exit block.
2687 class HReturn FINAL : public HTemplateInstruction<1> {
2688  public:
2689   explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
HTemplateInstruction(kReturn,SideEffects::None (),dex_pc)2690       : HTemplateInstruction(kReturn, SideEffects::None(), dex_pc) {
2691     SetRawInputAt(0, value);
2692   }
2693 
IsControlFlow()2694   bool IsControlFlow() const OVERRIDE { return true; }
2695 
2696   DECLARE_INSTRUCTION(Return);
2697 
2698  protected:
2699   DEFAULT_COPY_CONSTRUCTOR(Return);
2700 };
2701 
2702 class HPhi FINAL : public HVariableInputSizeInstruction {
2703  public:
2704   HPhi(ArenaAllocator* allocator,
2705        uint32_t reg_number,
2706        size_t number_of_inputs,
2707        DataType::Type type,
2708        uint32_t dex_pc = kNoDexPc)
HVariableInputSizeInstruction(kPhi,SideEffects::None (),dex_pc,allocator,number_of_inputs,kArenaAllocPhiInputs)2709       : HVariableInputSizeInstruction(
2710             kPhi,
2711             SideEffects::None(),
2712             dex_pc,
2713             allocator,
2714             number_of_inputs,
2715             kArenaAllocPhiInputs),
2716         reg_number_(reg_number) {
2717     SetPackedField<TypeField>(ToPhiType(type));
2718     DCHECK_NE(GetType(), DataType::Type::kVoid);
2719     // Phis are constructed live and marked dead if conflicting or unused.
2720     // Individual steps of SsaBuilder should assume that if a phi has been
2721     // marked dead, it can be ignored and will be removed by SsaPhiElimination.
2722     SetPackedFlag<kFlagIsLive>(true);
2723     SetPackedFlag<kFlagCanBeNull>(true);
2724   }
2725 
IsClonable()2726   bool IsClonable() const OVERRIDE { return true; }
2727 
2728   // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
ToPhiType(DataType::Type type)2729   static DataType::Type ToPhiType(DataType::Type type) {
2730     return DataType::Kind(type);
2731   }
2732 
IsCatchPhi()2733   bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }
2734 
GetType()2735   DataType::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
SetType(DataType::Type new_type)2736   void SetType(DataType::Type new_type) {
2737     // Make sure that only valid type changes occur. The following are allowed:
2738     //  (1) int  -> float/ref (primitive type propagation),
2739     //  (2) long -> double (primitive type propagation).
2740     DCHECK(GetType() == new_type ||
2741            (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kFloat32) ||
2742            (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kReference) ||
2743            (GetType() == DataType::Type::kInt64 && new_type == DataType::Type::kFloat64));
2744     SetPackedField<TypeField>(new_type);
2745   }
2746 
CanBeNull()2747   bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
SetCanBeNull(bool can_be_null)2748   void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }
2749 
GetRegNumber()2750   uint32_t GetRegNumber() const { return reg_number_; }
2751 
SetDead()2752   void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
SetLive()2753   void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
IsDead()2754   bool IsDead() const { return !IsLive(); }
IsLive()2755   bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }
2756 
IsVRegEquivalentOf(const HInstruction * other)2757   bool IsVRegEquivalentOf(const HInstruction* other) const {
2758     return other != nullptr
2759         && other->IsPhi()
2760         && other->AsPhi()->GetBlock() == GetBlock()
2761         && other->AsPhi()->GetRegNumber() == GetRegNumber();
2762   }
2763 
HasEquivalentPhi()2764   bool HasEquivalentPhi() const {
2765     if (GetPrevious() != nullptr && GetPrevious()->AsPhi()->GetRegNumber() == GetRegNumber()) {
2766       return true;
2767     }
2768     if (GetNext() != nullptr && GetNext()->AsPhi()->GetRegNumber() == GetRegNumber()) {
2769       return true;
2770     }
2771     return false;
2772   }
2773 
2774   // Returns the next equivalent phi (starting from the current one) or null if there is none.
2775   // An equivalent phi is a phi having the same dex register and type.
2776   // It assumes that phis with the same dex register are adjacent.
GetNextEquivalentPhiWithSameType()2777   HPhi* GetNextEquivalentPhiWithSameType() {
2778     HInstruction* next = GetNext();
2779     while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
2780       if (next->GetType() == GetType()) {
2781         return next->AsPhi();
2782       }
2783       next = next->GetNext();
2784     }
2785     return nullptr;
2786   }
2787 
2788   DECLARE_INSTRUCTION(Phi);
2789 
2790  protected:
2791   DEFAULT_COPY_CONSTRUCTOR(Phi);
2792 
2793  private:
2794   static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
2795   static constexpr size_t kFieldTypeSize =
2796       MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
2797   static constexpr size_t kFlagIsLive = kFieldType + kFieldTypeSize;
2798   static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
2799   static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
2800   static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
2801   using TypeField = BitField<DataType::Type, kFieldType, kFieldTypeSize>;
2802 
2803   const uint32_t reg_number_;
2804 };
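
// Illustrative sketch (assumed builder-style usage, not part of this header): a phi for dex
// register `vreg` with one input per predecessor of `block` can be set up roughly as follows:
//
//   HPhi* phi = new (allocator) HPhi(allocator,
//                                    vreg,
//                                    block->GetPredecessors().size(),
//                                    HPhi::ToPhiType(incoming_values[0]->GetType()));
//   for (size_t i = 0; i < block->GetPredecessors().size(); ++i) {
//     phi->SetRawInputAt(i, incoming_values[i]);  // `incoming_values` is hypothetical here.
//   }
//   block->AddPhi(phi);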
2805 
2806 // The exit instruction is the only instruction of the exit block.
2807 // Instructions that leave the method (HThrow and HReturn) must branch to the
2808 // exit block.
2809 class HExit FINAL : public HTemplateInstruction<0> {
2810  public:
2811   explicit HExit(uint32_t dex_pc = kNoDexPc)
HTemplateInstruction(kExit,SideEffects::None (),dex_pc)2812       : HTemplateInstruction(kExit, SideEffects::None(), dex_pc) {
2813   }
2814 
IsControlFlow()2815   bool IsControlFlow() const OVERRIDE { return true; }
2816 
2817   DECLARE_INSTRUCTION(Exit);
2818 
2819  protected:
2820   DEFAULT_COPY_CONSTRUCTOR(Exit);
2821 };
2822 
2823 // Jumps from one block to another.
2824 class HGoto FINAL : public HTemplateInstruction<0> {
2825  public:
2826   explicit HGoto(uint32_t dex_pc = kNoDexPc)
HTemplateInstruction(kGoto,SideEffects::None (),dex_pc)2827       : HTemplateInstruction(kGoto, SideEffects::None(), dex_pc) {
2828   }
2829 
IsClonable()2830   bool IsClonable() const OVERRIDE { return true; }
IsControlFlow()2831   bool IsControlFlow() const OVERRIDE { return true; }
2832 
GetSuccessor()2833   HBasicBlock* GetSuccessor() const {
2834     return GetBlock()->GetSingleSuccessor();
2835   }
2836 
2837   DECLARE_INSTRUCTION(Goto);
2838 
2839  protected:
2840   DEFAULT_COPY_CONSTRUCTOR(Goto);
2841 };
2842 
2843 class HConstant : public HExpression<0> {
2844  public:
2845   explicit HConstant(InstructionKind kind, DataType::Type type, uint32_t dex_pc = kNoDexPc)
HExpression(kind,type,SideEffects::None (),dex_pc)2846       : HExpression(kind, type, SideEffects::None(), dex_pc) {
2847   }
2848 
CanBeMoved()2849   bool CanBeMoved() const OVERRIDE { return true; }
2850 
2851   // Is this constant -1 in the arithmetic sense?
IsMinusOne()2852   virtual bool IsMinusOne() const { return false; }
2853   // Is this constant 0 in the arithmetic sense?
IsArithmeticZero()2854   virtual bool IsArithmeticZero() const { return false; }
2855   // Is this constant a 0-bit pattern?
IsZeroBitPattern()2856   virtual bool IsZeroBitPattern() const { return false; }
2857   // Is this constant 1 in the arithmetic sense?
IsOne()2858   virtual bool IsOne() const { return false; }
2859 
2860   virtual uint64_t GetValueAsUint64() const = 0;
2861 
2862   DECLARE_ABSTRACT_INSTRUCTION(Constant);
2863 
2864  protected:
2865   DEFAULT_COPY_CONSTRUCTOR(Constant);
2866 };
2867 
2868 class HNullConstant FINAL : public HConstant {
2869  public:
InstructionDataEquals(const HInstruction * other ATTRIBUTE_UNUSED)2870   bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2871     return true;
2872   }
2873 
GetValueAsUint64()2874   uint64_t GetValueAsUint64() const OVERRIDE { return 0; }
2875 
ComputeHashCode()2876   size_t ComputeHashCode() const OVERRIDE { return 0; }
2877 
2878   // The null constant representation is a 0-bit pattern.
IsZeroBitPattern()2879   bool IsZeroBitPattern() const OVERRIDE { return true; }
2880 
2881   DECLARE_INSTRUCTION(NullConstant);
2882 
2883  protected:
2884   DEFAULT_COPY_CONSTRUCTOR(NullConstant);
2885 
2886  private:
2887   explicit HNullConstant(uint32_t dex_pc = kNoDexPc)
HConstant(kNullConstant,DataType::Type::kReference,dex_pc)2888       : HConstant(kNullConstant, DataType::Type::kReference, dex_pc) {
2889   }
2890 
2891   friend class HGraph;
2892 };
2893 
2894 // Constants of the type int. Those can be from Dex instructions, or
2895 // synthesized (for example with the if-eqz instruction).
2896 class HIntConstant FINAL : public HConstant {
2897  public:
GetValue()2898   int32_t GetValue() const { return value_; }
2899 
GetValueAsUint64()2900   uint64_t GetValueAsUint64() const OVERRIDE {
2901     return static_cast<uint64_t>(static_cast<uint32_t>(value_));
2902   }
2903 
InstructionDataEquals(const HInstruction * other)2904   bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2905     DCHECK(other->IsIntConstant()) << other->DebugName();
2906     return other->AsIntConstant()->value_ == value_;
2907   }
2908 
ComputeHashCode()2909   size_t ComputeHashCode() const OVERRIDE { return GetValue(); }
2910 
IsMinusOne()2911   bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
IsArithmeticZero()2912   bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
IsZeroBitPattern()2913   bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
IsOne()2914   bool IsOne() const OVERRIDE { return GetValue() == 1; }
2915 
2916   // Integer constants are used to encode Boolean values as well,
2917   // where 1 means true and 0 means false.
IsTrue()2918   bool IsTrue() const { return GetValue() == 1; }
IsFalse()2919   bool IsFalse() const { return GetValue() == 0; }
2920 
2921   DECLARE_INSTRUCTION(IntConstant);
2922 
2923  protected:
2924   DEFAULT_COPY_CONSTRUCTOR(IntConstant);
2925 
2926  private:
2927   explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
HConstant(kIntConstant,DataType::Type::kInt32,dex_pc)2928       : HConstant(kIntConstant, DataType::Type::kInt32, dex_pc), value_(value) {
2929   }
2930   explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
HConstant(kIntConstant,DataType::Type::kInt32,dex_pc)2931       : HConstant(kIntConstant, DataType::Type::kInt32, dex_pc),
2932         value_(value ? 1 : 0) {
2933   }
2934 
2935   const int32_t value_;
2936 
2937   friend class HGraph;
2938   ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
2939   ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
2940 };
2941 
2942 class HLongConstant FINAL : public HConstant {
2943  public:
GetValue()2944   int64_t GetValue() const { return value_; }
2945 
GetValueAsUint64()2946   uint64_t GetValueAsUint64() const OVERRIDE { return value_; }
2947 
InstructionDataEquals(const HInstruction * other)2948   bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2949     DCHECK(other->IsLongConstant()) << other->DebugName();
2950     return other->AsLongConstant()->value_ == value_;
2951   }
2952 
ComputeHashCode()2953   size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
2954 
IsMinusOne()2955   bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
IsArithmeticZero()2956   bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
IsZeroBitPattern()2957   bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
IsOne()2958   bool IsOne() const OVERRIDE { return GetValue() == 1; }
2959 
2960   DECLARE_INSTRUCTION(LongConstant);
2961 
2962  protected:
2963   DEFAULT_COPY_CONSTRUCTOR(LongConstant);
2964 
2965  private:
2966   explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
HConstant(kLongConstant,DataType::Type::kInt64,dex_pc)2967       : HConstant(kLongConstant, DataType::Type::kInt64, dex_pc),
2968         value_(value) {
2969   }
2970 
2971   const int64_t value_;
2972 
2973   friend class HGraph;
2974 };
2975 
2976 class HFloatConstant FINAL : public HConstant {
2977  public:
GetValue()2978   float GetValue() const { return value_; }
2979 
GetValueAsUint64()2980   uint64_t GetValueAsUint64() const OVERRIDE {
2981     return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
2982   }
2983 
InstructionDataEquals(const HInstruction * other)2984   bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
2985     DCHECK(other->IsFloatConstant()) << other->DebugName();
2986     return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
2987   }
2988 
ComputeHashCode()2989   size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
2990 
IsMinusOne()2991   bool IsMinusOne() const OVERRIDE {
2992     return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
2993   }
IsArithmeticZero()2994   bool IsArithmeticZero() const OVERRIDE {
2995     return std::fpclassify(value_) == FP_ZERO;
2996   }
IsArithmeticPositiveZero()2997   bool IsArithmeticPositiveZero() const {
2998     return IsArithmeticZero() && !std::signbit(value_);
2999   }
IsArithmeticNegativeZero()3000   bool IsArithmeticNegativeZero() const {
3001     return IsArithmeticZero() && std::signbit(value_);
3002   }
IsZeroBitPattern()3003   bool IsZeroBitPattern() const OVERRIDE {
3004     return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
3005   }
IsOne()3006   bool IsOne() const OVERRIDE {
3007     return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
3008   }
IsNaN()3009   bool IsNaN() const {
3010     return std::isnan(value_);
3011   }
3012 
3013   DECLARE_INSTRUCTION(FloatConstant);
3014 
3015  protected:
3016   DEFAULT_COPY_CONSTRUCTOR(FloatConstant);
3017 
3018  private:
3019   explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
HConstant(kFloatConstant,DataType::Type::kFloat32,dex_pc)3020       : HConstant(kFloatConstant, DataType::Type::kFloat32, dex_pc),
3021         value_(value) {
3022   }
3023   explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
HConstant(kFloatConstant,DataType::Type::kFloat32,dex_pc)3024       : HConstant(kFloatConstant, DataType::Type::kFloat32, dex_pc),
3025         value_(bit_cast<float, int32_t>(value)) {
3026   }
3027 
3028   const float value_;
3029 
3030   // Only the SsaBuilder and HGraph can create floating-point constants.
3031   friend class SsaBuilder;
3032   friend class HGraph;
3033 };
3034 
3035 class HDoubleConstant FINAL : public HConstant {
3036  public:
GetValue()3037   double GetValue() const { return value_; }
3038 
GetValueAsUint64()3039   uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }
3040 
InstructionDataEquals(const HInstruction * other)3041   bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3042     DCHECK(other->IsDoubleConstant()) << other->DebugName();
3043     return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
3044   }
3045 
ComputeHashCode()3046   size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
3047 
IsMinusOne()3048   bool IsMinusOne() const OVERRIDE {
3049     return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
3050   }
IsArithmeticZero()3051   bool IsArithmeticZero() const OVERRIDE {
3052     return std::fpclassify(value_) == FP_ZERO;
3053   }
IsArithmeticPositiveZero()3054   bool IsArithmeticPositiveZero() const {
3055     return IsArithmeticZero() && !std::signbit(value_);
3056   }
IsArithmeticNegativeZero()3057   bool IsArithmeticNegativeZero() const {
3058     return IsArithmeticZero() && std::signbit(value_);
3059   }
IsZeroBitPattern()3060   bool IsZeroBitPattern() const OVERRIDE {
3061     return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
3062   }
IsOne()3063   bool IsOne() const OVERRIDE {
3064     return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
3065   }
IsNaN()3066   bool IsNaN() const {
3067     return std::isnan(value_);
3068   }
3069 
3070   DECLARE_INSTRUCTION(DoubleConstant);
3071 
3072  protected:
3073   DEFAULT_COPY_CONSTRUCTOR(DoubleConstant);
3074 
3075  private:
3076   explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
HConstant(kDoubleConstant,DataType::Type::kFloat64,dex_pc)3077       : HConstant(kDoubleConstant, DataType::Type::kFloat64, dex_pc),
3078         value_(value) {
3079   }
3080   explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
HConstant(kDoubleConstant,DataType::Type::kFloat64,dex_pc)3081       : HConstant(kDoubleConstant, DataType::Type::kFloat64, dex_pc),
3082         value_(bit_cast<double, int64_t>(value)) {
3083   }
3084 
3085   const double value_;
3086 
3087   // Only the SsaBuilder and HGraph can create floating-point constants.
3088   friend class SsaBuilder;
3089   friend class HGraph;
3090 };
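
// Note on the zero predicates above (an illustrative summary following the IEEE-754 encodings):
// for floating-point constants, IsArithmeticZero() is true for both +0.0 and -0.0, since
// std::fpclassify() reports FP_ZERO for either sign, whereas IsZeroBitPattern() holds only for
// +0.0, whose representation is an all-zero bit pattern:
//
//   value_ == +0.0   =>  IsArithmeticZero() == true,   IsZeroBitPattern() == true
//   value_ == -0.0   =>  IsArithmeticZero() == true,   IsZeroBitPattern() == false
//   value_ is NaN    =>  IsArithmeticZero() == false,  IsNaN() == true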
3091 
3092 // Conditional branch. A block ending with an HIf instruction must have
3093 // two successors.
3094 class HIf FINAL : public HTemplateInstruction<1> {
3095  public:
3096   explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
HTemplateInstruction(kIf,SideEffects::None (),dex_pc)3097       : HTemplateInstruction(kIf, SideEffects::None(), dex_pc) {
3098     SetRawInputAt(0, input);
3099   }
3100 
IsClonable()3101   bool IsClonable() const OVERRIDE { return true; }
IsControlFlow()3102   bool IsControlFlow() const OVERRIDE { return true; }
3103 
IfTrueSuccessor()3104   HBasicBlock* IfTrueSuccessor() const {
3105     return GetBlock()->GetSuccessors()[0];
3106   }
3107 
IfFalseSuccessor()3108   HBasicBlock* IfFalseSuccessor() const {
3109     return GetBlock()->GetSuccessors()[1];
3110   }
3111 
3112   DECLARE_INSTRUCTION(If);
3113 
3114  protected:
3115   DEFAULT_COPY_CONSTRUCTOR(If);
3116 };
3117 
3118 
3119 // Marker instruction which denotes the beginning and/or end of a try block and
3120 // links it to the respective exception handlers. Behaves the same as a Goto in
3121 // non-exceptional control flow.
3122 // The normal-flow successor is stored at index zero; exception handlers are
3123 // stored at higher indices in no particular order.
3124 class HTryBoundary FINAL : public HTemplateInstruction<0> {
3125  public:
3126   enum class BoundaryKind {
3127     kEntry,
3128     kExit,
3129     kLast = kExit
3130   };
3131 
3132   explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
HTemplateInstruction(kTryBoundary,SideEffects::None (),dex_pc)3133       : HTemplateInstruction(kTryBoundary, SideEffects::None(), dex_pc) {
3134     SetPackedField<BoundaryKindField>(kind);
3135   }
3136 
IsControlFlow()3137   bool IsControlFlow() const OVERRIDE { return true; }
3138 
3139   // Returns the block's non-exceptional successor (index zero).
GetNormalFlowSuccessor()3140   HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }
3141 
GetExceptionHandlers()3142   ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
3143     return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
3144   }
3145 
3146   // Returns whether `handler` is among its exception handlers (non-zero index
3147   // successors).
HasExceptionHandler(const HBasicBlock & handler)3148   bool HasExceptionHandler(const HBasicBlock& handler) const {
3149     DCHECK(handler.IsCatchBlock());
3150     return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
3151   }
3152 
3153   // If not present already, adds `handler` to its block's list of exception
3154   // handlers.
AddExceptionHandler(HBasicBlock * handler)3155   void AddExceptionHandler(HBasicBlock* handler) {
3156     if (!HasExceptionHandler(*handler)) {
3157       GetBlock()->AddSuccessor(handler);
3158     }
3159   }
3160 
GetBoundaryKind()3161   BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
IsEntry()3162   bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }
3163 
3164   bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;
3165 
3166   DECLARE_INSTRUCTION(TryBoundary);
3167 
3168  protected:
3169   DEFAULT_COPY_CONSTRUCTOR(TryBoundary);
3170 
3171  private:
3172   static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
3173   static constexpr size_t kFieldBoundaryKindSize =
3174       MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
3175   static constexpr size_t kNumberOfTryBoundaryPackedBits =
3176       kFieldBoundaryKind + kFieldBoundaryKindSize;
3177   static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
3178                 "Too many packed fields.");
3179   using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;
3180 };
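
// Illustrative sketch (assumed caller code, not part of this header): walking the handlers of a
// try boundary, given the successor layout documented above (normal flow at index 0, handlers at
// higher indices):
//
//   HTryBoundary* boundary = ...;  // Elided: e.g. the last instruction of a try-entry block.
//   HBasicBlock* fall_through = boundary->GetNormalFlowSuccessor();
//   for (HBasicBlock* handler : boundary->GetExceptionHandlers()) {
//     DCHECK(handler->IsCatchBlock());
//   }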
3181 
3182 // Deoptimize to interpreter, upon checking a condition.
3183 class HDeoptimize FINAL : public HVariableInputSizeInstruction {
3184  public:
3185   // Use this constructor when the `HDeoptimize` acts as a barrier, across which no code
3186   // can move.
HDeoptimize(ArenaAllocator * allocator,HInstruction * cond,DeoptimizationKind kind,uint32_t dex_pc)3187   HDeoptimize(ArenaAllocator* allocator,
3188               HInstruction* cond,
3189               DeoptimizationKind kind,
3190               uint32_t dex_pc)
3191       : HVariableInputSizeInstruction(
3192             kDeoptimize,
3193             SideEffects::All(),
3194             dex_pc,
3195             allocator,
3196             /* number_of_inputs */ 1,
3197             kArenaAllocMisc) {
3198     SetPackedFlag<kFieldCanBeMoved>(false);
3199     SetPackedField<DeoptimizeKindField>(kind);
3200     SetRawInputAt(0, cond);
3201   }
3202 
IsClonable()3203   bool IsClonable() const OVERRIDE { return true; }
3204 
3205   // Use this constructor when the `HDeoptimize` guards an instruction; any user that
3206   // relies on the deoptimization to pass should take the `HDeoptimize` as its input
3207   // instead of `guard`.
3208   // We set CanTriggerGC to prevent any intermediate address from being live
3209   // at the point of the `HDeoptimize`.
HDeoptimize(ArenaAllocator * allocator,HInstruction * cond,HInstruction * guard,DeoptimizationKind kind,uint32_t dex_pc)3210   HDeoptimize(ArenaAllocator* allocator,
3211               HInstruction* cond,
3212               HInstruction* guard,
3213               DeoptimizationKind kind,
3214               uint32_t dex_pc)
3215       : HVariableInputSizeInstruction(
3216             kDeoptimize,
3217             SideEffects::CanTriggerGC(),
3218             dex_pc,
3219             allocator,
3220             /* number_of_inputs */ 2,
3221             kArenaAllocMisc) {
3222     SetPackedFlag<kFieldCanBeMoved>(true);
3223     SetPackedField<DeoptimizeKindField>(kind);
3224     SetRawInputAt(0, cond);
3225     SetRawInputAt(1, guard);
3226   }
3227 
CanBeMoved()3228   bool CanBeMoved() const OVERRIDE { return GetPackedFlag<kFieldCanBeMoved>(); }
3229 
InstructionDataEquals(const HInstruction * other)3230   bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3231     return (other->CanBeMoved() == CanBeMoved()) && (other->AsDeoptimize()->GetKind() == GetKind());
3232   }
3233 
NeedsEnvironment()3234   bool NeedsEnvironment() const OVERRIDE { return true; }
3235 
CanThrow()3236   bool CanThrow() const OVERRIDE { return true; }
3237 
GetDeoptimizationKind()3238   DeoptimizationKind GetDeoptimizationKind() const { return GetPackedField<DeoptimizeKindField>(); }
3239 
GetType()3240   DataType::Type GetType() const OVERRIDE {
3241     return GuardsAnInput() ? GuardedInput()->GetType() : DataType::Type::kVoid;
3242   }
3243 
GuardsAnInput()3244   bool GuardsAnInput() const {
3245     return InputCount() == 2;
3246   }
3247 
GuardedInput()3248   HInstruction* GuardedInput() const {
3249     DCHECK(GuardsAnInput());
3250     return InputAt(1);
3251   }
3252 
RemoveGuard()3253   void RemoveGuard() {
3254     RemoveInputAt(1);
3255   }
3256 
3257   DECLARE_INSTRUCTION(Deoptimize);
3258 
3259  protected:
3260   DEFAULT_COPY_CONSTRUCTOR(Deoptimize);
3261 
3262  private:
3263   static constexpr size_t kFieldCanBeMoved = kNumberOfGenericPackedBits;
3264   static constexpr size_t kFieldDeoptimizeKind = kNumberOfGenericPackedBits + 1;
3265   static constexpr size_t kFieldDeoptimizeKindSize =
3266       MinimumBitsToStore(static_cast<size_t>(DeoptimizationKind::kLast));
3267   static constexpr size_t kNumberOfDeoptimizePackedBits =
3268       kFieldDeoptimizeKind + kFieldDeoptimizeKindSize;
3269   static_assert(kNumberOfDeoptimizePackedBits <= kMaxNumberOfPackedBits,
3270                 "Too many packed fields.");
3271   using DeoptimizeKindField =
3272       BitField<DeoptimizationKind, kFieldDeoptimizeKind, kFieldDeoptimizeKindSize>;
3273 };
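
// Illustrative sketch (assumed optimization-pass usage, not part of this header; the helper
// ReplaceUsesDominatedBy() and the kind DeoptimizationKind::kCHA are assumptions made for the
// example): with the guarding form, users that rely on the deoptimization having passed are
// redirected to read the HDeoptimize itself rather than `guard`, roughly:
//
//   HDeoptimize* deopt = new (allocator) HDeoptimize(
//       allocator, condition, guarded_instruction, DeoptimizationKind::kCHA, dex_pc);
//   block->InsertInstructionBefore(deopt, cursor);               // Hypothetical insertion point.
//   guarded_instruction->ReplaceUsesDominatedBy(deopt, deopt);   // Users now consume the guard.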
3274 
3275 // Represents a should_deoptimize flag. Currently used for CHA-based devirtualization.
3276 // The compiled code checks this flag value in a guard before a devirtualized call and,
3277 // if it is true, deoptimizes.
3278 // It has a 4-byte slot on the stack.
3279 // TODO: allocate a register for this flag.
3280 class HShouldDeoptimizeFlag FINAL : public HVariableInputSizeInstruction {
3281  public:
3282   // CHA guards are optimized only in a separate pass and have no side effects
3283   // with regard to other passes.
HShouldDeoptimizeFlag(ArenaAllocator * allocator,uint32_t dex_pc)3284   HShouldDeoptimizeFlag(ArenaAllocator* allocator, uint32_t dex_pc)
3285       : HVariableInputSizeInstruction(kShouldDeoptimizeFlag,
3286                                       SideEffects::None(),
3287                                       dex_pc,
3288                                       allocator,
3289                                       0,
3290                                       kArenaAllocCHA) {
3291   }
3292 
GetType()3293   DataType::Type GetType() const OVERRIDE { return DataType::Type::kInt32; }
3294 
3295   // We do all CHA guard elimination/motion in a single pass, after which there is no
3296   // further guard elimination/motion since a guard might have been used to justify the
3297   // elimination of another guard. Therefore, we pretend this guard cannot be moved
3298   // to avoid other optimizations trying to move it.
CanBeMoved()3299   bool CanBeMoved() const OVERRIDE { return false; }
3300 
3301   DECLARE_INSTRUCTION(ShouldDeoptimizeFlag);
3302 
3303  protected:
3304   DEFAULT_COPY_CONSTRUCTOR(ShouldDeoptimizeFlag);
3305 };
3306 
3307 // Represents the ArtMethod that was passed as a first argument to
3308 // the method. It is used by instructions that depend on it, like
3309 // instructions that work with the dex cache.
3310 class HCurrentMethod FINAL : public HExpression<0> {
3311  public:
3312   explicit HCurrentMethod(DataType::Type type, uint32_t dex_pc = kNoDexPc)
HExpression(kCurrentMethod,type,SideEffects::None (),dex_pc)3313       : HExpression(kCurrentMethod, type, SideEffects::None(), dex_pc) {
3314   }
3315 
3316   DECLARE_INSTRUCTION(CurrentMethod);
3317 
3318  protected:
3319   DEFAULT_COPY_CONSTRUCTOR(CurrentMethod);
3320 };
3321 
3322 // Fetches an ArtMethod from the virtual table or the interface method table
3323 // of a class.
3324 class HClassTableGet FINAL : public HExpression<1> {
3325  public:
3326   enum class TableKind {
3327     kVTable,
3328     kIMTable,
3329     kLast = kIMTable
3330   };
HClassTableGet(HInstruction * cls,DataType::Type type,TableKind kind,size_t index,uint32_t dex_pc)3331   HClassTableGet(HInstruction* cls,
3332                  DataType::Type type,
3333                  TableKind kind,
3334                  size_t index,
3335                  uint32_t dex_pc)
3336       : HExpression(kClassTableGet, type, SideEffects::None(), dex_pc),
3337         index_(index) {
3338     SetPackedField<TableKindField>(kind);
3339     SetRawInputAt(0, cls);
3340   }
3341 
IsClonable()3342   bool IsClonable() const OVERRIDE { return true; }
CanBeMoved()3343   bool CanBeMoved() const OVERRIDE { return true; }
InstructionDataEquals(const HInstruction * other)3344   bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3345     return other->AsClassTableGet()->GetIndex() == index_ &&
3346         other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
3347   }
3348 
GetTableKind()3349   TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
GetIndex()3350   size_t GetIndex() const { return index_; }
3351 
3352   DECLARE_INSTRUCTION(ClassTableGet);
3353 
3354  protected:
3355   DEFAULT_COPY_CONSTRUCTOR(ClassTableGet);
3356 
3357  private:
3358   static constexpr size_t kFieldTableKind = kNumberOfExpressionPackedBits;
3359   static constexpr size_t kFieldTableKindSize =
3360       MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
3361   static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
3362   static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
3363                 "Too many packed fields.");
3364   using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKindSize>;
3365 
3366   // The index of the ArtMethod in the table.
3367   const size_t index_;
3368 };
3369 
3370 // PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
3371 // have one successor for each entry in the switch table, and the final successor
3372 // will be the block containing the next Dex opcode.
3373 class HPackedSwitch FINAL : public HTemplateInstruction<1> {
3374  public:
3375   HPackedSwitch(int32_t start_value,
3376                 uint32_t num_entries,
3377                 HInstruction* input,
3378                 uint32_t dex_pc = kNoDexPc)
HTemplateInstruction(kPackedSwitch,SideEffects::None (),dex_pc)3379     : HTemplateInstruction(kPackedSwitch, SideEffects::None(), dex_pc),
3380       start_value_(start_value),
3381       num_entries_(num_entries) {
3382     SetRawInputAt(0, input);
3383   }
3384 
IsClonable()3385   bool IsClonable() const OVERRIDE { return true; }
3386 
IsControlFlow()3387   bool IsControlFlow() const OVERRIDE { return true; }
3388 
GetStartValue()3389   int32_t GetStartValue() const { return start_value_; }
3390 
GetNumEntries()3391   uint32_t GetNumEntries() const { return num_entries_; }
3392 
GetDefaultBlock()3393   HBasicBlock* GetDefaultBlock() const {
3394     // Last entry is the default block.
3395     return GetBlock()->GetSuccessors()[num_entries_];
3396   }
3397   DECLARE_INSTRUCTION(PackedSwitch);
3398 
3399  protected:
3400   DEFAULT_COPY_CONSTRUCTOR(PackedSwitch);
3401 
3402  private:
3403   const int32_t start_value_;
3404   const uint32_t num_entries_;
3405 };
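
// Illustrative sketch (assumed caller code, not part of this header): with the successor layout
// described above, case `k` (0 <= k < GetNumEntries()) corresponds to value GetStartValue() + k
// and jumps to successor `k`; any other value goes to GetDefaultBlock():
//
//   HPackedSwitch* sw = ...;  // Elided: the last instruction of the switch block.
//   int32_t value = ...;      // Elided: a switched-on value known at analysis time.
//   int64_t offset = static_cast<int64_t>(value) - sw->GetStartValue();
//   HBasicBlock* target = (offset >= 0 && offset < sw->GetNumEntries())
//       ? sw->GetBlock()->GetSuccessors()[offset]
//       : sw->GetDefaultBlock();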
3406 
3407 class HUnaryOperation : public HExpression<1> {
3408  public:
3409   HUnaryOperation(InstructionKind kind,
3410                   DataType::Type result_type,
3411                   HInstruction* input,
3412                   uint32_t dex_pc = kNoDexPc)
HExpression(kind,result_type,SideEffects::None (),dex_pc)3413       : HExpression(kind, result_type, SideEffects::None(), dex_pc) {
3414     SetRawInputAt(0, input);
3415   }
3416 
3417   // All of the UnaryOperation instructions are clonable.
IsClonable()3418   bool IsClonable() const OVERRIDE { return true; }
3419 
GetInput()3420   HInstruction* GetInput() const { return InputAt(0); }
GetResultType()3421   DataType::Type GetResultType() const { return GetType(); }
3422 
CanBeMoved()3423   bool CanBeMoved() const OVERRIDE { return true; }
InstructionDataEquals(const HInstruction * other ATTRIBUTE_UNUSED)3424   bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
3425     return true;
3426   }
3427 
3428   // Try to statically evaluate `this` and return a HConstant
3429   // containing the result of this evaluation.  If `this` cannot
3430   // be evaluated as a constant, return null.
3431   HConstant* TryStaticEvaluation() const;
3432 
3433   // Apply this operation to `x`.
3434   virtual HConstant* Evaluate(HIntConstant* x) const = 0;
3435   virtual HConstant* Evaluate(HLongConstant* x) const = 0;
3436   virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
3437   virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;
3438 
3439   DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);
3440 
3441  protected:
3442   DEFAULT_COPY_CONSTRUCTOR(UnaryOperation);
3443 };
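
// Illustrative sketch (assumed constant-folding usage, not part of this header): a pass can fold a
// unary operation with a constant input via TryStaticEvaluation(), which dispatches to the
// matching Evaluate() overload, roughly:
//
//   HConstant* folded = unary_op->TryStaticEvaluation();
//   if (folded != nullptr) {
//     unary_op->ReplaceWith(folded);
//     unary_op->GetBlock()->RemoveInstruction(unary_op);
//   }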
3444 
3445 class HBinaryOperation : public HExpression<2> {
3446  public:
3447   HBinaryOperation(InstructionKind kind,
3448                    DataType::Type result_type,
3449                    HInstruction* left,
3450                    HInstruction* right,
3451                    SideEffects side_effects = SideEffects::None(),
3452                    uint32_t dex_pc = kNoDexPc)
HExpression(kind,result_type,side_effects,dex_pc)3453       : HExpression(kind, result_type, side_effects, dex_pc) {
3454     SetRawInputAt(0, left);
3455     SetRawInputAt(1, right);
3456   }
3457 
3458   // All of the BinaryOperation instructions are clonable.
IsClonable()3459   bool IsClonable() const OVERRIDE { return true; }
3460 
GetLeft()3461   HInstruction* GetLeft() const { return InputAt(0); }
GetRight()3462   HInstruction* GetRight() const { return InputAt(1); }
GetResultType()3463   DataType::Type GetResultType() const { return GetType(); }
3464 
IsCommutative()3465   virtual bool IsCommutative() const { return false; }
3466 
3467   // Put constant on the right.
3468   // Returns whether order is changed.
OrderInputsWithConstantOnTheRight()3469   bool OrderInputsWithConstantOnTheRight() {
3470     HInstruction* left = InputAt(0);
3471     HInstruction* right = InputAt(1);
3472     if (left->IsConstant() && !right->IsConstant()) {
3473       ReplaceInput(right, 0);
3474       ReplaceInput(left, 1);
3475       return true;
3476     }
3477     return false;
3478   }
3479 
3480   // Order inputs by instruction id, but favor constant on the right side.
3481   // This helps GVN for commutative ops.
OrderInputs()3482   void OrderInputs() {
3483     DCHECK(IsCommutative());
3484     HInstruction* left = InputAt(0);
3485     HInstruction* right = InputAt(1);
3486     if (left == right || (!left->IsConstant() && right->IsConstant())) {
3487       return;
3488     }
3489     if (OrderInputsWithConstantOnTheRight()) {
3490       return;
3491     }
3492     // Order according to instruction id.
3493     if (left->GetId() > right->GetId()) {
3494       ReplaceInput(right, 0);
3495       ReplaceInput(left, 1);
3496     }
3497   }
3498 
CanBeMoved()3499   bool CanBeMoved() const OVERRIDE { return true; }
InstructionDataEquals(const HInstruction * other ATTRIBUTE_UNUSED)3500   bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
3501     return true;
3502   }
3503 
3504   // Try to statically evaluate `this` and return a HConstant
3505   // containing the result of this evaluation.  If `this` cannot
3506   // be evaluated as a constant, return null.
3507   HConstant* TryStaticEvaluation() const;
3508 
3509   // Apply this operation to `x` and `y`.
Evaluate(HNullConstant * x ATTRIBUTE_UNUSED,HNullConstant * y ATTRIBUTE_UNUSED)3510   virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3511                               HNullConstant* y ATTRIBUTE_UNUSED) const {
3512     LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
3513     UNREACHABLE();
3514   }
3515   virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
3516   virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
Evaluate(HLongConstant * x ATTRIBUTE_UNUSED,HIntConstant * y ATTRIBUTE_UNUSED)3517   virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
3518                               HIntConstant* y ATTRIBUTE_UNUSED) const {
3519     LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
3520     UNREACHABLE();
3521   }
3522   virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
3523   virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;
3524 
3525   // Returns an input that can legally be used as the right input and is
3526   // constant, or null.
3527   HConstant* GetConstantRight() const;
3528 
3529   // If `GetConstantRight()` returns one of the inputs, this returns the other
3530   // one. Otherwise it returns null.
3531   HInstruction* GetLeastConstantLeft() const;
3532 
3533   DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);
3534 
3535  protected:
3536   DEFAULT_COPY_CONSTRUCTOR(BinaryOperation);
3537 };
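
// Illustrative sketch (assumed GVN/simplifier-style usage, not part of this header): for
// commutative operations, OrderInputs() canonicalizes operand order so that equivalent
// expressions hash and compare equal, and GetConstantRight() then exposes the constant operand:
//
//   if (binop->IsCommutative()) {
//     binop->OrderInputs();  // E.g. `Add(const, x)` becomes `Add(x, const)`; ties use instruction ids.
//   }
//   HConstant* rhs = binop->GetConstantRight();  // Null if no input is a constant usable on the right.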
3538 
3539 // The comparison bias applies to floating-point operations and indicates how NaN
3540 // comparisons are treated:
3541 enum class ComparisonBias {
3542   kNoBias,  // bias is not applicable (i.e. for a long operation)
3543   kGtBias,  // return 1 for NaN comparisons
3544   kLtBias,  // return -1 for NaN comparisons
3545   kLast = kLtBias
3546 };
3547 
3548 std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);
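
// Illustrative note (this bias models the dex cmpg/cmpl semantics): with kGtBias an unordered
// comparison (either operand is NaN) behaves as if the left operand were greater, and with
// kLtBias as if it were less. HCondition::CompareFP() below implements exactly this:
//
//   std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y)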
3549 
3550 class HCondition : public HBinaryOperation {
3551  public:
3552   HCondition(InstructionKind kind,
3553              HInstruction* first,
3554              HInstruction* second,
3555              uint32_t dex_pc = kNoDexPc)
HBinaryOperation(kind,DataType::Type::kBool,first,second,SideEffects::None (),dex_pc)3556       : HBinaryOperation(kind,
3557                          DataType::Type::kBool,
3558                          first,
3559                          second,
3560                          SideEffects::None(),
3561                          dex_pc) {
3562     SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
3563   }
3564 
3565   // For code generation purposes, returns whether this instruction is just before
3566   // `instruction`, disregarding moves in between.
3567   bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;
3568 
3569   DECLARE_ABSTRACT_INSTRUCTION(Condition);
3570 
3571   virtual IfCondition GetCondition() const = 0;
3572 
3573   virtual IfCondition GetOppositeCondition() const = 0;
3574 
IsGtBias()3575   bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
IsLtBias()3576   bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }
3577 
GetBias()3578   ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
SetBias(ComparisonBias bias)3579   void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }
3580 
InstructionDataEquals(const HInstruction * other)3581   bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
3582     return GetPackedFields() == other->AsCondition()->GetPackedFields();
3583   }
3584 
IsFPConditionTrueIfNaN()3585   bool IsFPConditionTrueIfNaN() const {
3586     DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3587     IfCondition if_cond = GetCondition();
3588     if (if_cond == kCondNE) {
3589       return true;
3590     } else if (if_cond == kCondEQ) {
3591       return false;
3592     }
3593     return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
3594   }
3595 
IsFPConditionFalseIfNaN()3596   bool IsFPConditionFalseIfNaN() const {
3597     DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3598     IfCondition if_cond = GetCondition();
3599     if (if_cond == kCondEQ) {
3600       return true;
3601     } else if (if_cond == kCondNE) {
3602       return false;
3603     }
3604     return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
3605   }
3606 
3607  protected:
3608   // Needed if we merge a HCompare into a HCondition.
3609   static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
3610   static constexpr size_t kFieldComparisonBiasSize =
3611       MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
3612   static constexpr size_t kNumberOfConditionPackedBits =
3613       kFieldComparisonBias + kFieldComparisonBiasSize;
3614   static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3615   using ComparisonBiasField =
3616       BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
3617 
3618   template <typename T>
Compare(T x,T y)3619   int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
3620 
3621   template <typename T>
CompareFP(T x,T y)3622   int32_t CompareFP(T x, T y) const {
3623     DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3624     DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
3625     // Handle the bias.
3626     return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
3627   }
3628 
3629   // Return an integer constant containing the result of a condition evaluated at compile time.
MakeConstantCondition(bool value,uint32_t dex_pc)3630   HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
3631     return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
3632   }
3633 
3634   DEFAULT_COPY_CONSTRUCTOR(Condition);
3635 };
3636 
3637 // Instruction to check if two inputs are equal to each other.
3638 class HEqual FINAL : public HCondition {
3639  public:
3640   HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
HCondition(kEqual,first,second,dex_pc)3641       : HCondition(kEqual, first, second, dex_pc) {
3642   }
3643 
IsCommutative()3644   bool IsCommutative() const OVERRIDE { return true; }
3645 
Evaluate(HNullConstant * x ATTRIBUTE_UNUSED,HNullConstant * y ATTRIBUTE_UNUSED)3646   HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3647                       HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3648     return MakeConstantCondition(true, GetDexPc());
3649   }
Evaluate(HIntConstant * x,HIntConstant * y)3650   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3651     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3652   }
3653   // In the following Evaluate methods, a HCompare instruction has
3654   // been merged into this HEqual instruction; evaluate it as
3655   // `Compare(x, y) == 0`.
Evaluate(HLongConstant * x,HLongConstant * y)3656   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3657     return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
3658                                  GetDexPc());
3659   }
Evaluate(HFloatConstant * x,HFloatConstant * y)3660   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3661     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3662   }
Evaluate(HDoubleConstant * x,HDoubleConstant * y)3663   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3664     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3665   }
3666 
3667   DECLARE_INSTRUCTION(Equal);
3668 
GetCondition()3669   IfCondition GetCondition() const OVERRIDE {
3670     return kCondEQ;
3671   }
3672 
GetOppositeCondition()3673   IfCondition GetOppositeCondition() const OVERRIDE {
3674     return kCondNE;
3675   }
3676 
3677  protected:
3678   DEFAULT_COPY_CONSTRUCTOR(Equal);
3679 
3680  private:
Compute(T x,T y)3681   template <typename T> static bool Compute(T x, T y) { return x == y; }
3682 };
3683 
3684 class HNotEqual FINAL : public HCondition {
3685  public:
3686   HNotEqual(HInstruction* first, HInstruction* second,
3687             uint32_t dex_pc = kNoDexPc)
HCondition(kNotEqual,first,second,dex_pc)3688       : HCondition(kNotEqual, first, second, dex_pc) {
3689   }
3690 
IsCommutative()3691   bool IsCommutative() const OVERRIDE { return true; }
3692 
Evaluate(HNullConstant * x ATTRIBUTE_UNUSED,HNullConstant * y ATTRIBUTE_UNUSED)3693   HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3694                       HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3695     return MakeConstantCondition(false, GetDexPc());
3696   }
Evaluate(HIntConstant * x,HIntConstant * y)3697   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3698     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3699   }
3700   // In the following Evaluate methods, a HCompare instruction has
3701   // been merged into this HNotEqual instruction; evaluate it as
3702   // `Compare(x, y) != 0`.
Evaluate(HLongConstant * x,HLongConstant * y)3703   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3704     return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3705   }
Evaluate(HFloatConstant * x,HFloatConstant * y)3706   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3707     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3708   }
Evaluate(HDoubleConstant * x,HDoubleConstant * y)3709   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3710     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3711   }
3712 
3713   DECLARE_INSTRUCTION(NotEqual);
3714 
GetCondition()3715   IfCondition GetCondition() const OVERRIDE {
3716     return kCondNE;
3717   }
3718 
GetOppositeCondition()3719   IfCondition GetOppositeCondition() const OVERRIDE {
3720     return kCondEQ;
3721   }
3722 
3723  protected:
3724   DEFAULT_COPY_CONSTRUCTOR(NotEqual);
3725 
3726  private:
Compute(T x,T y)3727   template <typename T> static bool Compute(T x, T y) { return x != y; }
3728 };
3729 
3730 class HLessThan FINAL : public HCondition {
3731  public:
3732   HLessThan(HInstruction* first, HInstruction* second,
3733             uint32_t dex_pc = kNoDexPc)
HCondition(kLessThan,first,second,dex_pc)3734       : HCondition(kLessThan, first, second, dex_pc) {
3735   }
3736 
Evaluate(HIntConstant * x,HIntConstant * y)3737   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3738     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3739   }
3740   // In the following Evaluate methods, a HCompare instruction has
3741   // been merged into this HLessThan instruction; evaluate it as
3742   // `Compare(x, y) < 0`.
Evaluate(HLongConstant * x,HLongConstant * y)3743   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3744     return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3745   }
Evaluate(HFloatConstant * x,HFloatConstant * y)3746   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3747     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3748   }
Evaluate(HDoubleConstant * x,HDoubleConstant * y)3749   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3750     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3751   }
3752 
3753   DECLARE_INSTRUCTION(LessThan);
3754 
GetCondition()3755   IfCondition GetCondition() const OVERRIDE {
3756     return kCondLT;
3757   }
3758 
GetOppositeCondition()3759   IfCondition GetOppositeCondition() const OVERRIDE {
3760     return kCondGE;
3761   }
3762 
3763  protected:
3764   DEFAULT_COPY_CONSTRUCTOR(LessThan);
3765 
3766  private:
Compute(T x,T y)3767   template <typename T> static bool Compute(T x, T y) { return x < y; }
3768 };
3769 
3770 class HLessThanOrEqual FINAL : public HCondition {
3771  public:
3772   HLessThanOrEqual(HInstruction* first, HInstruction* second,
3773                    uint32_t dex_pc = kNoDexPc)
HCondition(kLessThanOrEqual,first,second,dex_pc)3774       : HCondition(kLessThanOrEqual, first, second, dex_pc) {
3775   }
3776 
Evaluate(HIntConstant * x,HIntConstant * y)3777   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3778     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3779   }
3780   // In the following Evaluate methods, a HCompare instruction has
3781   // been merged into this HLessThanOrEqual instruction; evaluate it as
3782   // `Compare(x, y) <= 0`.
Evaluate(HLongConstant * x,HLongConstant * y)3783   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3784     return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3785   }
Evaluate(HFloatConstant * x,HFloatConstant * y)3786   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3787     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3788   }
Evaluate(HDoubleConstant * x,HDoubleConstant * y)3789   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3790     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3791   }
3792 
3793   DECLARE_INSTRUCTION(LessThanOrEqual);
3794 
GetCondition()3795   IfCondition GetCondition() const OVERRIDE {
3796     return kCondLE;
3797   }
3798 
GetOppositeCondition()3799   IfCondition GetOppositeCondition() const OVERRIDE {
3800     return kCondGT;
3801   }
3802 
3803  protected:
3804   DEFAULT_COPY_CONSTRUCTOR(LessThanOrEqual);
3805 
3806  private:
Compute(T x,T y)3807   template <typename T> static bool Compute(T x, T y) { return x <= y; }
3808 };
3809 
3810 class HGreaterThan FINAL : public HCondition {
3811  public:
3812   HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
HCondition(kGreaterThan,first,second,dex_pc)3813       : HCondition(kGreaterThan, first, second, dex_pc) {
3814   }
3815 
Evaluate(HIntConstant * x,HIntConstant * y)3816   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3817     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3818   }
3819   // In the following Evaluate methods, a HCompare instruction has
3820   // been merged into this HGreaterThan instruction; evaluate it as
3821   // `Compare(x, y) > 0`.
Evaluate(HLongConstant * x,HLongConstant * y)3822   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3823     return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3824   }
Evaluate(HFloatConstant * x,HFloatConstant * y)3825   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3826     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3827   }
Evaluate(HDoubleConstant * x,HDoubleConstant * y)3828   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3829     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3830   }
3831 
3832   DECLARE_INSTRUCTION(GreaterThan);
3833 
GetCondition()3834   IfCondition GetCondition() const OVERRIDE {
3835     return kCondGT;
3836   }
3837 
GetOppositeCondition()3838   IfCondition GetOppositeCondition() const OVERRIDE {
3839     return kCondLE;
3840   }
3841 
3842  protected:
3843   DEFAULT_COPY_CONSTRUCTOR(GreaterThan);
3844 
3845  private:
Compute(T x,T y)3846   template <typename T> static bool Compute(T x, T y) { return x > y; }
3847 };
3848 
3849 class HGreaterThanOrEqual FINAL : public HCondition {
3850  public:
3851   HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
HCondition(kGreaterThanOrEqual,first,second,dex_pc)3852       : HCondition(kGreaterThanOrEqual, first, second, dex_pc) {
3853   }
3854 
Evaluate(HIntConstant * x,HIntConstant * y)3855   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3856     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3857   }
3858   // In the following Evaluate methods, a HCompare instruction has
3859   // been merged into this HGreaterThanOrEqual instruction; evaluate it as
3860   // `Compare(x, y) >= 0`.
Evaluate(HLongConstant * x,HLongConstant * y)3861   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3862     return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3863   }
Evaluate(HFloatConstant * x,HFloatConstant * y)3864   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3865     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3866   }
Evaluate(HDoubleConstant * x,HDoubleConstant * y)3867   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3868     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3869   }
3870 
3871   DECLARE_INSTRUCTION(GreaterThanOrEqual);
3872 
GetCondition()3873   IfCondition GetCondition() const OVERRIDE {
3874     return kCondGE;
3875   }
3876 
GetOppositeCondition()3877   IfCondition GetOppositeCondition() const OVERRIDE {
3878     return kCondLT;
3879   }
3880 
3881  protected:
3882   DEFAULT_COPY_CONSTRUCTOR(GreaterThanOrEqual);
3883 
3884  private:
Compute(T x,T y)3885   template <typename T> static bool Compute(T x, T y) { return x >= y; }
3886 };
3887 
3888 class HBelow FINAL : public HCondition {
3889  public:
3890   HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
HCondition(kBelow,first,second,dex_pc)3891       : HCondition(kBelow, first, second, dex_pc) {
3892   }
3893 
Evaluate(HIntConstant * x,HIntConstant * y)3894   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3895     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3896   }
Evaluate(HLongConstant * x,HLongConstant * y)3897   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3898     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3899   }
Evaluate(HFloatConstant * x ATTRIBUTE_UNUSED,HFloatConstant * y ATTRIBUTE_UNUSED)3900   HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3901                       HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3902     LOG(FATAL) << DebugName() << " is not defined for float values";
3903     UNREACHABLE();
3904   }
Evaluate(HDoubleConstant * x ATTRIBUTE_UNUSED,HDoubleConstant * y ATTRIBUTE_UNUSED)3905   HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3906                       HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3907     LOG(FATAL) << DebugName() << " is not defined for double values";
3908     UNREACHABLE();
3909   }
3910 
3911   DECLARE_INSTRUCTION(Below);
3912 
GetCondition()3913   IfCondition GetCondition() const OVERRIDE {
3914     return kCondB;
3915   }
3916 
GetOppositeCondition()3917   IfCondition GetOppositeCondition() const OVERRIDE {
3918     return kCondAE;
3919   }
3920 
3921  protected:
3922   DEFAULT_COPY_CONSTRUCTOR(Below);
3923 
3924  private:
Compute(T x,T y)3925   template <typename T> static bool Compute(T x, T y) {
3926     return MakeUnsigned(x) < MakeUnsigned(y);
3927   }
3928 };
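
// HBelow and the other unsigned conditions below evaluate their integral operands as
// unsigned values, which is why Compute() routes them through MakeUnsigned(). A minimal
// sketch of the difference this makes for negative inputs (plain C++, independent of
// the HIR):
//
//   #include <cstdint>
//
//   int32_t x = -1;  // reinterpreted as 0xFFFFFFFF
//   int32_t y = 1;
//   bool signed_less = x < y;                                          // true
//   bool below = static_cast<uint32_t>(x) < static_cast<uint32_t>(y);  // false
//   // HBelow(x, y) folds to the unsigned result, i.e. false here.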
3929 
3930 class HBelowOrEqual FINAL : public HCondition {
3931  public:
3932   HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
HCondition(kBelowOrEqual,first,second,dex_pc)3933       : HCondition(kBelowOrEqual, first, second, dex_pc) {
3934   }
3935 
Evaluate(HIntConstant * x,HIntConstant * y)3936   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3937     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3938   }
Evaluate(HLongConstant * x,HLongConstant * y)3939   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3940     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3941   }
Evaluate(HFloatConstant * x ATTRIBUTE_UNUSED,HFloatConstant * y ATTRIBUTE_UNUSED)3942   HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3943                       HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3944     LOG(FATAL) << DebugName() << " is not defined for float values";
3945     UNREACHABLE();
3946   }
Evaluate(HDoubleConstant * x ATTRIBUTE_UNUSED,HDoubleConstant * y ATTRIBUTE_UNUSED)3947   HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3948                       HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3949     LOG(FATAL) << DebugName() << " is not defined for double values";
3950     UNREACHABLE();
3951   }
3952 
3953   DECLARE_INSTRUCTION(BelowOrEqual);
3954 
GetCondition()3955   IfCondition GetCondition() const OVERRIDE {
3956     return kCondBE;
3957   }
3958 
GetOppositeCondition()3959   IfCondition GetOppositeCondition() const OVERRIDE {
3960     return kCondA;
3961   }
3962 
3963  protected:
3964   DEFAULT_COPY_CONSTRUCTOR(BelowOrEqual);
3965 
3966  private:
Compute(T x,T y)3967   template <typename T> static bool Compute(T x, T y) {
3968     return MakeUnsigned(x) <= MakeUnsigned(y);
3969   }
3970 };
3971 
3972 class HAbove FINAL : public HCondition {
3973  public:
3974   HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
HCondition(kAbove,first,second,dex_pc)3975       : HCondition(kAbove, first, second, dex_pc) {
3976   }
3977 
Evaluate(HIntConstant * x,HIntConstant * y)3978   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3979     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3980   }
Evaluate(HLongConstant * x,HLongConstant * y)3981   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3982     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3983   }
Evaluate(HFloatConstant * x ATTRIBUTE_UNUSED,HFloatConstant * y ATTRIBUTE_UNUSED)3984   HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3985                       HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3986     LOG(FATAL) << DebugName() << " is not defined for float values";
3987     UNREACHABLE();
3988   }
Evaluate(HDoubleConstant * x ATTRIBUTE_UNUSED,HDoubleConstant * y ATTRIBUTE_UNUSED)3989   HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3990                       HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3991     LOG(FATAL) << DebugName() << " is not defined for double values";
3992     UNREACHABLE();
3993   }
3994 
3995   DECLARE_INSTRUCTION(Above);
3996 
GetCondition()3997   IfCondition GetCondition() const OVERRIDE {
3998     return kCondA;
3999   }
4000 
GetOppositeCondition()4001   IfCondition GetOppositeCondition() const OVERRIDE {
4002     return kCondBE;
4003   }
4004 
4005  protected:
4006   DEFAULT_COPY_CONSTRUCTOR(Above);
4007 
4008  private:
Compute(T x,T y)4009   template <typename T> static bool Compute(T x, T y) {
4010     return MakeUnsigned(x) > MakeUnsigned(y);
4011   }
4012 };
4013 
4014 class HAboveOrEqual FINAL : public HCondition {
4015  public:
4016   HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
HCondition(kAboveOrEqual,first,second,dex_pc)4017       : HCondition(kAboveOrEqual, first, second, dex_pc) {
4018   }
4019 
Evaluate(HIntConstant * x,HIntConstant * y)4020   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4021     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4022   }
Evaluate(HLongConstant * x,HLongConstant * y)4023   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4024     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4025   }
Evaluate(HFloatConstant * x ATTRIBUTE_UNUSED,HFloatConstant * y ATTRIBUTE_UNUSED)4026   HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4027                       HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4028     LOG(FATAL) << DebugName() << " is not defined for float values";
4029     UNREACHABLE();
4030   }
Evaluate(HDoubleConstant * x ATTRIBUTE_UNUSED,HDoubleConstant * y ATTRIBUTE_UNUSED)4031   HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4032                       HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4033     LOG(FATAL) << DebugName() << " is not defined for double values";
4034     UNREACHABLE();
4035   }
4036 
4037   DECLARE_INSTRUCTION(AboveOrEqual);
4038 
GetCondition()4039   IfCondition GetCondition() const OVERRIDE {
4040     return kCondAE;
4041   }
4042 
GetOppositeCondition()4043   IfCondition GetOppositeCondition() const OVERRIDE {
4044     return kCondB;
4045   }
4046 
4047  protected:
4048   DEFAULT_COPY_CONSTRUCTOR(AboveOrEqual);
4049 
4050  private:
Compute(T x,T y)4051   template <typename T> static bool Compute(T x, T y) {
4052     return MakeUnsigned(x) >= MakeUnsigned(y);
4053   }
4054 };
4055 
4056 // Instruction to check how two inputs compare to each other.
4057 // Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
4058 class HCompare FINAL : public HBinaryOperation {
4059  public:
4060   // Note that `comparison_type` is the type of comparison performed
4061   // between the comparison's inputs, not the type of the instantiated
4062   // HCompare instruction (which is always DataType::Type::kInt32).
HCompare(DataType::Type comparison_type,HInstruction * first,HInstruction * second,ComparisonBias bias,uint32_t dex_pc)4063   HCompare(DataType::Type comparison_type,
4064            HInstruction* first,
4065            HInstruction* second,
4066            ComparisonBias bias,
4067            uint32_t dex_pc)
4068       : HBinaryOperation(kCompare,
4069                          DataType::Type::kInt32,
4070                          first,
4071                          second,
4072                          SideEffectsForArchRuntimeCalls(comparison_type),
4073                          dex_pc) {
4074     SetPackedField<ComparisonBiasField>(bias);
4075     DCHECK_EQ(comparison_type, DataType::Kind(first->GetType()));
4076     DCHECK_EQ(comparison_type, DataType::Kind(second->GetType()));
4077   }
4078 
4079   template <typename T>
Compute(T x,T y)4080   int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
4081 
4082   template <typename T>
ComputeFP(T x,T y)4083   int32_t ComputeFP(T x, T y) const {
4084     DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
4085     DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
4086     // Handle the bias.
4087     return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
4088   }
4089 
Evaluate(HIntConstant * x,HIntConstant * y)4090   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4091     // Note that there is no "cmp-int" Dex instruction so we shouldn't
4092     // reach this code path when processing a freshly built HIR
4093     // graph. However, HCompare integer instructions can be synthesized
4094     // by the instruction simplifier to implement IntegerCompare and
4095     // IntegerSignum intrinsics, so we have to handle this case.
4096     return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4097   }
Evaluate(HLongConstant * x,HLongConstant * y)4098   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4099     return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4100   }
Evaluate(HFloatConstant * x,HFloatConstant * y)4101   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4102     return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4103   }
Evaluate(HDoubleConstant * x,HDoubleConstant * y)4104   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4105     return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4106   }
4107 
InstructionDataEquals(const HInstruction * other)4108   bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
4109     return GetPackedFields() == other->AsCompare()->GetPackedFields();
4110   }
4111 
GetBias()4112   ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
4113 
4114   // Does this compare instruction have a "gt bias" (vs an "lt bias")?
4115   // Only meaningful for floating-point comparisons.
IsGtBias()4116   bool IsGtBias() const {
4117     DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
4118     return GetBias() == ComparisonBias::kGtBias;
4119   }
4120 
SideEffectsForArchRuntimeCalls(DataType::Type type ATTRIBUTE_UNUSED)4121   static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type type ATTRIBUTE_UNUSED) {
4122     // Comparisons do not require a runtime call in any back end.
4123     return SideEffects::None();
4124   }
4125 
4126   DECLARE_INSTRUCTION(Compare);
4127 
4128  protected:
4129   static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
4130   static constexpr size_t kFieldComparisonBiasSize =
4131       MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
4132   static constexpr size_t kNumberOfComparePackedBits =
4133       kFieldComparisonBias + kFieldComparisonBiasSize;
4134   static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
4135   using ComparisonBiasField =
4136       BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
4137 
4138   // Return an integer constant containing the result of a comparison evaluated at compile time.
MakeConstantComparison(int32_t value,uint32_t dex_pc)4139   HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
4140     DCHECK(value == -1 || value == 0 || value == 1) << value;
4141     return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
4142   }
4143 
4144   DEFAULT_COPY_CONSTRUCTOR(Compare);
4145 };
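
// When either floating-point input of an HCompare is NaN, the bias picks the result:
// kGtBias (cmpg-float/cmpg-double) yields 1 and kLtBias (cmpl-float/cmpl-double)
// yields -1; ordered inputs fall through to the ordinary three-way compare. A minimal
// standalone sketch of ComputeFP() above (plain C++, not the HIR itself; `gt_bias`
// stands in for IsGtBias()):
//
//   #include <cmath>
//
//   static int ComputeFpCompare(float x, float y, bool gt_bias) {
//     if (std::isunordered(x, y)) {  // at least one operand is NaN
//       return gt_bias ? 1 : -1;
//     }
//     return x > y ? 1 : (x < y ? -1 : 0);
//   }
//
//   // ComputeFpCompare(NAN, 0.0f, /* gt_bias= */ true)  == 1
//   // ComputeFpCompare(NAN, 0.0f, /* gt_bias= */ false) == -1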
4146 
4147 class HNewInstance FINAL : public HExpression<1> {
4148  public:
HNewInstance(HInstruction * cls,uint32_t dex_pc,dex::TypeIndex type_index,const DexFile & dex_file,bool finalizable,QuickEntrypointEnum entrypoint)4149   HNewInstance(HInstruction* cls,
4150                uint32_t dex_pc,
4151                dex::TypeIndex type_index,
4152                const DexFile& dex_file,
4153                bool finalizable,
4154                QuickEntrypointEnum entrypoint)
4155       : HExpression(kNewInstance,
4156                     DataType::Type::kReference,
4157                     SideEffects::CanTriggerGC(),
4158                     dex_pc),
4159         type_index_(type_index),
4160         dex_file_(dex_file),
4161         entrypoint_(entrypoint) {
4162     SetPackedFlag<kFlagFinalizable>(finalizable);
4163     SetRawInputAt(0, cls);
4164   }
4165 
IsClonable()4166   bool IsClonable() const OVERRIDE { return true; }
4167 
GetTypeIndex()4168   dex::TypeIndex GetTypeIndex() const { return type_index_; }
GetDexFile()4169   const DexFile& GetDexFile() const { return dex_file_; }
4170 
4171   // Calls runtime so needs an environment.
NeedsEnvironment()4172   bool NeedsEnvironment() const OVERRIDE { return true; }
4173 
4174   // May throw an error when out of memory or if the class is not instantiable/accessible.
CanThrow()4175   bool CanThrow() const OVERRIDE { return true; }
4176 
NeedsChecks()4177   bool NeedsChecks() const {
4178     return entrypoint_ == kQuickAllocObjectWithChecks;
4179   }
4180 
IsFinalizable()4181   bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }
4182 
CanBeNull()4183   bool CanBeNull() const OVERRIDE { return false; }
4184 
GetEntrypoint()4185   QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }
4186 
SetEntrypoint(QuickEntrypointEnum entrypoint)4187   void SetEntrypoint(QuickEntrypointEnum entrypoint) {
4188     entrypoint_ = entrypoint;
4189   }
4190 
GetLoadClass()4191   HLoadClass* GetLoadClass() const {
4192     HInstruction* input = InputAt(0);
4193     if (input->IsClinitCheck()) {
4194       input = input->InputAt(0);
4195     }
4196     DCHECK(input->IsLoadClass());
4197     return input->AsLoadClass();
4198   }
4199 
4200   bool IsStringAlloc() const;
4201 
4202   DECLARE_INSTRUCTION(NewInstance);
4203 
4204  protected:
4205   DEFAULT_COPY_CONSTRUCTOR(NewInstance);
4206 
4207  private:
4208   static constexpr size_t kFlagFinalizable = kNumberOfExpressionPackedBits;
4209   static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
4210   static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
4211                 "Too many packed fields.");
4212 
4213   const dex::TypeIndex type_index_;
4214   const DexFile& dex_file_;
4215   QuickEntrypointEnum entrypoint_;
4216 };
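
// GetLoadClass() above tolerates the class input being wrapped in an HClinitCheck,
// which the builder may insert between the HLoadClass and the HNewInstance. A hedged
// sketch of how a pass might inspect an allocation (assumes `new_instance` is an
// HNewInstance* taken from an existing graph):
//
//   HLoadClass* cls = new_instance->GetLoadClass();  // skips an HClinitCheck, if any
//   dex::TypeIndex type_index = new_instance->GetTypeIndex();
//   bool uses_checked_entrypoint = new_instance->NeedsChecks();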
4217 
4218 enum IntrinsicNeedsEnvironmentOrCache {
4219   kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
4220   kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
4221 };
4222 
4223 enum IntrinsicSideEffects {
4224   kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
4225   kReadSideEffects,   // Intrinsic may read heap memory.
4226   kWriteSideEffects,  // Intrinsic may write heap memory.
4227   kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
4228 };
4229 
4230 enum IntrinsicExceptions {
4231   kNoThrow,  // Intrinsic does not throw any exceptions.
4232   kCanThrow  // Intrinsic may throw exceptions.
4233 };
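
// These three enums are consumed by HInvoke::SetIntrinsic() (declared below) when an
// invoke is recognized as an intrinsic. A hedged sketch of marking a call as a
// side-effect-free, non-throwing intrinsic (assumes `invoke` is an HInvoke* and that
// the chosen Intrinsics value, here IntegerRotateRight, is one the recognizer resolved):
//
//   invoke->SetIntrinsic(Intrinsics::kIntegerRotateRight,
//                        kNoEnvironmentOrCache,  // no environment or dex cache needed
//                        kNoSideEffects,         // does not read or write the heap
//                        kNoThrow);              // cannot throw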
4234 
4235 class HInvoke : public HVariableInputSizeInstruction {
4236  public:
4237   bool NeedsEnvironment() const OVERRIDE;
4238 
SetArgumentAt(size_t index,HInstruction * argument)4239   void SetArgumentAt(size_t index, HInstruction* argument) {
4240     SetRawInputAt(index, argument);
4241   }
4242 
4243   // Return the number of arguments.  This number can be lower than
4244   // the number of inputs returned by InputCount(), as some invoke
4245   // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
4246   // inputs at the end of their list of inputs.
GetNumberOfArguments()4247   uint32_t GetNumberOfArguments() const { return number_of_arguments_; }
4248 
GetType()4249   DataType::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); }
4250 
GetDexMethodIndex()4251   uint32_t GetDexMethodIndex() const { return dex_method_index_; }
4252 
GetInvokeType()4253   InvokeType GetInvokeType() const {
4254     return GetPackedField<InvokeTypeField>();
4255   }
4256 
GetIntrinsic()4257   Intrinsics GetIntrinsic() const {
4258     return intrinsic_;
4259   }
4260 
4261   void SetIntrinsic(Intrinsics intrinsic,
4262                     IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
4263                     IntrinsicSideEffects side_effects,
4264                     IntrinsicExceptions exceptions);
4265 
IsFromInlinedInvoke()4266   bool IsFromInlinedInvoke() const {
4267     return GetEnvironment()->IsFromInlinedInvoke();
4268   }
4269 
SetCanThrow(bool can_throw)4270   void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }
4271 
CanThrow()4272   bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); }
4273 
SetAlwaysThrows(bool always_throws)4274   void SetAlwaysThrows(bool always_throws) { SetPackedFlag<kFlagAlwaysThrows>(always_throws); }
4275 
AlwaysThrows()4276   bool AlwaysThrows() const OVERRIDE { return GetPackedFlag<kFlagAlwaysThrows>(); }
4277 
CanBeMoved()4278   bool CanBeMoved() const OVERRIDE { return IsIntrinsic() && !DoesAnyWrite(); }
4279 
InstructionDataEquals(const HInstruction * other)4280   bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
4281     return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
4282   }
4283 
GetIntrinsicOptimizations()4284   uint32_t* GetIntrinsicOptimizations() {
4285     return &intrinsic_optimizations_;
4286   }
4287 
GetIntrinsicOptimizations()4288   const uint32_t* GetIntrinsicOptimizations() const {
4289     return &intrinsic_optimizations_;
4290   }
4291 
IsIntrinsic()4292   bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }
4293 
GetResolvedMethod()4294   ArtMethod* GetResolvedMethod() const { return resolved_method_; }
SetResolvedMethod(ArtMethod * method)4295   void SetResolvedMethod(ArtMethod* method) { resolved_method_ = method; }
4296 
4297   DECLARE_ABSTRACT_INSTRUCTION(Invoke);
4298 
4299  protected:
4300   static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits;
4301   static constexpr size_t kFieldInvokeTypeSize =
4302       MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
4303   static constexpr size_t kFieldReturnType =
4304       kFieldInvokeType + kFieldInvokeTypeSize;
4305   static constexpr size_t kFieldReturnTypeSize =
4306       MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
4307   static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
4308   static constexpr size_t kFlagAlwaysThrows = kFlagCanThrow + 1;
4309   static constexpr size_t kNumberOfInvokePackedBits = kFlagAlwaysThrows + 1;
4310   static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
4311   using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>;
4312   using ReturnTypeField = BitField<DataType::Type, kFieldReturnType, kFieldReturnTypeSize>;
4313 
HInvoke(InstructionKind kind,ArenaAllocator * allocator,uint32_t number_of_arguments,uint32_t number_of_other_inputs,DataType::Type return_type,uint32_t dex_pc,uint32_t dex_method_index,ArtMethod * resolved_method,InvokeType invoke_type)4314   HInvoke(InstructionKind kind,
4315           ArenaAllocator* allocator,
4316           uint32_t number_of_arguments,
4317           uint32_t number_of_other_inputs,
4318           DataType::Type return_type,
4319           uint32_t dex_pc,
4320           uint32_t dex_method_index,
4321           ArtMethod* resolved_method,
4322           InvokeType invoke_type)
4323     : HVariableInputSizeInstruction(
4324           kind,
4325           SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
4326           dex_pc,
4327           allocator,
4328           number_of_arguments + number_of_other_inputs,
4329           kArenaAllocInvokeInputs),
4330       number_of_arguments_(number_of_arguments),
4331       resolved_method_(resolved_method),
4332       dex_method_index_(dex_method_index),
4333       intrinsic_(Intrinsics::kNone),
4334       intrinsic_optimizations_(0) {
4335     SetPackedField<ReturnTypeField>(return_type);
4336     SetPackedField<InvokeTypeField>(invoke_type);
4337     SetPackedFlag<kFlagCanThrow>(true);
4338   }
4339 
4340   DEFAULT_COPY_CONSTRUCTOR(Invoke);
4341 
4342   uint32_t number_of_arguments_;
4343   ArtMethod* resolved_method_;
4344   const uint32_t dex_method_index_;
4345   Intrinsics intrinsic_;
4346 
4347   // A magic word holding optimizations for intrinsics. See intrinsics.h.
4348   uint32_t intrinsic_optimizations_;
4349 };
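
// GetNumberOfArguments() can be smaller than InputCount() because some invokes carry
// trailing non-argument inputs (e.g. the current method or an explicit clinit check).
// A hedged sketch of visiting only the actual call arguments (assumes `invoke` is a
// fully built HInvoke*):
//
//   for (size_t i = 0, n = invoke->GetNumberOfArguments(); i != n; ++i) {
//     HInstruction* arg = invoke->InputAt(i);
//     // ... inspect `arg`; inputs at index >= n are not call arguments.
//   }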
4350 
4351 class HInvokeUnresolved FINAL : public HInvoke {
4352  public:
HInvokeUnresolved(ArenaAllocator * allocator,uint32_t number_of_arguments,DataType::Type return_type,uint32_t dex_pc,uint32_t dex_method_index,InvokeType invoke_type)4353   HInvokeUnresolved(ArenaAllocator* allocator,
4354                     uint32_t number_of_arguments,
4355                     DataType::Type return_type,
4356                     uint32_t dex_pc,
4357                     uint32_t dex_method_index,
4358                     InvokeType invoke_type)
4359       : HInvoke(kInvokeUnresolved,
4360                 allocator,
4361                 number_of_arguments,
4362                 0u /* number_of_other_inputs */,
4363                 return_type,
4364                 dex_pc,
4365                 dex_method_index,
4366                 nullptr,
4367                 invoke_type) {
4368   }
4369 
IsClonable()4370   bool IsClonable() const OVERRIDE { return true; }
4371 
4372   DECLARE_INSTRUCTION(InvokeUnresolved);
4373 
4374  protected:
4375   DEFAULT_COPY_CONSTRUCTOR(InvokeUnresolved);
4376 };
4377 
4378 class HInvokePolymorphic FINAL : public HInvoke {
4379  public:
HInvokePolymorphic(ArenaAllocator * allocator,uint32_t number_of_arguments,DataType::Type return_type,uint32_t dex_pc,uint32_t dex_method_index)4380   HInvokePolymorphic(ArenaAllocator* allocator,
4381                      uint32_t number_of_arguments,
4382                      DataType::Type return_type,
4383                      uint32_t dex_pc,
4384                      uint32_t dex_method_index)
4385       : HInvoke(kInvokePolymorphic,
4386                 allocator,
4387                 number_of_arguments,
4388                 0u /* number_of_other_inputs */,
4389                 return_type,
4390                 dex_pc,
4391                 dex_method_index,
4392                 nullptr,
4393                 kVirtual) {
4394   }
4395 
IsClonable()4396   bool IsClonable() const OVERRIDE { return true; }
4397 
4398   DECLARE_INSTRUCTION(InvokePolymorphic);
4399 
4400  protected:
4401   DEFAULT_COPY_CONSTRUCTOR(InvokePolymorphic);
4402 };
4403 
4404 class HInvokeStaticOrDirect FINAL : public HInvoke {
4405  public:
4406   // Requirements of this method call regarding the class
4407   // initialization (clinit) check of its declaring class.
4408   enum class ClinitCheckRequirement {
4409     kNone,      // Class already initialized.
4410     kExplicit,  // Static call having explicit clinit check as last input.
4411     kImplicit,  // Static call implicitly requiring a clinit check.
4412     kLast = kImplicit
4413   };
4414 
4415   // Determines how to load the target ArtMethod*.
4416   enum class MethodLoadKind {
4417     // Use a String init ArtMethod* loaded from Thread entrypoints.
4418     kStringInit,
4419 
4420     // Use the method's own ArtMethod* loaded by the register allocator.
4421     kRecursive,
4422 
4423     // Use PC-relative boot image ArtMethod* address that will be known at link time.
4424     // Used for boot image methods referenced by boot image code.
4425     kBootImageLinkTimePcRelative,
4426 
4427     // Use ArtMethod* at a known address, embed the direct address in the code.
4428     // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
4429     kDirectAddress,
4430 
4431     // Load from an entry in the .bss section using a PC-relative load.
4432     // Used for methods outside the boot image when .bss is accessible with a PC-relative load.
4433     kBssEntry,
4434 
4435     // Make a runtime call to resolve and call the method. This is the last-resort kind,
4436     // used when other kinds are unimplemented on a particular architecture.
4437     kRuntimeCall,
4438   };
4439 
4440   // Determines the location of the code pointer.
4441   enum class CodePtrLocation {
4442     // Recursive call, use local PC-relative call instruction.
4443     kCallSelf,
4444 
4445     // Use code pointer from the ArtMethod*.
4446     // Used when we don't know the target code. This is also the last-resort kind, used
4447     // when other kinds are unimplemented or impractical (e.g. slow) on a particular architecture.
4448     kCallArtMethod,
4449   };
4450 
4451   struct DispatchInfo {
4452     MethodLoadKind method_load_kind;
4453     CodePtrLocation code_ptr_location;
4454     // The method load data holds:
4455     //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
4456     //     Note that there are multiple string init methods, each having its own offset.
4457     //   - the method address for kDirectAddress.
4458     uint64_t method_load_data;
4459   };
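
  // A hedged example of a DispatchInfo a code generator might choose for a JIT call to
  // a method whose ArtMethod* is at a known address (the address below is a placeholder,
  // not a real value):
  //
  //   HInvokeStaticOrDirect::DispatchInfo dispatch_info = {
  //       HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress,
  //       HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
  //       /* method_load_data= */ 0x70001000u,  // placeholder ArtMethod* address
  //   };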
4460 
HInvokeStaticOrDirect(ArenaAllocator * allocator,uint32_t number_of_arguments,DataType::Type return_type,uint32_t dex_pc,uint32_t method_index,ArtMethod * resolved_method,DispatchInfo dispatch_info,InvokeType invoke_type,MethodReference target_method,ClinitCheckRequirement clinit_check_requirement)4461   HInvokeStaticOrDirect(ArenaAllocator* allocator,
4462                         uint32_t number_of_arguments,
4463                         DataType::Type return_type,
4464                         uint32_t dex_pc,
4465                         uint32_t method_index,
4466                         ArtMethod* resolved_method,
4467                         DispatchInfo dispatch_info,
4468                         InvokeType invoke_type,
4469                         MethodReference target_method,
4470                         ClinitCheckRequirement clinit_check_requirement)
4471       : HInvoke(kInvokeStaticOrDirect,
4472                 allocator,
4473                 number_of_arguments,
4474                 // There is potentially one extra argument for the HCurrentMethod node, and
4475                 // potentially one other if the clinit check is explicit, and potentially
4476                 // one other if the method is a string factory.
4477                 (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
4478                     (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
4479                 return_type,
4480                 dex_pc,
4481                 method_index,
4482                 resolved_method,
4483                 invoke_type),
4484         target_method_(target_method),
4485         dispatch_info_(dispatch_info) {
4486     SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
4487   }
4488 
IsClonable()4489   bool IsClonable() const OVERRIDE { return true; }
4490 
SetDispatchInfo(const DispatchInfo & dispatch_info)4491   void SetDispatchInfo(const DispatchInfo& dispatch_info) {
4492     bool had_current_method_input = HasCurrentMethodInput();
4493     bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);
4494 
4495     // Using the current method is the default and once we find a better
4496     // method load kind, we should not go back to using the current method.
4497     DCHECK(had_current_method_input || !needs_current_method_input);
4498 
4499     if (had_current_method_input && !needs_current_method_input) {
4500       DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
4501       RemoveInputAt(GetSpecialInputIndex());
4502     }
4503     dispatch_info_ = dispatch_info;
4504   }
4505 
GetDispatchInfo()4506   DispatchInfo GetDispatchInfo() const {
4507     return dispatch_info_;
4508   }
4509 
AddSpecialInput(HInstruction * input)4510   void AddSpecialInput(HInstruction* input) {
4511     // We allow only one special input.
4512     DCHECK(!IsStringInit() && !HasCurrentMethodInput());
4513     DCHECK(InputCount() == GetSpecialInputIndex() ||
4514            (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
4515     InsertInputAt(GetSpecialInputIndex(), input);
4516   }
4517 
4518   using HInstruction::GetInputRecords;  // Keep the const version visible.
GetInputRecords()4519   ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
4520     ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords();
4521     if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) {
4522       DCHECK(!input_records.empty());
4523       DCHECK_GT(input_records.size(), GetNumberOfArguments());
4524       HInstruction* last_input = input_records.back().GetInstruction();
4525       // Note: `last_input` may be null during arguments setup.
4526       if (last_input != nullptr) {
4527         // `last_input` is the last input of a static invoke marked as having
4528         // an explicit clinit check. It must either be:
4529         // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
4530         // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
4531         DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName();
4532       }
4533     }
4534     return input_records;
4535   }
4536 
CanDoImplicitNullCheckOn(HInstruction * obj ATTRIBUTE_UNUSED)4537   bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
4538     // We access the method via the dex cache so we can't do an implicit null check.
4539     // TODO: for intrinsics we can generate implicit null checks.
4540     return false;
4541   }
4542 
CanBeNull()4543   bool CanBeNull() const OVERRIDE {
4544     return GetPackedField<ReturnTypeField>() == DataType::Type::kReference && !IsStringInit();
4545   }
4546 
4547   // Get the index of the special input, if any.
4548   //
4549   // If the invoke HasCurrentMethodInput(), the "special input" is the current
4550   // method pointer; otherwise there may be one platform-specific special input,
4551   // such as a PC-relative addressing base.
GetSpecialInputIndex()4552   uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
HasSpecialInput()4553   bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }
4554 
GetMethodLoadKind()4555   MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
GetCodePtrLocation()4556   CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
IsRecursive()4557   bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
4558   bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
IsStringInit()4559   bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
HasMethodAddress()4560   bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
HasPcRelativeMethodLoadKind()4561   bool HasPcRelativeMethodLoadKind() const {
4562     return GetMethodLoadKind() == MethodLoadKind::kBootImageLinkTimePcRelative ||
4563            GetMethodLoadKind() == MethodLoadKind::kBssEntry;
4564   }
HasCurrentMethodInput()4565   bool HasCurrentMethodInput() const {
4566     // This function can be called only after the invoke has been fully initialized by the builder.
4567     if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
4568       DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
4569       return true;
4570     } else {
4571       DCHECK(InputCount() == GetSpecialInputIndex() ||
4572              !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
4573       return false;
4574     }
4575   }
4576 
GetStringInitEntryPoint()4577   QuickEntrypointEnum GetStringInitEntryPoint() const {
4578     DCHECK(IsStringInit());
4579     return static_cast<QuickEntrypointEnum>(dispatch_info_.method_load_data);
4580   }
4581 
GetMethodAddress()4582   uint64_t GetMethodAddress() const {
4583     DCHECK(HasMethodAddress());
4584     return dispatch_info_.method_load_data;
4585   }
4586 
4587   const DexFile& GetDexFileForPcRelativeDexCache() const;
4588 
GetClinitCheckRequirement()4589   ClinitCheckRequirement GetClinitCheckRequirement() const {
4590     return GetPackedField<ClinitCheckRequirementField>();
4591   }
4592 
4593   // Is this instruction a call to a static method?
IsStatic()4594   bool IsStatic() const {
4595     return GetInvokeType() == kStatic;
4596   }
4597 
GetTargetMethod()4598   MethodReference GetTargetMethod() const {
4599     return target_method_;
4600   }
4601 
4602   // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
4603   // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
4604   // instruction; only relevant for static calls with explicit clinit check.
RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement)4605   void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
4606     DCHECK(IsStaticWithExplicitClinitCheck());
4607     size_t last_input_index = inputs_.size() - 1u;
4608     HInstruction* last_input = inputs_.back().GetInstruction();
4609     DCHECK(last_input != nullptr);
4610     DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
4611     RemoveAsUserOfInput(last_input_index);
4612     inputs_.pop_back();
4613     SetPackedField<ClinitCheckRequirementField>(new_requirement);
4614     DCHECK(!IsStaticWithExplicitClinitCheck());
4615   }
4616 
4617   // Is this a call to a static method whose declaring class has an
4618   // explicit initialization check in the graph?
IsStaticWithExplicitClinitCheck()4619   bool IsStaticWithExplicitClinitCheck() const {
4620     return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
4621   }
4622 
4623   // Is this a call to a static method whose declaring class has an
4624   // implicit initialization check requirement?
IsStaticWithImplicitClinitCheck()4625   bool IsStaticWithImplicitClinitCheck() const {
4626     return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
4627   }
4628 
4629   // Does this method load kind need the current method as an input?
NeedsCurrentMethodInput(MethodLoadKind kind)4630   static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
4631     return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kRuntimeCall;
4632   }
4633 
4634   DECLARE_INSTRUCTION(InvokeStaticOrDirect);
4635 
4636  protected:
4637   DEFAULT_COPY_CONSTRUCTOR(InvokeStaticOrDirect);
4638 
4639  private:
4640   static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
4641   static constexpr size_t kFieldClinitCheckRequirementSize =
4642       MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
4643   static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
4644       kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
4645   static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
4646                 "Too many packed fields.");
4647   using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
4648                                                kFieldClinitCheckRequirement,
4649                                                kFieldClinitCheckRequirementSize>;
4650 
4651   // Cached values of the resolved method, to avoid needing the mutator lock.
4652   const MethodReference target_method_;
4653   DispatchInfo dispatch_info_;
4654 };
4655 std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
4656 std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
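
// For a static call built with ClinitCheckRequirement::kExplicit, the HClinitCheck (or
// its HLoadClass replacement) travels as the last input until a later pass can prove the
// class is initialized. A hedged sketch of that hand-off (assumes `invoke` is an
// HInvokeStaticOrDirect* known to carry the explicit check):
//
//   if (invoke->IsStaticWithExplicitClinitCheck()) {
//     // Drop the trailing check input and downgrade the requirement.
//     invoke->RemoveExplicitClinitCheck(
//         HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit);
//   }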
4657 
4658 class HInvokeVirtual FINAL : public HInvoke {
4659  public:
HInvokeVirtual(ArenaAllocator * allocator,uint32_t number_of_arguments,DataType::Type return_type,uint32_t dex_pc,uint32_t dex_method_index,ArtMethod * resolved_method,uint32_t vtable_index)4660   HInvokeVirtual(ArenaAllocator* allocator,
4661                  uint32_t number_of_arguments,
4662                  DataType::Type return_type,
4663                  uint32_t dex_pc,
4664                  uint32_t dex_method_index,
4665                  ArtMethod* resolved_method,
4666                  uint32_t vtable_index)
4667       : HInvoke(kInvokeVirtual,
4668                 allocator,
4669                 number_of_arguments,
4670                 0u,
4671                 return_type,
4672                 dex_pc,
4673                 dex_method_index,
4674                 resolved_method,
4675                 kVirtual),
4676         vtable_index_(vtable_index) {
4677   }
4678 
IsClonable()4679   bool IsClonable() const OVERRIDE { return true; }
4680 
CanBeNull()4681   bool CanBeNull() const OVERRIDE {
4682     switch (GetIntrinsic()) {
4683       case Intrinsics::kThreadCurrentThread:
4684       case Intrinsics::kStringBufferAppend:
4685       case Intrinsics::kStringBufferToString:
4686       case Intrinsics::kStringBuilderAppend:
4687       case Intrinsics::kStringBuilderToString:
4688         return false;
4689       default:
4690         return HInvoke::CanBeNull();
4691     }
4692   }
4693 
CanDoImplicitNullCheckOn(HInstruction * obj)4694   bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4695     // TODO: Add implicit null checks in intrinsics.
4696     return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4697   }
4698 
GetVTableIndex()4699   uint32_t GetVTableIndex() const { return vtable_index_; }
4700 
4701   DECLARE_INSTRUCTION(InvokeVirtual);
4702 
4703  protected:
4704   DEFAULT_COPY_CONSTRUCTOR(InvokeVirtual);
4705 
4706  private:
4707   // Cached value of the resolved method, to avoid needing the mutator lock.
4708   const uint32_t vtable_index_;
4709 };
4710 
4711 class HInvokeInterface FINAL : public HInvoke {
4712  public:
HInvokeInterface(ArenaAllocator * allocator,uint32_t number_of_arguments,DataType::Type return_type,uint32_t dex_pc,uint32_t dex_method_index,ArtMethod * resolved_method,uint32_t imt_index)4713   HInvokeInterface(ArenaAllocator* allocator,
4714                    uint32_t number_of_arguments,
4715                    DataType::Type return_type,
4716                    uint32_t dex_pc,
4717                    uint32_t dex_method_index,
4718                    ArtMethod* resolved_method,
4719                    uint32_t imt_index)
4720       : HInvoke(kInvokeInterface,
4721                 allocator,
4722                 number_of_arguments,
4723                 0u,
4724                 return_type,
4725                 dex_pc,
4726                 dex_method_index,
4727                 resolved_method,
4728                 kInterface),
4729         imt_index_(imt_index) {
4730   }
4731 
IsClonable()4732   bool IsClonable() const OVERRIDE { return true; }
4733 
CanDoImplicitNullCheckOn(HInstruction * obj)4734   bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4735     // TODO: Add implicit null checks in intrinsics.
4736     return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4737   }
4738 
NeedsDexCacheOfDeclaringClass()4739   bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
4740     // The assembly stub currently needs it.
4741     return true;
4742   }
4743 
GetImtIndex()4744   uint32_t GetImtIndex() const { return imt_index_; }
4745 
4746   DECLARE_INSTRUCTION(InvokeInterface);
4747 
4748  protected:
4749   DEFAULT_COPY_CONSTRUCTOR(InvokeInterface);
4750 
4751  private:
4752   // Cached value of the resolved method, to avoid needing the mutator lock.
4753   const uint32_t imt_index_;
4754 };
4755 
4756 class HNeg FINAL : public HUnaryOperation {
4757  public:
4758   HNeg(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
HUnaryOperation(kNeg,result_type,input,dex_pc)4759       : HUnaryOperation(kNeg, result_type, input, dex_pc) {
4760     DCHECK_EQ(result_type, DataType::Kind(input->GetType()));
4761   }
4762 
Compute(T x)4763   template <typename T> static T Compute(T x) { return -x; }
4764 
Evaluate(HIntConstant * x)4765   HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4766     return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4767   }
Evaluate(HLongConstant * x)4768   HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4769     return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4770   }
Evaluate(HFloatConstant * x)4771   HConstant* Evaluate(HFloatConstant* x) const OVERRIDE {
4772     return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
4773   }
Evaluate(HDoubleConstant * x)4774   HConstant* Evaluate(HDoubleConstant* x) const OVERRIDE {
4775     return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
4776   }
4777 
4778   DECLARE_INSTRUCTION(Neg);
4779 
4780  protected:
4781   DEFAULT_COPY_CONSTRUCTOR(Neg);
4782 };
4783 
4784 class HNewArray FINAL : public HExpression<2> {
4785  public:
HNewArray(HInstruction * cls,HInstruction * length,uint32_t dex_pc)4786   HNewArray(HInstruction* cls, HInstruction* length, uint32_t dex_pc)
4787       : HExpression(kNewArray, DataType::Type::kReference, SideEffects::CanTriggerGC(), dex_pc) {
4788     SetRawInputAt(0, cls);
4789     SetRawInputAt(1, length);
4790   }
4791 
IsClonable()4792   bool IsClonable() const OVERRIDE { return true; }
4793 
4794   // Calls runtime so needs an environment.
NeedsEnvironment()4795   bool NeedsEnvironment() const OVERRIDE { return true; }
4796 
4797   // May throw NegativeArraySizeException, OutOfMemoryError, etc.
CanThrow()4798   bool CanThrow() const OVERRIDE { return true; }
4799 
CanBeNull()4800   bool CanBeNull() const OVERRIDE { return false; }
4801 
GetLoadClass()4802   HLoadClass* GetLoadClass() const {
4803     DCHECK(InputAt(0)->IsLoadClass());
4804     return InputAt(0)->AsLoadClass();
4805   }
4806 
GetLength()4807   HInstruction* GetLength() const {
4808     return InputAt(1);
4809   }
4810 
4811   DECLARE_INSTRUCTION(NewArray);
4812 
4813  protected:
4814   DEFAULT_COPY_CONSTRUCTOR(NewArray);
4815 };
4816 
4817 class HAdd FINAL : public HBinaryOperation {
4818  public:
4819   HAdd(DataType::Type result_type,
4820        HInstruction* left,
4821        HInstruction* right,
4822        uint32_t dex_pc = kNoDexPc)
HBinaryOperation(kAdd,result_type,left,right,SideEffects::None (),dex_pc)4823       : HBinaryOperation(kAdd, result_type, left, right, SideEffects::None(), dex_pc) {
4824   }
4825 
IsCommutative()4826   bool IsCommutative() const OVERRIDE { return true; }
4827 
Compute(T x,T y)4828   template <typename T> static T Compute(T x, T y) { return x + y; }
4829 
Evaluate(HIntConstant * x,HIntConstant * y)4830   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4831     return GetBlock()->GetGraph()->GetIntConstant(
4832         Compute(x->GetValue(), y->GetValue()), GetDexPc());
4833   }
Evaluate(HLongConstant * x,HLongConstant * y)4834   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4835     return GetBlock()->GetGraph()->GetLongConstant(
4836         Compute(x->GetValue(), y->GetValue()), GetDexPc());
4837   }
Evaluate(HFloatConstant * x,HFloatConstant * y)4838   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4839     return GetBlock()->GetGraph()->GetFloatConstant(
4840         Compute(x->GetValue(), y->GetValue()), GetDexPc());
4841   }
Evaluate(HDoubleConstant * x,HDoubleConstant * y)4842   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4843     return GetBlock()->GetGraph()->GetDoubleConstant(
4844         Compute(x->GetValue(), y->GetValue()), GetDexPc());
4845   }
4846 
4847   DECLARE_INSTRUCTION(Add);
4848 
4849  protected:
4850   DEFAULT_COPY_CONSTRUCTOR(Add);
4851 };
4852 
4853 class HSub FINAL : public HBinaryOperation {
4854  public:
4855   HSub(DataType::Type result_type,
4856        HInstruction* left,
4857        HInstruction* right,
4858        uint32_t dex_pc = kNoDexPc)
HBinaryOperation(kSub,result_type,left,right,SideEffects::None (),dex_pc)4859       : HBinaryOperation(kSub, result_type, left, right, SideEffects::None(), dex_pc) {
4860   }
4861 
Compute(T x,T y)4862   template <typename T> static T Compute(T x, T y) { return x - y; }
4863 
Evaluate(HIntConstant * x,HIntConstant * y)4864   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4865     return GetBlock()->GetGraph()->GetIntConstant(
4866         Compute(x->GetValue(), y->GetValue()), GetDexPc());
4867   }
Evaluate(HLongConstant * x,HLongConstant * y)4868   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4869     return GetBlock()->GetGraph()->GetLongConstant(
4870         Compute(x->GetValue(), y->GetValue()), GetDexPc());
4871   }
Evaluate(HFloatConstant * x,HFloatConstant * y)4872   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4873     return GetBlock()->GetGraph()->GetFloatConstant(
4874         Compute(x->GetValue(), y->GetValue()), GetDexPc());
4875   }
Evaluate(HDoubleConstant * x,HDoubleConstant * y)4876   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4877     return GetBlock()->GetGraph()->GetDoubleConstant(
4878         Compute(x->GetValue(), y->GetValue()), GetDexPc());
4879   }
4880 
4881   DECLARE_INSTRUCTION(Sub);
4882 
4883  protected:
4884   DEFAULT_COPY_CONSTRUCTOR(Sub);
4885 };
4886 
4887 class HMul FINAL : public HBinaryOperation {
4888  public:
4889   HMul(DataType::Type result_type,
4890        HInstruction* left,
4891        HInstruction* right,
4892        uint32_t dex_pc = kNoDexPc)
HBinaryOperation(kMul,result_type,left,right,SideEffects::None (),dex_pc)4893       : HBinaryOperation(kMul, result_type, left, right, SideEffects::None(), dex_pc) {
4894   }
4895 
IsCommutative()4896   bool IsCommutative() const OVERRIDE { return true; }
4897 
Compute(T x,T y)4898   template <typename T> static T Compute(T x, T y) { return x * y; }
4899 
Evaluate(HIntConstant * x,HIntConstant * y)4900   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4901     return GetBlock()->GetGraph()->GetIntConstant(
4902         Compute(x->GetValue(), y->GetValue()), GetDexPc());
4903   }
Evaluate(HLongConstant * x,HLongConstant * y)4904   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4905     return GetBlock()->GetGraph()->GetLongConstant(
4906         Compute(x->GetValue(), y->GetValue()), GetDexPc());
4907   }
Evaluate(HFloatConstant * x,HFloatConstant * y)4908   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4909     return GetBlock()->GetGraph()->GetFloatConstant(
4910         Compute(x->GetValue(), y->GetValue()), GetDexPc());
4911   }
Evaluate(HDoubleConstant * x,HDoubleConstant * y)4912   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4913     return GetBlock()->GetGraph()->GetDoubleConstant(
4914         Compute(x->GetValue(), y->GetValue()), GetDexPc());
4915   }
4916 
4917   DECLARE_INSTRUCTION(Mul);
4918 
4919  protected:
4920   DEFAULT_COPY_CONSTRUCTOR(Mul);
4921 };
4922 
4923 class HDiv FINAL : public HBinaryOperation {
4924  public:
HDiv(DataType::Type result_type,HInstruction * left,HInstruction * right,uint32_t dex_pc)4925   HDiv(DataType::Type result_type,
4926        HInstruction* left,
4927        HInstruction* right,
4928        uint32_t dex_pc)
4929       : HBinaryOperation(kDiv, result_type, left, right, SideEffects::None(), dex_pc) {
4930   }
4931 
  template <typename T>
  T ComputeIntegral(T x, T y) const {
    DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ? -x : x / y;
  }

  template <typename T>
  T ComputeFP(T x, T y) const {
    DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
    return x / y;
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetFloatConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetDoubleConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Div);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Div);
};

class HRem FINAL : public HBinaryOperation {
 public:
  HRem(DataType::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(kRem, result_type, left, right, SideEffects::None(), dex_pc) {
  }

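  // Note: in Java, kMinValue % -1 is defined to be 0; the -1 special case in
  // ComputeIntegral() below also avoids the hardware overflow trap (SIGFPE) that the
  // x86/x86-64 idiv instruction would raise for that input.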
  template <typename T>
  T ComputeIntegral(T x, T y) const {
    DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ? 0 : x % y;
  }

  template <typename T>
  T ComputeFP(T x, T y) const {
    DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
    return std::fmod(x, y);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetFloatConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetDoubleConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Rem);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Rem);
};

class HDivZeroCheck FINAL : public HExpression<1> {
 public:
  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
  // constructor.
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(kDivZeroCheck, value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  DataType::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(DivZeroCheck);
};

class HShl FINAL : public HBinaryOperation {
 public:
  HShl(DataType::Type result_type,
       HInstruction* value,
       HInstruction* distance,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(kShl, result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
  }

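  // Only the low bits of `distance` act as the shift amount (5 bits for int, 6 bits for
  // long), matching the Java/dex shift semantics.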
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    return value << (distance & max_shift_distance);
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Shl);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Shl);
};

class HShr FINAL : public HBinaryOperation {
 public:
  HShr(DataType::Type result_type,
       HInstruction* value,
       HInstruction* distance,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(kShr, result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
  }

  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    return value >> (distance & max_shift_distance);
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Shr);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Shr);
};

class HUShr FINAL : public HBinaryOperation {
 public:
  HUShr(DataType::Type result_type,
        HInstruction* value,
        HInstruction* distance,
        uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(kUShr, result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
  }

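  // The value is shifted as its unsigned counterpart so that the right shift is logical
  // (zero-filling), as required by the dex `ushr` semantics.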
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    typedef typename std::make_unsigned<T>::type V;
    V ux = static_cast<V>(value);
    return static_cast<T>(ux >> (distance & max_shift_distance));
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(UShr);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(UShr);
};

class HAnd FINAL : public HBinaryOperation {
 public:
  HAnd(DataType::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(kAnd, result_type, left, right, SideEffects::None(), dex_pc) {
  }

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> static T Compute(T x, T y) { return x & y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(And);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(And);
};

class HOr FINAL : public HBinaryOperation {
 public:
  HOr(DataType::Type result_type,
      HInstruction* left,
      HInstruction* right,
      uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(kOr, result_type, left, right, SideEffects::None(), dex_pc) {
  }

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> static T Compute(T x, T y) { return x | y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Or);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Or);
};

class HXor FINAL : public HBinaryOperation {
 public:
  HXor(DataType::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(kXor, result_type, left, right, SideEffects::None(), dex_pc) {
  }

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> static T Compute(T x, T y) { return x ^ y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Xor);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Xor);
};

class HRor FINAL : public HBinaryOperation {
 public:
  HRor(DataType::Type result_type, HInstruction* value, HInstruction* distance)
      : HBinaryOperation(kRor, result_type, value, distance) {
    DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
  }

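  // The rotation is computed as a pair of complementary shifts. A distance that is a
  // multiple of the type width is handled separately, since shifting by the full
  // register width would be undefined behavior in C++.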
  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_value) {
    typedef typename std::make_unsigned<T>::type V;
    V ux = static_cast<V>(value);
    if ((distance & max_shift_value) == 0) {
      return static_cast<T>(ux);
    } else {
      const V reg_bits = sizeof(T) * 8;
      return static_cast<T>(ux >> (distance & max_shift_value)) |
                           (value << (reg_bits - (distance & max_shift_value)));
    }
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Ror);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Ror);
};

// The value of a parameter in this method. Its location depends on
// the calling convention.
class HParameterValue FINAL : public HExpression<0> {
 public:
  HParameterValue(const DexFile& dex_file,
                  dex::TypeIndex type_index,
                  uint8_t index,
                  DataType::Type parameter_type,
                  bool is_this = false)
      : HExpression(kParameterValue, parameter_type, SideEffects::None(), kNoDexPc),
        dex_file_(dex_file),
        type_index_(type_index),
        index_(index) {
    SetPackedFlag<kFlagIsThis>(is_this);
    SetPackedFlag<kFlagCanBeNull>(!is_this);
  }

  const DexFile& GetDexFile() const { return dex_file_; }
  dex::TypeIndex GetTypeIndex() const { return type_index_; }
  uint8_t GetIndex() const { return index_; }
  bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  DECLARE_INSTRUCTION(ParameterValue);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ParameterValue);

 private:
  // Whether or not the parameter value corresponds to the 'this' argument.
  static constexpr size_t kFlagIsThis = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const DexFile& dex_file_;
  const dex::TypeIndex type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;
};

class HNot FINAL : public HUnaryOperation {
 public:
  HNot(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(kNot, result_type, input, dex_pc) {
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  template <typename T> static T Compute(T x) { return ~x; }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Not);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Not);
};

class HBooleanNot FINAL : public HUnaryOperation {
 public:
  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(kBooleanNot, DataType::Type::kBool, input, dex_pc) {
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

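  // The input is expected to be a boolean value (0 or 1), so logical negation suffices.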
  template <typename T> static bool Compute(T x) {
    DCHECK(IsUint<1>(x)) << x;
    return !x;
  }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for long values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(BooleanNot);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(BooleanNot);
};

class HTypeConversion FINAL : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(kTypeConversion, result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
    // Invariant: We should never generate a conversion to a Boolean value.
    DCHECK_NE(DataType::Type::kBool, result_type);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  DataType::Type GetInputType() const { return GetInput()->GetType(); }
  DataType::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate the conversion and return a HConstant
  // containing the result. If the input cannot be converted, return nullptr.
  HConstant* TryStaticEvaluation() const;

  DECLARE_INSTRUCTION(TypeConversion);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(TypeConversion);
};

static constexpr uint32_t kNoRegNumber = -1;

class HNullCheck FINAL : public HExpression<1> {
 public:
  // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
  // constructor.
  HNullCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(kNullCheck, value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(NullCheck);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(NullCheck);
};

// Embeds an ArtField and all the information required by the compiler. We cache
// that information to avoid requiring the mutator lock every time we need it.
class FieldInfo : public ValueObject {
 public:
  FieldInfo(ArtField* field,
            MemberOffset field_offset,
            DataType::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file)
      : field_(field),
        field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file) {}

  ArtField* GetField() const { return field_; }
  MemberOffset GetFieldOffset() const { return field_offset_; }
  DataType::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_; }
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }

 private:
  ArtField* const field_;
  const MemberOffset field_offset_;
  const DataType::Type field_type_;
  const bool is_volatile_;
  const uint32_t index_;
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
};

class HInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    ArtField* field,
                    DataType::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HExpression(kInstanceFieldGet,
                    field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetRawInputAt(0, value);
  }

  bool IsClonable() const OVERRIDE { return true; }
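  // Volatile reads have memory ordering constraints, so they must not be moved.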
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  void SetType(DataType::Type new_type) {
    DCHECK(DataType::IsIntegralType(GetType()));
    DCHECK(DataType::IsIntegralType(new_type));
    DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
    SetPackedField<TypeField>(new_type);
  }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InstanceFieldGet);

 private:
  const FieldInfo field_info_;
};

class HInstanceFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    ArtField* field,
                    DataType::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HTemplateInstruction(kInstanceFieldSet,
                             SideEffects::FieldWriteOfType(field_type, is_volatile),
                             dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  bool IsClonable() const OVERRIDE { return true; }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InstanceFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;
};

class HArrayGet FINAL : public HExpression<2> {
 public:
  HArrayGet(HInstruction* array,
            HInstruction* index,
            DataType::Type type,
            uint32_t dex_pc)
     : HArrayGet(array,
                 index,
                 type,
                 SideEffects::ArrayReadOfType(type),
                 dex_pc,
                 /* is_string_char_at */ false) {
  }

  HArrayGet(HInstruction* array,
            HInstruction* index,
            DataType::Type type,
            SideEffects side_effects,
            uint32_t dex_pc,
            bool is_string_char_at)
      : HExpression(kArrayGet, type, side_effects, dex_pc) {
    SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, unless the array is the result of NewArray, the array access is always
    // preceded by some form of null check necessary for the bounds check, usually an
    // implicit null check on the ArrayLength input to BoundsCheck or Deoptimize for
    // dynamic BCE. There are cases when these could be removed to produce better code.
    // If we ever add optimizations to do so we should allow an implicit check here
    // (as long as the address falls in the first page).
    //
    // As an example of such fancy optimization, we could eliminate BoundsCheck for
    //     a = cond ? new int[1] : null;
    //     a[0];  // The Phi does not need bounds check for either input.
    return false;
  }

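  // Two HArrayGets are considered equivalent when they were created for the same dex
  // instruction but with different types (one integral, one floating-point), as the SSA
  // builder does for array accesses whose component type is ambiguous in the bytecode.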
  bool IsEquivalentOf(HArrayGet* other) const {
    bool result = (GetDexPc() == other->GetDexPc());
    if (kIsDebugBuild && result) {
      DCHECK_EQ(GetBlock(), other->GetBlock());
      DCHECK_EQ(GetArray(), other->GetArray());
      DCHECK_EQ(GetIndex(), other->GetIndex());
      if (DataType::IsIntOrLongType(GetType())) {
        DCHECK(DataType::IsFloatingPointType(other->GetType())) << other->GetType();
      } else {
        DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
        DCHECK(DataType::IsIntOrLongType(other->GetType())) << other->GetType();
      }
    }
    return result;
  }

  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  void SetType(DataType::Type new_type) {
    DCHECK(DataType::IsIntegralType(GetType()));
    DCHECK(DataType::IsIntegralType(new_type));
    DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
    SetPackedField<TypeField>(new_type);
  }

  DECLARE_INSTRUCTION(ArrayGet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ArrayGet);

 private:
  // We treat a String as an array, creating the HArrayGet from String.charAt()
  // intrinsic in the instruction simplifier. We can always determine whether
  // a particular HArrayGet is actually a String.charAt() by looking at the type
  // of the input but that requires holding the mutator lock, so we prefer to use
  // a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
  static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
};

class HArraySet FINAL : public HTemplateInstruction<3> {
 public:
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            DataType::Type expected_component_type,
            uint32_t dex_pc)
      : HArraySet(array,
                  index,
                  value,
                  expected_component_type,
                  // Make a best guess for side effects now, may be refined during SSA building.
                  ComputeSideEffects(GetComponentType(value->GetType(), expected_component_type)),
                  dex_pc) {
  }

  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            DataType::Type expected_component_type,
            SideEffects side_effects,
            uint32_t dex_pc)
      : HTemplateInstruction(kArraySet, side_effects, dex_pc) {
    SetPackedField<ExpectedComponentTypeField>(expected_component_type);
    SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == DataType::Type::kReference);
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
  }

  bool IsClonable() const OVERRIDE { return true; }

  bool NeedsEnvironment() const OVERRIDE {
    // We call a runtime method to throw ArrayStoreException.
    return NeedsTypeCheck();
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return NeedsTypeCheck(); }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
    return false;
  }

  void ClearNeedsTypeCheck() {
    SetPackedFlag<kFlagNeedsTypeCheck>(false);
  }

  void ClearValueCanBeNull() {
    SetPackedFlag<kFlagValueCanBeNull>(false);
  }

  void SetStaticTypeOfArrayIsObjectArray() {
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
  }

  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
  bool StaticTypeOfArrayIsObjectArray() const {
    return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
  }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }

  DataType::Type GetComponentType() const {
    return GetComponentType(GetValue()->GetType(), GetRawExpectedComponentType());
  }

  static DataType::Type GetComponentType(DataType::Type value_type,
                                         DataType::Type expected_component_type) {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type` comes from SSA building and can therefore not
    // be correct, we also check what is the value type. If it is a floating
    // point type, we must use that type.
    return ((value_type == DataType::Type::kFloat32) || (value_type == DataType::Type::kFloat64))
        ? value_type
        : expected_component_type;
  }

  DataType::Type GetRawExpectedComponentType() const {
    return GetPackedField<ExpectedComponentTypeField>();
  }

  static SideEffects ComputeSideEffects(DataType::Type type) {
    return SideEffects::ArrayWriteOfType(type).Union(SideEffectsForArchRuntimeCalls(type));
  }

  static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type value_type) {
    return (value_type == DataType::Type::kReference) ? SideEffects::CanTriggerGC()
                                                      : SideEffects::None();
  }

  DECLARE_INSTRUCTION(ArraySet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ArraySet);

 private:
  static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldExpectedComponentTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kFlagNeedsTypeCheck =
      kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
  static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
  // Cached information for the reference_type_info_ so that codegen
  // does not need to inspect the static type.
  static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
  static constexpr size_t kNumberOfArraySetPackedBits =
      kFlagStaticTypeOfArrayIsObjectArray + 1;
  static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ExpectedComponentTypeField =
      BitField<DataType::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;
};

class HArrayLength FINAL : public HExpression<1> {
 public:
  HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
      : HExpression(kArrayLength, DataType::Type::kInt32, SideEffects::None(), dex_pc) {
    SetPackedFlag<kFlagIsStringLength>(is_string_length);
    // Note that arrays do not change length, so the instruction does not
    // depend on any write.
    SetRawInputAt(0, array);
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return obj == InputAt(0);
  }

  bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }

  DECLARE_INSTRUCTION(ArrayLength);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ArrayLength);

 private:
  // We treat a String as an array, creating the HArrayLength from String.length()
  // or String.isEmpty() intrinsic in the instruction simplifier. We can always
  // determine whether a particular HArrayLength is actually a String.length() by
  // looking at the type of the input but that requires holding the mutator lock, so
  // we prefer to use a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringLength = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
  static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
};

class HBoundsCheck FINAL : public HExpression<2> {
 public:
  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
  // constructor.
  HBoundsCheck(HInstruction* index,
               HInstruction* length,
               uint32_t dex_pc,
               bool is_string_char_at = false)
      : HExpression(kBoundsCheck, index->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(index->GetType()));
    SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
    SetRawInputAt(0, index);
    SetRawInputAt(1, length);
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }

  HInstruction* GetIndex() const { return InputAt(0); }

  DECLARE_INSTRUCTION(BoundsCheck);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(BoundsCheck);

 private:
  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
};

class HSuspendCheck FINAL : public HTemplateInstruction<0> {
 public:
  explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(kSuspendCheck, SideEffects::CanTriggerGC(), dex_pc),
        slow_path_(nullptr) {
  }

  bool IsClonable() const OVERRIDE { return true; }

  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
  SlowPathCode* GetSlowPath() const { return slow_path_; }

  DECLARE_INSTRUCTION(SuspendCheck);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(SuspendCheck);

 private:
  // Only used for code generation, in order to share the same slow path between back edges
  // of the same loop.
  SlowPathCode* slow_path_;
};

// Pseudo-instruction which provides the native debugger with mapping information.
// It ensures that we can generate line number and local variable information at this point.
class HNativeDebugInfo : public HTemplateInstruction<0> {
 public:
  explicit HNativeDebugInfo(uint32_t dex_pc)
      : HTemplateInstruction<0>(kNativeDebugInfo, SideEffects::None(), dex_pc) {
  }

  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  DECLARE_INSTRUCTION(NativeDebugInfo);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(NativeDebugInfo);
};

/**
 * Instruction to load a Class object.
 */
class HLoadClass FINAL : public HInstruction {
 public:
  // Determines how to load the Class.
  enum class LoadKind {
    // We cannot load this class. See HSharpening::SharpenLoadClass.
    kInvalid = -1,

    // Use the Class* from the method's own ArtMethod*.
    kReferrersClass,

    // Use PC-relative boot image Class* address that will be known at link time.
    // Used for boot image classes referenced by boot image code.
    kBootImageLinkTimePcRelative,

    // Use a known boot image Class* address, embedded in the code by the codegen.
    // Used for boot image classes referenced by apps in AOT- and JIT-compiled code.
    kBootImageAddress,

    // Use a PC-relative load from a boot image ClassTable mmapped into the .bss
    // of the oat file.
    kBootImageClassTable,

    // Load from an entry in the .bss section using a PC-relative load.
    // Used for classes outside boot image when .bss is accessible with a PC-relative load.
    kBssEntry,

    // Load from the root table associated with the JIT compiled method.
    kJitTableAddress,

    // Load using a simple runtime call. This is the fall-back load kind when
    // the codegen is unable to use another appropriate kind.
    kRuntimeCall,

    kLast = kRuntimeCall
  };

  HLoadClass(HCurrentMethod* current_method,
             dex::TypeIndex type_index,
             const DexFile& dex_file,
             Handle<mirror::Class> klass,
             bool is_referrers_class,
             uint32_t dex_pc,
             bool needs_access_check)
      : HInstruction(kLoadClass, SideEffectsForArchRuntimeCalls(), dex_pc),
        special_input_(HUserRecord<HInstruction*>(current_method)),
        type_index_(type_index),
        dex_file_(dex_file),
        klass_(klass),
        loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
    // Referrers class should not need access check. We never inline unverified
    // methods so we can't possibly end up in this situation.
    DCHECK(!is_referrers_class || !needs_access_check);

    SetPackedField<LoadKindField>(
        is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kRuntimeCall);
    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
    SetPackedFlag<kFlagIsInBootImage>(false);
    SetPackedFlag<kFlagGenerateClInitCheck>(false);
  }

  bool IsClonable() const OVERRIDE { return true; }

  void SetLoadKind(LoadKind load_kind);

  LoadKind GetLoadKind() const {
    return GetPackedField<LoadKindField>();
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other) const;

  size_t ComputeHashCode() const OVERRIDE { return type_index_.index_; }

  bool CanBeNull() const OVERRIDE { return false; }

  bool NeedsEnvironment() const OVERRIDE {
    return CanCallRuntime();
  }

  void SetMustGenerateClinitCheck(bool generate_clinit_check) {
    // The entrypoint the code generator is going to call does not do
    // clinit of the class.
    DCHECK(!NeedsAccessCheck());
    SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
  }

CanCallRuntime()6100   bool CanCallRuntime() const {
6101     return NeedsAccessCheck() ||
6102            MustGenerateClinitCheck() ||
6103            GetLoadKind() == LoadKind::kRuntimeCall ||
6104            GetLoadKind() == LoadKind::kBssEntry;
6105   }
6106 
CanThrow()6107   bool CanThrow() const OVERRIDE {
6108     return NeedsAccessCheck() ||
6109            MustGenerateClinitCheck() ||
6110            // If the class is in the boot image, the lookup in the runtime call cannot throw.
6111            // This keeps CanThrow() consistent between non-PIC (using kBootImageAddress) and
6112            // PIC and subsequently avoids a DCE behavior dependency on the PIC option.
6113            ((GetLoadKind() == LoadKind::kRuntimeCall ||
6114              GetLoadKind() == LoadKind::kBssEntry) &&
6115             !IsInBootImage());
6116   }
6117 
GetLoadedClassRTI()6118   ReferenceTypeInfo GetLoadedClassRTI() {
6119     return loaded_class_rti_;
6120   }
6121 
SetLoadedClassRTI(ReferenceTypeInfo rti)6122   void SetLoadedClassRTI(ReferenceTypeInfo rti) {
6123     // Make sure we only set exact types (the loaded class should never be merged).
6124     DCHECK(rti.IsExact());
6125     loaded_class_rti_ = rti;
6126   }
6127 
GetTypeIndex()6128   dex::TypeIndex GetTypeIndex() const { return type_index_; }
GetDexFile()6129   const DexFile& GetDexFile() const { return dex_file_; }
6130 
NeedsDexCacheOfDeclaringClass()6131   bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
6132     return GetLoadKind() == LoadKind::kRuntimeCall;
6133   }
6134 
SideEffectsForArchRuntimeCalls()6135   static SideEffects SideEffectsForArchRuntimeCalls() {
6136     return SideEffects::CanTriggerGC();
6137   }
6138 
IsReferrersClass()6139   bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
NeedsAccessCheck()6140   bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
IsInBootImage()6141   bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); }
MustGenerateClinitCheck()6142   bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
6143 
MarkInBootImage()6144   void MarkInBootImage() {
6145     SetPackedFlag<kFlagIsInBootImage>(true);
6146   }
6147 
6148   void AddSpecialInput(HInstruction* special_input);
6149 
6150   using HInstruction::GetInputRecords;  // Keep the const version visible.
GetInputRecords()6151   ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
6152     return ArrayRef<HUserRecord<HInstruction*>>(
6153         &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
6154   }
6155 
GetType()6156   DataType::Type GetType() const OVERRIDE {
6157     return DataType::Type::kReference;
6158   }
6159 
GetClass()6160   Handle<mirror::Class> GetClass() const {
6161     return klass_;
6162   }
6163 
6164   DECLARE_INSTRUCTION(LoadClass);
6165 
6166  protected:
6167   DEFAULT_COPY_CONSTRUCTOR(LoadClass);
6168 
6169  private:
6170   static constexpr size_t kFlagNeedsAccessCheck    = kNumberOfGenericPackedBits;
6171   static constexpr size_t kFlagIsInBootImage       = kFlagNeedsAccessCheck + 1;
6172   // Whether this instruction must generate the initialization check.
6173   // Used for code generation.
6174   static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1;
6175   static constexpr size_t kFieldLoadKind           = kFlagGenerateClInitCheck + 1;
6176   static constexpr size_t kFieldLoadKindSize =
6177       MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
6178   static constexpr size_t kNumberOfLoadClassPackedBits = kFieldLoadKind + kFieldLoadKindSize;
6179   static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
6180   using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
6181 
HasTypeReference(LoadKind load_kind)6182   static bool HasTypeReference(LoadKind load_kind) {
6183     return load_kind == LoadKind::kReferrersClass ||
6184         load_kind == LoadKind::kBootImageLinkTimePcRelative ||
6185         load_kind == LoadKind::kBootImageClassTable ||
6186         load_kind == LoadKind::kBssEntry ||
6187         load_kind == LoadKind::kRuntimeCall;
6188   }
6189 
6190   void SetLoadKindInternal(LoadKind load_kind);
6191 
6192   // The special input is the HCurrentMethod for kRuntimeCall or kReferrersClass.
6193   // For other load kinds it's empty or possibly some architecture-specific instruction
6194   // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
6195   HUserRecord<HInstruction*> special_input_;
6196 
6197   // A type index and dex file where the class can be accessed. The dex file can be:
6198   // - The compiling method's dex file if the class is defined there too.
6199   // - The compiling method's dex file if the class is referenced there.
6200   // - The dex file where the class is defined. When the load kind can only be
6201   //   kBssEntry or kRuntimeCall, we cannot emit code for this `HLoadClass`.
6202   const dex::TypeIndex type_index_;
6203   const DexFile& dex_file_;
6204 
6205   Handle<mirror::Class> klass_;
6206 
6207   ReferenceTypeInfo loaded_class_rti_;
6208 };
6209 std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
6210 
6211 // Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
SetLoadKind(LoadKind load_kind)6212 inline void HLoadClass::SetLoadKind(LoadKind load_kind) {
6213   // The load kind should be determined before inserting the instruction to the graph.
6214   DCHECK(GetBlock() == nullptr);
6215   DCHECK(GetEnvironment() == nullptr);
6216   SetPackedField<LoadKindField>(load_kind);
6217   if (load_kind != LoadKind::kRuntimeCall && load_kind != LoadKind::kReferrersClass) {
6218     special_input_ = HUserRecord<HInstruction*>(nullptr);
6219   }
6220   if (!NeedsEnvironment()) {
6221     SetSideEffects(SideEffects::None());
6222   }
6223 }
6224 
6225 // Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
AddSpecialInput(HInstruction * special_input)6226 inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
6227   // The special input is used for PC-relative loads on some architectures,
6228   // including literal pool loads, which are PC-relative too.
6229   DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
6230          GetLoadKind() == LoadKind::kBootImageAddress ||
6231          GetLoadKind() == LoadKind::kBootImageClassTable ||
6232          GetLoadKind() == LoadKind::kBssEntry) << GetLoadKind();
6233   DCHECK(special_input_.GetInstruction() == nullptr);
6234   special_input_ = HUserRecord<HInstruction*>(special_input);
6235   special_input->AddUseAt(this, 0);
6236 }
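
// Illustrative sketch (not part of this file): the load kind starts out as kRuntimeCall (or
// kReferrersClass) and is typically sharpened before the instruction is added to a block.
// The predicates below (compiling_boot_image, class_is_in_boot_image, can_use_pc_relative_bss)
// are assumptions made up for this example, not APIs defined here:
//
//   HLoadClass::LoadKind kind = HLoadClass::LoadKind::kRuntimeCall;  // Fall-back.
//   if (compiling_boot_image && class_is_in_boot_image) {
//     kind = HLoadClass::LoadKind::kBootImageLinkTimePcRelative;
//   } else if (can_use_pc_relative_bss) {
//     kind = HLoadClass::LoadKind::kBssEntry;
//   }
//   load_class->SetLoadKind(kind);  // DCHECKs that the instruction is not in a block yet.
//
// Code generators then select the actual load sequence from GetLoadKind().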

class HLoadString FINAL : public HInstruction {
 public:
  // Determines how to load the String.
  enum class LoadKind {
    // Use PC-relative boot image String* address that will be known at link time.
    // Used for boot image strings referenced by boot image code.
    kBootImageLinkTimePcRelative,

    // Use a known boot image String* address, embedded in the code by the codegen.
    // Used for boot image strings referenced by apps in AOT- and JIT-compiled code.
    kBootImageAddress,

    // Use a PC-relative load from a boot image InternTable mmapped into the .bss
    // of the oat file.
    kBootImageInternTable,

    // Load from an entry in the .bss section using a PC-relative load.
    // Used for strings outside boot image when .bss is accessible with a PC-relative load.
    kBssEntry,

    // Load from the root table associated with the JIT compiled method.
    kJitTableAddress,

    // Load using a simple runtime call. This is the fall-back load kind when
    // the codegen is unable to use another appropriate kind.
    kRuntimeCall,

    kLast = kRuntimeCall,
  };

  HLoadString(HCurrentMethod* current_method,
              dex::StringIndex string_index,
              const DexFile& dex_file,
              uint32_t dex_pc)
      : HInstruction(kLoadString, SideEffectsForArchRuntimeCalls(), dex_pc),
        special_input_(HUserRecord<HInstruction*>(current_method)),
        string_index_(string_index),
        dex_file_(dex_file) {
    SetPackedField<LoadKindField>(LoadKind::kRuntimeCall);
  }

  bool IsClonable() const OVERRIDE { return true; }

  void SetLoadKind(LoadKind load_kind);

  LoadKind GetLoadKind() const {
    return GetPackedField<LoadKindField>();
  }

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  dex::StringIndex GetStringIndex() const {
    return string_index_;
  }

  Handle<mirror::String> GetString() const {
    return string_;
  }

  void SetString(Handle<mirror::String> str) {
    string_ = str;
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE;

  size_t ComputeHashCode() const OVERRIDE { return string_index_.index_; }

  // Will call the runtime if we need to load the string through
  // the dex cache and the string is not guaranteed to be there yet.
  bool NeedsEnvironment() const OVERRIDE {
    LoadKind load_kind = GetLoadKind();
    if (load_kind == LoadKind::kBootImageLinkTimePcRelative ||
        load_kind == LoadKind::kBootImageAddress ||
        load_kind == LoadKind::kBootImageInternTable ||
        load_kind == LoadKind::kJitTableAddress) {
      return false;
    }
    return true;
  }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
    return GetLoadKind() == LoadKind::kRuntimeCall;
  }

  bool CanBeNull() const OVERRIDE { return false; }
  bool CanThrow() const OVERRIDE { return NeedsEnvironment(); }

  static SideEffects SideEffectsForArchRuntimeCalls() {
    return SideEffects::CanTriggerGC();
  }

  void AddSpecialInput(HInstruction* special_input);

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(
        &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
  }

  DataType::Type GetType() const OVERRIDE {
    return DataType::Type::kReference;
  }

  DECLARE_INSTRUCTION(LoadString);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(LoadString);

 private:
  static constexpr size_t kFieldLoadKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldLoadKindSize =
      MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
  static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
  static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;

  void SetLoadKindInternal(LoadKind load_kind);

  // The special input is the HCurrentMethod for kRuntimeCall.
  // For other load kinds it's empty or possibly some architecture-specific instruction
  // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
  HUserRecord<HInstruction*> special_input_;

  dex::StringIndex string_index_;
  const DexFile& dex_file_;

  Handle<mirror::String> string_;
};
std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);

// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
inline void HLoadString::SetLoadKind(LoadKind load_kind) {
  // The load kind should be determined before inserting the instruction to the graph.
  DCHECK(GetBlock() == nullptr);
  DCHECK(GetEnvironment() == nullptr);
  DCHECK_EQ(GetLoadKind(), LoadKind::kRuntimeCall);
  SetPackedField<LoadKindField>(load_kind);
  if (load_kind != LoadKind::kRuntimeCall) {
    special_input_ = HUserRecord<HInstruction*>(nullptr);
  }
  if (!NeedsEnvironment()) {
    SetSideEffects(SideEffects::None());
  }
}

// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
  // The special input is used for PC-relative loads on some architectures,
  // including literal pool loads, which are PC-relative too.
  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
         GetLoadKind() == LoadKind::kBootImageAddress ||
         GetLoadKind() == LoadKind::kBootImageInternTable ||
         GetLoadKind() == LoadKind::kBssEntry) << GetLoadKind();
  // HLoadString::GetInputRecords() returns an empty array at this point,
  // so use the GetInputRecords() from the base class to set the input record.
  DCHECK(special_input_.GetInstruction() == nullptr);
  special_input_ = HUserRecord<HInstruction*>(special_input);
  special_input->AddUseAt(this, 0);
}
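
// Illustrative sketch (not part of this file): on architectures without a suitable PC-relative
// addressing mode, an architecture-specific preparation pass is expected to materialize a base
// address and attach it via AddSpecialInput(). Roughly, with GetOrCreateMethodAddressBase()
// being an assumed, hypothetical helper used only for this example:
//
//   if (load_string->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
//     HInstruction* base = GetOrCreateMethodAddressBase(load_string->GetBlock()->GetGraph());
//     load_string->AddSpecialInput(base);  // Becomes the single input record.
//   }
//
// After this, GetInputRecords() reports one input, so later passes and the register allocator
// see the dependency on the base address.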

/**
 * Performs an initialization check on its Class object input.
 */
class HClinitCheck FINAL : public HExpression<1> {
 public:
  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
      : HExpression(
            kClinitCheck,
            DataType::Type::kReference,
            SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
            dex_pc) {
    SetRawInputAt(0, constant);
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // May call runtime to initialize the class.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  HLoadClass* GetLoadClass() const {
    DCHECK(InputAt(0)->IsLoadClass());
    return InputAt(0)->AsLoadClass();
  }

  DECLARE_INSTRUCTION(ClinitCheck);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ClinitCheck);
};
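
// Illustrative sketch (not part of this file): a clinit check typically guards the first static
// access to a class that might not be initialized yet. For Java code such as
//
//     int x = Other.FIELD;   // Other may not be initialized at this point.
//
// the graph conceptually contains
//
//     cls = HLoadClass(Other)                        // load kind chosen later
//     chk = HClinitCheck(cls)                        // may call the runtime, hence the environment
//     x   = HStaticFieldGet(chk, Other.FIELD, ...)   // uses the checked class as its input
//
// If the class can be proven initialized, later passes may remove the HClinitCheck.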

class HStaticFieldGet FINAL : public HExpression<1> {
 public:
  HStaticFieldGet(HInstruction* cls,
                  ArtField* field,
                  DataType::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  uint32_t dex_pc)
      : HExpression(kStaticFieldGet,
                    field_type,
                    SideEffects::FieldReadOfType(field_type, is_volatile),
                    dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetRawInputAt(0, cls);
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HStaticFieldGet* other_get = other->AsStaticFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  void SetType(DataType::Type new_type) {
    DCHECK(DataType::IsIntegralType(GetType()));
    DCHECK(DataType::IsIntegralType(new_type));
    DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
    SetPackedField<TypeField>(new_type);
  }

  DECLARE_INSTRUCTION(StaticFieldGet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(StaticFieldGet);

 private:
  const FieldInfo field_info_;
};

class HStaticFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HStaticFieldSet(HInstruction* cls,
                  HInstruction* value,
                  ArtField* field,
                  DataType::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  uint32_t dex_pc)
      : HTemplateInstruction(kStaticFieldSet,
                             SideEffects::FieldWriteOfType(field_type, is_volatile),
                             dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, cls);
    SetRawInputAt(1, value);
  }

  bool IsClonable() const OVERRIDE { return true; }
  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(StaticFieldSet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(StaticFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;
};

class HUnresolvedInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HUnresolvedInstanceFieldGet(HInstruction* obj,
                              DataType::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HExpression(kUnresolvedInstanceFieldGet,
                    field_type,
                    SideEffects::AllExceptGCDependency(),
                    dex_pc),
        field_index_(field_index) {
    SetRawInputAt(0, obj);
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DataType::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(UnresolvedInstanceFieldGet);

 private:
  const uint32_t field_index_;
};

class HUnresolvedInstanceFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HUnresolvedInstanceFieldSet(HInstruction* obj,
                              HInstruction* value,
                              DataType::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HTemplateInstruction(kUnresolvedInstanceFieldSet,
                             SideEffects::AllExceptGCDependency(),
                             dex_pc),
        field_index_(field_index) {
    SetPackedField<FieldTypeField>(field_type);
    DCHECK_EQ(DataType::Kind(field_type), DataType::Kind(value->GetType()));
    SetRawInputAt(0, obj);
    SetRawInputAt(1, value);
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DataType::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(UnresolvedInstanceFieldSet);

 private:
  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
      kFieldFieldType + kFieldFieldTypeSize;
  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using FieldTypeField = BitField<DataType::Type, kFieldFieldType, kFieldFieldTypeSize>;

  const uint32_t field_index_;
};

class HUnresolvedStaticFieldGet FINAL : public HExpression<0> {
 public:
  HUnresolvedStaticFieldGet(DataType::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HExpression(kUnresolvedStaticFieldGet,
                    field_type,
                    SideEffects::AllExceptGCDependency(),
                    dex_pc),
        field_index_(field_index) {
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DataType::Type GetFieldType() const { return GetType(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(UnresolvedStaticFieldGet);

 private:
  const uint32_t field_index_;
};

class HUnresolvedStaticFieldSet FINAL : public HTemplateInstruction<1> {
 public:
  HUnresolvedStaticFieldSet(HInstruction* value,
                            DataType::Type field_type,
                            uint32_t field_index,
                            uint32_t dex_pc)
      : HTemplateInstruction(kUnresolvedStaticFieldSet,
                             SideEffects::AllExceptGCDependency(),
                             dex_pc),
        field_index_(field_index) {
    SetPackedField<FieldTypeField>(field_type);
    DCHECK_EQ(DataType::Kind(field_type), DataType::Kind(value->GetType()));
    SetRawInputAt(0, value);
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DataType::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
  uint32_t GetFieldIndex() const { return field_index_; }

  DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(UnresolvedStaticFieldSet);

 private:
  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
      kFieldFieldType + kFieldFieldTypeSize;
  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using FieldTypeField = BitField<DataType::Type, kFieldFieldType, kFieldFieldTypeSize>;

  const uint32_t field_index_;
};

// Implement the move-exception DEX instruction.
class HLoadException FINAL : public HExpression<0> {
 public:
  explicit HLoadException(uint32_t dex_pc = kNoDexPc)
      : HExpression(kLoadException, DataType::Type::kReference, SideEffects::None(), dex_pc) {
  }

  bool CanBeNull() const OVERRIDE { return false; }

  DECLARE_INSTRUCTION(LoadException);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(LoadException);
};

// Implicit part of move-exception which clears thread-local exception storage.
// Must not be removed because the runtime expects the TLS to get cleared.
class HClearException FINAL : public HTemplateInstruction<0> {
 public:
  explicit HClearException(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(kClearException, SideEffects::AllWrites(), dex_pc) {
  }

  DECLARE_INSTRUCTION(ClearException);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ClearException);
};

class HThrow FINAL : public HTemplateInstruction<1> {
 public:
  HThrow(HInstruction* exception, uint32_t dex_pc)
      : HTemplateInstruction(kThrow, SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, exception);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  bool AlwaysThrows() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Throw);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Throw);
};

/**
 * Implementation strategies for the code generator of an `HInstanceOf`
 * or `HCheckCast`.
 */
enum class TypeCheckKind {
  kUnresolvedCheck,       // Check against an unresolved type.
  kExactCheck,            // Can do a single class compare.
  kClassHierarchyCheck,   // Can just walk the super class chain.
  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
  kInterfaceCheck,        // No optimization yet when checking against an interface.
  kArrayObjectCheck,      // Can just check if the array is not primitive.
  kArrayCheck,            // No optimization yet when checking against a generic array.
  kLast = kArrayCheck
};

std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
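
// Illustrative sketch (not part of this file): the kind reflects what is statically known about
// the target type. The Java types below are assumptions chosen only for the example:
//
//     obj instanceof String                 -> kExactCheck          (final class, single compare)
//     obj instanceof java.util.ArrayList    -> kClassHierarchyCheck (walk the super class chain)
//     obj instanceof java.util.AbstractList -> kAbstractClassCheck  (abstract, start the walk one up)
//     obj instanceof Runnable               -> kInterfaceCheck
//     obj instanceof Object[]               -> kArrayObjectCheck    (check for a non-primitive array)
//     obj instanceof String[]               -> kArrayCheck
//
// kUnresolvedCheck is the fallback when the target class cannot be resolved at compile time.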

class HInstanceOf FINAL : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* target_class,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(kInstanceOf,
                    DataType::Type::kBool,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, target_class);
  }

  HLoadClass* GetTargetClass() const {
    HInstruction* load_class = InputAt(1);
    DCHECK(load_class->IsLoadClass());
    return load_class->AsLoadClass();
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    return CanCallRuntime(GetTypeCheckKind());
  }

  // Used only in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  static bool CanCallRuntime(TypeCheckKind check_kind) {
    // Mips currently does runtime calls for any other checks.
    return check_kind != TypeCheckKind::kExactCheck;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(InstanceOf);

 private:
  static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;
};

class HBoundType FINAL : public HExpression<1> {
 public:
  explicit HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(kBoundType, DataType::Type::kReference, SideEffects::None(), dex_pc),
        upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
    SetPackedFlag<kFlagUpperCanBeNull>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
    DCHECK_EQ(input->GetType(), DataType::Type::kReference);
    SetRawInputAt(0, input);
  }

  bool IsClonable() const OVERRIDE { return true; }

  // {Get,Set}Upper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
  void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);

  void SetCanBeNull(bool can_be_null) {
    DCHECK(GetUpperCanBeNull() || !can_be_null);
    SetPackedFlag<kFlagCanBeNull>(can_be_null);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }

  DECLARE_INSTRUCTION(BoundType);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(BoundType);

 private:
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then CanBeNull() cannot be true).
  static constexpr size_t kFlagUpperCanBeNull = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
  static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");

  // Encodes the uppermost class that this instruction can have. In other words,
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  //   if (x instanceof ClassX) {
  //     // upper_bound_ will be ClassX
  //   }
  ReferenceTypeInfo upper_bound_;
};

class HCheckCast FINAL : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* target_class,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(kCheckCast, SideEffects::CanTriggerGC(), dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, target_class);
  }

  HLoadClass* GetTargetClass() const {
    HInstruction* load_class = InputAt(1);
    DCHECK(load_class->IsLoadClass());
    return load_class->AsLoadClass();
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(CheckCast);

 private:
  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;
};

/**
 * @brief Memory barrier types (see "The JSR-133 Cookbook for Compiler Writers").
 * @details We define the combined barrier types that are actually required
 * by the Java Memory Model, rather than using exactly the terminology from
 * the JSR-133 cookbook.  These should, in many cases, be replaced by acquire/release
 * primitives.  Note that the JSR-133 cookbook generally does not deal with
 * store atomicity issues, and the recipes there are not always entirely sufficient.
 * The current recipe is as follows:
 * -# Use AnyStore ~= (LoadStore | StoreStore) ~= release barrier before volatile store.
 * -# Use AnyAny barrier after volatile store.  (StoreLoad is as expensive.)
 * -# Use LoadAny barrier ~= (LoadLoad | LoadStore) ~= acquire barrier after each volatile load.
 * -# Use StoreStore barrier after all stores but before return from any constructor whose
 *    class has final fields.
 * -# Use NTStoreStore to order non-temporal stores with respect to all later
 *    store-to-memory instructions.  Only generated together with non-temporal stores.
 */
enum MemBarrierKind {
  kAnyStore,
  kLoadAny,
  kStoreStore,
  kAnyAny,
  kNTStoreStore,
  kLastBarrierKind = kNTStoreStore
};
std::ostream& operator<<(std::ostream& os, const MemBarrierKind& kind);
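
// Illustrative sketch (not part of this file): applying the recipe above, a volatile store
// followed later by a volatile load would conceptually expand to
//
//     HMemoryBarrier(kAnyStore)      // release: order prior accesses before the store
//     HInstanceFieldSet(obj, f, v)   // the volatile store itself
//     HMemoryBarrier(kAnyAny)        // full fence after the volatile store
//     ...
//     HInstanceFieldGet(obj, f)      // the volatile load
//     HMemoryBarrier(kLoadAny)       // acquire: order later accesses after the load
//
// Code generators may instead emit native acquire/release instructions where available.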

class HMemoryBarrier FINAL : public HTemplateInstruction<0> {
 public:
  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(
            kMemoryBarrier,
            SideEffects::AllWritesAndReads(),  // Assume write/read on all fields/arrays.
            dex_pc) {
    SetPackedField<BarrierKindField>(barrier_kind);
  }

  bool IsClonable() const OVERRIDE { return true; }

  MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }

  DECLARE_INSTRUCTION(MemoryBarrier);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(MemoryBarrier);

 private:
  static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBarrierKindSize =
      MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
  static constexpr size_t kNumberOfMemoryBarrierPackedBits =
      kFieldBarrierKind + kFieldBarrierKindSize;
  static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;
};

// A constructor fence orders all prior stores to fields that could be accessed via a final field of
// the specified object(s), with respect to any subsequent store that might "publish"
// (i.e. make visible) the specified object to another thread.
//
// JLS 17.5.1 "Semantics of final fields" states that a freeze action happens
// for all final fields (that were set) at the end of the invoked constructor.
//
// The constructor fence models the freeze actions for the final fields of an object
// being constructed (semantically at the end of the constructor). Constructor fences
// have a per-object affinity; two separate objects being constructed get two separate
// constructor fences.
//
// (Note that if calling a super-constructor or forwarding to another constructor,
// the freezes would happen at the end of *that* constructor being invoked.)
//
// The memory model guarantees that when the object being constructed is "published" after
// constructor completion (i.e. escapes the current thread via a store), then any final field
// writes must be observable on other threads (once they observe that publication).
//
// Further, anything written before the freeze, and read by dereferencing through the final field,
// must also be visible (so a final object field could itself have an object with non-final fields;
// yet the freeze must also extend to them).
//
// Constructor example:
//
//     class HasFinal {
//        final int field;                              Optimizing IR for <init>()V:
//        HasFinal() {
//          field = 123;                                HInstanceFieldSet(this, HasFinal.field, 123)
//          // freeze(this.field);                      HConstructorFence(this)
//        }                                             HReturn
//     }
//
// HConstructorFence can serve double duty as a fence for new-instance/new-array allocations of
// already-initialized classes; in that case the allocation must act as a "default-initializer"
// of the object which effectively writes the class pointer "final field".
//
// For example, we can model default-initialization as roughly the equivalent of the following:
//
//     class Object {
//       private final Class header;
//     }
//
//  Java code:                                           Optimizing IR:
//
//     T new_instance<T>() {
//       Object obj = allocate_memory(T.class.size);     obj = HInvoke(art_quick_alloc_object, T)
//       obj.header = T.class;                           // header write is done by above call.
//       // freeze(obj.header)                           HConstructorFence(obj)
//       return (T)obj;
//     }
//
// See also:
// * CompilerDriver::RequiresConstructorBarrier
// * QuasiAtomic::ThreadFenceForConstructor
//
class HConstructorFence FINAL : public HVariableInputSizeInstruction {
                                  // A fence has variable inputs because the inputs can be removed
                                  // after prepare_for_register_allocation phase.
                                  // (TODO: In the future a fence could freeze multiple objects
                                  //        after merging two fences together.)
 public:
  // `fence_object` is the reference that needs to be protected for correct publication.
  //
  // It makes sense in the following situations:
  // * <init> constructors, it's the "this" parameter (i.e. HParameterValue, s.t. IsThis() == true).
  // * new-instance-like instructions, it's the return value (i.e. HNewInstance).
  //
  // After construction the `fence_object` becomes the 0th input.
  // This is not an input in a real sense, but just a convenient place to stash the information
  // about the associated object.
  HConstructorFence(HInstruction* fence_object,
                    uint32_t dex_pc,
                    ArenaAllocator* allocator)
    // We strongly suspect there is not a more accurate way to describe the fine-grained reordering
    // constraints described in the class header. We claim that these SideEffects constraints
    // enforce a superset of the real constraints.
    //
    // The ordering described above is conservatively modeled with SideEffects as follows:
    //
    // * To prevent reordering of the publication stores:
    // ----> "Reads of objects" is the initial SideEffect.
    // * For every primitive final field store in the constructor:
    // ----> Union that field's type as a read (e.g. "Read of T") into the SideEffect.
    // * If there are any stores to reference final fields in the constructor:
    // ----> Use a more conservative "AllReads" SideEffect because any stores to any references
    //       that are reachable from `fence_object` also need to be prevented for reordering
    //       (and we do not want to do alias analysis to figure out what those stores are).
    //
    // In the implementation, this initially starts out as an "all reads" side effect; this is an
    // even more conservative approach than the one described above, and prevents all of the
    // above reordering without analyzing any of the instructions in the constructor.
    //
    // If in a later phase we discover that there are no writes to reference final fields,
    // we can refine the side effect to a smaller set of type reads (see above constraints).
      : HVariableInputSizeInstruction(kConstructorFence,
                                      SideEffects::AllReads(),
                                      dex_pc,
                                      allocator,
                                      /* number_of_inputs */ 1,
                                      kArenaAllocConstructorFenceInputs) {
    DCHECK(fence_object != nullptr);
    SetRawInputAt(0, fence_object);
  }

  // The object associated with this constructor fence.
  //
  // (Note: This will be null after the prepare_for_register_allocation phase,
  // as all constructor fence inputs are removed there.)
  HInstruction* GetFenceObject() const {
    return InputAt(0);
  }

  // Find all the HConstructorFence uses (`fence_use`) for `this` and:
  // - Delete `fence_use` from `this`'s use list.
  // - Delete `this` from `fence_use`'s inputs list.
  // - If the `fence_use` is dead, remove it from the graph.
  //
  // A fence is considered dead once it no longer has any uses
  // and all of the inputs are dead.
  //
  // This must *not* be called during/after prepare_for_register_allocation,
  // because that removes all the inputs to the fences but the fence is actually
  // still considered live.
  //
  // Returns how many HConstructorFence instructions were removed from the graph.
  static size_t RemoveConstructorFences(HInstruction* instruction);

  // Combine all inputs of `this` and `other` instruction and remove
  // `other` from the graph.
  //
  // Inputs are unique after the merge.
  //
  // Requirement: `this` must not be the same as `other`.
  void Merge(HConstructorFence* other);

  // Check if this constructor fence is protecting
  // an HNewInstance or HNewArray that is also the immediate
  // predecessor of `this`.
  //
  // If `ignore_inputs` is true, then the immediate predecessor doesn't need
  // to be one of the inputs of `this`.
  //
  // Returns the associated HNewArray or HNewInstance,
  // or null otherwise.
  HInstruction* GetAssociatedAllocation(bool ignore_inputs = false);

  DECLARE_INSTRUCTION(ConstructorFence);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(ConstructorFence);
};

class HMonitorOperation FINAL : public HTemplateInstruction<1> {
 public:
  enum class OperationKind {
    kEnter,
    kExit,
    kLast = kExit
  };

  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
    : HTemplateInstruction(
          kMonitorOperation,
          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
          dex_pc) {
    SetPackedField<OperationKindField>(kind);
    SetRawInputAt(0, object);
  }

  // Instruction may go into runtime, so we need an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE {
    // Verifier guarantees that monitor-exit cannot throw.
    // This is important because it allows the HGraphBuilder to remove
    // a dead throw-catch loop generated for `synchronized` blocks/methods.
    return IsEnter();
  }

  OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
  bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }

  DECLARE_INSTRUCTION(MonitorOperation);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(MonitorOperation);

 private:
  static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldOperationKindSize =
      MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
  static constexpr size_t kNumberOfMonitorOperationPackedBits =
      kFieldOperationKind + kFieldOperationKindSize;
  static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
};

class HSelect FINAL : public HExpression<3> {
 public:
  HSelect(HInstruction* condition,
          HInstruction* true_value,
          HInstruction* false_value,
          uint32_t dex_pc)
      : HExpression(kSelect, HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
    DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));

    // First input must be `true_value` or `false_value` to allow codegens to
    // use the SameAsFirstInput allocation policy. We make it `false_value`, so
    // that architectures which implement HSelect as a conditional move also
    // will not need to invert the condition.
    SetRawInputAt(0, false_value);
    SetRawInputAt(1, true_value);
    SetRawInputAt(2, condition);
  }

  bool IsClonable() const OVERRIDE { return true; }
  HInstruction* GetFalseValue() const { return InputAt(0); }
  HInstruction* GetTrueValue() const { return InputAt(1); }
  HInstruction* GetCondition() const { return InputAt(2); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool CanBeNull() const OVERRIDE {
    return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
  }

  DECLARE_INSTRUCTION(Select);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(Select);
};
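
// Illustrative sketch (not part of this file): a select-forming pass (e.g. select_generator)
// replaces a simple diamond such as `x = cond ? a : b` with an HSelect. Given the input order
// documented in the constructor above, the operands are accessed as:
//
//     HSelect* select = ...;
//     HInstruction* cond = select->GetCondition();   // input 2
//     HInstruction* a    = select->GetTrueValue();   // input 1
//     HInstruction* b    = select->GetFalseValue();  // input 0, the SameAsFirstInput operand
//
// so a codegen emitting a conditional move can overwrite input 0 with input 1 when the
// condition holds, without having to invert the condition.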

class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
 public:
  MoveOperands(Location source,
               Location destination,
               DataType::Type type,
               HInstruction* instruction)
      : source_(source), destination_(destination), type_(type), instruction_(instruction) {}

  Location GetSource() const { return source_; }
  Location GetDestination() const { return destination_; }

  void SetSource(Location value) { source_ = value; }
  void SetDestination(Location value) { destination_ = value; }

  // The parallel move resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  Location MarkPending() {
    DCHECK(!IsPending());
    Location dest = destination_;
    destination_ = Location::NoLocation();
    return dest;
  }

  void ClearPending(Location dest) {
    DCHECK(IsPending());
    destination_ = dest;
  }

  bool IsPending() const {
    DCHECK(source_.IsValid() || destination_.IsInvalid());
    return destination_.IsInvalid() && source_.IsValid();
  }

  // True if this blocks a move from the given location.
  bool Blocks(Location loc) const {
    return !IsEliminated() && source_.OverlapsWith(loc);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
  }

  // We clear both operands to indicate a move that's been eliminated.
  void Eliminate() {
    source_ = destination_ = Location::NoLocation();
  }

  bool IsEliminated() const {
    DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
    return source_.IsInvalid();
  }

  DataType::Type GetType() const { return type_; }

  bool Is64BitMove() const {
    return DataType::Is64BitType(type_);
  }

  HInstruction* GetInstruction() const { return instruction_; }

 private:
  Location source_;
  Location destination_;
  // The type this move is for.
  DataType::Type type_;
  // The instruction this move is associated with. Null when this move is
  // for moving an input into the expected locations of its user (including a phi user).
  // This is only used in debug mode, to ensure we do not connect interval siblings
  // in the same parallel move.
  HInstruction* instruction_;
};

std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);
7289 
7290 static constexpr size_t kDefaultNumberOfMoves = 4;
7291 
7292 class HParallelMove FINAL : public HTemplateInstruction<0> {
7293  public:
7294   explicit HParallelMove(ArenaAllocator* allocator, uint32_t dex_pc = kNoDexPc)
HTemplateInstruction(kParallelMove,SideEffects::None (),dex_pc)7295       : HTemplateInstruction(kParallelMove, SideEffects::None(), dex_pc),
7296         moves_(allocator->Adapter(kArenaAllocMoveOperands)) {
7297     moves_.reserve(kDefaultNumberOfMoves);
7298   }
7299 
AddMove(Location source,Location destination,DataType::Type type,HInstruction * instruction)7300   void AddMove(Location source,
7301                Location destination,
7302                DataType::Type type,
7303                HInstruction* instruction) {
7304     DCHECK(source.IsValid());
7305     DCHECK(destination.IsValid());
7306     if (kIsDebugBuild) {
7307       if (instruction != nullptr) {
7308         for (const MoveOperands& move : moves_) {
7309           if (move.GetInstruction() == instruction) {
7310             // Special case the situation where the move is for the spill slot
7311             // of the instruction.
7312             if ((GetPrevious() == instruction)
7313                 || ((GetPrevious() == nullptr)
7314                     && instruction->IsPhi()
7315                     && instruction->GetBlock() == GetBlock())) {
7316               DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
7317                   << "Doing parallel moves for the same instruction.";
7318             } else {
7319               DCHECK(false) << "Doing parallel moves for the same instruction.";
7320             }
7321           }
7322         }
7323       }
7324       for (const MoveOperands& move : moves_) {
7325         DCHECK(!destination.OverlapsWith(move.GetDestination()))
7326             << "Overlapped destination for two moves in a parallel move: "
7327             << move.GetSource() << " ==> " << move.GetDestination() << " and "
7328             << source << " ==> " << destination;
7329       }
7330     }
7331     moves_.emplace_back(source, destination, type, instruction);
7332   }
7333 
MoveOperandsAt(size_t index)7334   MoveOperands* MoveOperandsAt(size_t index) {
7335     return &moves_[index];
7336   }
7337 
NumMoves()7338   size_t NumMoves() const { return moves_.size(); }
7339 
7340   DECLARE_INSTRUCTION(ParallelMove);
7341 
7342  protected:
7343   DEFAULT_COPY_CONSTRUCTOR(ParallelMove);
7344 
7345  private:
7346   ArenaVector<MoveOperands> moves_;
7347 };
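
// Usage sketch (illustrative only; `allocator` and `block` stand for an
// ArenaAllocator* and an HBasicBlock* available at the call site):
//
//   HParallelMove* moves = new (allocator) HParallelMove(allocator);
//   moves->AddMove(Location::RegisterLocation(0),
//                  Location::RegisterLocation(1),
//                  DataType::Type::kInt32,
//                  /* instruction= */ nullptr);
//   block->InsertInstructionBefore(moves, block->GetLastInstruction());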

// This instruction computes an intermediate address pointing in the 'middle' of an object. The
// result pointer cannot be handled by GC, so extra care is taken to make sure that this value is
// never used across anything that can trigger GC.
// The result of this instruction is not a pointer in the sense of `DataType::Type::kReference`,
// so we represent it with the type `DataType::Type::kInt32`.
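//
// For example (illustrative; this is how backends such as ARM64 typically use it),
// an array access can hoist the computation
//   address = array + data_offset
// into a single HIntermediateAddress and then have several HArrayGet/HArraySet
// instructions index off `address`, provided no GC point can occur in between.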
class HIntermediateAddress FINAL : public HExpression<2> {
 public:
  HIntermediateAddress(HInstruction* base_address, HInstruction* offset, uint32_t dex_pc)
      : HExpression(kIntermediateAddress,
                    DataType::Type::kInt32,
                    SideEffects::DependsOnGC(),
                    dex_pc) {
    DCHECK_EQ(DataType::Size(DataType::Type::kInt32),
              DataType::Size(DataType::Type::kReference))
        << "kPrimInt and kPrimNot have different sizes.";
    SetRawInputAt(0, base_address);
    SetRawInputAt(1, offset);
  }

  bool IsClonable() const OVERRIDE { return true; }
  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool IsActualObject() const OVERRIDE { return false; }

  HInstruction* GetBaseAddress() const { return InputAt(0); }
  HInstruction* GetOffset() const { return InputAt(1); }

  DECLARE_INSTRUCTION(IntermediateAddress);

 protected:
  DEFAULT_COPY_CONSTRUCTOR(IntermediateAddress);
};


}  // namespace art

#include "nodes_vector.h"

#if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
#include "nodes_shared.h"
#endif
#ifdef ART_ENABLE_CODEGEN_mips
#include "nodes_mips.h"
#endif
#ifdef ART_ENABLE_CODEGEN_x86
#include "nodes_x86.h"
#endif

namespace art {

class OptimizingCompilerStats;

class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph, OptimizingCompilerStats* stats = nullptr)
      : stats_(stats),
        graph_(graph) {}
  virtual ~HGraphVisitor() {}

  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 protected:
  OptimizingCompilerStats* stats_;

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};
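
// Usage sketch (illustrative only; `CountAddsVisitor` is a hypothetical pass,
// not part of this header): an optimization derives from HGraphVisitor and
// overrides the Visit functions it cares about.
//
//   class CountAddsVisitor : public HGraphVisitor {
//    public:
//     explicit CountAddsVisitor(HGraph* graph) : HGraphVisitor(graph), count_(0) {}
//     void VisitAdd(HAdd* add ATTRIBUTE_UNUSED) OVERRIDE { ++count_; }
//     size_t GetCount() const { return count_; }
//
//    private:
//     size_t count_;
//   };
//
//   CountAddsVisitor visitor(graph);
//   visitor.VisitReversePostOrder();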

class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph, OptimizingCompilerStats* stats = nullptr)
      : HGraphVisitor(graph, stats) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
};
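
// Usage note (illustrative): because each generated Visit##name forwards to the
// Visit function of its super class, a pass deriving from HGraphDelegateVisitor
// can intercept a whole family of instructions with a single override, e.g.
//
//   void VisitBinaryOperation(HBinaryOperation* bin_op) OVERRIDE {
//     // Reached for HAdd, HSub, HMul, ... unless a more specific
//     // Visit##name is also overridden.
//   }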

// Create a clone of the instruction, insert it into the graph, replace the old
// instruction with the clone and remove the old instruction.
HInstruction* ReplaceInstrOrPhiByClone(HInstruction* instr);

// Create a clone for each clonable instruction/phi and replace the original with the clone.
//
// Used for testing the individual instruction cloner.
class CloneAndReplaceInstructionVisitor : public HGraphDelegateVisitor {
 public:
  explicit CloneAndReplaceInstructionVisitor(HGraph* graph)
      : HGraphDelegateVisitor(graph), instr_replaced_by_clones_count_(0) {}

  void VisitInstruction(HInstruction* instruction) OVERRIDE {
    if (instruction->IsClonable()) {
      ReplaceInstrOrPhiByClone(instruction);
      instr_replaced_by_clones_count_++;
    }
  }

  size_t GetInstrReplacedByClonesCount() const { return instr_replaced_by_clones_count_; }

 private:
  size_t instr_replaced_by_clones_count_;

  DISALLOW_COPY_AND_ASSIGN(CloneAndReplaceInstructionVisitor);
};

// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The blocks are iterated in increasing order of their
// block id.
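//
// Usage sketch (illustrative; `loop_info` stands for an HLoopInformation
// reference available at the call site):
//
//   for (HBlocksInLoopIterator it(loop_info); !it.Done(); it.Advance()) {
//     HBasicBlock* block = it.Current();
//     ...
//   }
//
// HBlocksInLoopReversePostOrderIterator below is used the same way.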
class HBlocksInLoopIterator : public ValueObject {
 public:
  explicit HBlocksInLoopIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
        index_(0) {
    if (!blocks_in_loop_.IsBitSet(index_)) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  void Advance() {
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(index_)) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
};

// Iterator over the blocks that are part of the loop. Includes blocks part
// of an inner loop. The order in which the blocks are iterated is reverse
// post order.
class HBlocksInLoopReversePostOrderIterator : public ValueObject {
 public:
  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
        index_(0) {
    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  void Advance() {
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
};

// Returns the int64_t value of a properly typed constant.
inline int64_t Int64FromConstant(HConstant* constant) {
  if (constant->IsIntConstant()) {
    return constant->AsIntConstant()->GetValue();
  } else if (constant->IsLongConstant()) {
    return constant->AsLongConstant()->GetValue();
  } else {
    DCHECK(constant->IsNullConstant()) << constant->DebugName();
    return 0;
  }
}

// Returns true iff instruction is an integral constant (and sets value on success).
inline bool IsInt64AndGet(HInstruction* instruction, /*out*/ int64_t* value) {
  if (instruction->IsIntConstant()) {
    *value = instruction->AsIntConstant()->GetValue();
    return true;
  } else if (instruction->IsLongConstant()) {
    *value = instruction->AsLongConstant()->GetValue();
    return true;
  } else if (instruction->IsNullConstant()) {
    *value = 0;
    return true;
  }
  return false;
}

// Returns true iff instruction is the given integral constant.
inline bool IsInt64Value(HInstruction* instruction, int64_t value) {
  int64_t val = 0;
  return IsInt64AndGet(instruction, &val) && val == value;
}

// Returns true iff instruction is a zero bit pattern.
inline bool IsZeroBitPattern(HInstruction* instruction) {
  return instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern();
}
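
// Usage sketch for the constant helpers above (illustrative only):
//
//   int64_t value = 0;
//   if (IsInt64AndGet(instruction->InputAt(1), /*out*/ &value) && value == 1) {
//     // The right-hand operand is the constant 1, so e.g. `x * 1` can be
//     // simplified to `x`.
//   }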

#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  inline bool HInstruction::Is##type() const { return GetKind() == k##type; }  \
  inline const H##type* HInstruction::As##type() const {                       \
    return Is##type() ? down_cast<const H##type*>(this) : nullptr;             \
  }                                                                            \
  inline H##type* HInstruction::As##type() {                                   \
    return Is##type() ? static_cast<H##type*>(this) : nullptr;                 \
  }

  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK
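
// The generated helpers are used as follows (illustrative):
//
//   if (instruction->IsAdd()) {
//     HAdd* add = instruction->AsAdd();  // Non-null here.
//     ...
//   }
//
// As##type() returns nullptr when the instruction is not of the requested kind.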

// Create space in `blocks` for adding `number_of_new_blocks` entries
// after the entry at index `after`. Blocks following `after` are moved accordingly.
inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
                        size_t number_of_new_blocks,
                        size_t after) {
  DCHECK_LT(after, blocks->size());
  size_t old_size = blocks->size();
  size_t new_size = old_size + number_of_new_blocks;
  blocks->resize(new_size);
  std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
}
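
// Worked example (illustrative): with blocks == [B0, B1, B2, B3],
// MakeRoomFor(&blocks, 2, 1) resizes the vector to six entries and shifts B2
// and B3 two slots to the right, giving [B0, B1, <slot>, <slot>, B2, B3].
// The two freed slots (which still hold stale copies of B2 and B3) are left
// for the caller to overwrite with the new blocks.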

/*
 * Hunt "under the hood" of array lengths (leading to array references),
 * null checks (also leading to array references), and new arrays
 * (leading to the actual length). This makes it more likely that related
 * instructions become directly comparable.
 */
inline HInstruction* HuntForDeclaration(HInstruction* instruction) {
  while (instruction->IsArrayLength() ||
         instruction->IsNullCheck() ||
         instruction->IsNewArray()) {
    instruction = instruction->IsNewArray()
        ? instruction->AsNewArray()->GetLength()
        : instruction->InputAt(0);
  }
  return instruction;
}
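
// For example (illustrative): for an HArrayLength whose input is an
// HNullCheck of an HNewArray created with length `n`, HuntForDeclaration
// walks ArrayLength -> NullCheck -> NewArray and returns `n`, so that a
// bounds-check analysis can compare the length directly against `n`.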

void RemoveEnvironmentUses(HInstruction* instruction);
bool HasEnvironmentUsedByOthers(HInstruction* instruction);
void ResetEnvironmentInputRecords(HInstruction* instruction);

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_NODES_H_